summaryrefslogtreecommitdiff
path: root/spec/models
diff options
context:
space:
mode:
Diffstat (limited to 'spec/models')
-rw-r--r--spec/models/alert_management/alert_spec.rb7
-rw-r--r--spec/models/alert_management/http_integration_spec.rb48
-rw-r--r--spec/models/analytics/devops_adoption/segment_selection_spec.rb69
-rw-r--r--spec/models/application_setting_spec.rb13
-rw-r--r--spec/models/bulk_imports/entity_spec.rb64
-rw-r--r--spec/models/bulk_imports/failure_spec.rb17
-rw-r--r--spec/models/ci/bridge_spec.rb16
-rw-r--r--spec/models/ci/build_dependencies_spec.rb202
-rw-r--r--spec/models/ci/build_spec.rb194
-rw-r--r--spec/models/ci/build_trace_chunk_spec.rb2
-rw-r--r--spec/models/ci/build_trace_chunks/fog_spec.rb46
-rw-r--r--spec/models/ci/job_artifact_spec.rb16
-rw-r--r--spec/models/ci/pipeline_spec.rb575
-rw-r--r--spec/models/clusters/agent_spec.rb12
-rw-r--r--spec/models/clusters/applications/helm_spec.rb40
-rw-r--r--spec/models/clusters/cluster_spec.rb6
-rw-r--r--spec/models/clusters/platforms/kubernetes_spec.rb399
-rw-r--r--spec/models/concerns/cache_markdown_field_spec.rb93
-rw-r--r--spec/models/concerns/case_sensitivity_spec.rb12
-rw-r--r--spec/models/concerns/ignorable_columns_spec.rb8
-rw-r--r--spec/models/concerns/mentionable_spec.rb36
-rw-r--r--spec/models/concerns/project_features_compatibility_spec.rb2
-rw-r--r--spec/models/concerns/routable_spec.rb150
-rw-r--r--spec/models/concerns/token_authenticatable_spec.rb4
-rw-r--r--spec/models/container_repository_spec.rb2
-rw-r--r--spec/models/custom_emoji_spec.rb9
-rw-r--r--spec/models/cycle_analytics/code_spec.rb2
-rw-r--r--spec/models/cycle_analytics/issue_spec.rb2
-rw-r--r--spec/models/cycle_analytics/plan_spec.rb2
-rw-r--r--spec/models/cycle_analytics/review_spec.rb2
-rw-r--r--spec/models/cycle_analytics/staging_spec.rb2
-rw-r--r--spec/models/cycle_analytics/test_spec.rb2
-rw-r--r--spec/models/dependency_proxy/manifest_spec.rb52
-rw-r--r--spec/models/dependency_proxy/registry_spec.rb7
-rw-r--r--spec/models/deployment_spec.rb86
-rw-r--r--spec/models/diff_note_spec.rb20
-rw-r--r--spec/models/environment_spec.rb173
-rw-r--r--spec/models/experiment_spec.rb113
-rw-r--r--spec/models/experiment_subject_spec.rb54
-rw-r--r--spec/models/exported_protected_branch_spec.rb21
-rw-r--r--spec/models/group_import_state_spec.rb20
-rw-r--r--spec/models/group_spec.rb69
-rw-r--r--spec/models/identity_spec.rb30
-rw-r--r--spec/models/instance_configuration_spec.rb16
-rw-r--r--spec/models/issue_spec.rb19
-rw-r--r--spec/models/label_priority_spec.rb6
-rw-r--r--spec/models/merge_request_diff_spec.rb212
-rw-r--r--spec/models/merge_request_reviewer_spec.rb2
-rw-r--r--spec/models/merge_request_spec.rb221
-rw-r--r--spec/models/namespace_onboarding_action_spec.rb53
-rw-r--r--spec/models/namespace_spec.rb1
-rw-r--r--spec/models/note_spec.rb4
-rw-r--r--spec/models/packages/package_file_spec.rb4
-rw-r--r--spec/models/packages/package_spec.rb73
-rw-r--r--spec/models/pages/lookup_path_spec.rb63
-rw-r--r--spec/models/project_feature_spec.rb2
-rw-r--r--spec/models/project_feature_usage_spec.rb4
-rw-r--r--spec/models/project_repository_storage_move_spec.rb94
-rw-r--r--spec/models/project_services/datadog_service_spec.rb179
-rw-r--r--spec/models/project_services/jenkins_service_spec.rb255
-rw-r--r--spec/models/project_services/jira_service_spec.rb13
-rw-r--r--spec/models/project_spec.rb160
-rw-r--r--spec/models/project_statistics_spec.rb11
-rw-r--r--spec/models/protected_branch/push_access_level_spec.rb55
-rw-r--r--spec/models/raw_usage_data_spec.rb24
-rw-r--r--spec/models/release_highlight_spec.rb181
-rw-r--r--spec/models/repository_spec.rb2
-rw-r--r--spec/models/resource_label_event_spec.rb8
-rw-r--r--spec/models/resource_state_event_spec.rb8
-rw-r--r--spec/models/sentry_issue_spec.rb6
-rw-r--r--spec/models/service_spec.rb13
-rw-r--r--spec/models/snippet_repository_storage_move_spec.rb13
-rw-r--r--spec/models/snippet_spec.rb39
-rw-r--r--spec/models/suggestion_spec.rb6
-rw-r--r--spec/models/system_note_metadata_spec.rb12
-rw-r--r--spec/models/terraform/state_spec.rb113
-rw-r--r--spec/models/timelog_spec.rb8
-rw-r--r--spec/models/user_spec.rb276
-rw-r--r--spec/models/wiki_page/meta_spec.rb9
-rw-r--r--spec/models/wiki_page/slug_spec.rb3
-rw-r--r--spec/models/zoom_meeting_spec.rb14
81 files changed, 4072 insertions, 809 deletions
diff --git a/spec/models/alert_management/alert_spec.rb b/spec/models/alert_management/alert_spec.rb
index b57062b5fc1..80a45b1c1be 100644
--- a/spec/models/alert_management/alert_spec.rb
+++ b/spec/models/alert_management/alert_spec.rb
@@ -99,7 +99,8 @@ RSpec.describe AlertManagement::Alert do
describe 'fingerprint' do
let_it_be(:fingerprint) { 'fingerprint' }
- let(:new_alert) { build(:alert_management_alert, fingerprint: fingerprint, project: project) }
+ let_it_be(:project3, refind: true) { create(:project) }
+ let(:new_alert) { build(:alert_management_alert, fingerprint: fingerprint, project: project3) }
subject { new_alert }
@@ -107,7 +108,7 @@ RSpec.describe AlertManagement::Alert do
context 'same project, various states' do
using RSpec::Parameterized::TableSyntax
- let_it_be(:existing_alert) { create(:alert_management_alert, fingerprint: fingerprint, project: project) }
+ let_it_be(:existing_alert, refind: true) { create(:alert_management_alert, fingerprint: fingerprint, project: project3) }
# We are only validating uniqueness for non-resolved alerts
where(:existing_status, :new_status, :valid) do
@@ -130,7 +131,7 @@ RSpec.describe AlertManagement::Alert do
end
with_them do
- let(:new_alert) { build(:alert_management_alert, new_status, fingerprint: fingerprint, project: project) }
+ let(:new_alert) { build(:alert_management_alert, new_status, fingerprint: fingerprint, project: project3) }
before do
existing_alert.change_status_to(existing_status)
diff --git a/spec/models/alert_management/http_integration_spec.rb b/spec/models/alert_management/http_integration_spec.rb
index a3e7b47c116..910df51801a 100644
--- a/spec/models/alert_management/http_integration_spec.rb
+++ b/spec/models/alert_management/http_integration_spec.rb
@@ -31,6 +31,54 @@ RSpec.describe AlertManagement::HttpIntegration do
it { is_expected.not_to validate_uniqueness_of(:endpoint_identifier).scoped_to(:project_id, :active) }
end
+
+ context 'payload_attribute_mapping' do
+ subject { build(:alert_management_http_integration, payload_attribute_mapping: attribute_mapping) }
+
+ context 'with valid JSON schema' do
+ let(:attribute_mapping) do
+ {
+ title: { path: %w(a b c), type: 'string' },
+ description: { path: %w(a), type: 'string' }
+ }
+ end
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'with invalid JSON schema' do
+ shared_examples 'is invalid record' do
+ it do
+ expect(subject).to be_invalid
+ expect(subject.errors.messages[:payload_attribute_mapping]).to eq(['must be a valid json schema'])
+ end
+ end
+
+ context 'when property is not an object' do
+ let(:attribute_mapping) do
+ { title: 'That is not a valid schema' }
+ end
+
+ it_behaves_like 'is invalid record'
+ end
+
+ context 'when property missing required attributes' do
+ let(:attribute_mapping) do
+ { title: { type: 'string' } }
+ end
+
+ it_behaves_like 'is invalid record'
+ end
+
+ context 'when property has extra attributes' do
+ let(:attribute_mapping) do
+ { title: { path: %w(a b c), type: 'string', extra: 'property' } }
+ end
+
+ it_behaves_like 'is invalid record'
+ end
+ end
+ end
end
describe '#token' do
diff --git a/spec/models/analytics/devops_adoption/segment_selection_spec.rb b/spec/models/analytics/devops_adoption/segment_selection_spec.rb
deleted file mode 100644
index 5866cbaa48e..00000000000
--- a/spec/models/analytics/devops_adoption/segment_selection_spec.rb
+++ /dev/null
@@ -1,69 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Analytics::DevopsAdoption::SegmentSelection, type: :model do
- subject { build(:devops_adoption_segment_selection, :project) }
-
- describe 'validation' do
- let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project) }
-
- it { is_expected.to validate_presence_of(:segment) }
-
- context do
- subject { create(:devops_adoption_segment_selection, :project, project: project) }
-
- it { is_expected.to validate_uniqueness_of(:project_id).scoped_to(:segment_id) }
- end
-
- context do
- subject { create(:devops_adoption_segment_selection, :group, group: group) }
-
- it { is_expected.to validate_uniqueness_of(:group_id).scoped_to(:segment_id) }
- end
-
- it 'project is required' do
- selection = build(:devops_adoption_segment_selection, project: nil, group: nil)
-
- selection.validate
-
- expect(selection.errors).to have_key(:project)
- end
-
- it 'project is not required when a group is given' do
- selection = build(:devops_adoption_segment_selection, :group, group: group)
-
- expect(selection).to be_valid
- end
-
- it 'does not allow group to be set when project is present' do
- selection = build(:devops_adoption_segment_selection)
-
- selection.group = group
- selection.project = project
-
- selection.validate
-
- expect(selection.errors[:group]).to eq([s_('DevopsAdoptionSegmentSelection|The selection cannot be configured for a project and for a group at the same time')])
- end
-
- context 'limit the number of segment selections' do
- let_it_be(:segment) { create(:devops_adoption_segment) }
-
- subject { build(:devops_adoption_segment_selection, segment: segment, project: project) }
-
- before do
- create(:devops_adoption_segment_selection, :project, segment: segment)
-
- stub_const("#{described_class}::ALLOWED_SELECTIONS_PER_SEGMENT", 1)
- end
-
- it 'shows validation error' do
- subject.validate
-
- expect(subject.errors[:segment]).to eq([s_('DevopsAdoptionSegment|The maximum number of selections has been reached')])
- end
- end
- end
-end
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index efe62a1d086..ea03cbc3706 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -259,7 +259,18 @@ RSpec.describe ApplicationSetting do
it { is_expected.to allow_value('access-key-id-12').for(:eks_access_key_id) }
it { is_expected.not_to allow_value('a' * 129).for(:eks_access_key_id) }
it { is_expected.not_to allow_value('short-key').for(:eks_access_key_id) }
- it { is_expected.not_to allow_value(nil).for(:eks_access_key_id) }
+ it { is_expected.to allow_value(nil).for(:eks_access_key_id) }
+
+ it { is_expected.to allow_value('secret-access-key').for(:eks_secret_access_key) }
+ it { is_expected.to allow_value(nil).for(:eks_secret_access_key) }
+ end
+
+ context 'access key is specified' do
+ let(:eks_enabled) { true }
+
+ before do
+ setting.eks_access_key_id = '123456789012'
+ end
it { is_expected.to allow_value('secret-access-key').for(:eks_secret_access_key) }
it { is_expected.not_to allow_value(nil).for(:eks_secret_access_key) }
diff --git a/spec/models/bulk_imports/entity_spec.rb b/spec/models/bulk_imports/entity_spec.rb
index ad6e3ec6f30..0732b671729 100644
--- a/spec/models/bulk_imports/entity_spec.rb
+++ b/spec/models/bulk_imports/entity_spec.rb
@@ -82,4 +82,68 @@ RSpec.describe BulkImports::Entity, type: :model do
end
end
end
+
+ describe "#update_tracker_for" do
+ let(:entity) { create(:bulk_import_entity) }
+
+ it "inserts new tracker when it does not exist" do
+ expect do
+ entity.update_tracker_for(relation: :relation, has_next_page: false)
+ end.to change(BulkImports::Tracker, :count).by(1)
+
+ tracker = entity.trackers.last
+
+ expect(tracker.relation).to eq('relation')
+ expect(tracker.has_next_page).to eq(false)
+ expect(tracker.next_page).to eq(nil)
+ end
+
+ it "updates the tracker if it already exist" do
+ create(
+ :bulk_import_tracker,
+ relation: :relation,
+ has_next_page: false,
+ entity: entity
+ )
+
+ expect do
+ entity.update_tracker_for(relation: :relation, has_next_page: true, next_page: 'nextPage')
+ end.not_to change(BulkImports::Tracker, :count)
+
+ tracker = entity.trackers.last
+
+ expect(tracker.relation).to eq('relation')
+ expect(tracker.has_next_page).to eq(true)
+ expect(tracker.next_page).to eq('nextPage')
+ end
+ end
+
+ describe "#has_next_page?" do
+ it "queries for the given relation if it has more pages to be fetched" do
+ entity = create(:bulk_import_entity)
+ create(
+ :bulk_import_tracker,
+ relation: :relation,
+ has_next_page: false,
+ entity: entity
+ )
+
+ expect(entity.has_next_page?(:relation)).to eq(false)
+ end
+ end
+
+ describe "#next_page_for" do
+ it "queries for the next page of the given relation" do
+ entity = create(:bulk_import_entity)
+ create(
+ :bulk_import_tracker,
+ relation: :relation,
+ has_next_page: false,
+ next_page: 'nextPage',
+ entity: entity
+ )
+
+ expect(entity.next_page_for(:relation)).to eq('nextPage')
+ end
+ end
end
diff --git a/spec/models/bulk_imports/failure_spec.rb b/spec/models/bulk_imports/failure_spec.rb
new file mode 100644
index 00000000000..cde62659a48
--- /dev/null
+++ b/spec/models/bulk_imports/failure_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Failure, type: :model do
+ describe 'associations' do
+ it { is_expected.to belong_to(:entity).required }
+ end
+
+ describe 'validations' do
+ before do
+ create(:bulk_import_failure)
+ end
+
+ it { is_expected.to validate_presence_of(:entity) }
+ end
+end
diff --git a/spec/models/ci/bridge_spec.rb b/spec/models/ci/bridge_spec.rb
index 51e82061d97..11dcecd50ca 100644
--- a/spec/models/ci/bridge_spec.rb
+++ b/spec/models/ci/bridge_spec.rb
@@ -330,14 +330,6 @@ RSpec.describe Ci::Bridge do
subject { build_stubbed(:ci_bridge, :manual).playable? }
it { is_expected.to be_truthy }
-
- context 'when FF ci_manual_bridges is disabled' do
- before do
- stub_feature_flags(ci_manual_bridges: false)
- end
-
- it { is_expected.to be_falsey }
- end
end
context 'when build is not a manual action' do
@@ -352,14 +344,6 @@ RSpec.describe Ci::Bridge do
subject { build_stubbed(:ci_bridge, :manual).action? }
it { is_expected.to be_truthy }
-
- context 'when FF ci_manual_bridges is disabled' do
- before do
- stub_feature_flags(ci_manual_bridges: false)
- end
-
- it { is_expected.to be_falsey }
- end
end
context 'when build is not a manual action' do
diff --git a/spec/models/ci/build_dependencies_spec.rb b/spec/models/ci/build_dependencies_spec.rb
index 4fa1b3eb5a5..c5f56dbe5bc 100644
--- a/spec/models/ci/build_dependencies_spec.rb
+++ b/spec/models/ci/build_dependencies_spec.rb
@@ -146,6 +146,204 @@ RSpec.describe Ci::BuildDependencies do
end
end
+ describe '#cross_pipeline' do
+ let!(:job) do
+ create(:ci_build,
+ pipeline: pipeline,
+ name: 'build_with_pipeline_dependency',
+ options: { cross_dependencies: dependencies })
+ end
+
+ subject { described_class.new(job) }
+
+ let(:cross_pipeline_deps) { subject.cross_pipeline }
+
+ context 'when dependency specifications are valid' do
+ context 'when pipeline exists in the hierarchy' do
+ let!(:pipeline) { create(:ci_pipeline, child_of: parent_pipeline) }
+ let!(:parent_pipeline) { create(:ci_pipeline, project: project) }
+
+ context 'when job exists' do
+ let(:dependencies) do
+ [{ pipeline: parent_pipeline.id.to_s, job: upstream_job.name, artifacts: true }]
+ end
+
+ let!(:upstream_job) { create(:ci_build, :success, pipeline: parent_pipeline) }
+
+ it { expect(cross_pipeline_deps).to contain_exactly(upstream_job) }
+ it { is_expected.to be_valid }
+
+ context 'when pipeline and job are specified via variables' do
+ let(:dependencies) do
+ [{ pipeline: '$parent_pipeline_ID', job: '$UPSTREAM_JOB', artifacts: true }]
+ end
+
+ before do
+ job.yaml_variables.push(key: 'parent_pipeline_ID', value: parent_pipeline.id.to_s, public: true)
+ job.yaml_variables.push(key: 'UPSTREAM_JOB', value: upstream_job.name, public: true)
+ job.save!
+ end
+
+ it { expect(cross_pipeline_deps).to contain_exactly(upstream_job) }
+ it { is_expected.to be_valid }
+ end
+
+ context 'when feature flag `ci_cross_pipeline_artifacts_download` is disabled' do
+ before do
+ stub_feature_flags(ci_cross_pipeline_artifacts_download: false)
+ end
+
+ it { expect(cross_pipeline_deps).to be_empty }
+ it { is_expected.to be_valid }
+ end
+ end
+
+ context 'when same job names exist in other pipelines in the hierarchy' do
+ let(:cross_pipeline_limit) do
+ ::Gitlab::Ci::Config::Entry::Needs::NEEDS_CROSS_PIPELINE_DEPENDENCIES_LIMIT
+ end
+
+ let(:sibling_pipeline) { create(:ci_pipeline, child_of: parent_pipeline) }
+
+ before do
+ cross_pipeline_limit.times do |index|
+ create(:ci_build, :success,
+ pipeline: parent_pipeline, name: "dependency-#{index}",
+ stage_idx: 1, stage: 'build', user: user
+ )
+
+ create(:ci_build, :success,
+ pipeline: sibling_pipeline, name: "dependency-#{index}",
+ stage_idx: 1, stage: 'build', user: user
+ )
+ end
+ end
+
+ let(:dependencies) do
+ [
+ { pipeline: parent_pipeline.id.to_s, job: 'dependency-0', artifacts: true },
+ { pipeline: parent_pipeline.id.to_s, job: 'dependency-1', artifacts: true },
+ { pipeline: parent_pipeline.id.to_s, job: 'dependency-2', artifacts: true },
+ { pipeline: sibling_pipeline.id.to_s, job: 'dependency-3', artifacts: true },
+ { pipeline: sibling_pipeline.id.to_s, job: 'dependency-4', artifacts: true },
+ { pipeline: sibling_pipeline.id.to_s, job: 'dependency-5', artifacts: true }
+ ]
+ end
+
+ it 'returns a limited number of dependencies with the right match' do
+ expect(job.options[:cross_dependencies].size).to eq(cross_pipeline_limit.next)
+ expect(cross_pipeline_deps.size).to eq(cross_pipeline_limit)
+ expect(cross_pipeline_deps.map { |dep| [dep.pipeline_id, dep.name] }).to contain_exactly(
+ [parent_pipeline.id, 'dependency-0'],
+ [parent_pipeline.id, 'dependency-1'],
+ [parent_pipeline.id, 'dependency-2'],
+ [sibling_pipeline.id, 'dependency-3'],
+ [sibling_pipeline.id, 'dependency-4'])
+ end
+ end
+
+ context 'when job does not exist' do
+ let(:dependencies) do
+ [{ pipeline: parent_pipeline.id.to_s, job: 'non-existent', artifacts: true }]
+ end
+
+ it { expect(cross_pipeline_deps).to be_empty }
+ it { is_expected.not_to be_valid }
+ end
+ end
+
+ context 'when pipeline does not exist' do
+ let(:dependencies) do
+ [{ pipeline: '123', job: 'non-existent', artifacts: true }]
+ end
+
+ it { expect(cross_pipeline_deps).to be_empty }
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when jobs exist in different pipelines in the hierarchy' do
+ let!(:pipeline) { create(:ci_pipeline, child_of: parent_pipeline) }
+ let!(:parent_pipeline) { create(:ci_pipeline, project: project) }
+ let!(:parent_job) { create(:ci_build, :success, name: 'parent_job', pipeline: parent_pipeline) }
+
+ let!(:sibling_pipeline) { create(:ci_pipeline, child_of: parent_pipeline) }
+ let!(:sibling_job) { create(:ci_build, :success, name: 'sibling_job', pipeline: sibling_pipeline) }
+
+ context 'when pipeline and jobs dependencies are mismatched' do
+ let(:dependencies) do
+ [
+ { pipeline: parent_pipeline.id.to_s, job: sibling_job.name, artifacts: true },
+ { pipeline: sibling_pipeline.id.to_s, job: parent_job.name, artifacts: true }
+ ]
+ end
+
+ it { expect(cross_pipeline_deps).to be_empty }
+ it { is_expected.not_to be_valid }
+
+ context 'when dependencies contain a valid pair' do
+ let(:dependencies) do
+ [
+ { pipeline: parent_pipeline.id.to_s, job: sibling_job.name, artifacts: true },
+ { pipeline: sibling_pipeline.id.to_s, job: parent_job.name, artifacts: true },
+ { pipeline: sibling_pipeline.id.to_s, job: sibling_job.name, artifacts: true }
+ ]
+ end
+
+ it 'filters out the invalid ones' do
+ expect(cross_pipeline_deps).to contain_exactly(sibling_job)
+ end
+
+ it { is_expected.not_to be_valid }
+ end
+ end
+ end
+
+ context 'when job and pipeline exist outside the hierarchy' do
+ let!(:pipeline) { create(:ci_pipeline, project: project) }
+ let!(:another_pipeline) { create(:ci_pipeline, project: project) }
+ let!(:dependency) { create(:ci_build, :success, pipeline: another_pipeline) }
+
+ let(:dependencies) do
+ [{ pipeline: another_pipeline.id.to_s, job: dependency.name, artifacts: true }]
+ end
+
+ it 'ignores jobs outside the pipeline hierarchy' do
+ expect(cross_pipeline_deps).to be_empty
+ end
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when current pipeline is specified' do
+ let!(:pipeline) { create(:ci_pipeline, project: project) }
+ let!(:dependency) { create(:ci_build, :success, pipeline: pipeline) }
+
+ let(:dependencies) do
+ [{ pipeline: pipeline.id.to_s, job: dependency.name, artifacts: true }]
+ end
+
+ it 'ignores jobs from the current pipeline as simple needs should be used instead' do
+ expect(cross_pipeline_deps).to be_empty
+ end
+
+ it { is_expected.not_to be_valid }
+ end
+ end
+
+ context 'when artifacts:false' do
+ let!(:pipeline) { create(:ci_pipeline, child_of: parent_pipeline) }
+ let!(:parent_pipeline) { create(:ci_pipeline, project: project) }
+ let!(:parent_job) { create(:ci_build, :success, name: 'parent_job', pipeline: parent_pipeline) }
+
+ let(:dependencies) do
+ [{ pipeline: parent_pipeline.id.to_s, job: parent_job.name, artifacts: false }]
+ end
+
+ it { expect(cross_pipeline_deps).to be_empty }
+ it { is_expected.to be_valid } # we simply ignore it
+ end
+ end
+
describe '#all' do
let!(:job) do
create(:ci_build, pipeline: pipeline, name: 'deploy', stage_idx: 3, stage: 'deploy')
@@ -155,9 +353,9 @@ RSpec.describe Ci::BuildDependencies do
subject { dependencies.all }
- it 'returns the union of all local dependencies and any cross pipeline dependencies' do
+ it 'returns the union of all local dependencies and any cross project dependencies' do
expect(dependencies).to receive(:local).and_return([1, 2, 3])
- expect(dependencies).to receive(:cross_pipeline).and_return([3, 4])
+ expect(dependencies).to receive(:cross_project).and_return([3, 4])
expect(subject).to contain_exactly(1, 2, 3, 4)
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 5ff9b4dd493..9f412d64d56 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Ci::Build do
status: 'success')
end
- let(:build) { create(:ci_build, pipeline: pipeline) }
+ let_it_be(:build, refind: true) { create(:ci_build, pipeline: pipeline) }
it { is_expected.to belong_to(:runner) }
it { is_expected.to belong_to(:trigger_request) }
@@ -307,8 +307,6 @@ RSpec.describe Ci::Build do
end
describe '.without_needs' do
- let!(:build) { create(:ci_build) }
-
subject { described_class.without_needs }
context 'when no build_need is created' do
@@ -1151,12 +1149,26 @@ RSpec.describe Ci::Build do
end
context 'when transits to skipped' do
- before do
- build.skip!
+ context 'when cd_skipped_deployment_status is disabled' do
+ before do
+ stub_feature_flags(cd_skipped_deployment_status: false)
+ build.skip!
+ end
+
+ it 'transits deployment status to canceled' do
+ expect(deployment).to be_canceled
+ end
end
- it 'transits deployment status to canceled' do
- expect(deployment).to be_canceled
+ context 'when cd_skipped_deployment_status is enabled' do
+ before do
+ stub_feature_flags(cd_skipped_deployment_status: project)
+ build.skip!
+ end
+
+ it 'transits deployment status to skipped' do
+ expect(deployment).to be_skipped
+ end
end
end
@@ -2005,6 +2017,8 @@ RSpec.describe Ci::Build do
end
context 'when ci_build_metadata_config is disabled' do
+ let(:build) { create(:ci_build, pipeline: pipeline) }
+
before do
stub_feature_flags(ci_build_metadata_config: false)
end
@@ -2392,6 +2406,7 @@ RSpec.describe Ci::Build do
before do
stub_container_registry_config(enabled: container_registry_enabled, host_port: 'registry.example.com')
+ stub_config(dependency_proxy: { enabled: true })
end
subject { build.variables }
@@ -2409,6 +2424,8 @@ RSpec.describe Ci::Build do
{ key: 'CI_REGISTRY_USER', value: 'gitlab-ci-token', public: true, masked: false },
{ key: 'CI_REGISTRY_PASSWORD', value: 'my-token', public: false, masked: true },
{ key: 'CI_REPOSITORY_URL', value: build.repo_url, public: false, masked: false },
+ { key: 'CI_DEPENDENCY_PROXY_USER', value: 'gitlab-ci-token', public: true, masked: false },
+ { key: 'CI_DEPENDENCY_PROXY_PASSWORD', value: 'my-token', public: false, masked: true },
{ key: 'CI_JOB_JWT', value: 'ci.job.jwt', public: false, masked: true },
{ key: 'CI_JOB_NAME', value: 'test', public: true, masked: false },
{ key: 'CI_JOB_STAGE', value: 'test', public: true, masked: false },
@@ -2441,6 +2458,11 @@ RSpec.describe Ci::Build do
{ key: 'CI_DEFAULT_BRANCH', value: project.default_branch, public: true, masked: false },
{ key: 'CI_PAGES_DOMAIN', value: Gitlab.config.pages.host, public: true, masked: false },
{ key: 'CI_PAGES_URL', value: project.pages_url, public: true, masked: false },
+ { key: 'CI_DEPENDENCY_PROXY_SERVER', value: "#{Gitlab.config.gitlab.host}:#{Gitlab.config.gitlab.port}", public: true, masked: false },
+ { key: 'CI_DEPENDENCY_PROXY_GROUP_IMAGE_PREFIX',
+ value: "#{Gitlab.config.gitlab.host}:#{Gitlab.config.gitlab.port}/#{project.namespace.root_ancestor.path}#{DependencyProxy::URL_SUFFIX}",
+ public: true,
+ masked: false },
{ key: 'CI_API_V4_URL', value: 'http://localhost/api/v4', public: true, masked: false },
{ key: 'CI_PIPELINE_IID', value: pipeline.iid.to_s, public: true, masked: false },
{ key: 'CI_PIPELINE_SOURCE', value: pipeline.source, public: true, masked: false },
@@ -2502,6 +2524,7 @@ RSpec.describe Ci::Build do
let(:project_pre_var) { { key: 'project', value: 'value', public: true, masked: false } }
let(:pipeline_pre_var) { { key: 'pipeline', value: 'value', public: true, masked: false } }
let(:build_yaml_var) { { key: 'yaml', value: 'value', public: true, masked: false } }
+ let(:dependency_proxy_var) { { key: 'dependency_proxy', value: 'value', public: true, masked: false } }
let(:job_jwt_var) { { key: 'CI_JOB_JWT', value: 'ci.job.jwt', public: false, masked: true } }
let(:job_dependency_var) { { key: 'job_dependency', value: 'value', public: true, masked: false } }
@@ -2511,6 +2534,7 @@ RSpec.describe Ci::Build do
allow(build).to receive(:persisted_variables) { [] }
allow(build).to receive(:job_jwt_variables) { [job_jwt_var] }
allow(build).to receive(:dependency_variables) { [job_dependency_var] }
+ allow(build).to receive(:dependency_proxy_variables) { [dependency_proxy_var] }
allow(build.project)
.to receive(:predefined_variables) { [project_pre_var] }
@@ -2523,7 +2547,8 @@ RSpec.describe Ci::Build do
it 'returns variables in order depending on resource hierarchy' do
is_expected.to eq(
- [job_jwt_var,
+ [dependency_proxy_var,
+ job_jwt_var,
build_pre_var,
project_pre_var,
pipeline_pre_var,
@@ -2730,7 +2755,11 @@ RSpec.describe Ci::Build do
pipeline.update!(tag: true)
end
- it { is_expected.to include(tag_variable) }
+ it do
+ build.reload
+
+ expect(subject).to include(tag_variable)
+ end
end
context 'when CI variable is defined' do
@@ -2964,8 +2993,11 @@ RSpec.describe Ci::Build do
end
context 'when pipeline variable overrides build variable' do
+ let(:build) do
+ create(:ci_build, pipeline: pipeline, yaml_variables: [{ key: 'MYVAR', value: 'myvar', public: true }])
+ end
+
before do
- build.yaml_variables = [{ key: 'MYVAR', value: 'myvar', public: true }]
pipeline.variables.build(key: 'MYVAR', value: 'pipeline value')
end
@@ -3281,9 +3313,12 @@ RSpec.describe Ci::Build do
end
context 'when overriding user-provided variables' do
+ let(:build) do
+ create(:ci_build, pipeline: pipeline, yaml_variables: [{ key: 'MY_VAR', value: 'myvar', public: true }])
+ end
+
before do
pipeline.variables.build(key: 'MY_VAR', value: 'pipeline value')
- build.yaml_variables = [{ key: 'MY_VAR', value: 'myvar', public: true }]
end
it 'returns a hash including variable with higher precedence' do
@@ -3640,7 +3675,7 @@ RSpec.describe Ci::Build do
end
it 'handles raised exception' do
- expect { subject.drop! }.not_to raise_exception(Gitlab::Access::AccessDeniedError)
+ expect { subject.drop! }.not_to raise_error
end
it 'logs the error' do
@@ -4045,13 +4080,72 @@ RSpec.describe Ci::Build do
end
end
+ context 'when there is a Cobertura coverage report with class filename paths not relative to project root' do
+ before do
+ allow(build.project).to receive(:full_path).and_return('root/javademo')
+ allow(build.pipeline).to receive(:all_worktree_paths).and_return(['src/main/java/com/example/javademo/User.java'])
+
+ create(:ci_job_artifact, :coverage_with_paths_not_relative_to_project_root, job: build, project: build.project)
+ end
+
+ it 'parses blobs and add the results to the coverage report with corrected paths' do
+ expect { subject }.not_to raise_error
+
+ expect(coverage_report.files.keys).to match_array(['src/main/java/com/example/javademo/User.java'])
+ end
+
+ context 'and smart_cobertura_parser feature flag is disabled' do
+ before do
+ stub_feature_flags(smart_cobertura_parser: false)
+ end
+
+ it 'parses blobs and add the results to the coverage report with unmodified paths' do
+ expect { subject }.not_to raise_error
+
+ expect(coverage_report.files.keys).to match_array(['com/example/javademo/User.java'])
+ end
+ end
+ end
+
context 'when there is a corrupted Cobertura coverage report' do
before do
create(:ci_job_artifact, :coverage_with_corrupted_data, job: build, project: build.project)
end
it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::Ci::Parsers::Coverage::Cobertura::CoberturaParserError)
+ expect { subject }.to raise_error(Gitlab::Ci::Parsers::Coverage::Cobertura::InvalidLineInformationError)
+ end
+ end
+ end
+ end
+
+ describe '#collect_codequality_reports!' do
+ subject(:codequality_report) { build.collect_codequality_reports!(Gitlab::Ci::Reports::CodequalityReports.new) }
+
+ it { expect(codequality_report.degradations).to eq({}) }
+
+ context 'when build has a codequality report' do
+ context 'when there is a codequality report' do
+ before do
+ create(:ci_job_artifact, :codequality, job: build, project: build.project)
+ end
+
+ it 'parses blobs and add the results to the codequality report' do
+ expect { codequality_report }.not_to raise_error
+
+ expect(codequality_report.degradations_count).to eq(3)
+ end
+ end
+
+ context 'when there is an codequality report without errors' do
+ before do
+ create(:ci_job_artifact, :codequality_without_errors, job: build, project: build.project)
+ end
+
+ it 'parses blobs and add the results to the codequality report' do
+ expect { codequality_report }.not_to raise_error
+
+ expect(codequality_report.degradations_count).to eq(0)
end
end
end
@@ -4639,4 +4733,78 @@ RSpec.describe Ci::Build do
expect(action).not_to have_received(:perform!)
end
end
+
+ describe '#debug_mode?' do
+ subject { build.debug_mode? }
+
+ context 'when feature is disabled' do
+ before do
+ stub_feature_flags(restrict_access_to_build_debug_mode: false)
+ end
+
+ it { is_expected.to eq false }
+
+ context 'when in variables' do
+ before do
+ create(:ci_instance_variable, key: 'CI_DEBUG_TRACE', value: 'true')
+ end
+
+ it { is_expected.to eq false }
+ end
+ end
+
+ context 'when CI_DEBUG_TRACE=true is in variables' do
+ context 'when in instance variables' do
+ before do
+ create(:ci_instance_variable, key: 'CI_DEBUG_TRACE', value: 'true')
+ end
+
+ it { is_expected.to eq true }
+ end
+
+ context 'when in group variables' do
+ before do
+ create(:ci_group_variable, key: 'CI_DEBUG_TRACE', value: 'true', group: project.group)
+ end
+
+ it { is_expected.to eq true }
+ end
+
+ context 'when in pipeline variables' do
+ before do
+ create(:ci_pipeline_variable, key: 'CI_DEBUG_TRACE', value: 'true', pipeline: pipeline)
+ end
+
+ it { is_expected.to eq true }
+ end
+
+ context 'when in project variables' do
+ before do
+ create(:ci_variable, key: 'CI_DEBUG_TRACE', value: 'true', project: project)
+ end
+
+ it { is_expected.to eq true }
+ end
+
+ context 'when in job variables' do
+ before do
+ create(:ci_job_variable, key: 'CI_DEBUG_TRACE', value: 'true', job: build)
+ end
+
+ it { is_expected.to eq true }
+ end
+
+ context 'when in yaml variables' do
+ before do
+ build.update!(yaml_variables: [{ key: :CI_DEBUG_TRACE, value: 'true' }])
+ end
+
+ it { is_expected.to eq true }
+ end
+ end
+
+ context 'when CI_DEBUG_TRACE is not in variables' do
+ it { is_expected.to eq false }
+ end
+ end
end
diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb
index dce7b1d30ca..75ed5939724 100644
--- a/spec/models/ci/build_trace_chunk_spec.rb
+++ b/spec/models/ci/build_trace_chunk_spec.rb
@@ -91,7 +91,7 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
end
describe 'CHUNK_SIZE' do
- it 'Chunk size can not be changed without special care' do
+ it 'chunk size can not be changed without special care' do
expect(described_class::CHUNK_SIZE).to eq(128.kilobytes)
end
end
diff --git a/spec/models/ci/build_trace_chunks/fog_spec.rb b/spec/models/ci/build_trace_chunks/fog_spec.rb
index 20ca0c8b710..bc96e2584cf 100644
--- a/spec/models/ci/build_trace_chunks/fog_spec.rb
+++ b/spec/models/ci/build_trace_chunks/fog_spec.rb
@@ -74,6 +74,52 @@ RSpec.describe Ci::BuildTraceChunks::Fog do
expect(data_store.data(model)).to eq new_data
end
+
+ context 'when S3 server side encryption is enabled' do
+ before do
+ config = Gitlab.config.artifacts.object_store.to_h
+ config[:storage_options] = { server_side_encryption: 'AES256' }
+ allow(data_store).to receive(:object_store_raw_config).and_return(config)
+ end
+
+ it 'creates a file with attributes' do
+ expect_next_instance_of(Fog::AWS::Storage::Files) do |files|
+ expect(files).to receive(:create).with(
+ hash_including(
+ key: anything,
+ body: new_data,
+ 'x-amz-server-side-encryption' => 'AES256')
+ ).and_call_original
+ end
+
+ expect(data_store.data(model)).to be_nil
+
+ data_store.set_data(model, new_data)
+
+ expect(data_store.data(model)).to eq new_data
+ end
+
+ context 'when ci_live_trace_use_fog_attributes flag is disabled' do
+ before do
+ stub_feature_flags(ci_live_trace_use_fog_attributes: false)
+ end
+
+ it 'does not pass along Fog attributes' do
+ expect_next_instance_of(Fog::AWS::Storage::Files) do |files|
+ expect(files).to receive(:create).with(
+ key: anything,
+ body: new_data
+ ).and_call_original
+ end
+
+ expect(data_store.data(model)).to be_nil
+
+ data_store.set_data(model, new_data)
+
+ expect(data_store.data(model)).to eq new_data
+ end
+ end
+ end
end
end
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index 26851c93ac3..ef21ca8f100 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -96,6 +96,22 @@ RSpec.describe Ci::JobArtifact do
end
end
+ describe '.codequality_reports' do
+ subject { described_class.codequality_reports }
+
+ context 'when there is a codequality report' do
+ let!(:artifact) { create(:ci_job_artifact, :codequality) }
+
+ it { is_expected.to eq([artifact]) }
+ end
+
+ context 'when there are no codequality reports' do
+ let!(:artifact) { create(:ci_job_artifact, :archive) }
+
+ it { is_expected.to be_empty }
+ end
+ end
+
describe '.terraform_reports' do
context 'when there is a terraform report' do
it 'return the job artifact' do
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 1ca370dc950..f5e824bb066 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -222,6 +222,26 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ describe '.for_branch' do
+ subject { described_class.for_branch(branch) }
+
+ let(:branch) { 'master' }
+ let!(:pipeline) { create(:ci_pipeline, ref: 'master') }
+
+ it 'returns the pipeline' do
+ is_expected.to contain_exactly(pipeline)
+ end
+
+ context 'with tag pipeline' do
+ let(:branch) { 'v1.0' }
+ let!(:pipeline) { create(:ci_pipeline, ref: 'v1.0', tag: true) }
+
+ it 'returns nothing' do
+ is_expected.to be_empty
+ end
+ end
+ end
+
describe '.ci_sources' do
subject { described_class.ci_sources }
@@ -242,6 +262,27 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ describe '.ci_branch_sources' do
+ subject { described_class.ci_branch_sources }
+
+ let_it_be(:push_pipeline) { create(:ci_pipeline, source: :push) }
+ let_it_be(:web_pipeline) { create(:ci_pipeline, source: :web) }
+ let_it_be(:api_pipeline) { create(:ci_pipeline, source: :api) }
+ let_it_be(:webide_pipeline) { create(:ci_pipeline, source: :webide) }
+ let_it_be(:child_pipeline) { create(:ci_pipeline, source: :parent_pipeline) }
+ let_it_be(:merge_request_pipeline) { create(:ci_pipeline, :detached_merge_request_pipeline) }
+
+ it 'contains pipelines having CI only sources' do
+ expect(subject).to contain_exactly(push_pipeline, web_pipeline, api_pipeline)
+ end
+
+ it 'filters on expected sources' do
+ expect(::Enums::Ci::Pipeline.ci_branch_sources.keys).to contain_exactly(
+ *%i[unknown push web trigger schedule api external pipeline chat
+ external_pull_request_event])
+ end
+ end
+
describe '.outside_pipeline_family' do
subject(:outside_pipeline_family) { described_class.outside_pipeline_family(upstream_pipeline) }
@@ -269,7 +310,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
let!(:older_other_pipeline) { create(:ci_pipeline, project: project) }
let!(:upstream_pipeline) { create(:ci_pipeline, project: project) }
- let!(:child_pipeline) { create(:ci_pipeline, project: project) }
+ let!(:child_pipeline) { create(:ci_pipeline, child_of: upstream_pipeline) }
let!(:other_pipeline) { create(:ci_pipeline, project: project) }
@@ -498,6 +539,16 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ context 'when pipeline has a codequality report' do
+ subject { described_class.with_reports(Ci::JobArtifact.codequality_reports) }
+
+ let(:pipeline_with_report) { create(:ci_pipeline, :with_codequality_reports) }
+
+ it 'selects the pipeline' do
+ is_expected.to eq([pipeline_with_report])
+ end
+ end
+
context 'when pipeline has a terraform report' do
it 'selects the pipeline' do
pipeline_with_report = create(:ci_pipeline, :with_terraform_reports)
@@ -744,11 +795,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
]
end
- context 'when pipeline is merge request' do
- let(:pipeline) do
- create(:ci_pipeline, merge_request: merge_request)
- end
-
+ context 'when merge request is present' do
let(:merge_request) do
create(:merge_request,
source_project: project,
@@ -764,64 +811,142 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
let(:milestone) { create(:milestone, project: project) }
let(:labels) { create_list(:label, 2) }
- it 'exposes merge request pipeline variables' do
- expect(subject.to_hash)
- .to include(
- 'CI_MERGE_REQUEST_ID' => merge_request.id.to_s,
- 'CI_MERGE_REQUEST_IID' => merge_request.iid.to_s,
- 'CI_MERGE_REQUEST_REF_PATH' => merge_request.ref_path.to_s,
- 'CI_MERGE_REQUEST_PROJECT_ID' => merge_request.project.id.to_s,
- 'CI_MERGE_REQUEST_PROJECT_PATH' => merge_request.project.full_path,
- 'CI_MERGE_REQUEST_PROJECT_URL' => merge_request.project.web_url,
- 'CI_MERGE_REQUEST_TARGET_BRANCH_NAME' => merge_request.target_branch.to_s,
- 'CI_MERGE_REQUEST_TARGET_BRANCH_SHA' => pipeline.target_sha.to_s,
- 'CI_MERGE_REQUEST_SOURCE_PROJECT_ID' => merge_request.source_project.id.to_s,
- 'CI_MERGE_REQUEST_SOURCE_PROJECT_PATH' => merge_request.source_project.full_path,
- 'CI_MERGE_REQUEST_SOURCE_PROJECT_URL' => merge_request.source_project.web_url,
- 'CI_MERGE_REQUEST_SOURCE_BRANCH_NAME' => merge_request.source_branch.to_s,
- 'CI_MERGE_REQUEST_SOURCE_BRANCH_SHA' => pipeline.source_sha.to_s,
- 'CI_MERGE_REQUEST_TITLE' => merge_request.title,
- 'CI_MERGE_REQUEST_ASSIGNEES' => merge_request.assignee_username_list,
- 'CI_MERGE_REQUEST_MILESTONE' => milestone.title,
- 'CI_MERGE_REQUEST_LABELS' => labels.map(&:title).sort.join(','),
- 'CI_MERGE_REQUEST_EVENT_TYPE' => pipeline.merge_request_event_type.to_s)
- end
-
- context 'when source project does not exist' do
+ context 'when pipeline for merge request is created' do
+ let(:pipeline) do
+ create(:ci_pipeline, :detached_merge_request_pipeline,
+ ci_ref_presence: false,
+ user: user,
+ merge_request: merge_request)
+ end
+
before do
- merge_request.update_column(:source_project_id, nil)
+ project.add_developer(user)
end
- it 'does not expose source project related variables' do
- expect(subject.to_hash.keys).not_to include(
- %w[CI_MERGE_REQUEST_SOURCE_PROJECT_ID
- CI_MERGE_REQUEST_SOURCE_PROJECT_PATH
- CI_MERGE_REQUEST_SOURCE_PROJECT_URL
- CI_MERGE_REQUEST_SOURCE_BRANCH_NAME])
+ it 'exposes merge request pipeline variables' do
+ expect(subject.to_hash)
+ .to include(
+ 'CI_MERGE_REQUEST_ID' => merge_request.id.to_s,
+ 'CI_MERGE_REQUEST_IID' => merge_request.iid.to_s,
+ 'CI_MERGE_REQUEST_REF_PATH' => merge_request.ref_path.to_s,
+ 'CI_MERGE_REQUEST_PROJECT_ID' => merge_request.project.id.to_s,
+ 'CI_MERGE_REQUEST_PROJECT_PATH' => merge_request.project.full_path,
+ 'CI_MERGE_REQUEST_PROJECT_URL' => merge_request.project.web_url,
+ 'CI_MERGE_REQUEST_TARGET_BRANCH_NAME' => merge_request.target_branch.to_s,
+ 'CI_MERGE_REQUEST_TARGET_BRANCH_SHA' => '',
+ 'CI_MERGE_REQUEST_SOURCE_PROJECT_ID' => merge_request.source_project.id.to_s,
+ 'CI_MERGE_REQUEST_SOURCE_PROJECT_PATH' => merge_request.source_project.full_path,
+ 'CI_MERGE_REQUEST_SOURCE_PROJECT_URL' => merge_request.source_project.web_url,
+ 'CI_MERGE_REQUEST_SOURCE_BRANCH_NAME' => merge_request.source_branch.to_s,
+ 'CI_MERGE_REQUEST_SOURCE_BRANCH_SHA' => '',
+ 'CI_MERGE_REQUEST_TITLE' => merge_request.title,
+ 'CI_MERGE_REQUEST_ASSIGNEES' => merge_request.assignee_username_list,
+ 'CI_MERGE_REQUEST_MILESTONE' => milestone.title,
+ 'CI_MERGE_REQUEST_LABELS' => labels.map(&:title).sort.join(','),
+ 'CI_MERGE_REQUEST_EVENT_TYPE' => 'detached',
+ 'CI_OPEN_MERGE_REQUESTS' => merge_request.to_reference(full: true))
+ end
+
+ it 'exposes diff variables' do
+ expect(subject.to_hash)
+ .to include(
+ 'CI_MERGE_REQUEST_DIFF_ID' => merge_request.merge_request_diff.id.to_s,
+ 'CI_MERGE_REQUEST_DIFF_BASE_SHA' => merge_request.merge_request_diff.base_commit_sha)
+ end
+
+ context 'without assignee' do
+ let(:assignees) { [] }
+
+ it 'does not expose assignee variable' do
+ expect(subject.to_hash.keys).not_to include('CI_MERGE_REQUEST_ASSIGNEES')
+ end
end
- end
- context 'without assignee' do
- let(:assignees) { [] }
+ context 'without milestone' do
+ let(:milestone) { nil }
- it 'does not expose assignee variable' do
- expect(subject.to_hash.keys).not_to include('CI_MERGE_REQUEST_ASSIGNEES')
+ it 'does not expose milestone variable' do
+ expect(subject.to_hash.keys).not_to include('CI_MERGE_REQUEST_MILESTONE')
+ end
+ end
+
+ context 'without labels' do
+ let(:labels) { [] }
+
+ it 'does not expose labels variable' do
+ expect(subject.to_hash.keys).not_to include('CI_MERGE_REQUEST_LABELS')
+ end
end
end
- context 'without milestone' do
- let(:milestone) { nil }
+ context 'when pipeline on branch is created' do
+ let(:pipeline) do
+ create(:ci_pipeline, project: project, user: user, ref: 'feature')
+ end
+
+ context 'when a merge request is created' do
+ before do
+ merge_request
+ end
- it 'does not expose milestone variable' do
- expect(subject.to_hash.keys).not_to include('CI_MERGE_REQUEST_MILESTONE')
+ context 'when user has access to project' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'merge request references are returned matching the pipeline' do
+ expect(subject.to_hash).to include(
+ 'CI_OPEN_MERGE_REQUESTS' => merge_request.to_reference(full: true))
+ end
+ end
+
+ context 'when user does not have access to project' do
+ it 'CI_OPEN_MERGE_REQUESTS is not returned' do
+ expect(subject.to_hash).not_to have_key('CI_OPEN_MERGE_REQUESTS')
+ end
+ end
+ end
+
+      context 'when no merge request is created' do
+ it 'CI_OPEN_MERGE_REQUESTS is not returned' do
+ expect(subject.to_hash).not_to have_key('CI_OPEN_MERGE_REQUESTS')
+ end
end
end
- context 'without labels' do
- let(:labels) { [] }
+ context 'with merged results' do
+ let(:pipeline) do
+ create(:ci_pipeline, :merged_result_pipeline, merge_request: merge_request)
+ end
+
+ it 'exposes merge request pipeline variables' do
+ expect(subject.to_hash)
+ .to include(
+ 'CI_MERGE_REQUEST_ID' => merge_request.id.to_s,
+ 'CI_MERGE_REQUEST_IID' => merge_request.iid.to_s,
+ 'CI_MERGE_REQUEST_REF_PATH' => merge_request.ref_path.to_s,
+ 'CI_MERGE_REQUEST_PROJECT_ID' => merge_request.project.id.to_s,
+ 'CI_MERGE_REQUEST_PROJECT_PATH' => merge_request.project.full_path,
+ 'CI_MERGE_REQUEST_PROJECT_URL' => merge_request.project.web_url,
+ 'CI_MERGE_REQUEST_TARGET_BRANCH_NAME' => merge_request.target_branch.to_s,
+ 'CI_MERGE_REQUEST_TARGET_BRANCH_SHA' => merge_request.target_branch_sha,
+ 'CI_MERGE_REQUEST_SOURCE_PROJECT_ID' => merge_request.source_project.id.to_s,
+ 'CI_MERGE_REQUEST_SOURCE_PROJECT_PATH' => merge_request.source_project.full_path,
+ 'CI_MERGE_REQUEST_SOURCE_PROJECT_URL' => merge_request.source_project.web_url,
+ 'CI_MERGE_REQUEST_SOURCE_BRANCH_NAME' => merge_request.source_branch.to_s,
+ 'CI_MERGE_REQUEST_SOURCE_BRANCH_SHA' => merge_request.source_branch_sha,
+ 'CI_MERGE_REQUEST_TITLE' => merge_request.title,
+ 'CI_MERGE_REQUEST_ASSIGNEES' => merge_request.assignee_username_list,
+ 'CI_MERGE_REQUEST_MILESTONE' => milestone.title,
+ 'CI_MERGE_REQUEST_LABELS' => labels.map(&:title).sort.join(','),
+ 'CI_MERGE_REQUEST_EVENT_TYPE' => 'merged_result')
+ end
- it 'does not expose labels variable' do
- expect(subject.to_hash.keys).not_to include('CI_MERGE_REQUEST_LABELS')
+ it 'exposes diff variables' do
+ expect(subject.to_hash)
+ .to include(
+ 'CI_MERGE_REQUEST_DIFF_ID' => merge_request.merge_request_diff.id.to_s,
+ 'CI_MERGE_REQUEST_DIFF_BASE_SHA' => merge_request.merge_request_diff.base_commit_sha)
end
end
end
@@ -1126,6 +1251,40 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+  describe 'syncing status to Jira' do
+ let(:worker) { ::JiraConnect::SyncBuildsWorker }
+
+ %i[prepare! run! skip! drop! succeed! cancel! block! delay!].each do |event|
+ context "when we call pipeline.#{event}" do
+      it 'triggers a Jira sync worker' do
+ expect(worker).to receive(:perform_async).with(pipeline.id, Integer)
+
+ pipeline.send(event)
+ end
+
+ context 'the feature is disabled' do
+ it 'does not trigger a worker' do
+ stub_feature_flags(jira_sync_builds: false)
+
+ expect(worker).not_to receive(:perform_async)
+
+ pipeline.send(event)
+ end
+ end
+
+ context 'the feature is enabled for this project' do
+ it 'does trigger a worker' do
+ stub_feature_flags(jira_sync_builds: pipeline.project)
+
+ expect(worker).to receive(:perform_async)
+
+ pipeline.send(event)
+ end
+ end
+ end
+ end
+ end
+
describe '#duration', :sidekiq_inline do
context 'when multiple builds are finished' do
before do
@@ -2539,6 +2698,14 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
it 'receives a pending event once' do
expect(WebMock).to have_requested_pipeline_hook('pending').once
end
+
+ it 'builds hook data once' do
+ create(:pipelines_email_service, project: project)
+
+ expect(Gitlab::DataBuilder::Pipeline).to receive(:build).once.and_call_original
+
+ pipeline.execute_hooks
+ end
end
context 'when build is run' do
@@ -2600,6 +2767,12 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
it 'did not execute pipeline_hook after touched' do
expect(WebMock).not_to have_requested(:post, hook.url)
end
+
+ it 'does not build hook data' do
+ expect(Gitlab::DataBuilder::Pipeline).not_to receive(:build)
+
+ pipeline.execute_hooks
+ end
end
def create_build(name, stage_idx)
@@ -2734,6 +2907,93 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ describe '#related_merge_requests' do
+ let(:project) { create(:project, :repository) }
+ let(:merge_request) { create(:merge_request, source_project: project, source_branch: 'feature', target_branch: 'master') }
+ let(:other_merge_request) { create(:merge_request, source_project: project, source_branch: 'feature', target_branch: 'stable') }
+ let(:branch_pipeline) { create(:ci_pipeline, project: project, ref: 'feature') }
+ let(:merge_pipeline) { create(:ci_pipeline, :detached_merge_request_pipeline, merge_request: merge_request) }
+
+ context 'for a branch pipeline' do
+ subject { branch_pipeline.related_merge_requests }
+
+ it 'when no merge request is created' do
+ is_expected.to be_empty
+ end
+
+ it 'when another merge requests are created' do
+ merge_request
+ other_merge_request
+
+ is_expected.to contain_exactly(merge_request, other_merge_request)
+ end
+ end
+
+ context 'for a merge pipeline' do
+ subject { merge_pipeline.related_merge_requests }
+
+ it 'when only merge pipeline is created' do
+ merge_pipeline
+
+ is_expected.to contain_exactly(merge_request)
+ end
+
+ it 'when a merge request is created' do
+ merge_pipeline
+ other_merge_request
+
+ is_expected.to contain_exactly(merge_request, other_merge_request)
+ end
+ end
+ end
+
+ describe '#open_merge_requests_refs' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+ let!(:pipeline) { create(:ci_pipeline, user: user, project: project, ref: 'feature') }
+ let!(:merge_request) { create(:merge_request, source_project: project, source_branch: 'feature', target_branch: 'master') }
+
+ subject { pipeline.open_merge_requests_refs }
+
+ context 'when user is a developer' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'returns open merge requests' do
+ is_expected.to eq([merge_request.to_reference(full: true)])
+ end
+
+ it 'does not return closed merge requests' do
+ merge_request.close!
+
+ is_expected.to be_empty
+ end
+
+ context 'limits amount of returned merge requests' do
+ let!(:other_merge_requests) do
+ Array.new(4) do |idx|
+ create(:merge_request, source_project: project, source_branch: 'feature', target_branch: "master-#{idx}")
+ end
+ end
+
+ let(:other_merge_requests_refs) do
+ other_merge_requests.map { |mr| mr.to_reference(full: true) }
+ end
+
+ it 'returns only last 4 in a reverse order' do
+ is_expected.to eq(other_merge_requests_refs.reverse)
+ end
+ end
+ end
+
+ context 'when user does not have permissions' do
+ it 'does not return any merge requests' do
+ is_expected.to be_empty
+ end
+ end
+ end
+
describe '#same_family_pipeline_ids' do
subject { pipeline.same_family_pipeline_ids.map(&:id) }
@@ -2744,13 +3004,9 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
context 'when pipeline is child' do
- let(:parent) { create(:ci_pipeline, project: pipeline.project) }
- let(:sibling) { create(:ci_pipeline, project: pipeline.project) }
-
- before do
- create_source_pipeline(parent, pipeline)
- create_source_pipeline(parent, sibling)
- end
+ let(:parent) { create(:ci_pipeline, project: project) }
+ let!(:pipeline) { create(:ci_pipeline, child_of: parent) }
+ let!(:sibling) { create(:ci_pipeline, child_of: parent) }
it 'returns parent sibling and self ids' do
expect(subject).to contain_exactly(parent.id, pipeline.id, sibling.id)
@@ -2758,11 +3014,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
context 'when pipeline is parent' do
- let(:child) { create(:ci_pipeline, project: pipeline.project) }
-
- before do
- create_source_pipeline(pipeline, child)
- end
+ let!(:child) { create(:ci_pipeline, child_of: pipeline) }
it 'returns self and child ids' do
expect(subject).to contain_exactly(pipeline.id, child.id)
@@ -2770,17 +3022,11 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
context 'when pipeline is a child of a child pipeline' do
- let(:ancestor) { create(:ci_pipeline, project: pipeline.project) }
- let(:parent) { create(:ci_pipeline, project: pipeline.project) }
- let(:cousin_parent) { create(:ci_pipeline, project: pipeline.project) }
- let(:cousin) { create(:ci_pipeline, project: pipeline.project) }
-
- before do
- create_source_pipeline(ancestor, parent)
- create_source_pipeline(ancestor, cousin_parent)
- create_source_pipeline(parent, pipeline)
- create_source_pipeline(cousin_parent, cousin)
- end
+ let(:ancestor) { create(:ci_pipeline, project: project) }
+ let!(:parent) { create(:ci_pipeline, child_of: ancestor) }
+ let!(:pipeline) { create(:ci_pipeline, child_of: parent) }
+ let!(:cousin_parent) { create(:ci_pipeline, child_of: ancestor) }
+ let!(:cousin) { create(:ci_pipeline, child_of: cousin_parent) }
it 'returns all family ids' do
expect(subject).to contain_exactly(
@@ -2790,11 +3036,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
context 'when pipeline is a triggered pipeline' do
- let(:upstream) { create(:ci_pipeline, project: create(:project)) }
-
- before do
- create_source_pipeline(upstream, pipeline)
- end
+ let!(:upstream) { create(:ci_pipeline, project: create(:project), upstream_of: pipeline)}
it 'returns self id' do
expect(subject).to contain_exactly(pipeline.id)
@@ -2802,6 +3044,46 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ describe '#root_ancestor' do
+ subject { pipeline.root_ancestor }
+
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+
+ context 'when pipeline is child of child pipeline' do
+ let!(:root_ancestor) { create(:ci_pipeline, project: project) }
+ let!(:parent_pipeline) { create(:ci_pipeline, child_of: root_ancestor) }
+ let!(:pipeline) { create(:ci_pipeline, child_of: parent_pipeline) }
+
+ it 'returns the root ancestor' do
+ expect(subject).to eq(root_ancestor)
+ end
+ end
+
+ context 'when pipeline is root ancestor' do
+ let!(:child_pipeline) { create(:ci_pipeline, child_of: pipeline) }
+
+ it 'returns itself' do
+ expect(subject).to eq(pipeline)
+ end
+ end
+
+ context 'when pipeline is standalone' do
+ it 'returns itself' do
+ expect(subject).to eq(pipeline)
+ end
+ end
+
+ context 'when pipeline is multi-project downstream pipeline' do
+ let!(:upstream_pipeline) do
+ create(:ci_pipeline, project: create(:project), upstream_of: pipeline)
+ end
+
+ it 'ignores cross project ancestors' do
+ expect(subject).to eq(pipeline)
+ end
+ end
+ end
+
describe '#stuck?' do
before do
create(:ci_build, :pending, pipeline: pipeline)
@@ -2838,7 +3120,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
stub_feature_flags(ci_store_pipeline_messages: false)
end
- it ' does not add pipeline error message' do
+ it 'does not add pipeline error message' do
pipeline.add_error_message('The error message')
expect(pipeline.messages).to be_empty
@@ -3343,6 +3625,16 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
])
end
+ it 'does not execute N+1 queries' do
+ single_build_pipeline = create(:ci_empty_pipeline, status: :created, project: project)
+ single_rspec = create(:ci_build, :success, name: 'rspec', pipeline: single_build_pipeline, project: project)
+ create(:ci_job_artifact, :cobertura, job: single_rspec, project: project)
+
+ control = ActiveRecord::QueryRecorder.new { single_build_pipeline.coverage_reports }
+
+ expect { subject }.not_to exceed_query_limit(control)
+ end
+
context 'when builds are retried' do
let!(:build_rspec) { create(:ci_build, :retried, :success, name: 'rspec', pipeline: pipeline, project: project) }
let!(:build_golang) { create(:ci_build, :retried, :success, name: 'golang', pipeline: pipeline, project: project) }
@@ -3360,6 +3652,39 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ describe '#codequality_reports' do
+ subject(:codequality_reports) { pipeline.codequality_reports }
+
+ context 'when pipeline has multiple builds with codequality reports' do
+ let(:build_rspec) { create(:ci_build, :success, name: 'rspec', pipeline: pipeline, project: project) }
+ let(:build_golang) { create(:ci_build, :success, name: 'golang', pipeline: pipeline, project: project) }
+
+ before do
+ create(:ci_job_artifact, :codequality, job: build_rspec, project: project)
+ create(:ci_job_artifact, :codequality_without_errors, job: build_golang, project: project)
+ end
+
+ it 'returns codequality report with collected data' do
+ expect(codequality_reports.degradations_count).to eq(3)
+ end
+
+ context 'when builds are retried' do
+ let(:build_rspec) { create(:ci_build, :retried, :success, name: 'rspec', pipeline: pipeline, project: project) }
+ let(:build_golang) { create(:ci_build, :retried, :success, name: 'golang', pipeline: pipeline, project: project) }
+
+ it 'returns a codequality reports without degradations' do
+ expect(codequality_reports.degradations).to be_empty
+ end
+ end
+ end
+
+ context 'when pipeline does not have any builds with codequality reports' do
+ it 'returns codequality reports without degradations' do
+ expect(codequality_reports.degradations).to be_empty
+ end
+ end
+ end
+
describe '#total_size' do
let!(:build_job1) { create(:ci_build, pipeline: pipeline, stage_idx: 0) }
let!(:build_job2) { create(:ci_build, pipeline: pipeline, stage_idx: 0) }
@@ -3509,18 +3834,9 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
describe '#parent_pipeline' do
let_it_be(:project) { create(:project) }
- let(:pipeline) { create(:ci_pipeline, project: project) }
-
context 'when pipeline is triggered by a pipeline from the same project' do
- let(:upstream_pipeline) { create(:ci_pipeline, project: pipeline.project) }
-
- before do
- create(:ci_sources_pipeline,
- source_pipeline: upstream_pipeline,
- source_project: project,
- pipeline: pipeline,
- project: project)
- end
+ let_it_be(:upstream_pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, child_of: upstream_pipeline) }
it 'returns the parent pipeline' do
expect(pipeline.parent_pipeline).to eq(upstream_pipeline)
@@ -3532,15 +3848,8 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
context 'when pipeline is triggered by a pipeline from another project' do
- let(:upstream_pipeline) { create(:ci_pipeline) }
-
- before do
- create(:ci_sources_pipeline,
- source_pipeline: upstream_pipeline,
- source_project: upstream_pipeline.project,
- pipeline: pipeline,
- project: project)
- end
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+ let!(:upstream_pipeline) { create(:ci_pipeline, project: create(:project), upstream_of: pipeline) }
it 'returns nil' do
expect(pipeline.parent_pipeline).to be_nil
@@ -3552,6 +3861,8 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
context 'when pipeline is not triggered by a pipeline' do
+ let_it_be(:pipeline) { create(:ci_pipeline) }
+
it 'returns nil' do
expect(pipeline.parent_pipeline).to be_nil
end
@@ -3851,4 +4162,70 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
bridge
end
end
+
+ describe 'test failure history processing' do
+ it 'performs the service asynchronously when the pipeline is completed' do
+ service = double
+
+ expect(Ci::TestFailureHistoryService).to receive(:new).with(pipeline).and_return(service)
+ expect(service).to receive_message_chain(:async, :perform_if_needed)
+
+ pipeline.succeed!
+ end
+ end
+
+ describe '#latest_test_report_builds' do
+ it 'returns pipeline builds with test report artifacts' do
+ test_build = create(:ci_build, :test_reports, pipeline: pipeline, project: project)
+ create(:ci_build, :artifacts, pipeline: pipeline, project: project)
+
+ expect(pipeline.latest_test_report_builds).to contain_exactly(test_build)
+ end
+
+ it 'preloads project on each build to avoid N+1 queries' do
+ create(:ci_build, :test_reports, pipeline: pipeline, project: project)
+
+ control_count = ActiveRecord::QueryRecorder.new do
+ pipeline.latest_test_report_builds.map(&:project).map(&:full_path)
+ end
+
+ multi_build_pipeline = create(:ci_empty_pipeline, status: :created, project: project)
+ create(:ci_build, :test_reports, pipeline: multi_build_pipeline, project: project)
+ create(:ci_build, :test_reports, pipeline: multi_build_pipeline, project: project)
+
+ expect { multi_build_pipeline.latest_test_report_builds.map(&:project).map(&:full_path) }
+ .not_to exceed_query_limit(control_count)
+ end
+ end
+
+ describe '#builds_with_failed_tests' do
+ it 'returns pipeline builds with test report artifacts' do
+ failed_build = create(:ci_build, :failed, :test_reports, pipeline: pipeline, project: project)
+ create(:ci_build, :success, :test_reports, pipeline: pipeline, project: project)
+
+ expect(pipeline.builds_with_failed_tests).to contain_exactly(failed_build)
+ end
+
+ it 'supports limiting the number of builds to fetch' do
+ create(:ci_build, :failed, :test_reports, pipeline: pipeline, project: project)
+ create(:ci_build, :failed, :test_reports, pipeline: pipeline, project: project)
+
+ expect(pipeline.builds_with_failed_tests(limit: 1).count).to eq(1)
+ end
+
+ it 'preloads project on each build to avoid N+1 queries' do
+ create(:ci_build, :failed, :test_reports, pipeline: pipeline, project: project)
+
+ control_count = ActiveRecord::QueryRecorder.new do
+ pipeline.builds_with_failed_tests.map(&:project).map(&:full_path)
+ end
+
+ multi_build_pipeline = create(:ci_empty_pipeline, status: :created, project: project)
+ create(:ci_build, :failed, :test_reports, pipeline: multi_build_pipeline, project: project)
+ create(:ci_build, :failed, :test_reports, pipeline: multi_build_pipeline, project: project)
+
+ expect { multi_build_pipeline.builds_with_failed_tests.map(&:project).map(&:full_path) }
+ .not_to exceed_query_limit(control_count)
+ end
+ end
end
diff --git a/spec/models/clusters/agent_spec.rb b/spec/models/clusters/agent_spec.rb
index 148bb3cf870..49f41570717 100644
--- a/spec/models/clusters/agent_spec.rb
+++ b/spec/models/clusters/agent_spec.rb
@@ -57,4 +57,16 @@ RSpec.describe Clusters::Agent do
end
end
end
+
+ describe '#has_access_to?' do
+ let(:agent) { build(:cluster_agent) }
+
+ it 'has access to own project' do
+ expect(agent.has_access_to?(agent.project)).to be_truthy
+ end
+
+ it 'does not have access to other projects' do
+ expect(agent.has_access_to?(create(:project))).to be_falsey
+ end
+ end
end
diff --git a/spec/models/clusters/applications/helm_spec.rb b/spec/models/clusters/applications/helm_spec.rb
index ad1ebd4966a..5212e321a55 100644
--- a/spec/models/clusters/applications/helm_spec.rb
+++ b/spec/models/clusters/applications/helm_spec.rb
@@ -19,35 +19,9 @@ RSpec.describe Clusters::Applications::Helm do
end
describe '#can_uninstall?' do
- context "with other existing applications" do
- Clusters::Cluster::APPLICATIONS.keys.each do |application_name|
- next if application_name == 'helm'
+ subject(:application) { build(:clusters_applications_helm).can_uninstall? }
- it "is false when #{application_name} is installed" do
- cluster_application = create("clusters_applications_#{application_name}".to_sym)
-
- helm = cluster_application.cluster.application_helm
-
- expect(helm.allowed_to_uninstall?).to be_falsy
- end
- end
-
- it 'executes a single query only' do
- cluster_application = create(:clusters_applications_ingress)
- helm = cluster_application.cluster.application_helm
-
- query_count = ActiveRecord::QueryRecorder.new { helm.allowed_to_uninstall? }.count
- expect(query_count).to eq(1)
- end
- end
-
- context "without other existing applications" do
- subject { helm.can_uninstall? }
-
- let(:helm) { create(:clusters_applications_helm) }
-
- it { is_expected.to be_truthy }
- end
+ it { is_expected.to eq true }
end
describe '#issue_client_cert' do
@@ -135,14 +109,4 @@ RSpec.describe Clusters::Applications::Helm do
end
end
end
-
- describe '#post_uninstall' do
- let(:helm) { create(:clusters_applications_helm, :installed) }
-
- it do
- expect(helm.cluster.kubeclient).to receive(:delete_namespace).with('gitlab-managed-apps')
-
- helm.post_uninstall
- end
- end
end
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index ed74a841044..a8f81cba285 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -262,14 +262,14 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
end
end
- describe '.with_project_alert_service_data' do
- subject { described_class.with_project_alert_service_data(project_id) }
+ describe '.with_project_http_integrations' do
+ subject { described_class.with_project_http_integrations(project_id) }
let!(:cluster) { create(:cluster, :project) }
let!(:project_id) { cluster.first_project.id }
context 'project has alert service data' do
- let!(:alerts_service) { create(:alerts_service, project: cluster.clusterable) }
+ let!(:integration) { create(:alert_management_http_integration, project: cluster.clusterable) }
it { is_expected.to include(cluster) }
end
diff --git a/spec/models/clusters/platforms/kubernetes_spec.rb b/spec/models/clusters/platforms/kubernetes_spec.rb
index e877ba2ac96..fb0613187c5 100644
--- a/spec/models/clusters/platforms/kubernetes_spec.rb
+++ b/spec/models/clusters/platforms/kubernetes_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Clusters::Platforms::Kubernetes do
include KubernetesHelpers
+ include ReactiveCachingHelpers
it { is_expected.to belong_to(:cluster) }
it { is_expected.to be_kind_of(Gitlab::Kubernetes) }
@@ -406,32 +407,62 @@ RSpec.describe Clusters::Platforms::Kubernetes do
end
describe '#calculate_reactive_cache_for' do
+ let(:cluster) { create(:cluster, :project, platform_kubernetes: service) }
let(:service) { create(:cluster_platform_kubernetes, :configured) }
+ let(:namespace) { 'project-namespace' }
+ let(:environment) { instance_double(Environment, deployment_namespace: namespace, project: cluster.project) }
let(:expected_pod_cached_data) do
kube_pod.tap { |kp| kp['metadata'].delete('namespace') }
end
- let(:namespace) { "project-namespace" }
- let(:environment) { instance_double(Environment, deployment_namespace: namespace, project: service.cluster.project) }
-
subject { service.calculate_reactive_cache_for(environment) }
- context 'when the kubernetes integration is disabled' do
+ context 'when kubernetes responds with valid deployments' do
before do
- allow(service).to receive(:enabled?).and_return(false)
+ stub_kubeclient_pods(namespace)
+ stub_kubeclient_deployments(namespace)
+ stub_kubeclient_ingresses(namespace)
end
- it { is_expected.to be_nil }
+ shared_examples 'successful deployment request' do
+ it { is_expected.to include(pods: [expected_pod_cached_data], deployments: [kube_deployment], ingresses: [kube_ingress]) }
+ end
+
+ context 'on a project level cluster' do
+ let(:cluster) { create(:cluster, :project, platform_kubernetes: service) }
+
+ include_examples 'successful deployment request'
+ end
+
+ context 'on a group level cluster' do
+ let(:cluster) { create(:cluster, :group, platform_kubernetes: service) }
+
+ include_examples 'successful deployment request'
+ end
+
+ context 'on an instance level cluster' do
+ let(:cluster) { create(:cluster, :instance, platform_kubernetes: service) }
+
+ include_examples 'successful deployment request'
+ end
+
+ context 'when canary_ingress_weight_control feature flag is disabled' do
+ before do
+ stub_feature_flags(canary_ingress_weight_control: false)
+ end
+
+ it 'does not fetch ingress data from kubernetes' do
+ expect(subject[:ingresses]).to be_empty
+ end
+ end
end
- context 'when kubernetes responds with valid pods and deployments' do
+ context 'when the kubernetes integration is disabled' do
before do
- stub_kubeclient_pods(namespace)
- stub_kubeclient_deployments(namespace)
- stub_kubeclient_ingresses(namespace)
+ allow(service).to receive(:enabled?).and_return(false)
end
- it { is_expected.to include(pods: [expected_pod_cached_data]) }
+ it { is_expected.to be_nil }
end
context 'when kubernetes responds with 500s' do
@@ -451,7 +482,351 @@ RSpec.describe Clusters::Platforms::Kubernetes do
stub_kubeclient_ingresses(namespace, status: 404)
end
- it { is_expected.to include(pods: []) }
+ it { is_expected.to eq(pods: [], deployments: [], ingresses: []) }
+ end
+ end
+
+ describe '#rollout_status' do
+ let(:deployments) { [] }
+ let(:pods) { [] }
+ let(:ingresses) { [] }
+ let(:service) { create(:cluster_platform_kubernetes, :configured) }
+ let!(:cluster) { create(:cluster, :project, enabled: true, platform_kubernetes: service) }
+ let(:project) { cluster.project }
+ let(:environment) { build(:environment, project: project, name: "env", slug: "env-000000") }
+ let(:cache_data) { Hash(deployments: deployments, pods: pods, ingresses: ingresses) }
+
+ subject(:rollout_status) { service.rollout_status(environment, cache_data) }
+
+ context 'legacy deployments based on app label' do
+ let(:legacy_deployment) do
+ kube_deployment(name: 'legacy-deployment').tap do |deployment|
+ deployment['metadata']['annotations'].delete('app.gitlab.com/env')
+ deployment['metadata']['annotations'].delete('app.gitlab.com/app')
+ deployment['metadata']['labels']['app'] = environment.slug
+ end
+ end
+
+ let(:legacy_pod) do
+ kube_pod(name: 'legacy-pod').tap do |pod|
+ pod['metadata']['annotations'].delete('app.gitlab.com/env')
+ pod['metadata']['annotations'].delete('app.gitlab.com/app')
+ pod['metadata']['labels']['app'] = environment.slug
+ end
+ end
+
+ context 'only legacy deployments' do
+ let(:deployments) { [legacy_deployment] }
+ let(:pods) { [legacy_pod] }
+
+ it 'contains nothing' do
+ expect(rollout_status).to be_kind_of(::Gitlab::Kubernetes::RolloutStatus)
+
+ expect(rollout_status.deployments).to eq([])
+ end
+ end
+
+ context 'deployment with no pods' do
+ let(:deployment) { kube_deployment(name: 'some-deployment', environment_slug: environment.slug, project_slug: project.full_path_slug) }
+ let(:deployments) { [deployment] }
+ let(:pods) { [] }
+
+ it 'returns a valid status with matching deployments' do
+ expect(rollout_status).to be_kind_of(::Gitlab::Kubernetes::RolloutStatus)
+ expect(rollout_status.deployments.map(&:name)).to contain_exactly('some-deployment')
+ end
+ end
+
+ context 'new deployment based on annotations' do
+ let(:matched_deployment) { kube_deployment(name: 'matched-deployment', environment_slug: environment.slug, project_slug: project.full_path_slug) }
+ let(:matched_pod) { kube_pod(environment_slug: environment.slug, project_slug: project.full_path_slug) }
+ let(:deployments) { [matched_deployment, legacy_deployment] }
+ let(:pods) { [matched_pod, legacy_pod] }
+
+ it 'contains only matching deployments' do
+ expect(rollout_status).to be_kind_of(::Gitlab::Kubernetes::RolloutStatus)
+
+ expect(rollout_status.deployments.map(&:name)).to contain_exactly('matched-deployment')
+ end
+ end
+ end
+
+ context 'with no deployments but there are pods' do
+ let(:deployments) do
+ []
+ end
+
+ let(:pods) do
+ [
+ kube_pod(name: 'pod-1', environment_slug: environment.slug, project_slug: project.full_path_slug),
+ kube_pod(name: 'pod-2', environment_slug: environment.slug, project_slug: project.full_path_slug)
+ ]
+ end
+
+ it 'returns an empty array' do
+ expect(rollout_status.instances).to eq([])
+ end
+ end
+
+ context 'with valid deployments' do
+ let(:matched_deployment) { kube_deployment(environment_slug: environment.slug, project_slug: project.full_path_slug, replicas: 2) }
+ let(:unmatched_deployment) { kube_deployment }
+ let(:matched_pod) { kube_pod(environment_slug: environment.slug, project_slug: project.full_path_slug, status: 'Pending') }
+ let(:unmatched_pod) { kube_pod(environment_slug: environment.slug + '-test', project_slug: project.full_path_slug) }
+ let(:deployments) { [matched_deployment, unmatched_deployment] }
+ let(:pods) { [matched_pod, unmatched_pod] }
+
+ it 'creates a matching RolloutStatus' do
+ expect(rollout_status).to be_kind_of(::Gitlab::Kubernetes::RolloutStatus)
+ expect(rollout_status.deployments.map(&:annotations)).to eq([
+ { 'app.gitlab.com/app' => project.full_path_slug, 'app.gitlab.com/env' => 'env-000000' }
+ ])
+ expect(rollout_status.instances).to eq([{ pod_name: "kube-pod",
+ stable: true,
+ status: "pending",
+ tooltip: "kube-pod (Pending)",
+ track: "stable" },
+ { pod_name: "Not provided",
+ stable: true,
+ status: "pending",
+ tooltip: "Not provided (Pending)",
+ track: "stable" }])
+ end
+
+ context 'with canary ingress' do
+ let(:ingresses) { [kube_ingress(track: :canary)] }
+
+ it 'has canary ingress' do
+ expect(rollout_status).to be_canary_ingress_exists
+ expect(rollout_status.canary_ingress.canary_weight).to eq(50)
+ end
+ end
+ end
+
+ context 'with empty list of deployments' do
+ it 'creates a matching RolloutStatus' do
+ expect(rollout_status).to be_kind_of(::Gitlab::Kubernetes::RolloutStatus)
+ expect(rollout_status).to be_not_found
+ end
+ end
+
+ context 'when the pod track does not match the deployment track' do
+ let(:deployments) do
+ [
+ kube_deployment(name: 'deployment-a', environment_slug: environment.slug, project_slug: project.full_path_slug, replicas: 1, track: 'weekly')
+ ]
+ end
+
+ let(:pods) do
+ [
+ kube_pod(name: 'pod-a-1', environment_slug: environment.slug, project_slug: project.full_path_slug, track: 'weekly'),
+ kube_pod(name: 'pod-a-2', environment_slug: environment.slug, project_slug: project.full_path_slug, track: 'daily')
+ ]
+ end
+
+ it 'does not return the pod' do
+ expect(rollout_status.instances.map { |p| p[:pod_name] }).to eq(['pod-a-1'])
+ end
+ end
+
+ context 'when the pod track is not stable' do
+ let(:deployments) do
+ [
+ kube_deployment(name: 'deployment-a', environment_slug: environment.slug, project_slug: project.full_path_slug, replicas: 1, track: 'something')
+ ]
+ end
+
+ let(:pods) do
+ [
+ kube_pod(name: 'pod-a-1', environment_slug: environment.slug, project_slug: project.full_path_slug, track: 'something')
+ ]
+ end
+
+      it 'reports the pod as not stable' do
+ expect(rollout_status.instances.map { |p| p.slice(:stable, :track) }).to eq([{ stable: false, track: 'something' }])
+ end
+ end
+
+ context 'when the pod track is stable' do
+ let(:deployments) do
+ [
+ kube_deployment(name: 'deployment-a', environment_slug: environment.slug, project_slug: project.full_path_slug, replicas: 1, track: 'stable')
+ ]
+ end
+
+ let(:pods) do
+ [
+ kube_pod(name: 'pod-a-1', environment_slug: environment.slug, project_slug: project.full_path_slug, track: 'stable')
+ ]
+ end
+
+      it 'reports the pod as stable' do
+ expect(rollout_status.instances.map { |p| p.slice(:stable, :track) }).to eq([{ stable: true, track: 'stable' }])
+ end
+ end
+
+ context 'when the pod track is not provided' do
+ let(:deployments) do
+ [
+ kube_deployment(name: 'deployment-a', environment_slug: environment.slug, project_slug: project.full_path_slug, replicas: 1)
+ ]
+ end
+
+ let(:pods) do
+ [
+ kube_pod(name: 'pod-a-1', environment_slug: environment.slug, project_slug: project.full_path_slug)
+ ]
+ end
+
+      it 'reports the pod as stable' do
+ expect(rollout_status.instances.map { |p| p.slice(:stable, :track) }).to eq([{ stable: true, track: 'stable' }])
+ end
+ end
+
+ context 'when the number of matching pods does not match the number of replicas' do
+ let(:deployments) do
+ [
+ kube_deployment(name: 'deployment-a', environment_slug: environment.slug, project_slug: project.full_path_slug, replicas: 3)
+ ]
+ end
+
+ let(:pods) do
+ [
+ kube_pod(name: 'pod-a-1', environment_slug: environment.slug, project_slug: project.full_path_slug)
+ ]
+ end
+
+ it 'returns a pending pod for each missing replica' do
+ expect(rollout_status.instances.map { |p| p.slice(:pod_name, :status) }).to eq([
+ { pod_name: 'pod-a-1', status: 'running' },
+ { pod_name: 'Not provided', status: 'pending' },
+ { pod_name: 'Not provided', status: 'pending' }
+ ])
+ end
+ end
+
+ context 'when pending pods are returned for missing replicas' do
+ let(:deployments) do
+ [
+ kube_deployment(name: 'deployment-a', environment_slug: environment.slug, project_slug: project.full_path_slug, replicas: 2, track: 'canary'),
+ kube_deployment(name: 'deployment-b', environment_slug: environment.slug, project_slug: project.full_path_slug, replicas: 2, track: 'stable')
+ ]
+ end
+
+ let(:pods) do
+ [
+ kube_pod(name: 'pod-a-1', environment_slug: environment.slug, project_slug: project.full_path_slug, track: 'canary')
+ ]
+ end
+
+ it 'returns the correct track for the pending pods' do
+ expect(rollout_status.instances.map { |p| p.slice(:pod_name, :status, :track) }).to eq([
+ { pod_name: 'pod-a-1', status: 'running', track: 'canary' },
+ { pod_name: 'Not provided', status: 'pending', track: 'canary' },
+ { pod_name: 'Not provided', status: 'pending', track: 'stable' },
+ { pod_name: 'Not provided', status: 'pending', track: 'stable' }
+ ])
+ end
+ end
+
+ context 'when two deployments with the same track are missing instances' do
+ let(:deployments) do
+ [
+ kube_deployment(name: 'deployment-a', environment_slug: environment.slug, project_slug: project.full_path_slug, replicas: 1, track: 'mytrack'),
+ kube_deployment(name: 'deployment-b', environment_slug: environment.slug, project_slug: project.full_path_slug, replicas: 1, track: 'mytrack')
+ ]
+ end
+
+ let(:pods) do
+ []
+ end
+
+ it 'returns the correct number of pending pods' do
+ expect(rollout_status.instances.map { |p| p.slice(:pod_name, :status, :track) }).to eq([
+ { pod_name: 'Not provided', status: 'pending', track: 'mytrack' },
+ { pod_name: 'Not provided', status: 'pending', track: 'mytrack' }
+ ])
+ end
+ end
+
+ context 'with multiple matching deployments' do
+ let(:deployments) do
+ [
+ kube_deployment(name: 'deployment-a', environment_slug: environment.slug, project_slug: project.full_path_slug, replicas: 2),
+ kube_deployment(name: 'deployment-b', environment_slug: environment.slug, project_slug: project.full_path_slug, replicas: 2)
+ ]
+ end
+
+ let(:pods) do
+ [
+ kube_pod(name: 'pod-a-1', environment_slug: environment.slug, project_slug: project.full_path_slug),
+ kube_pod(name: 'pod-a-2', environment_slug: environment.slug, project_slug: project.full_path_slug),
+ kube_pod(name: 'pod-b-1', environment_slug: environment.slug, project_slug: project.full_path_slug),
+ kube_pod(name: 'pod-b-2', environment_slug: environment.slug, project_slug: project.full_path_slug)
+ ]
+ end
+
+ it 'returns each pod once' do
+ expect(rollout_status.instances.map { |p| p[:pod_name] }).to eq(['pod-a-1', 'pod-a-2', 'pod-b-1', 'pod-b-2'])
+ end
+ end
+ end
+
+ describe '#ingresses' do
+ subject { service.ingresses(namespace) }
+
+ let(:service) { create(:cluster_platform_kubernetes, :configured) }
+ let(:namespace) { 'project-namespace' }
+
+ context 'when there is an ingress in the namespace' do
+ before do
+ stub_kubeclient_ingresses(namespace)
+ end
+
+ it 'returns an ingress' do
+ expect(subject.count).to eq(1)
+ expect(subject.first).to be_kind_of(::Gitlab::Kubernetes::Ingress)
+ expect(subject.first.name).to eq('production-auto-deploy')
+ end
+ end
+
+    context 'when there are no ingresses in the namespace' do
+ before do
+ allow(service.kubeclient).to receive(:get_ingresses) { raise Kubeclient::ResourceNotFoundError.new(404, 'Not found', nil) }
+ end
+
+ it 'returns nothing' do
+ is_expected.to be_empty
+ end
+ end
+ end
+
+ describe '#patch_ingress' do
+ subject { service.patch_ingress(namespace, ingress, data) }
+
+ let(:service) { create(:cluster_platform_kubernetes, :configured) }
+ let(:namespace) { 'project-namespace' }
+ let(:ingress) { Gitlab::Kubernetes::Ingress.new(kube_ingress) }
+ let(:data) { { metadata: { annotations: { name: 'test' } } } }
+
+ context 'when there is an ingress in the namespace' do
+ before do
+ stub_kubeclient_ingresses(namespace, method: :patch, resource_path: "/#{ingress.name}")
+ end
+
+ it 'returns an ingress' do
+ expect(subject[:items][0][:metadata][:name]).to eq('production-auto-deploy')
+ end
+ end
+
+    context 'when there are no ingresses in the namespace' do
+ before do
+ allow(service.kubeclient).to receive(:patch_ingress) { raise Kubeclient::ResourceNotFoundError.new(404, 'Not found', nil) }
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(Kubeclient::ResourceNotFoundError)
+ end
end
end
end
diff --git a/spec/models/concerns/cache_markdown_field_spec.rb b/spec/models/concerns/cache_markdown_field_spec.rb
index 37e2f5fb8d4..6e62d4ef31b 100644
--- a/spec/models/concerns/cache_markdown_field_spec.rb
+++ b/spec/models/concerns/cache_markdown_field_spec.rb
@@ -233,7 +233,7 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
end
it 'calls #refresh_markdown_cache!' do
- expect(thing).to receive(:refresh_markdown_cache!)
+ expect(thing).to receive(:refresh_markdown_cache)
expect(thing.updated_cached_html_for(:description)).to eq(html)
end
@@ -279,10 +279,101 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
end
end
+ shared_examples 'a class with mentionable markdown fields' do
+ let(:mentionable) { klass.new(description: markdown, description_html: html, title: markdown, title_html: html, cached_markdown_version: cache_version) }
+
+ context 'when klass is a Mentionable', :aggregate_failures do
+ before do
+ klass.send(:include, Mentionable)
+ klass.send(:attr_mentionable, :description)
+ end
+
+ describe '#mentionable_attributes_changed?' do
+ message = Struct.new(:text)
+
+ let(:changes) do
+ msg = message.new('test')
+
+ changes = {}
+ changes[msg] = ['', 'some message']
+ changes[:random_sym_key] = ['', 'some message']
+ changes["description"] = ['', 'some message']
+ changes
+ end
+
+ it 'returns true with key string' do
+ changes["description_html"] = ['', 'some message']
+
+ allow(mentionable).to receive(:saved_changes).and_return(changes)
+
+ expect(mentionable.send(:mentionable_attributes_changed?)).to be true
+ end
+
+ it 'returns false with key symbol' do
+ changes[:description_html] = ['', 'some message']
+ allow(mentionable).to receive(:saved_changes).and_return(changes)
+
+ expect(mentionable.send(:mentionable_attributes_changed?)).to be false
+ end
+
+ it 'returns false when no attr_mentionable keys' do
+ allow(mentionable).to receive(:saved_changes).and_return(changes)
+
+ expect(mentionable.send(:mentionable_attributes_changed?)).to be false
+ end
+ end
+
+ describe '#save' do
+ context 'when cache is outdated' do
+ before do
+ thing.cached_markdown_version += 1
+ end
+
+        context 'when the markdown field is also a mentionable attribute' do
+ let(:thing) { klass.new(description: markdown, description_html: html, cached_markdown_version: cache_version) }
+
+ it 'calls #store_mentions!' do
+ expect(thing).to receive(:mentionable_attributes_changed?).and_return(true)
+ expect(thing).to receive(:store_mentions!)
+
+ thing.try(:save)
+
+ expect(thing.description_html).to eq(html)
+ end
+ end
+
+        context 'when the markdown field is not a mentionable attribute' do
+ let(:thing) { klass.new(title: markdown, title_html: html, cached_markdown_version: cache_version) }
+
+ it 'does not call #store_mentions!' do
+ expect(thing).not_to receive(:store_mentions!)
+ expect(thing).to receive(:refresh_markdown_cache)
+
+ thing.try(:save)
+
+ expect(thing.title_html).to eq(html)
+ end
+ end
+ end
+
+ context 'when the markdown field does not exist' do
+ let(:thing) { klass.new(cached_markdown_version: cache_version) }
+
+ it 'does not call #store_mentions!' do
+ expect(thing).not_to receive(:store_mentions!)
+
+ thing.try(:save)
+ end
+ end
+ end
+ end
+ end
+
context 'for Active record classes' do
let(:klass) { ar_class }
it_behaves_like 'a class with cached markdown fields'
+ it_behaves_like 'a class with mentionable markdown fields'
describe '#attribute_invalidated?' do
let(:thing) { klass.create!(description: markdown, description_html: html, cached_markdown_version: cache_version) }
diff --git a/spec/models/concerns/case_sensitivity_spec.rb b/spec/models/concerns/case_sensitivity_spec.rb
index 5fb7cdb4443..7cf7b825d7d 100644
--- a/spec/models/concerns/case_sensitivity_spec.rb
+++ b/spec/models/concerns/case_sensitivity_spec.rb
@@ -4,16 +4,16 @@ require 'spec_helper'
RSpec.describe CaseSensitivity do
describe '.iwhere' do
- let(:connection) { ActiveRecord::Base.connection }
- let(:model) do
+ let_it_be(:connection) { ActiveRecord::Base.connection }
+ let_it_be(:model) do
Class.new(ActiveRecord::Base) do
include CaseSensitivity
self.table_name = 'namespaces'
end
end
- let!(:model_1) { model.create!(path: 'mOdEl-1', name: 'mOdEl 1') }
- let!(:model_2) { model.create!(path: 'mOdEl-2', name: 'mOdEl 2') }
+ let_it_be(:model_1) { model.create!(path: 'mOdEl-1', name: 'mOdEl 1') }
+ let_it_be(:model_2) { model.create!(path: 'mOdEl-2', name: 'mOdEl 2') }
it 'finds a single instance by a single attribute regardless of case' do
expect(model.iwhere(path: 'MODEL-1')).to contain_exactly(model_1)
@@ -28,6 +28,10 @@ RSpec.describe CaseSensitivity do
.to contain_exactly(model_1)
end
+ it 'finds instances by custom Arel attributes' do
+ expect(model.iwhere(model.arel_table[:path] => 'MODEL-1')).to contain_exactly(model_1)
+ end
+
it 'builds a query using LOWER' do
query = model.iwhere(path: %w(MODEL-1 model-2), name: 'model 1').to_sql
expected_query = <<~QRY.strip
diff --git a/spec/models/concerns/ignorable_columns_spec.rb b/spec/models/concerns/ignorable_columns_spec.rb
index a5eff154a0b..c97dc606159 100644
--- a/spec/models/concerns/ignorable_columns_spec.rb
+++ b/spec/models/concerns/ignorable_columns_spec.rb
@@ -59,6 +59,14 @@ RSpec.describe IgnorableColumns do
it_behaves_like 'storing removal information'
end
+ context 'when called on a subclass without setting the ignored columns' do
+ let(:subclass) { Class.new(record_class) }
+
+ it 'does not raise Deadlock error' do
+ expect { subclass.ignored_columns_details }.not_to raise_error
+ end
+ end
+
it 'defaults to empty Hash' do
expect(subject.ignored_columns_details).to eq({})
end
diff --git a/spec/models/concerns/mentionable_spec.rb b/spec/models/concerns/mentionable_spec.rb
index 516c0fd75bc..3c095477ea9 100644
--- a/spec/models/concerns/mentionable_spec.rb
+++ b/spec/models/concerns/mentionable_spec.rb
@@ -29,42 +29,6 @@ RSpec.describe Mentionable do
expect(mentionable.referenced_mentionables).to be_empty
end
end
-
- describe '#any_mentionable_attributes_changed?' do
- message = Struct.new(:text)
-
- let(:mentionable) { Example.new }
- let(:changes) do
- msg = message.new('test')
-
- changes = {}
- changes[msg] = ['', 'some message']
- changes[:random_sym_key] = ['', 'some message']
- changes["random_string_key"] = ['', 'some message']
- changes
- end
-
- it 'returns true with key string' do
- changes["message"] = ['', 'some message']
-
- allow(mentionable).to receive(:saved_changes).and_return(changes)
-
- expect(mentionable.send(:any_mentionable_attributes_changed?)).to be true
- end
-
- it 'returns false with key symbol' do
- changes[:message] = ['', 'some message']
- allow(mentionable).to receive(:saved_changes).and_return(changes)
-
- expect(mentionable.send(:any_mentionable_attributes_changed?)).to be false
- end
-
- it 'returns false when no attr_mentionable keys' do
- allow(mentionable).to receive(:saved_changes).and_return(changes)
-
- expect(mentionable.send(:any_mentionable_attributes_changed?)).to be false
- end
- end
end
RSpec.describe Issue, "Mentionable" do
diff --git a/spec/models/concerns/project_features_compatibility_spec.rb b/spec/models/concerns/project_features_compatibility_spec.rb
index ba70ff563a8..2059e170446 100644
--- a/spec/models/concerns/project_features_compatibility_spec.rb
+++ b/spec/models/concerns/project_features_compatibility_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe ProjectFeaturesCompatibility do
let(:project) { create(:project) }
let(:features_enabled) { %w(issues wiki builds merge_requests snippets) }
- let(:features) { features_enabled + %w(repository pages) }
+ let(:features) { features_enabled + %w(repository pages operations) }
# We had issues_enabled, snippets_enabled, builds_enabled, merge_requests_enabled and issues_enabled fields on projects table
# All those fields got moved to a new table called project_feature and are now integers instead of booleans
diff --git a/spec/models/concerns/routable_spec.rb b/spec/models/concerns/routable_spec.rb
index e4cf68663ef..6ab87053258 100644
--- a/spec/models/concerns/routable_spec.rb
+++ b/spec/models/concerns/routable_spec.rb
@@ -2,8 +2,69 @@
require 'spec_helper'
+RSpec.shared_examples '.find_by_full_path' do
+ describe '.find_by_full_path', :aggregate_failures do
+ it 'finds records by their full path' do
+ expect(described_class.find_by_full_path(record.full_path)).to eq(record)
+ expect(described_class.find_by_full_path(record.full_path.upcase)).to eq(record)
+ end
+
+ it 'returns nil for unknown paths' do
+ expect(described_class.find_by_full_path('unknown')).to be_nil
+ end
+
+ it 'includes route information when loading a record' do
+ control_count = ActiveRecord::QueryRecorder.new do
+ described_class.find_by_full_path(record.full_path)
+ end.count
+
+ expect do
+ described_class.find_by_full_path(record.full_path).route
+ end.not_to exceed_all_query_limit(control_count)
+ end
+
+ context 'with redirect routes' do
+ let_it_be(:redirect_route) { create(:redirect_route, source: record) }
+
+ context 'without follow_redirects option' do
+ it 'does not find records by their redirected path' do
+ expect(described_class.find_by_full_path(redirect_route.path)).to be_nil
+ expect(described_class.find_by_full_path(redirect_route.path.upcase)).to be_nil
+ end
+ end
+
+ context 'with follow_redirects option set to true' do
+ it 'finds records by their canonical path' do
+ expect(described_class.find_by_full_path(record.full_path, follow_redirects: true)).to eq(record)
+ expect(described_class.find_by_full_path(record.full_path.upcase, follow_redirects: true)).to eq(record)
+ end
+
+ it 'finds records by their redirected path' do
+ expect(described_class.find_by_full_path(redirect_route.path, follow_redirects: true)).to eq(record)
+ expect(described_class.find_by_full_path(redirect_route.path.upcase, follow_redirects: true)).to eq(record)
+ end
+
+ it 'returns nil for unknown paths' do
+ expect(described_class.find_by_full_path('unknown', follow_redirects: true)).to be_nil
+ end
+ end
+ end
+ end
+end
+
+RSpec.describe Routable do
+ it_behaves_like '.find_by_full_path' do
+ let_it_be(:record) { create(:group) }
+ end
+
+ it_behaves_like '.find_by_full_path' do
+ let_it_be(:record) { create(:project) }
+ end
+end
+
RSpec.describe Group, 'Routable' do
- let!(:group) { create(:group, name: 'foo') }
+ let_it_be_with_reload(:group) { create(:group, name: 'foo') }
+ let_it_be(:nested_group) { create(:group, parent: group) }
describe 'Validations' do
it { is_expected.to validate_presence_of(:route) }
@@ -59,61 +120,20 @@ RSpec.describe Group, 'Routable' do
end
describe '.find_by_full_path' do
- let!(:nested_group) { create(:group, parent: group) }
-
- context 'without any redirect routes' do
- it { expect(described_class.find_by_full_path(group.to_param)).to eq(group) }
- it { expect(described_class.find_by_full_path(group.to_param.upcase)).to eq(group) }
- it { expect(described_class.find_by_full_path(nested_group.to_param)).to eq(nested_group) }
- it { expect(described_class.find_by_full_path('unknown')).to eq(nil) }
-
- it 'includes route information when loading a record' do
- path = group.to_param
- control_count = ActiveRecord::QueryRecorder.new { described_class.find_by_full_path(path) }.count
-
- expect { described_class.find_by_full_path(path).route }.not_to exceed_all_query_limit(control_count)
- end
+ it_behaves_like '.find_by_full_path' do
+ let_it_be(:record) { group }
end
- context 'with redirect routes' do
- let!(:group_redirect_route) { group.redirect_routes.create!(path: 'bar') }
- let!(:nested_group_redirect_route) { nested_group.redirect_routes.create!(path: nested_group.path.sub('foo', 'bar')) }
-
- context 'without follow_redirects option' do
- context 'with the given path not matching any route' do
- it { expect(described_class.find_by_full_path('unknown')).to eq(nil) }
- end
-
- context 'with the given path matching the canonical route' do
- it { expect(described_class.find_by_full_path(group.to_param)).to eq(group) }
- it { expect(described_class.find_by_full_path(group.to_param.upcase)).to eq(group) }
- it { expect(described_class.find_by_full_path(nested_group.to_param)).to eq(nested_group) }
- end
-
- context 'with the given path matching a redirect route' do
- it { expect(described_class.find_by_full_path(group_redirect_route.path)).to eq(nil) }
- it { expect(described_class.find_by_full_path(group_redirect_route.path.upcase)).to eq(nil) }
- it { expect(described_class.find_by_full_path(nested_group_redirect_route.path)).to eq(nil) }
- end
- end
-
- context 'with follow_redirects option set to true' do
- context 'with the given path not matching any route' do
- it { expect(described_class.find_by_full_path('unknown', follow_redirects: true)).to eq(nil) }
- end
+ it_behaves_like '.find_by_full_path' do
+ let_it_be(:record) { nested_group }
+ end
- context 'with the given path matching the canonical route' do
- it { expect(described_class.find_by_full_path(group.to_param, follow_redirects: true)).to eq(group) }
- it { expect(described_class.find_by_full_path(group.to_param.upcase, follow_redirects: true)).to eq(group) }
- it { expect(described_class.find_by_full_path(nested_group.to_param, follow_redirects: true)).to eq(nested_group) }
- end
+ it 'does not find projects with a matching path' do
+ project = create(:project)
+ redirect_route = create(:redirect_route, source: project)
- context 'with the given path matching a redirect route' do
- it { expect(described_class.find_by_full_path(group_redirect_route.path, follow_redirects: true)).to eq(group) }
- it { expect(described_class.find_by_full_path(group_redirect_route.path.upcase, follow_redirects: true)).to eq(group) }
- it { expect(described_class.find_by_full_path(nested_group_redirect_route.path, follow_redirects: true)).to eq(nested_group) }
- end
- end
+ expect(described_class.find_by_full_path(project.full_path)).to be_nil
+ expect(described_class.find_by_full_path(redirect_route.path, follow_redirects: true)).to be_nil
end
end
@@ -131,8 +151,6 @@ RSpec.describe Group, 'Routable' do
end
context 'with valid paths' do
- let!(:nested_group) { create(:group, parent: group) }
-
it 'returns the projects matching the paths' do
result = described_class.where_full_path_in([group.to_param, nested_group.to_param])
@@ -148,32 +166,36 @@ RSpec.describe Group, 'Routable' do
end
describe '#full_path' do
- let(:group) { create(:group) }
- let(:nested_group) { create(:group, parent: group) }
-
it { expect(group.full_path).to eq(group.path) }
it { expect(nested_group.full_path).to eq("#{group.full_path}/#{nested_group.path}") }
end
describe '#full_name' do
- let(:group) { create(:group) }
- let(:nested_group) { create(:group, parent: group) }
-
it { expect(group.full_name).to eq(group.name) }
it { expect(nested_group.full_name).to eq("#{group.name} / #{nested_group.name}") }
end
end
RSpec.describe Project, 'Routable' do
- describe '#full_path' do
- let(:project) { build_stubbed(:project) }
+ let_it_be(:project) { create(:project) }
+ it_behaves_like '.find_by_full_path' do
+ let_it_be(:record) { project }
+ end
+
+ it 'does not find groups with a matching path' do
+ group = create(:group)
+ redirect_route = create(:redirect_route, source: group)
+
+ expect(described_class.find_by_full_path(group.full_path)).to be_nil
+ expect(described_class.find_by_full_path(redirect_route.path, follow_redirects: true)).to be_nil
+ end
+
+ describe '#full_path' do
it { expect(project.full_path).to eq "#{project.namespace.full_path}/#{project.path}" }
end
describe '#full_name' do
- let(:project) { build_stubbed(:project) }
-
it { expect(project.full_name).to eq "#{project.namespace.human_name} / #{project.name}" }
end
end
diff --git a/spec/models/concerns/token_authenticatable_spec.rb b/spec/models/concerns/token_authenticatable_spec.rb
index 90e94b5dca9..d8b77e1cd0d 100644
--- a/spec/models/concerns/token_authenticatable_spec.rb
+++ b/spec/models/concerns/token_authenticatable_spec.rb
@@ -105,8 +105,8 @@ RSpec.describe PersonalAccessToken, 'TokenAuthenticatable' do
it 'sets new token' do
subject
- expect(personal_access_token.token).to eq(token_value)
- expect(personal_access_token.token_digest).to eq(Gitlab::CryptoHelper.sha256(token_value))
+ expect(personal_access_token.token).to eq("#{PersonalAccessToken.token_prefix}#{token_value}")
+ expect(personal_access_token.token_digest).to eq(Gitlab::CryptoHelper.sha256("#{PersonalAccessToken.token_prefix}#{token_value}"))
end
end
diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb
index 2adceb1c960..0ecefff3a97 100644
--- a/spec/models/container_repository_spec.rb
+++ b/spec/models/container_repository_spec.rb
@@ -188,7 +188,7 @@ RSpec.describe ContainerRepository do
subject { repository.start_expiration_policy! }
it 'sets the expiration policy started at to now' do
- Timecop.freeze do
+ freeze_time do
expect { subject }
.to change { repository.expiration_policy_started_at }.from(nil).to(Time.zone.now)
end
diff --git a/spec/models/custom_emoji_spec.rb b/spec/models/custom_emoji_spec.rb
index 62380299ea0..41ce480b02f 100644
--- a/spec/models/custom_emoji_spec.rb
+++ b/spec/models/custom_emoji_spec.rb
@@ -22,6 +22,15 @@ RSpec.describe CustomEmoji do
expect(new_emoji.errors.messages).to eq(name: ["#{emoji_name} is already being used for another emoji"])
end
+ it 'disallows very long invalid emoji name without regular expression backtracking issues' do
+ new_emoji = build(:custom_emoji, name: 'a' * 10000 + '!', group: group)
+
+ Timeout.timeout(1) do
+ expect(new_emoji).not_to be_valid
+ expect(new_emoji.errors.messages).to eq(name: ["is too long (maximum is 36 characters)", "is invalid"])
+ end
+ end
+
it 'disallows duplicate custom emoji names within namespace' do
old_emoji = create(:custom_emoji, group: group)
new_emoji = build(:custom_emoji, name: old_emoji.name, namespace: old_emoji.namespace, group: group)
diff --git a/spec/models/cycle_analytics/code_spec.rb b/spec/models/cycle_analytics/code_spec.rb
index 8900c49a662..ca612cba654 100644
--- a/spec/models/cycle_analytics/code_spec.rb
+++ b/spec/models/cycle_analytics/code_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'CycleAnalytics#code' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:from_date) { 10.days.ago }
let_it_be(:user) { project.owner }
- let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
+ let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date, current_user: user }) }
subject { project_level }
diff --git a/spec/models/cycle_analytics/issue_spec.rb b/spec/models/cycle_analytics/issue_spec.rb
index 5857365ceab..66d21f6925f 100644
--- a/spec/models/cycle_analytics/issue_spec.rb
+++ b/spec/models/cycle_analytics/issue_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'CycleAnalytics#issue' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:from_date) { 10.days.ago }
let_it_be(:user) { project.owner }
- let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
+ let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date, current_user: user }) }
subject { project_level }
diff --git a/spec/models/cycle_analytics/plan_spec.rb b/spec/models/cycle_analytics/plan_spec.rb
index 2b9be64da2b..acaf767db01 100644
--- a/spec/models/cycle_analytics/plan_spec.rb
+++ b/spec/models/cycle_analytics/plan_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'CycleAnalytics#plan' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:from_date) { 10.days.ago }
let_it_be(:user) { project.owner }
- let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
+ let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date, current_user: user }) }
subject { project_level }
diff --git a/spec/models/cycle_analytics/review_spec.rb b/spec/models/cycle_analytics/review_spec.rb
index 6ebbcebd71d..06d9cfbf8c0 100644
--- a/spec/models/cycle_analytics/review_spec.rb
+++ b/spec/models/cycle_analytics/review_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe 'CycleAnalytics#review' do
let_it_be(:from_date) { 10.days.ago }
let_it_be(:user) { project.owner }
- subject { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
+ subject { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date, current_user: user }) }
generate_cycle_analytics_spec(
phase: :review,
diff --git a/spec/models/cycle_analytics/staging_spec.rb b/spec/models/cycle_analytics/staging_spec.rb
index 024625d229f..50cb49d6309 100644
--- a/spec/models/cycle_analytics/staging_spec.rb
+++ b/spec/models/cycle_analytics/staging_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'CycleAnalytics#staging' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:from_date) { 10.days.ago }
let_it_be(:user) { project.owner }
- let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
+ let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date, current_user: user }) }
subject { project_level }
diff --git a/spec/models/cycle_analytics/test_spec.rb b/spec/models/cycle_analytics/test_spec.rb
index 7010d69f8a4..8f65c047b15 100644
--- a/spec/models/cycle_analytics/test_spec.rb
+++ b/spec/models/cycle_analytics/test_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe 'CycleAnalytics#test' do
let_it_be(:from_date) { 10.days.ago }
let_it_be(:user) { project.owner }
let_it_be(:issue) { create(:issue, project: project) }
- let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
+ let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date, current_user: user }) }
let!(:merge_request) { create_merge_request_closing_issue(user, project, issue) }
subject { project_level }
diff --git a/spec/models/dependency_proxy/manifest_spec.rb b/spec/models/dependency_proxy/manifest_spec.rb
new file mode 100644
index 00000000000..aa2e73356dd
--- /dev/null
+++ b/spec/models/dependency_proxy/manifest_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe DependencyProxy::Manifest, type: :model do
+ describe 'relationships' do
+ it { is_expected.to belong_to(:group) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:group) }
+ it { is_expected.to validate_presence_of(:file) }
+ it { is_expected.to validate_presence_of(:file_name) }
+ it { is_expected.to validate_presence_of(:digest) }
+ end
+
+ describe 'file is being stored' do
+ subject { create(:dependency_proxy_manifest) }
+
+ context 'when existing object has local store' do
+ it_behaves_like 'mounted file in local store'
+ end
+
+ context 'when direct upload is enabled' do
+ before do
+ stub_dependency_proxy_object_storage(direct_upload: true)
+ end
+
+ it_behaves_like 'mounted file in object store'
+ end
+ end
+
+ describe '.find_or_initialize_by_file_name' do
+ subject { DependencyProxy::Manifest.find_or_initialize_by_file_name(file_name) }
+
+ context 'no manifest exists' do
+ let_it_be(:file_name) { 'foo' }
+
+ it 'initializes a manifest' do
+ expect(DependencyProxy::Manifest).to receive(:new).with(file_name: file_name)
+
+ subject
+ end
+ end
+
+ context 'manifest exists' do
+ let_it_be(:dependency_proxy_manifest) { create(:dependency_proxy_manifest) }
+ let_it_be(:file_name) { dependency_proxy_manifest.file_name }
+
+ it { is_expected.to eq(dependency_proxy_manifest) }
+ end
+ end
+end
diff --git a/spec/models/dependency_proxy/registry_spec.rb b/spec/models/dependency_proxy/registry_spec.rb
index 5bfa75a2eed..a888ee2b7f7 100644
--- a/spec/models/dependency_proxy/registry_spec.rb
+++ b/spec/models/dependency_proxy/registry_spec.rb
@@ -54,4 +54,11 @@ RSpec.describe DependencyProxy::Registry, type: :model do
end
end
end
+
+ describe '#authenticate_header' do
+ it 'returns the OAuth realm and service header' do
+ expect(described_class.authenticate_header)
+ .to eq("Bearer realm=\"#{Gitlab.config.gitlab.url}/jwt/auth\",service=\"dependency_proxy\"")
+ end
+ end
end
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index 9afacd518af..c962b012a4b 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -202,6 +202,31 @@ RSpec.describe Deployment do
deployment.cancel!
end
end
+
+ context 'when deployment was skipped' do
+ let(:deployment) { create(:deployment, :running) }
+
+ it 'has correct status' do
+ deployment.skip!
+
+ expect(deployment).to be_skipped
+ expect(deployment.finished_at).to be_nil
+ end
+
+ it 'does not execute Deployments::LinkMergeRequestWorker asynchronously' do
+ expect(Deployments::LinkMergeRequestWorker)
+ .not_to receive(:perform_async).with(deployment.id)
+
+ deployment.skip!
+ end
+
+ it 'does not execute Deployments::ExecuteHooksWorker' do
+ expect(Deployments::ExecuteHooksWorker)
+ .not_to receive(:perform_async).with(deployment.id)
+
+ deployment.skip!
+ end
+ end
end
describe '#success?' do
@@ -320,6 +345,7 @@ RSpec.describe Deployment do
deployment2 = create(:deployment, status: :running )
create(:deployment, status: :failed )
create(:deployment, status: :canceled )
+ create(:deployment, status: :skipped)
is_expected.to contain_exactly(deployment1, deployment2)
end
@@ -346,10 +372,70 @@ RSpec.describe Deployment do
it 'retrieves deployments with deployable builds' do
with_deployable = create(:deployment)
create(:deployment, deployable: nil)
+ create(:deployment, deployable_type: 'CommitStatus', deployable_id: non_existing_record_id)
is_expected.to contain_exactly(with_deployable)
end
end
+
+ describe 'finished_between' do
+ subject { described_class.finished_between(start_time, end_time) }
+
+ let_it_be(:start_time) { DateTime.new(2017) }
+ let_it_be(:end_time) { DateTime.new(2019) }
+ let_it_be(:deployment_2016) { create(:deployment, finished_at: DateTime.new(2016)) }
+ let_it_be(:deployment_2017) { create(:deployment, finished_at: DateTime.new(2017)) }
+ let_it_be(:deployment_2018) { create(:deployment, finished_at: DateTime.new(2018)) }
+ let_it_be(:deployment_2019) { create(:deployment, finished_at: DateTime.new(2019)) }
+ let_it_be(:deployment_2020) { create(:deployment, finished_at: DateTime.new(2020)) }
+
+ it 'retrieves deployments that finished between the specified times' do
+ is_expected.to contain_exactly(deployment_2017, deployment_2018)
+ end
+ end
+
+ describe 'visible' do
+ subject { described_class.visible }
+
+ it 'retrieves the visible deployments' do
+ deployment1 = create(:deployment, status: :running)
+ deployment2 = create(:deployment, status: :success)
+ deployment3 = create(:deployment, status: :failed)
+ deployment4 = create(:deployment, status: :canceled)
+ create(:deployment, status: :skipped)
+
+ is_expected.to contain_exactly(deployment1, deployment2, deployment3, deployment4)
+ end
+ end
+ end
+
+ describe 'latest_for_sha' do
+ subject { described_class.latest_for_sha(sha) }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:commits) { project.repository.commits('master', limit: 2) }
+ let_it_be(:deployments) { commits.reverse.map { |commit| create(:deployment, project: project, sha: commit.id) } }
+ let(:sha) { commits.map(&:id) }
+
+ it 'finds the latest deployment with sha' do
+ is_expected.to eq(deployments.last)
+ end
+
+ context 'when sha is old' do
+ let(:sha) { commits.last.id }
+
+ it 'finds the latest deployment with sha' do
+ is_expected.to eq(deployments.first)
+ end
+ end
+
+ context 'when sha is nil' do
+ let(:sha) { nil }
+
+ it 'returns nothing' do
+ is_expected.to be_nil
+ end
+ end
end
describe '#includes_commit?' do
diff --git a/spec/models/diff_note_spec.rb b/spec/models/diff_note_spec.rb
index f7ce44f7281..215c733f26b 100644
--- a/spec/models/diff_note_spec.rb
+++ b/spec/models/diff_note_spec.rb
@@ -38,6 +38,26 @@ RSpec.describe DiffNote do
it_behaves_like 'a valid diff positionable note' do
subject { build(:diff_note_on_commit, project: project, commit_id: commit_id, position: position) }
end
+
+ it "is not valid when noteable is empty" do
+ note = build(:diff_note_on_merge_request, project: project, noteable: nil)
+
+ note.valid?
+
+ expect(note.errors[:noteable]).to include("doesn't support new-style diff notes")
+ end
+
+ context 'when importing' do
+ it "does not check if it's supported" do
+ note = build(:diff_note_on_merge_request, project: project, noteable: nil)
+ note.importing = true
+ note.valid?
+
+ expect(note.errors.full_messages).not_to include(
+ "Noteable doesn't support new-style diff notes"
+ )
+ end
+ end
end
describe "#position=" do
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 179f2a1b0e0..3e10ea847ba 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
it { is_expected.to have_many(:deployments) }
it { is_expected.to have_many(:metrics_dashboard_annotations) }
it { is_expected.to have_many(:alert_management_alerts) }
+ it { is_expected.to have_one(:upcoming_deployment) }
it { is_expected.to have_one(:latest_opened_most_severe_alert) }
it { is_expected.to delegate_method(:stop_action).to(:last_deployment) }
@@ -723,6 +724,22 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
end
end
+ describe '#upcoming_deployment' do
+ subject { environment.upcoming_deployment }
+
+ context 'when environment has a successful deployment' do
+ let!(:deployment) { create(:deployment, :success, environment: environment, project: project) }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when environment has a running deployment' do
+ let!(:deployment) { create(:deployment, :running, environment: environment, project: project) }
+
+ it { is_expected.to eq(deployment) }
+ end
+ end
+
describe '#has_terminals?' do
subject { environment.has_terminals? }
@@ -860,16 +877,6 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
expect(described_class.reactive_cache_hard_limit).to eq(10.megabyte)
end
- it 'overrides reactive_cache_limit_enabled? with a FF' do
- environment_with_enabled_ff = build(:environment, project: create(:project))
- environment_with_disabled_ff = build(:environment, project: create(:project))
-
- stub_feature_flags(reactive_caching_limit_environment: environment_with_enabled_ff.project)
-
- expect(environment_with_enabled_ff.send(:reactive_cache_limit_enabled?)).to be_truthy
- expect(environment_with_disabled_ff.send(:reactive_cache_limit_enabled?)).to be_falsey
- end
-
it 'returns cache data from the deployment platform' do
expect(environment.deployment_platform).to receive(:calculate_reactive_cache_for)
.with(environment).and_return(pods: %w(pod1 pod2))
@@ -1394,4 +1401,150 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
it { is_expected.to be(false) }
end
end
+
+ describe '#cancel_deployment_jobs!' do
+ subject { environment.cancel_deployment_jobs! }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:environment, reload: true) { create(:environment, project: project) }
+ let!(:deployment) { create(:deployment, project: project, environment: environment, deployable: build) }
+ let!(:build) { create(:ci_build, :running, project: project, environment: environment) }
+
+ it 'cancels an active deployment job' do
+ subject
+
+ expect(build.reset).to be_canceled
+ end
+
+ context 'when deployable does not exist' do
+ before do
+ deployment.update_column(:deployable_id, non_existing_record_id)
+ end
+
+ it 'does not raise an error' do
+ expect { subject }.not_to raise_error
+
+ expect(build.reset).to be_running
+ end
+ end
+ end
+
+ describe '#rollout_status' do
+ let!(:cluster) { create(:cluster, :project, :provided_by_user, projects: [project]) }
+ let!(:environment) { create(:environment, project: project) }
+ let!(:deployment) { create(:deployment, :success, environment: environment, project: project) }
+
+ subject { environment.rollout_status }
+
+ context 'environment does not have a deployment board available' do
+ before do
+ allow(environment).to receive(:has_terminals?).and_return(false)
+ end
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'cached rollout status is present' do
+ let(:pods) { %w(pod1 pod2) }
+ let(:deployments) { %w(deployment1 deployment2) }
+
+ before do
+ stub_reactive_cache(environment, pods: pods, deployments: deployments)
+ end
+
+ it 'fetches the rollout status from the deployment platform' do
+ expect(environment.deployment_platform).to receive(:rollout_status)
+ .with(environment, pods: pods, deployments: deployments)
+ .and_return(:mock_rollout_status)
+
+ is_expected.to eq(:mock_rollout_status)
+ end
+ end
+
+ context 'cached rollout status is not present yet' do
+ before do
+ stub_reactive_cache(environment, nil)
+ end
+
+ it 'falls back to a loading status' do
+ expect(::Gitlab::Kubernetes::RolloutStatus).to receive(:loading).and_return(:mock_loading_status)
+
+ is_expected.to eq(:mock_loading_status)
+ end
+ end
+ end
+
+ describe '#ingresses' do
+ subject { environment.ingresses }
+
+ let(:deployment_platform) { double(:deployment_platform) }
+ let(:deployment_namespace) { 'production' }
+
+ before do
+ allow(environment).to receive(:deployment_platform) { deployment_platform }
+ allow(environment).to receive(:deployment_namespace) { deployment_namespace }
+ end
+
+ context 'when rollout status is available' do
+ before do
+ allow(environment).to receive(:rollout_status_available?) { true }
+ end
+
+ it 'fetches ingresses from the deployment platform' do
+ expect(deployment_platform).to receive(:ingresses).with(deployment_namespace)
+
+ subject
+ end
+ end
+
+ context 'when rollout status is not available' do
+ before do
+ allow(environment).to receive(:rollout_status_available?) { false }
+ end
+
+ it 'does nothing' do
+ expect(deployment_platform).not_to receive(:ingresses)
+
+ subject
+ end
+ end
+ end
+
+ describe '#patch_ingress' do
+ subject { environment.patch_ingress(ingress, data) }
+
+ let(:ingress) { double(:ingress) }
+ let(:data) { double(:data) }
+ let(:deployment_platform) { double(:deployment_platform) }
+ let(:deployment_namespace) { 'production' }
+
+ before do
+ allow(environment).to receive(:deployment_platform) { deployment_platform }
+ allow(environment).to receive(:deployment_namespace) { deployment_namespace }
+ end
+
+ context 'when rollout status is available' do
+ before do
+ allow(environment).to receive(:rollout_status_available?) { true }
+ end
+
+ it 'fetches ingresses from the deployment platform' do
+ expect(deployment_platform).to receive(:patch_ingress).with(deployment_namespace, ingress, data)
+
+ subject
+ end
+ end
+
+ context 'when rollout status is not available' do
+ before do
+ allow(environment).to receive(:rollout_status_available?) { false }
+ end
+
+ it 'does nothing' do
+ expect(deployment_platform).not_to receive(:patch_ingress)
+
+ subject
+ end
+ end
+ end
end
diff --git a/spec/models/experiment_spec.rb b/spec/models/experiment_spec.rb
index 587f410c9be..1bf7b8b4850 100644
--- a/spec/models/experiment_spec.rb
+++ b/spec/models/experiment_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Experiment do
describe 'associations' do
it { is_expected.to have_many(:experiment_users) }
+ it { is_expected.to have_many(:experiment_subjects) }
end
describe 'validations' do
@@ -19,20 +20,21 @@ RSpec.describe Experiment do
let_it_be(:experiment_name) { :experiment_key }
let_it_be(:user) { 'a user' }
let_it_be(:group) { 'a group' }
+ let_it_be(:context) { { a: 42 } }
- subject(:add_user) { described_class.add_user(experiment_name, group, user) }
+ subject(:add_user) { described_class.add_user(experiment_name, group, user, context) }
context 'when an experiment with the provided name does not exist' do
it 'creates a new experiment record' do
allow_next_instance_of(described_class) do |experiment|
- allow(experiment).to receive(:record_user_and_group).with(user, group)
+ allow(experiment).to receive(:record_user_and_group).with(user, group, context)
end
expect { add_user }.to change(described_class, :count).by(1)
end
- it 'forwards the user and group_type to the instance' do
+ it 'forwards the user, group_type, and context to the instance' do
expect_next_instance_of(described_class) do |experiment|
- expect(experiment).to receive(:record_user_and_group).with(user, group)
+ expect(experiment).to receive(:record_user_and_group).with(user, group, context)
end
add_user
end
@@ -43,18 +45,95 @@ RSpec.describe Experiment do
it 'does not create a new experiment record' do
allow_next_found_instance_of(described_class) do |experiment|
- allow(experiment).to receive(:record_user_and_group).with(user, group)
+ allow(experiment).to receive(:record_user_and_group).with(user, group, context)
end
expect { add_user }.not_to change(described_class, :count)
end
- it 'forwards the user and group_type to the instance' do
+ it 'forwards the user, group_type, and context to the instance' do
expect_next_found_instance_of(described_class) do |experiment|
- expect(experiment).to receive(:record_user_and_group).with(user, group)
+ expect(experiment).to receive(:record_user_and_group).with(user, group, context)
end
add_user
end
end
+
+ it 'works without the optional context argument' do
+ allow_next_instance_of(described_class) do |experiment|
+ expect(experiment).to receive(:record_user_and_group).with(user, group, {})
+ end
+
+ expect { described_class.add_user(experiment_name, group, user) }.not_to raise_error
+ end
+ end
+
+ describe '.record_conversion_event' do
+ let_it_be(:user) { build(:user) }
+
+ let(:experiment_key) { :test_experiment }
+
+ subject(:record_conversion_event) { described_class.record_conversion_event(experiment_key, user) }
+
+ context 'when no matching experiment exists' do
+ it 'creates the experiment and uses it' do
+ expect_next_instance_of(described_class) do |experiment|
+ expect(experiment).to receive(:record_conversion_event_for_user)
+ end
+ expect { record_conversion_event }.to change { described_class.count }.by(1)
+ end
+
+ context 'but we are unable to successfully create one' do
+ let(:experiment_key) { nil }
+
+ it 'raises a RecordInvalid error' do
+ expect { record_conversion_event }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+ end
+
+ context 'when a matching experiment already exists' do
+ before do
+ create(:experiment, name: experiment_key)
+ end
+
+ it 'sends record_conversion_event_for_user to the experiment instance' do
+ expect_next_found_instance_of(described_class) do |experiment|
+ expect(experiment).to receive(:record_conversion_event_for_user).with(user)
+ end
+ record_conversion_event
+ end
+ end
+ end
+
+ describe '#record_conversion_event_for_user' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:experiment) { create(:experiment) }
+
+ subject(:record_conversion_event_for_user) { experiment.record_conversion_event_for_user(user) }
+
+ context 'when no existing experiment_user record exists for the given user' do
+ it 'does not update or create an experiment_user record' do
+ expect { record_conversion_event_for_user }.not_to change { ExperimentUser.all.to_a }
+ end
+ end
+
+ context 'when an existing experiment_user exists for the given user' do
+ context 'but it has already been converted' do
+ let!(:experiment_user) { create(:experiment_user, experiment: experiment, user: user, converted_at: 2.days.ago) }
+
+ it 'does not update the converted_at value' do
+ expect { record_conversion_event_for_user }.not_to change { experiment_user.converted_at }
+ end
+ end
+
+ context 'and it has not yet been converted' do
+ let(:experiment_user) { create(:experiment_user, experiment: experiment, user: user) }
+
+ it 'updates the converted_at value' do
+ expect { record_conversion_event_for_user }.to change { experiment_user.reload.converted_at }
+ end
+ end
+ end
end
describe '#record_user_and_group' do
@@ -62,8 +141,9 @@ RSpec.describe Experiment do
let_it_be(:user) { create(:user) }
let(:group) { :control }
+ let(:context) { { a: 42 } }
- subject(:record_user_and_group) { experiment.record_user_and_group(user, group) }
+ subject(:record_user_and_group) { experiment.record_user_and_group(user, group, context) }
context 'when an experiment_user does not yet exist for the given user' do
it 'creates a new experiment_user record' do
@@ -74,24 +154,35 @@ RSpec.describe Experiment do
record_user_and_group
expect(ExperimentUser.last.group_type).to eq('control')
end
+
+ it 'adds the correct context to the experiment_user' do
+ record_user_and_group
+ expect(ExperimentUser.last.context).to eq({ 'a' => 42 })
+ end
end
context 'when an experiment_user already exists for the given user' do
before do
# Create an existing experiment_user for this experiment and the :control group
- experiment.record_user_and_group(user, :control)
+ experiment.record_user_and_group(user, :control, context)
end
it 'does not create a new experiment_user record' do
expect { record_user_and_group }.not_to change(ExperimentUser, :count)
end
- context 'but the group_type has changed' do
+ context 'but the group_type and context have changed' do
let(:group) { :experimental }
+ let(:context) { { b: 37 } }
- it 'updates the existing experiment_user record' do
+ it 'updates the existing experiment_user record with group_type' do
expect { record_user_and_group }.to change { ExperimentUser.last.group_type }
end
+
+ it 'updates the existing experiment_user record with context' do
+ record_user_and_group
+ expect(ExperimentUser.last.context).to eq({ 'b' => 37 })
+ end
end
end
end
diff --git a/spec/models/experiment_subject_spec.rb b/spec/models/experiment_subject_spec.rb
new file mode 100644
index 00000000000..4850814c5f5
--- /dev/null
+++ b/spec/models/experiment_subject_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ExperimentSubject, type: :model do
+ describe 'associations' do
+ it { is_expected.to belong_to(:experiment) }
+ it { is_expected.to belong_to(:user) }
+ it { is_expected.to belong_to(:group) }
+ it { is_expected.to belong_to(:project) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:experiment) }
+
+ describe 'must_have_one_subject_present' do
+ let(:experiment_subject) { build(:experiment_subject, user: nil, group: nil, project: nil) }
+ let(:error_message) { 'Must have exactly one of User, Group, or Project.' }
+
+ it 'fails when no subject is present' do
+ expect(experiment_subject).not_to be_valid
+ expect(experiment_subject.errors[:base]).to include(error_message)
+ end
+
+ it 'passes when user subject is present' do
+ experiment_subject.user = build(:user)
+ expect(experiment_subject).to be_valid
+ end
+
+ it 'passes when group subject is present' do
+ experiment_subject.group = build(:group)
+ expect(experiment_subject).to be_valid
+ end
+
+ it 'passes when project subject is present' do
+ experiment_subject.project = build(:project)
+ expect(experiment_subject).to be_valid
+ end
+
+ it 'fails when more than one subject is present', :aggregate_failures do
+ # two subjects
+ experiment_subject.user = build(:user)
+ experiment_subject.group = build(:group)
+ expect(experiment_subject).not_to be_valid
+ expect(experiment_subject.errors[:base]).to include(error_message)
+
+ # three subjects
+ experiment_subject.project = build(:project)
+ expect(experiment_subject).not_to be_valid
+ expect(experiment_subject.errors[:base]).to include(error_message)
+ end
+ end
+ end
+end
diff --git a/spec/models/exported_protected_branch_spec.rb b/spec/models/exported_protected_branch_spec.rb
new file mode 100644
index 00000000000..7886a522741
--- /dev/null
+++ b/spec/models/exported_protected_branch_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ExportedProtectedBranch do
+ describe 'Associations' do
+ it { is_expected.to have_many(:push_access_levels) }
+ end
+
+ describe '.push_access_levels' do
+ it 'returns the correct push access levels' do
+ exported_branch = create(:exported_protected_branch, :developers_can_push)
+ deploy_key = create(:deploy_key)
+ create(:deploy_keys_project, :write_access, project: exported_branch.project, deploy_key: deploy_key)
+ create(:protected_branch_push_access_level, protected_branch: exported_branch, deploy_key: deploy_key)
+ dev_push_access_level = exported_branch.push_access_levels.first
+
+ expect(exported_branch.push_access_levels).to contain_exactly(dev_push_access_level)
+ end
+ end
+end
diff --git a/spec/models/group_import_state_spec.rb b/spec/models/group_import_state_spec.rb
index 469b5c96ac9..52ea20aeac8 100644
--- a/spec/models/group_import_state_spec.rb
+++ b/spec/models/group_import_state_spec.rb
@@ -70,4 +70,24 @@ RSpec.describe GroupImportState do
end
end
end
+
+ context 'when import failed' do
+ context 'when error message is present' do
+ it 'truncates error message' do
+ group_import_state = build(:group_import_state, :started)
+ group_import_state.fail_op('e' * 300)
+
+ expect(group_import_state.last_error.length).to eq(255)
+ end
+ end
+
+ context 'when error message is missing' do
+ it 'has no error message' do
+ group_import_state = build(:group_import_state, :started)
+ group_import_state.fail_op
+
+ expect(group_import_state.last_error).to be_nil
+ end
+ end
+ end
end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index dd1faf999b3..cc8e744a15c 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -30,6 +30,7 @@ RSpec.describe Group do
it { is_expected.to have_many(:services) }
it { is_expected.to have_one(:dependency_proxy_setting) }
it { is_expected.to have_many(:dependency_proxy_blobs) }
+ it { is_expected.to have_many(:dependency_proxy_manifests) }
describe '#members & #requesters' do
let(:requester) { create(:user) }
@@ -798,20 +799,36 @@ RSpec.describe Group do
end
end
- describe '#direct_and_indirect_members' do
+ context 'members-related methods' do
let!(:group) { create(:group, :nested) }
let!(:sub_group) { create(:group, parent: group) }
let!(:maintainer) { group.parent.add_user(create(:user), GroupMember::MAINTAINER) }
let!(:developer) { group.add_user(create(:user), GroupMember::DEVELOPER) }
let!(:other_developer) { group.add_user(create(:user), GroupMember::DEVELOPER) }
- it 'returns parents members' do
- expect(group.direct_and_indirect_members).to include(developer)
- expect(group.direct_and_indirect_members).to include(maintainer)
+ describe '#direct_and_indirect_members' do
+ it 'returns parents members' do
+ expect(group.direct_and_indirect_members).to include(developer)
+ expect(group.direct_and_indirect_members).to include(maintainer)
+ end
+
+ it 'returns descendant members' do
+ expect(group.direct_and_indirect_members).to include(other_developer)
+ end
end
- it 'returns descendant members' do
- expect(group.direct_and_indirect_members).to include(other_developer)
+ describe '#direct_and_indirect_members_with_inactive' do
+ let!(:maintainer_blocked) { group.parent.add_user(create(:user, :blocked), GroupMember::MAINTAINER) }
+
+ it 'returns parents members' do
+ expect(group.direct_and_indirect_members_with_inactive).to include(developer)
+ expect(group.direct_and_indirect_members_with_inactive).to include(maintainer)
+ expect(group.direct_and_indirect_members_with_inactive).to include(maintainer_blocked)
+ end
+
+ it 'returns descendant members' do
+ expect(group.direct_and_indirect_members_with_inactive).to include(other_developer)
+ end
end
end
@@ -834,7 +851,7 @@ RSpec.describe Group do
end
end
- describe '#direct_and_indirect_users' do
+ context 'user-related methods' do
let(:user_a) { create(:user) }
let(:user_b) { create(:user) }
let(:user_c) { create(:user) }
@@ -853,14 +870,40 @@ RSpec.describe Group do
project.add_developer(user_d)
end
- it 'returns member users on every nest level without duplication' do
- expect(group.direct_and_indirect_users).to contain_exactly(user_a, user_b, user_c, user_d)
- expect(nested_group.direct_and_indirect_users).to contain_exactly(user_a, user_b, user_c)
- expect(deep_nested_group.direct_and_indirect_users).to contain_exactly(user_a, user_b, user_c)
+ describe '#direct_and_indirect_users' do
+ it 'returns member users on every nest level without duplication' do
+ expect(group.direct_and_indirect_users).to contain_exactly(user_a, user_b, user_c, user_d)
+ expect(nested_group.direct_and_indirect_users).to contain_exactly(user_a, user_b, user_c)
+ expect(deep_nested_group.direct_and_indirect_users).to contain_exactly(user_a, user_b, user_c)
+ end
+
+ it 'does not return members of projects belonging to ancestor groups' do
+ expect(nested_group.direct_and_indirect_users).not_to include(user_d)
+ end
end
- it 'does not return members of projects belonging to ancestor groups' do
- expect(nested_group.direct_and_indirect_users).not_to include(user_d)
+ describe '#direct_and_indirect_users_with_inactive' do
+ let(:user_blocked_1) { create(:user, :blocked) }
+ let(:user_blocked_2) { create(:user, :blocked) }
+ let(:user_blocked_3) { create(:user, :blocked) }
+ let(:project_in_group) { create(:project, namespace: nested_group) }
+
+ before do
+ group.add_developer(user_blocked_1)
+ nested_group.add_developer(user_blocked_1)
+ deep_nested_group.add_developer(user_blocked_2)
+ project_in_group.add_developer(user_blocked_3)
+ end
+
+ it 'returns member users on every nest level without duplication' do
+ expect(group.direct_and_indirect_users_with_inactive).to contain_exactly(user_a, user_b, user_c, user_d, user_blocked_1, user_blocked_2, user_blocked_3)
+ expect(nested_group.direct_and_indirect_users_with_inactive).to contain_exactly(user_a, user_b, user_c, user_blocked_1, user_blocked_2, user_blocked_3)
+ expect(deep_nested_group.direct_and_indirect_users_with_inactive).to contain_exactly(user_a, user_b, user_c, user_blocked_1, user_blocked_2)
+ end
+
+ it 'returns members of projects belonging to group' do
+ expect(nested_group.direct_and_indirect_users_with_inactive).to include(user_blocked_3)
+ end
end
end
diff --git a/spec/models/identity_spec.rb b/spec/models/identity_spec.rb
index 696d33b7beb..c0efb2dff56 100644
--- a/spec/models/identity_spec.rb
+++ b/spec/models/identity_spec.rb
@@ -81,6 +81,36 @@ RSpec.describe Identity do
end
end
+ describe '.with_any_extern_uid' do
+ context 'provider with extern uid' do
+ let!(:test_entity) { create(:identity, provider: 'test_provider', extern_uid: 'test_uid') }
+
+ it 'finds any extern uids associated with a provider' do
+ identity = described_class.with_any_extern_uid('test_provider').first
+
+ expect(identity).to be
+ end
+ end
+
+ context 'provider with nil extern uid' do
+ let!(:nil_entity) { create(:identity, provider: 'nil_entity_provider', extern_uid: nil) }
+
+ it 'has no results when there are no extern uids' do
+ identity = described_class.with_any_extern_uid('nil_entity_provider').first
+
+ expect(identity).to be_nil
+ end
+ end
+
+ context 'no provider' do
+ it 'has no results when there is no associated provider' do
+ identity = described_class.with_any_extern_uid('nonexistent_provider').first
+
+ expect(identity).to be_nil
+ end
+ end
+ end
+
context 'callbacks' do
context 'before_save' do
describe 'normalizes extern uid' do
diff --git a/spec/models/instance_configuration_spec.rb b/spec/models/instance_configuration_spec.rb
index 383e548c324..d3566ed04c2 100644
--- a/spec/models/instance_configuration_spec.rb
+++ b/spec/models/instance_configuration_spec.rb
@@ -10,31 +10,35 @@ RSpec.describe InstanceConfiguration do
let(:sha256) { 'SHA256:2KJDT7xf2i68mBgJ3TVsjISntg4droLbXYLfQj0VvSY' }
it 'does not return anything if file does not exist' do
- stub_pub_file(exist: false)
+ stub_pub_file(pub_file(exist: false))
expect(subject.settings[:ssh_algorithms_hashes]).to be_empty
end
it 'does not return anything if file is empty' do
- stub_pub_file
+ stub_pub_file(pub_file)
- allow(File).to receive(:read).and_return('')
+ stub_file_read(pub_file, content: '')
expect(subject.settings[:ssh_algorithms_hashes]).to be_empty
end
it 'returns the md5 and sha256 if file valid and exists' do
- stub_pub_file
+ stub_pub_file(pub_file)
result = subject.settings[:ssh_algorithms_hashes].select { |o| o[:md5] == md5 && o[:sha256] == sha256 }
expect(result.size).to eq(InstanceConfiguration::SSH_ALGORITHMS.size)
end
- def stub_pub_file(exist: true)
+ def pub_file(exist: true)
path = exist ? 'spec/fixtures/ssh_host_example_key.pub' : 'spec/fixtures/ssh_host_example_key.pub.random'
- allow(subject).to receive(:ssh_algorithm_file).and_return(Rails.root.join(path))
+ Rails.root.join(path)
+ end
+
+ def stub_pub_file(path)
+ allow(subject).to receive(:ssh_algorithm_file).and_return(path)
end
end
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index 16ea2989eda..81f045b4db1 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -141,7 +141,6 @@ RSpec.describe Issue do
describe '.with_issue_type' do
let_it_be(:issue) { create(:issue, project: reusable_project) }
let_it_be(:incident) { create(:incident, project: reusable_project) }
- let_it_be(:test_case) { create(:quality_test_case, project: reusable_project) }
it 'gives issues with the given issue type' do
expect(described_class.with_issue_type('issue'))
@@ -149,8 +148,8 @@ RSpec.describe Issue do
end
it 'gives issues with the given issue type' do
- expect(described_class.with_issue_type(%w(issue incident test_case)))
- .to contain_exactly(issue, incident, test_case)
+ expect(described_class.with_issue_type(%w(issue incident)))
+ .to contain_exactly(issue, incident)
end
end
@@ -370,6 +369,20 @@ RSpec.describe Issue do
expect(link_types).not_to include(nil)
end
+ it 'returns issues including the link creation time' do
+ dates = authorized_issue_a.related_issues(user).map(&:issue_link_created_at)
+
+ expect(dates).not_to be_empty
+ expect(dates).not_to include(nil)
+ end
+
+ it 'returns issues including the link update time' do
+ dates = authorized_issue_a.related_issues(user).map(&:issue_link_updated_at)
+
+ expect(dates).not_to be_empty
+ expect(dates).not_to include(nil)
+ end
+
describe 'when a user cannot read cross project' do
it 'only returns issues within the same project' do
expect(Ability).to receive(:allowed?).with(user, :read_all_resources, :global).at_least(:once).and_call_original
diff --git a/spec/models/label_priority_spec.rb b/spec/models/label_priority_spec.rb
index db961d5a4e6..adeccd999f3 100644
--- a/spec/models/label_priority_spec.rb
+++ b/spec/models/label_priority_spec.rb
@@ -18,5 +18,11 @@ RSpec.describe LabelPriority do
expect(subject).to validate_uniqueness_of(:label_id).scoped_to(:project_id)
end
+
+ describe 'when importing' do
+ subject { create(:label_priority, importing: true) }
+
+ it { is_expected.not_to validate_presence_of(:label) }
+ end
end
end
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index 6706083fd91..a5493d1650b 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -9,6 +9,10 @@ RSpec.describe MergeRequestDiff do
let(:diff_with_commits) { create(:merge_request).merge_request_diff }
+ before do
+ stub_feature_flags(diffs_gradual_load: false)
+ end
+
describe 'validations' do
subject { diff_with_commits }
@@ -415,7 +419,7 @@ RSpec.describe MergeRequestDiff do
context 'when persisted files available' do
it 'returns paginated diffs' do
- diffs = diff_with_commits.diffs_in_batch(1, 10, diff_options: {})
+ diffs = diff_with_commits.diffs_in_batch(1, 10, diff_options: diff_options)
expect(diffs).to be_a(Gitlab::Diff::FileCollection::MergeRequestDiffBatch)
expect(diffs.diff_files.size).to eq(10)
@@ -423,6 +427,150 @@ RSpec.describe MergeRequestDiff do
next_page: 2,
total_pages: 2)
end
+
+ it 'sorts diff files directory first' do
+ diff_with_commits.update!(sorted: false) # Mark as unsorted so it'll re-order
+
+ expect(diff_with_commits.diffs_in_batch(1, 10, diff_options: diff_options).diff_file_paths).to eq([
+ 'bar/branch-test.txt',
+ 'custom-highlighting/test.gitlab-custom',
+ 'encoding/iso8859.txt',
+ 'files/images/wm.svg',
+ 'files/js/commit.coffee',
+ 'files/lfs/lfs_object.iso',
+ 'files/ruby/popen.rb',
+ 'files/ruby/regex.rb',
+ 'files/.DS_Store',
+ 'files/whitespace'
+ ])
+ end
+
+ context 'when sort_diffs feature flag is disabled' do
+ before do
+ stub_feature_flags(sort_diffs: false)
+ end
+
+ it 'does not sort diff files directory first' do
+ expect(diff_with_commits.diffs_in_batch(1, 10, diff_options: diff_options).diff_file_paths).to eq([
+ '.DS_Store',
+ '.gitattributes',
+ '.gitignore',
+ '.gitmodules',
+ 'CHANGELOG',
+ 'README',
+ 'bar/branch-test.txt',
+ 'custom-highlighting/test.gitlab-custom',
+ 'encoding/iso8859.txt',
+ 'files/.DS_Store'
+ ])
+ end
+ end
+ end
+ end
+
+ describe '#diffs' do
+ let(:diff_options) { {} }
+
+ shared_examples_for 'fetching full diffs' do
+ it 'returns diffs from repository comparison' do
+ expect_next_instance_of(Compare) do |comparison|
+ expect(comparison).to receive(:diffs)
+ .with(diff_options)
+ .and_call_original
+ end
+
+ diff_with_commits.diffs(diff_options)
+ end
+
+ it 'returns a Gitlab::Diff::FileCollection::Compare with full diffs' do
+ diffs = diff_with_commits.diffs(diff_options)
+
+ expect(diffs).to be_a(Gitlab::Diff::FileCollection::Compare)
+ expect(diffs.diff_files.size).to eq(20)
+ end
+ end
+
+ context 'when no persisted files available' do
+ before do
+ diff_with_commits.clean!
+ end
+
+ it_behaves_like 'fetching full diffs'
+ end
+
+ context 'when diff_options include ignore_whitespace_change' do
+ it_behaves_like 'fetching full diffs' do
+ let(:diff_options) do
+ { ignore_whitespace_change: true }
+ end
+ end
+ end
+
+ context 'when persisted files available' do
+ it 'returns diffs' do
+ diffs = diff_with_commits.diffs(diff_options)
+
+ expect(diffs).to be_a(Gitlab::Diff::FileCollection::MergeRequestDiff)
+ expect(diffs.diff_files.size).to eq(20)
+ end
+
+ it 'sorts diff files directory first' do
+ diff_with_commits.update!(sorted: false) # Mark as unsorted so it'll re-order
+
+ expect(diff_with_commits.diffs(diff_options).diff_file_paths).to eq([
+ 'bar/branch-test.txt',
+ 'custom-highlighting/test.gitlab-custom',
+ 'encoding/iso8859.txt',
+ 'files/images/wm.svg',
+ 'files/js/commit.coffee',
+ 'files/lfs/lfs_object.iso',
+ 'files/ruby/popen.rb',
+ 'files/ruby/regex.rb',
+ 'files/.DS_Store',
+ 'files/whitespace',
+ 'foo/bar/.gitkeep',
+ 'with space/README.md',
+ '.DS_Store',
+ '.gitattributes',
+ '.gitignore',
+ '.gitmodules',
+ 'CHANGELOG',
+ 'README',
+ 'gitlab-grack',
+ 'gitlab-shell'
+ ])
+ end
+
+ context 'when sort_diffs feature flag is disabled' do
+ before do
+ stub_feature_flags(sort_diffs: false)
+ end
+
+ it 'does not sort diff files directory first' do
+ expect(diff_with_commits.diffs(diff_options).diff_file_paths).to eq([
+ '.DS_Store',
+ '.gitattributes',
+ '.gitignore',
+ '.gitmodules',
+ 'CHANGELOG',
+ 'README',
+ 'bar/branch-test.txt',
+ 'custom-highlighting/test.gitlab-custom',
+ 'encoding/iso8859.txt',
+ 'files/.DS_Store',
+ 'files/images/wm.svg',
+ 'files/js/commit.coffee',
+ 'files/lfs/lfs_object.iso',
+ 'files/ruby/popen.rb',
+ 'files/ruby/regex.rb',
+ 'files/whitespace',
+ 'foo/bar/.gitkeep',
+ 'gitlab-grack',
+ 'gitlab-shell',
+ 'with space/README.md'
+ ])
+ end
+ end
end
end
@@ -501,6 +649,68 @@ RSpec.describe MergeRequestDiff do
expect(mr_diff.empty?).to be_truthy
end
+ it 'persists diff files sorted directory first' do
+ mr_diff = create(:merge_request).merge_request_diff
+ diff_files_paths = mr_diff.merge_request_diff_files.map { |file| file.new_path.presence || file.old_path }
+
+ expect(diff_files_paths).to eq([
+ 'bar/branch-test.txt',
+ 'custom-highlighting/test.gitlab-custom',
+ 'encoding/iso8859.txt',
+ 'files/images/wm.svg',
+ 'files/js/commit.coffee',
+ 'files/lfs/lfs_object.iso',
+ 'files/ruby/popen.rb',
+ 'files/ruby/regex.rb',
+ 'files/.DS_Store',
+ 'files/whitespace',
+ 'foo/bar/.gitkeep',
+ 'with space/README.md',
+ '.DS_Store',
+ '.gitattributes',
+ '.gitignore',
+ '.gitmodules',
+ 'CHANGELOG',
+ 'README',
+ 'gitlab-grack',
+ 'gitlab-shell'
+ ])
+ end
+
+ context 'when sort_diffs feature flag is disabled' do
+ before do
+ stub_feature_flags(sort_diffs: false)
+ end
+
+ it 'persists diff files unsorted by directory first' do
+ mr_diff = create(:merge_request).merge_request_diff
+ diff_files_paths = mr_diff.merge_request_diff_files.map { |file| file.new_path.presence || file.old_path }
+
+ expect(diff_files_paths).to eq([
+ '.DS_Store',
+ '.gitattributes',
+ '.gitignore',
+ '.gitmodules',
+ 'CHANGELOG',
+ 'README',
+ 'bar/branch-test.txt',
+ 'custom-highlighting/test.gitlab-custom',
+ 'encoding/iso8859.txt',
+ 'files/.DS_Store',
+ 'files/images/wm.svg',
+ 'files/js/commit.coffee',
+ 'files/lfs/lfs_object.iso',
+ 'files/ruby/popen.rb',
+ 'files/ruby/regex.rb',
+ 'files/whitespace',
+ 'foo/bar/.gitkeep',
+ 'gitlab-grack',
+ 'gitlab-shell',
+ 'with space/README.md'
+ ])
+ end
+ end
+
it 'expands collapsed diffs before saving' do
mr_diff = create(:merge_request, source_branch: 'expand-collapse-lines', target_branch: 'master').merge_request_diff
diff_file = mr_diff.merge_request_diff_files.find_by(new_path: 'expand-collapse/file-5.txt')
diff --git a/spec/models/merge_request_reviewer_spec.rb b/spec/models/merge_request_reviewer_spec.rb
index 55199cd96ad..76b44abca54 100644
--- a/spec/models/merge_request_reviewer_spec.rb
+++ b/spec/models/merge_request_reviewer_spec.rb
@@ -9,6 +9,6 @@ RSpec.describe MergeRequestReviewer do
describe 'associations' do
it { is_expected.to belong_to(:merge_request).class_name('MergeRequest') }
- it { is_expected.to belong_to(:reviewer).class_name('User') }
+ it { is_expected.to belong_to(:reviewer).class_name('User').inverse_of(:merge_request_reviewers) }
end
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 9574c57e46c..431a60a11a5 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -92,6 +92,39 @@ RSpec.describe MergeRequest, factory_default: :keep do
it { is_expected.not_to include(mr_without_jira_reference) }
end
+ context 'scopes' do
+ let_it_be(:user1) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+
+ let_it_be(:merge_request1) { create(:merge_request, :unique_branches, reviewers: [user1])}
+ let_it_be(:merge_request2) { create(:merge_request, :unique_branches, reviewers: [user2])}
+ let_it_be(:merge_request3) { create(:merge_request, :unique_branches, reviewers: [])}
+
+ describe '.review_requested' do
+ it 'returns MRs that have any review requests' do
+ expect(described_class.review_requested).to eq([merge_request1, merge_request2])
+ end
+ end
+
+ describe '.no_review_requested' do
+ it 'returns MRs that have no review requests' do
+ expect(described_class.no_review_requested).to eq([merge_request3])
+ end
+ end
+
+ describe '.review_requested_to' do
+ it 'returns MRs that the user has been requested to review' do
+ expect(described_class.review_requested_to(user1)).to eq([merge_request1])
+ end
+ end
+
+ describe '.no_review_requested_to' do
+ it 'returns MRs that the user has not been requested to review' do
+ expect(described_class.no_review_requested_to(user1)).to eq([merge_request2, merge_request3])
+ end
+ end
+ end
+
describe '#squash_in_progress?' do
let(:repo_path) do
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
@@ -467,6 +500,77 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
+ describe 'time to merge calculations' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+
+ let!(:mr1) do
+ create(
+ :merge_request,
+ :with_merged_metrics,
+ source_project: project,
+ target_project: project
+ )
+ end
+
+ let!(:mr2) do
+ create(
+ :merge_request,
+ :with_merged_metrics,
+ source_project: project,
+ target_project: project
+ )
+ end
+
+ let!(:mr3) do
+ create(
+ :merge_request,
+ :with_merged_metrics,
+ source_project: project,
+ target_project: project
+ )
+ end
+
+ let!(:unmerged_mr) do
+ create(
+ :merge_request,
+ source_project: project,
+ target_project: project
+ )
+ end
+
+ before do
+ project.add_user(user, :developer)
+ end
+
+ describe '.total_time_to_merge' do
+ it 'returns the sum of the time to merge for all merged MRs' do
+ mrs = project.merge_requests
+
+ expect(mrs.total_time_to_merge).to be_within(1).of(expected_total_time(mrs))
+ end
+
+ context 'when merged_at is earlier than created_at' do
+ before do
+ mr1.metrics.update!(merged_at: mr1.metrics.created_at - 1.week)
+ end
+
+ it 'returns nil' do
+ mrs = project.merge_requests.where(id: mr1.id)
+
+ expect(mrs.total_time_to_merge).to be_nil
+ end
+ end
+
+ def expected_total_time(mrs)
+ mrs = mrs.reject { |mr| mr.merged_at.nil? }
+ mrs.reduce(0.0) do |sum, mr|
+ (mr.merged_at - mr.created_at) + sum
+ end
+ end
+ end
+ end
+
describe '#target_branch_sha' do
let(:project) { create(:project, :repository) }
@@ -1825,6 +1929,32 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
+ describe '#has_codequality_reports?' do
+ subject { merge_request.has_codequality_reports? }
+
+ let(:project) { create(:project, :repository) }
+
+ context 'when head pipeline has a codequality report' do
+ let(:merge_request) { create(:merge_request, :with_codequality_reports, source_project: project) }
+
+ it { is_expected.to be_truthy }
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(codequality_mr_diff: false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'when head pipeline does not have a codequality report' do
+ let(:merge_request) { create(:merge_request, source_project: project) }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
describe '#has_terraform_reports?' do
context 'when head pipeline has terraform reports' do
it 'returns true' do
@@ -2101,6 +2231,62 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
+ describe '#compare_codequality_reports' do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:merge_request, reload: true) { create(:merge_request, :with_codequality_reports, source_project: project) }
+ let_it_be(:pipeline) { merge_request.head_pipeline }
+
+ subject { merge_request.compare_codequality_reports }
+
+ context 'when head pipeline has codequality report' do
+ let(:job) do
+ create(:ci_build, options: { artifacts: { reports: { codeclimate: ['codequality.json'] } } }, pipeline: pipeline)
+ end
+
+ let(:artifacts_metadata) { create(:ci_job_artifact, :metadata, job: job) }
+
+ context 'when reactive cache worker is parsing results asynchronously' do
+ it 'returns parsing status' do
+ expect(subject[:status]).to eq(:parsing)
+ end
+ end
+
+ context 'when reactive cache worker is inline' do
+ before do
+ synchronous_reactive_cache(merge_request)
+ end
+
+ it 'returns parsed status' do
+ expect(subject[:status]).to eq(:parsed)
+ expect(subject[:data]).to be_present
+ end
+
+ context 'when an error occurs' do
+ before do
+ merge_request.update!(head_pipeline: nil)
+ end
+
+ it 'returns an error status' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:status_reason]).to eq("This merge request does not have codequality reports")
+ end
+ end
+
+ context 'when cached result is not latest' do
+ before do
+ allow_next_instance_of(Ci::CompareCodequalityReportsService) do |service|
+ allow(service).to receive(:latest?).and_return(false)
+ end
+ end
+
+ it 'raises an InvalidateReactiveCache error' do
+ expect { subject }.to raise_error(ReactiveCaching::InvalidateReactiveCache)
+ end
+ end
+ end
+ end
+ end
+
describe '#all_commit_shas' do
context 'when merge request is persisted' do
let(:all_commit_shas) do
@@ -3266,7 +3452,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
- describe "#closed_without_fork?" do
+ describe "#closed_or_merged_without_fork?" do
let(:project) { create(:project) }
let(:forked_project) { fork_project(project) }
let(:user) { create(:user) }
@@ -3280,14 +3466,33 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
it "returns false if the fork exist" do
- expect(closed_merge_request.closed_without_fork?).to be_falsey
+ expect(closed_merge_request.closed_or_merged_without_fork?).to be_falsey
end
it "returns true if the fork does not exist" do
unlink_project.execute
closed_merge_request.reload
- expect(closed_merge_request.closed_without_fork?).to be_truthy
+ expect(closed_merge_request.closed_or_merged_without_fork?).to be_truthy
+ end
+ end
+
+ context "when the merge request was merged" do
+ let(:merged_merge_request) do
+ create(:merged_merge_request,
+ source_project: forked_project,
+ target_project: project)
+ end
+
+ it "returns false if the fork exist" do
+ expect(merged_merge_request.closed_or_merged_without_fork?).to be_falsey
+ end
+
+ it "returns true if the fork does not exist" do
+ unlink_project.execute
+ merged_merge_request.reload
+
+ expect(merged_merge_request.closed_or_merged_without_fork?).to be_truthy
end
end
@@ -3299,7 +3504,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
it "returns false" do
- expect(open_merge_request.closed_without_fork?).to be_falsey
+ expect(open_merge_request.closed_or_merged_without_fork?).to be_falsey
end
end
end
@@ -4242,6 +4447,14 @@ RSpec.describe MergeRequest, factory_default: :keep do
expect(subject.diffable_merge_ref?).to eq(true)
end
+ context 'merge request is merged' do
+ subject { build_stubbed(:merge_request, :merged, project: project) }
+
+ it 'returns false' do
+ expect(subject.diffable_merge_ref?).to eq(false)
+ end
+ end
+
context 'merge request cannot be merged' do
before do
subject.mark_as_unchecked!
diff --git a/spec/models/namespace_onboarding_action_spec.rb b/spec/models/namespace_onboarding_action_spec.rb
new file mode 100644
index 00000000000..70dcb989b32
--- /dev/null
+++ b/spec/models/namespace_onboarding_action_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe NamespaceOnboardingAction do
+ let(:namespace) { build(:namespace) }
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:namespace).required }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:action) }
+ end
+
+ describe '.completed?' do
+ let(:action) { :subscription_created }
+
+ subject { described_class.completed?(namespace, action) }
+
+ context 'action created for the namespace' do
+ before do
+ create(:namespace_onboarding_action, namespace: namespace, action: action)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'action created for another namespace' do
+ before do
+ create(:namespace_onboarding_action, namespace: build(:namespace), action: action)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ describe '.create_action' do
+ let(:action) { :subscription_created }
+
+ subject(:create_action) { described_class.create_action(namespace, action) }
+
+ it 'creates the action for the namespace just once' do
+ expect { create_action }.to change { count_namespace_actions }.by(1)
+
+ expect { create_action }.to change { count_namespace_actions }.by(0)
+ end
+
+ def count_namespace_actions
+ described_class.where(namespace: namespace, action: action).count
+ end
+ end
+end
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 85f9005052e..0130618d004 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe Namespace do
it { is_expected.to have_one :aggregation_schedule }
it { is_expected.to have_one :namespace_settings }
it { is_expected.to have_many :custom_emoji }
+ it { is_expected.to have_many :namespace_onboarding_actions }
end
describe 'validations' do
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index a3417ee5fc7..6e87ca6dcf7 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -357,7 +357,7 @@ RSpec.describe Note do
describe '#confidential?' do
context 'when note is not confidential' do
context 'when include_noteable is set to true' do
- it 'is true when a noteable is confidential ' do
+ it 'is true when a noteable is confidential' do
issue = create(:issue, :confidential)
note = build(:note, noteable: issue, project: issue.project)
@@ -366,7 +366,7 @@ RSpec.describe Note do
end
context 'when include_noteable is not set to true' do
- it 'is false when a noteable is confidential ' do
+ it 'is false when a noteable is confidential' do
issue = create(:issue, :confidential)
note = build(:note, noteable: issue, project: issue.project)
diff --git a/spec/models/packages/package_file_spec.rb b/spec/models/packages/package_file_spec.rb
index ef09fb037e9..82ac159b9cc 100644
--- a/spec/models/packages/package_file_spec.rb
+++ b/spec/models/packages/package_file_spec.rb
@@ -61,14 +61,14 @@ RSpec.describe Packages::PackageFile, type: :model do
end
end
- describe '#update_file_metadata callback' do
+ describe '#update_file_store callback' do
let_it_be(:package_file) { build(:package_file, :nuget, size: nil) }
subject { package_file.save! }
it 'updates metadata columns' do
expect(package_file)
- .to receive(:update_file_metadata)
+ .to receive(:update_file_store)
.and_call_original
# This expectation uses a stub because we can no longer test a change from
diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb
index 705a1991845..16764673474 100644
--- a/spec/models/packages/package_spec.rb
+++ b/spec/models/packages/package_spec.rb
@@ -111,6 +111,24 @@ RSpec.describe Packages::Package, type: :model do
it { is_expected.not_to allow_value('%foo%bar').for(:name) }
end
+ context 'debian package' do
+ subject { build(:debian_package) }
+
+ it { is_expected.to allow_value('0ad').for(:name) }
+ it { is_expected.to allow_value('g++').for(:name) }
+ it { is_expected.not_to allow_value('a_b').for(:name) }
+ end
+
+ context 'debian incoming' do
+ subject { create(:debian_incoming) }
+
+ # Only 'incoming' is accepted
+ it { is_expected.to allow_value('incoming').for(:name) }
+ it { is_expected.not_to allow_value('0ad').for(:name) }
+ it { is_expected.not_to allow_value('g++').for(:name) }
+ it { is_expected.not_to allow_value('a_b').for(:name) }
+ end
+
context 'generic package' do
subject { build_stubbed(:generic_package) }
@@ -180,6 +198,21 @@ RSpec.describe Packages::Package, type: :model do
it { is_expected.to allow_value('2.x-dev').for(:version) }
end
+ context 'debian package' do
+ subject { build(:debian_package) }
+
+ it { is_expected.to allow_value('2:4.9.5+dfsg-5+deb10u1').for(:version) }
+ it { is_expected.not_to allow_value('1_0').for(:version) }
+ end
+
+ context 'debian incoming' do
+ subject { create(:debian_incoming) }
+
+ it { is_expected.to allow_value(nil).for(:version) }
+ it { is_expected.not_to allow_value('2:4.9.5+dfsg-5+deb10u1').for(:version) }
+ it { is_expected.not_to allow_value('1_0').for(:version) }
+ end
+
context 'maven package' do
subject { build_stubbed(:maven_package) }
@@ -621,6 +654,46 @@ RSpec.describe Packages::Package, type: :model do
end
end
+ describe '#debian_incoming?' do
+ let(:package) { build(:package) }
+
+ subject { package.debian_incoming? }
+
+ it { is_expected.to eq(false) }
+
+ context 'with debian_incoming' do
+ let(:package) { create(:debian_incoming) }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'with debian_package' do
+ let(:package) { create(:debian_package) }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ describe '#debian_package?' do
+ let(:package) { build(:package) }
+
+ subject { package.debian_package? }
+
+ it { is_expected.to eq(false) }
+
+ context 'with debian_incoming' do
+ let(:package) { create(:debian_incoming) }
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'with debian_package' do
+ let(:package) { create(:debian_package) }
+
+ it { is_expected.to eq(true) }
+ end
+ end
+
describe 'plan_limits' do
Packages::Package.package_types.keys.without('composer').each do |pt|
plan_limit_name = if pt == 'generic'
diff --git a/spec/models/pages/lookup_path_spec.rb b/spec/models/pages/lookup_path_spec.rb
index f8ebc237577..30712af6b32 100644
--- a/spec/models/pages/lookup_path_spec.rb
+++ b/spec/models/pages/lookup_path_spec.rb
@@ -9,7 +9,6 @@ RSpec.describe Pages::LookupPath do
before do
stub_pages_setting(access_control: true, external_https: ["1.1.1.1:443"])
- stub_artifacts_object_storage
stub_pages_object_storage(::Pages::DeploymentUploader)
end
@@ -66,7 +65,7 @@ RSpec.describe Pages::LookupPath do
end
it 'uses deployment from object storage' do
- Timecop.freeze do
+ freeze_time do
expect(source).to(
eq({
type: 'zip',
@@ -86,7 +85,7 @@ RSpec.describe Pages::LookupPath do
end
it 'uses file protocol' do
- Timecop.freeze do
+ freeze_time do
expect(source).to(
eq({
type: 'zip',
@@ -117,64 +116,6 @@ RSpec.describe Pages::LookupPath do
include_examples 'uses disk storage'
end
end
-
- context 'when artifact_id from build job is present in pages metadata' do
- let(:artifacts_archive) { create(:ci_job_artifact, :zip, :remote_store, project: project) }
-
- before do
- project.mark_pages_as_deployed(artifacts_archive: artifacts_archive)
- end
-
- it 'uses artifacts object storage' do
- Timecop.freeze do
- expect(source).to(
- eq({
- type: 'zip',
- path: artifacts_archive.file.url(expire_at: 1.day.from_now),
- global_id: "gid://gitlab/Ci::JobArtifact/#{artifacts_archive.id}",
- sha256: artifacts_archive.file_sha256,
- file_size: artifacts_archive.size,
- file_count: nil
- })
- )
- end
- end
-
- context 'when artifact is not uploaded to object storage' do
- let(:artifacts_archive) { create(:ci_job_artifact, :zip) }
-
- it 'uses file protocol', :aggregate_failures do
- Timecop.freeze do
- expect(source).to(
- eq({
- type: 'zip',
- path: 'file://' + artifacts_archive.file.path,
- global_id: "gid://gitlab/Ci::JobArtifact/#{artifacts_archive.id}",
- sha256: artifacts_archive.file_sha256,
- file_size: artifacts_archive.size,
- file_count: nil
- })
- )
- end
- end
-
- context 'when pages_serve_with_zip_file_protocol feature flag is disabled' do
- before do
- stub_feature_flags(pages_serve_with_zip_file_protocol: false)
- end
-
- include_examples 'uses disk storage'
- end
- end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(pages_serve_from_artifacts_archive: false)
- end
-
- include_examples 'uses disk storage'
- end
- end
end
describe '#prefix' do
diff --git a/spec/models/project_feature_spec.rb b/spec/models/project_feature_spec.rb
index c927c8fd1f9..37402fa04c4 100644
--- a/spec/models/project_feature_spec.rb
+++ b/spec/models/project_feature_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe ProjectFeature do
end
context 'public features' do
- features = %w(issues wiki builds merge_requests snippets repository metrics_dashboard)
+ features = %w(issues wiki builds merge_requests snippets repository metrics_dashboard operations)
features.each do |feature|
it "does not allow public access level for #{feature}" do
diff --git a/spec/models/project_feature_usage_spec.rb b/spec/models/project_feature_usage_spec.rb
index d55d41fab85..cd70602fc4d 100644
--- a/spec/models/project_feature_usage_spec.rb
+++ b/spec/models/project_feature_usage_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe ProjectFeatureUsage, type: :model do
subject { project.feature_usage }
it 'logs Jira DVCS Cloud last sync' do
- Timecop.freeze do
+ freeze_time do
subject.log_jira_dvcs_integration_usage
expect(subject.jira_dvcs_server_last_sync_at).to be_nil
@@ -34,7 +34,7 @@ RSpec.describe ProjectFeatureUsage, type: :model do
end
it 'logs Jira DVCS Server last sync' do
- Timecop.freeze do
+ freeze_time do
subject.log_jira_dvcs_integration_usage(cloud: false)
expect(subject.jira_dvcs_server_last_sync_at).to be_like_time(Time.current)
diff --git a/spec/models/project_repository_storage_move_spec.rb b/spec/models/project_repository_storage_move_spec.rb
index d32867efb39..88535f6dd6e 100644
--- a/spec/models/project_repository_storage_move_spec.rb
+++ b/spec/models/project_repository_storage_move_spec.rb
@@ -3,102 +3,30 @@
require 'spec_helper'
RSpec.describe ProjectRepositoryStorageMove, type: :model do
- describe 'associations' do
- it { is_expected.to belong_to(:project) }
- end
-
- describe 'validations' do
- it { is_expected.to validate_presence_of(:project) }
- it { is_expected.to validate_presence_of(:state) }
- it { is_expected.to validate_presence_of(:source_storage_name) }
- it { is_expected.to validate_presence_of(:destination_storage_name) }
-
- context 'source_storage_name inclusion' do
- subject { build(:project_repository_storage_move, source_storage_name: 'missing') }
-
- it "does not allow repository storages that don't match a label in the configuration" do
- expect(subject).not_to be_valid
- expect(subject.errors[:source_storage_name].first).to match(/is not included in the list/)
- end
- end
-
- context 'destination_storage_name inclusion' do
- subject { build(:project_repository_storage_move, destination_storage_name: 'missing') }
-
- it "does not allow repository storages that don't match a label in the configuration" do
- expect(subject).not_to be_valid
- expect(subject.errors[:destination_storage_name].first).to match(/is not included in the list/)
- end
- end
-
- context 'project repository read-only' do
- subject { build(:project_repository_storage_move, project: project) }
-
- let(:project) { build(:project, repository_read_only: true) }
+ let_it_be_with_refind(:project) { create(:project) }
- it "does not allow the project to be read-only on create" do
- expect(subject).not_to be_valid
- expect(subject.errors[:project].first).to match(/is read only/)
- end
- end
- end
-
- describe 'defaults' do
- context 'destination_storage_name' do
- subject { build(:project_repository_storage_move) }
-
- it 'picks storage from ApplicationSetting' do
- expect(Gitlab::CurrentSettings).to receive(:pick_repository_storage).and_return('picked').at_least(:once)
-
- expect(subject.destination_storage_name).to eq('picked')
- end
- end
+ it_behaves_like 'handles repository moves' do
+ let(:container) { project }
+ let(:repository_storage_factory_key) { :project_repository_storage_move }
+ let(:error_key) { :project }
+ let(:repository_storage_worker) { ProjectUpdateRepositoryStorageWorker }
end
describe 'state transitions' do
- let(:project) { create(:project) }
+ let(:storage) { 'test_second_storage' }
before do
- stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/extra_storage' })
- end
-
- context 'when in the default state' do
- subject(:storage_move) { create(:project_repository_storage_move, project: project, destination_storage_name: 'test_second_storage') }
-
- context 'and transits to scheduled' do
- it 'triggers ProjectUpdateRepositoryStorageWorker' do
- expect(ProjectUpdateRepositoryStorageWorker).to receive(:perform_async).with(project.id, 'test_second_storage', storage_move.id)
-
- storage_move.schedule!
-
- expect(project).to be_repository_read_only
- end
- end
-
- context 'and transits to started' do
- it 'does not allow the transition' do
- expect { storage_move.start! }
- .to raise_error(StateMachines::InvalidTransition)
- end
- end
+ stub_storage_settings(storage => { 'path' => 'tmp/tests/extra_storage' })
end
context 'when started' do
- subject(:storage_move) { create(:project_repository_storage_move, :started, project: project, destination_storage_name: 'test_second_storage') }
+ subject(:storage_move) { create(:project_repository_storage_move, :started, container: project, destination_storage_name: storage) }
context 'and transits to replicated' do
- it 'sets the repository storage and marks the project as writable' do
+ it 'sets the repository storage and marks the container as writable' do
storage_move.finish_replication!
- expect(project.repository_storage).to eq('test_second_storage')
- expect(project).not_to be_repository_read_only
- end
- end
-
- context 'and transits to failed' do
- it 'marks the project as writable' do
- storage_move.do_fail!
-
+ expect(project.repository_storage).to eq(storage)
expect(project).not_to be_repository_read_only
end
end
diff --git a/spec/models/project_services/datadog_service_spec.rb b/spec/models/project_services/datadog_service_spec.rb
new file mode 100644
index 00000000000..1d9f49e4824
--- /dev/null
+++ b/spec/models/project_services/datadog_service_spec.rb
@@ -0,0 +1,179 @@
+# frozen_string_literal: true
+require 'securerandom'
+
+require 'spec_helper'
+
+RSpec.describe DatadogService, :model do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:build) { create(:ci_build, project: project) }
+
+ let(:active) { true }
+ let(:dd_site) { 'datadoghq.com' }
+ let(:default_url) { 'https://webhooks-http-intake.logs.datadoghq.com/v1/input/' }
+ let(:api_url) { nil }
+ let(:api_key) { SecureRandom.hex(32) }
+ let(:dd_env) { 'ci' }
+ let(:dd_service) { 'awesome-gitlab' }
+
+ let(:expected_hook_url) { default_url + api_key + "?env=#{dd_env}&service=#{dd_service}" }
+
+ let(:instance) do
+ described_class.new(
+ active: active,
+ project: project,
+ properties: {
+ datadog_site: dd_site,
+ api_url: api_url,
+ api_key: api_key,
+ datadog_env: dd_env,
+ datadog_service: dd_service
+ }
+ )
+ end
+
+ let(:saved_instance) do
+ instance.save!
+ instance
+ end
+
+ let(:pipeline_data) { Gitlab::DataBuilder::Pipeline.build(pipeline) }
+ let(:build_data) { Gitlab::DataBuilder::Build.build(build) }
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to have_one(:service_hook) }
+ end
+
+ describe 'validations' do
+ subject { instance }
+
+ context 'when service is active' do
+ let(:active) { true }
+
+ it { is_expected.to validate_presence_of(:api_key) }
+ it { is_expected.to allow_value(api_key).for(:api_key) }
+ it { is_expected.not_to allow_value('87dab2403c9d462 87aec4d9214edb1e').for(:api_key) }
+ it { is_expected.not_to allow_value('................................').for(:api_key) }
+
+ context 'when selecting site' do
+ let(:dd_site) { 'datadoghq.com' }
+ let(:api_url) { nil }
+
+ it { is_expected.to validate_presence_of(:datadog_site) }
+ it { is_expected.not_to validate_presence_of(:api_url) }
+ it { is_expected.not_to allow_value('datadog hq.com').for(:datadog_site) }
+ end
+
+ context 'with custom api_url' do
+ let(:dd_site) { nil }
+ let(:api_url) { 'https://webhooks-http-intake.logs.datad0g.com/v1/input/' }
+
+ it { is_expected.not_to validate_presence_of(:datadog_site) }
+ it { is_expected.to validate_presence_of(:api_url) }
+ it { is_expected.to allow_value(api_url).for(:api_url) }
+ it { is_expected.not_to allow_value('example.com').for(:api_url) }
+ end
+
+ context 'when missing site and api_url' do
+ let(:dd_site) { nil }
+ let(:api_url) { nil }
+
+ it { is_expected.not_to be_valid }
+ it { is_expected.to validate_presence_of(:datadog_site) }
+ it { is_expected.to validate_presence_of(:api_url) }
+ end
+ end
+
+ context 'when service is not active' do
+ let(:active) { false }
+
+ it { is_expected.to be_valid }
+ it { is_expected.not_to validate_presence_of(:api_key) }
+ end
+ end
+
+ describe '#hook_url' do
+ subject { instance.hook_url }
+
+ context 'with standard site URL' do
+ it { is_expected.to eq(expected_hook_url) }
+ end
+
+ context 'with custom URL' do
+ let(:api_url) { 'https://webhooks-http-intake.logs.datad0g.com/v1/input/' }
+
+ it { is_expected.to eq(api_url + api_key + "?env=#{dd_env}&service=#{dd_service}") }
+
+ context 'blank' do
+ let(:api_url) { '' }
+
+ it { is_expected.to eq(expected_hook_url) }
+ end
+ end
+
+ context 'without optional params' do
+ let(:dd_service) { nil }
+ let(:dd_env) { nil }
+
+ it { is_expected.to eq(default_url + api_key) }
+ end
+ end
+
+ describe '#api_keys_url' do
+ subject { instance.api_keys_url }
+
+ it { is_expected.to eq("https://app.#{dd_site}/account/settings#api") }
+
+ context 'with unset datadog_site' do
+ let(:dd_site) { nil }
+
+ it { is_expected.to eq("https://docs.datadoghq.com/account_management/api-app-keys/") }
+ end
+ end
+
+ describe '#test' do
+ describe '#test' do
+ context 'when request is successful' do
+ subject { saved_instance.test(pipeline_data) }
+
+ before do
+ stub_request(:post, expected_hook_url).to_return(body: 'OK')
+ end
+ it { is_expected.to eq({ success: true, result: 'OK' }) }
+ end
+
+ context 'when request fails' do
+ subject { saved_instance.test(pipeline_data) }
+
+ before do
+ stub_request(:post, expected_hook_url).to_return(body: 'CRASH!!!', status: 500)
+ end
+ it { is_expected.to eq({ success: false, result: 'CRASH!!!' }) }
+ end
+ end
+
+ describe '#execute' do
+ before do
+ stub_request(:post, expected_hook_url)
+ saved_instance.execute(data)
+ end
+
+ context 'with pipeline data' do
+ let(:data) { pipeline_data }
+ let(:expected_headers) do
+ { WebHookService::GITLAB_EVENT_HEADER => 'Pipeline Hook' }
+ end
+
+ it { expect(a_request(:post, expected_hook_url).with(headers: expected_headers)).to have_been_made }
+ end
+
+ context 'with job data' do
+ let(:data) { build_data }
+ let(:expected_headers) do
+ { WebHookService::GITLAB_EVENT_HEADER => 'Job Hook' }
+ end
+
+ it { expect(a_request(:post, expected_hook_url).with(headers: expected_headers)).to have_been_made }
+ end
+ end
+end
diff --git a/spec/models/project_services/jenkins_service_spec.rb b/spec/models/project_services/jenkins_service_spec.rb
new file mode 100644
index 00000000000..4663e41736a
--- /dev/null
+++ b/spec/models/project_services/jenkins_service_spec.rb
@@ -0,0 +1,255 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe JenkinsService do
+ let(:project) { create(:project) }
+ let(:jenkins_url) { 'http://jenkins.example.com/' }
+ let(:jenkins_hook_url) { jenkins_url + 'project/my_project' }
+ let(:jenkins_username) { 'u$er name%2520' }
+ let(:jenkins_password) { 'pas$ word' }
+
+ let(:jenkins_params) do
+ {
+ active: true,
+ project: project,
+ properties: {
+ password: jenkins_password,
+ username: jenkins_username,
+ jenkins_url: jenkins_url,
+ project_name: 'my_project'
+ }
+ }
+ end
+
+ let(:jenkins_authorization) { "Basic " + ::Base64.strict_encode64(jenkins_username + ':' + jenkins_password) }
+
+ describe 'Associations' do
+ it { is_expected.to belong_to :project }
+ it { is_expected.to have_one :service_hook }
+ end
+
+ describe 'username validation' do
+ before do
+ @jenkins_service = described_class.create!(
+ active: active,
+ project: project,
+ properties: {
+ jenkins_url: 'http://jenkins.example.com/',
+ password: 'password',
+ username: 'username',
+ project_name: 'my_project'
+ }
+ )
+ end
+
+ subject { @jenkins_service }
+
+ context 'when the service is active' do
+ let(:active) { true }
+
+ context 'when password was not touched' do
+ before do
+ allow(subject).to receive(:password_touched?).and_return(false)
+ end
+
+ it { is_expected.not_to validate_presence_of :username }
+ end
+
+ context 'when password was touched' do
+ before do
+ allow(subject).to receive(:password_touched?).and_return(true)
+ end
+
+ it { is_expected.to validate_presence_of :username }
+ end
+
+ context 'when password is blank' do
+ it 'does not validate the username' do
+ expect(subject).not_to validate_presence_of :username
+
+ subject.password = ''
+ subject.save!
+ end
+ end
+ end
+
+ context 'when the service is inactive' do
+ let(:active) { false }
+
+ it { is_expected.not_to validate_presence_of :username }
+ end
+ end
+
+ describe '#hook_url' do
+ let(:username) { nil }
+ let(:password) { nil }
+ let(:jenkins_service) do
+ described_class.new(
+ project: project,
+ properties: {
+ jenkins_url: jenkins_url,
+ project_name: 'my_project',
+ username: username,
+ password: password
+ }
+ )
+ end
+
+ subject { jenkins_service.hook_url }
+
+ context 'when the jenkins_url has no relative path' do
+ let(:jenkins_url) { 'http://jenkins.example.com/' }
+
+ it { is_expected.to eq('http://jenkins.example.com/project/my_project') }
+ end
+
+ context 'when the jenkins_url has relative path' do
+ let(:jenkins_url) { 'http://organization.example.com/jenkins' }
+
+ it { is_expected.to eq('http://organization.example.com/jenkins/project/my_project') }
+ end
+
+ context 'userinfo is missing and username and password are set' do
+ let(:jenkins_url) { 'http://organization.example.com/jenkins' }
+ let(:username) { 'u$ername' }
+ let(:password) { 'pas$ word' }
+
+ it { is_expected.to eq('http://u%24ername:pas%24%20word@organization.example.com/jenkins/project/my_project') }
+ end
+
+ context 'userinfo is provided and username and password are set' do
+ let(:jenkins_url) { 'http://u:p@organization.example.com/jenkins' }
+ let(:username) { 'username' }
+ let(:password) { 'password' }
+
+ it { is_expected.to eq('http://username:password@organization.example.com/jenkins/project/my_project') }
+ end
+
+ context 'userinfo is provided username and password are not set' do
+ let(:jenkins_url) { 'http://u:p@organization.example.com/jenkins' }
+
+ it { is_expected.to eq('http://u:p@organization.example.com/jenkins/project/my_project') }
+ end
+ end
+
+ describe '#test' do
+ it 'returns the right status' do
+ user = create(:user, username: 'username')
+ project = create(:project, name: 'project')
+ push_sample_data = Gitlab::DataBuilder::Push.build_sample(project, user)
+ jenkins_service = described_class.create!(jenkins_params)
+ stub_request(:post, jenkins_hook_url).with(headers: { 'Authorization' => jenkins_authorization })
+
+ result = jenkins_service.test(push_sample_data)
+
+ expect(result).to eq({ success: true, result: '' })
+ end
+ end
+
+ describe '#execute' do
+ let(:user) { create(:user, username: 'username') }
+ let(:namespace) { create(:group, :private) }
+ let(:project) { create(:project, :private, name: 'project', namespace: namespace) }
+ let(:push_sample_data) { Gitlab::DataBuilder::Push.build_sample(project, user) }
+ let(:jenkins_service) { described_class.create!(jenkins_params) }
+
+ before do
+ stub_request(:post, jenkins_hook_url)
+ end
+
+ it 'invokes the Jenkins API' do
+ jenkins_service.execute(push_sample_data)
+
+ expect(a_request(:post, jenkins_hook_url)).to have_been_made.once
+ end
+
+ it 'adds default web hook headers to the request' do
+ jenkins_service.execute(push_sample_data)
+
+ expect(
+ a_request(:post, jenkins_hook_url)
+ .with(headers: { 'X-Gitlab-Event' => 'Push Hook', 'Authorization' => jenkins_authorization })
+ ).to have_been_made.once
+ end
+
+ it 'request url contains properly serialized username and password' do
+ jenkins_service.execute(push_sample_data)
+
+ expect(
+ a_request(:post, 'http://jenkins.example.com/project/my_project')
+ .with(headers: { 'Authorization' => jenkins_authorization })
+ ).to have_been_made.once
+ end
+ end
+
+ describe 'Stored password invalidation' do
+ let(:project) { create(:project) }
+
+ context 'when a password was previously set' do
+ before do
+ @jenkins_service = described_class.create!(
+ project: project,
+ properties: {
+ jenkins_url: 'http://jenkins.example.com/',
+ username: 'jenkins',
+ password: 'password'
+ }
+ )
+ end
+
+ it 'resets password if url changed' do
+ @jenkins_service.jenkins_url = 'http://jenkins-edited.example.com/'
+ @jenkins_service.save!
+ expect(@jenkins_service.password).to be_nil
+ end
+
+ it 'resets password if username is blank' do
+ @jenkins_service.username = ''
+ @jenkins_service.save!
+ expect(@jenkins_service.password).to be_nil
+ end
+
+ it 'does not reset password if username changed' do
+ @jenkins_service.username = 'some_name'
+ @jenkins_service.save!
+ expect(@jenkins_service.password).to eq('password')
+ end
+
+ it 'does not reset password if new url is set together with password, even if it\'s the same password' do
+ @jenkins_service.jenkins_url = 'http://jenkins_edited.example.com/'
+ @jenkins_service.password = 'password'
+ @jenkins_service.save!
+ expect(@jenkins_service.password).to eq('password')
+ expect(@jenkins_service.jenkins_url).to eq('http://jenkins_edited.example.com/')
+ end
+
+ it 'resets password if url changed, even if setter called multiple times' do
+ @jenkins_service.jenkins_url = 'http://jenkins1.example.com/'
+ @jenkins_service.jenkins_url = 'http://jenkins1.example.com/'
+ @jenkins_service.save!
+ expect(@jenkins_service.password).to be_nil
+ end
+ end
+
+ context 'when no password was previously set' do
+ before do
+ @jenkins_service = described_class.create!(
+ project: create(:project),
+ properties: {
+ jenkins_url: 'http://jenkins.example.com/',
+ username: 'jenkins'
+ }
+ )
+ end
+
+ it 'saves password if new url is set together with password' do
+ @jenkins_service.jenkins_url = 'http://jenkins_edited.example.com/'
+ @jenkins_service.password = 'password'
+ @jenkins_service.save!
+ expect(@jenkins_service.password).to eq('password')
+ expect(@jenkins_service.jenkins_url).to eq('http://jenkins_edited.example.com/')
+ end
+ end
+ end
+end
diff --git a/spec/models/project_services/jira_service_spec.rb b/spec/models/project_services/jira_service_spec.rb
index 7741fea8717..e7cd3d7f537 100644
--- a/spec/models/project_services/jira_service_spec.rb
+++ b/spec/models/project_services/jira_service_spec.rb
@@ -66,6 +66,19 @@ RSpec.describe JiraService do
end
end
+ describe '#fields' do
+ let(:service) { create(:jira_service) }
+
+ subject(:fields) { service.fields }
+
+ it 'includes transition help link' do
+ transition_id_field = fields.find { |field| field[:name] == 'jira_issue_transition_id' }
+
+ expect(transition_id_field[:title]).to eq('Jira workflow transition IDs')
+ expect(transition_id_field[:help]).to include('/help/user/project/integrations/jira')
+ end
+ end
+
describe 'Associations' do
it { is_expected.to belong_to :project }
it { is_expected.to have_one :service_hook }
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index c8b96963d5d..a71b0eb842a 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -33,6 +33,7 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_many(:deploy_keys) }
it { is_expected.to have_many(:hooks) }
it { is_expected.to have_many(:protected_branches) }
+ it { is_expected.to have_many(:exported_protected_branches) }
it { is_expected.to have_one(:slack_service) }
it { is_expected.to have_one(:microsoft_teams_service) }
it { is_expected.to have_one(:mattermost_service) }
@@ -146,6 +147,10 @@ RSpec.describe Project, factory_default: :keep do
let(:container_without_wiki) { create(:project) }
end
+ it_behaves_like 'can move repository storage' do
+ let_it_be(:container) { create(:project, :repository) }
+ end
+
it 'has an inverse relationship with merge requests' do
expect(described_class.reflect_on_association(:merge_requests).has_inverse?).to eq(:target_project)
end
@@ -607,6 +612,7 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to delegate_method(:name).to(:owner).with_prefix(true).with_arguments(allow_nil: true) }
it { is_expected.to delegate_method(:root_ancestor).to(:namespace).with_arguments(allow_nil: true) }
it { is_expected.to delegate_method(:last_pipeline).to(:commit).with_arguments(allow_nil: true) }
+ it { is_expected.to delegate_method(:allow_editing_commit_messages?).to(:project_setting) }
end
describe 'reference methods' do
@@ -1534,6 +1540,42 @@ RSpec.describe Project, factory_default: :keep do
end
end
+ describe '.service_desk_custom_address_enabled?' do
+ let_it_be(:project) { create(:project, service_desk_enabled: true) }
+
+ subject(:address_enabled) { project.service_desk_custom_address_enabled? }
+
+ context 'when service_desk_email is enabled' do
+ before do
+ allow(::Gitlab::ServiceDeskEmail).to receive(:enabled?).and_return(true)
+ end
+
+ it 'returns true' do
+ expect(address_enabled).to be_truthy
+ end
+
+ context 'when service_desk_custom_address flag is disabled' do
+ before do
+ stub_feature_flags(service_desk_custom_address: false)
+ end
+
+ it 'returns false' do
+ expect(address_enabled).to be_falsey
+ end
+ end
+ end
+
+ context 'when service_desk_email is disabled' do
+ before do
+ allow(::Gitlab::ServiceDeskEmail).to receive(:enabled?).and_return(false)
+ end
+
+ it 'returns false when service_desk_email is disabled' do
+ expect(address_enabled).to be_falsey
+ end
+ end
+ end
+
describe '.find_by_service_desk_project_key' do
it 'returns the correct project' do
project1 = create(:project)
@@ -3002,39 +3044,6 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '#set_repository_read_only!' do
- let(:project) { create(:project) }
-
- it 'makes the repository read-only' do
- expect { project.set_repository_read_only! }
- .to change(project, :repository_read_only?)
- .from(false)
- .to(true)
- end
-
- it 'raises an error if the project is already read-only' do
- project.set_repository_read_only!
-
- expect { project.set_repository_read_only! }.to raise_error(described_class::RepositoryReadOnlyError, /already read-only/)
- end
-
- it 'raises an error when there is an existing git transfer in progress' do
- allow(project).to receive(:git_transfer_in_progress?) { true }
-
- expect { project.set_repository_read_only! }.to raise_error(described_class::RepositoryReadOnlyError, /in progress/)
- end
- end
-
- describe '#set_repository_writable!' do
- it 'sets repository_read_only to false' do
- project = create(:project, :read_only)
-
- expect { project.set_repository_writable! }
- .to change(project, :repository_read_only)
- .from(true).to(false)
- end
- end
-
describe '#pushes_since_gc' do
let(:project) { build_stubbed(:project) }
@@ -4281,29 +4290,33 @@ RSpec.describe Project, factory_default: :keep do
end
describe '#git_transfer_in_progress?' do
+ using RSpec::Parameterized::TableSyntax
+
let(:project) { build(:project) }
subject { project.git_transfer_in_progress? }
- it 'returns false when repo_reference_count and wiki_reference_count are 0' do
- allow(project).to receive(:repo_reference_count) { 0 }
- allow(project).to receive(:wiki_reference_count) { 0 }
-
- expect(subject).to be_falsey
+ where(:project_reference_counter, :wiki_reference_counter, :design_reference_counter, :result) do
+ 0 | 0 | 0 | false
+ 2 | 0 | 0 | true
+ 0 | 2 | 0 | true
+ 0 | 0 | 2 | true
end
- it 'returns true when repo_reference_count is > 0' do
- allow(project).to receive(:repo_reference_count) { 2 }
- allow(project).to receive(:wiki_reference_count) { 0 }
-
- expect(subject).to be_truthy
- end
-
- it 'returns true when wiki_reference_count is > 0' do
- allow(project).to receive(:repo_reference_count) { 0 }
- allow(project).to receive(:wiki_reference_count) { 2 }
+ with_them do
+ before do
+ allow(project).to receive(:reference_counter).with(type: Gitlab::GlRepository::PROJECT) do
+ double(:project_reference_counter, value: project_reference_counter)
+ end
+ allow(project).to receive(:reference_counter).with(type: Gitlab::GlRepository::WIKI) do
+ double(:wiki_reference_counter, value: wiki_reference_counter)
+ end
+ allow(project).to receive(:reference_counter).with(type: Gitlab::GlRepository::DESIGN) do
+ double(:design_reference_counter, value: design_reference_counter)
+ end
+ end
- expect(subject).to be_truthy
+ specify { expect(subject).to be result }
end
end
@@ -4929,6 +4942,7 @@ RSpec.describe Project, factory_default: :keep do
expect(project).to receive(:after_create_default_branch)
expect(project).to receive(:refresh_markdown_cache!)
expect(InternalId).to receive(:flush_records!).with(project: project)
+ expect(ProjectCacheWorker).to receive(:perform_async).with(project.id, [], [:repository_size])
expect(DetectRepositoryLanguagesWorker).to receive(:perform_async).with(project.id)
expect(project).to receive(:write_repository_config)
@@ -5019,11 +5033,11 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe "#default_branch" do
- context "with an empty repository" do
+ describe '#default_branch' do
+ context 'with an empty repository' do
let_it_be(:project) { create(:project_empty_repo) }
- context "group.default_branch_name is available" do
+ context 'group.default_branch_name is available' do
let(:project_group) { create(:group) }
let(:project) { create(:project, path: 'avatar', namespace: project_group) }
@@ -5036,19 +5050,19 @@ RSpec.describe Project, factory_default: :keep do
.and_return('example_branch')
end
- it "returns the group default value" do
- expect(project.default_branch).to eq("example_branch")
+ it 'returns the group default value' do
+ expect(project.default_branch).to eq('example_branch')
end
end
- context "Gitlab::CurrentSettings.default_branch_name is available" do
+ context 'Gitlab::CurrentSettings.default_branch_name is available' do
before do
expect(Gitlab::CurrentSettings)
.to receive(:default_branch_name)
.and_return(example_branch_name)
end
- context "is missing or nil" do
+ context 'is missing or nil' do
let(:example_branch_name) { nil }
it "returns nil" do
@@ -5056,10 +5070,18 @@ RSpec.describe Project, factory_default: :keep do
end
end
- context "is present" do
- let(:example_branch_name) { "example_branch_name" }
+ context 'is blank' do
+ let(:example_branch_name) { '' }
+
+ it 'returns nil' do
+ expect(project.default_branch).to be_nil
+ end
+ end
- it "returns the expected branch name" do
+ context 'is present' do
+ let(:example_branch_name) { 'example_branch_name' }
+
+ it 'returns the expected branch name' do
expect(project.default_branch).to eq(example_branch_name)
end
end
@@ -5564,6 +5586,26 @@ RSpec.describe Project, factory_default: :keep do
end
end
+ describe '#disabled_services' do
+ subject { build(:project).disabled_services }
+
+ context 'without datadog_ci_integration' do
+ before do
+ stub_feature_flags(datadog_ci_integration: false)
+ end
+
+ it { is_expected.to include('datadog') }
+ end
+
+ context 'with datadog_ci_integration' do
+ before do
+ stub_feature_flags(datadog_ci_integration: true)
+ end
+
+ it { is_expected.not_to include('datadog') }
+ end
+ end
+
describe '#find_or_initialize_service' do
it 'avoids N+1 database queries' do
allow(Service).to receive(:available_services_names).and_return(%w[prometheus pushover])
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index 2d283766edb..cb1baa02e96 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -313,17 +313,6 @@ RSpec.describe ProjectStatistics do
it 'stores the size of related uploaded files' do
expect(statistics.update_uploads_size).to eq(3.megabytes)
end
-
- context 'with feature flag disabled' do
- before do
- statistics.update_columns(uploads_size: 0)
- stub_feature_flags(count_uploads_size_in_storage_stats: false)
- end
-
- it 'does not store the size of related uploaded files' do
- expect(statistics.update_uploads_size).to eq(0)
- end
- end
end
describe '#update_storage_size' do
diff --git a/spec/models/protected_branch/push_access_level_spec.rb b/spec/models/protected_branch/push_access_level_spec.rb
index 0aba51ea567..051cb78a6b6 100644
--- a/spec/models/protected_branch/push_access_level_spec.rb
+++ b/spec/models/protected_branch/push_access_level_spec.rb
@@ -34,4 +34,59 @@ RSpec.describe ProtectedBranch::PushAccessLevel do
expect(level.errors.full_messages).to contain_exactly('Deploy key is not enabled for this project')
end
end
+
+ describe '#check_access' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:protected_branch) { create(:protected_branch, :no_one_can_push, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:deploy_key) { create(:deploy_key, user: user) }
+ let!(:deploy_keys_project) { create(:deploy_keys_project, project: project, deploy_key: deploy_key, can_push: can_push) }
+ let(:can_push) { true }
+
+ before_all do
+ project.add_guest(user)
+ end
+
+ context 'when this push_access_level is tied to a deploy key' do
+ let(:push_access_level) { create(:protected_branch_push_access_level, protected_branch: protected_branch, deploy_key: deploy_key) }
+
+ context 'when the deploy key is among the active keys for this project' do
+ specify do
+ expect(push_access_level.check_access(user)).to be_truthy
+ end
+
+ context 'when the deploy_keys_on_protected_branches FF is false' do
+ before do
+ stub_feature_flags(deploy_keys_on_protected_branches: false)
+ end
+
+ it 'is false' do
+ expect(push_access_level.check_access(user)).to be_falsey
+ end
+ end
+ end
+
+ context 'when the deploy key is not among the active keys of this project' do
+ let(:can_push) { false }
+
+ it 'is false' do
+ expect(push_access_level.check_access(user)).to be_falsey
+ end
+ end
+ end
+ end
+
+ describe '#type' do
+ let(:push_level_access) { build(:protected_branch_push_access_level) }
+
+ it 'returns :deploy_key when a deploy key is tied to the protected branch' do
+ push_level_access.deploy_key = create(:deploy_key)
+
+ expect(push_level_access.type).to eq(:deploy_key)
+ end
+
+ it 'returns :role by default' do
+ expect(push_level_access.type).to eq(:role)
+ end
+ end
end
diff --git a/spec/models/raw_usage_data_spec.rb b/spec/models/raw_usage_data_spec.rb
index c10db63da56..7acfb8c19af 100644
--- a/spec/models/raw_usage_data_spec.rb
+++ b/spec/models/raw_usage_data_spec.rb
@@ -16,28 +16,10 @@ RSpec.describe RawUsageData do
describe '#update_sent_at!' do
let(:raw_usage_data) { create(:raw_usage_data) }
- context 'with save_raw_usage_data feature enabled' do
- before do
- stub_feature_flags(save_raw_usage_data: true)
- end
+ it 'updates sent_at' do
+ raw_usage_data.update_sent_at!
- it 'updates sent_at' do
- raw_usage_data.update_sent_at!
-
- expect(raw_usage_data.sent_at).not_to be_nil
- end
- end
-
- context 'with save_raw_usage_data feature disabled' do
- before do
- stub_feature_flags(save_raw_usage_data: false)
- end
-
- it 'updates sent_at' do
- raw_usage_data.update_sent_at!
-
- expect(raw_usage_data.sent_at).to be_nil
- end
+ expect(raw_usage_data.sent_at).not_to be_nil
end
end
end
diff --git a/spec/models/release_highlight_spec.rb b/spec/models/release_highlight_spec.rb
new file mode 100644
index 00000000000..ac252e6d6cf
--- /dev/null
+++ b/spec/models/release_highlight_spec.rb
@@ -0,0 +1,181 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ReleaseHighlight do
+ let(:fixture_dir_glob) { Dir.glob(File.join('spec', 'fixtures', 'whats_new', '*.yml')) }
+
+ before do
+ allow(Dir).to receive(:glob).with(Rails.root.join('data', 'whats_new', '*.yml')).and_return(fixture_dir_glob)
+ end
+
+ after do
+ ReleaseHighlight.instance_variable_set(:@file_paths, nil)
+ end
+
+ describe '.for_version' do
+ subject { ReleaseHighlight.for_version(version: version) }
+
+ let(:version) { '1.1' }
+
+ context 'with version param that exists' do
+ it 'returns items from that version' do
+ expect(subject.items.first['title']).to eq("It's gonna be a bright")
+ end
+ end
+
+ context 'with version param that does NOT exist' do
+ let(:version) { '84.0' }
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+ end
+
+ describe '.paginated' do
+ let(:dot_com) { false }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(dot_com)
+ end
+
+ context 'with page param' do
+ subject { ReleaseHighlight.paginated(page: page) }
+
+ context 'when there is another page of results' do
+ let(:page) { 2 }
+
+ it 'responds with paginated results' do
+ expect(subject[:items].first['title']).to eq('bright')
+ expect(subject[:next_page]).to eq(3)
+ end
+ end
+
+ context 'when there is NOT another page of results' do
+ let(:page) { 3 }
+
+ it 'responds with paginated results and no next_page' do
+ expect(subject[:items].first['title']).to eq("It's gonna be a bright")
+ expect(subject[:next_page]).to eq(nil)
+ end
+ end
+
+ context 'when that specific page does not exist' do
+ let(:page) { 84 }
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+ end
+
+ context 'with no page param' do
+ subject { ReleaseHighlight.paginated }
+
+ it 'uses multiple levels of cache' do
+ expect(Rails.cache).to receive(:fetch).with("release_highlight:items:page-1:#{Gitlab.revision}", { expires_in: described_class::CACHE_DURATION }).and_call_original
+ expect(Rails.cache).to receive(:fetch).with("release_highlight:file_paths:#{Gitlab.revision}", { expires_in: described_class::CACHE_DURATION }).and_call_original
+
+ subject
+ end
+
+ it 'returns platform specific items' do
+ expect(subject[:items].count).to eq(1)
+ expect(subject[:items].first['title']).to eq("bright and sunshinin' day")
+ expect(subject[:next_page]).to eq(2)
+ end
+
+ it 'parses the body as markdown and returns html' do
+ expect(subject[:items].first['body']).to match("<h2 id=\"bright-and-sunshinin-day\">bright and sunshinin’ day</h2>")
+ end
+
+ it 'logs an error if there is an error parsing markdown for an item, and skips it' do
+ allow(Kramdown::Document).to receive(:new).and_raise
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ expect(subject[:items]).to be_empty
+ end
+
+ context 'when Gitlab.com' do
+ let(:dot_com) { true }
+
+ it 'responds with a different set of data' do
+ expect(subject[:items].count).to eq(1)
+ expect(subject[:items].first['title']).to eq("I think I can make it now the pain is gone")
+ end
+ end
+
+ context 'YAML parsing throws an exception' do
+ it 'fails gracefully and logs an error' do
+ allow(YAML).to receive(:safe_load).and_raise(Psych::Exception)
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ expect(subject).to be_nil
+ end
+ end
+ end
+ end
+
+ describe '.most_recent_item_count' do
+ subject { ReleaseHighlight.most_recent_item_count }
+
+ it 'uses process memory cache' do
+ expect(Gitlab::ProcessMemoryCache.cache_backend).to receive(:fetch).with("release_highlight:recent_item_count:#{Gitlab.revision}", expires_in: described_class::CACHE_DURATION)
+
+ subject
+ end
+
+ context 'when recent release items exist' do
+ it 'returns the count from the most recent file' do
+ allow(ReleaseHighlight).to receive(:paginated).and_return(double(:paginated, items: [double(:item)]))
+
+ expect(subject).to eq(1)
+ end
+ end
+
+ context 'when recent release items do NOT exist' do
+ it 'returns nil' do
+ allow(ReleaseHighlight).to receive(:paginated).and_return(nil)
+
+ expect(subject).to be_nil
+ end
+ end
+ end
+
+ describe '.versions' do
+ subject { described_class.versions }
+
+ it 'uses process memory cache' do
+ expect(Gitlab::ProcessMemoryCache.cache_backend).to receive(:fetch).with("release_highlight:versions:#{Gitlab.revision}", { expires_in: described_class::CACHE_DURATION })
+
+ subject
+ end
+
+ it 'returns versions from the file paths' do
+ expect(subject).to eq(['1.5', '1.2', '1.1'])
+ end
+
+ context 'when there are more than 12 versions' do
+ let(:file_paths) do
+ i = 0
+ Array.new(20) { "20201225_01_#{i += 1}.yml" }
+ end
+
+ it 'limits to 12 versions' do
+ allow(ReleaseHighlight).to receive(:file_paths).and_return(file_paths)
+ expect(subject.count).to eq(12)
+ end
+ end
+ end
+
+ describe 'QueryResult' do
+ subject { ReleaseHighlight::QueryResult.new(items: items, next_page: 2) }
+
+ let(:items) { [:item] }
+
+ it 'responds to map' do
+ expect(subject.map(&:to_s)).to eq(items.map(&:to_s))
+ end
+ end
+end
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 31211f8ff2c..c1f073e26d1 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -2335,7 +2335,7 @@ RSpec.describe Repository do
end
it 'caches the response' do
- expect(repository).to receive(:readme).and_call_original.once
+ expect(repository.head_tree).to receive(:readme_path).and_call_original.once
2.times do
expect(repository.readme_path).to eq("README.md")
diff --git a/spec/models/resource_label_event_spec.rb b/spec/models/resource_label_event_spec.rb
index da1fe70c891..44de83d9240 100644
--- a/spec/models/resource_label_event_spec.rb
+++ b/spec/models/resource_label_event_spec.rb
@@ -51,14 +51,6 @@ RSpec.describe ResourceLabelEvent, type: :model do
end
context 'callbacks' do
- describe '#usage_metrics' do
- it 'tracks changed labels' do
- expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_label_changed_action)
-
- subject.save!
- end
- end
-
describe '#expire_etag_cache' do
def expect_expiration(issue)
expect_next_instance_of(Gitlab::EtagCaching::Store) do |instance|
diff --git a/spec/models/resource_state_event_spec.rb b/spec/models/resource_state_event_spec.rb
index b8a93bdbe3b..2b4898b750a 100644
--- a/spec/models/resource_state_event_spec.rb
+++ b/spec/models/resource_state_event_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe ResourceStateEvent, type: :model do
end
context 'callbacks' do
- describe '#usage_metrics' do
+ describe '#issue_usage_metrics' do
it 'tracks closed issues' do
expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_closed_action)
@@ -53,6 +53,12 @@ RSpec.describe ResourceStateEvent, type: :model do
create(described_class.name.underscore.to_sym, issue: issue, state: described_class.states[:reopened])
end
+
+ it 'does not track merge requests' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).not_to receive(:track_issue_closed_action)
+
+ create(described_class.name.underscore.to_sym, merge_request: merge_request, state: described_class.states[:closed])
+ end
end
end
end
diff --git a/spec/models/sentry_issue_spec.rb b/spec/models/sentry_issue_spec.rb
index 33654bf5e1a..c24350d7067 100644
--- a/spec/models/sentry_issue_spec.rb
+++ b/spec/models/sentry_issue_spec.rb
@@ -27,6 +27,12 @@ RSpec.describe SentryIssue do
expect(duplicate_sentry_issue).to be_invalid
expect(duplicate_sentry_issue.errors.full_messages.first).to include('is already associated')
end
+
+ describe 'when importing' do
+ subject { create(:sentry_issue, importing: true) }
+
+ it { is_expected.not_to validate_presence_of(:issue) }
+ end
end
describe 'callbacks' do
diff --git a/spec/models/service_spec.rb b/spec/models/service_spec.rb
index 402c1a3d19b..04b3920cd6c 100644
--- a/spec/models/service_spec.rb
+++ b/spec/models/service_spec.rb
@@ -245,7 +245,7 @@ RSpec.describe Service do
context 'with a previous existing service (MockCiService) and a new service (Asana)' do
before do
- Service.insert(type: 'MockCiService', instance: true)
+ Service.insert({ type: 'MockCiService', instance: true })
Service.delete_by(type: 'AsanaService', instance: true)
end
@@ -291,7 +291,7 @@ RSpec.describe Service do
context 'with a previous existing service (Previous) and a new service (Asana)' do
before do
- Service.insert(type: 'PreviousService', template: true)
+ Service.insert({ type: 'PreviousService', template: true })
Service.delete_by(type: 'AsanaService', template: true)
end
@@ -916,5 +916,14 @@ RSpec.describe Service do
described_class.available_services_names(include_dev: false)
end
+
+ it { expect(described_class.available_services_names).to include('jenkins') }
+ end
+
+ describe '.project_specific_services_names' do
+ it do
+ expect(described_class.project_specific_services_names)
+ .to include(*described_class::PROJECT_SPECIFIC_SERVICE_NAMES)
+ end
end
end
diff --git a/spec/models/snippet_repository_storage_move_spec.rb b/spec/models/snippet_repository_storage_move_spec.rb
new file mode 100644
index 00000000000..c9feff0c22f
--- /dev/null
+++ b/spec/models/snippet_repository_storage_move_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe SnippetRepositoryStorageMove, type: :model do
+ it_behaves_like 'handles repository moves' do
+ let_it_be_with_refind(:container) { create(:snippet) }
+
+ let(:repository_storage_factory_key) { :snippet_repository_storage_move }
+ let(:error_key) { :snippet }
+ let(:repository_storage_worker) { nil } # TODO set to SnippetUpdateRepositoryStorageWorker after https://gitlab.com/gitlab-org/gitlab/-/issues/218991 is implemented
+ end
+end
diff --git a/spec/models/snippet_spec.rb b/spec/models/snippet_spec.rb
index d74f5faab7f..f87259ea048 100644
--- a/spec/models/snippet_spec.rb
+++ b/spec/models/snippet_spec.rb
@@ -21,6 +21,7 @@ RSpec.describe Snippet do
it { is_expected.to have_many(:user_mentions).class_name("SnippetUserMention") }
it { is_expected.to have_one(:snippet_repository) }
it { is_expected.to have_one(:statistics).class_name('SnippetStatistics').dependent(:destroy) }
+ it { is_expected.to have_many(:repository_storage_moves).class_name('SnippetRepositoryStorageMove').inverse_of(:container) }
end
describe 'validation' do
@@ -481,17 +482,9 @@ RSpec.describe Snippet do
end
describe '#blobs' do
- let(:snippet) { create(:snippet) }
-
- it 'returns a blob representing the snippet data' do
- blob = snippet.blob
-
- expect(blob).to be_a(Blob)
- expect(blob.path).to eq(snippet.file_name)
- expect(blob.data).to eq(snippet.content)
- end
-
context 'when repository does not exist' do
+ let(:snippet) { create(:snippet) }
+
it 'returns empty array' do
expect(snippet.blobs).to be_empty
end
@@ -777,4 +770,30 @@ RSpec.describe Snippet do
it { is_expected.to be_falsey }
end
end
+
+ describe '#git_transfer_in_progress?' do
+ let(:snippet) { build(:snippet) }
+
+ subject { snippet.git_transfer_in_progress? }
+
+ it 'returns true when there are git transfers' do
+ allow(snippet).to receive(:reference_counter).with(type: Gitlab::GlRepository::SNIPPET) do
+ double(:reference_counter, value: 2)
+ end
+
+ expect(subject).to eq true
+ end
+
+ it 'returns false when there are no git transfers' do
+ allow(snippet).to receive(:reference_counter).with(type: Gitlab::GlRepository::SNIPPET) do
+ double(:reference_counter, value: 0)
+ end
+
+ expect(subject).to eq false
+ end
+ end
+
+ it_behaves_like 'can move repository storage' do
+ let_it_be(:container) { create(:snippet, :repository) }
+ end
end
diff --git a/spec/models/suggestion_spec.rb b/spec/models/suggestion_spec.rb
index e88fc13ecee..9a7624c253a 100644
--- a/spec/models/suggestion_spec.rb
+++ b/spec/models/suggestion_spec.rb
@@ -12,6 +12,12 @@ RSpec.describe Suggestion do
describe 'validations' do
it { is_expected.to validate_presence_of(:note) }
+ context 'when importing' do
+ subject { create(:suggestion, importing: true) }
+
+ it { is_expected.not_to validate_presence_of(:note) }
+ end
+
context 'when suggestion is applied' do
before do
allow(subject).to receive(:applied?).and_return(true)
diff --git a/spec/models/system_note_metadata_spec.rb b/spec/models/system_note_metadata_spec.rb
index 9a6b57afb97..144c65d2f62 100644
--- a/spec/models/system_note_metadata_spec.rb
+++ b/spec/models/system_note_metadata_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe SystemNoteMetadata do
context 'when action type is invalid' do
subject do
- build(:system_note_metadata, note: build(:note), action: 'invalid_type' )
+ build(:system_note_metadata, note: build(:note), action: 'invalid_type')
end
it { is_expected.to be_invalid }
@@ -21,7 +21,15 @@ RSpec.describe SystemNoteMetadata do
context 'when action type is valid' do
subject do
- build(:system_note_metadata, note: build(:note), action: 'merge' )
+ build(:system_note_metadata, note: build(:note), action: 'merge')
+ end
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when importing' do
+ subject do
+ build(:system_note_metadata, note: nil, action: 'merge', importing: true)
end
it { is_expected.to be_valid }
diff --git a/spec/models/terraform/state_spec.rb b/spec/models/terraform/state_spec.rb
index ca8fe4cca3b..ef91e4a5a71 100644
--- a/spec/models/terraform/state_spec.rb
+++ b/spec/models/terraform/state_spec.rb
@@ -3,21 +3,13 @@
require 'spec_helper'
RSpec.describe Terraform::State do
- subject { create(:terraform_state, :with_file) }
-
- let(:terraform_state_file) { fixture_file('terraform/terraform.tfstate') }
-
- it { is_expected.to be_a FileStoreMounter }
+ subject { create(:terraform_state, :with_version) }
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:locked_by_user).class_name('User') }
it { is_expected.to validate_presence_of(:project_id) }
- before do
- stub_terraform_state_object_storage
- end
-
describe 'scopes' do
describe '.ordered_by_name' do
let_it_be(:project) { create(:project) }
@@ -35,64 +27,18 @@ RSpec.describe Terraform::State do
end
end
- describe '#file' do
- context 'when a file exists' do
- it 'does not use the default file' do
- expect(subject.file.read).to eq(terraform_state_file)
- end
- end
- end
-
- describe '#file_store' do
- context 'when a value is set' do
- it 'returns the value' do
- [ObjectStorage::Store::LOCAL, ObjectStorage::Store::REMOTE].each do |store|
- expect(build(:terraform_state, file_store: store).file_store).to eq(store)
- end
- end
- end
- end
-
- describe '#update_file_store' do
- context 'when file is stored in object storage' do
- it_behaves_like 'mounted file in object store'
- end
-
- context 'when file is stored locally' do
- before do
- stub_terraform_state_object_storage(enabled: false)
- end
-
- it_behaves_like 'mounted file in local store'
- end
- end
-
describe '#latest_file' do
- subject { terraform_state.latest_file }
-
- context 'versioning is enabled' do
- let(:terraform_state) { create(:terraform_state, :with_version) }
- let(:latest_version) { terraform_state.latest_version }
-
- it { is_expected.to eq latest_version.file }
-
- context 'but no version exists yet' do
- let(:terraform_state) { create(:terraform_state) }
+ let(:terraform_state) { create(:terraform_state, :with_version) }
+ let(:latest_version) { terraform_state.latest_version }
- it { is_expected.to be_nil }
- end
- end
-
- context 'versioning is disabled' do
- let(:terraform_state) { create(:terraform_state, :with_file) }
+ subject { terraform_state.latest_file }
- it { is_expected.to eq terraform_state.file }
+ it { is_expected.to eq latest_version.file }
- context 'and a version exists (migration to versioned in progress)' do
- let!(:migrated_version) { create(:terraform_state_version, terraform_state: terraform_state) }
+ context 'but no version exists yet' do
+ let(:terraform_state) { create(:terraform_state) }
- it { is_expected.to eq terraform_state.latest_version.file }
- end
+ it { is_expected.to be_nil }
end
end
@@ -115,39 +61,30 @@ RSpec.describe Terraform::State do
end
end
- context 'versioning is disabled' do
- let(:terraform_state) { create(:terraform_state, :with_file) }
-
- it 'modifies the existing state record' do
- expect { subject }.not_to change { Terraform::StateVersion.count }
+ context 'versioning is disabled (migration to versioned in progress)' do
+ let(:terraform_state) { create(:terraform_state, versioning_enabled: false) }
+ let!(:migrated_version) { create(:terraform_state_version, terraform_state: terraform_state, version: 0) }
- expect(terraform_state.latest_file.read).to eq(data)
- end
-
- context 'and a version exists (migration to versioned in progress)' do
- let!(:migrated_version) { create(:terraform_state_version, terraform_state: terraform_state, version: 0) }
+ it 'creates a new version, corrects the migrated version number, and marks the state as versioned' do
+ expect { subject }.to change { Terraform::StateVersion.count }
- it 'creates a new version, corrects the migrated version number, and marks the state as versioned' do
- expect { subject }.to change { Terraform::StateVersion.count }
+ expect(migrated_version.reload.version).to eq(1)
+ expect(migrated_version.file.read).to eq(fixture_file('terraform/terraform.tfstate'))
- expect(migrated_version.reload.version).to eq(1)
- expect(migrated_version.file.read).to eq(terraform_state_file)
+ expect(terraform_state.reload.latest_version.version).to eq(version)
+ expect(terraform_state.latest_version.file.read).to eq(data)
+ expect(terraform_state).to be_versioning_enabled
+ end
- expect(terraform_state.reload.latest_version.version).to eq(version)
- expect(terraform_state.latest_version.file.read).to eq(data)
- expect(terraform_state).to be_versioning_enabled
+ context 'the current version cannot be determined' do
+ before do
+ migrated_version.update!(file: CarrierWaveStringFile.new('invalid-json'))
end
- context 'the current version cannot be determined' do
- before do
- migrated_version.update!(file: CarrierWaveStringFile.new('invalid-json'))
- end
-
- it 'uses version - 1 to correct the migrated version number' do
- expect { subject }.to change { Terraform::StateVersion.count }
+ it 'uses version - 1 to correct the migrated version number' do
+ expect { subject }.to change { Terraform::StateVersion.count }
- expect(migrated_version.reload.version).to eq(2)
- end
+ expect(migrated_version.reload.version).to eq(2)
end
end
end
diff --git a/spec/models/timelog_spec.rb b/spec/models/timelog_spec.rb
index ae1697fb7e6..e9019b55635 100644
--- a/spec/models/timelog_spec.rb
+++ b/spec/models/timelog_spec.rb
@@ -40,6 +40,14 @@ RSpec.describe Timelog do
expect(subject).to be_valid
end
+
+ describe 'when importing' do
+ it 'is valid if issue_id and merge_request_id are missing' do
+ subject.attributes = { issue: nil, merge_request: nil, importing: true }
+
+ expect(subject).to be_valid
+ end
+ end
end
describe 'scopes' do
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 19a6a3ce3c4..fb05c9e8052 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -99,6 +99,8 @@ RSpec.describe User do
it { is_expected.to have_many(:releases).dependent(:nullify) }
it { is_expected.to have_many(:metrics_users_starred_dashboards).inverse_of(:user) }
it { is_expected.to have_many(:reviews).inverse_of(:author) }
+ it { is_expected.to have_many(:merge_request_assignees).inverse_of(:assignee) }
+ it { is_expected.to have_many(:merge_request_reviewers).inverse_of(:reviewer) }
describe "#user_detail" do
it 'does not persist `user_detail` by default' do
@@ -1085,7 +1087,7 @@ RSpec.describe User do
@user.update!(email: 'new_primary@example.com')
end
- it 'adds old primary to secondary emails when secondary is a new email ' do
+ it 'adds old primary to secondary emails when secondary is a new email' do
@user.update!(email: 'new_primary@example.com')
@user.reload
@@ -1521,6 +1523,16 @@ RSpec.describe User do
expect(feed_token).not_to be_blank
expect(user.reload.feed_token).to eq feed_token
end
+
+ it 'ensures no feed token when disabled' do
+ allow(Gitlab::CurrentSettings).to receive(:disable_feed_token).and_return(true)
+
+ user = create(:user, feed_token: nil)
+ feed_token = user.feed_token
+
+ expect(feed_token).to be_blank
+ expect(user.reload.feed_token).to be_blank
+ end
end
describe 'static object token' do
@@ -1573,6 +1585,80 @@ RSpec.describe User do
end
end
+ describe '#two_factor_otp_enabled?' do
+ let_it_be(:user) { create(:user) }
+
+ context 'when 2FA is enabled by an MFA Device' do
+ let(:user) { create(:user, :two_factor) }
+
+ it { expect(user.two_factor_otp_enabled?).to eq(true) }
+ end
+
+ context 'FortiAuthenticator' do
+ context 'when enabled via GitLab settings' do
+ before do
+ allow(::Gitlab.config.forti_authenticator).to receive(:enabled).and_return(true)
+ end
+
+ context 'when feature is disabled for the user' do
+ before do
+ stub_feature_flags(forti_authenticator: false)
+ end
+
+ it { expect(user.two_factor_otp_enabled?).to eq(false) }
+ end
+
+ context 'when feature is enabled for the user' do
+ before do
+ stub_feature_flags(forti_authenticator: user)
+ end
+
+ it { expect(user.two_factor_otp_enabled?).to eq(true) }
+ end
+ end
+
+ context 'when disabled via GitLab settings' do
+ before do
+ allow(::Gitlab.config.forti_authenticator).to receive(:enabled).and_return(false)
+ end
+
+ it { expect(user.two_factor_otp_enabled?).to eq(false) }
+ end
+ end
+
+ context 'FortiTokenCloud' do
+ context 'when enabled via GitLab settings' do
+ before do
+ allow(::Gitlab.config.forti_token_cloud).to receive(:enabled).and_return(true)
+ end
+
+ context 'when feature is disabled for the user' do
+ before do
+ stub_feature_flags(forti_token_cloud: false)
+ end
+
+ it { expect(user.two_factor_otp_enabled?).to eq(false) }
+ end
+
+ context 'when feature is enabled for the user' do
+ before do
+ stub_feature_flags(forti_token_cloud: user)
+ end
+
+ it { expect(user.two_factor_otp_enabled?).to eq(true) }
+ end
+ end
+
+ context 'when disabled via GitLab settings' do
+ before do
+ allow(::Gitlab.config.forti_token_cloud).to receive(:enabled).and_return(false)
+ end
+
+ it { expect(user.two_factor_otp_enabled?).to eq(false) }
+ end
+ end
+ end
+
describe 'projects' do
before do
@user = create(:user)
@@ -2024,9 +2110,10 @@ RSpec.describe User do
end
describe '.search' do
- let!(:user) { create(:user, name: 'user', username: 'usern', email: 'email@gmail.com') }
- let!(:user2) { create(:user, name: 'user name', username: 'username', email: 'someemail@gmail.com') }
- let!(:user3) { create(:user, name: 'us', username: 'se', email: 'foo@gmail.com') }
+ let_it_be(:user) { create(:user, name: 'user', username: 'usern', email: 'email@example.com') }
+ let_it_be(:user2) { create(:user, name: 'user name', username: 'username', email: 'someemail@example.com') }
+ let_it_be(:user3) { create(:user, name: 'us', username: 'se', email: 'foo@example.com') }
+ let_it_be(:email) { create(:email, user: user, email: 'alias@example.com') }
describe 'name matching' do
it 'returns users with a matching name with exact match first' do
@@ -2056,7 +2143,7 @@ RSpec.describe User do
end
it 'does not return users with a partially matching Email' do
- expect(described_class.search(user.email[0..2])).not_to include(user, user2)
+ expect(described_class.search(user.email[1...-1])).to be_empty
end
it 'returns users with a matching Email regardless of the casing' do
@@ -2064,6 +2151,20 @@ RSpec.describe User do
end
end
+ describe 'secondary email matching' do
+ it 'returns users with a matching secondary email' do
+ expect(described_class.search(email.email)).to include(email.user)
+ end
+
+ it 'does not return users with a matching part of secondary email' do
+ expect(described_class.search(email.email[1...-1])).to be_empty
+ end
+
+ it 'returns users with a matching secondary email regardless of the casing' do
+ expect(described_class.search(email.email.upcase)).to include(email.user)
+ end
+ end
+
describe 'username matching' do
it 'returns users with a matching username' do
expect(described_class.search(user.username)).to eq([user, user2])
@@ -2103,65 +2204,119 @@ RSpec.describe User do
end
end
- describe '.search_with_secondary_emails' do
- delegate :search_with_secondary_emails, to: :described_class
+ describe '.search_without_secondary_emails' do
+ let_it_be(:user) { create(:user, name: 'John Doe', username: 'john.doe', email: 'someone.1@example.com' ) }
+ let_it_be(:another_user) { create(:user, name: 'Albert Smith', username: 'albert.smith', email: 'another.2@example.com' ) }
+ let_it_be(:email) { create(:email, user: another_user, email: 'alias@example.com') }
+
+ it 'returns users with a matching name' do
+ expect(described_class.search_without_secondary_emails(user.name)).to eq([user])
+ end
+
+ it 'returns users with a partially matching name' do
+ expect(described_class.search_without_secondary_emails(user.name[0..2])).to eq([user])
+ end
+
+ it 'returns users with a matching name regardless of the casing' do
+ expect(described_class.search_without_secondary_emails(user.name.upcase)).to eq([user])
+ end
+
+ it 'returns users with a matching email' do
+ expect(described_class.search_without_secondary_emails(user.email)).to eq([user])
+ end
- let!(:user) { create(:user, name: 'John Doe', username: 'john.doe', email: 'john.doe@example.com' ) }
- let!(:another_user) { create(:user, name: 'Albert Smith', username: 'albert.smith', email: 'albert.smith@example.com' ) }
- let!(:email) do
- create(:email, user: another_user, email: 'alias@example.com')
+ it 'does not return users with a partially matching email' do
+ expect(described_class.search_without_secondary_emails(user.email[1...-1])).to be_empty
+ end
+
+ it 'returns users with a matching email regardless of the casing' do
+ expect(described_class.search_without_secondary_emails(user.email.upcase)).to eq([user])
+ end
+
+ it 'returns users with a matching username' do
+ expect(described_class.search_without_secondary_emails(user.username)).to eq([user])
+ end
+
+ it 'returns users with a partially matching username' do
+ expect(described_class.search_without_secondary_emails(user.username[0..2])).to eq([user])
+ end
+
+ it 'returns users with a matching username regardless of the casing' do
+ expect(described_class.search_without_secondary_emails(user.username.upcase)).to eq([user])
+ end
+
+ it 'does not return users with a matching whole secondary email' do
+ expect(described_class.search_without_secondary_emails(email.email)).not_to include(email.user)
+ end
+
+ it 'does not return users with a matching part of secondary email' do
+ expect(described_class.search_without_secondary_emails(email.email[1...-1])).to be_empty
end
+ it 'returns no matches for an empty string' do
+ expect(described_class.search_without_secondary_emails('')).to be_empty
+ end
+
+ it 'returns no matches for nil' do
+ expect(described_class.search_without_secondary_emails(nil)).to be_empty
+ end
+ end
+
+ describe '.search_with_secondary_emails' do
+ let_it_be(:user) { create(:user, name: 'John Doe', username: 'john.doe', email: 'someone.1@example.com' ) }
+ let_it_be(:another_user) { create(:user, name: 'Albert Smith', username: 'albert.smith', email: 'another.2@example.com' ) }
+ let_it_be(:email) { create(:email, user: another_user, email: 'alias@example.com') }
+
it 'returns users with a matching name' do
- expect(search_with_secondary_emails(user.name)).to eq([user])
+ expect(described_class.search_with_secondary_emails(user.name)).to eq([user])
end
it 'returns users with a partially matching name' do
- expect(search_with_secondary_emails(user.name[0..2])).to eq([user])
+ expect(described_class.search_with_secondary_emails(user.name[0..2])).to eq([user])
end
it 'returns users with a matching name regardless of the casing' do
- expect(search_with_secondary_emails(user.name.upcase)).to eq([user])
+ expect(described_class.search_with_secondary_emails(user.name.upcase)).to eq([user])
end
it 'returns users with a matching email' do
- expect(search_with_secondary_emails(user.email)).to eq([user])
+ expect(described_class.search_with_secondary_emails(user.email)).to eq([user])
end
it 'does not return users with a partially matching email' do
- expect(search_with_secondary_emails(user.email[0..2])).not_to include([user])
+ expect(described_class.search_with_secondary_emails(user.email[1...-1])).to be_empty
end
it 'returns users with a matching email regardless of the casing' do
- expect(search_with_secondary_emails(user.email.upcase)).to eq([user])
+ expect(described_class.search_with_secondary_emails(user.email.upcase)).to eq([user])
end
it 'returns users with a matching username' do
- expect(search_with_secondary_emails(user.username)).to eq([user])
+ expect(described_class.search_with_secondary_emails(user.username)).to eq([user])
end
it 'returns users with a partially matching username' do
- expect(search_with_secondary_emails(user.username[0..2])).to eq([user])
+ expect(described_class.search_with_secondary_emails(user.username[0..2])).to eq([user])
end
it 'returns users with a matching username regardless of the casing' do
- expect(search_with_secondary_emails(user.username.upcase)).to eq([user])
+ expect(described_class.search_with_secondary_emails(user.username.upcase)).to eq([user])
end
it 'returns users with a matching whole secondary email' do
- expect(search_with_secondary_emails(email.email)).to eq([email.user])
+ expect(described_class.search_with_secondary_emails(email.email)).to eq([email.user])
end
it 'does not return users with a matching part of secondary email' do
- expect(search_with_secondary_emails(email.email[1..4])).not_to include([email.user])
+ expect(described_class.search_with_secondary_emails(email.email[1...-1])).to be_empty
end
it 'returns no matches for an empty string' do
- expect(search_with_secondary_emails('')).to be_empty
+ expect(described_class.search_with_secondary_emails('')).to be_empty
end
it 'returns no matches for nil' do
- expect(search_with_secondary_emails(nil)).to be_empty
+ expect(described_class.search_with_secondary_emails(nil)).to be_empty
end
end
@@ -2454,6 +2609,28 @@ RSpec.describe User do
end
end
+ context 'crowd synchronized user' do
+ describe '#crowd_user?' do
+ it 'is true if provider is crowd' do
+ user = create(:omniauth_user, provider: 'crowd')
+
+ expect(user.crowd_user?).to be_truthy
+ end
+
+ it 'is false for other providers' do
+ user = create(:omniauth_user, provider: 'other-provider')
+
+ expect(user.crowd_user?).to be_falsey
+ end
+
+ it 'is false if no extern_uid is provided' do
+ user = create(:omniauth_user, extern_uid: nil)
+
+ expect(user.crowd_user?).to be_falsey
+ end
+ end
+ end
+
describe '#requires_ldap_check?' do
let(:user) { described_class.new }
@@ -2878,6 +3055,57 @@ RSpec.describe User do
end
end
+ describe '#solo_owned_groups' do
+ let_it_be_with_refind(:user) { create(:user) }
+
+ subject(:solo_owned_groups) { user.solo_owned_groups }
+
+ context 'no owned groups' do
+ it { is_expected.to be_empty }
+ end
+
+ context 'has owned groups' do
+ let_it_be(:group) { create(:group) }
+
+ before do
+ group.add_owner(user)
+ end
+
+ context 'not solo owner' do
+ let_it_be(:user2) { create(:user) }
+
+ before do
+ group.add_owner(user2)
+ end
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'solo owner' do
+ it { is_expected.to include(group) }
+
+ it 'avoids N+1 queries' do
+ fresh_user = User.find(user.id)
+ control_count = ActiveRecord::QueryRecorder.new do
+ fresh_user.solo_owned_groups
+ end.count
+
+ create(:group).add_owner(user)
+
+ expect { solo_owned_groups }.not_to exceed_query_limit(control_count)
+ end
+ end
+ end
+ end
+
+ describe '#can_remove_self?' do
+ let(:user) { create(:user) }
+
+ it 'returns true' do
+ expect(user.can_remove_self?).to eq true
+ end
+ end
+
describe "#recent_push" do
let(:user) { build(:user) }
let(:project) { build(:project) }
diff --git a/spec/models/wiki_page/meta_spec.rb b/spec/models/wiki_page/meta_spec.rb
index aaac72cbc68..24906d4fb79 100644
--- a/spec/models/wiki_page/meta_spec.rb
+++ b/spec/models/wiki_page/meta_spec.rb
@@ -25,13 +25,8 @@ RSpec.describe WikiPage::Meta do
end
it { is_expected.to validate_presence_of(:project_id) }
- it { is_expected.to validate_presence_of(:title) }
-
- it 'is forbidden to add extremely long titles' do
- expect do
- create(:wiki_page_meta, project: project, title: FFaker::Lorem.characters(300))
- end.to raise_error(ActiveRecord::ValueTooLong)
- end
+ it { is_expected.to validate_length_of(:title).is_at_most(255) }
+ it { is_expected.not_to allow_value(nil).for(:title) }
it 'is forbidden to have two records for the same project with the same canonical_slug' do
the_slug = generate(:sluggified_title)
diff --git a/spec/models/wiki_page/slug_spec.rb b/spec/models/wiki_page/slug_spec.rb
index cf256c67277..9e83b9a8182 100644
--- a/spec/models/wiki_page/slug_spec.rb
+++ b/spec/models/wiki_page/slug_spec.rb
@@ -48,8 +48,9 @@ RSpec.describe WikiPage::Slug do
build(:wiki_page_slug, canonical: canonical, wiki_page_meta: meta)
end
- it { is_expected.to validate_presence_of(:slug) }
it { is_expected.to validate_uniqueness_of(:slug).scoped_to(:wiki_page_meta_id) }
+ it { is_expected.to validate_length_of(:slug).is_at_most(2048) }
+ it { is_expected.not_to allow_value(nil).for(:slug) }
describe 'only_one_slug_can_be_canonical_per_meta_record' do
context 'there are no other slugs' do
diff --git a/spec/models/zoom_meeting_spec.rb b/spec/models/zoom_meeting_spec.rb
index 00a0f92e848..2b45533035d 100644
--- a/spec/models/zoom_meeting_spec.rb
+++ b/spec/models/zoom_meeting_spec.rb
@@ -12,8 +12,8 @@ RSpec.describe ZoomMeeting do
end
describe 'Associations' do
- it { is_expected.to belong_to(:project).required }
- it { is_expected.to belong_to(:issue).required }
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:issue) }
end
describe 'scopes' do
@@ -40,6 +40,16 @@ RSpec.describe ZoomMeeting do
end
describe 'Validations' do
+ it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_presence_of(:issue) }
+
+ describe 'when importing' do
+ subject { build(:zoom_meeting, importing: true) }
+
+ it { is_expected.not_to validate_presence_of(:project) }
+ it { is_expected.not_to validate_presence_of(:issue) }
+ end
+
describe 'url' do
it { is_expected.to validate_presence_of(:url) }
it { is_expected.to validate_length_of(:url).is_at_most(255) }