path: root/spec/models
Diffstat (limited to 'spec/models')
-rw-r--r--  spec/models/application_setting_spec.rb | 34
-rw-r--r--  spec/models/ci/build_spec.rb | 9
-rw-r--r--  spec/models/ci/job_artifact_spec.rb | 3
-rw-r--r--  spec/models/ci/runner_spec.rb | 108
-rw-r--r--  spec/models/clusters/applications/prometheus_spec.rb | 72
-rw-r--r--  spec/models/concerns/redis_cacheable_spec.rb | 39
-rw-r--r--  spec/models/concerns/routable_spec.rb | 2
-rw-r--r--  spec/models/group_spec.rb | 17
-rw-r--r--  spec/models/key_spec.rb | 58
-rw-r--r--  spec/models/lfs_file_lock_spec.rb | 57
-rw-r--r--  spec/models/namespace_spec.rb | 269
-rw-r--r--  spec/models/note_spec.rb | 4
-rw-r--r--  spec/models/project_auto_devops_spec.rb | 43
-rw-r--r--  spec/models/project_services/kubernetes_service_spec.rb | 2
-rw-r--r--  spec/models/project_services/prometheus_service_spec.rb | 237
-rw-r--r--  spec/models/project_spec.rb | 97
-rw-r--r--  spec/models/project_wiki_spec.rb | 13
-rw-r--r--  spec/models/repository_spec.rb | 83
-rw-r--r--  spec/models/route_spec.rb | 6
-rw-r--r--  spec/models/todo_spec.rb | 1
-rw-r--r--  spec/models/upload_spec.rb | 81
-rw-r--r--  spec/models/user_callout_spec.rb | 16
-rw-r--r--  spec/models/user_spec.rb | 100
-rw-r--r--  spec/models/wiki_page_spec.rb | 219
24 files changed, 1272 insertions, 298 deletions
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index ef480e7a80a..ae2d34750a7 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -114,6 +114,40 @@ describe ApplicationSetting do
it { expect(setting.repository_storages).to eq(['default']) }
end
+ context 'auto_devops_domain setting' do
+ context 'when auto_devops_enabled? is true' do
+ before do
+ setting.update(auto_devops_enabled: true)
+ end
+
+ it 'can be blank' do
+ setting.update(auto_devops_domain: '')
+
+ expect(setting).to be_valid
+ end
+
+ context 'with a valid value' do
+ before do
+ setting.update(auto_devops_domain: 'domain.com')
+ end
+
+ it 'is valid' do
+ expect(setting).to be_valid
+ end
+ end
+
+ context 'with an invalid value' do
+ before do
+ setting.update(auto_devops_domain: 'definitelynotahostname')
+ end
+
+ it 'is invalid' do
+ expect(setting).to be_invalid
+ end
+ end
+ end
+ end
+
context 'circuitbreaker settings' do
[:circuitbreaker_failure_count_threshold,
:circuitbreaker_check_interval,
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index f5b3b4a9fc5..9e159c3f1fe 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -675,7 +675,7 @@ describe Ci::Build do
context 'build is erasable' do
context 'new artifacts' do
- let!(:build) { create(:ci_build, :trace, :success, :artifacts) }
+ let!(:build) { create(:ci_build, :trace_artifact, :success, :artifacts) }
describe '#erase' do
before do
@@ -709,7 +709,7 @@ describe Ci::Build do
end
describe '#erased?' do
- let!(:build) { create(:ci_build, :trace, :success, :artifacts) }
+ let!(:build) { create(:ci_build, :trace_artifact, :success, :artifacts) }
subject { build.erased? }
context 'job has not been erased' do
@@ -744,7 +744,7 @@ describe Ci::Build do
context 'old artifacts' do
context 'build is erasable' do
context 'new artifacts' do
- let!(:build) { create(:ci_build, :trace, :success, :legacy_artifacts) }
+ let!(:build) { create(:ci_build, :trace_artifact, :success, :legacy_artifacts) }
describe '#erase' do
before do
@@ -778,7 +778,7 @@ describe Ci::Build do
end
describe '#erased?' do
- let!(:build) { create(:ci_build, :trace, :success, :legacy_artifacts) }
+ let!(:build) { create(:ci_build, :trace_artifact, :success, :legacy_artifacts) }
subject { build.erased? }
context 'job has not been erased' do
@@ -1413,6 +1413,7 @@ describe Ci::Build do
[
{ key: 'CI', value: 'true', public: true },
{ key: 'GITLAB_CI', value: 'true', public: true },
+ { key: 'GITLAB_FEATURES', value: '', public: true },
{ key: 'CI_SERVER_NAME', value: 'GitLab', public: true },
{ key: 'CI_SERVER_VERSION', value: Gitlab::VERSION, public: true },
{ key: 'CI_SERVER_REVISION', value: Gitlab::REVISION, public: true },
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index 0e18a326c68..a2bd36537e6 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -12,6 +12,9 @@ describe Ci::JobArtifact do
it { is_expected.to respond_to(:created_at) }
it { is_expected.to respond_to(:updated_at) }
+ it { is_expected.to delegate_method(:open).to(:file) }
+ it { is_expected.to delegate_method(:exists?).to(:file) }
+
describe '#set_size' do
it 'sets the size' do
expect(artifact.size).to eq(106365)
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index b2b64e6ff48..ab170e6351c 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -95,28 +95,68 @@ describe Ci::Runner do
subject { runner.online? }
- context 'never contacted' do
+ before do
+ allow_any_instance_of(described_class).to receive(:cached_attribute).and_call_original
+ allow_any_instance_of(described_class).to receive(:cached_attribute)
+ .with(:platform).and_return("darwin")
+ end
+
+ context 'no cache value' do
before do
- runner.contacted_at = nil
+ stub_redis_runner_contacted_at(nil)
end
- it { is_expected.to be_falsey }
- end
+ context 'never contacted' do
+ before do
+ runner.contacted_at = nil
+ end
- context 'contacted long time ago time' do
- before do
- runner.contacted_at = 1.year.ago
+ it { is_expected.to be_falsey }
+ end
+
+ context 'contacted long time ago time' do
+ before do
+ runner.contacted_at = 1.year.ago
+ end
+
+ it { is_expected.to be_falsey }
end
- it { is_expected.to be_falsey }
+ context 'contacted 1s ago' do
+ before do
+ runner.contacted_at = 1.second.ago
+ end
+
+ it { is_expected.to be_truthy }
+ end
end
- context 'contacted 1s ago' do
- before do
- runner.contacted_at = 1.second.ago
+ context 'with cache value' do
+ context 'contacted long time ago time' do
+ before do
+ runner.contacted_at = 1.year.ago
+ stub_redis_runner_contacted_at(1.year.ago.to_s)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'contacted 1s ago' do
+ before do
+ runner.contacted_at = 50.minutes.ago
+ stub_redis_runner_contacted_at(1.second.ago.to_s)
+ end
+
+ it { is_expected.to be_truthy }
end
+ end
- it { is_expected.to be_truthy }
+ def stub_redis_runner_contacted_at(value)
+ Gitlab::Redis::SharedState.with do |redis|
+ cache_key = runner.send(:cache_attribute_key)
+ expect(redis).to receive(:get).with(cache_key)
+ .and_return({ contacted_at: value }.to_json).at_least(:once)
+ end
end
end
@@ -361,6 +401,50 @@ describe Ci::Runner do
end
end
+ describe '#update_cached_info' do
+ let(:runner) { create(:ci_runner) }
+
+ subject { runner.update_cached_info(architecture: '18-bit') }
+
+ context 'when database was updated recently' do
+ before do
+ runner.contacted_at = Time.now
+ end
+
+ it 'updates cache' do
+ expect_redis_update
+
+ subject
+ end
+ end
+
+ context 'when database was not updated recently' do
+ before do
+ runner.contacted_at = 2.hours.ago
+ end
+
+ it 'updates database' do
+ expect_redis_update
+
+ expect { subject }.to change { runner.reload.read_attribute(:contacted_at) }
+ .and change { runner.reload.read_attribute(:architecture) }
+ end
+
+ it 'updates cache' do
+ expect_redis_update
+
+ subject
+ end
+ end
+
+ def expect_redis_update
+ Gitlab::Redis::SharedState.with do |redis|
+ redis_key = runner.send(:cache_attribute_key)
+ expect(redis).to receive(:set).with(redis_key, anything, any_args)
+ end
+ end
+ end
+
describe '#destroy' do
let(:runner) { create(:ci_runner) }
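
The #update_cached_info examples above expect every call to refresh the Redis cache, but a database write only when contacted_at is stale. A minimal sketch of what the method could look like on Ci::Runner; the constant name and 40-minute threshold are assumptions, not the verbatim implementation:

UPDATE_DB_RUNNER_INFO_EVERY = 40.minutes  # assumed staleness threshold

def update_cached_info(values)
  values = values&.slice(:version, :revision, :platform, :architecture) || {}
  values[:contacted_at] = Time.now

  # Always refresh the Redis cache (via a RedisCacheable-style helper) ...
  cache_attributes(values)

  # ... but only write to the runners table when the stored timestamp is
  # stale, so a runner polling every few seconds doesn't update a row each time.
  update(values) if !contacted_at || contacted_at < UPDATE_DB_RUNNER_INFO_EVERY.ago
end
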
diff --git a/spec/models/clusters/applications/prometheus_spec.rb b/spec/models/clusters/applications/prometheus_spec.rb
index 696099f7cf7..01037919530 100644
--- a/spec/models/clusters/applications/prometheus_spec.rb
+++ b/spec/models/clusters/applications/prometheus_spec.rb
@@ -6,6 +6,24 @@ describe Clusters::Applications::Prometheus do
include_examples 'cluster application specs', described_class
+ describe 'transition to installed' do
+ let(:project) { create(:project) }
+ let(:cluster) { create(:cluster, projects: [project]) }
+ let(:prometheus_service) { double('prometheus_service') }
+
+ subject { create(:clusters_applications_prometheus, :installing, cluster: cluster) }
+
+ before do
+ allow(project).to receive(:find_or_initialize_service).with('prometheus').and_return prometheus_service
+ end
+
+ it 'ensures Prometheus service is activated' do
+ expect(prometheus_service).to receive(:update).with(active: true)
+
+ subject.make_installed
+ end
+ end
+
describe "#chart_values_file" do
subject { create(:clusters_applications_prometheus).chart_values_file }
@@ -13,4 +31,58 @@ describe Clusters::Applications::Prometheus do
expect(subject).to eq("#{Rails.root}/vendor/prometheus/values.yaml")
end
end
+
+ describe '#proxy_client' do
+ context 'cluster is nil' do
+ it 'returns nil' do
+ expect(subject.cluster).to be_nil
+ expect(subject.proxy_client).to be_nil
+ end
+ end
+
+ context "cluster doesn't have kubeclient" do
+ let(:cluster) { create(:cluster) }
+ subject { create(:clusters_applications_prometheus, cluster: cluster) }
+
+ it 'returns nil' do
+ expect(subject.proxy_client).to be_nil
+ end
+ end
+
+ context 'cluster has kubeclient' do
+ let(:kubernetes_url) { 'http://example.com' }
+ let(:k8s_discover_response) do
+ {
+ resources: [
+ {
+ name: 'service',
+ kind: 'Service'
+ }
+ ]
+ }
+ end
+
+ let(:kube_client) { Kubeclient::Client.new(kubernetes_url) }
+
+ let(:cluster) { create(:cluster) }
+ subject { create(:clusters_applications_prometheus, cluster: cluster) }
+
+ before do
+ allow(kube_client.rest_client).to receive(:get).and_return(k8s_discover_response.to_json)
+ allow(subject.cluster).to receive(:kubeclient).and_return(kube_client)
+ end
+
+ it 'creates proxy prometheus rest client' do
+ expect(subject.proxy_client).to be_instance_of(RestClient::Resource)
+ end
+
+ it 'creates proper url' do
+ expect(subject.proxy_client.url).to eq('http://example.com/api/v1/proxy/namespaces/gitlab-managed-apps/service/prometheus-prometheus-server:80')
+ end
+
+ it 'copies options and headers from kube client to proxy client' do
+ expect(subject.proxy_client.options).to eq(kube_client.rest_client.options.merge(headers: kube_client.headers))
+ end
+ end
+ end
end
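
The #proxy_client examples above pin the proxy URL to the Kubernetes API service-proxy path. A sketch of how that URL could be assembled from the cluster's API endpoint, using the namespace, service name and port asserted in the spec (the method name is illustrative):

def prometheus_proxy_url(kube_api_url)
  namespace = 'gitlab-managed-apps'
  service   = 'prometheus-prometheus-server'
  port      = 80

  "#{kube_api_url}/api/v1/proxy/namespaces/#{namespace}/service/#{service}:#{port}"
end

With kube_api_url set to 'http://example.com' this yields exactly the URL checked in the 'creates proper url' example.
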
diff --git a/spec/models/concerns/redis_cacheable_spec.rb b/spec/models/concerns/redis_cacheable_spec.rb
new file mode 100644
index 00000000000..3d7963120b6
--- /dev/null
+++ b/spec/models/concerns/redis_cacheable_spec.rb
@@ -0,0 +1,39 @@
+require 'spec_helper'
+
+describe RedisCacheable do
+ let(:model) { double }
+
+ before do
+ model.extend(described_class)
+ allow(model).to receive(:cache_attribute_key).and_return('key')
+ end
+
+ describe '#cached_attribute' do
+ let(:payload) { { attribute: 'value' } }
+
+ subject { model.cached_attribute(payload.keys.first) }
+
+ it 'gets the cache attribute' do
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis).to receive(:get).with('key')
+ .and_return(payload.to_json)
+ end
+
+ expect(subject).to eq(payload.values.first)
+ end
+ end
+
+ describe '#cache_attributes' do
+ let(:values) { { name: 'new_name' } }
+
+ subject { model.cache_attributes(values) }
+
+ it 'sets the cache attributes' do
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis).to receive(:set).with('key', values.to_json, anything)
+ end
+
+ subject
+ end
+ end
+end
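
The new redis_cacheable_spec.rb above stubs cache_attribute_key and expects a JSON round-trip through Gitlab::Redis::SharedState. A minimal sketch of a concern that would satisfy those expectations; the expiry value and the private helper are assumptions, not the verbatim GitLab implementation:

module RedisCacheable
  # Assumed lifetime for cached attributes in Redis.
  CACHED_ATTRIBUTES_EXPIRY = 24.hours

  # Reads a single attribute out of the JSON hash cached for this record.
  def cached_attribute(attribute)
    (cached_attributes || {})[attribute]
  end

  # Writes the given attributes as a JSON hash under the per-record key.
  def cache_attributes(values)
    Gitlab::Redis::SharedState.with do |redis|
      redis.set(cache_attribute_key, values.to_json, ex: CACHED_ATTRIBUTES_EXPIRY)
    end
  end

  private

  def cached_attributes
    Gitlab::Redis::SharedState.with do |redis|
      data = redis.get(cache_attribute_key)
      JSON.parse(data, symbolize_names: true) if data
    end
  end
end
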
diff --git a/spec/models/concerns/routable_spec.rb b/spec/models/concerns/routable_spec.rb
index 3106207811a..8cb50d7465c 100644
--- a/spec/models/concerns/routable_spec.rb
+++ b/spec/models/concerns/routable_spec.rb
@@ -39,7 +39,7 @@ describe Group, 'Routable' do
create(:group, parent: group, path: 'xyz')
duplicate = build(:project, namespace: group, path: 'xyz')
- expect { duplicate.save! }.to raise_error(ActiveRecord::RecordInvalid, 'Validation failed: Route path has already been taken, Route is invalid')
+ expect { duplicate.save! }.to raise_error(ActiveRecord::RecordInvalid, 'Validation failed: Path has already been taken')
end
end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 5e82a2988ce..338fb314ee9 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -41,7 +41,6 @@ describe Group do
describe 'validations' do
it { is_expected.to validate_presence_of :name }
- it { is_expected.to validate_uniqueness_of(:name).scoped_to(:parent_id) }
it { is_expected.to validate_presence_of :path }
it { is_expected.not_to validate_presence_of :owner }
it { is_expected.to validate_presence_of :two_factor_grace_period }
@@ -582,4 +581,20 @@ describe Group do
end
end
end
+
+ describe '#has_parent?' do
+ context 'when the group has a parent' do
+ it 'should be truthy' do
+ group = create(:group, :nested)
+ expect(group.has_parent?).to be_truthy
+ end
+ end
+
+ context 'when the group has no parent' do
+ it 'should be falsy' do
+ group = create(:group, parent: nil)
+ expect(group.has_parent?).to be_falsy
+ end
+ end
+ end
end
diff --git a/spec/models/key_spec.rb b/spec/models/key_spec.rb
index 4cd9e3f4f1d..bf5703ac986 100644
--- a/spec/models/key_spec.rb
+++ b/spec/models/key_spec.rb
@@ -1,13 +1,6 @@
require 'spec_helper'
describe Key, :mailer do
- include Gitlab::CurrentSettings
-
- describe 'modules' do
- subject { described_class }
- it { is_expected.to include_module(Gitlab::CurrentSettings) }
- end
-
describe "Associations" do
it { is_expected.to belong_to(:user) }
end
@@ -79,16 +72,53 @@ describe Key, :mailer do
expect(build(:key)).to be_valid
end
- it 'accepts a key with newline charecters after stripping them' do
- key = build(:key)
- key.key = key.key.insert(100, "\n")
- key.key = key.key.insert(40, "\r\n")
- expect(key).to be_valid
- end
-
it 'rejects the unfingerprintable key (not a key)' do
expect(build(:key, key: 'ssh-rsa an-invalid-key==')).not_to be_valid
end
+
+ where(:factory, :chars, :expected_sections) do
+ [
+ [:key, ["\n", "\r\n"], 3],
+ [:key, [' ', ' '], 3],
+ [:key_without_comment, [' ', ' '], 2]
+ ]
+ end
+
+ with_them do
+ let!(:key) { create(factory) }
+ let!(:original_fingerprint) { key.fingerprint }
+
+ it 'accepts a key with blank space characters after stripping them' do
+ modified_key = key.key.insert(100, chars.first).insert(40, chars.last)
+ _, content = modified_key.split
+
+ key.update!(key: modified_key)
+
+ expect(key).to be_valid
+ expect(key.key.split.size).to eq(expected_sections)
+
+ expect(content).not_to match(/\s/)
+ expect(original_fingerprint).to eq(key.fingerprint)
+ end
+ end
+ end
+
+ context 'validate size' do
+ where(:key_content, :result) do
+ [
+ [Spec::Support::Helpers::KeyGeneratorHelper.new(512).generate, false],
+ [Spec::Support::Helpers::KeyGeneratorHelper.new(8192).generate, false],
+ [Spec::Support::Helpers::KeyGeneratorHelper.new(1024).generate, true]
+ ]
+ end
+
+ with_them do
+ it 'validates the size of the key' do
+ key = build(:key, key: key_content)
+
+ expect(key.valid?).to eq(result)
+ end
+ end
end
context 'validate it meets key restrictions' do
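
The where/with_them blocks introduced above use the rspec-parameterized table style. A tiny self-contained example of how such a table expands into examples; the values here are illustrative only:

require 'rspec-parameterized'

RSpec.describe 'splitting a key into sections' do
  where(:input, :expected_sections) do
    [
      ['ssh-rsa AAAA comment', 3],
      ['ssh-rsa AAAA', 2]
    ]
  end

  with_them do
    # Each row becomes its own example group, with the columns exposed as
    # helper methods (input, expected_sections) inside the block.
    it { expect(input.split.size).to eq(expected_sections) }
  end
end
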
diff --git a/spec/models/lfs_file_lock_spec.rb b/spec/models/lfs_file_lock_spec.rb
new file mode 100644
index 00000000000..ce87b01b49c
--- /dev/null
+++ b/spec/models/lfs_file_lock_spec.rb
@@ -0,0 +1,57 @@
+require 'rails_helper'
+
+describe LfsFileLock do
+ set(:lfs_file_lock) { create(:lfs_file_lock) }
+ subject { lfs_file_lock }
+
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:user) }
+
+ it { is_expected.to validate_presence_of(:project_id) }
+ it { is_expected.to validate_presence_of(:user_id) }
+ it { is_expected.to validate_presence_of(:path) }
+
+ describe '#can_be_unlocked_by?' do
+ let(:developer) { create(:user) }
+ let(:master) { create(:user) }
+
+ before do
+ project = lfs_file_lock.project
+
+ project.add_developer(developer)
+ project.add_master(master)
+ end
+
+ context "when it's forced" do
+ it 'can be unlocked by the author' do
+ user = lfs_file_lock.user
+
+ expect(lfs_file_lock.can_be_unlocked_by?(user, true)).to eq(true)
+ end
+
+ it 'can be unlocked by a master' do
+ expect(lfs_file_lock.can_be_unlocked_by?(master, true)).to eq(true)
+ end
+
+ it "can't be unlocked by other user" do
+ expect(lfs_file_lock.can_be_unlocked_by?(developer, true)).to eq(false)
+ end
+ end
+
+ context "when it isn't forced" do
+ it 'can be unlocked by the author' do
+ user = lfs_file_lock.user
+
+ expect(lfs_file_lock.can_be_unlocked_by?(user)).to eq(true)
+ end
+
+ it "can't be unlocked by a master" do
+ expect(lfs_file_lock.can_be_unlocked_by?(master)).to eq(false)
+ end
+
+ it "can't be unlocked by other user" do
+ expect(lfs_file_lock.can_be_unlocked_by?(developer)).to eq(false)
+ end
+ end
+ end
+end
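
The new lfs_file_lock_spec.rb above fixes the unlock rules: the author can always unlock, and a forced unlock additionally works for users who can administer the project. A sketch consistent with those examples; the can?(:admin_project, ...) check is an assumption about how masters are detected:

def can_be_unlocked_by?(current_user, forced = false)
  return true if current_user == user

  forced && current_user.can?(:admin_project, project)
end
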
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index c3673a0e2a3..e626efd054d 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -15,7 +15,6 @@ describe Namespace do
describe 'validations' do
it { is_expected.to validate_presence_of(:name) }
- it { is_expected.to validate_uniqueness_of(:name).scoped_to(:parent_id) }
it { is_expected.to validate_length_of(:name).is_at_most(255) }
it { is_expected.to validate_length_of(:description).is_at_most(255) }
it { is_expected.to validate_presence_of(:path) }
@@ -169,84 +168,105 @@ describe Namespace do
end
describe '#move_dir', :request_store do
- let(:namespace) { create(:namespace) }
- let!(:project) { create(:project_empty_repo, namespace: namespace) }
+ shared_examples "namespace restrictions" do
+ context "when any project has container images" do
+ let(:container_repository) { create(:container_repository) }
- it "raises error when directory exists" do
- expect { namespace.move_dir }.to raise_error("namespace directory cannot be moved")
- end
+ before do
+ stub_container_registry_config(enabled: true)
+ stub_container_registry_tags(repository: :any, tags: ['tag'])
- it "moves dir if path changed" do
- namespace.update_attributes(path: namespace.full_path + '_new')
+ create(:project, namespace: namespace, container_repositories: [container_repository])
- expect(gitlab_shell.exists?(project.repository_storage_path, "#{namespace.path}/#{project.path}.git")).to be_truthy
- end
+ allow(namespace).to receive(:path_was).and_return(namespace.path)
+ allow(namespace).to receive(:path).and_return('new_path')
+ end
- context "when any project has container images" do
- let(:container_repository) { create(:container_repository) }
+ it 'raises an error about not movable project' do
+ expect { namespace.move_dir }.to raise_error(/Namespace cannot be moved/)
+ end
+ end
+ end
- before do
- stub_container_registry_config(enabled: true)
- stub_container_registry_tags(repository: :any, tags: ['tag'])
+ context 'legacy storage' do
+ let(:namespace) { create(:namespace) }
+ let!(:project) { create(:project_empty_repo, :legacy_storage, namespace: namespace) }
- create(:project, namespace: namespace, container_repositories: [container_repository])
+ it_behaves_like 'namespace restrictions'
- allow(namespace).to receive(:path_was).and_return(namespace.path)
- allow(namespace).to receive(:path).and_return('new_path')
+ it "raises error when directory exists" do
+ expect { namespace.move_dir }.to raise_error("namespace directory cannot be moved")
end
- it 'raises an error about not movable project' do
- expect { namespace.move_dir }.to raise_error(/Namespace cannot be moved/)
+ it "moves dir if path changed" do
+ namespace.update_attributes(path: namespace.full_path + '_new')
+
+ expect(gitlab_shell.exists?(project.repository_storage_path, "#{namespace.path}/#{project.path}.git")).to be_truthy
end
- end
- context 'with subgroups' do
- let(:parent) { create(:group, name: 'parent', path: 'parent') }
- let(:child) { create(:group, name: 'child', path: 'child', parent: parent) }
- let!(:project) { create(:project_empty_repo, path: 'the-project', namespace: child, skip_disk_validation: true) }
- let(:uploads_dir) { File.join(CarrierWave.root, FileUploader.base_dir) }
- let(:pages_dir) { File.join(TestEnv.pages_path) }
+ context 'with subgroups' do
+ let(:parent) { create(:group, name: 'parent', path: 'parent') }
+ let(:child) { create(:group, name: 'child', path: 'child', parent: parent) }
+ let!(:project) { create(:project_empty_repo, :legacy_storage, path: 'the-project', namespace: child, skip_disk_validation: true) }
+ let(:uploads_dir) { FileUploader.root }
+ let(:pages_dir) { File.join(TestEnv.pages_path) }
- before do
- FileUtils.mkdir_p(File.join(uploads_dir, 'parent', 'child', 'the-project'))
- FileUtils.mkdir_p(File.join(pages_dir, 'parent', 'child', 'the-project'))
- end
+ before do
+ FileUtils.mkdir_p(File.join(uploads_dir, project.full_path))
+ FileUtils.mkdir_p(File.join(pages_dir, project.full_path))
+ end
- context 'renaming child' do
- it 'correctly moves the repository, uploads and pages' do
- expected_repository_path = File.join(TestEnv.repos_path, 'parent', 'renamed', 'the-project.git')
- expected_upload_path = File.join(uploads_dir, 'parent', 'renamed', 'the-project')
- expected_pages_path = File.join(pages_dir, 'parent', 'renamed', 'the-project')
+ context 'renaming child' do
+ it 'correctly moves the repository, uploads and pages' do
+ expected_repository_path = File.join(TestEnv.repos_path, 'parent', 'renamed', 'the-project.git')
+ expected_upload_path = File.join(uploads_dir, 'parent', 'renamed', 'the-project')
+ expected_pages_path = File.join(pages_dir, 'parent', 'renamed', 'the-project')
- child.update_attributes!(path: 'renamed')
+ child.update_attributes!(path: 'renamed')
- expect(File.directory?(expected_repository_path)).to be(true)
- expect(File.directory?(expected_upload_path)).to be(true)
- expect(File.directory?(expected_pages_path)).to be(true)
+ expect(File.directory?(expected_repository_path)).to be(true)
+ expect(File.directory?(expected_upload_path)).to be(true)
+ expect(File.directory?(expected_pages_path)).to be(true)
+ end
end
- end
- context 'renaming parent' do
- it 'correctly moves the repository, uploads and pages' do
- expected_repository_path = File.join(TestEnv.repos_path, 'renamed', 'child', 'the-project.git')
- expected_upload_path = File.join(uploads_dir, 'renamed', 'child', 'the-project')
- expected_pages_path = File.join(pages_dir, 'renamed', 'child', 'the-project')
+ context 'renaming parent' do
+ it 'correctly moves the repository, uploads and pages' do
+ expected_repository_path = File.join(TestEnv.repos_path, 'renamed', 'child', 'the-project.git')
+ expected_upload_path = File.join(uploads_dir, 'renamed', 'child', 'the-project')
+ expected_pages_path = File.join(pages_dir, 'renamed', 'child', 'the-project')
- parent.update_attributes!(path: 'renamed')
+ parent.update_attributes!(path: 'renamed')
- expect(File.directory?(expected_repository_path)).to be(true)
- expect(File.directory?(expected_upload_path)).to be(true)
- expect(File.directory?(expected_pages_path)).to be(true)
+ expect(File.directory?(expected_repository_path)).to be(true)
+ expect(File.directory?(expected_upload_path)).to be(true)
+ expect(File.directory?(expected_pages_path)).to be(true)
+ end
end
end
end
+ context 'hashed storage' do
+ let(:namespace) { create(:namespace) }
+ let!(:project) { create(:project_empty_repo, namespace: namespace) }
+
+ it_behaves_like 'namespace restrictions'
+
+ it "repository directory remains unchanged if path changed" do
+ before_disk_path = project.disk_path
+ namespace.update_attributes(path: namespace.full_path + '_new')
+
+ expect(before_disk_path).to eq(project.disk_path)
+ expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_truthy
+ end
+ end
+
it 'updates project full path in .git/config for each project inside namespace' do
parent = create(:group, name: 'mygroup', path: 'mygroup')
subgroup = create(:group, name: 'mysubgroup', path: 'mysubgroup', parent: parent)
- project_in_parent_group = create(:project, :repository, namespace: parent, name: 'foo1')
- hashed_project_in_subgroup = create(:project, :repository, :hashed, namespace: subgroup, name: 'foo2')
- legacy_project_in_subgroup = create(:project, :repository, namespace: subgroup, name: 'foo3')
+ project_in_parent_group = create(:project, :legacy_storage, :repository, namespace: parent, name: 'foo1')
+ hashed_project_in_subgroup = create(:project, :repository, namespace: subgroup, name: 'foo2')
+ legacy_project_in_subgroup = create(:project, :legacy_storage, :repository, namespace: subgroup, name: 'foo3')
parent.update(path: 'mygroup_new')
@@ -261,38 +281,18 @@ describe Namespace do
end
describe '#rm_dir', 'callback' do
- let!(:project) { create(:project_empty_repo, namespace: namespace) }
let(:repository_storage_path) { Gitlab.config.repositories.storages.default['path'] }
let(:path_in_dir) { File.join(repository_storage_path, namespace.full_path) }
let(:deleted_path) { namespace.full_path.gsub(namespace.path, "#{namespace.full_path}+#{namespace.id}+deleted") }
let(:deleted_path_in_dir) { File.join(repository_storage_path, deleted_path) }
- it 'renames its dirs when deleted' do
- allow(GitlabShellWorker).to receive(:perform_in)
-
- namespace.destroy
-
- expect(File.exist?(deleted_path_in_dir)).to be(true)
- end
-
- it 'schedules the namespace for deletion' do
- expect(GitlabShellWorker).to receive(:perform_in).with(5.minutes, :rm_namespace, repository_storage_path, deleted_path)
-
- namespace.destroy
- end
-
- context 'in sub-groups' do
- let(:parent) { create(:group, path: 'parent') }
- let(:child) { create(:group, parent: parent, path: 'child') }
- let!(:project) { create(:project_empty_repo, namespace: child) }
- let(:path_in_dir) { File.join(repository_storage_path, 'parent', 'child') }
- let(:deleted_path) { File.join('parent', "child+#{child.id}+deleted") }
- let(:deleted_path_in_dir) { File.join(repository_storage_path, deleted_path) }
+ context 'legacy storage' do
+ let!(:project) { create(:project_empty_repo, :legacy_storage, namespace: namespace) }
it 'renames its dirs when deleted' do
allow(GitlabShellWorker).to receive(:perform_in)
- child.destroy
+ namespace.destroy
expect(File.exist?(deleted_path_in_dir)).to be(true)
end
@@ -300,14 +300,57 @@ describe Namespace do
it 'schedules the namespace for deletion' do
expect(GitlabShellWorker).to receive(:perform_in).with(5.minutes, :rm_namespace, repository_storage_path, deleted_path)
- child.destroy
+ namespace.destroy
+ end
+
+ context 'in sub-groups' do
+ let(:parent) { create(:group, path: 'parent') }
+ let(:child) { create(:group, parent: parent, path: 'child') }
+ let!(:project) { create(:project_empty_repo, :legacy_storage, namespace: child) }
+ let(:path_in_dir) { File.join(repository_storage_path, 'parent', 'child') }
+ let(:deleted_path) { File.join('parent', "child+#{child.id}+deleted") }
+ let(:deleted_path_in_dir) { File.join(repository_storage_path, deleted_path) }
+
+ it 'renames its dirs when deleted' do
+ allow(GitlabShellWorker).to receive(:perform_in)
+
+ child.destroy
+
+ expect(File.exist?(deleted_path_in_dir)).to be(true)
+ end
+
+ it 'schedules the namespace for deletion' do
+ expect(GitlabShellWorker).to receive(:perform_in).with(5.minutes, :rm_namespace, repository_storage_path, deleted_path)
+
+ child.destroy
+ end
+ end
+
+ it 'removes the exports folder' do
+ expect(namespace).to receive(:remove_exports!)
+
+ namespace.destroy
end
end
- it 'removes the exports folder' do
- expect(namespace).to receive(:remove_exports!)
+ context 'hashed storage' do
+ let!(:project) { create(:project_empty_repo, namespace: namespace) }
+
+ it 'has no repositories base directories to remove' do
+ allow(GitlabShellWorker).to receive(:perform_in)
- namespace.destroy
+ expect(File.exist?(path_in_dir)).to be(false)
+
+ namespace.destroy
+
+ expect(File.exist?(deleted_path_in_dir)).to be(false)
+ end
+
+ it 'removes the exports folder' do
+ expect(namespace).to receive(:remove_exports!)
+
+ namespace.destroy
+ end
end
end
@@ -567,32 +610,62 @@ describe Namespace do
end
end
- describe "#allowed_path_by_redirects" do
- let(:namespace1) { create(:namespace, path: 'foo') }
+ describe '#remove_exports' do
+ let(:legacy_project) { create(:project, :with_export, :legacy_storage, namespace: namespace) }
+ let(:hashed_project) { create(:project, :with_export, namespace: namespace) }
+ let(:export_path) { Dir.mktmpdir('namespace_remove_exports_spec') }
+ let(:legacy_export) { legacy_project.export_project_path }
+ let(:hashed_export) { hashed_project.export_project_path }
+
+ it 'removes exports for legacy and hashed projects' do
+ allow(Gitlab::ImportExport).to receive(:storage_path) { export_path }
+
+ expect(File.exist?(legacy_export)).to be_truthy
+ expect(File.exist?(hashed_export)).to be_truthy
+
+ namespace.remove_exports!
+
+ expect(File.exist?(legacy_export)).to be_falsy
+ expect(File.exist?(hashed_export)).to be_falsy
+ end
+ end
- context "when the path has been taken before" do
- before do
- namespace1.path = 'bar'
- namespace1.save!
+ describe '#full_path_was' do
+ context 'when the group has no parent' do
+ it 'should return the path was' do
+ group = create(:group, parent: nil)
+ expect(group.full_path_was).to eq(group.path_was)
end
+ end
+
+ context 'when a parent is assigned to a group with no previous parent' do
+ it 'should return the path was' do
+ group = create(:group, parent: nil)
- it 'should be invalid' do
- namespace2 = build(:group, path: 'foo')
- expect(namespace2).to be_invalid
+ parent = create(:group)
+ group.parent = parent
+
+ expect(group.full_path_was).to eq("#{group.path_was}")
end
+ end
+
+ context 'when a parent is removed from the group' do
+ it 'should return the parent full path' do
+ parent = create(:group)
+ group = create(:group, parent: parent)
+ group.parent = nil
- it 'should return an error on path' do
- namespace2 = build(:group, path: 'foo')
- namespace2.valid?
- expect(namespace2.errors.messages[:path].first).to eq('foo has been taken before. Please use another one')
+ expect(group.full_path_was).to eq("#{parent.full_path}/#{group.path}")
end
end
- context "when the path has not been taken before" do
- it 'should be valid' do
- expect(RedirectRoute.count).to eq(0)
- namespace = build(:namespace)
- expect(namespace).to be_valid
+ context 'when changing parents' do
+ it 'should return the previous parent full path' do
+ parent = create(:group)
+ group = create(:group, parent: parent)
+ new_parent = create(:group)
+ group.parent = new_parent
+ expect(group.full_path_was).to eq("#{parent.full_path}/#{group.path}")
end
end
end
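
The #full_path_was examples above all key off the previous parent rather than the current in-memory one. A sketch matching that behaviour, built on ActiveModel dirty tracking (parent_id_was, path_was); it is not necessarily the verbatim GitLab method:

def full_path_was
  previous_parent = Namespace.find_by(id: parent_id_was)
  return path_was unless previous_parent

  "#{previous_parent.full_path}/#{path_was}"
end
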
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index 3d030927036..c853f707e6d 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -8,7 +8,7 @@ describe Note do
it { is_expected.to belong_to(:noteable).touch(false) }
it { is_expected.to belong_to(:author).class_name('User') }
- it { is_expected.to have_many(:todos).dependent(:destroy) }
+ it { is_expected.to have_many(:todos) }
end
describe 'modules' do
@@ -17,8 +17,6 @@ describe Note do
it { is_expected.to include_module(Participable) }
it { is_expected.to include_module(Mentionable) }
it { is_expected.to include_module(Awardable) }
-
- it { is_expected.to include_module(Gitlab::CurrentSettings) }
end
describe 'validation' do
diff --git a/spec/models/project_auto_devops_spec.rb b/spec/models/project_auto_devops_spec.rb
index 12069575866..296b91a771c 100644
--- a/spec/models/project_auto_devops_spec.rb
+++ b/spec/models/project_auto_devops_spec.rb
@@ -18,7 +18,21 @@ describe ProjectAutoDevops do
context 'when domain is empty' do
let(:auto_devops) { build_stubbed(:project_auto_devops, project: project, domain: '') }
- it { expect(auto_devops).not_to have_domain }
+ context 'when there is an instance domain specified' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:auto_devops_domain).and_return('example.com')
+ end
+
+ it { expect(auto_devops).to have_domain }
+ end
+
+ context 'when there is no instance domain specified' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:auto_devops_domain).and_return(nil)
+ end
+
+ it { expect(auto_devops).not_to have_domain }
+ end
end
end
@@ -29,9 +43,32 @@ describe ProjectAutoDevops do
let(:domain) { 'example.com' }
it 'returns AUTO_DEVOPS_DOMAIN' do
- expect(auto_devops.variables).to include(
- { key: 'AUTO_DEVOPS_DOMAIN', value: 'example.com', public: true })
+ expect(auto_devops.variables).to include(domain_variable)
end
end
+
+ context 'when domain is not defined' do
+ let(:domain) { nil }
+
+ context 'when there is an instance domain specified' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:auto_devops_domain).and_return('example.com')
+ end
+
+ it { expect(auto_devops.variables).to include(domain_variable) }
+ end
+
+ context 'when there is no instance domain specified' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:auto_devops_domain).and_return(nil)
+ end
+
+ it { expect(auto_devops.variables).not_to include(domain_variable) }
+ end
+ end
+
+ def domain_variable
+ { key: 'AUTO_DEVOPS_DOMAIN', value: 'example.com', public: true }
+ end
end
end
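
The added examples make the project-level Auto DevOps domain fall back to the instance-wide auto_devops_domain setting, both for has_domain? and for the AUTO_DEVOPS_DOMAIN variable. A minimal sketch of that fallback; instance_domain is an assumed helper name:

def has_domain?
  domain.present? || instance_domain.present?
end

def variables
  return [] unless has_domain?

  [{ key: 'AUTO_DEVOPS_DOMAIN', value: domain.presence || instance_domain, public: true }]
end

def instance_domain
  Gitlab::CurrentSettings.auto_devops_domain
end
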
diff --git a/spec/models/project_services/kubernetes_service_spec.rb b/spec/models/project_services/kubernetes_service_spec.rb
index 6980ba335b8..622d8844a72 100644
--- a/spec/models/project_services/kubernetes_service_spec.rb
+++ b/spec/models/project_services/kubernetes_service_spec.rb
@@ -408,7 +408,7 @@ describe KubernetesService, :use_clean_rails_memory_store_caching do
context 'if the services is active' do
it 'should return a message' do
- expect(kubernetes_service.deprecation_message).to match(/Your cluster information on this page is still editable/)
+ expect(kubernetes_service.deprecation_message).to match(/Your Kubernetes cluster information on this page is still editable/)
end
end
diff --git a/spec/models/project_services/prometheus_service_spec.rb b/spec/models/project_services/prometheus_service_spec.rb
index bf39e8d7a39..ed17e019d42 100644
--- a/spec/models/project_services/prometheus_service_spec.rb
+++ b/spec/models/project_services/prometheus_service_spec.rb
@@ -13,17 +13,17 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
end
describe 'Validations' do
- context 'when service is active' do
+ context 'when manual_configuration is enabled' do
before do
- subject.active = true
+ subject.manual_configuration = true
end
it { is_expected.to validate_presence_of(:api_url) }
end
- context 'when service is inactive' do
+ context 'when manual configuration is disabled' do
before do
- subject.active = false
+ subject.manual_configuration = false
end
it { is_expected.not_to validate_presence_of(:api_url) }
@@ -31,12 +31,17 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
end
describe '#test' do
+ before do
+ service.manual_configuration = true
+ end
+
let!(:req_stub) { stub_prometheus_request(prometheus_query_url('1'), body: prometheus_value_body('vector')) }
context 'success' do
it 'reads the discovery endpoint' do
+ expect(service.test[:result]).to eq('Checked API endpoint')
expect(service.test[:success]).to be_truthy
- expect(req_stub).to have_been_requested
+ expect(req_stub).to have_been_requested.twice
end
end
@@ -70,6 +75,25 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
end
end
+ describe '#matched_metrics' do
+ let(:matched_metrics_query) { Gitlab::Prometheus::Queries::MatchedMetricsQuery }
+ let(:client) { double(:client, label_values: nil) }
+
+ context 'with valid data' do
+ subject { service.matched_metrics }
+
+ before do
+ allow(service).to receive(:client).and_return(client)
+ synchronous_reactive_cache(service)
+ end
+
+ it 'returns reactive data' do
+ expect(subject[:success]).to be_truthy
+ expect(subject[:data]).to eq([])
+ end
+ end
+ end
+
describe '#deployment_metrics' do
let(:deployment) { build_stubbed(:deployment) }
let(:deployment_query) { Gitlab::Prometheus::Queries::DeploymentQuery }
@@ -83,7 +107,7 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
let(:fake_deployment_time) { 10 }
before do
- stub_reactive_cache(service, prometheus_data, deployment_query, deployment.id)
+ stub_reactive_cache(service, prometheus_data, deployment_query, deployment.environment.id, deployment.id)
end
it 'returns reactive data' do
@@ -96,13 +120,17 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
describe '#calculate_reactive_cache' do
let(:environment) { create(:environment, slug: 'env-slug') }
-
- around do |example|
- Timecop.freeze { example.run }
+ before do
+ service.manual_configuration = true
+ service.active = true
end
subject do
- service.calculate_reactive_cache(environment_query.to_s, environment.id)
+ service.calculate_reactive_cache(environment_query.name, environment.id)
+ end
+
+ around do |example|
+ Timecop.freeze { example.run }
end
context 'when service is inactive' do
@@ -132,4 +160,193 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
end
end
end
+
+ describe '#client' do
+ context 'manual configuration is enabled' do
+ let(:api_url) { 'http://some_url' }
+ before do
+ subject.manual_configuration = true
+ subject.api_url = api_url
+ end
+
+ it 'returns simple rest client from api_url' do
+ expect(subject.client).to be_instance_of(Gitlab::PrometheusClient)
+ expect(subject.client.rest_client.url).to eq(api_url)
+ end
+ end
+
+ context 'manual configuration is disabled' do
+ let!(:cluster_for_all) { create(:cluster, environment_scope: '*', projects: [project]) }
+ let!(:cluster_for_dev) { create(:cluster, environment_scope: 'dev', projects: [project]) }
+
+ let!(:prometheus_for_dev) { create(:clusters_applications_prometheus, :installed, cluster: cluster_for_dev) }
+ let(:proxy_client) { double('proxy_client') }
+
+ before do
+ service.manual_configuration = false
+ end
+
+ context 'with cluster for all environments with prometheus installed' do
+ let!(:prometheus_for_all) { create(:clusters_applications_prometheus, :installed, cluster: cluster_for_all) }
+
+ context 'without environment supplied' do
+ it 'returns client handling all environments' do
+ expect(service).to receive(:client_from_cluster).with(cluster_for_all).and_return(proxy_client).twice
+
+ expect(service.client).to be_instance_of(Gitlab::PrometheusClient)
+ expect(service.client.rest_client).to eq(proxy_client)
+ end
+ end
+
+ context 'with dev environment supplied' do
+ let!(:environment) { create(:environment, project: project, name: 'dev') }
+
+ it 'returns dev cluster client' do
+ expect(service).to receive(:client_from_cluster).with(cluster_for_dev).and_return(proxy_client).twice
+
+ expect(service.client(environment.id)).to be_instance_of(Gitlab::PrometheusClient)
+ expect(service.client(environment.id).rest_client).to eq(proxy_client)
+ end
+ end
+
+ context 'with prod environment supplied' do
+ let!(:environment) { create(:environment, project: project, name: 'prod') }
+
+ it 'returns cluster client for all environments' do
+ expect(service).to receive(:client_from_cluster).with(cluster_for_all).and_return(proxy_client).twice
+
+ expect(service.client(environment.id)).to be_instance_of(Gitlab::PrometheusClient)
+ expect(service.client(environment.id).rest_client).to eq(proxy_client)
+ end
+ end
+ end
+
+ context 'with cluster for all environments without prometheus installed' do
+ context 'without environment supplied' do
+ it 'raises PrometheusError because cluster was not found' do
+ expect { service.client }.to raise_error(Gitlab::PrometheusError, /couldn't find cluster with Prometheus installed/)
+ end
+ end
+
+ context 'with dev environment supplied' do
+ let!(:environment) { create(:environment, project: project, name: 'dev') }
+
+ it 'returns dev cluster client' do
+ expect(service).to receive(:client_from_cluster).with(cluster_for_dev).and_return(proxy_client).twice
+
+ expect(service.client(environment.id)).to be_instance_of(Gitlab::PrometheusClient)
+ expect(service.client(environment.id).rest_client).to eq(proxy_client)
+ end
+ end
+
+ context 'with prod environment supplied' do
+ let!(:environment) { create(:environment, project: project, name: 'prod') }
+
+ it 'raises PrometheusError because cluster was not found' do
+ expect { service.client }.to raise_error(Gitlab::PrometheusError, /couldn't find cluster with Prometheus installed/)
+ end
+ end
+ end
+ end
+ end
+
+ describe '#prometheus_installed?' do
+ context 'clusters with installed prometheus' do
+ let!(:cluster) { create(:cluster, projects: [project]) }
+ let!(:prometheus) { create(:clusters_applications_prometheus, :installed, cluster: cluster) }
+
+ it 'returns true' do
+ expect(service.prometheus_installed?).to be(true)
+ end
+ end
+
+ context 'clusters without prometheus installed' do
+ let(:cluster) { create(:cluster, projects: [project]) }
+ let!(:prometheus) { create(:clusters_applications_prometheus, cluster: cluster) }
+
+ it 'returns false' do
+ expect(service.prometheus_installed?).to be(false)
+ end
+ end
+
+ context 'clusters without prometheus' do
+ let(:cluster) { create(:cluster, projects: [project]) }
+
+ it 'returns false' do
+ expect(service.prometheus_installed?).to be(false)
+ end
+ end
+
+ context 'no clusters' do
+ it 'returns false' do
+ expect(service.prometheus_installed?).to be(false)
+ end
+ end
+ end
+
+ describe '#synchronize_service_state! before_save callback' do
+ context 'no clusters with prometheus are installed' do
+ context 'when service is inactive' do
+ before do
+ service.active = false
+ end
+
+ it 'activates service when manual_configuration is enabled' do
+ expect { service.update!(manual_configuration: true) }.to change { service.active }.from(false).to(true)
+ end
+
+ it 'keeps service inactive when manual_configuration is disabled' do
+ expect { service.update!(manual_configuration: false) }.not_to change { service.active }.from(false)
+ end
+ end
+
+ context 'when service is active' do
+ before do
+ service.active = true
+ end
+
+ it 'keeps the service active when manual_configuration is enabled' do
+ expect { service.update!(manual_configuration: true) }.not_to change { service.active }.from(true)
+ end
+
+ it 'inactivates the service when manual_configuration is disabled' do
+ expect { service.update!(manual_configuration: false) }.to change { service.active }.from(true).to(false)
+ end
+ end
+ end
+
+ context 'with prometheus installed in the cluster' do
+ before do
+ allow(service).to receive(:prometheus_installed?).and_return(true)
+ end
+
+ context 'when service is inactive' do
+ before do
+ service.active = false
+ end
+
+ it 'activates service when manual_configuration is enabled' do
+ expect { service.update!(manual_configuration: true) }.to change { service.active }.from(false).to(true)
+ end
+
+ it 'activates service when manual_configuration is disabled' do
+ expect { service.update!(manual_configuration: false) }.to change { service.active }.from(false).to(true)
+ end
+ end
+
+ context 'when service is active' do
+ before do
+ service.active = true
+ end
+
+ it 'keeps service active when manual_configuration is enabled' do
+ expect { service.update!(manual_configuration: true) }.not_to change { service.active }.from(true)
+ end
+
+ it 'keeps service active when manual_configuration is disabled' do
+ expect { service.update!(manual_configuration: false) }.not_to change { service.active }.from(true)
+ end
+ end
+ end
+ end
end
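
The #prometheus_installed? examples above return true only when one of the project's clusters has the Prometheus application in the installed state. A sketch of such a check; the application_prometheus association name is an assumption:

def prometheus_installed?
  return false unless project

  project.clusters.any? do |cluster|
    cluster.application_prometheus&.installed?
  end
end
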
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 31dcb543cbd..50b8bb7acb3 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -80,6 +80,7 @@ describe Project do
it { is_expected.to have_many(:members_and_requesters) }
it { is_expected.to have_many(:clusters) }
it { is_expected.to have_many(:custom_attributes).class_name('ProjectCustomAttribute') }
+ it { is_expected.to have_many(:lfs_file_locks) }
context 'after initialized' do
it "has a project_feature" do
@@ -117,7 +118,6 @@ describe Project do
it { is_expected.to include_module(Gitlab::ConfigHelper) }
it { is_expected.to include_module(Gitlab::ShellAdapter) }
it { is_expected.to include_module(Gitlab::VisibilityLevel) }
- it { is_expected.to include_module(Gitlab::CurrentSettings) }
it { is_expected.to include_module(Referable) }
it { is_expected.to include_module(Sortable) }
end
@@ -130,7 +130,6 @@ describe Project do
it { is_expected.to validate_length_of(:name).is_at_most(255) }
it { is_expected.to validate_presence_of(:path) }
- it { is_expected.to validate_uniqueness_of(:path).scoped_to(:namespace_id) }
it { is_expected.to validate_length_of(:path).is_at_most(255) }
it { is_expected.to validate_length_of(:description).is_at_most(2000) }
@@ -2072,7 +2071,7 @@ describe Project do
create(:ci_variable, :protected, value: 'protected', project: project)
end
- subject { project.secret_variables_for(ref: 'ref') }
+ subject { project.reload.secret_variables_for(ref: 'ref') }
before do
stub_application_setting(
@@ -2504,6 +2503,52 @@ describe Project do
end
end
+ describe '#remove_exports' do
+ let(:legacy_project) { create(:project, :legacy_storage, :with_export) }
+ let(:project) { create(:project, :with_export) }
+
+ it 'removes the exports directory for the project' do
+ expect(File.exist?(project.export_path)).to be_truthy
+
+ allow(FileUtils).to receive(:rm_rf).and_call_original
+ expect(FileUtils).to receive(:rm_rf).with(project.export_path).and_call_original
+ project.remove_exports
+
+ expect(File.exist?(project.export_path)).to be_falsy
+ end
+
+ it 'is a no-op on legacy projects when there is no namespace' do
+ export_path = legacy_project.export_path
+
+ legacy_project.update_column(:namespace_id, nil)
+
+ expect(FileUtils).not_to receive(:rm_rf).with(export_path)
+
+ legacy_project.remove_exports
+
+ expect(File.exist?(export_path)).to be_truthy
+ end
+
+ it 'runs on hashed storage projects when there is no namespace' do
+ export_path = project.export_path
+
+ project.update_column(:namespace_id, nil)
+
+ allow(FileUtils).to receive(:rm_rf).and_call_original
+ expect(FileUtils).to receive(:rm_rf).with(export_path).and_call_original
+
+ project.remove_exports
+
+ expect(File.exist?(export_path)).to be_falsy
+ end
+
+ it 'is run when the project is destroyed' do
+ expect(project).to receive(:remove_exports).and_call_original
+
+ project.destroy
+ end
+ end
+
describe '#forks_count' do
it 'returns the number of forks' do
project = build(:project)
@@ -2515,7 +2560,7 @@ describe Project do
end
context 'legacy storage' do
- let(:project) { create(:project, :repository) }
+ let(:project) { create(:project, :repository, :legacy_storage) }
let(:gitlab_shell) { Gitlab::Shell.new }
let(:project_storage) { project.send(:storage) }
@@ -2689,6 +2734,8 @@ describe Project do
let(:project) { create(:project, :repository, skip_disk_validation: true) }
let(:gitlab_shell) { Gitlab::Shell.new }
let(:hash) { Digest::SHA2.hexdigest(project.id.to_s) }
+ let(:hashed_prefix) { File.join('@hashed', hash[0..1], hash[2..3]) }
+ let(:hashed_path) { File.join(hashed_prefix, hash) }
before do
stub_application_setting(hashed_storage_enabled: true)
@@ -2714,14 +2761,12 @@ describe Project do
describe '#base_dir' do
it 'returns base_dir based on hash of project id' do
- expect(project.base_dir).to eq("@hashed/#{hash[0..1]}/#{hash[2..3]}")
+ expect(project.base_dir).to eq(hashed_prefix)
end
end
describe '#disk_path' do
it 'returns disk_path based on hash of project id' do
- hashed_path = "@hashed/#{hash[0..1]}/#{hash[2..3]}/#{hash}"
-
expect(project.disk_path).to eq(hashed_path)
end
end
@@ -2730,7 +2775,7 @@ describe Project do
it 'delegates to gitlab_shell to ensure namespace is created' do
allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
- expect(gitlab_shell).to receive(:add_namespace).with(project.repository_storage_path, "@hashed/#{hash[0..1]}/#{hash[2..3]}")
+ expect(gitlab_shell).to receive(:add_namespace).with(project.repository_storage_path, hashed_prefix)
project.ensure_storage_path_exists
end
@@ -2980,18 +3025,40 @@ describe Project do
subject { project.auto_devops_variables }
- context 'when enabled in settings' do
+ context 'when enabled in instance settings' do
before do
stub_application_setting(auto_devops_enabled: true)
end
context 'when domain is empty' do
before do
+ stub_application_setting(auto_devops_domain: nil)
+ end
+
+ it 'variables does not include AUTO_DEVOPS_DOMAIN' do
+ is_expected.not_to include(domain_variable)
+ end
+ end
+
+ context 'when domain is configured' do
+ before do
+ stub_application_setting(auto_devops_domain: 'example.com')
+ end
+
+ it 'variables includes AUTO_DEVOPS_DOMAIN' do
+ is_expected.to include(domain_variable)
+ end
+ end
+ end
+
+ context 'when explicitly enabled' do
+ context 'when domain is empty' do
+ before do
create(:project_auto_devops, project: project, domain: nil)
end
- it 'variables are empty' do
- is_expected.to be_empty
+ it 'variables does not include AUTO_DEVOPS_DOMAIN' do
+ is_expected.not_to include(domain_variable)
end
end
@@ -3000,11 +3067,15 @@ describe Project do
create(:project_auto_devops, project: project, domain: 'example.com')
end
- it "variables are not empty" do
- is_expected.not_to be_empty
+ it 'variables includes AUTO_DEVOPS_DOMAIN' do
+ is_expected.to include(domain_variable)
end
end
end
+
+ def domain_variable
+ { key: 'AUTO_DEVOPS_DOMAIN', value: 'example.com', public: true }
+ end
end
describe '#latest_successful_builds_for' do
diff --git a/spec/models/project_wiki_spec.rb b/spec/models/project_wiki_spec.rb
index 929086305ba..1e7671476f1 100644
--- a/spec/models/project_wiki_spec.rb
+++ b/spec/models/project_wiki_spec.rb
@@ -127,7 +127,7 @@ describe ProjectWiki do
end
after do
- destroy_page(subject.pages.first.page)
+ subject.pages.each { |page| destroy_page(page.page) }
end
it "returns the latest version of the page if it exists" do
@@ -148,6 +148,17 @@ describe ProjectWiki do
page = subject.find_page("index page")
expect(page).to be_a WikiPage
end
+
+ context 'pages with multibyte-character title' do
+ before do
+ create_page("autre pagé", "C'est un génial Gollum Wiki")
+ end
+
+ it "can find a page by slug" do
+ page = subject.find_page("autre pagé")
+ expect(page.title).to eq("autre pagé")
+ end
+ end
end
context 'when Gitaly wiki_find_page is enabled' do
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 1102b1c9006..a6d48e369ac 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -36,26 +36,49 @@ describe Repository do
end
describe '#branch_names_contains' do
- subject { repository.branch_names_contains(sample_commit.id) }
+ shared_examples '#branch_names_contains' do
+ set(:project) { create(:project, :repository) }
+ let(:repository) { project.repository }
- it { is_expected.to include('master') }
- it { is_expected.not_to include('feature') }
- it { is_expected.not_to include('fix') }
+ subject { repository.branch_names_contains(sample_commit.id) }
- describe 'when storage is broken', :broken_storage do
- it 'should raise a storage error' do
- expect_to_raise_storage_error do
- broken_repository.branch_names_contains(sample_commit.id)
+ it { is_expected.to include('master') }
+ it { is_expected.not_to include('feature') }
+ it { is_expected.not_to include('fix') }
+
+ describe 'when storage is broken', :broken_storage do
+ it 'should raise a storage error' do
+ expect_to_raise_storage_error do
+ broken_repository.branch_names_contains(sample_commit.id)
+ end
end
end
end
+
+ context 'when gitaly is enabled' do
+ it_behaves_like '#branch_names_contains'
+ end
+
+ context 'when gitaly is disabled', :skip_gitaly_mock do
+ it_behaves_like '#branch_names_contains'
+ end
end
describe '#tag_names_contains' do
- subject { repository.tag_names_contains(sample_commit.id) }
+ shared_examples '#tag_names_contains' do
+ subject { repository.tag_names_contains(sample_commit.id) }
+
+ it { is_expected.to include('v1.1.0') }
+ it { is_expected.not_to include('v1.0.0') }
+ end
- it { is_expected.to include('v1.1.0') }
- it { is_expected.not_to include('v1.0.0') }
+ context 'when gitaly is enabled' do
+ it_behaves_like '#tag_names_contains'
+ end
+
+ context 'when gitaly is disabled', :skip_gitaly_mock do
+ it_behaves_like '#tag_names_contains'
+ end
end
describe 'tags_sorted_by' do
@@ -239,6 +262,28 @@ describe Repository do
end
end
+ describe '#new_commits' do
+ let(:new_refs) do
+ double(:git_rev_list, new_refs: %w[
+ c1acaa58bbcbc3eafe538cb8274ba387047b69f8
+ 5937ac0a7beb003549fc5fd26fc247adbce4a52e
+ ])
+ end
+
+ it 'delegates to Gitlab::Git::RevList' do
+ expect(Gitlab::Git::RevList).to receive(:new).with(
+ repository.raw,
+ newrev: 'aaaabbbbccccddddeeeeffffgggghhhhiiiijjjj').and_return(new_refs)
+
+ commits = repository.new_commits('aaaabbbbccccddddeeeeffffgggghhhhiiiijjjj')
+
+ expect(commits).to eq([
+ repository.commit('c1acaa58bbcbc3eafe538cb8274ba387047b69f8'),
+ repository.commit('5937ac0a7beb003549fc5fd26fc247adbce4a52e')
+ ])
+ end
+ end
+
describe '#commits_by' do
set(:project) { create(:project, :repository) }
@@ -959,19 +1004,19 @@ describe Repository do
end
describe '#find_branch' do
- it 'loads a branch with a fresh repo' do
- expect(Gitlab::Git::Repository).to receive(:new).twice.and_call_original
+ context 'fresh_repo is true' do
+ it 'delegates the call to raw_repository' do
+ expect(repository.raw_repository).to receive(:find_branch).with('master', true)
- 2.times do
- expect(repository.find_branch('feature')).not_to be_nil
+ repository.find_branch('master', fresh_repo: true)
end
end
- it 'loads a branch with a cached repo' do
- expect(Gitlab::Git::Repository).to receive(:new).once.and_call_original
+ context 'fresh_repo is false' do
+ it 'delegates the call to raw_repository' do
+ expect(repository.raw_repository).to receive(:find_branch).with('master', false)
- 2.times do
- expect(repository.find_branch('feature', fresh_repo: false)).not_to be_nil
+ repository.find_branch('master', fresh_repo: false)
end
end
end
diff --git a/spec/models/route_spec.rb b/spec/models/route_spec.rb
index 88f54fd10e5..dfac82b327a 100644
--- a/spec/models/route_spec.rb
+++ b/spec/models/route_spec.rb
@@ -27,7 +27,7 @@ describe Route do
redirect.save!(validate: false)
expect(new_route.valid?).to be_falsey
- expect(new_route.errors.first[1]).to eq('foo has been taken before. Please use another one')
+ expect(new_route.errors.first[1]).to eq('has been taken before')
end
end
@@ -49,7 +49,7 @@ describe Route do
redirect.save!(validate: false)
expect(route.valid?).to be_falsey
- expect(route.errors.first[1]).to eq('foo has been taken before. Please use another one')
+ expect(route.errors.first[1]).to eq('has been taken before')
end
end
@@ -368,7 +368,7 @@ describe Route do
group2.path = 'foo'
group2.valid?
- expect(group2.errors["route.path"].first).to eq('foo has been taken before. Please use another one')
+ expect(group2.errors[:path]).to eq(['has been taken before'])
end
end
diff --git a/spec/models/todo_spec.rb b/spec/models/todo_spec.rb
index 3e8f3848eca..bd498269798 100644
--- a/spec/models/todo_spec.rb
+++ b/spec/models/todo_spec.rb
@@ -20,6 +20,7 @@ describe Todo do
it { is_expected.to validate_presence_of(:action) }
it { is_expected.to validate_presence_of(:target_type) }
it { is_expected.to validate_presence_of(:user) }
+ it { is_expected.to validate_presence_of(:author) }
context 'for commits' do
subject { described_class.new(target_type: 'Commit') }
diff --git a/spec/models/upload_spec.rb b/spec/models/upload_spec.rb
index 345382ea8c7..36b8e5d304f 100644
--- a/spec/models/upload_spec.rb
+++ b/spec/models/upload_spec.rb
@@ -43,49 +43,16 @@ describe Upload do
.to(a_string_matching(/\A\h{64}\z/))
end
end
- end
-
- describe '.remove_path' do
- it 'removes all records at the given path' do
- described_class.create!(
- size: File.size(__FILE__),
- path: __FILE__,
- model: build_stubbed(:user),
- uploader: 'AvatarUploader'
- )
-
- expect { described_class.remove_path(__FILE__) }
- .to change { described_class.count }.from(1).to(0)
- end
- end
- describe '.record' do
- let(:fake_uploader) do
- double(
- file: double(size: 12_345),
- relative_path: 'foo/bar.jpg',
- model: build_stubbed(:user),
- class: 'AvatarUploader'
- )
- end
-
- it 'removes existing paths before creation' do
- expect(described_class).to receive(:remove_path)
- .with(fake_uploader.relative_path)
-
- described_class.record(fake_uploader)
- end
+ describe 'after_destroy' do
+ context 'uploader is FileUploader-based' do
+ subject { create(:upload, :issuable_upload) }
- it 'creates a new record and assigns size, path, model, and uploader' do
- upload = described_class.record(fake_uploader)
+ it 'calls delete_file!' do
+ is_expected.to receive(:delete_file!)
- aggregate_failures do
- expect(upload).to be_persisted
- expect(upload.size).to eq fake_uploader.file.size
- expect(upload.path).to eq fake_uploader.relative_path
- expect(upload.model_id).to eq fake_uploader.model.id
- expect(upload.model_type).to eq fake_uploader.model.class.to_s
- expect(upload.uploader).to eq fake_uploader.class
+ subject.destroy
+ end
end
end
end
@@ -111,27 +78,27 @@ describe Upload do
end
end
- describe '#calculate_checksum' do
- it 'calculates the SHA256 sum' do
- upload = described_class.new(
- path: __FILE__,
- size: described_class::CHECKSUM_THRESHOLD - 1.megabyte
- )
+ describe '#calculate_checksum!' do
+ let(:upload) do
+ described_class.new(path: __FILE__,
+ size: described_class::CHECKSUM_THRESHOLD - 1.megabyte)
+ end
+
+ it 'sets `checksum` to SHA256 sum of the file' do
expected = Digest::SHA256.file(__FILE__).hexdigest
- expect { upload.calculate_checksum }
+ expect { upload.calculate_checksum! }
.to change { upload.checksum }.from(nil).to(expected)
end
- it 'returns nil for a non-existant file' do
- upload = described_class.new(
- path: __FILE__,
- size: described_class::CHECKSUM_THRESHOLD - 1.megabyte
- )
-
+ it 'sets `checksum` to nil for a non-existent file' do
expect(upload).to receive(:exist?).and_return(false)
- expect(upload.calculate_checksum).to be_nil
+ checksum = Digest::SHA256.file(__FILE__).hexdigest
+ upload.checksum = checksum
+
+ expect { upload.calculate_checksum! }
+ .to change { upload.checksum }.from(checksum).to(nil)
end
end
@@ -148,4 +115,10 @@ describe Upload do
expect(upload).not_to exist
end
end
+
+ describe "#uploader_context" do
+ subject { create(:upload, :issuable_upload, secret: 'secret', filename: 'file.txt') }
+
+ it { expect(subject.uploader_context).to match(a_hash_including(secret: 'secret', identifier: 'file.txt')) }
+ end
end
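A rough sketch of the Upload behaviour these examples pin down: calculate_checksum! always resets the stored checksum and only recomputes it when the file exists (the exist?/absolute_path helpers are assumed; size-threshold handling is omitted here):

    # Sketch: reset the checksum, then recompute it only for files that exist.
    def calculate_checksum!
      self.checksum = nil
      return unless exist?

      self.checksum = Digest::SHA256.file(absolute_path).hexdigest
    end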
diff --git a/spec/models/user_callout_spec.rb b/spec/models/user_callout_spec.rb
new file mode 100644
index 00000000000..64ba17c81fe
--- /dev/null
+++ b/spec/models/user_callout_spec.rb
@@ -0,0 +1,16 @@
+require 'rails_helper'
+
+describe UserCallout do
+ let!(:callout) { create(:user_callout) }
+
+ describe 'relationships' do
+ it { is_expected.to belong_to(:user) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:user) }
+
+ it { is_expected.to validate_presence_of(:feature_name) }
+ it { is_expected.to validate_uniqueness_of(:feature_name).scoped_to(:user_id).ignoring_case_sensitivity }
+ end
+end
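The new spec file implies a very small model; a minimal sketch consistent with those examples (column names assumed from the factory, and the real class may carry more, such as an enum of feature names):

    # Minimal sketch of the model implied by the spec above.
    class UserCallout < ActiveRecord::Base
      belongs_to :user

      validates :user, presence: true
      validates :feature_name,
        presence: true,
        uniqueness: { scope: :user_id }
    end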
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 594f23718da..76a6aef39cc 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -1,14 +1,12 @@
require 'spec_helper'
describe User do
- include Gitlab::CurrentSettings
include ProjectForksHelper
describe 'modules' do
subject { described_class }
it { is_expected.to include_module(Gitlab::ConfigHelper) }
- it { is_expected.to include_module(Gitlab::CurrentSettings) }
it { is_expected.to include_module(Referable) }
it { is_expected.to include_module(Sortable) }
it { is_expected.to include_module(TokenAuthenticatable) }
@@ -35,7 +33,7 @@ describe User do
it { is_expected.to have_many(:merge_requests).dependent(:destroy) }
it { is_expected.to have_many(:identities).dependent(:destroy) }
it { is_expected.to have_many(:spam_logs).dependent(:destroy) }
- it { is_expected.to have_many(:todos).dependent(:destroy) }
+ it { is_expected.to have_many(:todos) }
it { is_expected.to have_many(:award_emoji).dependent(:destroy) }
it { is_expected.to have_many(:triggers).dependent(:destroy) }
it { is_expected.to have_many(:builds).dependent(:nullify) }
@@ -103,7 +101,7 @@ describe User do
user = build(:user, username: 'dashboard')
expect(user).not_to be_valid
- expect(user.errors.values).to eq [['dashboard is a reserved name']]
+ expect(user.errors.messages[:username]).to eq ['dashboard is a reserved name']
end
it 'allows child names' do
@@ -118,12 +116,6 @@ describe User do
expect(user).to be_valid
end
- it 'validates uniqueness' do
- user = build(:user)
-
- expect(user).to validate_uniqueness_of(:username).case_insensitive
- end
-
context 'when username is changed' do
let(:user) { build_stubbed(:user, username: 'old_path', namespace: build_stubbed(:namespace)) }
@@ -134,6 +126,35 @@ describe User do
expect(user.errors.messages[:username].first).to match('cannot be changed if a personal project has container registry tags')
end
end
+
+ context 'when the username was used by another user before' do
+ let(:username) { 'foo' }
+ let!(:other_user) { create(:user, username: username) }
+
+ before do
+ other_user.username = 'bar'
+ other_user.save!
+ end
+
+ it 'is invalid' do
+ user = build(:user, username: username)
+
+ expect(user).not_to be_valid
+ expect(user.errors.full_messages).to eq(['Username has been taken before'])
+ end
+ end
+
+ context 'when the username is in use by another user' do
+ let(:username) { 'foo' }
+ let!(:other_user) { create(:user, username: username) }
+
+ it 'is invalid' do
+ user = build(:user, username: username)
+
+ expect(user).not_to be_valid
+ expect(user.errors.full_messages).to eq(['Username has already been taken'])
+ end
+ end
end
it 'has a DB-level NOT NULL constraint on projects_limit' do
@@ -560,7 +581,7 @@ describe User do
stub_config_setting(default_can_create_group: true)
expect { user.update_attributes(external: false) }.to change { user.can_create_group }.to(true)
- .and change { user.projects_limit }.to(current_application_settings.default_projects_limit)
+ .and change { user.projects_limit }.to(Gitlab::CurrentSettings.default_projects_limit)
end
end
@@ -826,7 +847,7 @@ describe User do
end
end
- context 'when current_application_settings.user_default_external is true' do
+ context 'when Gitlab::CurrentSettings.user_default_external is true' do
before do
stub_application_setting(user_default_external: true)
end
@@ -1435,28 +1456,34 @@ describe User do
describe '#sort' do
before do
described_class.delete_all
- @user = create :user, created_at: Date.today, last_sign_in_at: Date.today, name: 'Alpha'
- @user1 = create :user, created_at: Date.today - 1, last_sign_in_at: Date.today - 1, name: 'Omega'
- @user2 = create :user, created_at: Date.today - 2, last_sign_in_at: nil, name: 'Beta'
+ @user = create :user, created_at: Date.today, current_sign_in_at: Date.today, name: 'Alpha'
+ @user1 = create :user, created_at: Date.today - 1, current_sign_in_at: Date.today - 1, name: 'Omega'
+ @user2 = create :user, created_at: Date.today - 2, name: 'Beta'
end
context 'when sort by recent_sign_in' do
- it 'sorts users by the recent sign-in time' do
- expect(described_class.sort('recent_sign_in').first).to eq(@user)
+ let(:users) { described_class.sort('recent_sign_in') }
+
+ it 'sorts users by recent sign-in time' do
+ expect(users.first).to eq(@user)
+ expect(users.second).to eq(@user1)
end
it 'pushes users who never signed in to the end' do
- expect(described_class.sort('recent_sign_in').third).to eq(@user2)
+ expect(users.third).to eq(@user2)
end
end
context 'when sort by oldest_sign_in' do
+ let(:users) { described_class.sort('oldest_sign_in') }
+
it 'sorts users by the oldest sign-in time' do
- expect(described_class.sort('oldest_sign_in').first).to eq(@user1)
+ expect(users.first).to eq(@user1)
+ expect(users.second).to eq(@user)
end
it 'pushes users who never signed in to the end' do
- expect(described_class.sort('oldest_sign_in').third).to eq(@user2)
+ expect(users.third).to eq(@user2)
end
end
@@ -1559,14 +1586,37 @@ describe User do
describe '#authorized_groups' do
let!(:user) { create(:user) }
let!(:private_group) { create(:group) }
+ let!(:child_group) { create(:group, parent: private_group) }
+
+ let!(:project_group) { create(:group) }
+ let!(:project) { create(:project, group: project_group) }
before do
private_group.add_user(user, Gitlab::Access::MASTER)
+ project.add_master(user)
end
subject { user.authorized_groups }
- it { is_expected.to eq([private_group]) }
+ it { is_expected.to contain_exactly private_group, project_group }
+ end
+
+ describe '#membership_groups' do
+ let!(:user) { create(:user) }
+ let!(:parent_group) { create(:group) }
+ let!(:child_group) { create(:group, parent: parent_group) }
+
+ before do
+ parent_group.add_user(user, Gitlab::Access::MASTER)
+ end
+
+ subject { user.membership_groups }
+
+ if Group.supports_nested_groups?
+ it { is_expected.to contain_exactly parent_group, child_group }
+ else
+ it { is_expected.to contain_exactly parent_group }
+ end
end
describe '#authorized_projects', :delete do
@@ -2266,17 +2316,17 @@ describe User do
end
context 'when there is a validation error (namespace name taken) while updating namespace' do
- let!(:conflicting_namespace) { create(:group, name: new_username, path: 'quz') }
+ let!(:conflicting_namespace) { create(:group, path: new_username) }
it 'causes the user save to fail' do
expect(user.update_attributes(username: new_username)).to be_falsey
- expect(user.namespace.errors.messages[:name].first).to eq('has already been taken')
+ expect(user.namespace.errors.messages[:path].first).to eq('has already been taken')
end
it 'adds the namespace errors to the user' do
user.update_attributes(username: new_username)
- expect(user.errors.full_messages.first).to eq('Namespace name has already been taken')
+ expect(user.errors.full_messages.first).to eq('Username has already been taken')
end
end
end
@@ -2619,7 +2669,7 @@ describe User do
it 'should raise an ActiveRecord::RecordInvalid exception' do
user2 = build(:user, username: 'foo')
- expect { user2.save! }.to raise_error(ActiveRecord::RecordInvalid, /Path foo has been taken before/)
+ expect { user2.save! }.to raise_error(ActiveRecord::RecordInvalid, /Username has been taken before/)
end
end
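The #membership_groups examples read as the user's direct membership groups expanded to their descendants where nested groups are supported; a hedged sketch of that idea (the body is assumed, not taken from the change):

    # Sketch: direct membership groups plus their descendants. On databases
    # without nested-group support the expansion degrades to the base set,
    # which is what the conditional expectations above allow for.
    def membership_groups
      Gitlab::GroupHierarchy.new(groups).base_and_descendants
    end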
diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb
index 9840afe6c4e..b2b7721674c 100644
--- a/spec/models/wiki_page_spec.rb
+++ b/spec/models/wiki_page_spec.rb
@@ -188,50 +188,181 @@ describe WikiPage do
end
end
- describe "#update" do
- before do
- create_page("Update", "content")
- @page = wiki.find_page("Update")
+ describe '#create' do
+ shared_examples 'create method' do
+ context 'with valid attributes' do
+ it 'raises an error if a page with the same path already exists' do
+ create_page('New Page', 'content')
+ create_page('foo/bar', 'content')
+ expect { create_page('New Page', 'other content') }.to raise_error Gitlab::Git::Wiki::DuplicatePageError
+ expect { create_page('foo/bar', 'other content') }.to raise_error Gitlab::Git::Wiki::DuplicatePageError
+
+ destroy_page('New Page')
+ destroy_page('bar', 'foo')
+ end
+
+ it 'removes a leading / from the title' do
+ create_page('/New Page', 'content')
+
+ expect(wiki.find_page('New Page')).not_to be_nil
+
+ destroy_page('New Page')
+ end
+ end
end
- after do
- destroy_page(@page.title)
+ context 'when Gitaly is enabled' do
+ it_behaves_like 'create method'
end
- context "with valid attributes" do
- it "updates the content of the page" do
- new_content = "new content"
+ context 'when Gitaly is disabled', :skip_gitaly_mock do
+ it_behaves_like 'create method'
+ end
+ end
- @page.update(content: new_content)
+ describe "#update" do
+ shared_examples 'update method' do
+ before do
+ create_page("Update", "content")
@page = wiki.find_page("Update")
+ end
- expect(@page.content).to eq("new content")
+ after do
+ destroy_page(@page.title, @page.directory)
end
- it "updates the title of the page" do
- new_title = "Index v.1.2.4"
+ context "with valid attributes" do
+ it "updates the content of the page" do
+ new_content = "new content"
+
+ @page.update(content: new_content)
+ @page = wiki.find_page("Update")
+
+ expect(@page.content).to eq("new content")
+ end
- @page.update(title: new_title)
- @page = wiki.find_page(new_title)
+ it "updates the title of the page" do
+ new_title = "Index v.1.2.4"
- expect(@page.title).to eq(new_title)
+ @page.update(title: new_title)
+ @page = wiki.find_page(new_title)
+
+ expect(@page.title).to eq(new_title)
+ end
+
+ it "returns true" do
+ expect(@page.update(content: "more content")).to be_truthy
+ end
end
- it "returns true" do
- expect(@page.update(content: "more content")).to be_truthy
+ context 'with same last commit sha' do
+ it 'returns true' do
+ expect(@page.update(content: 'more content', last_commit_sha: @page.last_commit_sha)).to be_truthy
+ end
end
- end
- context 'with same last commit sha' do
- it 'returns true' do
- expect(@page.update(content: 'more content', last_commit_sha: @page.last_commit_sha)).to be_truthy
+ context 'with different last commit sha' do
+ it 'raises exception' do
+ expect { @page.update(content: 'more content', last_commit_sha: 'xxx') }.to raise_error(WikiPage::PageChangedError)
+ end
end
- end
- context 'with different last commit sha' do
- it 'raises exception' do
- expect { @page.update(content: 'more content', last_commit_sha: 'xxx') }.to raise_error(WikiPage::PageChangedError)
+ context 'when renaming a page' do
+ it 'raises an error if the page already exists' do
+ create_page('Existing Page', 'content')
+
+ expect { @page.update(title: 'Existing Page', content: 'new_content') }.to raise_error(WikiPage::PageRenameError)
+ expect(@page.title).to eq 'Update'
+ expect(@page.content).to eq 'new_content'
+
+ destroy_page('Existing Page')
+ end
+
+ it 'updates the content and renames the file' do
+ new_title = 'Renamed Page'
+ new_content = 'updated content'
+
+ expect(@page.update(title: new_title, content: new_content)).to be_truthy
+
+ @page = wiki.find_page(new_title)
+
+ expect(@page).not_to be_nil
+ expect(@page.content).to eq new_content
+ end
+ end
+
+ context 'when moving a page' do
+ it 'raises an error if the page already exists' do
+ create_page('foo/Existing Page', 'content')
+
+ expect { @page.update(title: 'foo/Existing Page', content: 'new_content') }.to raise_error(WikiPage::PageRenameError)
+ expect(@page.title).to eq 'Update'
+ expect(@page.content).to eq 'new_content'
+
+ destroy_page('Existing Page', 'foo')
+ end
+
+ it 'updates the content and moves the file' do
+ new_title = 'foo/Other Page'
+ new_content = 'new_content'
+
+ expect(@page.update(title: new_title, content: new_content)).to be_truthy
+
+ page = wiki.find_page(new_title)
+
+ expect(page).not_to be_nil
+ expect(page.content).to eq new_content
+ end
+
+ context 'in subdir' do
+ before do
+ create_page('foo/Existing Page', 'content')
+ @page = wiki.find_page('foo/Existing Page')
+ end
+
+ it 'moves the page to the root folder if the title is preceded by /', :skip_gitaly_mock do
+ expect(@page.slug).to eq 'foo/Existing-Page'
+ expect(@page.update(title: '/Existing Page', content: 'new_content')).to be_truthy
+ expect(@page.slug).to eq 'Existing-Page'
+ end
+
+ it 'does nothing if it has the same title' do
+ original_path = @page.slug
+
+ expect(@page.update(title: 'Existing Page', content: 'new_content')).to be_truthy
+ expect(@page.slug).to eq original_path
+ end
+ end
+
+ context 'in root dir' do
+ it 'does nothing if the title is preceded by /' do
+ original_path = @page.slug
+
+ expect(@page.update(title: '/Update', content: 'new_content')).to be_truthy
+ expect(@page.slug).to eq original_path
+ end
+ end
end
+
+ context "with invalid attributes" do
+ it 'aborts update if title blank' do
+ expect(@page.update(title: '', content: 'new_content')).to be_falsey
+ expect(@page.content).to eq 'new_content'
+
+ page = wiki.find_page('Update')
+ expect(page.content).to eq 'content'
+
+ @page.title = 'Update'
+ end
+ end
+ end
+
+ context 'when Gitaly is enabled' do
+ it_behaves_like 'update method'
+ end
+
+ context 'when Gitaly is disabled', :skip_gitaly_mock do
+ it_behaves_like 'update method'
end
end
@@ -252,18 +383,34 @@ describe WikiPage do
end
describe "#versions" do
- before do
- create_page("Update", "content")
- @page = wiki.find_page("Update")
+ shared_examples 'wiki page versions' do
+ let(:page) { wiki.find_page("Update") }
+
+ before do
+ create_page("Update", "content")
+ end
+
+ after do
+ destroy_page("Update")
+ end
+
+ it "returns an array of all commits for the page" do
+ 3.times { |i| page.update(content: "content #{i}") }
+
+ expect(page.versions.count).to eq(4)
+ end
+
+ it 'returns instances of WikiPageVersion' do
+ expect(page.versions).to all(be_a(Gitlab::Git::WikiPageVersion))
+ end
end
- after do
- destroy_page("Update")
+ context 'when Gitaly is enabled' do
+ it_behaves_like 'wiki page versions'
end
- it "returns an array of all commits for the page" do
- 3.times { |i| @page.update(content: "content #{i}") }
- expect(@page.versions.count).to eq(4)
+ context 'when Gitaly is disabled', :disable_gitaly do
+ it_behaves_like 'wiki page versions'
end
end
@@ -421,8 +568,8 @@ describe WikiPage do
wiki.wiki.write_page(name, :markdown, content, commit_details)
end
- def destroy_page(title)
- page = wiki.wiki.page(title: title)
+ def destroy_page(title, dir = '')
+ page = wiki.wiki.page(title: title, dir: dir)
wiki.delete_page(page, "test commit")
end