author     Grzegorz Bizon <grzesiek.bizon@gmail.com>  2018-05-29 17:24:50 +0200
committer  Grzegorz Bizon <grzesiek.bizon@gmail.com>  2018-05-29 17:24:50 +0200
commit     a2f6aa96a9f44a0c1880871c2d5a7b8638d8b3cb (patch)
tree       5c7d8a8befe6a362081f7e3f1cd468874f5bd792 /spec/models
parent     162cc55e49e155d64ee862dfebefd97cec7d272c (diff)
parent     58c50d40524e0b0c23d4c5d7782583e3742047ba (diff)
Merge branch 'master' into backstage/gb/use-persisted-stages-to-improve-pipelines-table
* master: (583 commits)
Diffstat (limited to 'spec/models')
-rw-r--r--  spec/models/appearance_spec.rb                            |  27
-rw-r--r--  spec/models/application_setting_spec.rb                   |  30
-rw-r--r--  spec/models/ci/build_spec.rb                              |  14
-rw-r--r--  spec/models/ci/pipeline_spec.rb                           |  28
-rw-r--r--  spec/models/clusters/applications/prometheus_spec.rb      |   2
-rw-r--r--  spec/models/concerns/cacheable_attributes_spec.rb         | 153
-rw-r--r--  spec/models/concerns/discussion_on_diff_spec.rb           |   2
-rw-r--r--  spec/models/concerns/redis_cacheable_spec.rb              |  83
-rw-r--r--  spec/models/concerns/resolvable_note_spec.rb              |   8
-rw-r--r--  spec/models/diff_note_spec.rb                             |  62
-rw-r--r--  spec/models/internal_id_spec.rb                           |  37
-rw-r--r--  spec/models/merge_request_spec.rb                         | 138
-rw-r--r--  spec/models/note_diff_file_spec.rb                        |  11
-rw-r--r--  spec/models/project_services/gemnasium_service_spec.rb    |  33
-rw-r--r--  spec/models/project_spec.rb                               |  28
-rw-r--r--  spec/models/repository_spec.rb                            |  14
-rw-r--r--  spec/models/user_spec.rb                                  |  88
17 files changed, 602 insertions, 156 deletions
diff --git a/spec/models/appearance_spec.rb b/spec/models/appearance_spec.rb
index 5489c17bd82..77b07cf1ac9 100644
--- a/spec/models/appearance_spec.rb
+++ b/spec/models/appearance_spec.rb
@@ -3,34 +3,11 @@ require 'rails_helper'
describe Appearance do
subject { build(:appearance) }
- it { is_expected.to be_valid }
+ it { include(CacheableAttributes) }
+ it { expect(described_class.current_without_cache).to eq(described_class.first) }
it { is_expected.to have_many(:uploads) }
- describe '.current', :use_clean_rails_memory_store_caching do
- let!(:appearance) { create(:appearance) }
-
- it 'returns the current appearance row' do
- expect(described_class.current).to eq(appearance)
- end
-
- it 'caches the result' do
- expect(described_class).to receive(:first).once
-
- 2.times { described_class.current }
- end
- end
-
- describe '#flush_redis_cache' do
- it 'flushes the cache in Redis' do
- appearance = create(:appearance)
-
- expect(Rails.cache).to receive(:delete).with(described_class::CACHE_KEY)
-
- appearance.flush_redis_cache
- end
- end
-
describe '#single_appearance_row' do
it 'adds an error when more than 1 row exists' do
create(:appearance)
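
The caching examples removed here are now covered by the shared CacheableAttributes spec added later in this diff. For orientation, a minimal sketch (not the actual app/models/appearance.rb) of how Appearance could opt into the concern while keeping the first-row semantics asserted above:

class Appearance < ActiveRecord::Base
  include CacheableAttributes

  # The new example expects the uncached lookup to use the first row
  # rather than the concern's default of the last row.
  def self.current_without_cache
    first
  end
end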
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 10d6109cae7..7e47043a1cb 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -3,6 +3,9 @@ require 'spec_helper'
describe ApplicationSetting do
let(:setting) { described_class.create_from_defaults }
+ it { include(CacheableAttributes) }
+ it { expect(described_class.current_without_cache).to eq(described_class.last) }
+
it { expect(setting).to be_valid }
it { expect(setting.uuid).to be_present }
it { expect(setting).to have_db_column(:auto_devops_enabled) }
@@ -318,33 +321,6 @@ describe ApplicationSetting do
end
end
- describe '.current' do
- context 'redis unavailable' do
- it 'returns an ApplicationSetting' do
- allow(Rails.cache).to receive(:fetch).and_call_original
- allow(described_class).to receive(:last).and_return(:last)
- expect(Rails.cache).to receive(:fetch).with(ApplicationSetting::CACHE_KEY).and_raise(ArgumentError)
-
- expect(described_class.current).to eq(:last)
- end
- end
-
- context 'when an ApplicationSetting is not yet present' do
- it 'does not cache nil object' do
- # when missing settings a nil object is returned, but not cached
- allow(described_class).to receive(:last).and_return(nil).twice
- expect(described_class.current).to be_nil
-
- # when the settings are set the method returns a valid object
- allow(described_class).to receive(:last).and_return(:last)
- expect(described_class.current).to eq(:last)
-
- # subsequent calls get everything from cache
- expect(described_class.current).to eq(:last)
- end
- end
- end
-
context 'restrict creating duplicates' do
before do
described_class.create_from_defaults
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 7d8bddbcedb..77179028ede 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -629,6 +629,14 @@ describe Ci::Build do
it { is_expected.to eq('review/host') }
end
+
+ context 'when using persisted variables' do
+ let(:build) do
+ create(:ci_build, environment: 'review/x$CI_BUILD_ID')
+ end
+
+ it { is_expected.to eq('review/x') }
+ end
end
describe '#starts_environment?' do
@@ -1525,6 +1533,7 @@ describe Ci::Build do
let(:container_registry_enabled) { false }
let(:predefined_variables) do
[
+ { key: 'CI_PIPELINE_ID', value: pipeline.id.to_s, public: true },
{ key: 'CI_JOB_ID', value: build.id.to_s, public: true },
{ key: 'CI_JOB_TOKEN', value: build.token, public: false },
{ key: 'CI_BUILD_ID', value: build.id.to_s, public: true },
@@ -1537,7 +1546,7 @@ describe Ci::Build do
{ key: 'GITLAB_FEATURES', value: project.licensed_features.join(','), public: true },
{ key: 'CI_SERVER_NAME', value: 'GitLab', public: true },
{ key: 'CI_SERVER_VERSION', value: Gitlab::VERSION, public: true },
- { key: 'CI_SERVER_REVISION', value: Gitlab::REVISION, public: true },
+ { key: 'CI_SERVER_REVISION', value: Gitlab.revision, public: true },
{ key: 'CI_JOB_NAME', value: 'test', public: true },
{ key: 'CI_JOB_STAGE', value: 'test', public: true },
{ key: 'CI_COMMIT_SHA', value: build.sha, public: true },
@@ -1556,7 +1565,6 @@ describe Ci::Build do
{ key: 'CI_PROJECT_NAMESPACE', value: project.namespace.full_path, public: true },
{ key: 'CI_PROJECT_URL', value: project.web_url, public: true },
{ key: 'CI_PROJECT_VISIBILITY', value: 'private', public: true },
- { key: 'CI_PIPELINE_ID', value: pipeline.id.to_s, public: true },
{ key: 'CI_CONFIG_PATH', value: pipeline.ci_yaml_file_path, public: true },
{ key: 'CI_PIPELINE_SOURCE', value: pipeline.source, public: true },
{ key: 'CI_COMMIT_MESSAGE', value: pipeline.git_commit_message, public: true },
@@ -2084,7 +2092,7 @@ describe Ci::Build do
let(:deploy_token_variables) do
[
- { key: 'CI_DEPLOY_USER', value: deploy_token.name, public: true },
+ { key: 'CI_DEPLOY_USER', value: deploy_token.username, public: true },
{ key: 'CI_DEPLOY_PASSWORD', value: deploy_token.token, public: false }
]
end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index f3725e24470..a29fa0c4cae 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -167,13 +167,39 @@ describe Ci::Pipeline, :mailer do
end
end
+ describe '#persisted_variables' do
+ context 'when pipeline is not persisted yet' do
+ subject { build(:ci_pipeline).persisted_variables }
+
+ it 'does not contain persisted variables' do
+ keys = subject.map { |variable| variable[:key] }
+
+ expect(keys).not_to include 'CI_PIPELINE_ID'
+ end
+ end
+
+ context 'when pipeline is persisted' do
+ subject { build_stubbed(:ci_pipeline).persisted_variables }
+
+ it 'contains persisted variables' do
+ keys = subject.map { |variable| variable[:key] }
+
+ expect(keys).to eq %w[CI_PIPELINE_ID]
+ end
+ end
+ end
+
describe '#predefined_variables' do
subject { pipeline.predefined_variables }
it 'includes all predefined variables in a valid order' do
keys = subject.map { |variable| variable[:key] }
- expect(keys).to eq %w[CI_PIPELINE_ID CI_CONFIG_PATH CI_PIPELINE_SOURCE CI_COMMIT_MESSAGE CI_COMMIT_TITLE CI_COMMIT_DESCRIPTION]
+ expect(keys).to eq %w[CI_CONFIG_PATH
+ CI_PIPELINE_SOURCE
+ CI_COMMIT_MESSAGE
+ CI_COMMIT_TITLE
+ CI_COMMIT_DESCRIPTION]
end
end
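
For context, the split between persisted and predefined variables these examples exercise could look roughly like the sketch below; the Gitlab::Ci::Variables::Collection helper and the commit-reader names are assumptions for illustration, not code copied from Ci::Pipeline.

def persisted_variables
  Gitlab::Ci::Variables::Collection.new.tap do |variables|
    # CI_PIPELINE_ID only makes sense once the record has a database id.
    break variables unless persisted?

    variables.append(key: 'CI_PIPELINE_ID', value: id.to_s)
  end
end

def predefined_variables
  Gitlab::Ci::Variables::Collection.new.tap do |variables|
    variables.append(key: 'CI_CONFIG_PATH', value: ci_yaml_file_path)
    variables.append(key: 'CI_PIPELINE_SOURCE', value: source.to_s)
    variables.append(key: 'CI_COMMIT_MESSAGE', value: git_commit_message.to_s)
    variables.append(key: 'CI_COMMIT_TITLE', value: git_commit_full_title.to_s)
    variables.append(key: 'CI_COMMIT_DESCRIPTION', value: git_commit_description.to_s)
  end
end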
diff --git a/spec/models/clusters/applications/prometheus_spec.rb b/spec/models/clusters/applications/prometheus_spec.rb
index 407e2fc598a..d2302583ac8 100644
--- a/spec/models/clusters/applications/prometheus_spec.rb
+++ b/spec/models/clusters/applications/prometheus_spec.rb
@@ -79,7 +79,7 @@ describe Clusters::Applications::Prometheus do
end
it 'creates proper url' do
- expect(subject.prometheus_client.url).to eq('http://example.com/api/v1/proxy/namespaces/gitlab-managed-apps/service/prometheus-prometheus-server:80')
+ expect(subject.prometheus_client.url).to eq('http://example.com/api/v1/namespaces/gitlab-managed-apps/service/prometheus-prometheus-server:80/proxy')
end
it 'copies options and headers from kube client to proxy client' do
diff --git a/spec/models/concerns/cacheable_attributes_spec.rb b/spec/models/concerns/cacheable_attributes_spec.rb
new file mode 100644
index 00000000000..49e4b23ebc7
--- /dev/null
+++ b/spec/models/concerns/cacheable_attributes_spec.rb
@@ -0,0 +1,153 @@
+require 'spec_helper'
+
+describe CacheableAttributes do
+ let(:minimal_test_class) do
+ Class.new do
+ include ActiveModel::Model
+ extend ActiveModel::Callbacks
+ define_model_callbacks :commit
+ include CacheableAttributes
+
+ def self.name
+ 'TestClass'
+ end
+
+ def self.first
+ @_first ||= new('foo' => 'a')
+ end
+
+ def self.last
+ @_last ||= new('foo' => 'a', 'bar' => 'b')
+ end
+
+ attr_accessor :attributes
+
+ def initialize(attrs = {})
+ @attributes = attrs
+ end
+ end
+ end
+
+ shared_context 'with defaults' do
+ before do
+ minimal_test_class.define_singleton_method(:defaults) do
+ { foo: 'a', bar: 'b', baz: 'c' }
+ end
+ end
+ end
+
+ describe '.current_without_cache' do
+ it 'defaults to last' do
+ expect(minimal_test_class.current_without_cache).to eq(minimal_test_class.last)
+ end
+
+ it 'can be overridden' do
+ minimal_test_class.define_singleton_method(:current_without_cache) do
+ first
+ end
+
+ expect(minimal_test_class.current_without_cache).to eq(minimal_test_class.first)
+ end
+ end
+
+ describe '.cache_key' do
+ it 'excludes cache attributes' do
+ expect(minimal_test_class.cache_key).to eq("TestClass:#{Gitlab::VERSION}:#{Gitlab.migrations_hash}:json")
+ end
+ end
+
+ describe '.defaults' do
+ it 'defaults to {}' do
+ expect(minimal_test_class.defaults).to eq({})
+ end
+
+ context 'with defaults defined' do
+ include_context 'with defaults'
+
+ it 'can be overridden' do
+ expect(minimal_test_class.defaults).to eq({ foo: 'a', bar: 'b', baz: 'c' })
+ end
+ end
+ end
+
+ describe '.build_from_defaults' do
+ include_context 'with defaults'
+
+ context 'without any attributes given' do
+ it 'initializes a new object with the defaults' do
+ expect(minimal_test_class.build_from_defaults).not_to be_persisted
+ end
+ end
+
+ context 'with attributes given' do
+ it 'initializes a new object with the given attributes merged into the defaults' do
+ expect(minimal_test_class.build_from_defaults(foo: 'd').attributes[:foo]).to eq('d')
+ end
+ end
+ end
+
+ describe '.current', :use_clean_rails_memory_store_caching do
+ context 'redis unavailable' do
+ it 'returns an uncached record' do
+ allow(minimal_test_class).to receive(:last).and_return(:last)
+ expect(Rails.cache).to receive(:read).and_raise(Redis::BaseError)
+
+ expect(minimal_test_class.current).to eq(:last)
+ end
+ end
+
+ context 'when a record is not yet present' do
+ it 'does not cache nil object' do
+ # when the settings are missing, a nil object is returned but not cached
+ allow(minimal_test_class).to receive(:last).twice.and_return(nil)
+
+ expect(minimal_test_class.current).to be_nil
+ expect(Rails.cache.exist?(minimal_test_class.cache_key)).to be(false)
+ end
+
+ it 'caches a non-nil object' do
+ # when the settings are set the method returns a valid object
+ allow(minimal_test_class).to receive(:last).and_call_original
+
+ expect(minimal_test_class.current).to eq(minimal_test_class.last)
+ expect(Rails.cache.exist?(minimal_test_class.cache_key)).to be(true)
+
+ # subsequent calls retrieve the record from the cache
+ last_record = minimal_test_class.last
+ expect(minimal_test_class).not_to receive(:last)
+ expect(minimal_test_class.current.attributes).to eq(last_record.attributes)
+ end
+ end
+ end
+
+ describe '.cached', :use_clean_rails_memory_store_caching do
+ context 'when cache is cold' do
+ it 'returns nil' do
+ expect(minimal_test_class.cached).to be_nil
+ end
+ end
+
+ context 'when cached settings do not include the latest defaults' do
+ before do
+ Rails.cache.write(minimal_test_class.cache_key, { bar: 'b', baz: 'c' }.to_json)
+ minimal_test_class.define_singleton_method(:defaults) do
+ { foo: 'a', bar: 'b', baz: 'c' }
+ end
+ end
+
+ it 'includes attributes from defaults' do
+ expect(minimal_test_class.cached.attributes[:foo]).to eq(minimal_test_class.defaults[:foo])
+ end
+ end
+ end
+
+ describe '#cache!', :use_clean_rails_memory_store_caching do
+ let(:appearance_record) { create(:appearance) }
+
+ it 'caches the attributes' do
+ appearance_record.cache!
+
+ expect(Rails.cache.read(Appearance.cache_key)).to eq(appearance_record.attributes.to_json)
+ end
+ end
+end
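
A condensed, hedged sketch of the behaviour this new spec pins down. It is an approximation for readers of the diff, not the concern shipped in app/models/concerns/cacheable_attributes.rb; the expiry-on-commit hook and the JSON round-trip are assumptions drawn from the examples above.

module CacheableAttributes
  extend ActiveSupport::Concern

  included do
    # Invalidate the cached row whenever a record is committed.
    after_commit do
      self.class.expire
    end
  end

  class_methods do
    def cache_key
      "#{name}:#{Gitlab::VERSION}:#{Gitlab.migrations_hash}:json"
    end

    def defaults
      {}
    end

    def build_from_defaults(attributes = {})
      new(defaults.merge(attributes))
    end

    def current_without_cache
      last
    end

    def cached
      json = Rails.cache.read(cache_key)
      build_from_defaults(JSON.parse(json)) if json
    end

    def current
      cached || retrieve_and_cache
    rescue Redis::BaseError
      # Fall back to an uncached read when the cache store is unreachable.
      current_without_cache
    end

    def retrieve_and_cache
      # A missing record returns nil and is deliberately not written to the cache.
      current_without_cache.tap { |record| record&.cache! }
    end

    def expire
      Rails.cache.delete(cache_key)
    end
  end

  def cache!
    Rails.cache.write(self.class.cache_key, attributes.to_json)
  end
end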
diff --git a/spec/models/concerns/discussion_on_diff_spec.rb b/spec/models/concerns/discussion_on_diff_spec.rb
index 30572ce9332..8cd129dc851 100644
--- a/spec/models/concerns/discussion_on_diff_spec.rb
+++ b/spec/models/concerns/discussion_on_diff_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe DiscussionOnDiff do
- subject { create(:diff_note_on_merge_request).to_discussion }
+ subject { create(:diff_note_on_merge_request, line_number: 18).to_discussion }
describe "#truncated_diff_lines" do
let(:truncated_lines) { subject.truncated_diff_lines }
diff --git a/spec/models/concerns/redis_cacheable_spec.rb b/spec/models/concerns/redis_cacheable_spec.rb
index 3d7963120b6..23c6c6233e9 100644
--- a/spec/models/concerns/redis_cacheable_spec.rb
+++ b/spec/models/concerns/redis_cacheable_spec.rb
@@ -1,21 +1,36 @@
require 'spec_helper'
describe RedisCacheable do
- let(:model) { double }
+ let(:model) do
+ Struct.new(:id, :attributes) do
+ def read_attribute(attribute)
+ attributes[attribute]
+ end
+
+ def cast_value_from_cache(attribute, cached_value)
+ cached_value
+ end
+
+ def has_attribute?(attribute)
+ attributes.has_key?(attribute)
+ end
+ end
+ end
+
+ let(:payload) { { name: 'value', time: Time.zone.now } }
+ let(:instance) { model.new(1, payload) }
+ let(:cache_key) { instance.__send__(:cache_attribute_key) }
before do
- model.extend(described_class)
- allow(model).to receive(:cache_attribute_key).and_return('key')
+ model.include(described_class)
end
describe '#cached_attribute' do
- let(:payload) { { attribute: 'value' } }
-
- subject { model.cached_attribute(payload.keys.first) }
+ subject { instance.cached_attribute(payload.keys.first) }
it 'gets the cache attribute' do
Gitlab::Redis::SharedState.with do |redis|
- expect(redis).to receive(:get).with('key')
+ expect(redis).to receive(:get).with(cache_key)
.and_return(payload.to_json)
end
@@ -24,16 +39,62 @@ describe RedisCacheable do
end
describe '#cache_attributes' do
- let(:values) { { name: 'new_name' } }
-
- subject { model.cache_attributes(values) }
+ subject { instance.cache_attributes(payload) }
it 'sets the cache attributes' do
Gitlab::Redis::SharedState.with do |redis|
- expect(redis).to receive(:set).with('key', values.to_json, anything)
+ expect(redis).to receive(:set).with(cache_key, payload.to_json, anything)
end
subject
end
end
+
+ describe '#cached_attr_reader', :clean_gitlab_redis_shared_state do
+ subject { instance.name }
+
+ before do
+ model.cached_attr_reader(:name)
+ end
+
+ context 'when there is no cached value' do
+ it 'reads the attribute' do
+ expect(instance).to receive(:read_attribute).and_call_original
+
+ expect(subject).to eq(payload[:name])
+ end
+ end
+
+ context 'when there is a cached value' do
+ it 'reads the cached value' do
+ expect(instance).not_to receive(:read_attribute)
+
+ instance.cache_attributes(payload)
+
+ expect(subject).to eq(payload[:name])
+ end
+ end
+
+ it 'always returns the latest values' do
+ expect(instance.name).to eq(payload[:name])
+
+ instance.cache_attributes(name: 'new_value')
+
+ expect(instance.name).to eq('new_value')
+ end
+ end
+
+ describe '#cast_value_from_cache' do
+ subject { instance.__send__(:cast_value_from_cache, attribute, value) }
+
+ context 'with runner contacted_at' do
+ let(:instance) { Ci::Runner.new }
+ let(:attribute) { :contacted_at }
+ let(:value) { '2018-05-07 13:53:08 UTC' }
+
+ it 'converts cache string to appropriate type' do
+ expect(subject).to be_an_instance_of(ActiveSupport::TimeWithZone)
+ end
+ end
+ end
end
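
A compact sketch of the reader behaviour exercised above. The key format, TTL, and casting are assumptions for illustration; the actual concern lives in app/models/concerns/redis_cacheable.rb.

module RedisCacheable
  extend ActiveSupport::Concern

  class_methods do
    # Defines readers that prefer the Redis-cached value and fall back to
    # the database attribute when nothing is cached yet.
    def cached_attr_reader(*attributes)
      attributes.each do |attribute|
        define_method(attribute) do
          cached_attribute(attribute) || read_attribute(attribute)
        end
      end
    end
  end

  def cached_attribute(attribute)
    cached_value = (cached_attributes || {})[attribute]
    cast_value_from_cache(attribute, cached_value) unless cached_value.nil?
  end

  def cache_attributes(values)
    Gitlab::Redis::SharedState.with do |redis|
      # TTL is an assumption; the spec only checks the key and payload.
      redis.set(cache_attribute_key, values.to_json, ex: 24.hours)
    end
  end

  private

  def cache_attribute_key
    # Key format is assumed for illustration.
    "cache:#{self.class.name}:#{id}:attributes"
  end

  def cached_attributes
    Gitlab::Redis::SharedState.with do |redis|
      data = redis.get(cache_attribute_key)
      JSON.parse(data, symbolize_names: true) if data
    end
  end

  def cast_value_from_cache(attribute, value)
    # On ActiveRecord models the cached string is cast back to the column type,
    # e.g. a runner's contacted_at becomes an ActiveSupport::TimeWithZone.
    self.class.type_for_attribute(attribute.to_s).cast(value)
  end
end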
diff --git a/spec/models/concerns/resolvable_note_spec.rb b/spec/models/concerns/resolvable_note_spec.rb
index 91591017587..fcb5250278e 100644
--- a/spec/models/concerns/resolvable_note_spec.rb
+++ b/spec/models/concerns/resolvable_note_spec.rb
@@ -326,4 +326,12 @@ describe Note, ResolvableNote do
end
end
end
+
+ describe "#potentially_resolvable?" do
+ it 'returns false if noteable could not be found' do
+ allow(subject).to receive(:noteable).and_return(nil)
+
+ expect(subject.potentially_resolvable?).to be_falsey
+ end
+ end
end
diff --git a/spec/models/diff_note_spec.rb b/spec/models/diff_note_spec.rb
index fb51c0172ab..8624f0daa4d 100644
--- a/spec/models/diff_note_spec.rb
+++ b/spec/models/diff_note_spec.rb
@@ -84,7 +84,47 @@ describe DiffNote do
end
end
- describe "#diff_file" do
+ describe '#create_diff_file callback' do
+ let(:noteable) { create(:merge_request) }
+ let(:project) { noteable.project }
+
+ context 'merge request' do
+ let!(:diff_note) { create(:diff_note_on_merge_request, project: project, noteable: noteable) }
+
+ it 'creates a diff note file' do
+ expect(diff_note.reload.note_diff_file).to be_present
+ end
+
+ it 'does not create diff note file if it is a reply' do
+ expect { create(:diff_note_on_merge_request, noteable: noteable, in_reply_to: diff_note) }
+ .not_to change(NoteDiffFile, :count)
+ end
+ end
+
+ context 'commit' do
+ let!(:diff_note) { create(:diff_note_on_commit, project: project) }
+
+ it 'creates a diff note file' do
+ expect(diff_note.reload.note_diff_file).to be_present
+ end
+
+ it 'does not create diff note file if it is a reply' do
+ expect { create(:diff_note_on_commit, in_reply_to: diff_note) }
+ .not_to change(NoteDiffFile, :count)
+ end
+ end
+ end
+
+ describe '#diff_file', :clean_gitlab_redis_shared_state do
+ context 'when note_diff_file association exists' do
+ it 'returns persisted diff file data' do
+ diff_file = subject.diff_file
+
+ expect(diff_file.diff.to_hash.with_indifferent_access)
+ .to include(subject.note_diff_file.to_hash)
+ end
+ end
+
context 'when the discussion was created in the diff' do
it 'returns correct diff file' do
diff_file = subject.diff_file
@@ -115,6 +155,26 @@ describe DiffNote do
expect(diff_file.diff_refs).to eq(position.diff_refs)
end
end
+
+ context 'note diff file creation enqueuing' do
+ it 'enqueues CreateNoteDiffFileWorker if it is the first note of a discussion' do
+ subject.note_diff_file.destroy!
+
+ expect(CreateNoteDiffFileWorker).to receive(:perform_async).with(subject.id)
+
+ subject.reload.diff_file
+ end
+
+ it 'does not enqueue CreateNoteDiffFileWorker if it is not the first note of a discussion' do
+ mr = create(:merge_request)
+ diff_note = create(:diff_note_on_merge_request, project: mr.project, noteable: mr)
+ reply_diff_note = create(:diff_note_on_merge_request, in_reply_to: diff_note)
+
+ expect(CreateNoteDiffFileWorker).not_to receive(:perform_async).with(reply_diff_note.id)
+
+ reply_diff_note.reload.diff_file
+ end
+ end
end
describe "#diff_line" do
diff --git a/spec/models/internal_id_spec.rb b/spec/models/internal_id_spec.rb
index 8ef91e8fab5..581fd0293cc 100644
--- a/spec/models/internal_id_spec.rb
+++ b/spec/models/internal_id_spec.rb
@@ -5,7 +5,7 @@ describe InternalId do
let(:usage) { :issues }
let(:issue) { build(:issue, project: project) }
let(:scope) { { project: project } }
- let(:init) { ->(s) { s.project.issues.maximum(:iid) } }
+ let(:init) { ->(s) { s.project.issues.size } }
context 'validations' do
it { is_expected.to validate_presence_of(:usage) }
@@ -39,29 +39,6 @@ describe InternalId do
end
end
- context 'with an InternalId record present and existing issues with a higher internal id' do
- # This can happen if the old NonatomicInternalId is still in use
- before do
- issues = Array.new(rand(1..10)).map { create(:issue, project: project) }
-
- issue = issues.last
- issue.iid = issues.map { |i| i.iid }.max + 1
- issue.save
- end
-
- let(:maximum_iid) { project.issues.map { |i| i.iid }.max }
-
- it 'updates last_value to the maximum internal id present' do
- subject
-
- expect(described_class.find_by(project: project, usage: described_class.usages[usage.to_s]).last_value).to eq(maximum_iid + 1)
- end
-
- it 'returns next internal id correctly' do
- expect(subject).to eq(maximum_iid + 1)
- end
- end
-
context 'with concurrent inserts on table' do
it 'looks up the record if it was created concurrently' do
args = { **scope, usage: described_class.usages[usage.to_s] }
@@ -104,8 +81,7 @@ describe InternalId do
describe '#increment_and_save!' do
let(:id) { create(:internal_id) }
- let(:maximum_iid) { nil }
- subject { id.increment_and_save!(maximum_iid) }
+ subject { id.increment_and_save! }
it 'returns incremented iid' do
value = id.last_value
@@ -126,14 +102,5 @@ describe InternalId do
expect(subject).to eq(1)
end
end
-
- context 'with maximum_iid given' do
- let(:id) { create(:internal_id, last_value: 1) }
- let(:maximum_iid) { id.last_value + 10 }
-
- it 'returns maximum_iid instead' do
- expect(subject).to eq(12)
- end
- end
end
end
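
With the maximum_iid argument gone, the increment covered by the remaining examples reduces to roughly the following. This is a sketch; the real InternalId#increment_and_save! runs inside a transaction with a row lock.

def increment_and_save!
  lock!
  self.last_value = (last_value || 0) + 1
  save!
  last_value
end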
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 04379e7d2c3..92e33a64d26 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -1245,38 +1245,50 @@ describe MergeRequest do
describe '#check_if_can_be_merged' do
let(:project) { create(:project, only_allow_merge_if_pipeline_succeeds: true) }
- subject { create(:merge_request, source_project: project, merge_status: :unchecked) }
+ shared_examples 'checking if can be merged' do
+ context 'when it is not broken and has no conflicts' do
+ before do
+ allow(subject).to receive(:broken?) { false }
+ allow(project.repository).to receive(:can_be_merged?).and_return(true)
+ end
- context 'when it is not broken and has no conflicts' do
- before do
- allow(subject).to receive(:broken?) { false }
- allow(project.repository).to receive(:can_be_merged?).and_return(true)
+ it 'is marked as mergeable' do
+ expect { subject.check_if_can_be_merged }.to change { subject.merge_status }.to('can_be_merged')
+ end
end
- it 'is marked as mergeable' do
- expect { subject.check_if_can_be_merged }.to change { subject.merge_status }.to('can_be_merged')
- end
- end
+ context 'when broken' do
+ before do
+ allow(subject).to receive(:broken?) { true }
+ end
- context 'when broken' do
- before do
- allow(subject).to receive(:broken?) { true }
+ it 'becomes unmergeable' do
+ expect { subject.check_if_can_be_merged }.to change { subject.merge_status }.to('cannot_be_merged')
+ end
end
- it 'becomes unmergeable' do
- expect { subject.check_if_can_be_merged }.to change { subject.merge_status }.to('cannot_be_merged')
+ context 'when it has conflicts' do
+ before do
+ allow(subject).to receive(:broken?) { false }
+ allow(project.repository).to receive(:can_be_merged?).and_return(false)
+ end
+
+ it 'becomes unmergeable' do
+ expect { subject.check_if_can_be_merged }.to change { subject.merge_status }.to('cannot_be_merged')
+ end
end
end
- context 'when it has conflicts' do
- before do
- allow(subject).to receive(:broken?) { false }
- allow(project.repository).to receive(:can_be_merged?).and_return(false)
- end
+ context 'when merge_status is unchecked' do
+ subject { create(:merge_request, source_project: project, merge_status: :unchecked) }
- it 'becomes unmergeable' do
- expect { subject.check_if_can_be_merged }.to change { subject.merge_status }.to('cannot_be_merged')
- end
+ it_behaves_like 'checking if can be merged'
+ end
+
+ context 'when merge_status is cannot_be_merged_recheck' do
+ subject { create(:merge_request, source_project: project, merge_status: :cannot_be_merged_recheck) }
+
+ it_behaves_like 'checking if can be merged'
end
end
@@ -2064,6 +2076,53 @@ describe MergeRequest do
expect(subject.merge_jid).to be_nil
end
end
+
+ describe 'transition to cannot_be_merged' do
+ let(:notification_service) { double(:notification_service) }
+ let(:todo_service) { double(:todo_service) }
+
+ subject { create(:merge_request, merge_status: :unchecked) }
+
+ before do
+ allow(NotificationService).to receive(:new).and_return(notification_service)
+ allow(TodoService).to receive(:new).and_return(todo_service)
+ end
+
+ it 'notifies, but does not notify again if rechecking still results in cannot_be_merged' do
+ expect(notification_service).to receive(:merge_request_unmergeable).with(subject).once
+ expect(todo_service).to receive(:merge_request_became_unmergeable).with(subject).once
+
+ subject.mark_as_unmergeable
+ subject.mark_as_unchecked
+ subject.mark_as_unmergeable
+ end
+
+ it 'notifies whenever merge request is newly unmergeable' do
+ expect(notification_service).to receive(:merge_request_unmergeable).with(subject).twice
+ expect(todo_service).to receive(:merge_request_became_unmergeable).with(subject).twice
+
+ subject.mark_as_unmergeable
+ subject.mark_as_unchecked
+ subject.mark_as_mergeable
+ subject.mark_as_unchecked
+ subject.mark_as_unmergeable
+ end
+ end
+
+ describe 'check_state?' do
+ it 'indicates whether the MR is still being checked for mergeability' do
+ state_machine = described_class.state_machines[:merge_status]
+ check_states = [:unchecked, :cannot_be_merged_recheck]
+
+ check_states.each do |merge_status|
+ expect(state_machine.check_state?(merge_status)).to be true
+ end
+
+ (state_machine.states.map(&:name) - check_states).each do |merge_status|
+ expect(state_machine.check_state?(merge_status)).to be false
+ end
+ end
+ end
end
describe '#should_be_rebased?' do
@@ -2195,4 +2254,39 @@ describe MergeRequest do
expect(merge_request.can_allow_maintainer_to_push?(user)).to be_truthy
end
end
+
+ describe '#merge_participants' do
+ it 'contains author' do
+ expect(subject.merge_participants).to eq([subject.author])
+ end
+
+ describe 'when merge_when_pipeline_succeeds? is true' do
+ describe 'when merge user is author' do
+ let(:user) { create(:user) }
+ subject do
+ create(:merge_request,
+ merge_when_pipeline_succeeds: true,
+ merge_user: user,
+ author: user)
+ end
+
+ it 'contains author only' do
+ expect(subject.merge_participants).to eq([subject.author])
+ end
+ end
+
+ describe 'when merge user and author are different users' do
+ let(:merge_user) { create(:user) }
+ subject do
+ create(:merge_request,
+ merge_when_pipeline_succeeds: true,
+ merge_user: merge_user)
+ end
+
+ it 'contains author and merge user' do
+ expect(subject.merge_participants).to eq([subject.author, merge_user])
+ end
+ end
+ end
+ end
end
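
The unmergeable-notification and check_state? examples above suggest roughly this shape inside the merge_status state machine. This is a hedged sketch, not the exact MergeRequest code; the transition lists and hook placement are inferred from the expectations in the spec.

state_machine :merge_status, initial: :unchecked do
  state :cannot_be_merged_recheck

  event :mark_as_unchecked do
    transition [:can_be_merged, :unchecked] => :unchecked
    transition [:cannot_be_merged, :cannot_be_merged_recheck] => :cannot_be_merged_recheck
  end

  # Notify only when the MR becomes unmergeable from a clean check; a recheck
  # that stays unmergeable does not notify again.
  after_transition unchecked: :cannot_be_merged do |merge_request, _transition|
    NotificationService.new.merge_request_unmergeable(merge_request)
    TodoService.new.merge_request_became_unmergeable(merge_request)
  end

  # Both "still being checked" states report true here.
  def check_state?(merge_status)
    [:unchecked, :cannot_be_merged_recheck].include?(merge_status.to_sym)
  end
end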
diff --git a/spec/models/note_diff_file_spec.rb b/spec/models/note_diff_file_spec.rb
new file mode 100644
index 00000000000..591c1a89748
--- /dev/null
+++ b/spec/models/note_diff_file_spec.rb
@@ -0,0 +1,11 @@
+require 'rails_helper'
+
+describe NoteDiffFile do
+ describe 'associations' do
+ it { is_expected.to belong_to(:diff_note) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:diff_note) }
+ end
+end
diff --git a/spec/models/project_services/gemnasium_service_spec.rb b/spec/models/project_services/gemnasium_service_spec.rb
index 4c61bc0af95..6e417323e40 100644
--- a/spec/models/project_services/gemnasium_service_spec.rb
+++ b/spec/models/project_services/gemnasium_service_spec.rb
@@ -26,24 +26,49 @@ describe GemnasiumService do
end
end
+ describe "deprecated?" do
+ let(:project) { create(:project, :repository) }
+ let(:gemnasium_service) { described_class.new }
+
+ before do
+ allow(gemnasium_service).to receive_messages(
+ project_id: project.id,
+ project: project,
+ service_hook: true,
+ token: 'verySecret',
+ api_key: 'GemnasiumUserApiKey'
+ )
+ end
+
+ it "is true" do
+ expect(gemnasium_service.deprecated?).to be true
+ end
+
+ it "can't create a new service" do
+ expect(gemnasium_service.save).to be false
+ expect(gemnasium_service.errors[:base].first)
+ .to eq('Gemnasium has been acquired by GitLab in January 2018. Since May 15, 2018, the service provided by Gemnasium is no longer available.')
+ end
+ end
+
describe "Execute" do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
+ let(:gemnasium_service) { described_class.new }
+ let(:sample_data) { Gitlab::DataBuilder::Push.build_sample(project, user) }
before do
- @gemnasium_service = described_class.new
- allow(@gemnasium_service).to receive_messages(
+ allow(gemnasium_service).to receive_messages(
project_id: project.id,
project: project,
service_hook: true,
token: 'verySecret',
api_key: 'GemnasiumUserApiKey'
)
- @sample_data = Gitlab::DataBuilder::Push.build_sample(project, user)
end
it "calls Gemnasium service" do
expect(Gemnasium::GitlabService).to receive(:execute).with(an_instance_of(Hash)).once
- @gemnasium_service.execute(@sample_data)
+ gemnasium_service.execute(sample_data)
end
end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 39625b559eb..af2240f4f89 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -469,6 +469,34 @@ describe Project do
end
end
+ describe "#readme_url" do
+ let(:project) { create(:project, :repository, path: "somewhere") }
+
+ context 'with a non-existing repository' do
+ it 'returns nil' do
+ allow(project.repository).to receive(:tree).with(:head).and_return(nil)
+
+ expect(project.readme_url).to be_nil
+ end
+ end
+
+ context 'with an existing repository' do
+ context 'when no README exists' do
+ it 'returns nil' do
+ allow_any_instance_of(Tree).to receive(:readme).and_return(nil)
+
+ expect(project.readme_url).to be_nil
+ end
+ end
+
+ context 'when a README exists' do
+ it 'returns the README' do
+ expect(project.readme_url).to eql("#{Gitlab.config.gitlab.url}/#{project.namespace.full_path}/somewhere/blob/master/README.md")
+ end
+ end
+ end
+ end
+
describe "#new_issuable_address" do
let(:project) { create(:project, path: "somewhere") }
let(:user) { create(:user) }
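
The #readme_url examples above imply behaviour along these lines; the blob URL helper and the nil-guarding are assumptions for illustration rather than the exact Project implementation.

def readme_url
  readme = repository.tree(:head)&.readme

  if readme
    Gitlab::Routing.url_helpers.project_blob_url(
      self,
      File.join(default_branch, readme.path)
    )
  end
end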
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 6bc148a1392..0ccf55bd895 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -2031,27 +2031,27 @@ describe Repository do
describe '#xcode_project?' do
before do
- allow(repository).to receive(:tree).with(:head).and_return(double(:tree, blobs: [blob]))
+ allow(repository).to receive(:tree).with(:head).and_return(double(:tree, trees: [tree]))
end
- context 'when the root contains a *.xcodeproj file' do
- let(:blob) { double(:blob, path: 'Foo.xcodeproj') }
+ context 'when the root contains a *.xcodeproj directory' do
+ let(:tree) { double(:tree, path: 'Foo.xcodeproj') }
it 'returns true' do
expect(repository.xcode_project?).to be_truthy
end
end
- context 'when the root contains a *.xcworkspace file' do
- let(:blob) { double(:blob, path: 'Foo.xcworkspace') }
+ context 'when the root contains a *.xcworkspace directory' do
+ let(:tree) { double(:tree, path: 'Foo.xcworkspace') }
it 'returns true' do
expect(repository.xcode_project?).to be_truthy
end
end
- context 'when the root contains no XCode config file' do
- let(:blob) { double(:blob, path: 'subdir/Foo.xcworkspace') }
+ context 'when the root contains no Xcode config directory' do
+ let(:tree) { double(:tree, path: 'Foo') }
it 'returns false' do
expect(repository.xcode_project?).to be_falsey
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 684fa030baf..6a2f4a39f09 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -393,24 +393,6 @@ describe User do
end
describe 'after commit hook' do
- describe '.update_invalid_gpg_signatures' do
- let(:user) do
- create(:user, email: 'tula.torphy@abshire.ca').tap do |user|
- user.skip_reconfirmation!
- end
- end
-
- it 'does nothing when the name is updated' do
- expect(user).not_to receive(:update_invalid_gpg_signatures)
- user.update_attributes!(name: 'Bette')
- end
-
- it 'synchronizes the gpg keys when the email is updated' do
- expect(user).to receive(:update_invalid_gpg_signatures).at_most(:twice)
- user.update_attributes!(email: 'shawnee.ritchie@denesik.com')
- end
- end
-
describe '#update_emails_with_primary_email' do
before do
@user = create(:user, email: 'primary@example.com').tap do |user|
@@ -450,6 +432,76 @@ describe User do
expect(@user.emails.first.confirmed_at).not_to eq nil
end
end
+
+ describe '#update_notification_email' do
+ # Regression: https://gitlab.com/gitlab-org/gitlab-ce/issues/22846
+ context 'when changing :email' do
+ let(:user) { create(:user) }
+ let(:new_email) { 'new-email@example.com' }
+
+ it 'sets :unconfirmed_email' do
+ expect do
+ user.tap { |u| u.update!(email: new_email) }.reload
+ end.to change(user, :unconfirmed_email).to(new_email)
+ end
+
+ it 'does not change :notification_email' do
+ expect do
+ user.tap { |u| u.update!(email: new_email) }.reload
+ end.not_to change(user, :notification_email)
+ end
+
+ it 'updates :notification_email to the new email once confirmed' do
+ user.update!(email: new_email)
+
+ expect do
+ user.tap(&:confirm).reload
+ end.to change(user, :notification_email).to eq(new_email)
+ end
+
+ context 'and :notification_email is set to a secondary email' do
+ let!(:email_attrs) { attributes_for(:email, :confirmed, user: user) }
+ let(:secondary) { create(:email, :confirmed, email: 'secondary@example.com', user: user) }
+
+ before do
+ user.emails.create(email_attrs)
+ user.tap { |u| u.update!(notification_email: email_attrs[:email]) }.reload
+ end
+
+ it 'does not change :notification_email to :email' do
+ expect do
+ user.tap { |u| u.update!(email: new_email) }.reload
+ end.not_to change(user, :notification_email)
+ end
+
+ it 'does not change :notification_email to :email once confirmed' do
+ user.update!(email: new_email)
+
+ expect do
+ user.tap(&:confirm).reload
+ end.not_to change(user, :notification_email)
+ end
+ end
+ end
+ end
+
+ describe '#update_invalid_gpg_signatures' do
+ let(:user) do
+ create(:user, email: 'tula.torphy@abshire.ca').tap do |user|
+ user.skip_reconfirmation!
+ end
+ end
+
+ it 'does nothing when the name is updated' do
+ expect(user).not_to receive(:update_invalid_gpg_signatures)
+ user.update_attributes!(name: 'Bette')
+ end
+
+ it 'synchronizes the gpg keys when the email is updated' do
+ expect(user).to receive(:update_invalid_gpg_signatures).at_most(:twice)
+ user.update_attributes!(email: 'shawnee.ritchie@denesik.com')
+ end
+ end
end
describe '#update_tracked_fields!', :clean_gitlab_redis_shared_state do