Diffstat (limited to 'spec/models')
-rw-r--r--  spec/models/ability_spec.rb | 95
-rw-r--r--  spec/models/badge_spec.rb | 94
-rw-r--r--  spec/models/badges/group_badge_spec.rb | 11
-rw-r--r--  spec/models/badges/project_badge_spec.rb | 43
-rw-r--r--  spec/models/chat_name_spec.rb | 20
-rw-r--r--  spec/models/ci/build_spec.rb | 206
-rw-r--r--  spec/models/ci/group_variable_spec.rb | 2
-rw-r--r--  spec/models/ci/pipeline_spec.rb | 8
-rw-r--r--  spec/models/ci/variable_spec.rb | 2
-rw-r--r--  spec/models/clusters/applications/helm_spec.rb | 97
-rw-r--r--  spec/models/clusters/applications/ingress_spec.rb | 76
-rw-r--r--  spec/models/clusters/applications/prometheus_spec.rb | 55
-rw-r--r--  spec/models/clusters/applications/runner_spec.rb | 99
-rw-r--r--  spec/models/clusters/cluster_spec.rb | 4
-rw-r--r--  spec/models/clusters/platforms/kubernetes_spec.rb | 2
-rw-r--r--  spec/models/commit_status_spec.rb | 4
-rw-r--r--  spec/models/compare_spec.rb | 40
-rw-r--r--  spec/models/concerns/issuable_spec.rb | 2
-rw-r--r--  spec/models/concerns/prometheus_adapter_spec.rb | 121
-rw-r--r--  spec/models/concerns/protected_ref_access_spec.rb | 31
-rw-r--r--  spec/models/cycle_analytics/code_spec.rb | 20
-rw-r--r--  spec/models/cycle_analytics/issue_spec.rb | 8
-rw-r--r--  spec/models/cycle_analytics/plan_spec.rb | 8
-rw-r--r--  spec/models/cycle_analytics/production_spec.rb | 14
-rw-r--r--  spec/models/cycle_analytics/review_spec.rb | 4
-rw-r--r--  spec/models/cycle_analytics/staging_spec.rb | 14
-rw-r--r--  spec/models/cycle_analytics/test_spec.rb | 16
-rw-r--r--  spec/models/cycle_analytics_spec.rb | 30
-rw-r--r--  spec/models/deployment_spec.rb | 16
-rw-r--r--  spec/models/environment_spec.rb | 61
-rw-r--r--  spec/models/event_spec.rb | 16
-rw-r--r--  spec/models/group_spec.rb | 1
-rw-r--r--  spec/models/internal_id_spec.rb | 106
-rw-r--r--  spec/models/issue_spec.rb | 58
-rw-r--r--  spec/models/members/project_member_spec.rb | 23
-rw-r--r--  spec/models/merge_request_spec.rb | 82
-rw-r--r--  spec/models/namespace_spec.rb | 57
-rw-r--r--  spec/models/notification_recipient_spec.rb | 16
-rw-r--r--  spec/models/pages_domain_spec.rb | 144
-rw-r--r--  spec/models/project_auto_devops_spec.rb | 8
-rw-r--r--  spec/models/project_services/asana_service_spec.rb | 2
-rw-r--r--  spec/models/project_services/hipchat_service_spec.rb | 4
-rw-r--r--  spec/models/project_services/jira_service_spec.rb | 3
-rw-r--r--  spec/models/project_services/kubernetes_service_spec.rb | 2
-rw-r--r--  spec/models/project_services/mattermost_slash_commands_service_spec.rb | 6
-rw-r--r--  spec/models/project_services/prometheus_service_spec.rb | 189
-rw-r--r--  spec/models/project_spec.rb | 174
-rw-r--r--  spec/models/project_wiki_spec.rb | 8
-rw-r--r--  spec/models/repository_spec.rb | 133
-rw-r--r--  spec/models/user_interacted_project_spec.rb | 60
-rw-r--r--  spec/models/user_spec.rb | 27
51 files changed, 1702 insertions, 620 deletions
diff --git a/spec/models/ability_spec.rb b/spec/models/ability_spec.rb
index 38fb98d4f50..cd175dba6da 100644
--- a/spec/models/ability_spec.rb
+++ b/spec/models/ability_spec.rb
@@ -204,6 +204,78 @@ describe Ability do
end
end
+ describe '.merge_requests_readable_by_user' do
+ context 'with an admin' do
+ it 'returns all merge requests' do
+ user = build(:user, admin: true)
+ merge_request = build(:merge_request)
+
+ expect(described_class.merge_requests_readable_by_user([merge_request], user))
+ .to eq([merge_request])
+ end
+ end
+
+ context 'without a user' do
+ it 'returns merge_requests that are publicly visible' do
+ hidden_merge_request = build(:merge_request)
+ visible_merge_request = build(:merge_request, source_project: build(:project, :public))
+
+ merge_requests = described_class
+ .merge_requests_readable_by_user([hidden_merge_request, visible_merge_request])
+
+ expect(merge_requests).to eq([visible_merge_request])
+ end
+ end
+
+ context 'with a user' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let(:merge_request) { create(:merge_request, source_project: project) }
+ let(:cross_project_merge_request) do
+ create(:merge_request, source_project: create(:project, :public))
+ end
+ let(:other_merge_request) { create(:merge_request) }
+ let(:all_merge_requests) do
+ [merge_request, cross_project_merge_request, other_merge_request]
+ end
+
+ subject(:readable_merge_requests) do
+ described_class.merge_requests_readable_by_user(all_merge_requests, user)
+ end
+
+ before do
+ project.add_developer(user)
+ end
+
+ it 'returns merge requests visible to the user' do
+ expect(readable_merge_requests).to contain_exactly(merge_request, cross_project_merge_request)
+ end
+
+ context 'when a user cannot read cross project and a filter is passed' do
+ before do
+ allow(described_class).to receive(:allowed?).and_call_original
+ expect(described_class).to receive(:allowed?).with(user, :read_cross_project) { false }
+ end
+
+ subject(:readable_merge_requests) do
+ read_cross_project_filter = -> (merge_requests) do
+ merge_requests.select { |mr| mr.source_project == project }
+ end
+ described_class.merge_requests_readable_by_user(
+ all_merge_requests, user,
+ filters: { read_cross_project: read_cross_project_filter }
+ )
+ end
+
+ it 'returns only MRs of the specified project without checking access on others' do
+ expect(described_class).not_to receive(:allowed?).with(user, :read_merge_request, cross_project_merge_request)
+
+ expect(readable_merge_requests).to contain_exactly(merge_request)
+ end
+ end
+ end
+ end
+
describe '.issues_readable_by_user' do
context 'with an admin user' do
it 'returns all given issues' do
@@ -250,6 +322,29 @@ describe Ability do
expect(issues).to eq([visible_issue])
end
end
+
+ context 'when the user cannot read cross project' do
+ let(:user) { create(:user) }
+ let(:issue) { create(:issue) }
+ let(:other_project_issue) { create(:issue) }
+ let(:project) { issue.project }
+
+ before do
+ project.add_developer(user)
+
+ allow(described_class).to receive(:allowed?).and_call_original
+ allow(described_class).to receive(:allowed?).with(user, :read_cross_project, any_args) { false }
+ end
+
+ it 'excludes issues from other projects without checking separately when passing a scope' do
+ expect(described_class).not_to receive(:allowed?).with(user, :read_issue, other_project_issue)
+
+ filters = { read_cross_project: -> (issues) { issues.where(project: project) } }
+ result = described_class.issues_readable_by_user(Issue.all, user, filters: filters)
+
+ expect(result).to contain_exactly(issue)
+ end
+ end
end
describe '.project_disabled_features_rules' do
diff --git a/spec/models/badge_spec.rb b/spec/models/badge_spec.rb
new file mode 100644
index 00000000000..33dc19e3432
--- /dev/null
+++ b/spec/models/badge_spec.rb
@@ -0,0 +1,94 @@
+require 'spec_helper'
+
+describe Badge do
+ let(:placeholder_url) { 'http://www.example.com/%{project_path}/%{project_id}/%{default_branch}/%{commit_sha}' }
+
+ describe 'validations' do
+ # Requires the let variable url_sym
+ shared_examples 'placeholder url' do
+ let(:badge) { build(:badge) }
+
+ it 'allows url with http protocol' do
+ badge[url_sym] = 'http://www.example.com'
+
+ expect(badge).to be_valid
+ end
+
+ it 'allows url with https protocol' do
+ badge[url_sym] = 'https://www.example.com'
+
+ expect(badge).to be_valid
+ end
+
+ it 'cannot be empty' do
+ badge[url_sym] = ''
+
+ expect(badge).not_to be_valid
+ end
+
+ it 'cannot be nil' do
+ badge[url_sym] = nil
+
+ expect(badge).not_to be_valid
+ end
+
+ it 'accepts badge placeholders' do
+ badge[url_sym] = placeholder_url
+
+ expect(badge).to be_valid
+ end
+
+ it 'sanitizes url' do
+ badge[url_sym] = 'javascript:alert(1)'
+
+ expect(badge).not_to be_valid
+ end
+ end
+
+ context 'link_url format' do
+ let(:url_sym) { :link_url }
+
+ it_behaves_like 'placeholder url'
+ end
+
+ context 'image_url format' do
+ let(:url_sym) { :image_url }
+
+ it_behaves_like 'placeholder url'
+ end
+ end
+
+ shared_examples 'rendered_links' do
+ it 'should use the project information to populate the url placeholders' do
+ stub_project_commit_info(project)
+
+ expect(badge.public_send("rendered_#{method}", project)).to eq "http://www.example.com/#{project.full_path}/#{project.id}/master/whatever"
+ end
+
+ it 'returns the url if the project used is nil' do
+ expect(badge.public_send("rendered_#{method}", nil)).to eq placeholder_url
+ end
+
+ def stub_project_commit_info(project)
+ allow(project).to receive(:commit).and_return(double('Commit', sha: 'whatever'))
+ allow(project).to receive(:default_branch).and_return('master')
+ end
+ end
+
+ context 'methods' do
+ let(:badge) { build(:badge, link_url: placeholder_url, image_url: placeholder_url) }
+ let!(:project) { create(:project) }
+
+ context '#rendered_link_url' do
+ let(:method) { :link_url }
+
+ it_behaves_like 'rendered_links'
+ end
+
+ context '#rendered_image_url' do
+ let(:method) { :image_url }
+
+ it_behaves_like 'rendered_links'
+ end
+ end
+end
diff --git a/spec/models/badges/group_badge_spec.rb b/spec/models/badges/group_badge_spec.rb
new file mode 100644
index 00000000000..ed7f83d0489
--- /dev/null
+++ b/spec/models/badges/group_badge_spec.rb
@@ -0,0 +1,11 @@
+require 'spec_helper'
+
+describe GroupBadge do
+ describe 'associations' do
+ it { is_expected.to belong_to(:group) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:group) }
+ end
+end
diff --git a/spec/models/badges/project_badge_spec.rb b/spec/models/badges/project_badge_spec.rb
new file mode 100644
index 00000000000..0e1a8159cb6
--- /dev/null
+++ b/spec/models/badges/project_badge_spec.rb
@@ -0,0 +1,43 @@
+require 'spec_helper'
+
+describe ProjectBadge do
+ let(:placeholder_url) { 'http://www.example.com/%{project_path}/%{project_id}/%{default_branch}/%{commit_sha}' }
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:project) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:project) }
+ end
+
+ shared_examples 'rendered_links' do
+ it 'should use the badge project information to populate the url placeholders' do
+ stub_project_commit_info(project)
+
+ expect(badge.public_send("rendered_#{method}")).to eq "http://www.example.com/#{project.full_path}/#{project.id}/master/whatever"
+ end
+
+ def stub_project_commit_info(project)
+ allow(project).to receive(:commit).and_return(double('Commit', sha: 'whatever'))
+ allow(project).to receive(:default_branch).and_return('master')
+ end
+ end
+
+ context 'methods' do
+ let(:badge) { build(:project_badge, link_url: placeholder_url, image_url: placeholder_url) }
+ let!(:project) { badge.project }
+
+ context '#rendered_link_url' do
+ let(:method) { :link_url }
+
+ it_behaves_like 'rendered_links'
+ end
+
+ context '#rendered_image_url' do
+ let(:method) { :image_url }
+
+ it_behaves_like 'rendered_links'
+ end
+ end
+end
diff --git a/spec/models/chat_name_spec.rb b/spec/models/chat_name_spec.rb
index e89e534d914..504bc710b25 100644
--- a/spec/models/chat_name_spec.rb
+++ b/spec/models/chat_name_spec.rb
@@ -14,4 +14,24 @@ describe ChatName do
it { is_expected.to validate_uniqueness_of(:user_id).scoped_to(:service_id) }
it { is_expected.to validate_uniqueness_of(:chat_id).scoped_to(:service_id, :team_id) }
+
+ describe '#update_last_used_at', :clean_gitlab_redis_shared_state do
+ it 'updates the last_used_at timestamp' do
+ expect(subject.last_used_at).to be_nil
+
+ subject.update_last_used_at
+
+ expect(subject.last_used_at).to be_present
+ end
+
+ it 'does not update last_used_at if it was recently updated' do
+ subject.update_last_used_at
+
+ time = subject.last_used_at
+
+ subject.update_last_used_at
+
+ expect(subject.last_used_at).to eq(time)
+ end
+ end
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index c27313ed88b..30a352fd090 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -80,6 +80,42 @@ describe Ci::Build do
end
end
+ describe '.with_artifacts_archive' do
+ subject { described_class.with_artifacts_archive }
+
+ context 'when job does not have an archive' do
+ let!(:job) { create(:ci_build) }
+
+ it 'does not return the job' do
+ is_expected.not_to include(job)
+ end
+ end
+
+ context 'when job has a legacy archive' do
+ let!(:job) { create(:ci_build, :legacy_artifacts) }
+
+ it 'returns the job' do
+ is_expected.to include(job)
+ end
+ end
+
+ context 'when job has a job artifact archive' do
+ let!(:job) { create(:ci_build, :artifacts) }
+
+ it 'returns the job' do
+ is_expected.to include(job)
+ end
+ end
+
+ context 'when job has a job artifact trace' do
+ let!(:job) { create(:ci_build, :trace_artifact) }
+
+ it 'does not return the job' do
+ is_expected.not_to include(job)
+ end
+ end
+ end
+
describe '#actionize' do
context 'when build is a created' do
before do
@@ -679,21 +715,21 @@ describe Ci::Build do
describe '#erase' do
before do
- build.erase(erased_by: user)
+ build.erase(erased_by: erased_by)
end
context 'erased by user' do
- let!(:user) { create(:user, username: 'eraser') }
+ let!(:erased_by) { create(:user, username: 'eraser') }
include_examples 'erasable'
it 'records user who erased a build' do
- expect(build.erased_by).to eq user
+ expect(build.erased_by).to eq erased_by
end
end
context 'erased by system' do
- let(:user) { nil }
+ let(:erased_by) { nil }
include_examples 'erasable'
@@ -748,21 +784,21 @@ describe Ci::Build do
describe '#erase' do
before do
- build.erase(erased_by: user)
+ build.erase(erased_by: erased_by)
end
context 'erased by user' do
- let!(:user) { create(:user, username: 'eraser') }
+ let!(:erased_by) { create(:user, username: 'eraser') }
include_examples 'erasable'
it 'records user who erased a build' do
- expect(build.erased_by).to eq user
+ expect(build.erased_by).to eq erased_by
end
end
context 'erased by system' do
- let(:user) { nil }
+ let(:erased_by) { nil }
include_examples 'erasable'
@@ -1424,6 +1460,17 @@ describe Ci::Build do
{ key: 'CI_COMMIT_SHA', value: build.sha, public: true },
{ key: 'CI_COMMIT_REF_NAME', value: build.ref, public: true },
{ key: 'CI_COMMIT_REF_SLUG', value: build.ref_slug, public: true },
+ { key: 'CI_REGISTRY_USER', value: 'gitlab-ci-token', public: true },
+ { key: 'CI_REGISTRY_PASSWORD', value: build.token, public: false },
+ { key: 'CI_REPOSITORY_URL', value: build.repo_url, public: false },
+ { key: 'CI_BUILD_ID', value: build.id.to_s, public: true },
+ { key: 'CI_BUILD_TOKEN', value: build.token, public: false },
+ { key: 'CI_BUILD_REF', value: build.sha, public: true },
+ { key: 'CI_BUILD_BEFORE_SHA', value: build.before_sha, public: true },
+ { key: 'CI_BUILD_REF_NAME', value: build.ref, public: true },
+ { key: 'CI_BUILD_REF_SLUG', value: build.ref_slug, public: true },
+ { key: 'CI_BUILD_NAME', value: 'test', public: true },
+ { key: 'CI_BUILD_STAGE', value: 'test', public: true },
{ key: 'CI_PROJECT_ID', value: project.id.to_s, public: true },
{ key: 'CI_PROJECT_NAME', value: project.path, public: true },
{ key: 'CI_PROJECT_PATH', value: project.full_path, public: true },
@@ -1433,9 +1480,7 @@ describe Ci::Build do
{ key: 'CI_PROJECT_VISIBILITY', value: 'private', public: true },
{ key: 'CI_PIPELINE_ID', value: pipeline.id.to_s, public: true },
{ key: 'CI_CONFIG_PATH', value: pipeline.ci_yaml_file_path, public: true },
- { key: 'CI_REGISTRY_USER', value: 'gitlab-ci-token', public: true },
- { key: 'CI_REGISTRY_PASSWORD', value: build.token, public: false },
- { key: 'CI_REPOSITORY_URL', value: build.repo_url, public: false }
+ { key: 'CI_PIPELINE_SOURCE', value: pipeline.source, public: true }
]
end
@@ -1834,39 +1879,6 @@ describe Ci::Build do
it { is_expected.to include(ci_config_path) }
end
- context 'returns variables in valid order' do
- let(:build_pre_var) { { key: 'build', value: 'value' } }
- let(:project_pre_var) { { key: 'project', value: 'value' } }
- let(:pipeline_pre_var) { { key: 'pipeline', value: 'value' } }
- let(:build_yaml_var) { { key: 'yaml', value: 'value' } }
-
- before do
- allow(build).to receive(:predefined_variables) { [build_pre_var] }
- allow(build).to receive(:yaml_variables) { [build_yaml_var] }
-
- allow_any_instance_of(Project)
- .to receive(:predefined_variables) { [project_pre_var] }
-
- allow_any_instance_of(Project)
- .to receive(:secret_variables_for)
- .with(ref: 'master', environment: nil) do
- [create(:ci_variable, key: 'secret', value: 'value')]
- end
-
- allow_any_instance_of(Ci::Pipeline)
- .to receive(:predefined_variables) { [pipeline_pre_var] }
- end
-
- it do
- is_expected.to eq(
- [build_pre_var,
- project_pre_var,
- pipeline_pre_var,
- build_yaml_var,
- { key: 'secret', value: 'value', public: false }])
- end
- end
-
context 'when using auto devops' do
context 'and is enabled' do
before do
@@ -1890,6 +1902,81 @@ describe Ci::Build do
end
end
end
+
+ context 'when pipeline variable overrides build variable' do
+ before do
+ build.yaml_variables = [{ key: 'MYVAR', value: 'myvar', public: true }]
+ pipeline.variables.build(key: 'MYVAR', value: 'pipeline value')
+ end
+
+ it 'overrides YAML variable using a pipeline variable' do
+ variables = subject.reverse.uniq { |variable| variable[:key] }.reverse
+
+ expect(variables)
+ .not_to include(key: 'MYVAR', value: 'myvar', public: true)
+ expect(variables)
+ .to include(key: 'MYVAR', value: 'pipeline value', public: false)
+ end
+ end
+
+ describe 'variables ordering' do
+ context 'when variables hierarchy is stubbed' do
+ let(:build_pre_var) { { key: 'build', value: 'value', public: true } }
+ let(:project_pre_var) { { key: 'project', value: 'value', public: true } }
+ let(:pipeline_pre_var) { { key: 'pipeline', value: 'value', public: true } }
+ let(:build_yaml_var) { { key: 'yaml', value: 'value', public: true } }
+
+ before do
+ allow(build).to receive(:predefined_variables) { [build_pre_var] }
+ allow(build).to receive(:yaml_variables) { [build_yaml_var] }
+
+ allow_any_instance_of(Project)
+ .to receive(:predefined_variables) { [project_pre_var] }
+
+ allow_any_instance_of(Project)
+ .to receive(:secret_variables_for)
+ .with(ref: 'master', environment: nil) do
+ [create(:ci_variable, key: 'secret', value: 'value')]
+ end
+
+ allow_any_instance_of(Ci::Pipeline)
+ .to receive(:predefined_variables) { [pipeline_pre_var] }
+ end
+
+ it 'returns variables in order depending on resource hierarchy' do
+ is_expected.to eq(
+ [build_pre_var,
+ project_pre_var,
+ pipeline_pre_var,
+ build_yaml_var,
+ { key: 'secret', value: 'value', public: false }])
+ end
+ end
+
+ context 'when build has environment and user-provided variables' do
+ let(:expected_variables) do
+ predefined_variables.map { |variable| variable.fetch(:key) } +
+ %w[YAML_VARIABLE CI_ENVIRONMENT_NAME CI_ENVIRONMENT_SLUG
+ CI_ENVIRONMENT_URL]
+ end
+
+ before do
+ create(:environment, project: build.project,
+ name: 'staging')
+
+ build.yaml_variables = [{ key: 'YAML_VARIABLE',
+ value: 'var',
+ public: true }]
+ build.environment = 'staging'
+ end
+
+ it 'matches explicit variables ordering' do
+ received_variables = subject.map { |variable| variable.fetch(:key) }
+
+ expect(received_variables).to eq expected_variables
+ end
+ end
+ end
end
describe 'state transition: any => [:pending]' do
@@ -1907,7 +1994,7 @@ describe Ci::Build do
context 'when depended job has not been completed yet' do
let!(:pre_stage_job) { create(:ci_build, :manual, pipeline: pipeline, name: 'test', stage_idx: 0) }
- it { expect { job.run! }.not_to raise_error(Ci::Build::MissingDependenciesError) }
+ it { expect { job.run! }.not_to raise_error }
end
context 'when artifacts of depended job has been expired' do
@@ -2014,6 +2101,35 @@ describe Ci::Build do
subject.drop!
end
+
+ context 'when retry service raises Gitlab::Access::AccessDeniedError exception' do
+ let(:retry_service) { Ci::RetryBuildService.new(subject.project, subject.user) }
+
+ before do
+ allow_any_instance_of(Ci::RetryBuildService)
+ .to receive(:execute)
+ .with(subject)
+ .and_raise(Gitlab::Access::AccessDeniedError)
+ allow(Rails.logger).to receive(:error)
+ end
+
+ it 'handles raised exception' do
+ expect { subject.drop! }.not_to raise_exception(Gitlab::Access::AccessDeniedError)
+ end
+
+ it 'logs the error' do
+ subject.drop!
+
+ expect(Rails.logger)
+ .to have_received(:error)
+ .with(a_string_matching("Unable to auto-retry job #{subject.id}"))
+ end
+
+ it 'fails the job' do
+ subject.drop!
+ expect(subject.failed?).to be_truthy
+ end
+ end
end
context 'when build is not configured to be retried' do
diff --git a/spec/models/ci/group_variable_spec.rb b/spec/models/ci/group_variable_spec.rb
index 145189e7469..1b10501701c 100644
--- a/spec/models/ci/group_variable_spec.rb
+++ b/spec/models/ci/group_variable_spec.rb
@@ -5,7 +5,7 @@ describe Ci::GroupVariable do
it { is_expected.to include_module(HasVariable) }
it { is_expected.to include_module(Presentable) }
- it { is_expected.to validate_uniqueness_of(:key).scoped_to(:group_id) }
+ it { is_expected.to validate_uniqueness_of(:key).scoped_to(:group_id).with_message(/\(\w+\) has already been taken/) }
describe '.unprotected' do
subject { described_class.unprotected }
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 14d234f6aab..4635f8cfe9d 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -170,12 +170,10 @@ describe Ci::Pipeline, :mailer do
describe '#predefined_variables' do
subject { pipeline.predefined_variables }
- it { is_expected.to be_an(Array) }
-
- it 'includes the defined keys' do
- keys = subject.map { |v| v[:key] }
+ it 'includes all predefined variables in a valid order' do
+ keys = subject.map { |variable| variable[:key] }
- expect(keys).to include('CI_PIPELINE_ID', 'CI_CONFIG_PATH', 'CI_PIPELINE_SOURCE')
+ expect(keys).to eq %w[CI_PIPELINE_ID CI_CONFIG_PATH CI_PIPELINE_SOURCE]
end
end
diff --git a/spec/models/ci/variable_spec.rb b/spec/models/ci/variable_spec.rb
index e4ff551151e..875e8b2b682 100644
--- a/spec/models/ci/variable_spec.rb
+++ b/spec/models/ci/variable_spec.rb
@@ -6,7 +6,7 @@ describe Ci::Variable do
describe 'validations' do
it { is_expected.to include_module(HasVariable) }
it { is_expected.to include_module(Presentable) }
- it { is_expected.to validate_uniqueness_of(:key).scoped_to(:project_id, :environment_scope) }
+ it { is_expected.to validate_uniqueness_of(:key).scoped_to(:project_id, :environment_scope).with_message(/\(\w+\) has already been taken/) }
end
describe '.unprotected' do
diff --git a/spec/models/clusters/applications/helm_spec.rb b/spec/models/clusters/applications/helm_spec.rb
index eb57abaf6ef..ba7bad617b4 100644
--- a/spec/models/clusters/applications/helm_spec.rb
+++ b/spec/models/clusters/applications/helm_spec.rb
@@ -1,102 +1,17 @@
require 'rails_helper'
describe Clusters::Applications::Helm do
- it { is_expected.to belong_to(:cluster) }
- it { is_expected.to validate_presence_of(:cluster) }
-
- describe '#name' do
- it 'is .application_name' do
- expect(subject.name).to eq(described_class.application_name)
- end
-
- it 'is recorded in Clusters::Cluster::APPLICATIONS' do
- expect(Clusters::Cluster::APPLICATIONS[subject.name]).to eq(described_class)
- end
- end
-
- describe '#version' do
- it 'defaults to Gitlab::Kubernetes::Helm::HELM_VERSION' do
- expect(subject.version).to eq(Gitlab::Kubernetes::Helm::HELM_VERSION)
- end
- end
-
- describe '#status' do
- let(:cluster) { create(:cluster) }
-
- subject { described_class.new(cluster: cluster) }
-
- it 'defaults to :not_installable' do
- expect(subject.status_name).to be(:not_installable)
- end
-
- context 'when platform kubernetes is defined' do
- let(:cluster) { create(:cluster, :provided_by_gcp) }
-
- it 'defaults to :installable' do
- expect(subject.status_name).to be(:installable)
- end
- end
- end
+ include_examples 'cluster application core specs', :clusters_applications_helm
describe '#install_command' do
- it 'has all the needed information' do
- expect(subject.install_command).to have_attributes(name: subject.name, install_helm: true)
- end
- end
-
- describe 'status state machine' do
- describe '#make_installing' do
- subject { create(:clusters_applications_helm, :scheduled) }
-
- it 'is installing' do
- subject.make_installing!
-
- expect(subject).to be_installing
- end
- end
-
- describe '#make_installed' do
- subject { create(:clusters_applications_helm, :installing) }
-
- it 'is installed' do
- subject.make_installed
-
- expect(subject).to be_installed
- end
- end
-
- describe '#make_errored' do
- subject { create(:clusters_applications_helm, :installing) }
- let(:reason) { 'some errors' }
-
- it 'is errored' do
- subject.make_errored(reason)
-
- expect(subject).to be_errored
- expect(subject.status_reason).to eq(reason)
- end
- end
-
- describe '#make_scheduled' do
- subject { create(:clusters_applications_helm, :installable) }
-
- it 'is scheduled' do
- subject.make_scheduled
-
- expect(subject).to be_scheduled
- end
-
- describe 'when was errored' do
- subject { create(:clusters_applications_helm, :errored) }
+ let(:helm) { create(:clusters_applications_helm) }
- it 'clears #status_reason' do
- expect(subject.status_reason).not_to be_nil
+ subject { helm.install_command }
- subject.make_scheduled!
+ it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::InitCommand) }
- expect(subject.status_reason).to be_nil
- end
- end
+ it 'should be initialized with 1 argument' do
+ expect(subject.name).to eq('helm')
end
end
end
diff --git a/spec/models/clusters/applications/ingress_spec.rb b/spec/models/clusters/applications/ingress_spec.rb
index 619c088b0bf..03f5b88a525 100644
--- a/spec/models/clusters/applications/ingress_spec.rb
+++ b/spec/models/clusters/applications/ingress_spec.rb
@@ -1,8 +1,78 @@
require 'rails_helper'
describe Clusters::Applications::Ingress do
- it { is_expected.to belong_to(:cluster) }
- it { is_expected.to validate_presence_of(:cluster) }
+ let(:ingress) { create(:clusters_applications_ingress) }
- include_examples 'cluster application specs', described_class
+ include_examples 'cluster application core specs', :clusters_applications_ingress
+ include_examples 'cluster application status specs', :cluster_application_ingress
+
+ before do
+ allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_in)
+ allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_async)
+ end
+
+ describe '#make_installed!' do
+ before do
+ application.make_installed!
+ end
+
+ let(:application) { create(:clusters_applications_ingress, :installing) }
+
+ it 'schedules a ClusterWaitForIngressIpAddressWorker' do
+ expect(ClusterWaitForIngressIpAddressWorker).to have_received(:perform_in)
+ .with(Clusters::Applications::Ingress::FETCH_IP_ADDRESS_DELAY, 'ingress', application.id)
+ end
+ end
+
+ describe '#schedule_status_update' do
+ let(:application) { create(:clusters_applications_ingress, :installed) }
+
+ before do
+ application.schedule_status_update
+ end
+
+ it 'schedules a ClusterWaitForIngressIpAddressWorker' do
+ expect(ClusterWaitForIngressIpAddressWorker).to have_received(:perform_async)
+ .with('ingress', application.id)
+ end
+
+ context 'when the application is not installed' do
+ let(:application) { create(:clusters_applications_ingress, :installing) }
+
+ it 'does not schedule a ClusterWaitForIngressIpAddressWorker' do
+ expect(ClusterWaitForIngressIpAddressWorker).not_to have_received(:perform_async)
+ end
+ end
+
+ context 'when there is already an external_ip' do
+ let(:application) { create(:clusters_applications_ingress, :installed, external_ip: '111.222.222.111') }
+
+ it 'does not schedule a ClusterWaitForIngressIpAddressWorker' do
+ expect(ClusterWaitForIngressIpAddressWorker).not_to have_received(:perform_in)
+ end
+ end
+ end
+
+ describe '#install_command' do
+ subject { ingress.install_command }
+
+ it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::InstallCommand) }
+
+ it 'should be initialized with ingress arguments' do
+ expect(subject.name).to eq('ingress')
+ expect(subject.chart).to eq('stable/nginx-ingress')
+ expect(subject.values).to eq(ingress.values)
+ end
+ end
+
+ describe '#values' do
+ subject { ingress.values }
+
+ it 'should include valid ingress keys' do
+ is_expected.to include('image')
+ is_expected.to include('repository')
+ is_expected.to include('stats')
+ is_expected.to include('podAnnotations')
+ end
+ end
end
diff --git a/spec/models/clusters/applications/prometheus_spec.rb b/spec/models/clusters/applications/prometheus_spec.rb
index 01037919530..2905b58066b 100644
--- a/spec/models/clusters/applications/prometheus_spec.rb
+++ b/spec/models/clusters/applications/prometheus_spec.rb
@@ -1,10 +1,8 @@
require 'rails_helper'
describe Clusters::Applications::Prometheus do
- it { is_expected.to belong_to(:cluster) }
- it { is_expected.to validate_presence_of(:cluster) }
-
- include_examples 'cluster application specs', described_class
+ include_examples 'cluster application core specs', :clusters_applications_prometheus
+ include_examples 'cluster application status specs', :cluster_application_prometheus
describe 'transition to installed' do
let(:project) { create(:project) }
@@ -24,19 +22,11 @@ describe Clusters::Applications::Prometheus do
end
end
- describe "#chart_values_file" do
- subject { create(:clusters_applications_prometheus).chart_values_file }
-
- it 'should return chart values file path' do
- expect(subject).to eq("#{Rails.root}/vendor/prometheus/values.yaml")
- end
- end
-
- describe '#proxy_client' do
+ describe '#prometheus_client' do
context 'cluster is nil' do
it 'returns nil' do
expect(subject.cluster).to be_nil
- expect(subject.proxy_client).to be_nil
+ expect(subject.prometheus_client).to be_nil
end
end
@@ -45,7 +35,7 @@ describe Clusters::Applications::Prometheus do
subject { create(:clusters_applications_prometheus, cluster: cluster) }
it 'returns nil' do
- expect(subject.proxy_client).to be_nil
+ expect(subject.prometheus_client).to be_nil
end
end
@@ -73,16 +63,45 @@ describe Clusters::Applications::Prometheus do
end
it 'creates proxy prometheus rest client' do
- expect(subject.proxy_client).to be_instance_of(RestClient::Resource)
+ expect(subject.prometheus_client).to be_instance_of(RestClient::Resource)
end
it 'creates proper url' do
- expect(subject.proxy_client.url).to eq('http://example.com/api/v1/proxy/namespaces/gitlab-managed-apps/service/prometheus-prometheus-server:80')
+ expect(subject.prometheus_client.url).to eq('http://example.com/api/v1/proxy/namespaces/gitlab-managed-apps/service/prometheus-prometheus-server:80')
end
it 'copies options and headers from kube client to proxy client' do
- expect(subject.proxy_client.options).to eq(kube_client.rest_client.options.merge(headers: kube_client.headers))
+ expect(subject.prometheus_client.options).to eq(kube_client.rest_client.options.merge(headers: kube_client.headers))
end
end
end
+
+ describe '#install_command' do
+ let(:kubeclient) { double('kubernetes client') }
+ let(:prometheus) { create(:clusters_applications_prometheus) }
+
+ subject { prometheus.install_command }
+
+ it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::InstallCommand) }
+
+ it 'should be initialized with 3 arguments' do
+ expect(subject.name).to eq('prometheus')
+ expect(subject.chart).to eq('stable/prometheus')
+ expect(subject.values).to eq(prometheus.values)
+ end
+ end
+
+ describe '#values' do
+ let(:prometheus) { create(:clusters_applications_prometheus) }
+
+ subject { prometheus.values }
+
+ it 'should include valid prometheus values' do
+ is_expected.to include('alertmanager')
+ is_expected.to include('kubeStateMetrics')
+ is_expected.to include('nodeExporter')
+ is_expected.to include('pushgateway')
+ is_expected.to include('serverFiles')
+ end
+ end
end
diff --git a/spec/models/clusters/applications/runner_spec.rb b/spec/models/clusters/applications/runner_spec.rb
new file mode 100644
index 00000000000..a574779e39d
--- /dev/null
+++ b/spec/models/clusters/applications/runner_spec.rb
@@ -0,0 +1,99 @@
+require 'rails_helper'
+
+describe Clusters::Applications::Runner do
+ let(:ci_runner) { create(:ci_runner) }
+
+ include_examples 'cluster application core specs', :clusters_applications_runner
+ include_examples 'cluster application status specs', :cluster_application_runner
+
+ it { is_expected.to belong_to(:runner) }
+
+ describe '#install_command' do
+ let(:kubeclient) { double('kubernetes client') }
+ let(:gitlab_runner) { create(:clusters_applications_runner, runner: ci_runner) }
+
+ subject { gitlab_runner.install_command }
+
+ it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::InstallCommand) }
+
+ it 'should be initialized with 4 arguments' do
+ expect(subject.name).to eq('runner')
+ expect(subject.chart).to eq('runner/gitlab-runner')
+ expect(subject.repository).to eq('https://charts.gitlab.io')
+ expect(subject.values).to eq(gitlab_runner.values)
+ end
+ end
+
+ describe '#values' do
+ let(:gitlab_runner) { create(:clusters_applications_runner, runner: ci_runner) }
+
+ subject { gitlab_runner.values }
+
+ it 'should include valid runner values' do
+ is_expected.to include('concurrent')
+ is_expected.to include('checkInterval')
+ is_expected.to include('rbac')
+ is_expected.to include('runners')
+ is_expected.to include('privileged: true')
+ is_expected.to include('image: ubuntu:16.04')
+ is_expected.to include('resources')
+ is_expected.to include("runnerToken: #{ci_runner.token}")
+ is_expected.to include("gitlabUrl: #{Gitlab::Routing.url_helpers.root_url}")
+ end
+
+ context 'without a runner' do
+ let(:project) { create(:project) }
+ let(:cluster) { create(:cluster) }
+ let(:gitlab_runner) { create(:clusters_applications_runner, cluster: cluster) }
+
+ before do
+ cluster.projects << project
+ end
+
+ it 'creates a runner' do
+ expect do
+ subject
+ end.to change { Ci::Runner.count }.by(1)
+ end
+
+ it 'uses the new runner token' do
+ expect(subject).to include("runnerToken: #{gitlab_runner.reload.runner.token}")
+ end
+
+ it 'assigns the new runner to runner' do
+ subject
+ gitlab_runner.reload
+
+ expect(gitlab_runner.runner).not_to be_nil
+ end
+ end
+
+ context 'with duplicated values on vendor/runner/values.yaml' do
+ let(:values) do
+ {
+ "concurrent" => 4,
+ "checkInterval" => 3,
+ "rbac" => {
+ "create" => false
+ },
+ "clusterWideAccess" => false,
+ "runners" => {
+ "privileged" => false,
+ "image" => "ubuntu:16.04",
+ "builds" => {},
+ "services" => {},
+ "helpers" => {}
+ }
+ }
+ end
+
+ before do
+ allow(gitlab_runner).to receive(:chart_values).and_return(values)
+ end
+
+ it 'should overwrite values.yaml' do
+ is_expected.to include("privileged: #{gitlab_runner.privileged}")
+ end
+ end
+ end
+end
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index 799d7ced116..8f12a0e3085 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -8,6 +8,7 @@ describe Clusters::Cluster do
it { is_expected.to have_one(:application_helm) }
it { is_expected.to have_one(:application_ingress) }
it { is_expected.to have_one(:application_prometheus) }
+ it { is_expected.to have_one(:application_runner) }
it { is_expected.to delegate_method(:status).to(:provider) }
it { is_expected.to delegate_method(:status_reason).to(:provider) }
it { is_expected.to delegate_method(:status_name).to(:provider) }
@@ -196,9 +197,10 @@ describe Clusters::Cluster do
let!(:helm) { create(:clusters_applications_helm, cluster: cluster) }
let!(:ingress) { create(:clusters_applications_ingress, cluster: cluster) }
let!(:prometheus) { create(:clusters_applications_prometheus, cluster: cluster) }
+ let!(:runner) { create(:clusters_applications_runner, cluster: cluster) }
it 'returns a list of created applications' do
- is_expected.to contain_exactly(helm, ingress, prometheus)
+ is_expected.to contain_exactly(helm, ingress, prometheus, runner)
end
end
end
diff --git a/spec/models/clusters/platforms/kubernetes_spec.rb b/spec/models/clusters/platforms/kubernetes_spec.rb
index 53a4e545ff6..add481b8096 100644
--- a/spec/models/clusters/platforms/kubernetes_spec.rb
+++ b/spec/models/clusters/platforms/kubernetes_spec.rb
@@ -252,7 +252,7 @@ describe Clusters::Platforms::Kubernetes, :use_clean_rails_memory_store_caching
stub_kubeclient_pods(status: 500)
end
- it { expect { subject }.to raise_error(KubeException) }
+ it { expect { subject }.to raise_error(Kubeclient::HttpError) }
end
context 'when kubernetes responds with 404s' do
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index c536dab2681..b7ed8be69fc 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -368,7 +368,9 @@ describe CommitStatus do
'rspec:windows 0 : / 1' => 'rspec:windows',
'rspec:windows 0 : / 1 name' => 'rspec:windows name',
'0 1 name ruby' => 'name ruby',
- '0 :/ 1 name ruby' => 'name ruby'
+ '0 :/ 1 name ruby' => 'name ruby',
+ 'golang test 1.8' => 'golang test',
+ '1.9 golang test' => 'golang test'
}
tests.each do |name, group_name|
diff --git a/spec/models/compare_spec.rb b/spec/models/compare_spec.rb
index 04f3cecae00..8e88bb81162 100644
--- a/spec/models/compare_spec.rb
+++ b/spec/models/compare_spec.rb
@@ -37,33 +37,51 @@ describe Compare do
end
end
- describe '#base_commit' do
- let(:base_commit) { Commit.new(another_sample_commit, project) }
+ describe '#base_commit_sha' do
+ it 'returns @base_sha if it is present' do
+ expect(project).not_to receive(:merge_base_commit)
- it 'returns project merge base commit' do
- expect(project).to receive(:merge_base_commit).with(start_commit.id, head_commit.id).and_return(base_commit)
+ sha = double
+ service = described_class.new(raw_compare, project, base_sha: sha)
- expect(subject.base_commit).to eq(base_commit)
+ expect(service.base_commit_sha).to eq(sha)
+ end
+
+ it 'fetches merge base SHA from repo when @base_sha is nil' do
+ expect(project).to receive(:merge_base_commit)
+ .with(start_commit.id, head_commit.id)
+ .once
+ .and_call_original
+
+ expect(subject.base_commit_sha)
+ .to eq(project.repository.merge_base(start_commit.id, head_commit.id))
+ end
+
+ it 'is memoized on first call' do
+ expect(project).to receive(:merge_base_commit)
+ .with(start_commit.id, head_commit.id)
+ .once
+ .and_call_original
+
+ 3.times { subject.base_commit_sha }
end
it 'returns nil if there is no start_commit' do
expect(subject).to receive(:start_commit).and_return(nil)
- expect(subject.base_commit).to eq(nil)
+ expect(subject.base_commit_sha).to eq(nil)
end
it 'returns nil if there is no head commit' do
expect(subject).to receive(:head_commit).and_return(nil)
- expect(subject.base_commit).to eq(nil)
+ expect(subject.base_commit_sha).to eq(nil)
end
end
describe '#diff_refs' do
- it 'uses base_commit sha as base_sha' do
- expect(subject).to receive(:base_commit).at_least(:once).and_call_original
-
- expect(subject.diff_refs.base_sha).to eq(subject.base_commit.id)
+ it 'uses base_commit_sha as base_sha' do
+ expect(subject.diff_refs.base_sha).to eq(subject.base_commit_sha)
end
it 'uses start_commit sha as start_sha' do
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index 4b217df2e8f..f8874d14e3f 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -34,7 +34,7 @@ describe Issuable do
subject { build(:issue) }
before do
- allow(subject).to receive(:set_iid).and_return(false)
+ allow(InternalId).to receive(:generate_next).and_return(nil)
end
it { is_expected.to validate_presence_of(:project) }
diff --git a/spec/models/concerns/prometheus_adapter_spec.rb b/spec/models/concerns/prometheus_adapter_spec.rb
new file mode 100644
index 00000000000..f4b9c57e71a
--- /dev/null
+++ b/spec/models/concerns/prometheus_adapter_spec.rb
@@ -0,0 +1,121 @@
+require 'spec_helper'
+
+describe PrometheusAdapter, :use_clean_rails_memory_store_caching do
+ include PrometheusHelpers
+ include ReactiveCachingHelpers
+
+ class TestClass
+ include PrometheusAdapter
+ end
+
+ let(:project) { create(:prometheus_project) }
+ let(:service) { project.prometheus_service }
+
+ let(:described_class) { TestClass }
+ let(:environment_query) { Gitlab::Prometheus::Queries::EnvironmentQuery }
+
+ describe '#query' do
+ describe 'environment' do
+ let(:environment) { build_stubbed(:environment, slug: 'env-slug') }
+
+ around do |example|
+ Timecop.freeze { example.run }
+ end
+
+ context 'with valid data' do
+ subject { service.query(:environment, environment) }
+
+ before do
+ stub_reactive_cache(service, prometheus_data, environment_query, environment.id)
+ end
+
+ it 'returns reactive data' do
+ is_expected.to eq(prometheus_metrics_data)
+ end
+ end
+ end
+
+ describe 'matched_metrics' do
+ let(:matched_metrics_query) { Gitlab::Prometheus::Queries::MatchedMetricQuery }
+ let(:prometheus_client_wrapper) { double(:prometheus_client_wrapper, label_values: nil) }
+
+ context 'with valid data' do
+ subject { service.query(:matched_metrics) }
+
+ before do
+ allow(service).to receive(:prometheus_client_wrapper).and_return(prometheus_client_wrapper)
+ synchronous_reactive_cache(service)
+ end
+
+ it 'returns reactive data' do
+ expect(subject[:success]).to be_truthy
+ expect(subject[:data]).to eq([])
+ end
+ end
+ end
+
+ describe 'deployment' do
+ let(:deployment) { build_stubbed(:deployment) }
+ let(:deployment_query) { Gitlab::Prometheus::Queries::DeploymentQuery }
+
+ around do |example|
+ Timecop.freeze { example.run }
+ end
+
+ context 'with valid data' do
+ subject { service.query(:deployment, deployment) }
+
+ before do
+ stub_reactive_cache(service, prometheus_data, deployment_query, deployment.id)
+ end
+
+ it 'returns reactive data' do
+ expect(subject).to eq(prometheus_metrics_data)
+ end
+ end
+ end
+ end
+
+ describe '#calculate_reactive_cache' do
+ let(:environment) { create(:environment, slug: 'env-slug') }
+ before do
+ service.manual_configuration = true
+ service.active = true
+ end
+
+ subject do
+ service.calculate_reactive_cache(environment_query.name, environment.id)
+ end
+
+ around do |example|
+ Timecop.freeze { example.run }
+ end
+
+ context 'when service is inactive' do
+ before do
+ service.active = false
+ end
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when Prometheus responds with valid data' do
+ before do
+ stub_all_prometheus_requests(environment.slug)
+ end
+
+ it { expect(subject.to_json).to eq(prometheus_data.to_json) }
+ it { expect(subject.to_json).to eq(prometheus_data.to_json) }
+ end
+
+ [404, 500].each do |status|
+ context "when Prometheus responds with #{status}" do
+ before do
+ stub_all_prometheus_requests(environment.slug, status: status, body: "QUERY FAILED!")
+ end
+
+ it { is_expected.to eq(success: false, result: %(#{status} - "QUERY FAILED!")) }
+ end
+ end
+ end
+end
diff --git a/spec/models/concerns/protected_ref_access_spec.rb b/spec/models/concerns/protected_ref_access_spec.rb
new file mode 100644
index 00000000000..a62ca391e25
--- /dev/null
+++ b/spec/models/concerns/protected_ref_access_spec.rb
@@ -0,0 +1,31 @@
+require 'spec_helper'
+
+describe ProtectedRefAccess do
+ subject(:protected_ref_access) do
+ create(:protected_branch, :masters_can_push).push_access_levels.first
+ end
+
+ let(:project) { protected_ref_access.project }
+
+ describe '#check_access' do
+ it 'is always true for admins' do
+ admin = create(:admin)
+
+ expect(protected_ref_access.check_access(admin)).to be_truthy
+ end
+
+ it 'is true for masters' do
+ master = create(:user)
+ project.add_master(master)
+
+ expect(protected_ref_access.check_access(master)).to be_truthy
+ end
+
+ it 'is false for developers of the project' do
+ developer = create(:user)
+ project.add_developer(developer)
+
+ expect(protected_ref_access.check_access(developer)).to be_falsy
+ end
+ end
+end
diff --git a/spec/models/cycle_analytics/code_spec.rb b/spec/models/cycle_analytics/code_spec.rb
index f2f1928926c..6a6b58fb52b 100644
--- a/spec/models/cycle_analytics/code_spec.rb
+++ b/spec/models/cycle_analytics/code_spec.rb
@@ -18,11 +18,11 @@ describe 'CycleAnalytics#code' do
end]],
end_time_conditions: [["merge request that closes issue is created",
-> (context, data) do
- context.create_merge_request_closing_issue(data[:issue])
+ context.create_merge_request_closing_issue(context.user, context.project, data[:issue])
end]],
post_fn: -> (context, data) do
- context.merge_merge_requests_closing_issue(data[:issue])
- context.deploy_master
+ context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue])
+ context.deploy_master(context.user, context.project)
end)
context "when a regular merge request (that doesn't close the issue) is created" do
@@ -30,10 +30,10 @@ describe 'CycleAnalytics#code' do
issue = create(:issue, project: project)
create_commit_referencing_issue(issue)
- create_merge_request_closing_issue(issue, message: "Closes nothing")
+ create_merge_request_closing_issue(user, project, issue, message: "Closes nothing")
- merge_merge_requests_closing_issue(issue)
- deploy_master
+ merge_merge_requests_closing_issue(user, project, issue)
+ deploy_master(user, project)
expect(subject[:code].median).to be_nil
end
@@ -50,10 +50,10 @@ describe 'CycleAnalytics#code' do
end]],
end_time_conditions: [["merge request that closes issue is created",
-> (context, data) do
- context.create_merge_request_closing_issue(data[:issue])
+ context.create_merge_request_closing_issue(context.user, context.project, data[:issue])
end]],
post_fn: -> (context, data) do
- context.merge_merge_requests_closing_issue(data[:issue])
+ context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue])
end)
context "when a regular merge request (that doesn't close the issue) is created" do
@@ -61,9 +61,9 @@ describe 'CycleAnalytics#code' do
issue = create(:issue, project: project)
create_commit_referencing_issue(issue)
- create_merge_request_closing_issue(issue, message: "Closes nothing")
+ create_merge_request_closing_issue(user, project, issue, message: "Closes nothing")
- merge_merge_requests_closing_issue(issue)
+ merge_merge_requests_closing_issue(user, project, issue)
expect(subject[:code].median).to be_nil
end
diff --git a/spec/models/cycle_analytics/issue_spec.rb b/spec/models/cycle_analytics/issue_spec.rb
index 985e1bf80be..45f1b4fe8a3 100644
--- a/spec/models/cycle_analytics/issue_spec.rb
+++ b/spec/models/cycle_analytics/issue_spec.rb
@@ -26,8 +26,8 @@ describe 'CycleAnalytics#issue' do
end]],
post_fn: -> (context, data) do
if data[:issue].persisted?
- context.create_merge_request_closing_issue(data[:issue].reload)
- context.merge_merge_requests_closing_issue(data[:issue])
+ context.create_merge_request_closing_issue(context.user, context.project, data[:issue].reload)
+ context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue])
end
end)
@@ -37,8 +37,8 @@ describe 'CycleAnalytics#issue' do
issue = create(:issue, project: project)
issue.update(label_ids: [regular_label.id])
- create_merge_request_closing_issue(issue)
- merge_merge_requests_closing_issue(issue)
+ create_merge_request_closing_issue(user, project, issue)
+ merge_merge_requests_closing_issue(user, project, issue)
expect(subject[:issue].median).to be_nil
end
diff --git a/spec/models/cycle_analytics/plan_spec.rb b/spec/models/cycle_analytics/plan_spec.rb
index 6fbb2a2d102..d366e2b723a 100644
--- a/spec/models/cycle_analytics/plan_spec.rb
+++ b/spec/models/cycle_analytics/plan_spec.rb
@@ -29,8 +29,8 @@ describe 'CycleAnalytics#plan' do
context.create_commit_referencing_issue(data[:issue], branch_name: data[:branch_name])
end]],
post_fn: -> (context, data) do
- context.create_merge_request_closing_issue(data[:issue], source_branch: data[:branch_name])
- context.merge_merge_requests_closing_issue(data[:issue])
+ context.create_merge_request_closing_issue(context.user, context.project, data[:issue], source_branch: data[:branch_name])
+ context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue])
end)
context "when a regular label (instead of a list label) is added to the issue" do
@@ -41,8 +41,8 @@ describe 'CycleAnalytics#plan' do
issue.update(label_ids: [label.id])
create_commit_referencing_issue(issue, branch_name: branch_name)
- create_merge_request_closing_issue(issue, source_branch: branch_name)
- merge_merge_requests_closing_issue(issue)
+ create_merge_request_closing_issue(user, project, issue, source_branch: branch_name)
+ merge_merge_requests_closing_issue(user, project, issue)
expect(subject[:issue].median).to be_nil
end
diff --git a/spec/models/cycle_analytics/production_spec.rb b/spec/models/cycle_analytics/production_spec.rb
index f8681c0a2f9..156eb96cfce 100644
--- a/spec/models/cycle_analytics/production_spec.rb
+++ b/spec/models/cycle_analytics/production_spec.rb
@@ -13,11 +13,11 @@ describe 'CycleAnalytics#production' do
data_fn: -> (context) { { issue: context.build(:issue, project: context.project) } },
start_time_conditions: [["issue is created", -> (context, data) { data[:issue].save }]],
before_end_fn: lambda do |context, data|
- context.create_merge_request_closing_issue(data[:issue])
- context.merge_merge_requests_closing_issue(data[:issue])
+ context.create_merge_request_closing_issue(context.user, context.project, data[:issue])
+ context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue])
end,
end_time_conditions:
- [["merge request that closes issue is deployed to production", -> (context, data) { context.deploy_master }],
+ [["merge request that closes issue is deployed to production", -> (context, data) { context.deploy_master(context.user, context.project) }],
["production deploy happens after merge request is merged (along with other changes)",
lambda do |context, data|
# Make other changes on master
@@ -29,14 +29,14 @@ describe 'CycleAnalytics#production' do
branch_name: 'master')
context.project.repository.commit(sha)
- context.deploy_master
+ context.deploy_master(context.user, context.project)
end]])
context "when a regular merge request (that doesn't close the issue) is merged and deployed" do
it "returns nil" do
merge_request = create(:merge_request)
MergeRequests::MergeService.new(project, user).execute(merge_request)
- deploy_master
+ deploy_master(user, project)
expect(subject[:production].median).to be_nil
end
@@ -45,9 +45,9 @@ describe 'CycleAnalytics#production' do
context "when the deployment happens to a non-production environment" do
it "returns nil" do
issue = create(:issue, project: project)
- merge_request = create_merge_request_closing_issue(issue)
+ merge_request = create_merge_request_closing_issue(user, project, issue)
MergeRequests::MergeService.new(project, user).execute(merge_request)
- deploy_master(environment: 'staging')
+ deploy_master(user, project, environment: 'staging')
expect(subject[:production].median).to be_nil
end
diff --git a/spec/models/cycle_analytics/review_spec.rb b/spec/models/cycle_analytics/review_spec.rb
index 0ac58695b35..0aedfb49cb5 100644
--- a/spec/models/cycle_analytics/review_spec.rb
+++ b/spec/models/cycle_analytics/review_spec.rb
@@ -13,11 +13,11 @@ describe 'CycleAnalytics#review' do
data_fn: -> (context) { { issue: context.create(:issue, project: context.project) } },
start_time_conditions: [["merge request that closes issue is created",
-> (context, data) do
- context.create_merge_request_closing_issue(data[:issue])
+ context.create_merge_request_closing_issue(context.user, context.project, data[:issue])
end]],
end_time_conditions: [["merge request that closes issue is merged",
-> (context, data) do
- context.merge_merge_requests_closing_issue(data[:issue])
+ context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue])
end]],
post_fn: nil)
diff --git a/spec/models/cycle_analytics/staging_spec.rb b/spec/models/cycle_analytics/staging_spec.rb
index b66d5623910..0cbda50c688 100644
--- a/spec/models/cycle_analytics/staging_spec.rb
+++ b/spec/models/cycle_analytics/staging_spec.rb
@@ -13,15 +13,15 @@ describe 'CycleAnalytics#staging' do
phase: :staging,
data_fn: lambda do |context|
issue = context.create(:issue, project: context.project)
- { issue: issue, merge_request: context.create_merge_request_closing_issue(issue) }
+ { issue: issue, merge_request: context.create_merge_request_closing_issue(context.user, context.project, issue) }
end,
start_time_conditions: [["merge request that closes issue is merged",
-> (context, data) do
- context.merge_merge_requests_closing_issue(data[:issue])
+ context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue])
end]],
end_time_conditions: [["merge request that closes issue is deployed to production",
-> (context, data) do
- context.deploy_master
+ context.deploy_master(context.user, context.project)
end],
["production deploy happens after merge request is merged (along with other changes)",
lambda do |context, data|
@@ -34,14 +34,14 @@ describe 'CycleAnalytics#staging' do
branch_name: 'master')
context.project.repository.commit(sha)
- context.deploy_master
+ context.deploy_master(context.user, context.project)
end]])
context "when a regular merge request (that doesn't close the issue) is merged and deployed" do
it "returns nil" do
merge_request = create(:merge_request)
MergeRequests::MergeService.new(project, user).execute(merge_request)
- deploy_master
+ deploy_master(user, project)
expect(subject[:staging].median).to be_nil
end
@@ -50,9 +50,9 @@ describe 'CycleAnalytics#staging' do
context "when the deployment happens to a non-production environment" do
it "returns nil" do
issue = create(:issue, project: project)
- merge_request = create_merge_request_closing_issue(issue)
+ merge_request = create_merge_request_closing_issue(user, project, issue)
MergeRequests::MergeService.new(project, user).execute(merge_request)
- deploy_master(environment: 'staging')
+ deploy_master(user, project, environment: 'staging')
expect(subject[:staging].median).to be_nil
end
diff --git a/spec/models/cycle_analytics/test_spec.rb b/spec/models/cycle_analytics/test_spec.rb
index 690c09bc2dc..e58b8fdff58 100644
--- a/spec/models/cycle_analytics/test_spec.rb
+++ b/spec/models/cycle_analytics/test_spec.rb
@@ -12,26 +12,26 @@ describe 'CycleAnalytics#test' do
phase: :test,
data_fn: lambda do |context|
issue = context.create(:issue, project: context.project)
- merge_request = context.create_merge_request_closing_issue(issue)
+ merge_request = context.create_merge_request_closing_issue(context.user, context.project, issue)
pipeline = context.create(:ci_pipeline, ref: merge_request.source_branch, sha: merge_request.diff_head_sha, project: context.project, head_pipeline_of: merge_request)
{ pipeline: pipeline, issue: issue }
end,
start_time_conditions: [["pipeline is started", -> (context, data) { data[:pipeline].run! }]],
end_time_conditions: [["pipeline is finished", -> (context, data) { data[:pipeline].succeed! }]],
post_fn: -> (context, data) do
- context.merge_merge_requests_closing_issue(data[:issue])
+ context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue])
end)
context "when the pipeline is for a regular merge request (that doesn't close an issue)" do
it "returns nil" do
issue = create(:issue, project: project)
- merge_request = create_merge_request_closing_issue(issue)
+ merge_request = create_merge_request_closing_issue(user, project, issue)
pipeline = create(:ci_pipeline, ref: "refs/heads/#{merge_request.source_branch}", sha: merge_request.diff_head_sha)
pipeline.run!
pipeline.succeed!
- merge_merge_requests_closing_issue(issue)
+ merge_merge_requests_closing_issue(user, project, issue)
expect(subject[:test].median).to be_nil
end
@@ -51,13 +51,13 @@ describe 'CycleAnalytics#test' do
context "when the pipeline is dropped (failed)" do
it "returns nil" do
issue = create(:issue, project: project)
- merge_request = create_merge_request_closing_issue(issue)
+ merge_request = create_merge_request_closing_issue(user, project, issue)
pipeline = create(:ci_pipeline, ref: "refs/heads/#{merge_request.source_branch}", sha: merge_request.diff_head_sha)
pipeline.run!
pipeline.drop!
- merge_merge_requests_closing_issue(issue)
+ merge_merge_requests_closing_issue(user, project, issue)
expect(subject[:test].median).to be_nil
end
@@ -66,13 +66,13 @@ describe 'CycleAnalytics#test' do
context "when the pipeline is cancelled" do
it "returns nil" do
issue = create(:issue, project: project)
- merge_request = create_merge_request_closing_issue(issue)
+ merge_request = create_merge_request_closing_issue(user, project, issue)
pipeline = create(:ci_pipeline, ref: "refs/heads/#{merge_request.source_branch}", sha: merge_request.diff_head_sha)
pipeline.run!
pipeline.cancel!
- merge_merge_requests_closing_issue(issue)
+ merge_merge_requests_closing_issue(user, project, issue)
expect(subject[:test].median).to be_nil
end
diff --git a/spec/models/cycle_analytics_spec.rb b/spec/models/cycle_analytics_spec.rb
new file mode 100644
index 00000000000..0fe24870f02
--- /dev/null
+++ b/spec/models/cycle_analytics_spec.rb
@@ -0,0 +1,30 @@
+require 'spec_helper'
+
+describe CycleAnalytics do
+ let(:project) { create(:project, :repository) }
+ let(:from_date) { 10.days.ago }
+ let(:user) { create(:user, :admin) }
+ let(:issue) { create(:issue, project: project, created_at: 2.days.ago) }
+ let(:milestone) { create(:milestone, project: project) }
+ let(:mr) { create_merge_request_closing_issue(user, project, issue, commit_message: "References #{issue.to_reference}") }
+ let(:pipeline) { create(:ci_empty_pipeline, status: 'created', project: project, ref: mr.source_branch, sha: mr.source_branch_sha, head_pipeline_of: mr) }
+
+ subject { described_class.new(project, from: from_date) }
+
+ describe '#all_medians_per_stage' do
+ before do
+ allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return([issue])
+
+ create_cycle(user, project, issue, mr, milestone, pipeline)
+ deploy_master(user, project)
+ end
+
+ it 'returns every median for each stage for a specific project' do
+ values = described_class::STAGES.each_with_object({}) do |stage_name, hsh|
+ hsh[stage_name] = subject[stage_name].median.presence
+ end
+
+ expect(subject.all_medians_per_stage).to eq(values)
+ end
+ end
+end
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index ba8aa13d5ad..ac30cd27e0c 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -64,6 +64,7 @@ describe Deployment do
describe '#metrics' do
let(:deployment) { create(:deployment) }
+ let(:prometheus_adapter) { double('prometheus_adapter', can_query?: true) }
subject { deployment.metrics }
@@ -76,17 +77,16 @@ describe Deployment do
{
success: true,
metrics: {},
- last_update: 42,
- deployment_time: 1494408956
+ last_update: 42
}
end
before do
- allow(deployment.project).to receive_message_chain(:monitoring_service, :deployment_metrics)
- .with(any_args).and_return(simple_metrics)
+ allow(deployment).to receive(:prometheus_adapter).and_return(prometheus_adapter)
+ allow(prometheus_adapter).to receive(:query).with(:deployment, deployment).and_return(simple_metrics)
end
- it { is_expected.to eq(simple_metrics) }
+ it { is_expected.to eq(simple_metrics.merge({ deployment_time: deployment.created_at.to_i })) }
end
end
@@ -109,11 +109,11 @@ describe Deployment do
}
end
- let(:prometheus_service) { double('prometheus_service') }
+ let(:prometheus_adapter) { double('prometheus_adapter', can_query?: true) }
before do
- allow(project).to receive(:prometheus_service).and_return(prometheus_service)
- allow(prometheus_service).to receive(:additional_deployment_metrics).and_return(simple_metrics)
+ allow(deployment).to receive(:prometheus_adapter).and_return(prometheus_adapter)
+ allow(prometheus_adapter).to receive(:query).with(:additional_metrics_deployment, deployment).and_return(simple_metrics)
end
it { is_expected.to eq(simple_metrics.merge({ deployment_time: deployment.created_at.to_i })) }
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 6f24a039998..412eca4a56b 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe Environment do
- set(:project) { create(:project) }
+ let(:project) { create(:project) }
subject(:environment) { create(:environment, project: project) }
it { is_expected.to belong_to(:project) }
@@ -142,15 +142,15 @@ describe Environment do
let(:commit) { project.commit.parent }
it 'returns deployment id for the environment' do
- expect(environment.first_deployment_for(commit)).to eq deployment1
+ expect(environment.first_deployment_for(commit.id)).to eq deployment1
end
it 'return nil when no deployment is found' do
- expect(environment.first_deployment_for(head_commit)).to eq nil
+ expect(environment.first_deployment_for(head_commit.id)).to eq nil
end
it 'returns a UTF-8 ref' do
- expect(environment.first_deployment_for(commit).ref).to be_utf8
+ expect(environment.first_deployment_for(commit.id).ref).to be_utf8
end
end
@@ -452,8 +452,8 @@ describe Environment do
end
it 'returns the metrics from the deployment service' do
- expect(project.monitoring_service)
- .to receive(:environment_metrics).with(environment)
+ expect(environment.prometheus_adapter)
+ .to receive(:query).with(:environment, environment)
.and_return(:fake_metrics)
is_expected.to eq(:fake_metrics)
@@ -508,12 +508,12 @@ describe Environment do
context 'when the environment has additional metrics' do
before do
- allow(environment).to receive(:has_additional_metrics?).and_return(true)
+ allow(environment).to receive(:has_metrics?).and_return(true)
end
it 'returns the additional metrics from the deployment service' do
- expect(project.prometheus_service).to receive(:additional_environment_metrics)
- .with(environment)
+ expect(environment.prometheus_adapter).to receive(:query)
+ .with(:additional_metrics_environment, environment)
.and_return(:fake_metrics)
is_expected.to eq(:fake_metrics)
@@ -522,46 +522,13 @@ describe Environment do
context 'when the environment does not have metrics' do
before do
- allow(environment).to receive(:has_additional_metrics?).and_return(false)
+ allow(environment).to receive(:has_metrics?).and_return(false)
end
it { is_expected.to be_nil }
end
end
- describe '#has_additional_metrics??' do
- subject { environment.has_additional_metrics? }
-
- context 'when the enviroment is available' do
- context 'with a deployment service' do
- let(:project) { create(:prometheus_project) }
-
- context 'and a deployment' do
- let!(:deployment) { create(:deployment, environment: environment) }
- it { is_expected.to be_truthy }
- end
-
- context 'but no deployments' do
- it { is_expected.to be_falsy }
- end
- end
-
- context 'without a monitoring service' do
- it { is_expected.to be_falsy }
- end
- end
-
- context 'when the environment is unavailable' do
- let(:project) { create(:prometheus_project) }
-
- before do
- environment.stop
- end
-
- it { is_expected.to be_falsy }
- end
- end
-
describe '#slug' do
it "is automatically generated" do
expect(environment.slug).not_to be_nil
@@ -654,4 +621,12 @@ describe Environment do
end
end
end
+
+ describe '#prometheus_adapter' do
+ it 'calls prometheus adapter service' do
+ expect_any_instance_of(Prometheus::AdapterService).to receive(:prometheus_adapter)
+
+ subject.prometheus_adapter
+ end
+ end
end
diff --git a/spec/models/event_spec.rb b/spec/models/event_spec.rb
index 67f49348acb..8ea92410022 100644
--- a/spec/models/event_spec.rb
+++ b/spec/models/event_spec.rb
@@ -49,6 +49,22 @@ describe Event do
end
end
end
+
+ describe 'after_create :track_user_interacted_projects' do
+ let(:event) { build(:push_event, project: project, author: project.owner) }
+
+ it 'passes event to UserInteractedProject.track' do
+ expect(UserInteractedProject).to receive(:available?).and_return(true)
+ expect(UserInteractedProject).to receive(:track).with(event)
+ event.save
+ end
+
+ it 'does not call UserInteractedProject.track if it is not yet available' do
+ expect(UserInteractedProject).to receive(:available?).and_return(false)
+ expect(UserInteractedProject).not_to receive(:track)
+ event.save
+ end
+ end
end
describe "Push event" do
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 4f16b73ef38..abfc0896a41 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -18,6 +18,7 @@ describe Group do
it { is_expected.to have_many(:uploads).dependent(:destroy) }
it { is_expected.to have_one(:chat_team) }
it { is_expected.to have_many(:custom_attributes).class_name('GroupCustomAttribute') }
+ it { is_expected.to have_many(:badges).class_name('GroupBadge') }
describe '#members & #requesters' do
let(:requester) { create(:user) }
diff --git a/spec/models/internal_id_spec.rb b/spec/models/internal_id_spec.rb
new file mode 100644
index 00000000000..581fd0293cc
--- /dev/null
+++ b/spec/models/internal_id_spec.rb
@@ -0,0 +1,106 @@
+require 'spec_helper'
+
+describe InternalId do
+ let(:project) { create(:project) }
+ let(:usage) { :issues }
+ let(:issue) { build(:issue, project: project) }
+ let(:scope) { { project: project } }
+ let(:init) { ->(s) { s.project.issues.size } }
+
+ context 'validations' do
+ it { is_expected.to validate_presence_of(:usage) }
+ end
+
+ describe '.generate_next' do
+ subject { described_class.generate_next(issue, scope, usage, init) }
+
+ context 'in the absence of a record' do
+ it 'creates a record if not yet present' do
+ expect { subject }.to change { described_class.count }.from(0).to(1)
+ end
+
+ it 'stores record attributes' do
+ subject
+
+ described_class.first.tap do |record|
+ expect(record.project).to eq(project)
+ expect(record.usage).to eq(usage.to_s)
+ end
+ end
+
+ context 'with existing issues' do
+ before do
+ rand(1..10).times { create(:issue, project: project) }
+ described_class.delete_all
+ end
+
+ it 'calculates last_value values automatically' do
+ expect(subject).to eq(project.issues.size + 1)
+ end
+ end
+
+ context 'with concurrent inserts on table' do
+ it 'looks up the record if it was created concurrently' do
+ args = { **scope, usage: described_class.usages[usage.to_s] }
+ record = double
+ expect(described_class).to receive(:find_by).with(args).and_return(nil) # first call, record not present
+ expect(described_class).to receive(:find_by).with(args).and_return(record) # second call, record was created by another process
+ expect(described_class).to receive(:create!).and_raise(ActiveRecord::RecordNotUnique, 'record not unique')
+ expect(record).to receive(:increment_and_save!)
+
+ subject
+ end
+ end
+ end
+
+ it 'generates a strictly monotone, gapless sequence' do
+ seq = (0..rand(100)).map do
+ described_class.generate_next(issue, scope, usage, init)
+ end
+ normalized = seq.map { |i| i - seq.min }
+
+ expect(normalized).to eq((0..seq.size - 1).to_a)
+ end
+
+ context 'with an insufficient schema version' do
+ before do
+ described_class.reset_column_information
+ expect(ActiveRecord::Migrator).to receive(:current_version).and_return(InternalId::REQUIRED_SCHEMA_VERSION - 1)
+ end
+
+ let(:init) { double('block') }
+
+ it 'calculates next internal ids on the fly' do
+ val = rand(1..100)
+
+ expect(init).to receive(:call).with(issue).and_return(val)
+ expect(subject).to eq(val + 1)
+ end
+ end
+ end
+
+ describe '#increment_and_save!' do
+ let(:id) { create(:internal_id) }
+ subject { id.increment_and_save! }
+
+ it 'returns incremented iid' do
+ value = id.last_value
+
+ expect(subject).to eq(value + 1)
+ end
+
+ it 'saves the record' do
+ subject
+
+ expect(id.changed?).to be_falsey
+ end
+
+ context 'with last_value=nil' do
+ let(:id) { build(:internal_id, last_value: nil) }
+
+ it 'returns 1' do
+ expect(subject).to eq(1)
+ end
+ end
+ end
+end
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index f5c9f551e65..11154291368 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -9,11 +9,17 @@ describe Issue do
describe 'modules' do
subject { described_class }
- it { is_expected.to include_module(InternalId) }
it { is_expected.to include_module(Issuable) }
it { is_expected.to include_module(Referable) }
it { is_expected.to include_module(Sortable) }
it { is_expected.to include_module(Taskable) }
+
+ it_behaves_like 'AtomicInternalId' do
+ let(:internal_id_attribute) { :iid }
+ let(:instance) { build(:issue) }
+ let(:scope_attrs) { { project: instance.project } }
+ let(:usage) { :issues }
+ end
end
subject { create(:issue) }
@@ -221,27 +227,55 @@ describe Issue do
end
describe '#referenced_merge_requests' do
- it 'returns the referenced merge requests' do
- project = create(:project, :public)
-
- mr1 = create(:merge_request,
- source_project: project,
- source_branch: 'master',
- target_branch: 'feature')
+ let(:project) { create(:project, :public) }
+ let(:issue) do
+ create(:issue, description: merge_request.to_reference, project: project)
+ end
+ let!(:merge_request) do
+ create(:merge_request,
+ source_project: project,
+ source_branch: 'master',
+ target_branch: 'feature')
+ end
+ it 'returns the referenced merge requests' do
mr2 = create(:merge_request,
source_project: project,
source_branch: 'feature',
target_branch: 'master')
- issue = create(:issue, description: mr1.to_reference, project: project)
-
create(:note_on_issue,
noteable: issue,
note: mr2.to_reference,
project_id: project.id)
- expect(issue.referenced_merge_requests).to eq([mr1, mr2])
+ expect(issue.referenced_merge_requests).to eq([merge_request, mr2])
+ end
+
+ it 'returns cross project referenced merge requests' do
+ other_project = create(:project, :public)
+ cross_project_merge_request = create(:merge_request, source_project: other_project)
+ create(:note_on_issue,
+ noteable: issue,
+ note: cross_project_merge_request.to_reference(issue.project),
+ project_id: issue.project.id)
+
+ expect(issue.referenced_merge_requests).to eq([merge_request, cross_project_merge_request])
+ end
+
+ it 'excludes cross project references if the user cannot read cross project' do
+ user = create(:user)
+ allow(Ability).to receive(:allowed?).and_call_original
+ expect(Ability).to receive(:allowed?).with(user, :read_cross_project) { false }
+
+ other_project = create(:project, :public)
+ cross_project_merge_request = create(:merge_request, source_project: other_project)
+ create(:note_on_issue,
+ noteable: issue,
+ note: cross_project_merge_request.to_reference(issue.project),
+ project_id: issue.project.id)
+
+ expect(issue.referenced_merge_requests(user)).to eq([merge_request])
end
end
@@ -309,7 +343,7 @@ describe Issue do
end
describe '#related_branches' do
- let(:user) { build(:admin) }
+ let(:user) { create(:admin) }
before do
allow(subject.project.repository).to receive(:branch_names)
diff --git a/spec/models/members/project_member_spec.rb b/spec/models/members/project_member_spec.rb
index 3e46fa36375..b8b0e63f92e 100644
--- a/spec/models/members/project_member_spec.rb
+++ b/spec/models/members/project_member_spec.rb
@@ -45,14 +45,6 @@ describe ProjectMember do
let(:project) { owner.project }
let(:master) { create(:project_member, project: project) }
- let(:owner_todos) { (0...2).map { create(:todo, user: owner.user, project: project) } }
- let(:master_todos) { (0...3).map { create(:todo, user: master.user, project: project) } }
-
- before do
- owner_todos
- master_todos
- end
-
it "creates an expired event when left due to expiry" do
expired = create(:project_member, project: project, expires_at: Time.now - 6.days)
expired.destroy
@@ -63,21 +55,6 @@ describe ProjectMember do
master.destroy
expect(Event.recent.first.action).to eq(Event::LEFT)
end
-
- it "destroys itself and delete associated todos" do
- expect(owner.user.todos.size).to eq(2)
- expect(master.user.todos.size).to eq(3)
- expect(Todo.count).to eq(5)
-
- master_todo_ids = master_todos.map(&:id)
- master.destroy
-
- expect(owner.user.todos.size).to eq(2)
- expect(Todo.count).to eq(2)
- master_todo_ids.each do |id|
- expect(Todo.exists?(id)).to eq(false)
- end
- end
end
describe '.import_team' do
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 243eeddc7a8..ff5a6f63010 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -17,7 +17,7 @@ describe MergeRequest do
describe 'modules' do
subject { described_class }
- it { is_expected.to include_module(InternalId) }
+ it { is_expected.to include_module(NonatomicInternalId) }
it { is_expected.to include_module(Issuable) }
it { is_expected.to include_module(Referable) }
it { is_expected.to include_module(Sortable) }
@@ -1544,7 +1544,7 @@ describe MergeRequest do
end
it "executes diff cache service" do
- expect_any_instance_of(MergeRequests::MergeRequestDiffCacheService).to receive(:execute).with(subject)
+ expect_any_instance_of(MergeRequests::MergeRequestDiffCacheService).to receive(:execute).with(subject, an_instance_of(MergeRequestDiff))
subject.reload_diff
end
@@ -2084,4 +2084,82 @@ describe MergeRequest do
it_behaves_like 'checking whether a rebase is in progress'
end
end
+
+ describe '#allow_maintainer_to_push' do
+ let(:merge_request) do
+ build(:merge_request, source_branch: 'fixes', allow_maintainer_to_push: true)
+ end
+
+ it 'is false when pushing by a maintainer is not possible' do
+ expect(merge_request).to receive(:maintainer_push_possible?) { false }
+
+ expect(merge_request.allow_maintainer_to_push).to be_falsy
+ end
+
+ it 'is true when pushing by a maintainer is possible' do
+ expect(merge_request).to receive(:maintainer_push_possible?) { true }
+
+ expect(merge_request.allow_maintainer_to_push).to be_truthy
+ end
+ end
+
+ describe '#maintainer_push_possible?' do
+ let(:merge_request) do
+ build(:merge_request, source_branch: 'fixes')
+ end
+
+ before do
+ allow(ProtectedBranch).to receive(:protected?) { false }
+ end
+
+ it 'does not allow maintainer to push if the source project is the same as the target' do
+ merge_request.target_project = merge_request.source_project = create(:project, :public)
+
+ expect(merge_request.maintainer_push_possible?).to be_falsy
+ end
+
+ it 'allows maintainer to push when both source and target are public' do
+ merge_request.target_project = build(:project, :public)
+ merge_request.source_project = build(:project, :public)
+
+ expect(merge_request.maintainer_push_possible?).to be_truthy
+ end
+
+ it 'is not available for protected branches' do
+ merge_request.target_project = build(:project, :public)
+ merge_request.source_project = build(:project, :public)
+
+ expect(ProtectedBranch).to receive(:protected?)
+ .with(merge_request.source_project, 'fixes')
+ .and_return(true)
+
+ expect(merge_request.maintainer_push_possible?).to be_falsy
+ end
+ end
+
+ describe '#can_allow_maintainer_to_push?' do
+ let(:target_project) { create(:project, :public) }
+ let(:source_project) { fork_project(target_project) }
+ let(:merge_request) do
+ create(:merge_request,
+ source_project: source_project,
+ source_branch: 'fixes',
+ target_project: target_project)
+ end
+ let(:user) { create(:user) }
+
+ before do
+ allow(merge_request).to receive(:maintainer_push_possible?) { true }
+ end
+
+ it 'is false if the user does not have push access to the source project' do
+ expect(merge_request.can_allow_maintainer_to_push?(user)).to be_falsy
+ end
+
+ it 'is true when the user has push access to the source project' do
+ source_project.add_developer(user)
+
+ expect(merge_request.can_allow_maintainer_to_push?(user)).to be_truthy
+ end
+ end
end
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index e626efd054d..ee142718f7e 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -204,43 +204,67 @@ describe Namespace do
expect(gitlab_shell.exists?(project.repository_storage_path, "#{namespace.path}/#{project.path}.git")).to be_truthy
end
- context 'with subgroups' do
+ context 'with subgroups', :nested_groups do
let(:parent) { create(:group, name: 'parent', path: 'parent') }
+ let(:new_parent) { create(:group, name: 'new_parent', path: 'new_parent') }
let(:child) { create(:group, name: 'child', path: 'child', parent: parent) }
let!(:project) { create(:project_empty_repo, :legacy_storage, path: 'the-project', namespace: child, skip_disk_validation: true) }
let(:uploads_dir) { FileUploader.root }
let(:pages_dir) { File.join(TestEnv.pages_path) }
+ def expect_project_directories_at(namespace_path)
+ expected_repository_path = File.join(TestEnv.repos_path, namespace_path, 'the-project.git')
+ expected_upload_path = File.join(uploads_dir, namespace_path, 'the-project')
+ expected_pages_path = File.join(pages_dir, namespace_path, 'the-project')
+
+ expect(File.directory?(expected_repository_path)).to be_truthy
+ expect(File.directory?(expected_upload_path)).to be_truthy
+ expect(File.directory?(expected_pages_path)).to be_truthy
+ end
+
before do
+ FileUtils.mkdir_p(File.join(TestEnv.repos_path, "#{project.full_path}.git"))
FileUtils.mkdir_p(File.join(uploads_dir, project.full_path))
FileUtils.mkdir_p(File.join(pages_dir, project.full_path))
end
context 'renaming child' do
it 'correctly moves the repository, uploads and pages' do
- expected_repository_path = File.join(TestEnv.repos_path, 'parent', 'renamed', 'the-project.git')
- expected_upload_path = File.join(uploads_dir, 'parent', 'renamed', 'the-project')
- expected_pages_path = File.join(pages_dir, 'parent', 'renamed', 'the-project')
+ child.update!(path: 'renamed')
- child.update_attributes!(path: 'renamed')
-
- expect(File.directory?(expected_repository_path)).to be(true)
- expect(File.directory?(expected_upload_path)).to be(true)
- expect(File.directory?(expected_pages_path)).to be(true)
+ expect_project_directories_at('parent/renamed')
end
end
context 'renaming parent' do
it 'correctly moves the repository, uploads and pages' do
- expected_repository_path = File.join(TestEnv.repos_path, 'renamed', 'child', 'the-project.git')
- expected_upload_path = File.join(uploads_dir, 'renamed', 'child', 'the-project')
- expected_pages_path = File.join(pages_dir, 'renamed', 'child', 'the-project')
+ parent.update!(path: 'renamed')
+
+ expect_project_directories_at('renamed/child')
+ end
+ end
+
+ context 'moving from one parent to another' do
+ it 'correctly moves the repository, uploads and pages' do
+ child.update!(parent: new_parent)
- parent.update_attributes!(path: 'renamed')
+ expect_project_directories_at('new_parent/child')
+ end
+ end
+
+ context 'moving from having a parent to root' do
+ it 'correctly moves the repository, uploads and pages' do
+ child.update!(parent: nil)
+
+ expect_project_directories_at('child')
+ end
+ end
+
+ context 'moving from root to having a parent' do
+ it 'correctly moves the repository, uploads and pages' do
+ parent.update!(parent: new_parent)
- expect(File.directory?(expected_repository_path)).to be(true)
- expect(File.directory?(expected_upload_path)).to be(true)
- expect(File.directory?(expected_pages_path)).to be(true)
+ expect_project_directories_at('new_parent/parent/child')
end
end
end
@@ -525,7 +549,6 @@ describe Namespace do
end
end
- # Note: Group transfers are not yet implemented
context 'when a group is transferred into a root group' do
context 'when the root group "Share with group lock" is enabled' do
let(:root_group) { create(:group, share_with_group_lock: true) }
diff --git a/spec/models/notification_recipient_spec.rb b/spec/models/notification_recipient_spec.rb
new file mode 100644
index 00000000000..eda0e1da835
--- /dev/null
+++ b/spec/models/notification_recipient_spec.rb
@@ -0,0 +1,16 @@
+require 'spec_helper'
+
+describe NotificationRecipient do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, namespace: user.namespace) }
+ let(:target) { create(:issue, project: project) }
+
+ subject(:recipient) { described_class.new(user, :watch, target: target, project: project) }
+
+ it 'denies access to a target when cross project access is denied' do
+ allow(Ability).to receive(:allowed?).and_call_original
+ expect(Ability).to receive(:allowed?).with(user, :read_cross_project, :global).and_return(false)
+
+ expect(recipient.has_access?).to be_falsy
+ end
+end
diff --git a/spec/models/pages_domain_spec.rb b/spec/models/pages_domain_spec.rb
index 9d12f96c642..95713d8b85b 100644
--- a/spec/models/pages_domain_spec.rb
+++ b/spec/models/pages_domain_spec.rb
@@ -1,6 +1,10 @@
require 'spec_helper'
describe PagesDomain do
+ using RSpec::Parameterized::TableSyntax
+
+ subject(:pages_domain) { described_class.new }
+
describe 'associations' do
it { is_expected.to belong_to(:project) }
end
@@ -64,19 +68,51 @@ describe PagesDomain do
end
end
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:verification_code) }
+ end
+
+ describe '#verification_code' do
+ subject { pages_domain.verification_code }
+
+ it 'is set automatically with 128 bits of SecureRandom data' do
+ expect(SecureRandom).to receive(:hex).with(16) { 'verification code' }
+
+ is_expected.to eq('verification code')
+ end
+ end
+
+ describe '#keyed_verification_code' do
+ subject { pages_domain.keyed_verification_code }
+
+ it { is_expected.to eq("gitlab-pages-verification-code=#{pages_domain.verification_code}") }
+ end
+
+ describe '#verification_domain' do
+ subject { pages_domain.verification_domain }
+
+ it { is_expected.to be_nil }
+
+ it 'is a well-known subdomain if the domain is present' do
+ pages_domain.domain = 'example.com'
+
+ is_expected.to eq('_gitlab-pages-verification-code.example.com')
+ end
+ end
+
describe '#url' do
subject { domain.url }
context 'without the certificate' do
let(:domain) { build(:pages_domain, certificate: '') }
- it { is_expected.to eq('http://my.domain.com') }
+ it { is_expected.to eq("http://#{domain.domain}") }
end
context 'with a certificate' do
let(:domain) { build(:pages_domain, :with_certificate) }
- it { is_expected.to eq('https://my.domain.com') }
+ it { is_expected.to eq("https://#{domain.domain}") }
end
end
@@ -154,4 +190,108 @@ describe PagesDomain do
# We test only existence of output, since the output is long
it { is_expected.not_to be_empty }
end
+
+ describe '#update_daemon' do
+ it 'runs when the domain is created' do
+ domain = build(:pages_domain)
+
+ expect(domain).to receive(:update_daemon)
+
+ domain.save!
+ end
+
+ it 'runs when the domain is destroyed' do
+ domain = create(:pages_domain)
+
+ expect(domain).to receive(:update_daemon)
+
+ domain.destroy!
+ end
+
+ it 'delegates to Projects::UpdatePagesConfigurationService' do
+ service = instance_double('Projects::UpdatePagesConfigurationService')
+ expect(Projects::UpdatePagesConfigurationService).to receive(:new) { service }
+ expect(service).to receive(:execute)
+
+ create(:pages_domain)
+ end
+
+ context 'configuration updates when attributes change' do
+ set(:project1) { create(:project) }
+ set(:project2) { create(:project) }
+ set(:domain) { create(:pages_domain) }
+
+ where(:attribute, :old_value, :new_value, :update_expected) do
+ now = Time.now
+ future = now + 1.day
+
+ :project | nil | :project1 | true
+ :project | :project1 | :project1 | false
+ :project | :project1 | :project2 | true
+ :project | :project1 | nil | true
+
+ # domain can't be set to nil
+ :domain | 'a.com' | 'a.com' | false
+ :domain | 'a.com' | 'b.com' | true
+
+ # verification_code can't be set to nil
+ :verification_code | 'foo' | 'foo' | false
+ :verification_code | 'foo' | 'bar' | false
+
+ :verified_at | nil | now | false
+ :verified_at | now | now | false
+ :verified_at | now | future | false
+ :verified_at | now | nil | false
+
+ :enabled_until | nil | now | true
+ :enabled_until | now | now | false
+ :enabled_until | now | future | false
+ :enabled_until | now | nil | true
+ end
+
+ with_them do
+ it 'runs if a relevant attribute has changed' do
+ a = old_value.is_a?(Symbol) ? send(old_value) : old_value
+ b = new_value.is_a?(Symbol) ? send(new_value) : new_value
+
+ domain.update!(attribute => a)
+
+ if update_expected
+ expect(domain).to receive(:update_daemon)
+ else
+ expect(domain).not_to receive(:update_daemon)
+ end
+
+ domain.update!(attribute => b)
+ end
+ end
+
+ context 'TLS configuration' do
+ set(:domain_with_tls) { create(:pages_domain, :with_key, :with_certificate) }
+
+ let(:cert1) { domain_with_tls.certificate }
+ let(:cert2) { cert1 + ' ' }
+ let(:key1) { domain_with_tls.key }
+ let(:key2) { key1 + ' ' }
+
+ it 'updates when added' do
+ expect(domain).to receive(:update_daemon)
+
+ domain.update!(key: key1, certificate: cert1)
+ end
+
+ it 'updates when changed' do
+ expect(domain_with_tls).to receive(:update_daemon)
+
+ domain_with_tls.update!(key: key2, certificate: cert2)
+ end
+
+ it 'updates when removed' do
+ expect(domain_with_tls).to receive(:update_daemon)
+
+ domain_with_tls.update!(key: nil, certificate: nil)
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/project_auto_devops_spec.rb b/spec/models/project_auto_devops_spec.rb
index 296b91a771c..7545c0797e9 100644
--- a/spec/models/project_auto_devops_spec.rb
+++ b/spec/models/project_auto_devops_spec.rb
@@ -36,14 +36,14 @@ describe ProjectAutoDevops do
end
end
- describe '#variables' do
+ describe '#predefined_variables' do
let(:auto_devops) { build_stubbed(:project_auto_devops, project: project, domain: domain) }
context 'when domain is defined' do
let(:domain) { 'example.com' }
it 'returns AUTO_DEVOPS_DOMAIN' do
- expect(auto_devops.variables).to include(domain_variable)
+ expect(auto_devops.predefined_variables).to include(domain_variable)
end
end
@@ -55,7 +55,7 @@ describe ProjectAutoDevops do
allow(Gitlab::CurrentSettings).to receive(:auto_devops_domain).and_return('example.com')
end
- it { expect(auto_devops.variables).to include(domain_variable) }
+ it { expect(auto_devops.predefined_variables).to include(domain_variable) }
end
context 'when there is no instance domain specified' do
@@ -63,7 +63,7 @@ describe ProjectAutoDevops do
allow(Gitlab::CurrentSettings).to receive(:auto_devops_domain).and_return(nil)
end
- it { expect(auto_devops.variables).not_to include(domain_variable) }
+ it { expect(auto_devops.predefined_variables).not_to include(domain_variable) }
end
end
diff --git a/spec/models/project_services/asana_service_spec.rb b/spec/models/project_services/asana_service_spec.rb
index 04440d890aa..e66109fd98f 100644
--- a/spec/models/project_services/asana_service_spec.rb
+++ b/spec/models/project_services/asana_service_spec.rb
@@ -47,7 +47,7 @@ describe AsanaService do
it 'calls Asana service to create a story' do
data = create_data_for_commits('Message from commit. related to #123456')
- expected_message = "#{data[:user_name]} pushed to branch #{data[:ref]} of #{project.name_with_namespace} ( #{data[:commits][0][:url]} ): #{data[:commits][0][:message]}"
+ expected_message = "#{data[:user_name]} pushed to branch #{data[:ref]} of #{project.full_name} ( #{data[:commits][0][:url]} ): #{data[:commits][0][:message]}"
d1 = double('Asana::Task')
expect(d1).to receive(:add_comment).with(text: expected_message)
diff --git a/spec/models/project_services/hipchat_service_spec.rb b/spec/models/project_services/hipchat_service_spec.rb
index 23db29cb541..3e2a166cdd6 100644
--- a/spec/models/project_services/hipchat_service_spec.rb
+++ b/spec/models/project_services/hipchat_service_spec.rb
@@ -29,7 +29,7 @@ describe HipchatService do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:api_url) { 'https://hipchat.example.com/v2/room/123456/notification?auth_token=verySecret' }
- let(:project_name) { project.name_with_namespace.gsub(/\s/, '') }
+ let(:project_name) { project.full_name.gsub(/\s/, '') }
let(:token) { 'verySecret' }
let(:server_url) { 'https://hipchat.example.com'}
let(:push_sample_data) do
@@ -303,7 +303,7 @@ describe HipchatService do
message = hipchat.__send__(:create_pipeline_message, data)
project_url = project.web_url
- project_name = project.name_with_namespace.gsub(/\s/, '')
+ project_name = project.full_name.gsub(/\s/, '')
pipeline_attributes = data[:object_attributes]
ref = pipeline_attributes[:ref]
ref_type = pipeline_attributes[:tag] ? 'tag' : 'branch'
diff --git a/spec/models/project_services/jira_service_spec.rb b/spec/models/project_services/jira_service_spec.rb
index 748c366efca..54ef0be67ff 100644
--- a/spec/models/project_services/jira_service_spec.rb
+++ b/spec/models/project_services/jira_service_spec.rb
@@ -166,7 +166,6 @@ describe JiraService do
# Creates comment
expect(WebMock).to have_requested(:post, @comment_url)
-
# Creates Remote Link in JIRA issue fields
expect(WebMock).to have_requested(:post, @remote_link_url).with(
body: hash_including(
@@ -174,7 +173,7 @@ describe JiraService do
object: {
url: "#{Gitlab.config.gitlab.url}/#{project.full_path}/commit/#{merge_request.diff_head_sha}",
title: "GitLab: Solved by commit #{merge_request.diff_head_sha}.",
- icon: { title: "GitLab", url16x16: "https://gitlab.com/favicon.ico" },
+ icon: { title: "GitLab", url16x16: "http://localhost/favicon.ico" },
status: { resolved: true }
}
)
diff --git a/spec/models/project_services/kubernetes_service_spec.rb b/spec/models/project_services/kubernetes_service_spec.rb
index 622d8844a72..3be023a48c1 100644
--- a/spec/models/project_services/kubernetes_service_spec.rb
+++ b/spec/models/project_services/kubernetes_service_spec.rb
@@ -370,7 +370,7 @@ describe KubernetesService, :use_clean_rails_memory_store_caching do
stub_kubeclient_pods(status: 500)
end
- it { expect { subject }.to raise_error(KubeException) }
+ it { expect { subject }.to raise_error(Kubeclient::HttpError) }
end
context 'when kubernetes responds with 404s' do
diff --git a/spec/models/project_services/mattermost_slash_commands_service_spec.rb b/spec/models/project_services/mattermost_slash_commands_service_spec.rb
index 522cf15f3ba..a5bdf9a9337 100644
--- a/spec/models/project_services/mattermost_slash_commands_service_spec.rb
+++ b/spec/models/project_services/mattermost_slash_commands_service_spec.rb
@@ -31,10 +31,10 @@ describe MattermostSlashCommandsService do
url: 'http://trigger.url',
icon_url: 'http://icon.url/icon.png',
auto_complete: true,
- auto_complete_desc: "Perform common operations on: #{project.name_with_namespace}",
+ auto_complete_desc: "Perform common operations on: #{project.full_name}",
auto_complete_hint: '[help]',
- description: "Perform common operations on: #{project.name_with_namespace}",
- display_name: "GitLab / #{project.name_with_namespace}",
+ description: "Perform common operations on: #{project.full_name}",
+ display_name: "GitLab / #{project.full_name}",
method: 'P',
username: 'GitLab'
}.to_json)
diff --git a/spec/models/project_services/prometheus_service_spec.rb b/spec/models/project_services/prometheus_service_spec.rb
index ed17e019d42..7afb1b4a8e3 100644
--- a/spec/models/project_services/prometheus_service_spec.rb
+++ b/spec/models/project_services/prometheus_service_spec.rb
@@ -6,7 +6,6 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
let(:project) { create(:prometheus_project) }
let(:service) { project.prometheus_service }
- let(:environment_query) { Gitlab::Prometheus::Queries::EnvironmentQuery }
describe "Associations" do
it { is_expected.to belong_to :project }
@@ -55,197 +54,31 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
end
end
- describe '#environment_metrics' do
- let(:environment) { build_stubbed(:environment, slug: 'env-slug') }
-
- around do |example|
- Timecop.freeze { example.run }
- end
-
- context 'with valid data' do
- subject { service.environment_metrics(environment) }
-
- before do
- stub_reactive_cache(service, prometheus_data, environment_query, environment.id)
- end
-
- it 'returns reactive data' do
- is_expected.to eq(prometheus_metrics_data)
- end
- end
- end
-
- describe '#matched_metrics' do
- let(:matched_metrics_query) { Gitlab::Prometheus::Queries::MatchedMetricsQuery }
- let(:client) { double(:client, label_values: nil) }
-
- context 'with valid data' do
- subject { service.matched_metrics }
-
- before do
- allow(service).to receive(:client).and_return(client)
- synchronous_reactive_cache(service)
- end
-
- it 'returns reactive data' do
- expect(subject[:success]).to be_truthy
- expect(subject[:data]).to eq([])
- end
- end
- end
-
- describe '#deployment_metrics' do
- let(:deployment) { build_stubbed(:deployment) }
- let(:deployment_query) { Gitlab::Prometheus::Queries::DeploymentQuery }
-
- around do |example|
- Timecop.freeze { example.run }
- end
-
- context 'with valid data' do
- subject { service.deployment_metrics(deployment) }
- let(:fake_deployment_time) { 10 }
-
- before do
- stub_reactive_cache(service, prometheus_data, deployment_query, deployment.environment.id, deployment.id)
- end
-
- it 'returns reactive data' do
- expect(deployment).to receive(:created_at).and_return(fake_deployment_time)
-
- expect(subject).to eq(prometheus_metrics_data.merge(deployment_time: fake_deployment_time))
- end
- end
- end
-
- describe '#calculate_reactive_cache' do
- let(:environment) { create(:environment, slug: 'env-slug') }
- before do
- service.manual_configuration = true
- service.active = true
- end
-
- subject do
- service.calculate_reactive_cache(environment_query.name, environment.id)
- end
-
- around do |example|
- Timecop.freeze { example.run }
- end
-
- context 'when service is inactive' do
- before do
- service.active = false
- end
-
- it { is_expected.to be_nil }
- end
-
- context 'when Prometheus responds with valid data' do
- before do
- stub_all_prometheus_requests(environment.slug)
- end
-
- it { expect(subject.to_json).to eq(prometheus_data.to_json) }
- it { expect(subject.to_json).to eq(prometheus_data.to_json) }
- end
-
- [404, 500].each do |status|
- context "when Prometheus responds with #{status}" do
- before do
- stub_all_prometheus_requests(environment.slug, status: status, body: "QUERY FAILED!")
- end
-
- it { is_expected.to eq(success: false, result: %(#{status} - "QUERY FAILED!")) }
- end
- end
- end
-
- describe '#client' do
+ describe '#prometheus_client' do
context 'manual configuration is enabled' do
let(:api_url) { 'http://some_url' }
+
before do
+ subject.active = true
subject.manual_configuration = true
subject.api_url = api_url
end
- it 'returns simple rest client from api_url' do
- expect(subject.client).to be_instance_of(Gitlab::PrometheusClient)
- expect(subject.client.rest_client.url).to eq(api_url)
+ it 'returns rest client from api_url' do
+ expect(subject.prometheus_client.url).to eq(api_url)
end
end
context 'manual configuration is disabled' do
- let!(:cluster_for_all) { create(:cluster, environment_scope: '*', projects: [project]) }
- let!(:cluster_for_dev) { create(:cluster, environment_scope: 'dev', projects: [project]) }
-
- let!(:prometheus_for_dev) { create(:clusters_applications_prometheus, :installed, cluster: cluster_for_dev) }
- let(:proxy_client) { double('proxy_client') }
+ let(:api_url) { 'http://some_url' }
before do
- service.manual_configuration = false
- end
-
- context 'with cluster for all environments with prometheus installed' do
- let!(:prometheus_for_all) { create(:clusters_applications_prometheus, :installed, cluster: cluster_for_all) }
-
- context 'without environment supplied' do
- it 'returns client handling all environments' do
- expect(service).to receive(:client_from_cluster).with(cluster_for_all).and_return(proxy_client).twice
-
- expect(service.client).to be_instance_of(Gitlab::PrometheusClient)
- expect(service.client.rest_client).to eq(proxy_client)
- end
- end
-
- context 'with dev environment supplied' do
- let!(:environment) { create(:environment, project: project, name: 'dev') }
-
- it 'returns dev cluster client' do
- expect(service).to receive(:client_from_cluster).with(cluster_for_dev).and_return(proxy_client).twice
-
- expect(service.client(environment.id)).to be_instance_of(Gitlab::PrometheusClient)
- expect(service.client(environment.id).rest_client).to eq(proxy_client)
- end
- end
-
- context 'with prod environment supplied' do
- let!(:environment) { create(:environment, project: project, name: 'prod') }
-
- it 'returns dev cluster client' do
- expect(service).to receive(:client_from_cluster).with(cluster_for_all).and_return(proxy_client).twice
-
- expect(service.client(environment.id)).to be_instance_of(Gitlab::PrometheusClient)
- expect(service.client(environment.id).rest_client).to eq(proxy_client)
- end
- end
+ subject.manual_configuration = false
+ subject.api_url = api_url
end
- context 'with cluster for all environments without prometheus installed' do
- context 'without environment supplied' do
- it 'raises PrometheusError because cluster was not found' do
- expect { service.client }.to raise_error(Gitlab::PrometheusError, /couldn't find cluster with Prometheus installed/)
- end
- end
-
- context 'with dev environment supplied' do
- let!(:environment) { create(:environment, project: project, name: 'dev') }
-
- it 'returns dev cluster client' do
- expect(service).to receive(:client_from_cluster).with(cluster_for_dev).and_return(proxy_client).twice
-
- expect(service.client(environment.id)).to be_instance_of(Gitlab::PrometheusClient)
- expect(service.client(environment.id).rest_client).to eq(proxy_client)
- end
- end
-
- context 'with prod environment supplied' do
- let!(:environment) { create(:environment, project: project, name: 'prod') }
-
- it 'raises PrometheusError because cluster was not found' do
- expect { service.client }.to raise_error(Gitlab::PrometheusError, /couldn't find cluster with Prometheus installed/)
- end
- end
+ it 'does not provide a client' do
+ expect(subject.prometheus_client).to be_nil
end
end
end
@@ -284,7 +117,7 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
end
end
- describe '#synchronize_service_state! before_save callback' do
+ describe '#synchronize_service_state before_save callback' do
context 'no clusters with prometheus are installed' do
context 'when service is inactive' do
before do
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index ee04d74d848..4cf8d861595 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe Project do
+ include ProjectForksHelper
+
describe 'associations' do
it { is_expected.to belong_to(:group) }
it { is_expected.to belong_to(:namespace) }
@@ -80,6 +82,7 @@ describe Project do
it { is_expected.to have_many(:members_and_requesters) }
it { is_expected.to have_many(:clusters) }
it { is_expected.to have_many(:custom_attributes).class_name('ProjectCustomAttribute') }
+ it { is_expected.to have_many(:project_badges).class_name('ProjectBadge') }
it { is_expected.to have_many(:lfs_file_locks) }
context 'after initialized' do
@@ -517,6 +520,20 @@ describe Project do
it 'returns the project\'s last update date if it has no events' do
expect(project.last_activity_date).to eq(project.updated_at)
end
+
+ it 'returns the most recent timestamp' do
+ project.update_attributes(updated_at: nil,
+ last_activity_at: timestamp,
+ last_repository_updated_at: timestamp - 1.hour)
+
+ expect(project.last_activity_date).to eq(timestamp)
+
+ project.update_attributes(updated_at: timestamp,
+ last_activity_at: timestamp - 1.hour,
+ last_repository_updated_at: nil)
+
+ expect(project.last_activity_date).to eq(timestamp)
+ end
end
end
@@ -1361,7 +1378,7 @@ describe Project do
context 'using a regular repository' do
it 'creates the repository' do
- expect(shell).to receive(:add_repository)
+ expect(shell).to receive(:create_repository)
.with(project.repository_storage, project.disk_path)
.and_return(true)
@@ -1371,7 +1388,7 @@ describe Project do
end
it 'adds an error if the repository could not be created' do
- expect(shell).to receive(:add_repository)
+ expect(shell).to receive(:create_repository)
.with(project.repository_storage, project.disk_path)
.and_return(false)
@@ -1385,7 +1402,7 @@ describe Project do
context 'using a forked repository' do
it 'does nothing' do
expect(project).to receive(:forked?).and_return(true)
- expect(shell).not_to receive(:add_repository)
+ expect(shell).not_to receive(:create_repository)
project.create_repository
end
@@ -1404,7 +1421,7 @@ describe Project do
allow(project).to receive(:repository_exists?)
.and_return(false)
- allow(shell).to receive(:add_repository)
+ allow(shell).to receive(:create_repository)
.with(project.repository_storage_path, project.disk_path)
.and_return(true)
@@ -1428,7 +1445,7 @@ describe Project do
allow(project).to receive(:repository_exists?)
.and_return(false)
- expect(shell).to receive(:add_repository)
+ expect(shell).to receive(:create_repository)
.with(project.repository_storage, project.disk_path)
.and_return(true)
@@ -1473,6 +1490,13 @@ describe Project do
expect(project.user_can_push_to_empty_repo?(user)).to be_truthy
end
+
+ it 'returns false when the repo is not empty' do
+ project.add_master(user)
+ expect(project).to receive(:empty_repo?).and_return(false)
+
+ expect(project.user_can_push_to_empty_repo?(user)).to be_falsey
+ end
end
describe '#container_registry_url' do
@@ -2492,7 +2516,8 @@ describe Project do
end
it 'is a no-op when there is no namespace' do
- project.update_column(:namespace_id, nil)
+ project.namespace.delete
+ project.reload
expect_any_instance_of(Projects::UpdatePagesConfigurationService).not_to receive(:execute)
expect_any_instance_of(Gitlab::PagesTransfer).not_to receive(:rename_project)
@@ -2524,7 +2549,8 @@ describe Project do
it 'is a no-op on legacy projects when there is no namespace' do
export_path = legacy_project.export_path
- legacy_project.update_column(:namespace_id, nil)
+ legacy_project.namespace.delete
+ legacy_project.reload
expect(FileUtils).not_to receive(:rm_rf).with(export_path)
@@ -2536,7 +2562,8 @@ describe Project do
it 'runs on hashed storage projects when there is no namespace' do
export_path = project.export_path
- project.update_column(:namespace_id, nil)
+ project.namespace.delete
+ legacy_project.reload
allow(FileUtils).to receive(:rm_rf).and_call_original
expect(FileUtils).to receive(:rm_rf).with(export_path).and_call_original
@@ -3321,4 +3348,135 @@ describe Project do
end.not_to raise_error # Sidekiq::Worker::EnqueueFromTransactionError
end
end
+
+ describe '#badges' do
+ let(:project_group) { create(:group) }
+ let(:project) { create(:project, path: 'avatar', namespace: project_group) }
+
+ before do
+ create_list(:project_badge, 2, project: project)
+ create(:group_badge, group: project_group)
+ end
+
+ it 'returns the project and the project group badges' do
+ create(:group_badge, group: create(:group))
+
+ expect(Badge.count).to eq 4
+ expect(project.badges.count).to eq 3
+ end
+
+ if Group.supports_nested_groups?
+ context 'with nested_groups' do
+ let(:parent_group) { create(:group) }
+
+ before do
+ create_list(:group_badge, 2, group: project_group)
+ project_group.update(parent: parent_group)
+ end
+
+ it 'returns the project and the project nested groups badges' do
+ expect(project.badges.count).to eq 5
+ end
+ end
+ end
+ end
+
+ context 'with cross project merge requests' do
+ let(:user) { create(:user) }
+ let(:target_project) { create(:project, :repository) }
+ let(:project) { fork_project(target_project, nil, repository: true) }
+ let!(:merge_request) do
+ create(
+ :merge_request,
+ target_project: target_project,
+ target_branch: 'target-branch',
+ source_project: project,
+ source_branch: 'awesome-feature-1',
+ allow_maintainer_to_push: true
+ )
+ end
+
+ before do
+ target_project.add_developer(user)
+ end
+
+ describe '#merge_requests_allowing_push_to_user' do
+ it 'returns open merge requests for which the user has developer access to the target project' do
+ expect(project.merge_requests_allowing_push_to_user(user)).to include(merge_request)
+ end
+
+ it 'does not include closed merge requests' do
+ merge_request.close
+
+ expect(project.merge_requests_allowing_push_to_user(user)).to be_empty
+ end
+
+ it 'does not include merge requests for guest users' do
+ guest = create(:user)
+ target_project.add_guest(guest)
+
+ expect(project.merge_requests_allowing_push_to_user(guest)).to be_empty
+ end
+
+ it 'does not include the merge request for other users' do
+ other_user = create(:user)
+
+ expect(project.merge_requests_allowing_push_to_user(other_user)).to be_empty
+ end
+
+ it 'is empty when no user is passed' do
+ expect(project.merge_requests_allowing_push_to_user(nil)).to be_empty
+ end
+ end
+
+ describe '#branch_allows_maintainer_push?' do
+ it 'allows access if the user can merge the merge request' do
+ expect(project.branch_allows_maintainer_push?(user, 'awesome-feature-1'))
+ .to be_truthy
+ end
+
+ it 'does not allow guest users access' do
+ guest = create(:user)
+ target_project.add_guest(guest)
+
+ expect(project.branch_allows_maintainer_push?(guest, 'awesome-feature-1'))
+ .to be_falsy
+ end
+
+ it 'does not allow access to branches for which the merge request was closed' do
+ create(:merge_request, :closed,
+ target_project: target_project,
+ target_branch: 'target-branch',
+ source_project: project,
+ source_branch: 'rejected-feature-1',
+ allow_maintainer_to_push: true)
+
+ expect(project.branch_allows_maintainer_push?(user, 'rejected-feature-1'))
+ .to be_falsy
+ end
+
+ it 'does not allow access if the user cannot merge the merge request' do
+ create(:protected_branch, :masters_can_push, project: target_project, name: 'target-branch')
+
+ expect(project.branch_allows_maintainer_push?(user, 'awesome-feature-1'))
+ .to be_falsy
+ end
+
+ it 'caches the result' do
+ control = ActiveRecord::QueryRecorder.new { project.branch_allows_maintainer_push?(user, 'awesome-feature-1') }
+
+ expect { 3.times { project.branch_allows_maintainer_push?(user, 'awesome-feature-1') } }
+ .not_to exceed_query_limit(control)
+ end
+
+ context 'when the requeststore is active', :request_store do
+ it 'only queries per project across instances' do
+ control = ActiveRecord::QueryRecorder.new { project.branch_allows_maintainer_push?(user, 'awesome-feature-1') }
+
+ expect { 2.times { described_class.find(project.id).branch_allows_maintainer_push?(user, 'awesome-feature-1') } }
+ .not_to exceed_query_limit(control).with_threshold(2)
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/project_wiki_spec.rb b/spec/models/project_wiki_spec.rb
index 1e7671476f1..d87c1ca14f0 100644
--- a/spec/models/project_wiki_spec.rb
+++ b/spec/models/project_wiki_spec.rb
@@ -14,13 +14,13 @@ describe ProjectWiki do
it { is_expected.to delegate_method(:repository_storage_path).to :project }
it { is_expected.to delegate_method(:hashed_storage?).to :project }
- describe "#path_with_namespace" do
+ describe "#full_path" do
it "returns the project path with namespace with the .wiki extension" do
- expect(subject.path_with_namespace).to eq(project.full_path + '.wiki')
+ expect(subject.full_path).to eq(project.full_path + '.wiki')
end
it 'returns the same value as #full_path' do
- expect(subject.path_with_namespace).to eq(subject.full_path)
+ expect(subject.full_path).to eq(subject.full_path)
end
end
@@ -74,7 +74,7 @@ describe ProjectWiki do
# Create a fresh project which will not have a wiki
project_wiki = described_class.new(create(:project), user)
gitlab_shell = double(:gitlab_shell)
- allow(gitlab_shell).to receive(:add_repository)
+ allow(gitlab_shell).to receive(:create_repository)
allow(project_wiki).to receive(:gitlab_shell).and_return(gitlab_shell)
expect { project_wiki.send(:wiki) }.to raise_exception(ProjectWiki::CouldNotCreateWikiError)
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 0bc07dc7a85..e506c932d58 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -242,23 +242,51 @@ describe Repository do
end
describe '#commits' do
- it 'sets follow when path is a single path' do
- expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(follow: true)).and_call_original.twice
-
- repository.commits('master', limit: 1, path: 'README.md')
- repository.commits('master', limit: 1, path: ['README.md'])
+ context 'when neither the all flag nor a ref are specified' do
+ it 'returns every commit from default branch' do
+ expect(repository.commits(limit: 60).size).to eq(37)
+ end
end
- it 'does not set follow when path is multiple paths' do
- expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(follow: false)).and_call_original
+ context 'when ref is passed' do
+ it 'returns every commit from the specified ref' do
+ expect(repository.commits('master', limit: 60).size).to eq(37)
+ end
- repository.commits('master', limit: 1, path: ['README.md', 'CHANGELOG'])
- end
+ context 'when all' do
+ it 'returns every commit from the repository' do
+ expect(repository.commits('master', limit: 60, all: true).size).to eq(60)
+ end
+ end
- it 'does not set follow when there are no paths' do
- expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(follow: false)).and_call_original
+ context 'with path' do
+ it 'sets follow when it is a single path' do
+ expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(follow: true)).and_call_original.twice
+
+ repository.commits('master', limit: 1, path: 'README.md')
+ repository.commits('master', limit: 1, path: ['README.md'])
+ end
+
+ it 'does not set follow when it is multiple paths' do
+ expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(follow: false)).and_call_original
+
+ repository.commits('master', limit: 1, path: ['README.md', 'CHANGELOG'])
+ end
+ end
+
+ context 'without path' do
+ it 'does not set follow' do
+ expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(follow: false)).and_call_original
+
+ repository.commits('master', limit: 1)
+ end
+ end
+ end
- repository.commits('master', limit: 1)
+ context "when 'all' flag is set" do
+ it 'returns every commit from the repository' do
+ expect(repository.commits(all: true, limit: 60).size).to eq(60)
+ end
end
end
@@ -867,7 +895,7 @@ describe Repository do
end
it 'returns nil when the content is not recognizable' do
- repository.create_file(user, 'LICENSE', 'Copyright!',
+ repository.create_file(user, 'LICENSE', 'Gitlab B.V.',
message: 'Add LICENSE', branch_name: 'master')
expect(repository.license_key).to be_nil
@@ -911,7 +939,7 @@ describe Repository do
end
it 'returns nil when the content is not recognizable' do
- repository.create_file(user, 'LICENSE', 'Copyright!',
+ repository.create_file(user, 'LICENSE', 'Gitlab B.V.',
message: 'Add LICENSE', branch_name: 'master')
expect(repository.license).to be_nil
@@ -976,7 +1004,7 @@ describe Repository do
end
end
- context 'with Gitaly disabled', :skip_gitaly_mock do
+ context 'with Gitaly disabled', :disable_gitaly do
context 'when pre hooks were successful' do
it 'runs without errors' do
hook = double(trigger: [true, nil])
@@ -1419,7 +1447,6 @@ describe Repository do
it 'expires the caches for an empty repository' do
allow(repository).to receive(:empty?).and_return(true)
- expect(cache).to receive(:expire).with(:empty?)
expect(cache).to receive(:expire).with(:has_visible_content?)
repository.expire_emptiness_caches
@@ -1428,7 +1455,6 @@ describe Repository do
it 'does not expire the cache for a non-empty repository' do
allow(repository).to receive(:empty?).and_return(false)
- expect(cache).not_to receive(:expire).with(:empty?)
expect(cache).not_to receive(:expire).with(:has_visible_content?)
repository.expire_emptiness_caches
@@ -1868,7 +1894,7 @@ describe Repository do
it_behaves_like 'adding tag'
end
- context 'when Gitaly operation_user_add_tag feature is disabled', :skip_gitaly_mock do
+ context 'when Gitaly operation_user_add_tag feature is disabled', :disable_gitaly do
it_behaves_like 'adding tag'
it 'passes commit SHA to pre-receive and update hooks and tag SHA to post-receive hook' do
@@ -1927,7 +1953,7 @@ describe Repository do
end
end
- context 'with gitaly disabled', :skip_gitaly_mock do
+ context 'with gitaly disabled', :disable_gitaly do
it_behaves_like "user deleting a branch"
let(:old_rev) { '0b4bc9a49b562e85de7cc9e834518ea6828729b9' } # git rev-parse feature
@@ -2143,15 +2169,6 @@ describe Repository do
end
end
- describe '#expire_method_caches' do
- it 'expires the caches of the given methods' do
- expect_any_instance_of(RepositoryCache).to receive(:expire).with(:readme)
- expect_any_instance_of(RepositoryCache).to receive(:expire).with(:gitignore)
-
- repository.expire_method_caches(%i(readme gitignore))
- end
- end
-
describe '#expire_all_method_caches' do
it 'expires the caches of all methods' do
expect(repository).to receive(:expire_method_caches)
@@ -2297,66 +2314,6 @@ describe Repository do
end
end
- describe '#cache_method_output', :use_clean_rails_memory_store_caching do
- let(:fallback) { 10 }
-
- context 'with a non-existing repository' do
- let(:project) { create(:project) } # No repository
-
- subject do
- repository.cache_method_output(:cats, fallback: fallback) do
- repository.cats_call_stub
- end
- end
-
- it 'returns the fallback value' do
- expect(subject).to eq(fallback)
- end
-
- it 'avoids calling the original method' do
- expect(repository).not_to receive(:cats_call_stub)
-
- subject
- end
- end
-
- context 'with a method throwing a non-existing-repository error' do
- subject do
- repository.cache_method_output(:cats, fallback: fallback) do
- raise Gitlab::Git::Repository::NoRepository
- end
- end
-
- it 'returns the fallback value' do
- expect(subject).to eq(fallback)
- end
-
- it 'does not cache the data' do
- subject
-
- expect(repository.instance_variable_defined?(:@cats)).to eq(false)
- expect(repository.send(:cache).exist?(:cats)).to eq(false)
- end
- end
-
- context 'with an existing repository' do
- it 'caches the output' do
- object = double
-
- expect(object).to receive(:number).once.and_return(10)
-
- 2.times do
- val = repository.cache_method_output(:cats) { object.number }
-
- expect(val).to eq(10)
- end
-
- expect(repository.send(:cache).exist?(:cats)).to eq(true)
- expect(repository.instance_variable_get(:@cats)).to eq(10)
- end
- end
- end
-
describe '#refresh_method_caches' do
it 'refreshes the caches of the given types' do
expect(repository).to receive(:expire_method_caches)
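The #cache_method_output examples removed above describe a helper that memoizes a repository method, returns a fallback when the repository does not exist on disk, and caches nothing when the underlying call raises Gitlab::Git::Repository::NoRepository. A minimal sketch of such a helper, consistent with those examples; the cache object behind `cache`, the `exists?` guard, and other details beyond what the spec shows are assumptions:

  # Sketch only: memoizes the result of an expensive repository method.
  def cache_method_output(name, fallback: nil)
    # Skip the expensive call entirely when the repository is not on disk.
    return fallback unless exists?

    ivar = :"@#{name}"

    if instance_variable_defined?(ivar)
      instance_variable_get(ivar)
    else
      # The cache only stores a value if the block succeeds.
      value = cache.fetch(name) { yield }
      instance_variable_set(ivar, value)
    end
  rescue Gitlab::Git::Repository::NoRepository
    # The block reported a missing repository: return the fallback, cache nothing.
    fallback
  end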
diff --git a/spec/models/user_interacted_project_spec.rb b/spec/models/user_interacted_project_spec.rb
new file mode 100644
index 00000000000..cb4bb3372d4
--- /dev/null
+++ b/spec/models/user_interacted_project_spec.rb
@@ -0,0 +1,60 @@
+require 'spec_helper'
+
+describe UserInteractedProject do
+ describe '.track' do
+ subject { described_class.track(event) }
+ let(:event) { build(:event) }
+
+ Event::ACTIONS.each do |action|
+ context "for all actions (event types)" do
+ let(:event) { build(:event, action: action) }
+ it 'creates a record' do
+ expect { subject }.to change { described_class.count }.from(0).to(1)
+ end
+ end
+ end
+
+ it 'sets project accordingly' do
+ subject
+ expect(described_class.first.project).to eq(event.project)
+ end
+
+ it 'sets user accordingly' do
+ subject
+ expect(described_class.first.user).to eq(event.author)
+ end
+
+ it 'only creates a record once per user/project' do
+ expect do
+ subject
+ described_class.track(event)
+ end.to change { described_class.count }.from(0).to(1)
+ end
+
+ describe 'with an event without a project' do
+ let(:event) { build(:event, project: nil) }
+
+ it 'ignores the event' do
+ expect { subject }.not_to change { described_class.count }
+ end
+ end
+ end
+
+ describe '.available?' do
+ before do
+ described_class.instance_variable_set('@available_flag', nil)
+ end
+
+ it 'checks schema version and properly caches positive result' do
+ expect(ActiveRecord::Migrator).to receive(:current_version).and_return(described_class::REQUIRED_SCHEMA_VERSION - 1 - rand(1000))
+ expect(described_class.available?).to be_falsey
+ expect(ActiveRecord::Migrator).to receive(:current_version).and_return(described_class::REQUIRED_SCHEMA_VERSION + rand(1000))
+ expect(described_class.available?).to be_truthy
+ expect(ActiveRecord::Migrator).not_to receive(:current_version)
+ expect(described_class.available?).to be_truthy # cached response
+ end
+ end
+
+ it { is_expected.to validate_presence_of(:project_id) }
+ it { is_expected.to validate_presence_of(:user_id) }
+end
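The new spec above pins down two behaviours of UserInteractedProject: .track records at most one row per user/project pair and ignores events without a project, while .available? compares the current migration version against REQUIRED_SCHEMA_VERSION and memoizes a positive answer in @available_flag. A minimal sketch consistent with those examples; the version number, the unique-index assumption, and the RecordNotUnique handling are assumptions, not the shipped code:

  # Sketch only: column names follow the validations exercised in the spec.
  class UserInteractedProject < ActiveRecord::Base
    REQUIRED_SCHEMA_VERSION = 20180115000000 # hypothetical placeholder value

    belongs_to :user
    belongs_to :project

    validates :project_id, presence: true
    validates :user_id, presence: true

    def self.track(event)
      # Events without a project (or author) are ignored.
      return unless event.project_id && event.author_id

      attributes = { project_id: event.project_id, user_id: event.author_id }

      # Assumes a unique index on (user_id, project_id), so a concurrent
      # insert cannot create a duplicate row.
      where(attributes).exists? || create!(attributes)
    rescue ActiveRecord::RecordNotUnique
      nil # another process recorded the interaction first
    end

    def self.available?
      # Only a positive result is memoized; a negative check is repeated.
      @available_flag ||= ActiveRecord::Migrator.current_version >= REQUIRED_SCHEMA_VERSION
    end
  end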
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 3531de244bd..5680eb24985 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -27,7 +27,6 @@ describe User do
it { is_expected.to have_many(:keys).dependent(:destroy) }
it { is_expected.to have_many(:deploy_keys).dependent(:destroy) }
it { is_expected.to have_many(:events).dependent(:destroy) }
- it { is_expected.to have_many(:recent_events).class_name('Event') }
it { is_expected.to have_many(:issues).dependent(:destroy) }
it { is_expected.to have_many(:notes).dependent(:destroy) }
it { is_expected.to have_many(:merge_requests).dependent(:destroy) }
@@ -1635,6 +1634,32 @@ describe User do
end
end
+ describe '#authorizations_for_projects' do
+ let!(:user) { create(:user) }
+ subject { Project.where("EXISTS (?)", user.authorizations_for_projects) }
+
+ it 'includes projects that belong to a user, but no other projects' do
+ owned = create(:project, :private, namespace: user.namespace)
+ member = create(:project, :private).tap { |p| p.add_master(user) }
+ other = create(:project)
+
+ expect(subject).to include(owned)
+ expect(subject).to include(member)
+ expect(subject).not_to include(other)
+ end
+
+ it 'includes projects a user has access to, but no other projects' do
+ other_user = create(:user)
+ accessible = create(:project, :private, namespace: other_user.namespace) do |project|
+ project.add_developer(user)
+ end
+ other = create(:project)
+
+ expect(subject).to include(accessible)
+ expect(subject).not_to include(other)
+ end
+ end
+
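The #authorizations_for_projects examples above plug the relation into a correlated EXISTS subquery over Project. A sketch of a method that would satisfy them, assuming the existing project_authorizations association and its project_id column:

  # Sketch only: a relation meant to be used as Project.where("EXISTS (?)", ...).
  def authorizations_for_projects
    project_authorizations
      .select(1)
      .where('project_authorizations.project_id = projects.id')
  end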
describe '#authorized_projects', :delete do
context 'with a minimum access level' do
it 'includes projects for which the user is an owner' do