Diffstat (limited to 'spec')
-rw-r--r--  spec/controllers/concerns/send_file_upload_spec.rb | 15
-rw-r--r--  spec/controllers/projects/commit_controller_spec.rb | 33
-rw-r--r--  spec/controllers/projects/pipelines_controller_spec.rb | 14
-rw-r--r--  spec/controllers/projects/prometheus/metrics_controller_spec.rb | 73
-rw-r--r--  spec/controllers/projects/settings/ci_cd_controller_spec.rb | 6
-rw-r--r--  spec/factories/clusters/clusters.rb | 4
-rw-r--r--  spec/factories/gitaly/tag.rb | 9
-rw-r--r--  spec/fast_spec_helper.rb | 12
-rw-r--r--  spec/features/admin/admin_settings_spec.rb | 2
-rw-r--r--  spec/features/invites_spec.rb | 112
-rw-r--r--  spec/features/merge_request/user_resolves_conflicts_spec.rb | 6
-rw-r--r--  spec/features/merge_request/user_sees_check_out_branch_modal_spec.rb | 24
-rw-r--r--  spec/features/projects/actve_tabs_spec.rb | 4
-rw-r--r--  spec/features/projects/blobs/blob_show_spec.rb | 18
-rw-r--r--  spec/features/projects/clusters/gcp_spec.rb | 1
-rw-r--r--  spec/features/projects/commit/comments/user_adds_comment_spec.rb | 170
-rw-r--r--  spec/features/projects/commit/comments/user_deletes_comments_spec.rb | 37
-rw-r--r--  spec/features/projects/commit/comments/user_edits_comments_spec.rb | 42
-rw-r--r--  spec/features/projects/commit/user_comments_on_commit_spec.rb | 110
-rw-r--r--  spec/features/projects/environments/environments_spec.rb | 16
-rw-r--r--  spec/features/projects/merge_requests/user_creates_merge_request_spec.rb | 80
-rw-r--r--  spec/features/projects/settings/pipelines_settings_spec.rb | 23
-rw-r--r--  spec/features/projects/user_sees_sidebar_spec.rb | 2
-rw-r--r--  spec/features/projects/user_uses_shortcuts_spec.rb | 41
-rw-r--r--  spec/features/projects/wiki/markdown_preview_spec.rb | 23
-rw-r--r--  spec/features/projects/wiki/user_deletes_wiki_page_spec.rb | 3
-rw-r--r--  spec/features/users/user_browses_projects_on_user_page_spec.rb | 8
-rw-r--r--  spec/finders/issues_finder_spec.rb | 2
-rw-r--r--  spec/finders/personal_projects_finder_spec.rb | 12
-rw-r--r--  spec/fixtures/api/schemas/list.json | 2
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/projects.json | 1
-rw-r--r--  spec/fixtures/emails/valid_new_issue_with_quote.eml | 25
-rw-r--r--  spec/helpers/blob_helper_spec.rb | 10
-rw-r--r--  spec/javascripts/environments/environments_app_spec.js | 212
-rw-r--r--  spec/javascripts/environments/folder/environments_folder_view_spec.js | 81
-rw-r--r--  spec/javascripts/environments/mock_data.js | 1
-rw-r--r--  spec/javascripts/helpers/vue_mount_component_helper.js | 22
-rw-r--r--  spec/javascripts/helpers/vuex_action_helper.js | 2
-rw-r--r--  spec/javascripts/ide/components/ide_spec.js | 6
-rw-r--r--  spec/javascripts/ide/components/repo_editor_spec.js | 20
-rw-r--r--  spec/javascripts/ide/mock_data.js | 47
-rw-r--r--  spec/javascripts/ide/stores/modules/pipelines/actions_spec.js | 289
-rw-r--r--  spec/javascripts/ide/stores/modules/pipelines/getters_spec.js | 71
-rw-r--r--  spec/javascripts/ide/stores/modules/pipelines/mutations_spec.js | 120
-rw-r--r--  spec/javascripts/pipelines/graph/action_component_spec.js | 71
-rw-r--r--  spec/javascripts/pipelines/stage_spec.js | 27
-rw-r--r--  spec/javascripts/vue_mr_widget/components/mr_widget_author_spec.js | 6
-rw-r--r--  spec/javascripts/vue_mr_widget/components/mr_widget_maintainer_edit_spec.js | 40
-rw-r--r--  spec/javascripts/vue_mr_widget/mr_widget_options_spec.js | 2
-rw-r--r--  spec/lib/api/helpers/pagination_spec.rb | 212
-rw-r--r--  spec/lib/api/helpers/related_resources_helpers_spec.rb | 16
-rw-r--r--  spec/lib/banzai/filter/gollum_tags_filter_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/auth/blocked_user_tracker_spec.rb | 44
-rw-r--r--  spec/lib/gitlab/auth/ldap/access_spec.rb | 13
-rw-r--r--  spec/lib/gitlab/auth/ldap/config_spec.rb | 36
-rw-r--r--  spec/lib/gitlab/auth/ldap/user_spec.rb | 5
-rw-r--r--  spec/lib/gitlab/ci/config/entry/policy_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/ci/pipeline/expression/lexeme/matches_spec.rb | 80
-rw-r--r--  spec/lib/gitlab/ci/pipeline/expression/lexeme/pattern_spec.rb | 96
-rw-r--r--  spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/ci/pipeline/expression/parser_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb | 99
-rw-r--r--  spec/lib/gitlab/ci/pipeline/expression/token_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/pipeline/preloader_spec.rb | 20
-rw-r--r--  spec/lib/gitlab/current_settings_spec.rb | 101
-rw-r--r--  spec/lib/gitlab/database/count_spec.rb | 62
-rw-r--r--  spec/lib/gitlab/database_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/email/handler/create_issue_handler_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/email/reply_parser_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/git/commit_spec.rb | 70
-rw-r--r--  spec/lib/gitlab/git/repository_spec.rb | 112
-rw-r--r--  spec/lib/gitlab/git/tag_spec.rb | 52
-rw-r--r--  spec/lib/gitlab/import_export/all_models.yml | 1
-rw-r--r--  spec/lib/gitlab/import_export/project.json | 20
-rw-r--r--  spec/lib/gitlab/incoming_email_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/metrics/web_transaction_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/project_search_results_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/untrusted_regexp_spec.rb | 53
-rw-r--r--  spec/lib/gitlab/workhorse_spec.rb | 12
-rw-r--r--  spec/mailers/notify_spec.rb | 2
-rw-r--r--  spec/migrations/add_unique_constraint_to_project_features_project_id_spec.rb | 59
-rw-r--r--  spec/models/appearance_spec.rb | 37
-rw-r--r--  spec/models/application_setting_spec.rb | 30
-rw-r--r--  spec/models/ci/build_spec.rb | 52
-rw-r--r--  spec/models/ci/pipeline_spec.rb | 55
-rw-r--r--  spec/models/ci/runner_spec.rb | 62
-rw-r--r--  spec/models/clusters/applications/prometheus_spec.rb | 12
-rw-r--r--  spec/models/commit_spec.rb | 85
-rw-r--r--  spec/models/commit_status_spec.rb | 6
-rw-r--r--  spec/models/concerns/cacheable_attributes_spec.rb | 153
-rw-r--r--  spec/models/concerns/issuable_spec.rb | 13
-rw-r--r--  spec/models/concerns/redis_cacheable_spec.rb | 83
-rw-r--r--  spec/models/concerns/resolvable_note_spec.rb | 8
-rw-r--r--  spec/models/concerns/sortable_spec.rb | 108
-rw-r--r--  spec/models/generic_commit_status_spec.rb | 6
-rw-r--r--  spec/models/group_spec.rb | 10
-rw-r--r--  spec/models/guest_spec.rb | 6
-rw-r--r--  spec/models/project_services/gemnasium_service_spec.rb | 33
-rw-r--r--  spec/models/project_spec.rb | 38
-rw-r--r--  spec/models/repository_spec.rb | 96
-rw-r--r--  spec/models/user_spec.rb | 202
-rw-r--r--  spec/policies/ci/build_policy_spec.rb | 13
-rw-r--r--  spec/policies/ci/pipeline_policy_spec.rb | 12
-rw-r--r--  spec/policies/project_policy_spec.rb | 2
-rw-r--r--  spec/presenters/ci/build_presenter_spec.rb | 2
-rw-r--r--  spec/presenters/commit_status_presenter_spec.rb | 15
-rw-r--r--  spec/requests/api/environments_spec.rb | 2
-rw-r--r--  spec/requests/api/groups_spec.rb | 9
-rw-r--r--  spec/requests/api/issues_spec.rb | 309
-rw-r--r--  spec/requests/api/markdown_spec.rb | 112
-rw-r--r--  spec/requests/api/merge_requests_spec.rb | 225
-rw-r--r--  spec/requests/api/projects_spec.rb | 5
-rw-r--r--  spec/requests/api/runner_spec.rb | 16
-rw-r--r--  spec/requests/api/runners_spec.rb | 13
-rw-r--r--  spec/requests/api/users_spec.rb | 27
-rw-r--r--  spec/requests/api/v3/groups_spec.rb | 8
-rw-r--r--  spec/requests/api/v3/projects_spec.rb | 2
-rw-r--r--  spec/requests/openid_connect_spec.rb | 4
-rw-r--r--  spec/serializers/pipeline_entity_spec.rb | 7
-rw-r--r--  spec/serializers/pipeline_serializer_spec.rb | 21
-rw-r--r--  spec/services/ci/create_pipeline_service_spec.rb | 22
-rw-r--r--  spec/services/ci/retry_pipeline_service_spec.rb | 29
-rw-r--r--  spec/services/clusters/create_service_spec.rb | 68
-rw-r--r--  spec/services/keys/destroy_service_spec.rb | 13
-rw-r--r--  spec/services/projects/update_remote_mirror_service_spec.rb | 6
-rw-r--r--  spec/support/helpers/rake_helpers.rb | 4
-rw-r--r--  spec/support/helpers/test_env.rb | 6
-rw-r--r--  spec/support/services/clusters/create_service_shared.rb | 57
-rw-r--r--  spec/support/shared_examples/malicious_regexp_shared_examples.rb | 2
-rw-r--r--  spec/support/shared_examples/models/with_uploads_shared_examples.rb | 23
-rw-r--r--  spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb | 9
131 files changed, 3990 insertions, 1366 deletions
diff --git a/spec/controllers/concerns/send_file_upload_spec.rb b/spec/controllers/concerns/send_file_upload_spec.rb
index f4c99ea4064..58bb91a0c80 100644
--- a/spec/controllers/concerns/send_file_upload_spec.rb
+++ b/spec/controllers/concerns/send_file_upload_spec.rb
@@ -51,6 +51,21 @@ describe SendFileUpload do
end
end
+ context 'with attachment' do
+ subject { controller.send_upload(uploader, attachment: 'test.js') }
+
+ it 'sends a file with content-type of text/plain' do
+ expected_params = {
+ content_type: 'text/plain',
+ filename: 'test.js',
+ disposition: 'attachment'
+ }
+ expect(controller).to receive(:send_file).with(uploader.path, expected_params)
+
+ subject
+ end
+ end
+
context 'when remote file is used' do
before do
stub_uploads_object_storage(uploader: uploader_class)
diff --git a/spec/controllers/projects/commit_controller_spec.rb b/spec/controllers/projects/commit_controller_spec.rb
index 694c64ae1ad..003fec8ac68 100644
--- a/spec/controllers/projects/commit_controller_spec.rb
+++ b/spec/controllers/projects/commit_controller_spec.rb
@@ -79,41 +79,18 @@ describe Projects::CommitController do
end
describe "as diff" do
- include_examples "export as", :diff
- let(:format) { :diff }
+ it "triggers workhorse to serve the request" do
+ go(id: commit.id, format: :diff)
- it "should really only be a git diff" do
- go(id: '66eceea0db202bb39c4e445e8ca28689645366c5', format: format)
-
- expect(response.body).to start_with("diff --git")
- end
-
- it "is only be a git diff without whitespace changes" do
- go(id: '66eceea0db202bb39c4e445e8ca28689645366c5', format: format, w: 1)
-
- expect(response.body).to start_with("diff --git")
-
- # without whitespace option, there are more than 2 diff_splits for other formats
- diff_splits = assigns(:diffs).diff_files.first.diff.diff.split("\n")
- expect(diff_splits.length).to be <= 2
+ expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-diff:")
end
end
describe "as patch" do
- include_examples "export as", :patch
- let(:format) { :patch }
- let(:commit2) { project.commit('498214de67004b1da3d820901307bed2a68a8ef6') }
-
- it "is a git email patch" do
- go(id: commit2.id, format: format)
-
- expect(response.body).to start_with("From #{commit2.id}")
- end
-
it "contains a git diff" do
- go(id: commit2.id, format: format)
+ go(id: commit.id, format: :patch)
- expect(response.body).to match(/^diff --git/)
+ expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-format-patch:")
end
end
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index a451bbb97b6..9e7bc20a6d1 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -35,10 +35,16 @@ describe Projects::PipelinesController do
expect(json_response).to include('pipelines')
expect(json_response['pipelines'].count).to eq 4
- expect(json_response['count']['all']).to eq 4
- expect(json_response['count']['running']).to eq 1
- expect(json_response['count']['pending']).to eq 1
- expect(json_response['count']['finished']).to eq 1
+ expect(json_response['count']['all']).to eq '4'
+ expect(json_response['count']['running']).to eq '1'
+ expect(json_response['count']['pending']).to eq '1'
+ expect(json_response['count']['finished']).to eq '1'
+ end
+
+ it 'does not include coverage data for the pipelines' do
+ subject
+
+ expect(json_response['pipelines'][0]).not_to include('coverage')
end
context 'when performing gitaly calls', :request_store do
diff --git a/spec/controllers/projects/prometheus/metrics_controller_spec.rb b/spec/controllers/projects/prometheus/metrics_controller_spec.rb
index b2b245dba90..871dcf5c796 100644
--- a/spec/controllers/projects/prometheus/metrics_controller_spec.rb
+++ b/spec/controllers/projects/prometheus/metrics_controller_spec.rb
@@ -12,44 +12,67 @@ describe Projects::Prometheus::MetricsController do
end
describe 'GET #active_common' do
- before do
- allow(controller).to receive(:prometheus_adapter).and_return(prometheus_adapter)
- end
+ context 'when prometheus_adapter can query' do
+ before do
+ allow(controller).to receive(:prometheus_adapter).and_return(prometheus_adapter)
+ end
- context 'when prometheus metrics are enabled' do
- context 'when data is not present' do
- before do
- allow(prometheus_adapter).to receive(:query).with(:matched_metrics).and_return({})
- end
+ context 'when prometheus metrics are enabled' do
+ context 'when data is not present' do
+ before do
+ allow(prometheus_adapter).to receive(:query).with(:matched_metrics).and_return({})
+ end
- it 'returns no content response' do
- get :active_common, project_params(format: :json)
+ it 'returns no content response' do
+ get :active_common, project_params(format: :json)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(204)
+ end
end
- end
- context 'when data is available' do
- let(:sample_response) { { some_data: 1 } }
+ context 'when data is available' do
+ let(:sample_response) { { some_data: 1 } }
+
+ before do
+ allow(prometheus_adapter).to receive(:query).with(:matched_metrics).and_return(sample_response)
+ end
- before do
- allow(prometheus_adapter).to receive(:query).with(:matched_metrics).and_return(sample_response)
+ it 'returns no content response' do
+ get :active_common, project_params(format: :json)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to eq(sample_response.deep_stringify_keys)
+ end
end
- it 'returns no content response' do
- get :active_common, project_params(format: :json)
+ context 'when requesting non json response' do
+ it 'returns not found response' do
+ get :active_common, project_params
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to eq(sample_response.deep_stringify_keys)
+ expect(response).to have_gitlab_http_status(404)
+ end
end
end
+ end
- context 'when requesting non json response' do
- it 'returns not found response' do
- get :active_common, project_params
+ context 'when prometheus_adapter cannot query' do
+ it 'renders 404' do
+ prometheus_adapter = double('prometheus_adapter', can_query?: false)
- expect(response).to have_gitlab_http_status(404)
- end
+ allow(controller).to receive(:prometheus_adapter).and_return(prometheus_adapter)
+ allow(prometheus_adapter).to receive(:query).with(:matched_metrics).and_return({})
+
+ get :active_common, project_params(format: :json)
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+
+ context 'when prometheus_adapter is disabled' do
+ it 'renders 404' do
+ get :active_common, project_params(format: :json)
+
+ expect(response).to have_gitlab_http_status(404)
end
end
end
diff --git a/spec/controllers/projects/settings/ci_cd_controller_spec.rb b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
index a91c868cbaf..f1810763d2d 100644
--- a/spec/controllers/projects/settings/ci_cd_controller_spec.rb
+++ b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
@@ -19,11 +19,11 @@ describe Projects::Settings::CiCdController do
end
context 'with group runners' do
- let(:group_runner) { create(:ci_runner) }
+ let(:group_runner) { create(:ci_runner, runner_type: :group_type) }
let(:parent_group) { create(:group) }
let(:group) { create(:group, runners: [group_runner], parent: parent_group) }
let(:other_project) { create(:project, group: group) }
- let!(:project_runner) { create(:ci_runner, projects: [other_project]) }
+ let!(:project_runner) { create(:ci_runner, projects: [other_project], runner_type: :project_type) }
let!(:shared_runner) { create(:ci_runner, :shared) }
it 'sets assignable project runners only' do
@@ -31,7 +31,7 @@ describe Projects::Settings::CiCdController do
get :show, namespace_id: project.namespace, project_id: project
- expect(assigns(:assignable_runners)).to eq [project_runner]
+ expect(assigns(:assignable_runners)).to contain_exactly(project_runner)
end
end
end
diff --git a/spec/factories/clusters/clusters.rb b/spec/factories/clusters/clusters.rb
index 98566f907f9..0430762c1ff 100644
--- a/spec/factories/clusters/clusters.rb
+++ b/spec/factories/clusters/clusters.rb
@@ -4,8 +4,8 @@ FactoryBot.define do
name 'test-cluster'
trait :project do
- after(:create) do |cluster, evaluator|
- cluster.projects << create(:project)
+ before(:create) do |cluster, evaluator|
+ cluster.projects << create(:project, :repository)
end
end
diff --git a/spec/factories/gitaly/tag.rb b/spec/factories/gitaly/tag.rb
new file mode 100644
index 00000000000..e99776d524a
--- /dev/null
+++ b/spec/factories/gitaly/tag.rb
@@ -0,0 +1,9 @@
+FactoryBot.define do
+ factory :gitaly_tag, class: Gitaly::Tag do
+ skip_create
+
+ name { 'v3.1.4' }
+ message { 'Pie release' }
+ target_commit factory: :gitaly_commit
+ end
+end
diff --git a/spec/fast_spec_helper.rb b/spec/fast_spec_helper.rb
index 978113a08a4..134eb25e4b1 100644
--- a/spec/fast_spec_helper.rb
+++ b/spec/fast_spec_helper.rb
@@ -3,14 +3,8 @@ require 'bundler/setup'
ENV['GITLAB_ENV'] = 'test'
ENV['IN_MEMORY_APPLICATION_SETTINGS'] = 'true'
-unless Object.respond_to?(:require_dependency)
- class Object
- alias_method :require_dependency, :require
- end
-end
-
-# Defines Settings and Gitlab.config which are at the center of the app
require_relative '../config/settings'
-require_relative '../lib/gitlab' unless defined?(Gitlab.config)
-
require_relative 'support/rspec'
+require 'active_support/all'
+
+ActiveSupport::Dependencies.autoload_paths << 'lib'
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index f2f9b734c39..dc025d82937 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -152,7 +152,7 @@ feature 'Admin updates settings' do
scenario 'Change CI/CD settings' do
page.within('.as-ci-cd') do
- check 'Enabled Auto DevOps (Beta) for projects by default'
+ check 'Enabled Auto DevOps for projects by default'
fill_in 'Auto devops domain', with: 'domain.com'
click_button 'Save changes'
end
diff --git a/spec/features/invites_spec.rb b/spec/features/invites_spec.rb
index e4be6193b8b..a986ddc4abc 100644
--- a/spec/features/invites_spec.rb
+++ b/spec/features/invites_spec.rb
@@ -5,18 +5,41 @@ describe 'Invites' do
let(:owner) { create(:user, name: 'John Doe') }
let(:group) { create(:group, name: 'Owned') }
let(:project) { create(:project, :repository, namespace: group) }
- let(:invite) { group.group_members.invite.last }
+ let(:group_invite) { group.group_members.invite.last }
before do
project.add_master(owner)
group.add_user(owner, Gitlab::Access::OWNER)
group.add_developer('user@example.com', owner)
- invite.generate_invite_token!
+ group_invite.generate_invite_token!
+ end
+
+ def confirm_email_and_sign_in(new_user)
+ new_user_token = User.find_by_email(new_user.email).confirmation_token
+
+ visit user_confirmation_path(confirmation_token: new_user_token)
+ fill_in_sign_in_form(new_user)
+ end
+
+ def fill_in_sign_up_form(new_user)
+ fill_in 'new_user_name', with: new_user.name
+ fill_in 'new_user_username', with: new_user.username
+ fill_in 'new_user_email', with: new_user.email
+ fill_in 'new_user_email_confirmation', with: new_user.email
+ fill_in 'new_user_password', with: new_user.password
+ click_button "Register"
+ end
+
+ def fill_in_sign_in_form(user)
+ fill_in 'user_login', with: user.email
+ fill_in 'user_password', with: user.password
+ check 'user_remember_me'
+ click_button 'Sign in'
end
context 'when signed out' do
before do
- visit invite_path(invite.raw_invite_token)
+ visit invite_path(group_invite.raw_invite_token)
end
it 'renders sign in page with sign in notice' do
@@ -25,12 +48,9 @@ describe 'Invites' do
end
it 'sign in and redirects to invitation page' do
- fill_in 'user_login', with: user.email
- fill_in 'user_password', with: user.password
- check 'user_remember_me'
- click_button 'Sign in'
+ fill_in_sign_in_form(user)
- expect(current_path).to eq(invite_path(invite.raw_invite_token))
+ expect(current_path).to eq(invite_path(group_invite.raw_invite_token))
expect(page).to have_content(
'You have been invited by John Doe to join group Owned as Developer.'
)
@@ -45,7 +65,7 @@ describe 'Invites' do
end
it 'shows message user already a member' do
- visit invite_path(invite.raw_invite_token)
+ visit invite_path(group_invite.raw_invite_token)
expect(page).to have_content('However, you are already a member of this group.')
end
end
@@ -53,7 +73,7 @@ describe 'Invites' do
describe 'accepting the invitation' do
before do
sign_in(user)
- visit invite_path(invite.raw_invite_token)
+ visit invite_path(group_invite.raw_invite_token)
end
it 'grants access and redirects to group page' do
@@ -69,7 +89,7 @@ describe 'Invites' do
context 'when signed in' do
before do
sign_in(user)
- visit invite_path(invite.raw_invite_token)
+ visit invite_path(group_invite.raw_invite_token)
end
it 'declines application and redirects to dashboard' do
@@ -83,7 +103,7 @@ describe 'Invites' do
context 'when signed out' do
before do
- visit decline_invite_path(invite.raw_invite_token)
+ visit decline_invite_path(group_invite.raw_invite_token)
end
it 'declines application and redirects to sign in page' do
@@ -94,4 +114,72 @@ describe 'Invites' do
end
end
end
+
+ describe 'invite an user using their email address' do
+ let(:new_user) { build_stubbed(:user) }
+ let(:invite_email) { new_user.email }
+ let(:group_invite) { create(:group_member, :invited, group: group, invite_email: invite_email) }
+ let!(:project_invite) { create(:project_member, :invited, project: project, invite_email: invite_email) }
+
+ before do
+ stub_application_setting(send_user_confirmation_email: send_email_confirmation)
+ visit invite_path(group_invite.raw_invite_token)
+ end
+
+ context 'email confirmation disabled' do
+ let(:send_email_confirmation) { false }
+
+ it 'signs up and redirects to the dashboard page with all the projects/groups invitations automatically accepted' do
+ fill_in_sign_up_form(new_user)
+
+ expect(current_path).to eq(dashboard_projects_path)
+ expect(page).to have_content(project.full_name)
+ visit group_path(group)
+ expect(page).to have_content(group.full_name)
+ end
+
+ context 'the user sign-up using a different email address' do
+ let(:invite_email) { build_stubbed(:user).email }
+
+ it 'signs up and redirects to the invitation page' do
+ fill_in_sign_up_form(new_user)
+
+ expect(current_path).to eq(invite_path(group_invite.raw_invite_token))
+ end
+ end
+ end
+
+ context 'email confirmation enabled' do
+ let(:send_email_confirmation) { true }
+
+ it 'signs up and redirects to root page with all the project/groups invitation automatically accepted' do
+ fill_in_sign_up_form(new_user)
+ confirm_email_and_sign_in(new_user)
+
+ expect(current_path).to eq(root_path)
+ expect(page).to have_content(project.full_name)
+ visit group_path(group)
+ expect(page).to have_content(group.full_name)
+ end
+
+ it "doesn't accept invitations until the user confirm his email" do
+ fill_in_sign_up_form(new_user)
+ sign_in(owner)
+
+ visit project_project_members_path(project)
+ expect(page).to have_content 'Invited'
+ end
+
+ context 'the user sign-up using a different email address' do
+ let(:invite_email) { build_stubbed(:user).email }
+
+ it 'signs up and redirects to the invitation page' do
+ fill_in_sign_up_form(new_user)
+ confirm_email_and_sign_in(new_user)
+
+ expect(current_path).to eq(invite_path(group_invite.raw_invite_token))
+ end
+ end
+ end
+ end
end
diff --git a/spec/features/merge_request/user_resolves_conflicts_spec.rb b/spec/features/merge_request/user_resolves_conflicts_spec.rb
index ac3c734599c..59aa90fc86f 100644
--- a/spec/features/merge_request/user_resolves_conflicts_spec.rb
+++ b/spec/features/merge_request/user_resolves_conflicts_spec.rb
@@ -27,7 +27,7 @@ describe 'Merge request > User resolves conflicts', :js do
end
end
- find_button('Commit conflict resolution').send_keys(:return)
+ find_button('Commit to source branch').send_keys(:return)
expect(page).to have_content('All merge conflicts were resolved')
merge_request.reload_diff
@@ -71,7 +71,7 @@ describe 'Merge request > User resolves conflicts', :js do
execute_script('ace.edit($(".files-wrapper .diff-file pre")[1]).setValue("Gregor Samsa woke from troubled dreams");')
end
- find_button('Commit conflict resolution').send_keys(:return)
+ find_button('Commit to source branch').send_keys(:return)
expect(page).to have_content('All merge conflicts were resolved')
merge_request.reload_diff
@@ -145,7 +145,7 @@ describe 'Merge request > User resolves conflicts', :js do
execute_script('ace.edit($(".files-wrapper .diff-file pre")[0]).setValue("Gregor Samsa woke from troubled dreams");')
end
- click_button 'Commit conflict resolution'
+ click_button 'Commit to source branch'
expect(page).to have_content('All merge conflicts were resolved')
diff --git a/spec/features/merge_request/user_sees_check_out_branch_modal_spec.rb b/spec/features/merge_request/user_sees_check_out_branch_modal_spec.rb
new file mode 100644
index 00000000000..c40c720d168
--- /dev/null
+++ b/spec/features/merge_request/user_sees_check_out_branch_modal_spec.rb
@@ -0,0 +1,24 @@
+require 'rails_helper'
+
+describe 'Merge request > User sees Check out branch modal', :js do
+ let(:project) { create(:project, :public, :repository) }
+ let(:user) { project.creator }
+ let(:merge_request) { create(:merge_request, source_project: project) }
+
+ before do
+ sign_in(user)
+ visit project_merge_request_path(project, merge_request)
+ wait_for_requests
+ click_button('Check out branch')
+ end
+
+ it 'shows the check out branch modal' do
+ expect(page).to have_content('Check out, review, and merge locally')
+ end
+
+ it 'closes the check out branch model with Escape keypress' do
+ find('#modal_merge_info').send_keys(:escape)
+
+ expect(page).not_to have_content('Check out, review, and merge locally')
+ end
+end
diff --git a/spec/features/projects/actve_tabs_spec.rb b/spec/features/projects/actve_tabs_spec.rb
index 0bda68b83e7..ce5606b63ae 100644
--- a/spec/features/projects/actve_tabs_spec.rb
+++ b/spec/features/projects/actve_tabs_spec.rb
@@ -35,7 +35,7 @@ describe 'Project active tab' do
visit project_path(project)
end
- it_behaves_like 'page has active tab', 'Overview'
+ it_behaves_like 'page has active tab', 'Project'
it_behaves_like 'page has active sub tab', 'Details'
context 'on project Home/Activity' do
@@ -43,7 +43,7 @@ describe 'Project active tab' do
click_tab('Activity')
end
- it_behaves_like 'page has active tab', 'Overview'
+ it_behaves_like 'page has active tab', 'Project'
it_behaves_like 'page has active sub tab', 'Activity'
end
end
diff --git a/spec/features/projects/blobs/blob_show_spec.rb b/spec/features/projects/blobs/blob_show_spec.rb
index ac82f869f0f..e7b305925f7 100644
--- a/spec/features/projects/blobs/blob_show_spec.rb
+++ b/spec/features/projects/blobs/blob_show_spec.rb
@@ -14,6 +14,8 @@ feature 'File blob', :js do
context 'Ruby file' do
before do
visit_blob('files/ruby/popen.rb')
+
+ wait_for_requests
end
it 'displays the blob' do
@@ -48,6 +50,8 @@ feature 'File blob', :js do
context 'visiting directly' do
before do
visit_blob('files/markdown/ruby-style-guide.md')
+
+ wait_for_requests
end
it 'displays the blob using the rich viewer' do
@@ -159,6 +163,8 @@ feature 'File blob', :js do
project.update_attribute(:lfs_enabled, true)
visit_blob('files/lfs/file.md')
+
+ wait_for_requests
end
it 'displays an error' do
@@ -207,6 +213,8 @@ feature 'File blob', :js do
context 'when LFS is disabled on the project' do
before do
visit_blob('files/lfs/file.md')
+
+ wait_for_requests
end
it 'displays the blob' do
@@ -242,6 +250,8 @@ feature 'File blob', :js do
).execute
visit_blob('files/test.pdf')
+
+ wait_for_requests
end
it 'displays the blob' do
@@ -268,6 +278,8 @@ feature 'File blob', :js do
project.update_attribute(:lfs_enabled, true)
visit_blob('files/lfs/lfs_object.iso')
+
+ wait_for_requests
end
it 'displays the blob' do
@@ -290,6 +302,8 @@ feature 'File blob', :js do
context 'when LFS is disabled on the project' do
before do
visit_blob('files/lfs/lfs_object.iso')
+
+ wait_for_requests
end
it 'displays the blob' do
@@ -313,6 +327,8 @@ feature 'File blob', :js do
context 'ZIP file' do
before do
visit_blob('Gemfile.zip')
+
+ wait_for_requests
end
it 'displays the blob' do
@@ -347,6 +363,8 @@ feature 'File blob', :js do
).execute
visit_blob('files/empty.md')
+
+ wait_for_requests
end
it 'displays an error' do
diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb
index fe334b531f0..a8a627d8806 100644
--- a/spec/features/projects/clusters/gcp_spec.rb
+++ b/spec/features/projects/clusters/gcp_spec.rb
@@ -182,6 +182,7 @@ feature 'Gcp Cluster', :js do
it 'user sees a login page' do
expect(page).to have_css('.signin-with-google')
+ expect(page).to have_link('Google account')
end
end
diff --git a/spec/features/projects/commit/comments/user_adds_comment_spec.rb b/spec/features/projects/commit/comments/user_adds_comment_spec.rb
new file mode 100644
index 00000000000..6397df086a7
--- /dev/null
+++ b/spec/features/projects/commit/comments/user_adds_comment_spec.rb
@@ -0,0 +1,170 @@
+require "spec_helper"
+
+describe "User adds a comment on a commit", :js do
+ include Spec::Support::Helpers::Features::NotesHelpers
+ include RepoHelpers
+
+ let(:comment_text) { "XML attached" }
+ let(:another_comment_text) { "SVG attached" }
+ let(:project) { create(:project, :repository) }
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ project.add_developer(user)
+ end
+
+ context "inline view" do
+ before do
+ visit(project_commit_path(project, sample_commit.id))
+ end
+
+ it "adds a comment" do
+ page.within(".js-main-target-form") do
+ expect(page).not_to have_link("Cancel")
+
+ emoji = ":+1:"
+
+ fill_in("note[note]", with: "#{comment_text} #{emoji}")
+
+ # Check on `Preview` tab
+ click_link("Preview")
+
+ expect(find(".js-md-preview")).to have_content(comment_text).and have_css("gl-emoji")
+ expect(page).not_to have_css(".js-note-text")
+
+ # Check on the `Write` tab
+ click_link("Write")
+
+ expect(page).to have_field("note[note]", with: "#{comment_text} #{emoji}")
+
+ # Submit comment from the `Preview` tab to get rid of a separate `it` block
+ # which would specially tests if everything gets cleared from the note form.
+ click_link("Preview")
+ click_button("Comment")
+ end
+
+ wait_for_requests
+
+ page.within(".note") do
+ expect(page).to have_content(comment_text).and have_css("gl-emoji")
+ end
+
+ page.within(".js-main-target-form") do
+ expect(page).to have_field("note[note]", with: "").and have_no_css(".js-md-preview")
+ end
+ end
+
+ context "when commenting on diff" do
+ it "adds a comment" do
+ page.within(".diff-file:nth-of-type(1)") do
+ # Open a form for a comment and check UI elements are visible and acting as expecting.
+ click_diff_line(sample_commit.line_code)
+
+ expect(page).to have_css(".js-temp-notes-holder form.new-note")
+ .and have_css(".js-close-discussion-note-form", text: "Cancel")
+
+ # The `Cancel` button closes the current form. The page should not have any open forms after that.
+ find(".js-close-discussion-note-form").click
+
+ expect(page).not_to have_css("form.new_note")
+
+ # Try to open the same form twice. There should be only one form opened.
+ click_diff_line(sample_commit.line_code)
+ click_diff_line(sample_commit.line_code)
+
+ expect(page).to have_css("form.new-note", count: 1)
+
+ # Fill in a form.
+ page.within("form[data-line-code='#{sample_commit.line_code}']") do
+ fill_in("note[note]", with: "#{comment_text} :smile:")
+ end
+
+ # Open another form and check we have two forms now (because the first one is filled in).
+ click_diff_line(sample_commit.del_line_code)
+
+ expect(page).to have_field("note[note]", with: "#{comment_text} :smile:")
+ .and have_field("note[note]", with: "")
+
+ # Test Preview feature for both forms.
+ page.within("form[data-line-code='#{sample_commit.line_code}']") do
+ click_link("Preview")
+ end
+
+ page.within("form[data-line-code='#{sample_commit.del_line_code}']") do
+ fill_in("note[note]", with: another_comment_text)
+
+ click_link("Preview")
+ end
+
+ expect(page).to have_css(".js-md-preview", visible: true, count: 2)
+ .and have_content(comment_text)
+ .and have_content(another_comment_text)
+ .and have_xpath("//gl-emoji[@data-name='smile']")
+
+ # Test UI elements, then submit.
+ page.within("form[data-line-code='#{sample_commit.line_code}']") do
+ expect(find(".js-note-text", visible: false).text).to eq("")
+ expect(page).to have_css('.js-md-write-button')
+
+ click_button("Comment")
+ end
+
+ expect(page).to have_button("Reply...").and have_no_css("form.new_note")
+ end
+
+ # A comment should be added and visible.
+ page.within(".diff-file:nth-of-type(1) .note") do
+ expect(page).to have_content(comment_text).and have_xpath("//gl-emoji[@data-name='smile']")
+ end
+ end
+ end
+ end
+
+ context "side-by-side view" do
+ before do
+ visit(project_commit_path(project, sample_commit.id, view: "parallel"))
+ end
+
+ it "adds a comment" do
+ new_comment = "New comment"
+ old_comment = "Old comment"
+
+ # Left side.
+ click_parallel_diff_line(sample_commit.del_line_code)
+
+ page.within(".diff-file:nth-of-type(1) form[data-line-code='#{sample_commit.del_line_code}']") do
+ fill_in("note[note]", with: old_comment)
+ click_button("Comment")
+ end
+
+ page.within(".diff-file:nth-of-type(1) .notes_content.parallel.old") do
+ expect(page).to have_content(old_comment)
+ end
+
+ # Right side.
+ click_parallel_diff_line(sample_commit.line_code)
+
+ page.within(".diff-file:nth-of-type(1) form[data-line-code='#{sample_commit.line_code}']") do
+ fill_in("note[note]", with: new_comment)
+ click_button("Comment")
+ end
+
+ wait_for_requests
+
+ expect(all(".diff-file:nth-of-type(1) .notes_content.parallel.new")[1].text).to have_content(new_comment)
+ end
+ end
+
+ private
+
+ def click_diff_line(line)
+ find(".line_holder[id='#{line}'] td:nth-of-type(1)").hover
+ find(".line_holder[id='#{line}'] button").click
+ end
+
+ def click_parallel_diff_line(line)
+ find(".line_holder.parallel td[id='#{line}']").find(:xpath, 'preceding-sibling::*[1][self::td]').hover
+ find(".line_holder.parallel button[data-line-code='#{line}']").click
+ end
+end
diff --git a/spec/features/projects/commit/comments/user_deletes_comments_spec.rb b/spec/features/projects/commit/comments/user_deletes_comments_spec.rb
new file mode 100644
index 00000000000..a727cab4ac7
--- /dev/null
+++ b/spec/features/projects/commit/comments/user_deletes_comments_spec.rb
@@ -0,0 +1,37 @@
+require "spec_helper"
+
+describe "User deletes comments on a commit", :js do
+ include Spec::Support::Helpers::Features::NotesHelpers
+ include RepoHelpers
+
+ let(:comment_text) { "XML attached" }
+ let(:project) { create(:project, :repository) }
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ project.add_developer(user)
+
+ visit(project_commit_path(project, sample_commit.id))
+
+ add_note(comment_text)
+ end
+
+ it "deletes comment" do
+ page.within(".note") do
+ expect(page).to have_content(comment_text)
+ end
+
+ page.within(".main-notes-list") do
+ note = find(".note")
+ note.hover
+
+ find(".more-actions").click
+ find(".more-actions .dropdown-menu li", match: :first)
+
+ accept_confirm { find(".js-note-delete").click }
+ end
+
+ expect(page).not_to have_css(".note")
+ end
+end
diff --git a/spec/features/projects/commit/comments/user_edits_comments_spec.rb b/spec/features/projects/commit/comments/user_edits_comments_spec.rb
new file mode 100644
index 00000000000..75bccd99f59
--- /dev/null
+++ b/spec/features/projects/commit/comments/user_edits_comments_spec.rb
@@ -0,0 +1,42 @@
+require "spec_helper"
+
+describe "User edits a comment on a commit", :js do
+ include Spec::Support::Helpers::Features::NotesHelpers
+ include RepoHelpers
+
+ let(:project) { create(:project, :repository) }
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ project.add_developer(user)
+
+ visit(project_commit_path(project, sample_commit.id))
+
+ add_note("XML attached")
+ end
+
+ it "edits comment" do
+ NEW_COMMENT_TEXT = "+1 Awesome!".freeze
+
+ page.within(".main-notes-list") do
+ note = find(".note")
+ note.hover
+
+ note.find(".js-note-edit").click
+ end
+
+ page.find(".current-note-edit-form textarea")
+
+ page.within(".current-note-edit-form") do
+ fill_in("note[note]", with: NEW_COMMENT_TEXT)
+ click_button("Save comment")
+ end
+
+ wait_for_requests
+
+ page.within(".note") do
+ expect(page).to have_content(NEW_COMMENT_TEXT)
+ end
+ end
+end
diff --git a/spec/features/projects/commit/user_comments_on_commit_spec.rb b/spec/features/projects/commit/user_comments_on_commit_spec.rb
deleted file mode 100644
index 5174f793367..00000000000
--- a/spec/features/projects/commit/user_comments_on_commit_spec.rb
+++ /dev/null
@@ -1,110 +0,0 @@
-require "spec_helper"
-
-describe "User comments on commit", :js do
- include Spec::Support::Helpers::Features::NotesHelpers
- include RepoHelpers
-
- let(:project) { create(:project, :repository) }
- let(:user) { create(:user) }
-
- COMMENT_TEXT = "XML attached".freeze
-
- before do
- sign_in(user)
- project.add_developer(user)
-
- visit(project_commit_path(project, sample_commit.id))
- end
-
- context "when adding new comment" do
- it "adds comment" do
- EMOJI = ":+1:".freeze
-
- page.within(".js-main-target-form") do
- expect(page).not_to have_link("Cancel")
-
- fill_in("note[note]", with: "#{COMMENT_TEXT} #{EMOJI}")
-
- # Check on `Preview` tab
- click_link("Preview")
-
- expect(find(".js-md-preview")).to have_content(COMMENT_TEXT).and have_css("gl-emoji")
- expect(page).not_to have_css(".js-note-text")
-
- # Check on `Write` tab
- click_link("Write")
-
- expect(page).to have_field("note[note]", with: "#{COMMENT_TEXT} #{EMOJI}")
-
- # Submit comment from the `Preview` tab to get rid of a separate `it` block
- # which would specially tests if everything gets cleared from the note form.
- click_link("Preview")
- click_button("Comment")
- end
-
- wait_for_requests
-
- page.within(".note") do
- expect(page).to have_content(COMMENT_TEXT).and have_css("gl-emoji")
- end
-
- page.within(".js-main-target-form") do
- expect(page).to have_field("note[note]", with: "").and have_no_css(".js-md-preview")
- end
- end
- end
-
- context "when editing comment" do
- before do
- add_note(COMMENT_TEXT)
- end
-
- it "edits comment" do
- NEW_COMMENT_TEXT = "+1 Awesome!".freeze
-
- page.within(".main-notes-list") do
- note = find(".note")
- note.hover
-
- note.find(".js-note-edit").click
- end
-
- page.find(".current-note-edit-form textarea")
-
- page.within(".current-note-edit-form") do
- fill_in("note[note]", with: NEW_COMMENT_TEXT)
- click_button("Save comment")
- end
-
- wait_for_requests
-
- page.within(".note") do
- expect(page).to have_content(NEW_COMMENT_TEXT)
- end
- end
- end
-
- context "when deleting comment" do
- before do
- add_note(COMMENT_TEXT)
- end
-
- it "deletes comment" do
- page.within(".note") do
- expect(page).to have_content(COMMENT_TEXT)
- end
-
- page.within(".main-notes-list") do
- note = find(".note")
- note.hover
-
- find(".more-actions").click
- find(".more-actions .dropdown-menu li", match: :first)
-
- accept_confirm { find(".js-note-delete").click }
- end
-
- expect(page).not_to have_css(".note")
- end
- end
-end
diff --git a/spec/features/projects/environments/environments_spec.rb b/spec/features/projects/environments/environments_spec.rb
index 5248a783db4..f9defa22d35 100644
--- a/spec/features/projects/environments/environments_spec.rb
+++ b/spec/features/projects/environments/environments_spec.rb
@@ -42,6 +42,22 @@ feature 'Environments page', :js do
expect(page).to have_content('You don\'t have any environments right now')
end
end
+
+ context 'when cluster is not reachable' do
+ let!(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
+ let!(:application_prometheus) { create(:clusters_applications_prometheus, :installed, cluster: cluster) }
+
+ before do
+ allow_any_instance_of(Kubeclient::Client).to receive(:proxy_url).and_raise(Kubeclient::HttpError.new(401, 'Unauthorized', nil))
+ end
+
+ it 'should show one environment without error' do
+ visit_environments(project, scope: 'available')
+
+ expect(page).to have_css('.environments-container')
+ expect(page.all('.environment-name').length).to eq(1)
+ end
+ end
end
describe 'with one stopped environment' do
diff --git a/spec/features/projects/merge_requests/user_creates_merge_request_spec.rb b/spec/features/projects/merge_requests/user_creates_merge_request_spec.rb
index f285c6c8783..1f21ef7b382 100644
--- a/spec/features/projects/merge_requests/user_creates_merge_request_spec.rb
+++ b/spec/features/projects/merge_requests/user_creates_merge_request_spec.rb
@@ -1,32 +1,84 @@
-require 'spec_helper'
+require "spec_helper"
-describe 'User creates a merge request', :js do
+describe "User creates a merge request", :js do
+ include ProjectForksHelper
+
+ let(:title) { "Some feature" }
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
before do
project.add_master(user)
sign_in(user)
+ end
+ it "creates a merge request" do
visit(project_new_merge_request_path(project))
- end
- it 'creates a merge request' do
- find('.js-source-branch').click
- click_link('fix')
+ find(".js-source-branch").click
+ click_link("fix")
- find('.js-target-branch').click
- click_link('feature')
+ find(".js-target-branch").click
+ click_link("feature")
- click_button('Compare branches')
+ click_button("Compare branches")
- fill_in('merge_request_title', with: 'Wiki Feature')
- click_button('Submit merge request')
+ fill_in("Title", with: title)
+ click_button("Submit merge request")
- page.within('.merge-request') do
- expect(page).to have_content('Wiki Feature')
+ page.within(".merge-request") do
+ expect(page).to have_content(title)
end
+ end
+
+ context "to a forked project" do
+ let(:forked_project) { fork_project(project, user, namespace: user.namespace, repository: true) }
+
+ it "creates a merge request" do
+ visit(project_new_merge_request_path(forked_project))
+
+ expect(page).to have_content("Source branch").and have_content("Target branch")
+ expect(find("#merge_request_target_project_id", visible: false).value).to eq(project.id.to_s)
+
+ click_button("Compare branches and continue")
+
+ expect(page).to have_content("You must select source and target branch")
+
+ first(".js-source-project").click
+ first(".dropdown-source-project a", text: forked_project.full_path)
+
+ first(".js-target-project").click
+ first(".dropdown-target-project a", text: project.full_path)
+
+ first(".js-source-branch").click
- wait_for_requests
+ wait_for_requests
+
+ source_branch = "fix"
+
+ first(".js-source-branch-dropdown .dropdown-content a", text: source_branch).click
+
+ click_button("Compare branches and continue")
+
+ expect(page).to have_css("h3.page-title", text: "New Merge Request")
+
+ page.within("form#new_merge_request") do
+ fill_in("Title", with: title)
+ end
+
+ click_button("Assignee")
+
+ expect(find(".js-assignee-search")["data-project-id"]).to eq(project.id.to_s)
+
+ page.within(".dropdown-menu-user") do
+ expect(page).to have_content("Unassigned")
+ .and have_content(user.name)
+ .and have_content(project.users.first.name)
+ end
+
+ click_button("Submit merge request")
+
+ expect(page).to have_content(title).and have_content("Request to merge #{user.namespace.name}:#{source_branch} into master")
+ end
end
end
diff --git a/spec/features/projects/settings/pipelines_settings_spec.rb b/spec/features/projects/settings/pipelines_settings_spec.rb
index e875a88a52b..cfdae246c09 100644
--- a/spec/features/projects/settings/pipelines_settings_spec.rb
+++ b/spec/features/projects/settings/pipelines_settings_spec.rb
@@ -75,6 +75,29 @@ describe "Projects > Settings > Pipelines settings" do
expect(project.auto_devops).not_to be_enabled
expect(project.auto_devops.domain).to eq('test.com')
end
+
+ context 'when there is a cluster with ingress and external_ip' do
+ before do
+ cluster = create(:cluster, projects: [project])
+ cluster.create_application_ingress!(external_ip: '192.168.1.100')
+ end
+
+ it 'shows the help text with the nip.io domain as an alternative to custom domain' do
+ visit project_settings_ci_cd_path(project)
+ expect(page).to have_content('192.168.1.100.nip.io can be used as an alternative to a custom domain')
+ end
+ end
+
+ context 'when there is no ingress' do
+ before do
+ create(:cluster, projects: [project])
+ end
+
+ it 'alternative to custom domain is not shown' do
+ visit project_settings_ci_cd_path(project)
+ expect(page).not_to have_content('can be used as an alternative to a custom domain')
+ end
+ end
end
end
end
diff --git a/spec/features/projects/user_sees_sidebar_spec.rb b/spec/features/projects/user_sees_sidebar_spec.rb
index cf80517b934..8a9255db9e8 100644
--- a/spec/features/projects/user_sees_sidebar_spec.rb
+++ b/spec/features/projects/user_sees_sidebar_spec.rb
@@ -37,7 +37,7 @@ describe 'Projects > User sees sidebar' do
visit project_path(project)
within('.nav-sidebar') do
- expect(page).to have_content 'Overview'
+ expect(page).to have_content 'Project'
expect(page).to have_content 'Issues'
expect(page).to have_content 'Wiki'
diff --git a/spec/features/projects/user_uses_shortcuts_spec.rb b/spec/features/projects/user_uses_shortcuts_spec.rb
index 47c5a8161d9..495a010b32c 100644
--- a/spec/features/projects/user_uses_shortcuts_spec.rb
+++ b/spec/features/projects/user_uses_shortcuts_spec.rb
@@ -13,6 +13,8 @@ describe 'User uses shortcuts', :js do
context 'when navigating to the Project pages' do
it 'redirects to the details page' do
+ visit project_issues_path(project)
+
find('body').native.send_key('g')
find('body').native.send_key('p')
@@ -22,7 +24,7 @@ describe 'User uses shortcuts', :js do
it 'redirects to the activity page' do
find('body').native.send_key('g')
- find('body').native.send_key('e')
+ find('body').native.send_key('v')
expect(page).to have_active_navigation('Project')
expect(page).to have_active_sub_navigation('Activity')
@@ -72,10 +74,19 @@ describe 'User uses shortcuts', :js do
expect(page).to have_active_sub_navigation('List')
end
+ it 'redirects to the issue board page' do
+ find('body').native.send_key('g')
+ find('body').native.send_key('b')
+
+ expect(page).to have_active_navigation('Issues')
+ expect(page).to have_active_sub_navigation('Board')
+ end
+
it 'redirects to the new issue page' do
find('body').native.send_key('i')
expect(page).to have_content(project.title)
+ expect(page).to have_content('New Issue')
end
end
@@ -88,6 +99,34 @@ describe 'User uses shortcuts', :js do
end
end
+ context 'when navigating to the CI / CD pages' do
+ it 'redirects to the Jobs page' do
+ find('body').native.send_key('g')
+ find('body').native.send_key('j')
+
+ expect(page).to have_active_navigation('CI / CD')
+ expect(page).to have_active_sub_navigation('Jobs')
+ end
+ end
+
+ context 'when navigating to the Operations pages' do
+ it 'redirects to the Environments page' do
+ find('body').native.send_key('g')
+ find('body').native.send_key('e')
+
+ expect(page).to have_active_navigation('Operations')
+ expect(page).to have_active_sub_navigation('Environments')
+ end
+
+ it 'redirects to the Kubernetes page' do
+ find('body').native.send_key('g')
+ find('body').native.send_key('k')
+
+ expect(page).to have_active_navigation('Operations')
+ expect(page).to have_active_sub_navigation('Kubernetes')
+ end
+ end
+
context 'when navigating to the Snippets pages' do
it 'redirects to the snippets page' do
find('body').native.send_key('g')
diff --git a/spec/features/projects/wiki/markdown_preview_spec.rb b/spec/features/projects/wiki/markdown_preview_spec.rb
index 6586ccaa400..e473739a6aa 100644
--- a/spec/features/projects/wiki/markdown_preview_spec.rb
+++ b/spec/features/projects/wiki/markdown_preview_spec.rb
@@ -155,4 +155,27 @@ feature 'Projects > Wiki > User previews markdown changes', :js do
end
end
end
+
+ it "does not linkify double brackets inside code blocks as expected" do
+ click_link 'New page'
+ page.within '#modal-new-wiki' do
+ fill_in :new_wiki_path, with: 'linkify_test'
+ click_button 'Create page'
+ end
+
+ page.within '.wiki-form' do
+ fill_in :wiki_content, with: <<-HEREDOC
+ `[[do_not_linkify]]`
+ ```
+ [[also_do_not_linkify]]
+ ```
+ HEREDOC
+ click_on "Preview"
+ end
+
+ expect(page).to have_content("do_not_linkify")
+
+ expect(page.html).to include('[[do_not_linkify]]')
+ expect(page.html).to include('[[also_do_not_linkify]]')
+ end
end
diff --git a/spec/features/projects/wiki/user_deletes_wiki_page_spec.rb b/spec/features/projects/wiki/user_deletes_wiki_page_spec.rb
index ab9420fc38f..2c67cec6b67 100644
--- a/spec/features/projects/wiki/user_deletes_wiki_page_spec.rb
+++ b/spec/features/projects/wiki/user_deletes_wiki_page_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'User deletes wiki page' do
+feature 'User deletes wiki page', :js do
let(:user) { create(:user) }
let(:project) { create(:project, :wiki_repo, namespace: user.namespace) }
let(:wiki_page) { create(:wiki_page, wiki: project.wiki) }
@@ -13,6 +13,7 @@ feature 'User deletes wiki page' do
it 'deletes a page' do
click_on('Edit')
click_on('Delete')
+ find('.js-modal-primary-action').click
expect(page).to have_content('Page was successfully deleted')
end
diff --git a/spec/features/users/user_browses_projects_on_user_page_spec.rb b/spec/features/users/user_browses_projects_on_user_page_spec.rb
index a70637c8370..7bede0b0d48 100644
--- a/spec/features/users/user_browses_projects_on_user_page_spec.rb
+++ b/spec/features/users/user_browses_projects_on_user_page_spec.rb
@@ -27,8 +27,8 @@ describe 'Users > User browses projects on user page', :js do
end
it 'paginates projects', :js do
- project = create(:project, namespace: user.namespace)
- project2 = create(:project, namespace: user.namespace)
+ project = create(:project, namespace: user.namespace, updated_at: 2.minutes.since)
+ project2 = create(:project, namespace: user.namespace, updated_at: 1.minute.since)
allow(Project).to receive(:default_per_page).and_return(1)
sign_in(user)
@@ -41,11 +41,11 @@ describe 'Users > User browses projects on user page', :js do
wait_for_requests
- expect(page).to have_content(project2.name)
+ expect(page).to have_content(project.name)
click_link('Next')
- expect(page).to have_content(project.name)
+ expect(page).to have_content(project2.name)
end
context 'when not signed in' do
diff --git a/spec/finders/issues_finder_spec.rb b/spec/finders/issues_finder_spec.rb
index 45439640ea3..74e91b02f0f 100644
--- a/spec/finders/issues_finder_spec.rb
+++ b/spec/finders/issues_finder_spec.rb
@@ -372,7 +372,7 @@ describe IssuesFinder do
end
context 'personal scope' do
- let(:scope) { 'assigned-to-me' }
+ let(:scope) { 'assigned_to_me' }
it 'returns issue assigned to the user' do
expect(issues).to contain_exactly(issue1, issue2)
diff --git a/spec/finders/personal_projects_finder_spec.rb b/spec/finders/personal_projects_finder_spec.rb
index 5e52898e9c0..00c551a1f65 100644
--- a/spec/finders/personal_projects_finder_spec.rb
+++ b/spec/finders/personal_projects_finder_spec.rb
@@ -4,14 +4,16 @@ describe PersonalProjectsFinder do
let(:source_user) { create(:user) }
let(:current_user) { create(:user) }
let(:finder) { described_class.new(source_user) }
- let!(:public_project) { create(:project, :public, namespace: source_user.namespace) }
+ let!(:public_project) do
+ create(:project, :public, namespace: source_user.namespace, updated_at: 1.hour.ago)
+ end
let!(:private_project) do
- create(:project, :private, namespace: source_user.namespace, path: 'mepmep')
+ create(:project, :private, namespace: source_user.namespace, updated_at: 3.hours.ago, path: 'mepmep')
end
let!(:internal_project) do
- create(:project, :internal, namespace: source_user.namespace, path: 'C')
+ create(:project, :internal, namespace: source_user.namespace, updated_at: 2.hours.ago, path: 'C')
end
before do
@@ -28,7 +30,7 @@ describe PersonalProjectsFinder do
subject { finder.execute(current_user) }
context 'normal user' do
- it { is_expected.to eq([internal_project, private_project, public_project]) }
+ it { is_expected.to eq([public_project, internal_project, private_project]) }
end
context 'external' do
@@ -36,7 +38,7 @@ describe PersonalProjectsFinder do
current_user.update_attributes(external: true)
end
- it { is_expected.to eq([private_project, public_project]) }
+ it { is_expected.to eq([public_project, private_project]) }
end
end
end
diff --git a/spec/fixtures/api/schemas/list.json b/spec/fixtures/api/schemas/list.json
index 622a1e40d07..05922df6b81 100644
--- a/spec/fixtures/api/schemas/list.json
+++ b/spec/fixtures/api/schemas/list.json
@@ -17,6 +17,7 @@
"required": [
"id",
"color",
+ "text_color",
"description",
"title",
"priority"
@@ -29,6 +30,7 @@
},
"description": { "type": ["string", "null"] },
"title": { "type": "string" },
+ "title": { "text_color": "string" },
"priority": { "type": ["integer", "null"] }
}
},
diff --git a/spec/fixtures/api/schemas/public_api/v4/projects.json b/spec/fixtures/api/schemas/public_api/v4/projects.json
index d89eeea89a5..17ad8d8c48d 100644
--- a/spec/fixtures/api/schemas/public_api/v4/projects.json
+++ b/spec/fixtures/api/schemas/public_api/v4/projects.json
@@ -20,6 +20,7 @@
"ssh_url_to_repo": { "type": "string" },
"http_url_to_repo": { "type": "string" },
"web_url": { "type": "string" },
+ "readme_url": { "type": ["string", "null"] },
"avatar_url": { "type": ["string", "null"] },
"star_count": { "type": "integer" },
"forks_count": { "type": "integer" },
diff --git a/spec/fixtures/emails/valid_new_issue_with_quote.eml b/spec/fixtures/emails/valid_new_issue_with_quote.eml
new file mode 100644
index 00000000000..0caf8ed4e9e
--- /dev/null
+++ b/spec/fixtures/emails/valid_new_issue_with_quote.eml
@@ -0,0 +1,25 @@
+Return-Path: <jake@adventuretime.ooo>
+Received: from iceking.adventuretime.ooo ([unix socket]) by iceking (Cyrus v2.2.13-Debian-2.2.13-19+squeeze3) with LMTPA; Thu, 13 Jun 2013 17:03:50 -0400
+Received: from mail-ie0-x234.google.com (mail-ie0-x234.google.com [IPv6:2607:f8b0:4001:c03::234]) by iceking.adventuretime.ooo (8.14.3/8.14.3/Debian-9.4) with ESMTP id r5DL3nFJ016967 (version=TLSv1/SSLv3 cipher=RC4-SHA bits=128 verify=NOT) for <incoming+gitlabhq/gitlabhq@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 17:03:50 -0400
+Received: by mail-ie0-f180.google.com with SMTP id f4so21977375iea.25 for <incoming+gitlabhq/gitlabhq@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 14:03:48 -0700
+Received: by 10.0.0.1 with HTTP; Thu, 13 Jun 2013 14:03:48 -0700
+Date: Thu, 13 Jun 2013 17:03:48 -0400
+From: Jake the Dog <jake@adventuretime.ooo>
+To: incoming+gitlabhq/gitlabhq+auth_token@appmail.adventuretime.ooo
+Message-ID: <CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com>
+Subject: New Issue by email
+Mime-Version: 1.0
+Content-Type: text/plain;
+ charset=ISO-8859-1
+Content-Transfer-Encoding: 7bit
+X-Sieve: CMU Sieve 2.2
+X-Received: by 10.0.0.1 with SMTP id n7mr11234144ipb.85.1371157428600; Thu,
+ 13 Jun 2013 14:03:48 -0700 (PDT)
+X-Scanned-By: MIMEDefang 2.69 on IPv6:2001:470:1d:165::1
+
+The reply by email functionality should be extended to allow creating a new issue by email.
+even when the email is forwarded to the project which may include lines that begin with ">"
+
+there should be a quote below this line:
+
+> this is a quote \ No newline at end of file
diff --git a/spec/helpers/blob_helper_spec.rb b/spec/helpers/blob_helper_spec.rb
index 8de615ad8c2..a3e010c3206 100644
--- a/spec/helpers/blob_helper_spec.rb
+++ b/spec/helpers/blob_helper_spec.rb
@@ -258,13 +258,19 @@ describe BlobHelper do
it 'returns full IDE path' do
Rails.application.routes.default_url_options[:script_name] = nil
- expect(helper.ide_edit_path(project, "master", "")).to eq("/-/ide/project/#{project.namespace.path}/#{project.path}/edit/master/")
+ expect(helper.ide_edit_path(project, "master", "")).to eq("/-/ide/project/#{project.namespace.path}/#{project.path}/edit/master")
+ end
+
+ it 'returns full IDE path with second -' do
+ Rails.application.routes.default_url_options[:script_name] = nil
+
+ expect(helper.ide_edit_path(project, "testing/slashes", "readme.md")).to eq("/-/ide/project/#{project.namespace.path}/#{project.path}/edit/testing/slashes/-/readme.md")
end
it 'returns IDE path without relative_url_root' do
Rails.application.routes.default_url_options[:script_name] = "/gitlab"
- expect(helper.ide_edit_path(project, "master", "")).to eq("/gitlab/-/ide/project/#{project.namespace.path}/#{project.path}/edit/master/")
+ expect(helper.ide_edit_path(project, "master", "")).to eq("/gitlab/-/ide/project/#{project.namespace.path}/#{project.path}/edit/master")
end
end
end
diff --git a/spec/javascripts/environments/environments_app_spec.js b/spec/javascripts/environments/environments_app_spec.js
index e4c3bf2bef1..615168645b4 100644
--- a/spec/javascripts/environments/environments_app_spec.js
+++ b/spec/javascripts/environments/environments_app_spec.js
@@ -1,8 +1,8 @@
-import _ from 'underscore';
import Vue from 'vue';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
import environmentsComponent from '~/environments/components/environments_app.vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
-import { headersInterceptor } from 'spec/helpers/vue_resource_helper';
import { environment, folder } from './mock_data';
describe('Environment', () => {
@@ -18,103 +18,76 @@ describe('Environment', () => {
let EnvironmentsComponent;
let component;
+ let mock;
beforeEach(() => {
+ mock = new MockAdapter(axios);
+
EnvironmentsComponent = Vue.extend(environmentsComponent);
});
+ afterEach(() => {
+ component.$destroy();
+ mock.restore();
+ });
+
describe('successful request', () => {
describe('without environments', () => {
- const environmentsEmptyResponseInterceptor = (request, next) => {
- next(request.respondWith(JSON.stringify([]), {
- status: 200,
- }));
- };
-
- beforeEach(() => {
- Vue.http.interceptors.push(environmentsEmptyResponseInterceptor);
- Vue.http.interceptors.push(headersInterceptor);
- });
-
- afterEach(() => {
- Vue.http.interceptors = _.without(
- Vue.http.interceptors, environmentsEmptyResponseInterceptor,
- );
- Vue.http.interceptors = _.without(Vue.http.interceptors, headersInterceptor);
- });
+ beforeEach((done) => {
+ mock.onGet(mockData.endpoint).reply(200, { environments: [] });
- it('should render the empty state', (done) => {
component = mountComponent(EnvironmentsComponent, mockData);
setTimeout(() => {
- expect(
- component.$el.querySelector('.js-new-environment-button').textContent,
- ).toContain('New environment');
-
- expect(
- component.$el.querySelector('.js-blank-state-title').textContent,
- ).toContain('You don\'t have any environments right now.');
-
done();
}, 0);
});
+
+ it('should render the empty state', () => {
+ expect(
+ component.$el.querySelector('.js-new-environment-button').textContent,
+ ).toContain('New environment');
+
+ expect(
+ component.$el.querySelector('.js-blank-state-title').textContent,
+ ).toContain('You don\'t have any environments right now.');
+ });
});
describe('with paginated environments', () => {
- let backupInterceptors;
- const environmentsResponseInterceptor = (request, next) => {
- next((response) => {
- response.headers.set('X-nExt-pAge', '2');
- });
-
- next(request.respondWith(JSON.stringify({
+ beforeEach((done) => {
+ mock.onGet(mockData.endpoint).reply(200, {
environments: [environment],
stopped_count: 1,
available_count: 0,
- }), {
- status: 200,
- headers: {
- 'X-nExt-pAge': '2',
- 'x-page': '1',
- 'X-Per-Page': '1',
- 'X-Prev-Page': '',
- 'X-TOTAL': '37',
- 'X-Total-Pages': '2',
- },
- }));
- };
-
- beforeEach(() => {
- backupInterceptors = Vue.http.interceptors;
- Vue.http.interceptors = [
- environmentsResponseInterceptor,
- headersInterceptor,
- ];
- component = mountComponent(EnvironmentsComponent, mockData);
- });
+ }, {
+ 'X-nExt-pAge': '2',
+ 'x-page': '1',
+ 'X-Per-Page': '1',
+ 'X-Prev-Page': '',
+ 'X-TOTAL': '37',
+ 'X-Total-Pages': '2',
+ });
- afterEach(() => {
- Vue.http.interceptors = backupInterceptors;
- });
+ component = mountComponent(EnvironmentsComponent, mockData);
- it('should render a table with environments', (done) => {
setTimeout(() => {
- expect(component.$el.querySelectorAll('table')).not.toBeNull();
- expect(
- component.$el.querySelector('.environment-name').textContent.trim(),
- ).toEqual(environment.name);
done();
}, 0);
});
+ it('should render a table with environments', () => {
+ expect(component.$el.querySelectorAll('table')).not.toBeNull();
+ expect(
+ component.$el.querySelector('.environment-name').textContent.trim(),
+ ).toEqual(environment.name);
+ });
+
describe('pagination', () => {
- it('should render pagination', (done) => {
- setTimeout(() => {
- expect(
- component.$el.querySelectorAll('.gl-pagination li').length,
- ).toEqual(5);
- done();
- }, 0);
+ it('should render pagination', () => {
+ expect(
+ component.$el.querySelectorAll('.gl-pagination li').length,
+ ).toEqual(5);
});
it('should make an API request when page is clicked', (done) => {
@@ -133,50 +106,39 @@ describe('Environment', () => {
expect(component.updateContent).toHaveBeenCalledWith({ scope: 'stopped', page: '1' });
done();
- });
+ }, 0);
});
});
});
});
describe('unsuccessful request', () => {
- const environmentsErrorResponseInterceptor = (request, next) => {
- next(request.respondWith(JSON.stringify([]), {
- status: 500,
- }));
- };
-
- beforeEach(() => {
- Vue.http.interceptors.push(environmentsErrorResponseInterceptor);
- });
+ beforeEach((done) => {
+ mock.onGet(mockData.endpoint).reply(500, {});
- afterEach(() => {
- Vue.http.interceptors = _.without(
- Vue.http.interceptors, environmentsErrorResponseInterceptor,
- );
- });
-
- it('should render empty state', (done) => {
component = mountComponent(EnvironmentsComponent, mockData);
setTimeout(() => {
- expect(
- component.$el.querySelector('.js-blank-state-title').textContent,
- ).toContain('You don\'t have any environments right now.');
done();
}, 0);
});
+
+ it('should render empty state', () => {
+ expect(
+ component.$el.querySelector('.js-blank-state-title').textContent,
+ ).toContain('You don\'t have any environments right now.');
+ });
});
describe('expandable folders', () => {
- const environmentsResponseInterceptor = (request, next) => {
- next(request.respondWith(JSON.stringify({
- environments: [folder],
- stopped_count: 0,
- available_count: 1,
- }), {
- status: 200,
- headers: {
+ beforeEach(() => {
+ mock.onGet(mockData.endpoint).reply(200,
+ {
+ environments: [folder],
+ stopped_count: 0,
+ available_count: 1,
+ },
+ {
'X-nExt-pAge': '2',
'x-page': '1',
'X-Per-Page': '1',
@@ -184,18 +146,11 @@ describe('Environment', () => {
'X-TOTAL': '37',
'X-Total-Pages': '2',
},
- }));
- };
+ );
- beforeEach(() => {
- Vue.http.interceptors.push(environmentsResponseInterceptor);
- component = mountComponent(EnvironmentsComponent, mockData);
- });
+ mock.onGet(environment.folder_path).reply(200, { environments: [environment] });
- afterEach(() => {
- Vue.http.interceptors = _.without(
- Vue.http.interceptors, environmentsResponseInterceptor,
- );
+ component = mountComponent(EnvironmentsComponent, mockData);
});
it('should open a closed folder', (done) => {
@@ -211,7 +166,7 @@ describe('Environment', () => {
).not.toContain('display: none');
done();
});
- });
+ }, 0);
});
it('should close an opened folder', (done) => {
@@ -233,7 +188,7 @@ describe('Environment', () => {
done();
});
});
- });
+ }, 0);
});
it('should show children environments and a button to show all environments', (done) => {
@@ -242,49 +197,32 @@ describe('Environment', () => {
component.$el.querySelector('.folder-name').click();
Vue.nextTick(() => {
- const folderInterceptor = (request, next) => {
- next(request.respondWith(JSON.stringify({
- environments: [environment],
- }), { status: 200 }));
- };
-
- Vue.http.interceptors.push(folderInterceptor);
-
// wait for next async request
setTimeout(() => {
expect(component.$el.querySelectorAll('.js-child-row').length).toEqual(1);
expect(component.$el.querySelector('.text-center > a.btn').textContent).toContain('Show all');
-
- Vue.http.interceptors = _.without(Vue.http.interceptors, folderInterceptor);
done();
});
});
- });
+ }, 0);
});
});
describe('methods', () => {
- const environmentsEmptyResponseInterceptor = (request, next) => {
- next(request.respondWith(JSON.stringify([]), {
- status: 200,
- }));
- };
-
beforeEach(() => {
- Vue.http.interceptors.push(environmentsEmptyResponseInterceptor);
- Vue.http.interceptors.push(headersInterceptor);
+ mock.onGet(mockData.endpoint).reply(200,
+ {
+ environments: [],
+ stopped_count: 0,
+ available_count: 1,
+ },
+ {},
+ );
component = mountComponent(EnvironmentsComponent, mockData);
spyOn(history, 'pushState').and.stub();
});
- afterEach(() => {
- Vue.http.interceptors = _.without(
- Vue.http.interceptors, environmentsEmptyResponseInterceptor,
- );
- Vue.http.interceptors = _.without(Vue.http.interceptors, headersInterceptor);
- });
-
describe('updateContent', () => {
it('should set given parameters', (done) => {
component.updateContent({ scope: 'stopped', page: '3' })
diff --git a/spec/javascripts/environments/folder/environments_folder_view_spec.js b/spec/javascripts/environments/folder/environments_folder_view_spec.js
index 906a1116974..f5ce4df0bfe 100644
--- a/spec/javascripts/environments/folder/environments_folder_view_spec.js
+++ b/spec/javascripts/environments/folder/environments_folder_view_spec.js
@@ -1,13 +1,15 @@
-import _ from 'underscore';
import Vue from 'vue';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
import environmentsFolderViewComponent from '~/environments/folder/environments_folder_view.vue';
-import { headersInterceptor } from 'spec/helpers/vue_resource_helper';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
import { environmentsList } from '../mock_data';
describe('Environments Folder View', () => {
let Component;
let component;
+ let mock;
+
const mockData = {
endpoint: 'environments.json',
folderName: 'review',
@@ -17,46 +19,35 @@ describe('Environments Folder View', () => {
};
beforeEach(() => {
+ mock = new MockAdapter(axios);
+
Component = Vue.extend(environmentsFolderViewComponent);
});
afterEach(() => {
+ mock.restore();
+
component.$destroy();
});
describe('successful request', () => {
- const environmentsResponseInterceptor = (request, next) => {
- next(request.respondWith(JSON.stringify({
+ beforeEach(() => {
+ mock.onGet(mockData.endpoint).reply(200, {
environments: environmentsList,
stopped_count: 1,
available_count: 0,
- }), {
- status: 200,
- headers: {
- 'X-nExt-pAge': '2',
- 'x-page': '1',
- 'X-Per-Page': '2',
- 'X-Prev-Page': '',
- 'X-TOTAL': '20',
- 'X-Total-Pages': '10',
- },
- }));
- };
-
- beforeEach(() => {
- Vue.http.interceptors.push(environmentsResponseInterceptor);
- Vue.http.interceptors.push(headersInterceptor);
+ }, {
+ 'X-nExt-pAge': '2',
+ 'x-page': '1',
+ 'X-Per-Page': '2',
+ 'X-Prev-Page': '',
+ 'X-TOTAL': '20',
+ 'X-Total-Pages': '10',
+ });
component = mountComponent(Component, mockData);
});
- afterEach(() => {
- Vue.http.interceptors = _.without(
- Vue.http.interceptors, environmentsResponseInterceptor,
- );
- Vue.http.interceptors = _.without(Vue.http.interceptors, headersInterceptor);
- });
-
it('should render a table with environments', (done) => {
setTimeout(() => {
expect(component.$el.querySelectorAll('table')).not.toBeNull();
@@ -135,25 +126,15 @@ describe('Environments Folder View', () => {
});
describe('unsuccessful request', () => {
- const environmentsErrorResponseInterceptor = (request, next) => {
- next(request.respondWith(JSON.stringify([]), {
- status: 500,
- }));
- };
-
beforeEach(() => {
- Vue.http.interceptors.push(environmentsErrorResponseInterceptor);
- });
+ mock.onGet(mockData.endpoint).reply(500, {
+ environments: [],
+ });
- afterEach(() => {
- Vue.http.interceptors = _.without(
- Vue.http.interceptors, environmentsErrorResponseInterceptor,
- );
+ component = mountComponent(Component, mockData);
});
it('should not render a table', (done) => {
- component = mountComponent(Component, mockData);
-
setTimeout(() => {
expect(
component.$el.querySelector('table'),
@@ -190,27 +171,15 @@ describe('Environments Folder View', () => {
});
describe('methods', () => {
- const environmentsEmptyResponseInterceptor = (request, next) => {
- next(request.respondWith(JSON.stringify([]), {
- status: 200,
- }));
- };
-
beforeEach(() => {
- Vue.http.interceptors.push(environmentsEmptyResponseInterceptor);
- Vue.http.interceptors.push(headersInterceptor);
+ mock.onGet(mockData.endpoint).reply(200, {
+ environments: [],
+ });
component = mountComponent(Component, mockData);
spyOn(history, 'pushState').and.stub();
});
- afterEach(() => {
- Vue.http.interceptors = _.without(
- Vue.http.interceptors, environmentsEmptyResponseInterceptor,
- );
- Vue.http.interceptors = _.without(Vue.http.interceptors, headersInterceptor);
- });
-
describe('updateContent', () => {
it('should set given parameters', (done) => {
component.updateContent({ scope: 'stopped', page: '4' })
diff --git a/spec/javascripts/environments/mock_data.js b/spec/javascripts/environments/mock_data.js
index 15e11aa686b..8a1e26935d9 100644
--- a/spec/javascripts/environments/mock_data.js
+++ b/spec/javascripts/environments/mock_data.js
@@ -82,6 +82,7 @@ export const environment = {
stop_path: '/root/review-app/environments/7/stop',
created_at: '2017-01-31T10:53:46.894Z',
updated_at: '2017-01-31T10:53:46.894Z',
+ folder_path: '/root/review-app/environments/7',
},
};
diff --git a/spec/javascripts/helpers/vue_mount_component_helper.js b/spec/javascripts/helpers/vue_mount_component_helper.js
index effacbcff4e..a34a1add4e0 100644
--- a/spec/javascripts/helpers/vue_mount_component_helper.js
+++ b/spec/javascripts/helpers/vue_mount_component_helper.js
@@ -1,14 +1,30 @@
+import Vue from 'vue';
+
+const mountComponent = (Component, props = {}, el = null) => new Component({
+ propsData: props,
+}).$mount(el);
+
export const createComponentWithStore = (Component, store, propsData = {}) => new Component({
store,
propsData,
});
+export const createComponentWithMixin = (mixins = [], state = {}, props = {}, template = '<div></div>') => {
+ const Component = Vue.extend({
+ template,
+ mixins,
+ data() {
+ return props;
+ },
+ });
+
+ return mountComponent(Component, props);
+};
+
export const mountComponentWithStore = (Component, { el, props, store }) =>
new Component({
store,
propsData: props || { },
}).$mount(el);
-export default (Component, props = {}, el = null) => new Component({
- propsData: props,
-}).$mount(el);
+export default mountComponent;
diff --git a/spec/javascripts/helpers/vuex_action_helper.js b/spec/javascripts/helpers/vuex_action_helper.js
index 83f29d1b0c2..d6ab0aeeed7 100644
--- a/spec/javascripts/helpers/vuex_action_helper.js
+++ b/spec/javascripts/helpers/vuex_action_helper.js
@@ -55,7 +55,7 @@ export default (action, payload, state, expectedMutations, expectedActions, done
};
// call the action with mocked store and arguments
- action({ commit, state, dispatch }, payload);
+ action({ commit, state, dispatch, rootState: state }, payload);
// check if no mutations should have been dispatched
if (expectedMutations.length === 0) {
diff --git a/spec/javascripts/ide/components/ide_spec.js b/spec/javascripts/ide/components/ide_spec.js
index 6f580e1f7af..045a60e56a0 100644
--- a/spec/javascripts/ide/components/ide_spec.js
+++ b/spec/javascripts/ide/components/ide_spec.js
@@ -107,5 +107,11 @@ describe('ide component', () => {
vm.mousetrapStopCallback(null, vm.$el.querySelector('.dropdown-input-field'), 't'),
).toBe(true);
});
+
+ it('stops callback in monaco editor', () => {
+ setFixtures('<div class="inputarea"></div>');
+
+ expect(vm.mousetrapStopCallback(null, document.querySelector('.inputarea'), 't')).toBe(true);
+ });
});
});
diff --git a/spec/javascripts/ide/components/repo_editor_spec.js b/spec/javascripts/ide/components/repo_editor_spec.js
index ff500acd849..d3f80e6f9c0 100644
--- a/spec/javascripts/ide/components/repo_editor_spec.js
+++ b/spec/javascripts/ide/components/repo_editor_spec.js
@@ -346,4 +346,24 @@ describe('RepoEditor', () => {
});
});
});
+
+ it('calls removePendingTab when old file is pending', done => {
+ spyOnProperty(vm, 'shouldHideEditor').and.returnValue(true);
+ spyOn(vm, 'removePendingTab');
+
+ vm.file.pending = true;
+
+ vm
+ .$nextTick()
+ .then(() => {
+ vm.file = file('testing');
+
+ return vm.$nextTick();
+ })
+ .then(() => {
+ expect(vm.removePendingTab).toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
});
diff --git a/spec/javascripts/ide/mock_data.js b/spec/javascripts/ide/mock_data.js
index c059862b9d1..7e641c7984b 100644
--- a/spec/javascripts/ide/mock_data.js
+++ b/spec/javascripts/ide/mock_data.js
@@ -1,4 +1,3 @@
-// eslint-disable-next-line import/prefer-default-export
export const projectData = {
id: 1,
name: 'abcproject',
@@ -14,3 +13,49 @@ export const projectData = {
mergeRequests: {},
merge_requests_enabled: true,
};
+
+export const pipelines = [
+ {
+ id: 1,
+ ref: 'master',
+ sha: '123',
+ status: 'failed',
+ },
+ {
+ id: 2,
+ ref: 'master',
+ sha: '213',
+ status: 'success',
+ },
+];
+
+export const jobs = [
+ {
+ id: 1,
+ name: 'test',
+ status: 'failed',
+ stage: 'test',
+ duration: 1,
+ },
+ {
+ id: 2,
+ name: 'test 2',
+ status: 'failed',
+ stage: 'test',
+ duration: 1,
+ },
+ {
+ id: 3,
+ name: 'test 3',
+ status: 'failed',
+ stage: 'test',
+ duration: 1,
+ },
+ {
+ id: 4,
+ name: 'test 3',
+ status: 'failed',
+ stage: 'build',
+ duration: 1,
+ },
+];
diff --git a/spec/javascripts/ide/stores/modules/pipelines/actions_spec.js b/spec/javascripts/ide/stores/modules/pipelines/actions_spec.js
new file mode 100644
index 00000000000..85fbcf8084b
--- /dev/null
+++ b/spec/javascripts/ide/stores/modules/pipelines/actions_spec.js
@@ -0,0 +1,289 @@
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import actions, {
+ requestLatestPipeline,
+ receiveLatestPipelineError,
+ receiveLatestPipelineSuccess,
+ fetchLatestPipeline,
+ requestJobs,
+ receiveJobsError,
+ receiveJobsSuccess,
+ fetchJobs,
+} from '~/ide/stores/modules/pipelines/actions';
+import state from '~/ide/stores/modules/pipelines/state';
+import * as types from '~/ide/stores/modules/pipelines/mutation_types';
+import testAction from '../../../../helpers/vuex_action_helper';
+import { pipelines, jobs } from '../../../mock_data';
+
+describe('IDE pipelines actions', () => {
+ let mockedState;
+ let mock;
+
+ beforeEach(() => {
+ mockedState = state();
+ mock = new MockAdapter(axios);
+
+ gon.api_version = 'v4';
+ mockedState.currentProjectId = 'test/project';
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('requestLatestPipeline', () => {
+ it('commits request', done => {
+ testAction(
+ requestLatestPipeline,
+ null,
+ mockedState,
+ [{ type: types.REQUEST_LATEST_PIPELINE }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveLatestPipelineError', () => {
+ it('commits error', done => {
+ testAction(
+ receiveLatestPipelineError,
+ null,
+ mockedState,
+ [{ type: types.RECEIVE_LASTEST_PIPELINE_ERROR }],
+ [],
+ done,
+ );
+ });
+
+ it('creates flash message', () => {
+ const flashSpy = spyOnDependency(actions, 'flash');
+
+ receiveLatestPipelineError({ commit() {} });
+
+ expect(flashSpy).toHaveBeenCalled();
+ });
+ });
+
+ describe('receiveLatestPipelineSuccess', () => {
+ it('commits pipeline', done => {
+ testAction(
+ receiveLatestPipelineSuccess,
+ pipelines[0],
+ mockedState,
+ [{ type: types.RECEIVE_LASTEST_PIPELINE_SUCCESS, payload: pipelines[0] }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('fetchLatestPipeline', () => {
+ describe('success', () => {
+ beforeEach(() => {
+ mock.onGet(/\/api\/v4\/projects\/(.*)\/pipelines(.*)/).replyOnce(200, pipelines);
+ });
+
+ it('dispatches request', done => {
+ testAction(
+ fetchLatestPipeline,
+ '123',
+ mockedState,
+ [],
+ [{ type: 'requestLatestPipeline' }, { type: 'receiveLatestPipelineSuccess' }],
+ done,
+ );
+ });
+
+ it('dispatches success with latest pipeline', done => {
+ testAction(
+ fetchLatestPipeline,
+ '123',
+ mockedState,
+ [],
+ [
+ { type: 'requestLatestPipeline' },
+ { type: 'receiveLatestPipelineSuccess', payload: pipelines[0] },
+ ],
+ done,
+ );
+ });
+
+ it('calls axios with correct params', () => {
+ const apiSpy = spyOn(axios, 'get').and.callThrough();
+
+ fetchLatestPipeline({ dispatch() {}, rootState: state }, '123');
+
+ expect(apiSpy).toHaveBeenCalledWith(jasmine.anything(), {
+ params: {
+ sha: '123',
+ per_page: '1',
+ },
+ });
+ });
+ });
+
+ describe('error', () => {
+ beforeEach(() => {
+ mock.onGet(/\/api\/v4\/projects\/(.*)\/pipelines(.*)/).replyOnce(500);
+ });
+
+ it('dispatches error', done => {
+ testAction(
+ fetchLatestPipeline,
+ '123',
+ mockedState,
+ [],
+ [{ type: 'requestLatestPipeline' }, { type: 'receiveLatestPipelineError' }],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('requestJobs', () => {
+ it('commits request', done => {
+ testAction(requestJobs, null, mockedState, [{ type: types.REQUEST_JOBS }], [], done);
+ });
+ });
+
+ describe('receiveJobsError', () => {
+ it('commits error', done => {
+ testAction(
+ receiveJobsError,
+ null,
+ mockedState,
+ [{ type: types.RECEIVE_JOBS_ERROR }],
+ [],
+ done,
+ );
+ });
+
+ it('creates flash message', () => {
+ const flashSpy = spyOnDependency(actions, 'flash');
+
+ receiveJobsError({ commit() {} });
+
+ expect(flashSpy).toHaveBeenCalled();
+ });
+ });
+
+ describe('receiveJobsSuccess', () => {
+ it('commits jobs', done => {
+ testAction(
+ receiveJobsSuccess,
+ jobs,
+ mockedState,
+ [{ type: types.RECEIVE_JOBS_SUCCESS, payload: jobs }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('fetchJobs', () => {
+ let page = '';
+
+ beforeEach(() => {
+ mockedState.latestPipeline = pipelines[0];
+ });
+
+ describe('success', () => {
+ beforeEach(() => {
+ mock.onGet(/\/api\/v4\/projects\/(.*)\/pipelines\/(.*)\/jobs/).replyOnce(() => [
+ 200,
+ jobs,
+ {
+ 'x-next-page': page,
+ },
+ ]);
+ });
+
+ it('dispatches request', done => {
+ testAction(
+ fetchJobs,
+ null,
+ mockedState,
+ [],
+ [{ type: 'requestJobs' }, { type: 'receiveJobsSuccess' }],
+ done,
+ );
+ });
+
+ it('dispatches success with jobs', done => {
+ testAction(
+ fetchJobs,
+ null,
+ mockedState,
+ [],
+ [{ type: 'requestJobs' }, { type: 'receiveJobsSuccess', payload: jobs }],
+ done,
+ );
+ });
+
+ it('dispatches twice for both pages', done => {
+ page = '2';
+
+ testAction(
+ fetchJobs,
+ null,
+ mockedState,
+ [],
+ [
+ { type: 'requestJobs' },
+ { type: 'receiveJobsSuccess', payload: jobs },
+ { type: 'fetchJobs', payload: '2' },
+ { type: 'requestJobs' },
+ { type: 'receiveJobsSuccess', payload: jobs },
+ ],
+ done,
+ );
+ });
+
+ it('calls axios with correct URL', () => {
+ const apiSpy = spyOn(axios, 'get').and.callThrough();
+
+ fetchJobs({ dispatch() {}, state: mockedState, rootState: mockedState });
+
+ expect(apiSpy).toHaveBeenCalledWith('/api/v4/projects/test%2Fproject/pipelines/1/jobs', {
+ params: { page: '1' },
+ });
+ });
+
+ it('calls axios with the next page', () => {
+ const apiSpy = spyOn(axios, 'get').and.callThrough();
+
+ fetchJobs({ dispatch() {}, state: mockedState, rootState: mockedState });
+
+ expect(apiSpy).toHaveBeenCalledWith('/api/v4/projects/test%2Fproject/pipelines/1/jobs', {
+ params: { page: '1' },
+ });
+
+ page = '2';
+
+ fetchJobs({ dispatch() {}, state: mockedState, rootState: mockedState }, page);
+
+ expect(apiSpy).toHaveBeenCalledWith('/api/v4/projects/test%2Fproject/pipelines/1/jobs', {
+ params: { page: '2' },
+ });
+ });
+ });
+
+ describe('error', () => {
+ beforeEach(() => {
+ mock.onGet(/\/api\/v4\/projects\/(.*)\/pipelines(.*)/).replyOnce(500);
+ });
+
+ it('dispatches error', done => {
+ testAction(
+ fetchJobs,
+ null,
+ mockedState,
+ [],
+ [{ type: 'requestJobs' }, { type: 'receiveJobsError' }],
+ done,
+ );
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/ide/stores/modules/pipelines/getters_spec.js b/spec/javascripts/ide/stores/modules/pipelines/getters_spec.js
new file mode 100644
index 00000000000..b2a7e8a9025
--- /dev/null
+++ b/spec/javascripts/ide/stores/modules/pipelines/getters_spec.js
@@ -0,0 +1,71 @@
+import * as getters from '~/ide/stores/modules/pipelines/getters';
+import state from '~/ide/stores/modules/pipelines/state';
+
+describe('IDE pipeline getters', () => {
+ let mockedState;
+
+ beforeEach(() => {
+ mockedState = state();
+ });
+
+ describe('hasLatestPipeline', () => {
+ it('returns false when loading is true', () => {
+ mockedState.isLoadingPipeline = true;
+
+ expect(getters.hasLatestPipeline(mockedState)).toBe(false);
+ });
+
+ it('returns false when pipelines is null', () => {
+ mockedState.latestPipeline = null;
+
+ expect(getters.hasLatestPipeline(mockedState)).toBe(false);
+ });
+
+ it('returns false when loading is true & pipelines is null', () => {
+ mockedState.latestPipeline = null;
+ mockedState.isLoadingPipeline = true;
+
+ expect(getters.hasLatestPipeline(mockedState)).toBe(false);
+ });
+
+ it('returns true when loading is false & pipelines is an object', () => {
+ mockedState.latestPipeline = {
+ id: 1,
+ };
+ mockedState.isLoadingPipeline = false;
+
+ expect(getters.hasLatestPipeline(mockedState)).toBe(true);
+ });
+ });
+
+ describe('failedJobs', () => {
+ it('returns array of failed jobs', () => {
+ mockedState.stages = [
+ {
+ title: 'test',
+ jobs: [{ id: 1, status: 'failed' }, { id: 2, status: 'success' }],
+ },
+ {
+ title: 'build',
+ jobs: [{ id: 3, status: 'failed' }, { id: 4, status: 'failed' }],
+ },
+ ];
+
+ expect(getters.failedJobs(mockedState).length).toBe(3);
+ expect(getters.failedJobs(mockedState)).toEqual([
+ {
+ id: 1,
+ status: jasmine.anything(),
+ },
+ {
+ id: 3,
+ status: jasmine.anything(),
+ },
+ {
+ id: 4,
+ status: jasmine.anything(),
+ },
+ ]);
+ });
+ });
+});
diff --git a/spec/javascripts/ide/stores/modules/pipelines/mutations_spec.js b/spec/javascripts/ide/stores/modules/pipelines/mutations_spec.js
new file mode 100644
index 00000000000..8262e916243
--- /dev/null
+++ b/spec/javascripts/ide/stores/modules/pipelines/mutations_spec.js
@@ -0,0 +1,120 @@
+import mutations from '~/ide/stores/modules/pipelines/mutations';
+import state from '~/ide/stores/modules/pipelines/state';
+import * as types from '~/ide/stores/modules/pipelines/mutation_types';
+import { pipelines, jobs } from '../../../mock_data';
+
+describe('IDE pipelines mutations', () => {
+ let mockedState;
+
+ beforeEach(() => {
+ mockedState = state();
+ });
+
+ describe(types.REQUEST_LATEST_PIPELINE, () => {
+ it('sets loading to true', () => {
+ mutations[types.REQUEST_LATEST_PIPELINE](mockedState);
+
+ expect(mockedState.isLoadingPipeline).toBe(true);
+ });
+ });
+
+ describe(types.RECEIVE_LASTEST_PIPELINE_ERROR, () => {
+ it('sets loading to false', () => {
+ mutations[types.RECEIVE_LASTEST_PIPELINE_ERROR](mockedState);
+
+ expect(mockedState.isLoadingPipeline).toBe(false);
+ });
+ });
+
+ describe(types.RECEIVE_LASTEST_PIPELINE_SUCCESS, () => {
+ it('sets loading to false on success', () => {
+ mutations[types.RECEIVE_LASTEST_PIPELINE_SUCCESS](mockedState, pipelines[0]);
+
+ expect(mockedState.isLoadingPipeline).toBe(false);
+ });
+
+ it('sets latestPipeline', () => {
+ mutations[types.RECEIVE_LASTEST_PIPELINE_SUCCESS](mockedState, pipelines[0]);
+
+ expect(mockedState.latestPipeline).toEqual({
+ id: pipelines[0].id,
+ status: pipelines[0].status,
+ });
+ });
+
+ it('does not set latest pipeline if pipeline is null', () => {
+ mutations[types.RECEIVE_LASTEST_PIPELINE_SUCCESS](mockedState, null);
+
+ expect(mockedState.latestPipeline).toEqual(null);
+ });
+ });
+
+ describe(types.REQUEST_JOBS, () => {
+ it('sets jobs loading to true', () => {
+ mutations[types.REQUEST_JOBS](mockedState);
+
+ expect(mockedState.isLoadingJobs).toBe(true);
+ });
+ });
+
+ describe(types.RECEIVE_JOBS_ERROR, () => {
+ it('sets jobs loading to false', () => {
+ mutations[types.RECEIVE_JOBS_ERROR](mockedState);
+
+ expect(mockedState.isLoadingJobs).toBe(false);
+ });
+ });
+
+ describe(types.RECEIVE_JOBS_SUCCESS, () => {
+ it('sets jobs loading to false on success', () => {
+ mutations[types.RECEIVE_JOBS_SUCCESS](mockedState, jobs);
+
+ expect(mockedState.isLoadingJobs).toBe(false);
+ });
+
+ it('sets stages', () => {
+ mutations[types.RECEIVE_JOBS_SUCCESS](mockedState, jobs);
+
+ expect(mockedState.stages.length).toBe(2);
+ expect(mockedState.stages).toEqual([
+ {
+ title: 'test',
+ jobs: jasmine.anything(),
+ },
+ {
+ title: 'build',
+ jobs: jasmine.anything(),
+ },
+ ]);
+ });
+
+ it('sets jobs in stages', () => {
+ mutations[types.RECEIVE_JOBS_SUCCESS](mockedState, jobs);
+
+ expect(mockedState.stages[0].jobs.length).toBe(3);
+ expect(mockedState.stages[1].jobs.length).toBe(1);
+ expect(mockedState.stages).toEqual([
+ {
+ title: jasmine.anything(),
+ jobs: jobs.filter(job => job.stage === 'test').map(job => ({
+ id: job.id,
+ name: job.name,
+ status: job.status,
+ stage: job.stage,
+ duration: job.duration,
+ })),
+ },
+ {
+ title: jasmine.anything(),
+ jobs: jobs.filter(job => job.stage === 'build').map(job => ({
+ id: job.id,
+ name: job.name,
+ status: job.status,
+ stage: job.stage,
+ duration: job.duration,
+ })),
+ },
+ ]);
+ });
+ });
+});
diff --git a/spec/javascripts/pipelines/graph/action_component_spec.js b/spec/javascripts/pipelines/graph/action_component_spec.js
index d646bef96f5..568e679abe9 100644
--- a/spec/javascripts/pipelines/graph/action_component_spec.js
+++ b/spec/javascripts/pipelines/graph/action_component_spec.js
@@ -1,13 +1,19 @@
import Vue from 'vue';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
import actionComponent from '~/pipelines/components/graph/action_component.vue';
-import eventHub from '~/pipelines/event_hub';
import mountComponent from '../../helpers/vue_mount_component_helper';
describe('pipeline graph action component', () => {
let component;
+ let mock;
beforeEach(done => {
const ActionComponent = Vue.extend(actionComponent);
+ mock = new MockAdapter(axios);
+
+ mock.onPost('foo.json').reply(200);
+
component = mountComponent(ActionComponent, {
tooltipText: 'bar',
link: 'foo',
@@ -18,15 +24,10 @@ describe('pipeline graph action component', () => {
});
afterEach(() => {
+ mock.restore();
component.$destroy();
});
- it('should emit an event with the provided link', () => {
- eventHub.$on('postAction', link => {
- expect(link).toEqual('foo');
- });
- });
-
it('should render the provided title as a bootstrap tooltip', () => {
expect(component.$el.getAttribute('data-original-title')).toEqual('bar');
});
@@ -34,10 +35,12 @@ describe('pipeline graph action component', () => {
it('should update bootstrap tooltip when title changes', done => {
component.tooltipText = 'changed';
- setTimeout(() => {
+ component.$nextTick()
+ .then(() => {
expect(component.$el.getAttribute('data-original-title')).toBe('changed');
- done();
- });
+ })
+ .then(done)
+ .catch(done.fail);
});
it('should render an svg', () => {
@@ -45,44 +48,18 @@ describe('pipeline graph action component', () => {
expect(component.$el.querySelector('svg')).toBeDefined();
});
- it('disables the button when clicked', done => {
- component.$el.click();
+ describe('on click', () => {
+ it('emits `pipelineActionRequestComplete` after a successful request', done => {
+ spyOn(component, '$emit');
- component.$nextTick(() => {
- expect(component.$el.getAttribute('disabled')).toEqual('disabled');
- done();
- });
- });
-
- it('re-enabled the button when `requestFinishedFor` matches `linkRequested`', done => {
- component.$el.click();
-
- component
- .$nextTick()
- .then(() => {
- expect(component.$el.getAttribute('disabled')).toEqual('disabled');
- component.requestFinishedFor = 'foo';
- })
- .then(() => {
- expect(component.$el.getAttribute('disabled')).toBeNull();
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('does not re-enable the button when `requestFinishedFor` does not matches `linkRequested`', done => {
- component.$el.click();
+ component.$el.click();
- component
- .$nextTick()
- .then(() => {
- expect(component.$el.getAttribute('disabled')).toEqual('disabled');
- component.requestFinishedFor = 'bar';
- })
- .then(() => {
- expect(component.$el.getAttribute('disabled')).toEqual('disabled');
- })
- .then(done)
- .catch(done.fail);
+ component.$nextTick()
+ .then(() => {
+ expect(component.$emit).toHaveBeenCalledWith('pipelineActionRequestComplete');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
});
});
diff --git a/spec/javascripts/pipelines/stage_spec.js b/spec/javascripts/pipelines/stage_spec.js
index 75156e7bdfd..16f6db39d6a 100644
--- a/spec/javascripts/pipelines/stage_spec.js
+++ b/spec/javascripts/pipelines/stage_spec.js
@@ -102,4 +102,31 @@ describe('Pipelines stage component', () => {
});
});
});
+
+ describe('pipelineActionRequestComplete', () => {
+ beforeEach(() => {
+ mock.onGet('path.json').reply(200, stageReply);
+
+ mock.onPost(`${stageReply.latest_statuses[0].status.action.path}.json`).reply(200);
+ });
+
+ describe('within pipeline table', () => {
+ it('emits `refreshPipelinesTable` event when `pipelineActionRequestComplete` is triggered', done => {
+ spyOn(eventHub, '$emit');
+
+ component.type = 'PIPELINES_TABLE';
+ component.$el.querySelector('button').click();
+
+ setTimeout(() => {
+ component.$el.querySelector('.js-ci-action').click();
+ component.$nextTick()
+ .then(() => {
+ expect(eventHub.$emit).toHaveBeenCalledWith('refreshPipelinesTable');
+ })
+ .then(done)
+ .catch(done.fail);
+ }, 0);
+ });
+ });
+ });
});
diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_author_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_author_spec.js
index db27aa144d6..00f4f2d7c39 100644
--- a/spec/javascripts/vue_mr_widget/components/mr_widget_author_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/mr_widget_author_spec.js
@@ -1,12 +1,12 @@
import Vue from 'vue';
-import authorComponent from '~/vue_merge_request_widget/components/mr_widget_author.vue';
+import MrWidgetAuthor from '~/vue_merge_request_widget/components/mr_widget_author.vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
-describe('MRWidgetAuthor', () => {
+describe('MrWidgetAuthor', () => {
let vm;
beforeEach(() => {
- const Component = Vue.extend(authorComponent);
+ const Component = Vue.extend(MrWidgetAuthor);
vm = mountComponent(Component, {
author: {
diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_maintainer_edit_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_maintainer_edit_spec.js
deleted file mode 100644
index cee22d5342a..00000000000
--- a/spec/javascripts/vue_mr_widget/components/mr_widget_maintainer_edit_spec.js
+++ /dev/null
@@ -1,40 +0,0 @@
-import Vue from 'vue';
-import maintainerEditComponent from '~/vue_merge_request_widget/components/mr_widget_maintainer_edit.vue';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
-
-describe('RWidgetMaintainerEdit', () => {
- let Component;
- let vm;
-
- beforeEach(() => {
- Component = Vue.extend(maintainerEditComponent);
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- describe('when a maintainer is allowed to edit', () => {
- beforeEach(() => {
- vm = mountComponent(Component, {
- maintainerEditAllowed: true,
- });
- });
-
- it('it renders the message', () => {
- expect(vm.$el.textContent.trim()).toEqual('Allows edits from maintainers');
- });
- });
-
- describe('when a maintainer is not allowed to edit', () => {
- beforeEach(() => {
- vm = mountComponent(Component, {
- maintainerEditAllowed: false,
- });
- });
-
- it('hides the message', () => {
- expect(vm.$el.textContent.trim()).toEqual('');
- });
- });
-});
diff --git a/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js b/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
index e55c7649d40..30918428da2 100644
--- a/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
+++ b/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
@@ -1,5 +1,5 @@
import Vue from 'vue';
-import mrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options';
+import mrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options.vue';
import eventHub from '~/vue_merge_request_widget/event_hub';
import notify from '~/lib/utils/notify';
import { stateKey } from '~/vue_merge_request_widget/stores/state_maps';
diff --git a/spec/lib/api/helpers/pagination_spec.rb b/spec/lib/api/helpers/pagination_spec.rb
index a547988d631..c73c6023b60 100644
--- a/spec/lib/api/helpers/pagination_spec.rb
+++ b/spec/lib/api/helpers/pagination_spec.rb
@@ -7,7 +7,203 @@ describe API::Helpers::Pagination do
Class.new.include(described_class).new
end
- describe '#paginate' do
+ describe '#paginate (keyset pagination)' do
+ let(:value) { spy('return value') }
+
+ before do
+ allow(value).to receive(:to_query).and_return(value)
+
+ allow(subject).to receive(:header).and_return(value)
+ allow(subject).to receive(:params).and_return(value)
+ allow(subject).to receive(:request).and_return(value)
+ end
+
+ context 'when resource can be paginated' do
+ let!(:projects) do
+ [
+ create(:project, name: 'One'),
+ create(:project, name: 'Two'),
+ create(:project, name: 'Three')
+ ].sort_by { |e| -e.id } # sort by id desc (this is the default sort order for the API)
+ end
+
+ describe 'first page' do
+ before do
+ allow(subject).to receive(:params)
+ .and_return({ pagination: 'keyset', per_page: 2 })
+ end
+
+ it 'returns appropriate amount of resources' do
+ expect(subject.paginate(resource).count).to eq 2
+ end
+
+ it 'returns the first two records (by id desc)' do
+ expect(subject.paginate(resource)).to eq(projects[0..1])
+ end
+
+ it 'adds appropriate headers' do
+ expect_header('X-Per-Page', '2')
+ expect_header('X-Next-Page', "#{value}?ks_prev_id=#{projects[1].id}&pagination=keyset&per_page=2")
+
+ expect_header('Link', anything) do |_key, val|
+ expect(val).to include('rel="next"')
+ end
+
+ subject.paginate(resource)
+ end
+ end
+
+ describe 'second page' do
+ before do
+ allow(subject).to receive(:params)
+ .and_return({ pagination: 'keyset', per_page: 2, ks_prev_id: projects[1].id })
+ end
+
+ it 'returns appropriate amount of resources' do
+ expect(subject.paginate(resource).count).to eq 1
+ end
+
+ it 'returns the third record' do
+ expect(subject.paginate(resource)).to eq(projects[2..2])
+ end
+
+ it 'adds appropriate headers' do
+ expect_header('X-Per-Page', '2')
+ expect_header('X-Next-Page', "#{value}?ks_prev_id=#{projects[2].id}&pagination=keyset&per_page=2")
+
+ expect_header('Link', anything) do |_key, val|
+ expect(val).to include('rel="next"')
+ end
+
+ subject.paginate(resource)
+ end
+ end
+
+ describe 'third page' do
+ before do
+ allow(subject).to receive(:params)
+ .and_return({ pagination: 'keyset', per_page: 2, ks_prev_id: projects[2].id })
+ end
+
+ it 'returns appropriate amount of resources' do
+ expect(subject.paginate(resource).count).to eq 0
+ end
+
+ it 'adds appropriate headers' do
+ expect_header('X-Per-Page', '2')
+ expect(subject).not_to receive(:header).with('Link')
+
+ subject.paginate(resource)
+ end
+ end
+
+ context 'if order' do
+ context 'is not present' do
+ before do
+ allow(subject).to receive(:params)
+ .and_return({ pagination: 'keyset', per_page: 2 })
+ end
+
+ it 'adds default order(:id) desc' do
+ resource.order_values = []
+
+ paginated_relation = subject.paginate(resource)
+
+ expect(resource.order_values).to be_empty
+ expect(paginated_relation.order_values).to be_present
+ expect(paginated_relation.order_values.size).to eq(1)
+ expect(paginated_relation.order_values.first).to be_descending
+ expect(paginated_relation.order_values.first.expr.name).to eq :id
+ end
+ end
+
+ context 'is present' do
+ let(:resource) { Project.all.order(name: :desc) }
+ let!(:projects) do
+ [
+ create(:project, name: 'One'),
+ create(:project, name: 'Two'),
+ create(:project, name: 'Three'),
+ create(:project, name: 'Three'), # Note the duplicate name
+ create(:project, name: 'Four'),
+ create(:project, name: 'Five'),
+ create(:project, name: 'Six')
+ ]
+
+ # if we sort this by name descending, id descending, this yields:
+ # {
+ # 2 => "Two",
+ # 4 => "Three",
+ # 3 => "Three",
+ # 7 => "Six",
+ # 1 => "One",
+ # 5 => "Four",
+ # 6 => "Five"
+ # }
+ #
+ # (key is the id)
+ end
+
+ it 'also orders by primary key' do
+ allow(subject).to receive(:params)
+ .and_return({ pagination: 'keyset', per_page: 2 })
+ paginated_relation = subject.paginate(resource)
+
+ expect(paginated_relation.order_values).to be_present
+ expect(paginated_relation.order_values.size).to eq(2)
+ expect(paginated_relation.order_values.first).to be_descending
+ expect(paginated_relation.order_values.first.expr.name).to eq :name
+ expect(paginated_relation.order_values.second).to be_descending
+ expect(paginated_relation.order_values.second.expr.name).to eq :id
+ end
+
+ it 'returns the right records (first page)' do
+ allow(subject).to receive(:params)
+ .and_return({ pagination: 'keyset', per_page: 2 })
+ result = subject.paginate(resource)
+
+ expect(result.first).to eq(projects[1])
+ expect(result.second).to eq(projects[3])
+ end
+
+ it 'returns the right records (second page)' do
+ allow(subject).to receive(:params)
+ .and_return({ pagination: 'keyset', ks_prev_id: projects[3].id, ks_prev_name: projects[3].name, per_page: 2 })
+ result = subject.paginate(resource)
+
+ expect(result.first).to eq(projects[2])
+ expect(result.second).to eq(projects[6])
+ end
+
+ it 'returns the right records (third page), note increased per_page' do
+ allow(subject).to receive(:params)
+ .and_return({ pagination: 'keyset', ks_prev_id: projects[6].id, ks_prev_name: projects[6].name, per_page: 5 })
+ result = subject.paginate(resource)
+
+ expect(result.size).to eq(3)
+ expect(result.first).to eq(projects[0])
+ expect(result.second).to eq(projects[4])
+ expect(result.last).to eq(projects[5])
+ end
+
+ it 'returns the right link to the next page' do
+ allow(subject).to receive(:params)
+ .and_return({ pagination: 'keyset', ks_prev_id: projects[3].id, ks_prev_name: projects[3].name, per_page: 2 })
+ expect_header('X-Per-Page', '2')
+ expect_header('X-Next-Page', "#{value}?ks_prev_id=#{projects[6].id}&ks_prev_name=#{projects[6].name}&pagination=keyset&per_page=2")
+
+ expect_header('Link', anything) do |_key, val|
+ expect(val).to include('rel="next"')
+ end
+
+ subject.paginate(resource)
+ end
+ end
+ end
+ end
+ end
+
+ describe '#paginate (default offset-based pagination)' do
let(:value) { spy('return value') }
before do
@@ -146,14 +342,14 @@ describe API::Helpers::Pagination do
end
end
end
+ end
- def expect_header(*args, &block)
- expect(subject).to receive(:header).with(*args, &block)
- end
+ def expect_header(*args, &block)
+ expect(subject).to receive(:header).with(*args, &block)
+ end
- def expect_message(method)
- expect(subject).to receive(method)
- .at_least(:once).and_return(value)
- end
+ def expect_message(method)
+ expect(subject).to receive(method)
+ .at_least(:once).and_return(value)
end
end
diff --git a/spec/lib/api/helpers/related_resources_helpers_spec.rb b/spec/lib/api/helpers/related_resources_helpers_spec.rb
index b918301f1cb..66af7f81535 100644
--- a/spec/lib/api/helpers/related_resources_helpers_spec.rb
+++ b/spec/lib/api/helpers/related_resources_helpers_spec.rb
@@ -9,9 +9,9 @@ describe API::Helpers::RelatedResourcesHelpers do
let(:path) { '/api/v4/awesome_endpoint' }
subject(:url) { helpers.expose_url(path) }
- def stub_default_url_options(protocol: 'http', host: 'example.com', port: nil)
+ def stub_default_url_options(protocol: 'http', host: 'example.com', port: nil, script_name: '')
expect(Gitlab::Application.routes).to receive(:default_url_options)
- .and_return(protocol: protocol, host: host, port: port)
+ .and_return(protocol: protocol, host: host, port: port, script_name: script_name)
end
it 'respects the protocol if it is HTTP' do
@@ -37,5 +37,17 @@ describe API::Helpers::RelatedResourcesHelpers do
is_expected.to start_with('http://example.com:8080/')
end
+
+ it 'includes the relative_url before the path if it is set' do
+ stub_default_url_options(script_name: '/gitlab')
+
+ is_expected.to start_with('http://example.com/gitlab/api/v4')
+ end
+
+ it 'includes the path after the host' do
+ stub_default_url_options
+
+ is_expected.to start_with('http://example.com/api/v4')
+ end
end
end
diff --git a/spec/lib/banzai/filter/gollum_tags_filter_spec.rb b/spec/lib/banzai/filter/gollum_tags_filter_spec.rb
index ca76d6f0881..0e178b859c4 100644
--- a/spec/lib/banzai/filter/gollum_tags_filter_spec.rb
+++ b/spec/lib/banzai/filter/gollum_tags_filter_spec.rb
@@ -91,6 +91,12 @@ describe Banzai::Filter::GollumTagsFilter do
expect(doc.at_css('a').text).to eq 'link-text'
expect(doc.at_css('a')['href']).to eq expected_path
end
+
+ it "inside back ticks will be exempt from linkification" do
+ doc = filter('<code>[[link-in-backticks]]</code>', project_wiki: project_wiki)
+
+ expect(doc.at_css('code').text).to eq '[[link-in-backticks]]'
+ end
end
context 'table of contents' do
diff --git a/spec/lib/gitlab/auth/blocked_user_tracker_spec.rb b/spec/lib/gitlab/auth/blocked_user_tracker_spec.rb
index 726a3c1c83a..43b68e69131 100644
--- a/spec/lib/gitlab/auth/blocked_user_tracker_spec.rb
+++ b/spec/lib/gitlab/auth/blocked_user_tracker_spec.rb
@@ -17,12 +17,8 @@ describe Gitlab::Auth::BlockedUserTracker do
end
context 'failed login due to blocked user' do
- let(:env) do
- {
- 'warden.options' => { message: User::BLOCKED_MESSAGE },
- described_class::ACTIVE_RECORD_REQUEST_PARAMS => { 'user' => { 'login' => user.username } }
- }
- end
+ let(:base_env) { { 'warden.options' => { message: User::BLOCKED_MESSAGE } } }
+ let(:env) { base_env.merge(request_env) }
subject { described_class.log_if_user_blocked(env) }
@@ -30,23 +26,37 @@ describe Gitlab::Auth::BlockedUserTracker do
expect_any_instance_of(SystemHooksService).to receive(:execute_hooks_for).with(user, :failed_login)
end
- it 'logs a blocked user' do
- user.block!
+ context 'via GitLab login' do
+ let(:request_env) { { described_class::ACTIVE_RECORD_REQUEST_PARAMS => { 'user' => { 'login' => user.username } } } }
- expect(subject).to be_truthy
- end
+ it 'logs a blocked user' do
+ user.block!
+
+ expect(subject).to be_truthy
+ end
- it 'logs a blocked user by e-mail' do
- user.block!
- env[described_class::ACTIVE_RECORD_REQUEST_PARAMS]['user']['login'] = user.email
+ it 'logs a blocked user by e-mail' do
+ user.block!
+ env[described_class::ACTIVE_RECORD_REQUEST_PARAMS]['user']['login'] = user.email
- expect(subject).to be_truthy
+ expect(subject).to be_truthy
+ end
end
- it 'logs a LDAP blocked user' do
- user.ldap_block!
+ context 'via LDAP login' do
+ let(:request_env) { { described_class::ACTIVE_RECORD_REQUEST_PARAMS => { 'username' => user.username } } }
+
+ it 'logs a blocked user' do
+ user.block!
+
+ expect(subject).to be_truthy
+ end
+
+ it 'logs a LDAP blocked user' do
+ user.ldap_block!
- expect(subject).to be_truthy
+ expect(subject).to be_truthy
+ end
end
end
end
diff --git a/spec/lib/gitlab/auth/ldap/access_spec.rb b/spec/lib/gitlab/auth/ldap/access_spec.rb
index 6b251d824f7..eff21985108 100644
--- a/spec/lib/gitlab/auth/ldap/access_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/access_spec.rb
@@ -8,6 +8,7 @@ describe Gitlab::Auth::LDAP::Access do
describe '.allowed?' do
it 'updates the users `last_credential_check_at' do
+ allow(access).to receive(:update_user)
expect(access).to receive(:allowed?) { true }
expect(described_class).to receive(:open).and_yield(access)
@@ -16,12 +17,21 @@ describe Gitlab::Auth::LDAP::Access do
end
end
+ describe '#find_ldap_user' do
+ it 'finds a user by dn first' do
+ expect(Gitlab::Auth::LDAP::Person).to receive(:find_by_dn).and_return(:ldap_user)
+
+ access.find_ldap_user
+ end
+ end
+
describe '#allowed?' do
subject { access.allowed? }
context 'when the user cannot be found' do
before do
allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_dn).and_return(nil)
+ allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_email).and_return(nil)
end
it { is_expected.to be_falsey }
@@ -54,7 +64,7 @@ describe Gitlab::Auth::LDAP::Access do
end
end
- context 'and has no disabled flag in active diretory' do
+ context 'and has no disabled flag in active directory' do
before do
allow(Gitlab::Auth::LDAP::Person).to receive(:disabled_via_active_directory?).and_return(false)
end
@@ -100,6 +110,7 @@ describe Gitlab::Auth::LDAP::Access do
context 'when user cannot be found' do
before do
allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_dn).and_return(nil)
+ allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_email).and_return(nil)
end
it { is_expected.to be_falsey }
diff --git a/spec/lib/gitlab/auth/ldap/config_spec.rb b/spec/lib/gitlab/auth/ldap/config_spec.rb
index 82587e2ba55..d3ab599d5a0 100644
--- a/spec/lib/gitlab/auth/ldap/config_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/config_spec.rb
@@ -23,7 +23,7 @@ describe Gitlab::Auth::LDAP::Config do
end
it 'raises an error if a unknown provider is used' do
- expect { described_class.new 'unknown' }.to raise_error(RuntimeError)
+ expect { described_class.new 'unknown' }.to raise_error(described_class::InvalidProvider)
end
end
@@ -370,4 +370,38 @@ describe Gitlab::Auth::LDAP::Config do
})
end
end
+
+ describe '#base' do
+ context 'when the configured base is not normalized' do
+ it 'returns the normalized base' do
+ stub_ldap_config(options: { 'base' => 'DC=example, DC= com' })
+
+ expect(config.base).to eq('dc=example,dc=com')
+ end
+ end
+
+ context 'when the configured base is normalized' do
+ it 'returns the base unaltered' do
+ stub_ldap_config(options: { 'base' => 'dc=example,dc=com' })
+
+ expect(config.base).to eq('dc=example,dc=com')
+ end
+ end
+
+ context 'when the configured base is malformed' do
+ it 'returns the base unaltered' do
+ stub_ldap_config(options: { 'base' => 'invalid,dc=example,dc=com' })
+
+ expect(config.base).to eq('invalid,dc=example,dc=com')
+ end
+ end
+
+ context 'when the configured base is blank' do
+ it 'returns the base unaltered' do
+ stub_ldap_config(options: { 'base' => '' })
+
+ expect(config.base).to eq('')
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/auth/ldap/user_spec.rb b/spec/lib/gitlab/auth/ldap/user_spec.rb
index 653c19942ea..44bb9d20e47 100644
--- a/spec/lib/gitlab/auth/ldap/user_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/user_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe Gitlab::Auth::LDAP::User do
+ include LdapHelpers
+
let(:ldap_user) { described_class.new(auth_hash) }
let(:gl_user) { ldap_user.gl_user }
let(:info) do
@@ -177,8 +179,7 @@ describe Gitlab::Auth::LDAP::User do
describe 'blocking' do
def configure_block(value)
- allow_any_instance_of(Gitlab::Auth::LDAP::Config)
- .to receive(:block_auto_created_users).and_return(value)
+ stub_ldap_config(block_auto_created_users: value)
end
context 'signup' do
diff --git a/spec/lib/gitlab/ci/config/entry/policy_spec.rb b/spec/lib/gitlab/ci/config/entry/policy_spec.rb
index 08718c382b9..83d39b82068 100644
--- a/spec/lib/gitlab/ci/config/entry/policy_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/policy_spec.rb
@@ -111,7 +111,15 @@ describe Gitlab::Ci::Config::Entry::Policy do
context 'when specifying invalid variables expressions token' do
let(:config) { { variables: ['$MY_VAR == 123'] } }
- it 'reports an error about invalid statement' do
+ it 'reports an error about invalid expression' do
+ expect(entry.errors).to include /invalid expression syntax/
+ end
+ end
+
+ context 'when using invalid variables expressions regexp' do
+ let(:config) { { variables: ['$MY_VAR =~ /some ( thing/'] } }
+
+ it 'reports an error about invalid expression' do
expect(entry.errors).to include /invalid expression syntax/
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexeme/matches_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexeme/matches_spec.rb
new file mode 100644
index 00000000000..49e5af52f4d
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/expression/lexeme/matches_spec.rb
@@ -0,0 +1,80 @@
+require 'fast_spec_helper'
+require_dependency 're2'
+
+describe Gitlab::Ci::Pipeline::Expression::Lexeme::Matches do
+ let(:left) { double('left') }
+ let(:right) { double('right') }
+
+ describe '.build' do
+ it 'creates a new instance of the token' do
+ expect(described_class.build('=~', left, right))
+ .to be_a(described_class)
+ end
+ end
+
+ describe '.type' do
+ it 'is an operator' do
+ expect(described_class.type).to eq :operator
+ end
+ end
+
+ describe '#evaluate' do
+ it 'returns false when left and right do not match' do
+ allow(left).to receive(:evaluate).and_return('my-string')
+ allow(right).to receive(:evaluate)
+ .and_return(Gitlab::UntrustedRegexp.new('something'))
+
+ operator = described_class.new(left, right)
+
+ expect(operator.evaluate).to eq false
+ end
+
+ it 'returns true when left and right match' do
+ allow(left).to receive(:evaluate).and_return('my-awesome-string')
+ allow(right).to receive(:evaluate)
+ .and_return(Gitlab::UntrustedRegexp.new('awesome.string$'))
+
+ operator = described_class.new(left, right)
+
+ expect(operator.evaluate).to eq true
+ end
+
+ it 'supports matching against a nil value' do
+ allow(left).to receive(:evaluate).and_return(nil)
+ allow(right).to receive(:evaluate)
+ .and_return(Gitlab::UntrustedRegexp.new('pattern'))
+
+ operator = described_class.new(left, right)
+
+ expect(operator.evaluate).to eq false
+ end
+
+ it 'supports multiline strings' do
+ allow(left).to receive(:evaluate).and_return <<~TEXT
+ My awesome contents
+
+ My-text-string!
+ TEXT
+
+ allow(right).to receive(:evaluate)
+ .and_return(Gitlab::UntrustedRegexp.new('text-string'))
+
+ operator = described_class.new(left, right)
+
+ expect(operator.evaluate).to eq true
+ end
+
+ it 'supports regexp flags' do
+ allow(left).to receive(:evaluate).and_return <<~TEXT
+ My AWESOME content
+ TEXT
+
+ allow(right).to receive(:evaluate)
+ .and_return(Gitlab::UntrustedRegexp.new('(?i)awesome'))
+
+ operator = described_class.new(left, right)
+
+ expect(operator.evaluate).to eq true
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexeme/pattern_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexeme/pattern_spec.rb
new file mode 100644
index 00000000000..3ebc2e94727
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/expression/lexeme/pattern_spec.rb
@@ -0,0 +1,96 @@
+require 'fast_spec_helper'
+
+describe Gitlab::Ci::Pipeline::Expression::Lexeme::Pattern do
+ describe '.build' do
+ it 'creates a new instance of the token' do
+ expect(described_class.build('/.*/'))
+ .to be_a(described_class)
+ end
+
+ it 'raises an error if pattern is invalid' do
+ expect { described_class.build('/ some ( thin/i') }
+ .to raise_error(Gitlab::Ci::Pipeline::Expression::Lexer::SyntaxError)
+ end
+ end
+
+ describe '.type' do
+ it 'is a value lexeme' do
+ expect(described_class.type).to eq :value
+ end
+ end
+
+ describe '.scan' do
+ it 'correctly identifies a pattern token' do
+ scanner = StringScanner.new('/pattern/')
+
+ token = described_class.scan(scanner)
+
+ expect(token).not_to be_nil
+ expect(token.build.evaluate)
+ .to eq Gitlab::UntrustedRegexp.new('pattern')
+ end
+
+ it 'is a greedy scanner for regexp boundaries' do
+ scanner = StringScanner.new('/some .* / pattern/')
+
+ token = described_class.scan(scanner)
+
+ expect(token).not_to be_nil
+ expect(token.build.evaluate)
+ .to eq Gitlab::UntrustedRegexp.new('some .* / pattern')
+ end
+
+ it 'does not allow to use an empty pattern' do
+ scanner = StringScanner.new(%(//))
+
+ token = described_class.scan(scanner)
+
+ expect(token).to be_nil
+ end
+
+ it 'supports a single flag' do
+ scanner = StringScanner.new('/pattern/i')
+
+ token = described_class.scan(scanner)
+
+ expect(token).not_to be_nil
+ expect(token.build.evaluate)
+ .to eq Gitlab::UntrustedRegexp.new('(?i)pattern')
+ end
+
+ it 'supports multiple flags' do
+ scanner = StringScanner.new('/pattern/im')
+
+ token = described_class.scan(scanner)
+
+ expect(token).not_to be_nil
+ expect(token.build.evaluate)
+ .to eq Gitlab::UntrustedRegexp.new('(?im)pattern')
+ end
+
+ it 'does not support arbitrary flags' do
+ scanner = StringScanner.new('/pattern/x')
+
+ token = described_class.scan(scanner)
+
+ expect(token).to be_nil
+ end
+ end
+
+ describe '#evaluate' do
+ it 'returns a regular expression' do
+ regexp = described_class.new('/abc/')
+
+ expect(regexp.evaluate).to eq Gitlab::UntrustedRegexp.new('abc')
+ end
+
+ it 'raises error if evaluated regexp is not valid' do
+ allow(Gitlab::UntrustedRegexp).to receive(:valid?).and_return(true)
+
+ regexp = described_class.new('/invalid ( .*/')
+
+ expect { regexp.evaluate }
+ .to raise_error(Gitlab::Ci::Pipeline::Expression::RuntimeError)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb
index 230ceeb07f8..3f11b3f7673 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb
@@ -6,7 +6,7 @@ describe Gitlab::Ci::Pipeline::Expression::Lexer do
end
describe '#tokens' do
- it 'tokenss single value' do
+ it 'returns single value' do
tokens = described_class.new('$VARIABLE').tokens
expect(tokens).to be_one
@@ -20,14 +20,14 @@ describe Gitlab::Ci::Pipeline::Expression::Lexer do
expect(tokens).to all(be_an_instance_of(token_class))
end
- it 'tokenss multiple values of the same token' do
+ it 'returns multiple values of the same token' do
tokens = described_class.new("$VARIABLE1 $VARIABLE2").tokens
expect(tokens.size).to eq 2
expect(tokens).to all(be_an_instance_of(token_class))
end
- it 'tokenss multiple values with different tokens' do
+ it 'returns multiple values with different tokens' do
tokens = described_class.new('$VARIABLE "text" "value"').tokens
expect(tokens.size).to eq 3
@@ -36,7 +36,7 @@ describe Gitlab::Ci::Pipeline::Expression::Lexer do
expect(tokens.third.value).to eq '"value"'
end
- it 'tokenss tokens and operators' do
+ it 'returns tokens and operators' do
tokens = described_class.new('$VARIABLE == "text"').tokens
expect(tokens.size).to eq 3
diff --git a/spec/lib/gitlab/ci/pipeline/expression/parser_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/parser_spec.rb
index e8e6f585310..2b78b1dd4a7 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/parser_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/parser_spec.rb
@@ -1,4 +1,4 @@
-require 'spec_helper'
+require 'fast_spec_helper'
describe Gitlab::Ci::Pipeline::Expression::Parser do
describe '#tree' do
diff --git a/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb
index 6685bf5385b..11e73294f18 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb
@@ -1,4 +1,5 @@
-require 'spec_helper'
+require 'fast_spec_helper'
+require 'rspec-parameterized'
describe Gitlab::Ci::Pipeline::Expression::Statement do
subject do
@@ -36,7 +37,7 @@ describe Gitlab::Ci::Pipeline::Expression::Statement do
'== "123"', # invalid left side
'"some string"', # only string provided
'$VAR ==', # invalid right side
- '12345', # unknown syntax
+ 'null', # missing lexemes
'' # empty statement
]
@@ -44,7 +45,7 @@ describe Gitlab::Ci::Pipeline::Expression::Statement do
context "when expression grammar is #{syntax.inspect}" do
let(:text) { syntax }
- it 'aises a statement error exception' do
+ it 'raises a statement error exception' do
expect { subject.parse_tree }
.to raise_error described_class::StatementError
end
@@ -82,48 +83,66 @@ describe Gitlab::Ci::Pipeline::Expression::Statement do
end
describe '#evaluate' do
- statements = [
- ['$PRESENT_VARIABLE == "my variable"', true],
- ["$PRESENT_VARIABLE == 'my variable'", true],
- ['"my variable" == $PRESENT_VARIABLE', true],
- ['$PRESENT_VARIABLE == null', false],
- ['$EMPTY_VARIABLE == null', false],
- ['"" == $EMPTY_VARIABLE', true],
- ['$EMPTY_VARIABLE', ''],
- ['$UNDEFINED_VARIABLE == null', true],
- ['null == $UNDEFINED_VARIABLE', true],
- ['$PRESENT_VARIABLE', 'my variable'],
- ['$UNDEFINED_VARIABLE', nil]
- ]
-
- statements.each do |expression, value|
- context "when using expression `#{expression}`" do
- let(:text) { expression }
-
- it "evaluates to `#{value.inspect}`" do
- expect(subject.evaluate).to eq value
- end
+ using RSpec::Parameterized::TableSyntax
+
+ where(:expression, :value) do
+ '$PRESENT_VARIABLE == "my variable"' | true
+ '"my variable" == $PRESENT_VARIABLE' | true
+ '$PRESENT_VARIABLE == null' | false
+ '$EMPTY_VARIABLE == null' | false
+ '"" == $EMPTY_VARIABLE' | true
+ '$EMPTY_VARIABLE' | ''
+ '$UNDEFINED_VARIABLE == null' | true
+ 'null == $UNDEFINED_VARIABLE' | true
+ '$PRESENT_VARIABLE' | 'my variable'
+ '$UNDEFINED_VARIABLE' | nil
+ "$PRESENT_VARIABLE =~ /var.*e$/" | true
+ "$PRESENT_VARIABLE =~ /^var.*/" | false
+ "$EMPTY_VARIABLE =~ /var.*/" | false
+ "$UNDEFINED_VARIABLE =~ /var.*/" | false
+ "$PRESENT_VARIABLE =~ /VAR.*/i" | true
+ end
+
+ with_them do
+ let(:text) { expression }
+
+ it "evaluates to `#{params[:value].inspect}`" do
+ expect(subject.evaluate).to eq value
end
end
end
describe '#truthful?' do
- statements = [
- ['$PRESENT_VARIABLE == "my variable"', true],
- ["$PRESENT_VARIABLE == 'no match'", false],
- ['$UNDEFINED_VARIABLE == null', true],
- ['$PRESENT_VARIABLE', true],
- ['$UNDEFINED_VARIABLE', false],
- ['$EMPTY_VARIABLE', false]
- ]
-
- statements.each do |expression, value|
- context "when using expression `#{expression}`" do
- let(:text) { expression }
-
- it "returns `#{value.inspect}`" do
- expect(subject.truthful?).to eq value
- end
+ using RSpec::Parameterized::TableSyntax
+
+ where(:expression, :value) do
+ '$PRESENT_VARIABLE == "my variable"' | true
+ "$PRESENT_VARIABLE == 'no match'" | false
+ '$UNDEFINED_VARIABLE == null' | true
+ '$PRESENT_VARIABLE' | true
+ '$UNDEFINED_VARIABLE' | false
+ '$EMPTY_VARIABLE' | false
+ '$INVALID = 1' | false
+ "$PRESENT_VARIABLE =~ /var.*/" | true
+ "$UNDEFINED_VARIABLE =~ /var.*/" | false
+ end
+
+ with_them do
+ let(:text) { expression }
+
+ it "returns `#{params[:value].inspect}`" do
+ expect(subject.truthful?).to eq value
+ end
+ end
+
+ context 'when evaluating expression raises an error' do
+ let(:text) { '$PRESENT_VARIABLE' }
+
+ it 'returns false' do
+ allow(subject).to receive(:evaluate)
+ .and_raise(described_class::StatementError)
+
+ expect(subject.truthful?).to be_falsey
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/expression/token_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/token_spec.rb
index 6d7453f0de5..cedfe270f9d 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/token_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/token_spec.rb
@@ -1,4 +1,4 @@
-require 'spec_helper'
+require 'fast_spec_helper'
describe Gitlab::Ci::Pipeline::Expression::Token do
let(:value) { '$VARIABLE' }
diff --git a/spec/lib/gitlab/ci/pipeline/preloader_spec.rb b/spec/lib/gitlab/ci/pipeline/preloader_spec.rb
new file mode 100644
index 00000000000..477c7477df0
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/preloader_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Pipeline::Preloader do
+ describe '.preload' do
+    it 'preloads commit authors and the number of warnings for every pipeline' do
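+      # Doubles suffice here: the expectations only verify that the lazy-loading methods are invoked.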
+ commit = double(:commit)
+ pipeline = double(:pipeline, commit: commit)
+
+ expect(commit)
+ .to receive(:lazy_author)
+
+ expect(pipeline)
+ .to receive(:number_of_warnings)
+
+ described_class.preload([pipeline])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/current_settings_spec.rb b/spec/lib/gitlab/current_settings_spec.rb
index 4ddcbd7eb66..19028495f52 100644
--- a/spec/lib/gitlab/current_settings_spec.rb
+++ b/spec/lib/gitlab/current_settings_spec.rb
@@ -1,17 +1,15 @@
require 'spec_helper'
describe Gitlab::CurrentSettings do
- include StubENV
-
before do
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
end
- describe '#current_application_settings' do
+ describe '#current_application_settings', :use_clean_rails_memory_store_caching do
it 'allows keys to be called directly' do
db_settings = create(:application_setting,
- home_page_url: 'http://mydomain.com',
- signup_enabled: false)
+ home_page_url: 'http://mydomain.com',
+ signup_enabled: false)
expect(described_class.home_page_url).to eq(db_settings.home_page_url)
expect(described_class.signup_enabled?).to be_falsey
@@ -19,46 +17,54 @@ describe Gitlab::CurrentSettings do
expect(described_class.metrics_sample_interval).to be(15)
end
- context 'with DB available' do
+ context 'when ENV["IN_MEMORY_APPLICATION_SETTINGS"] is true' do
before do
- # For some reason, `allow(described_class).to receive(:connect_to_db?).and_return(true)` causes issues
- # during the initialization phase of the test suite, so instead let's mock the internals of it
- allow(ActiveRecord::Base.connection).to receive(:active?).and_return(true)
- allow(ActiveRecord::Base.connection).to receive(:table_exists?).and_call_original
- allow(ActiveRecord::Base.connection).to receive(:table_exists?).with('application_settings').and_return(true)
+ stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'true')
end
- it 'attempts to use cached values first' do
- expect(ApplicationSetting).to receive(:cached)
+ it 'returns an in-memory ApplicationSetting object' do
+ expect(ApplicationSetting).not_to receive(:current)
expect(described_class.current_application_settings).to be_a(ApplicationSetting)
+ expect(described_class.current_application_settings).not_to be_persisted
end
+ end
- it 'falls back to DB if Redis returns an empty value' do
- expect(ApplicationSetting).to receive(:cached).and_return(nil)
- expect(ApplicationSetting).to receive(:last).and_call_original.twice
-
- expect(described_class.current_application_settings).to be_a(ApplicationSetting)
+ context 'with DB unavailable' do
+ before do
+ # For some reason, `allow(described_class).to receive(:connect_to_db?).and_return(false)` causes issues
+ # during the initialization phase of the test suite, so instead let's mock the internals of it
+ allow(ActiveRecord::Base.connection).to receive(:active?).and_return(false)
end
- it 'falls back to DB if Redis fails' do
- db_settings = ApplicationSetting.create!(ApplicationSetting.defaults)
+ it 'returns an in-memory ApplicationSetting object' do
+ expect(ApplicationSetting).not_to receive(:current)
- expect(ApplicationSetting).to receive(:cached).and_raise(::Redis::BaseError)
- expect(Rails.cache).to receive(:fetch).with(ApplicationSetting::CACHE_KEY).and_raise(Redis::BaseError)
+ expect(described_class.current_application_settings).to be_a(Gitlab::FakeApplicationSettings)
+ end
+ end
- expect(described_class.current_application_settings).to eq(db_settings)
+ context 'with DB available' do
+ # This method returns the ::ApplicationSetting.defaults hash
+    # but respects the custom attribute accessors of the ApplicationSetting model
+ def settings_from_defaults
+ ar_wrapped_defaults = ::ApplicationSetting.build_from_defaults.attributes
+ ar_wrapped_defaults.slice(*::ApplicationSetting.defaults.keys)
end
- it 'creates default ApplicationSettings if none are present' do
- expect(ApplicationSetting).to receive(:cached).and_raise(::Redis::BaseError)
- expect(Rails.cache).to receive(:fetch).with(ApplicationSetting::CACHE_KEY).and_raise(Redis::BaseError)
+ before do
+ # For some reason, `allow(described_class).to receive(:connect_to_db?).and_return(true)` causes issues
+ # during the initialization phase of the test suite, so instead let's mock the internals of it
+ allow(ActiveRecord::Base.connection).to receive(:active?).and_return(true)
+ allow(ActiveRecord::Base.connection).to receive(:cached_table_exists?).with('application_settings').and_return(true)
+ end
+ it 'creates default ApplicationSettings if none are present' do
settings = described_class.current_application_settings
expect(settings).to be_a(ApplicationSetting)
expect(settings).to be_persisted
- expect(settings).to have_attributes(ApplicationSetting.defaults)
+ expect(settings).to have_attributes(settings_from_defaults)
end
context 'with migrations pending' do
@@ -69,7 +75,7 @@ describe Gitlab::CurrentSettings do
it 'returns an in-memory ApplicationSetting object' do
settings = described_class.current_application_settings
- expect(settings).to be_a(OpenStruct)
+ expect(settings).to be_a(Gitlab::FakeApplicationSettings)
expect(settings.sign_in_enabled?).to eq(settings.sign_in_enabled)
expect(settings.sign_up_enabled?).to eq(settings.sign_up_enabled)
end
@@ -81,7 +87,7 @@ describe Gitlab::CurrentSettings do
settings = described_class.current_application_settings
app_defaults = ApplicationSetting.last
- expect(settings).to be_a(OpenStruct)
+ expect(settings).to be_a(Gitlab::FakeApplicationSettings)
expect(settings.home_page_url).to eq(db_settings.home_page_url)
expect(settings.signup_enabled?).to be_falsey
expect(settings.signup_enabled).to be_falsey
@@ -91,34 +97,29 @@ describe Gitlab::CurrentSettings do
settings.each { |key, _| expect(settings[key]).to eq(app_defaults[key]) }
end
end
- end
-
- context 'with DB unavailable' do
- before do
- # For some reason, `allow(described_class).to receive(:connect_to_db?).and_return(false)` causes issues
- # during the initialization phase of the test suite, so instead let's mock the internals of it
- allow(ActiveRecord::Base.connection).to receive(:active?).and_return(false)
- end
- it 'returns an in-memory ApplicationSetting object' do
- expect(ApplicationSetting).not_to receive(:current)
- expect(ApplicationSetting).not_to receive(:last)
+ context 'when ApplicationSettings.current is present' do
+ it 'returns the existing application settings' do
+ expect(ApplicationSetting).to receive(:current).and_return(:current_settings)
- expect(described_class.current_application_settings).to be_a(OpenStruct)
+ expect(described_class.current_application_settings).to eq(:current_settings)
+ end
end
- end
- context 'when ENV["IN_MEMORY_APPLICATION_SETTINGS"] is true' do
- before do
- stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'true')
+    context 'when the application_settings table does not exist' do
+ it 'returns an in-memory ApplicationSetting object' do
+ expect(ApplicationSetting).to receive(:create_from_defaults).and_raise(ActiveRecord::StatementInvalid)
+
+ expect(described_class.current_application_settings).to be_a(Gitlab::FakeApplicationSettings)
+ end
end
- it 'returns an in-memory ApplicationSetting object' do
- expect(ApplicationSetting).not_to receive(:current)
- expect(ApplicationSetting).not_to receive(:last)
+ context 'when the application_settings table is not fully migrated' do
+ it 'returns an in-memory ApplicationSetting object' do
+ expect(ApplicationSetting).to receive(:create_from_defaults).and_raise(ActiveRecord::UnknownAttributeError)
- expect(described_class.current_application_settings).to be_a(ApplicationSetting)
- expect(described_class.current_application_settings).not_to be_persisted
+ expect(described_class.current_application_settings).to be_a(Gitlab::FakeApplicationSettings)
+ end
end
end
end
diff --git a/spec/lib/gitlab/database/count_spec.rb b/spec/lib/gitlab/database/count_spec.rb
new file mode 100644
index 00000000000..9d9caaabe16
--- /dev/null
+++ b/spec/lib/gitlab/database/count_spec.rb
@@ -0,0 +1,62 @@
+require 'spec_helper'
+
+describe Gitlab::Database::Count do
+ before do
+ create_list(:project, 3)
+ end
+
+ describe '.execute_estimate_if_updated_recently', :postgresql do
+ context 'when reltuples have not been updated' do
+ before do
+ expect(described_class).to receive(:reltuples_updated_recently?).and_return(false)
+ end
+
+ it 'returns nil' do
+ expect(described_class.execute_estimate_if_updated_recently(Project)).to be nil
+ end
+ end
+
+ context 'when reltuples have been updated' do
+ before do
+ ActiveRecord::Base.connection.execute('ANALYZE projects')
+ end
+
+ it 'calls postgresql_estimate_query' do
+ expect(described_class).to receive(:postgresql_estimate_query).with(Project).and_call_original
+ expect(described_class.execute_estimate_if_updated_recently(Project)).to eq(3)
+ end
+ end
+ end
+
+ describe '.approximate_count' do
+ context 'when reltuples have not been updated' do
+ it 'counts all projects the normal way' do
+ allow(described_class).to receive(:reltuples_updated_recently?).and_return(false)
+
+ expect(Project).to receive(:count).and_call_original
+ expect(described_class.approximate_count(Project)).to eq(3)
+ end
+ end
+
+    context 'when the user has insufficient privileges' do
+ it 'falls back to standard query' do
+ allow(described_class).to receive(:reltuples_updated_recently?).and_raise(PG::InsufficientPrivilege)
+
+ expect(Project).to receive(:count).and_call_original
+ expect(described_class.approximate_count(Project)).to eq(3)
+ end
+ end
+
+    context 'when reltuples have been updated', :postgresql do
+ before do
+ ActiveRecord::Base.connection.execute('ANALYZE projects')
+ end
+
+      it 'counts all projects the fast way' do
+ expect(described_class).to receive(:postgresql_estimate_query).with(Project).and_call_original
+
+ expect(described_class.approximate_count(Project)).to eq(3)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index 1fe1d3926ad..8ac36ae8bab 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -32,6 +32,12 @@ describe Gitlab::Database do
end
describe '.version' do
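+    # Clear the memoized @version so each example starts from a fresh value.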
+ around do |example|
+ described_class.instance_variable_set(:@version, nil)
+ example.run
+ described_class.instance_variable_set(:@version, nil)
+ end
+
context "on mysql" do
it "extracts the version number" do
allow(described_class).to receive(:database_version)
@@ -49,6 +55,14 @@ describe Gitlab::Database do
expect(described_class.version).to eq '9.4.4'
end
end
+
+ it 'memoizes the result' do
+ count = ActiveRecord::QueryRecorder
+ .new { 2.times { described_class.version } }
+ .count
+
+ expect(count).to eq(1)
+ end
end
describe '.join_lateral_supported?' do
diff --git a/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb b/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb
index 452249210b0..154ab4b3856 100644
--- a/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb
@@ -46,6 +46,20 @@ describe Gitlab::Email::Handler::CreateIssueHandler do
expect(issue.description).to eq('')
end
end
+
+ context "when there are quotes in email" do
+ let(:email_raw) { fixture_file("emails/valid_new_issue_with_quote.eml") }
+
+ it "creates a new issue" do
+ expect { receiver.execute }.to change { project.issues.count }.by(1)
+ issue = project.issues.last
+
+ expect(issue.author).to eq(user)
+ expect(issue.title).to eq('New Issue by email')
+ expect(issue.description).to include('reply by email')
+ expect(issue.description).to include('> this is a quote')
+ end
+ end
end
context "something is wrong" do
diff --git a/spec/lib/gitlab/email/reply_parser_spec.rb b/spec/lib/gitlab/email/reply_parser_spec.rb
index e21a998adfe..0989188f7ee 100644
--- a/spec/lib/gitlab/email/reply_parser_spec.rb
+++ b/spec/lib/gitlab/email/reply_parser_spec.rb
@@ -3,8 +3,8 @@ require "spec_helper"
# Inspired in great part by Discourse's Email::Receiver
describe Gitlab::Email::ReplyParser do
describe '#execute' do
- def test_parse_body(mail_string)
- described_class.new(Mail::Message.new(mail_string)).execute
+ def test_parse_body(mail_string, params = {})
+ described_class.new(Mail::Message.new(mail_string), params).execute
end
it "returns an empty string if the message is blank" do
@@ -212,5 +212,19 @@ describe Gitlab::Email::ReplyParser do
it "does not wrap links with no href in unnecessary brackets" do
expect(test_parse_body(fixture_file("emails/html_empty_link.eml"))).to eq("no brackets!")
end
+
+ it "does not trim reply if trim_reply option is false" do
+ expect(test_parse_body(fixture_file("emails/valid_new_issue_with_quote.eml"), { trim_reply: false }))
+ .to eq(
+ <<-BODY.strip_heredoc.chomp
+ The reply by email functionality should be extended to allow creating a new issue by email.
+ even when the email is forwarded to the project which may include lines that begin with ">"
+
+ there should be a quote below this line:
+
+ > this is a quote
+ BODY
+ )
+ end
end
end
diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb
index a05feaac1ca..08c6d1e55e9 100644
--- a/spec/lib/gitlab/git/commit_spec.rb
+++ b/spec/lib/gitlab/git/commit_spec.rb
@@ -66,7 +66,8 @@ describe Gitlab::Git::Commit, seed_helper: true do
describe "Commit info from gitaly commit" do
let(:subject) { "My commit".force_encoding('ASCII-8BIT') }
let(:body) { subject + "My body".force_encoding('ASCII-8BIT') }
- let(:gitaly_commit) { build(:gitaly_commit, subject: subject, body: body) }
+ let(:body_size) { body.length }
+ let(:gitaly_commit) { build(:gitaly_commit, subject: subject, body: body, body_size: body_size) }
let(:id) { gitaly_commit.id }
let(:committer) { gitaly_commit.committer }
let(:author) { gitaly_commit.author }
@@ -83,10 +84,30 @@ describe Gitlab::Git::Commit, seed_helper: true do
it { expect(commit.committer_email).to eq(committer.email) }
it { expect(commit.parent_ids).to eq(gitaly_commit.parent_ids) }
- context 'no body' do
+ context 'body_size != body.size' do
let(:body) { "".force_encoding('ASCII-8BIT') }
- it { expect(commit.safe_message).to eq(subject) }
+ context 'zero body_size' do
+ it { expect(commit.safe_message).to eq(subject) }
+ end
+
+ context 'body_size less than threshold' do
+ let(:body_size) { 123 }
+
+      it 'fetches commit message separately' do
+ expect(described_class).to receive(:get_message).with(repository, id)
+
+ commit.safe_message
+ end
+ end
+
+ context 'body_size greater than threshold' do
+ let(:body_size) { described_class::MAX_COMMIT_MESSAGE_DISPLAY_SIZE + 1 }
+
+      it 'returns the subject plus a notice about message size' do
+ expect(commit.safe_message).to eq("My commit\n\n--commit message is too big")
+ end
+ end
end
end
@@ -554,24 +575,10 @@ describe Gitlab::Git::Commit, seed_helper: true do
it_should_behave_like '#stats'
end
- describe '#to_diff' do
- subject { commit.to_diff }
-
- it { is_expected.not_to include "From #{SeedRepo::Commit::ID}" }
- it { is_expected.to include 'diff --git a/files/ruby/popen.rb b/files/ruby/popen.rb'}
- end
-
describe '#has_zero_stats?' do
it { expect(commit.has_zero_stats?).to eq(false) }
end
- describe '#to_patch' do
- subject { commit.to_patch }
-
- it { is_expected.to include "From #{SeedRepo::Commit::ID}" }
- it { is_expected.to include 'diff --git a/files/ruby/popen.rb b/files/ruby/popen.rb'}
- end
-
describe '#to_hash' do
let(:hash) { commit.to_hash }
subject { hash }
@@ -603,6 +610,35 @@ describe Gitlab::Git::Commit, seed_helper: true do
it { is_expected.not_to include("feature") }
end
+ describe '.get_message' do
+ let(:commit_ids) { %w[6d394385cf567f80a8fd85055db1ab4c5295806f cfe32cf61b73a0d5e9f13e774abde7ff789b1660] }
+
+ subject do
+ commit_ids.map { |id| described_class.get_message(repository, id) }
+ end
+
+ shared_examples 'getting commit messages' do
+ it 'gets commit messages' do
+ expect(subject).to contain_exactly(
+ "Added contributing guide\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n",
+ "Add submodule\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n"
+ )
+ end
+ end
+
+ context 'when Gitaly commit_messages feature is enabled' do
+ it_behaves_like 'getting commit messages'
+
+ it 'gets messages in one batch', :request_store do
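+      # Forcing evaluation of the lazy messages should issue a single batched Gitaly request.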
+ expect { subject.map(&:itself) }.to change { Gitlab::GitalyClient.get_request_count }.by(1)
+ end
+ end
+
+ context 'when Gitaly commit_messages feature is disabled', :disable_gitaly do
+ it_behaves_like 'getting commit messages'
+ end
+ end
+
def sample_commit_hash
{
author_email: "dmitriy.zaporozhets@gmail.com",
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index cce84276fe3..af6a486ab20 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -249,10 +249,6 @@ describe Gitlab::Git::Repository, seed_helper: true do
subject(:metadata) { repository.archive_metadata(ref, storage_path, format, append_sha: append_sha) }
- it 'sets RepoPath to the repository path' do
- expect(metadata['RepoPath']).to eq(repository.path)
- end
-
it 'sets CommitId to the commit SHA' do
expect(metadata['CommitId']).to eq(SeedRepo::LastCommit::ID)
end
@@ -600,6 +596,10 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
end
+ it 'does not fail when deleting an empty list of refs' do
+ expect { repo.delete_refs(*[]) }.not_to raise_error
+ end
+
it 'raises an error if it failed' do
expect { repo.delete_refs('refs\heads\fix') }.to raise_error(Gitlab::Git::Repository::GitError)
end
@@ -615,32 +615,22 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
describe '#branch_names_contains_sha' do
- shared_examples 'returning the right branches' do
- let(:head_id) { repository.rugged.head.target.oid }
- let(:new_branch) { head_id }
- let(:utf8_branch) { 'branch-é' }
+ let(:head_id) { repository.rugged.head.target.oid }
+ let(:new_branch) { head_id }
+ let(:utf8_branch) { 'branch-é' }
- before do
- repository.create_branch(new_branch, 'master')
- repository.create_branch(utf8_branch, 'master')
- end
-
- after do
- repository.delete_branch(new_branch)
- repository.delete_branch(utf8_branch)
- end
-
- it 'displays that branch' do
- expect(repository.branch_names_contains_sha(head_id)).to include('master', new_branch, utf8_branch)
- end
+ before do
+ repository.create_branch(new_branch, 'master')
+ repository.create_branch(utf8_branch, 'master')
end
- context 'when Gitaly is enabled' do
- it_behaves_like 'returning the right branches'
+ after do
+ repository.delete_branch(new_branch)
+ repository.delete_branch(utf8_branch)
end
- context 'when Gitaly is disabled', :disable_gitaly do
- it_behaves_like 'returning the right branches'
+ it 'displays that branch' do
+ expect(repository.branch_names_contains_sha(head_id)).to include('master', new_branch, utf8_branch)
end
end
@@ -2257,66 +2247,42 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
describe '#checksum' do
- shared_examples 'calculating checksum' do
- it 'calculates the checksum for non-empty repo' do
- expect(repository.checksum).to eq '54f21be4c32c02f6788d72207fa03ad3bce725e4'
- end
-
- it 'returns 0000000000000000000000000000000000000000 for an empty repo' do
- FileUtils.rm_rf(File.join(storage_path, 'empty-repo.git'))
-
- system(git_env, *%W(#{Gitlab.config.git.bin_path} init --bare empty-repo.git),
- chdir: storage_path,
- out: '/dev/null',
- err: '/dev/null')
-
- empty_repo = described_class.new('default', 'empty-repo.git', '')
+ it 'calculates the checksum for non-empty repo' do
+ expect(repository.checksum).to eq '54f21be4c32c02f6788d72207fa03ad3bce725e4'
+ end
- expect(empty_repo.checksum).to eq '0000000000000000000000000000000000000000'
- end
+ it 'returns 0000000000000000000000000000000000000000 for an empty repo' do
+ FileUtils.rm_rf(File.join(storage_path, 'empty-repo.git'))
- it 'raises Gitlab::Git::Repository::InvalidRepository error for non-valid git repo' do
- FileUtils.rm_rf(File.join(storage_path, 'non-valid.git'))
+ system(git_env, *%W(#{Gitlab.config.git.bin_path} init --bare empty-repo.git),
+ chdir: storage_path,
+ out: '/dev/null',
+ err: '/dev/null')
- system(git_env, *%W(#{Gitlab.config.git.bin_path} clone --bare #{TEST_REPO_PATH} non-valid.git),
- chdir: SEED_STORAGE_PATH,
- out: '/dev/null',
- err: '/dev/null')
+ empty_repo = described_class.new('default', 'empty-repo.git', '')
- File.truncate(File.join(storage_path, 'non-valid.git/HEAD'), 0)
+ expect(empty_repo.checksum).to eq '0000000000000000000000000000000000000000'
+ end
- non_valid = described_class.new('default', 'non-valid.git', '')
+ it 'raises Gitlab::Git::Repository::InvalidRepository error for non-valid git repo' do
+ FileUtils.rm_rf(File.join(storage_path, 'non-valid.git'))
- expect { non_valid.checksum }.to raise_error(Gitlab::Git::Repository::InvalidRepository)
- end
+ system(git_env, *%W(#{Gitlab.config.git.bin_path} clone --bare #{TEST_REPO_PATH} non-valid.git),
+ chdir: SEED_STORAGE_PATH,
+ out: '/dev/null',
+ err: '/dev/null')
- it 'raises Gitlab::Git::Repository::NoRepository error when there is no repo' do
- broken_repo = described_class.new('default', 'a/path.git', '')
+ File.truncate(File.join(storage_path, 'non-valid.git/HEAD'), 0)
- expect { broken_repo.checksum }.to raise_error(Gitlab::Git::Repository::NoRepository)
- end
- end
+ non_valid = described_class.new('default', 'non-valid.git', '')
- context 'when calculate_checksum Gitaly feature is enabled' do
- it_behaves_like 'calculating checksum'
+ expect { non_valid.checksum }.to raise_error(Gitlab::Git::Repository::InvalidRepository)
end
- context 'when calculate_checksum Gitaly feature is disabled', :disable_gitaly do
- it_behaves_like 'calculating checksum'
-
- describe 'when storage is broken', :broken_storage do
- it 'raises a storage exception when storage is not available' do
- broken_repo = described_class.new('broken', 'a/path.git', '')
-
- expect { broken_repo.rugged }.to raise_error(Gitlab::Git::Storage::Inaccessible)
- end
- end
-
- it "raises a Gitlab::Git::Repository::Failure error if the `popen` call to git returns a non-zero exit code" do
- allow(repository).to receive(:popen).and_return(['output', nil])
+ it 'raises Gitlab::Git::Repository::NoRepository error when there is no repo' do
+ broken_repo = described_class.new('default', 'a/path.git', '')
- expect { repository.checksum }.to raise_error Gitlab::Git::Repository::ChecksumError
- end
+ expect { broken_repo.checksum }.to raise_error(Gitlab::Git::Repository::NoRepository)
end
end
diff --git a/spec/lib/gitlab/git/tag_spec.rb b/spec/lib/gitlab/git/tag_spec.rb
index 6c4f538bf01..be2f5bfb819 100644
--- a/spec/lib/gitlab/git/tag_spec.rb
+++ b/spec/lib/gitlab/git/tag_spec.rb
@@ -32,4 +32,56 @@ describe Gitlab::Git::Tag, seed_helper: true do
context 'when Gitaly tags feature is disabled', :skip_gitaly_mock do
it_behaves_like 'Gitlab::Git::Repository#tags'
end
+
+ describe '.get_message' do
+ let(:tag_ids) { %w[f4e6814c3e4e7a0de82a9e7cd20c626cc963a2f8 8a2a6eb295bb170b34c24c76c49ed0e9b2eaf34b] }
+
+ subject do
+ tag_ids.map { |id| described_class.get_message(repository, id) }
+ end
+
+ shared_examples 'getting tag messages' do
+ it 'gets tag messages' do
+ expect(subject[0]).to eq("Release\n")
+ expect(subject[1]).to eq("Version 1.1.0\n")
+ end
+ end
+
+ context 'when Gitaly tag_messages feature is enabled' do
+ it_behaves_like 'getting tag messages'
+
+ it 'gets messages in one batch', :request_store do
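+        # Both lazy tag messages should be fetched in a single Gitaly request.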
+ expect { subject.map(&:itself) }.to change { Gitlab::GitalyClient.get_request_count }.by(1)
+ end
+ end
+
+ context 'when Gitaly tag_messages feature is disabled', :disable_gitaly do
+ it_behaves_like 'getting tag messages'
+ end
+ end
+
+  describe 'tag info from Gitaly tag' do
+ context 'message_size != message.size' do
+ let(:gitaly_tag) { build(:gitaly_tag, message: ''.b, message_size: message_size) }
+ let(:tag) { described_class.new(repository, gitaly_tag) }
+
+ context 'message_size less than threshold' do
+ let(:message_size) { 123 }
+
+        it 'fetches tag message separately' do
+ expect(described_class).to receive(:get_message).with(repository, gitaly_tag.id)
+
+ tag.message
+ end
+ end
+
+ context 'message_size greater than threshold' do
+ let(:message_size) { described_class::MAX_TAG_MESSAGE_DISPLAY_SIZE + 1 }
+
+ it 'returns a notice about message size' do
+ expect(tag.message).to eq("--tag message is too big")
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index ad76adcc2e5..8b46b04b8b5 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -185,6 +185,7 @@ project:
- cluster
- clusters
- cluster_project
+- cluster_ingresses
- creator
- group
- namespace
diff --git a/spec/lib/gitlab/import_export/project.json b/spec/lib/gitlab/import_export/project.json
index 6d63749296e..4f64f2bd6b4 100644
--- a/spec/lib/gitlab/import_export/project.json
+++ b/spec/lib/gitlab/import_export/project.json
@@ -6760,26 +6760,6 @@
"wiki_page_events": true
},
{
- "id": 92,
- "title": "Gemnasium",
- "project_id": 5,
- "created_at": "2016-06-14T15:01:51.202Z",
- "updated_at": "2016-06-14T15:01:51.202Z",
- "active": false,
- "properties": {},
- "template": false,
- "push_events": true,
- "issues_events": true,
- "merge_requests_events": true,
- "tag_push_events": true,
- "note_events": true,
- "job_events": true,
- "type": "GemnasiumService",
- "category": "common",
- "default": false,
- "wiki_page_events": true
- },
- {
"id": 91,
"title": "Flowdock",
"project_id": 5,
diff --git a/spec/lib/gitlab/incoming_email_spec.rb b/spec/lib/gitlab/incoming_email_spec.rb
index ad087f42e06..4c0c3fcbcc7 100644
--- a/spec/lib/gitlab/incoming_email_spec.rb
+++ b/spec/lib/gitlab/incoming_email_spec.rb
@@ -83,6 +83,10 @@ describe Gitlab::IncomingEmail do
it "returns reply key" do
expect(described_class.key_from_address("replies+key@example.com")).to eq("key")
end
+
+ it 'does not match emails with extra bits' do
+ expect(described_class.key_from_address('somereplies+somekey@example.com.someotherdomain.com')).to be nil
+ end
end
context 'self.key_from_fallback_message_id' do
diff --git a/spec/lib/gitlab/metrics/web_transaction_spec.rb b/spec/lib/gitlab/metrics/web_transaction_spec.rb
index 1d162f53a13..6eb0600f49e 100644
--- a/spec/lib/gitlab/metrics/web_transaction_spec.rb
+++ b/spec/lib/gitlab/metrics/web_transaction_spec.rb
@@ -180,11 +180,11 @@ describe Gitlab::Metrics::WebTransaction do
end
context 'when request goes to ActionController' do
- let(:content_type) { 'text/html' }
+ let(:request) { double(:request, format: double(:format, ref: :html)) }
before do
klass = double(:klass, name: 'TestController')
- controller = double(:controller, class: klass, action_name: 'show', content_type: content_type)
+ controller = double(:controller, class: klass, action_name: 'show', request: request)
env['action_controller.instance'] = controller
end
@@ -195,7 +195,7 @@ describe Gitlab::Metrics::WebTransaction do
end
context 'when the response content type is not :html' do
- let(:content_type) { 'application/json' }
+ let(:request) { double(:request, format: double(:format, ref: :json)) }
it 'appends the mime type to the transaction action' do
expect(transaction.labels).to eq({ controller: 'TestController', action: 'show.json' })
diff --git a/spec/lib/gitlab/project_search_results_spec.rb b/spec/lib/gitlab/project_search_results_spec.rb
index a34b7d9905a..e3f705d2299 100644
--- a/spec/lib/gitlab/project_search_results_spec.rb
+++ b/spec/lib/gitlab/project_search_results_spec.rb
@@ -106,6 +106,18 @@ describe Gitlab::ProjectSearchResults do
end
end
+ context 'when the matching content contains multiple null bytes' do
+ let(:search_result) { "master:testdata/foo.txt\x001\x00blah\x001\x00foo" }
+
+ it 'returns a valid FoundBlob' do
+ expect(subject.filename).to eq('testdata/foo.txt')
+ expect(subject.basename).to eq('testdata/foo')
+ expect(subject.ref).to eq('master')
+ expect(subject.startline).to eq(1)
+ expect(subject.data).to eq("blah\x001\x00foo")
+ end
+ end
+
context 'when the search result ends with an empty line' do
let(:results) { project.repository.search_files_by_content('Role models', 'master') }
diff --git a/spec/lib/gitlab/untrusted_regexp_spec.rb b/spec/lib/gitlab/untrusted_regexp_spec.rb
index bed58d407ef..0a6ac0aa294 100644
--- a/spec/lib/gitlab/untrusted_regexp_spec.rb
+++ b/spec/lib/gitlab/untrusted_regexp_spec.rb
@@ -1,6 +1,49 @@
-require 'spec_helper'
+require 'fast_spec_helper'
+require 'support/shared_examples/malicious_regexp_shared_examples'
describe Gitlab::UntrustedRegexp do
+ describe '.valid?' do
+    it 'returns false if regexp is invalid' do
+ expect(described_class.valid?('/some ( thing/'))
+ .to be false
+ end
+
+    it 'returns true if regexp is valid' do
+ expect(described_class.valid?('/some .* thing/'))
+ .to be true
+ end
+ end
+
+ describe '.fabricate' do
+ context 'when regexp is using /regexp/ scheme with flags' do
+ it 'fabricates regexp with a single flag' do
+ regexp = described_class.fabricate('/something/i')
+
+ expect(regexp).to eq described_class.new('(?i)something')
+ expect(regexp.scan('SOMETHING')).to be_one
+ end
+
+ it 'fabricates regexp with multiple flags' do
+ regexp = described_class.fabricate('/something/im')
+
+ expect(regexp).to eq described_class.new('(?im)something')
+ end
+
+ it 'fabricates regexp without flags' do
+ regexp = described_class.fabricate('/something/')
+
+ expect(regexp).to eq described_class.new('something')
+ end
+ end
+
+ context 'when regexp is a raw pattern' do
+ it 'raises an error' do
+ expect { described_class.fabricate('some .* thing') }
+ .to raise_error(RegexpError)
+ end
+ end
+ end
+
describe '#initialize' do
subject { described_class.new(pattern) }
@@ -39,6 +82,14 @@ describe Gitlab::UntrustedRegexp do
expect(result).to be_falsy
end
+
+ it 'can handle regular expressions in multiline mode' do
+ regexp = described_class.new('^\d', multiline: true)
+
+ result = regexp === "Header\n\n1. Content"
+
+ expect(result).to be_truthy
+ end
end
describe '#scan' do
diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb
index e732b089d44..660671cefaf 100644
--- a/spec/lib/gitlab/workhorse_spec.rb
+++ b/spec/lib/gitlab/workhorse_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe Gitlab::Workhorse do
- let(:project) { create(:project, :repository) }
+ set(:project) { create(:project, :repository) }
let(:repository) { project.repository }
def decode_workhorse_header(array)
@@ -55,16 +55,6 @@ describe Gitlab::Workhorse do
end
end
- context 'when Gitaly workhorse_archive feature is disabled', :disable_gitaly do
- it 'sets the header correctly' do
- key, command, params = decode_workhorse_header(subject)
-
- expect(key).to eq('Gitlab-Workhorse-Send-Data')
- expect(command).to eq('git-archive')
- expect(params).to eq(base_params)
- end
- end
-
context "when the repository doesn't have an archive file path" do
before do
allow(project.repository).to receive(:archive_metadata).and_return(Hash.new)
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index e9b3ccf31fb..f310a6854d5 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -634,7 +634,7 @@ describe Notify do
it 'contains all the useful information' do
is_expected.to have_subject "Invitation to join the #{project.full_name} project"
is_expected.to have_html_escaped_body_text project.full_name
- is_expected.to have_body_text project.web_url
+ is_expected.to have_body_text project.full_name
is_expected.to have_body_text project_member.human_access
is_expected.to have_body_text project_member.invite_token
end
diff --git a/spec/migrations/add_unique_constraint_to_project_features_project_id_spec.rb b/spec/migrations/add_unique_constraint_to_project_features_project_id_spec.rb
new file mode 100644
index 00000000000..bf299b70a29
--- /dev/null
+++ b/spec/migrations/add_unique_constraint_to_project_features_project_id_spec.rb
@@ -0,0 +1,59 @@
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20180511174224_add_unique_constraint_to_project_features_project_id.rb')
+
+describe AddUniqueConstraintToProjectFeaturesProjectId, :migration do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:features) { table(:project_features) }
+ let(:migration) { described_class.new }
+
+ describe '#up' do
+ before do
+ (1..3).each do |i|
+ namespaces.create(id: i, name: "ns-test-#{i}", path: "ns-test-i#{i}")
+ projects.create!(id: i, name: "test-#{i}", path: "test-#{i}", namespace_id: i)
+ end
+
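+      # Several feature rows per project; the migration should keep only the one with the highest id.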
+ features.create!(id: 1, project_id: 1)
+ features.create!(id: 2, project_id: 1)
+ features.create!(id: 3, project_id: 2)
+ features.create!(id: 4, project_id: 2)
+ features.create!(id: 5, project_id: 2)
+ features.create!(id: 6, project_id: 3)
+ end
+
+ it 'creates a unique index and removes duplicates' do
+ expect(migration.index_exists?(:project_features, :project_id, unique: false, name: 'index_project_features_on_project_id')).to be true
+
+ expect { migration.up }.to change { features.count }.from(6).to(3)
+
+ expect(migration.index_exists?(:project_features, :project_id, unique: true, name: 'index_project_features_on_project_id')).to be true
+ expect(migration.index_exists?(:project_features, :project_id, name: 'index_project_features_on_project_id_unique')).to be false
+
+ project_1_features = features.where(project_id: 1)
+ expect(project_1_features.count).to eq(1)
+ expect(project_1_features.first.id).to eq(2)
+
+ project_2_features = features.where(project_id: 2)
+ expect(project_2_features.count).to eq(1)
+ expect(project_2_features.first.id).to eq(5)
+
+ project_3_features = features.where(project_id: 3)
+ expect(project_3_features.count).to eq(1)
+ expect(project_3_features.first.id).to eq(6)
+ end
+ end
+
+ describe '#down' do
+ it 'restores the original index' do
+ migration.up
+
+ expect(migration.index_exists?(:project_features, :project_id, unique: true, name: 'index_project_features_on_project_id')).to be true
+
+ migration.down
+
+ expect(migration.index_exists?(:project_features, :project_id, unique: false, name: 'index_project_features_on_project_id')).to be true
+ expect(migration.index_exists?(:project_features, :project_id, name: 'index_project_features_on_project_id_old')).to be false
+ end
+ end
+end
diff --git a/spec/models/appearance_spec.rb b/spec/models/appearance_spec.rb
index 56b5d616284..77b07cf1ac9 100644
--- a/spec/models/appearance_spec.rb
+++ b/spec/models/appearance_spec.rb
@@ -3,33 +3,10 @@ require 'rails_helper'
describe Appearance do
subject { build(:appearance) }
- it { is_expected.to be_valid }
+ it { include(CacheableAttributes) }
+ it { expect(described_class.current_without_cache).to eq(described_class.first) }
- it { is_expected.to have_many(:uploads).dependent(:destroy) }
-
- describe '.current', :use_clean_rails_memory_store_caching do
- let!(:appearance) { create(:appearance) }
-
- it 'returns the current appearance row' do
- expect(described_class.current).to eq(appearance)
- end
-
- it 'caches the result' do
- expect(described_class).to receive(:first).once
-
- 2.times { described_class.current }
- end
- end
-
- describe '#flush_redis_cache' do
- it 'flushes the cache in Redis' do
- appearance = create(:appearance)
-
- expect(Rails.cache).to receive(:delete).with(described_class::CACHE_KEY)
-
- appearance.flush_redis_cache
- end
- end
+ it { is_expected.to have_many(:uploads) }
describe '#single_appearance_row' do
it 'adds an error when more than 1 row exists' do
@@ -41,4 +18,12 @@ describe Appearance do
expect(new_row.valid?).to eq(false)
end
end
+
+ context 'with uploads' do
+ it_behaves_like 'model with mounted uploader', false do
+ let(:model_object) { create(:appearance, :with_logo) }
+ let(:upload_attribute) { :logo }
+ let(:uploader_class) { AttachmentUploader }
+ end
+ end
end
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 10d6109cae7..7e47043a1cb 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -3,6 +3,9 @@ require 'spec_helper'
describe ApplicationSetting do
let(:setting) { described_class.create_from_defaults }
+ it { include(CacheableAttributes) }
+ it { expect(described_class.current_without_cache).to eq(described_class.last) }
+
it { expect(setting).to be_valid }
it { expect(setting.uuid).to be_present }
it { expect(setting).to have_db_column(:auto_devops_enabled) }
@@ -318,33 +321,6 @@ describe ApplicationSetting do
end
end
- describe '.current' do
- context 'redis unavailable' do
- it 'returns an ApplicationSetting' do
- allow(Rails.cache).to receive(:fetch).and_call_original
- allow(described_class).to receive(:last).and_return(:last)
- expect(Rails.cache).to receive(:fetch).with(ApplicationSetting::CACHE_KEY).and_raise(ArgumentError)
-
- expect(described_class.current).to eq(:last)
- end
- end
-
- context 'when an ApplicationSetting is not yet present' do
- it 'does not cache nil object' do
- # when missing settings a nil object is returned, but not cached
- allow(described_class).to receive(:last).and_return(nil).twice
- expect(described_class.current).to be_nil
-
- # when the settings are set the method returns a valid object
- allow(described_class).to receive(:last).and_return(:last)
- expect(described_class.current).to eq(:last)
-
- # subsequent calls get everything from cache
- expect(described_class.current).to eq(:last)
- end
- end
- end
-
context 'restrict creating duplicates' do
before do
described_class.create_from_defaults
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index dc810489011..96173889ccd 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -629,6 +629,14 @@ describe Ci::Build do
it { is_expected.to eq('review/host') }
end
+
+ context 'when using persisted variables' do
+ let(:build) do
+ create(:ci_build, environment: 'review/x$CI_BUILD_ID')
+ end
+
+ it { is_expected.to eq('review/x') }
+ end
end
describe '#starts_environment?' do
@@ -1270,6 +1278,46 @@ describe Ci::Build do
end
end
+ describe '#playable?' do
+ context 'when build is a manual action' do
+ context 'when build has been skipped' do
+ subject { build_stubbed(:ci_build, :manual, status: :skipped) }
+
+ it { is_expected.not_to be_playable }
+ end
+
+ context 'when build has been canceled' do
+ subject { build_stubbed(:ci_build, :manual, status: :canceled) }
+
+ it { is_expected.to be_playable }
+ end
+
+ context 'when build is successful' do
+ subject { build_stubbed(:ci_build, :manual, status: :success) }
+
+ it { is_expected.to be_playable }
+ end
+
+ context 'when build has failed' do
+ subject { build_stubbed(:ci_build, :manual, status: :failed) }
+
+ it { is_expected.to be_playable }
+ end
+
+ context 'when build is a manual untriggered action' do
+ subject { build_stubbed(:ci_build, :manual, status: :manual) }
+
+ it { is_expected.to be_playable }
+ end
+ end
+
+ context 'when build is not a manual action' do
+ subject { build_stubbed(:ci_build, :success) }
+
+ it { is_expected.not_to be_playable }
+ end
+ end
+
describe 'project settings' do
describe '#allow_git_fetch' do
it 'return project allow_git_fetch configuration' do
@@ -1485,6 +1533,7 @@ describe Ci::Build do
let(:container_registry_enabled) { false }
let(:predefined_variables) do
[
+ { key: 'CI_PIPELINE_ID', value: pipeline.id.to_s, public: true },
{ key: 'CI_JOB_ID', value: build.id.to_s, public: true },
{ key: 'CI_JOB_TOKEN', value: build.token, public: false },
{ key: 'CI_BUILD_ID', value: build.id.to_s, public: true },
@@ -1516,7 +1565,6 @@ describe Ci::Build do
{ key: 'CI_PROJECT_NAMESPACE', value: project.namespace.full_path, public: true },
{ key: 'CI_PROJECT_URL', value: project.web_url, public: true },
{ key: 'CI_PROJECT_VISIBILITY', value: 'private', public: true },
- { key: 'CI_PIPELINE_ID', value: pipeline.id.to_s, public: true },
{ key: 'CI_CONFIG_PATH', value: pipeline.ci_yaml_file_path, public: true },
{ key: 'CI_PIPELINE_SOURCE', value: pipeline.source, public: true },
{ key: 'CI_COMMIT_MESSAGE', value: pipeline.git_commit_message, public: true },
@@ -2044,7 +2092,7 @@ describe Ci::Build do
let(:deploy_token_variables) do
[
- { key: 'CI_DEPLOY_USER', value: deploy_token.name, public: true },
+ { key: 'CI_DEPLOY_USER', value: deploy_token.username, public: true },
{ key: 'CI_DEPLOY_PASSWORD', value: deploy_token.token, public: false }
]
end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index ddd66a6be87..e4f4c62bd22 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -167,13 +167,39 @@ describe Ci::Pipeline, :mailer do
end
end
+ describe '#persisted_variables' do
+ context 'when pipeline is not persisted yet' do
+ subject { build(:ci_pipeline).persisted_variables }
+
+      it 'does not contain persisted variables' do
+ keys = subject.map { |variable| variable[:key] }
+
+ expect(keys).not_to include 'CI_PIPELINE_ID'
+ end
+ end
+
+ context 'when pipeline is persisted' do
+ subject { build_stubbed(:ci_pipeline).persisted_variables }
+
+      it 'contains persisted variables' do
+ keys = subject.map { |variable| variable[:key] }
+
+ expect(keys).to eq %w[CI_PIPELINE_ID]
+ end
+ end
+ end
+
describe '#predefined_variables' do
subject { pipeline.predefined_variables }
it 'includes all predefined variables in a valid order' do
keys = subject.map { |variable| variable[:key] }
- expect(keys).to eq %w[CI_PIPELINE_ID CI_CONFIG_PATH CI_PIPELINE_SOURCE CI_COMMIT_MESSAGE CI_COMMIT_TITLE CI_COMMIT_DESCRIPTION]
+ expect(keys).to eq %w[CI_CONFIG_PATH
+ CI_PIPELINE_SOURCE
+ CI_COMMIT_MESSAGE
+ CI_COMMIT_TITLE
+ CI_COMMIT_DESCRIPTION]
end
end
@@ -774,6 +800,33 @@ describe Ci::Pipeline, :mailer do
end
end
+ describe '#number_of_warnings' do
+ it 'returns the number of warnings' do
+ create(:ci_build, :allowed_to_fail, :failed, pipeline: pipeline, name: 'rubocop')
+
+ expect(pipeline.number_of_warnings).to eq(1)
+ end
+
+ it 'supports eager loading of the number of warnings' do
+ pipeline2 = create(:ci_empty_pipeline, status: :created, project: project)
+
+ create(:ci_build, :allowed_to_fail, :failed, pipeline: pipeline, name: 'rubocop')
+ create(:ci_build, :allowed_to_fail, :failed, pipeline: pipeline2, name: 'rubocop')
+
+ pipelines = project.pipelines.to_a
+
+ pipelines.each(&:number_of_warnings)
+
+ # To run the queries we need to actually use the lazy objects, which we do
+ # by just sending "to_i" to them.
+ amount = ActiveRecord::QueryRecorder
+ .new { pipelines.each { |p| p.number_of_warnings.to_i } }
+ .count
+
+ expect(amount).to eq(1)
+ end
+ end
+
shared_context 'with some outdated pipelines' do
before do
create_pipeline(:canceled, 'ref', 'A', project)
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index e2b212f4f4c..0fbc934f669 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -626,62 +626,26 @@ describe Ci::Runner do
end
describe '.assignable_for' do
- let(:runner) { create(:ci_runner) }
+ let!(:unlocked_project_runner) { create(:ci_runner, runner_type: :project_type, projects: [project]) }
+ let!(:locked_project_runner) { create(:ci_runner, runner_type: :project_type, locked: true, projects: [project]) }
+ let!(:group_runner) { create(:ci_runner, runner_type: :group_type) }
+ let!(:instance_runner) { create(:ci_runner, :shared) }
let(:project) { create(:project) }
let(:another_project) { create(:project) }
- before do
- project.runners << runner
- end
-
- context 'with shared runners' do
- before do
- runner.update(is_shared: true)
- end
-
- context 'does not give owned runner' do
- subject { described_class.assignable_for(project) }
-
- it { is_expected.to be_empty }
- end
-
- context 'does not give shared runner' do
- subject { described_class.assignable_for(another_project) }
-
- it { is_expected.to be_empty }
- end
- end
-
- context 'with unlocked runner' do
- context 'does not give owned runner' do
- subject { described_class.assignable_for(project) }
-
- it { is_expected.to be_empty }
- end
+ context 'with already assigned project' do
+ subject { described_class.assignable_for(project) }
- context 'does give a specific runner' do
- subject { described_class.assignable_for(another_project) }
-
- it { is_expected.to contain_exactly(runner) }
- end
+ it { is_expected.to be_empty }
end
- context 'with locked runner' do
- before do
- runner.update(locked: true)
- end
-
- context 'does not give owned runner' do
- subject { described_class.assignable_for(project) }
-
- it { is_expected.to be_empty }
- end
-
- context 'does not give a locked runner' do
- subject { described_class.assignable_for(another_project) }
+ context 'with a different project' do
+ subject { described_class.assignable_for(another_project) }
- it { is_expected.to be_empty }
- end
+ it { is_expected.to include(unlocked_project_runner) }
+ it { is_expected.not_to include(group_runner) }
+ it { is_expected.not_to include(locked_project_runner) }
+ it { is_expected.not_to include(instance_runner) }
end
end
diff --git a/spec/models/clusters/applications/prometheus_spec.rb b/spec/models/clusters/applications/prometheus_spec.rb
index aeca6ee903a..d2302583ac8 100644
--- a/spec/models/clusters/applications/prometheus_spec.rb
+++ b/spec/models/clusters/applications/prometheus_spec.rb
@@ -79,12 +79,22 @@ describe Clusters::Applications::Prometheus do
end
it 'creates proper url' do
- expect(subject.prometheus_client.url).to eq('http://example.com/api/v1/proxy/namespaces/gitlab-managed-apps/service/prometheus-prometheus-server:80')
+ expect(subject.prometheus_client.url).to eq('http://example.com/api/v1/namespaces/gitlab-managed-apps/service/prometheus-prometheus-server:80/proxy')
end
it 'copies options and headers from kube client to proxy client' do
expect(subject.prometheus_client.options).to eq(kube_client.rest_client.options.merge(headers: kube_client.headers))
end
+
+ context 'when cluster is not reachable' do
+ before do
+ allow(kube_client).to receive(:proxy_url).and_raise(Kubeclient::HttpError.new(401, 'Unauthorized', nil))
+ end
+
+ it 'returns nil' do
+ expect(subject.prometheus_client).to be_nil
+ end
+ end
end
end
diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb
index 4e6b037a720..090f91168ad 100644
--- a/spec/models/commit_spec.rb
+++ b/spec/models/commit_spec.rb
@@ -52,22 +52,98 @@ describe Commit do
end
end
- describe '#author' do
+ describe '#author', :request_store do
it 'looks up the author in a case-insensitive way' do
user = create(:user, email: commit.author_email.upcase)
expect(commit.author).to eq(user)
end
- it 'caches the author', :request_store do
+ it 'caches the author' do
user = create(:user, email: commit.author_email)
- expect(User).to receive(:find_by_any_email).and_call_original
expect(commit.author).to eq(user)
+
key = "Commit:author:#{commit.author_email.downcase}"
- expect(RequestStore.store[key]).to eq(user)
+ expect(RequestStore.store[key]).to eq(user)
expect(commit.author).to eq(user)
end
+
+ context 'using eager loading' do
+ let!(:alice) { create(:user, email: 'alice@example.com') }
+ let!(:bob) { create(:user, email: 'hunter2@example.com') }
+
+ let(:alice_commit) do
+ described_class.new(RepoHelpers.sample_commit, project).tap do |c|
+ c.author_email = 'alice@example.com'
+ end
+ end
+
+ let(:bob_commit) do
+ # The commit for Bob uses one of his alternative Emails, instead of the
+ # primary one.
+ described_class.new(RepoHelpers.sample_commit, project).tap do |c|
+ c.author_email = 'bob@example.com'
+ end
+ end
+
+ let(:eve_commit) do
+ described_class.new(RepoHelpers.sample_commit, project).tap do |c|
+ c.author_email = 'eve@example.com'
+ end
+ end
+
+ let!(:commits) { [alice_commit, bob_commit, eve_commit] }
+
+ before do
+ create(:email, user: bob, email: 'bob@example.com')
+ end
+
+ it 'executes only two SQL queries' do
+ recorder = ActiveRecord::QueryRecorder.new do
+ # Running this first ensures we don't run one query for every
+ # commit.
+ commits.each(&:lazy_author)
+
+ # This forces the execution of the SQL queries necessary to load the
+ # data.
+ commits.each { |c| c.author.try(:id) }
+ end
+
+ expect(recorder.count).to eq(2)
+ end
+
+ it "preloads the authors for Commits matching a user's primary Email" do
+ commits.each(&:lazy_author)
+
+ expect(alice_commit.author).to eq(alice)
+ end
+
+ it "preloads the authors for Commits using a User's alternative Email" do
+ commits.each(&:lazy_author)
+
+ expect(bob_commit.author).to eq(bob)
+ end
+
+      it 'sets the author to nil if an author could not be found for a Commit' do
+ commits.each(&:lazy_author)
+
+ expect(eve_commit.author).to be_nil
+ end
+
+ it 'does not execute SQL queries once the authors are preloaded' do
+ commits.each(&:lazy_author)
+ commits.each { |c| c.author.try(:id) }
+
+ recorder = ActiveRecord::QueryRecorder.new do
+ alice_commit.author
+ bob_commit.author
+ eve_commit.author
+ end
+
+ expect(recorder.count).to be_zero
+ end
+ end
end
describe '#to_reference' do
@@ -182,7 +258,6 @@ eos
it { is_expected.to respond_to(:date) }
it { is_expected.to respond_to(:diffs) }
it { is_expected.to respond_to(:id) }
- it { is_expected.to respond_to(:to_patch) }
end
describe '#closes_issues' do
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index 2ed29052dc1..f3f2bc28d2c 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -565,4 +565,10 @@ describe CommitStatus do
it_behaves_like 'commit status enqueued'
end
end
+
+ describe '#present' do
+ subject { commit_status.present }
+
+ it { is_expected.to be_a(CommitStatusPresenter) }
+ end
end
diff --git a/spec/models/concerns/cacheable_attributes_spec.rb b/spec/models/concerns/cacheable_attributes_spec.rb
new file mode 100644
index 00000000000..49e4b23ebc7
--- /dev/null
+++ b/spec/models/concerns/cacheable_attributes_spec.rb
@@ -0,0 +1,153 @@
+require 'spec_helper'
+
+describe CacheableAttributes do
+ let(:minimal_test_class) do
+ Class.new do
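+      # Minimal stand-in model exposing the callbacks, finders and attributes that CacheableAttributes builds on.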
+ include ActiveModel::Model
+ extend ActiveModel::Callbacks
+ define_model_callbacks :commit
+ include CacheableAttributes
+
+ def self.name
+ 'TestClass'
+ end
+
+ def self.first
+ @_first ||= new('foo' => 'a')
+ end
+
+ def self.last
+ @_last ||= new('foo' => 'a', 'bar' => 'b')
+ end
+
+ attr_accessor :attributes
+
+ def initialize(attrs = {})
+ @attributes = attrs
+ end
+ end
+ end
+
+ shared_context 'with defaults' do
+ before do
+ minimal_test_class.define_singleton_method(:defaults) do
+ { foo: 'a', bar: 'b', baz: 'c' }
+ end
+ end
+ end
+
+ describe '.current_without_cache' do
+ it 'defaults to last' do
+ expect(minimal_test_class.current_without_cache).to eq(minimal_test_class.last)
+ end
+
+    it 'can be overridden' do
+ minimal_test_class.define_singleton_method(:current_without_cache) do
+ first
+ end
+
+ expect(minimal_test_class.current_without_cache).to eq(minimal_test_class.first)
+ end
+ end
+
+ describe '.cache_key' do
+ it 'excludes cache attributes' do
+ expect(minimal_test_class.cache_key).to eq("TestClass:#{Gitlab::VERSION}:#{Gitlab.migrations_hash}:json")
+ end
+ end
+
+ describe '.defaults' do
+ it 'defaults to {}' do
+ expect(minimal_test_class.defaults).to eq({})
+ end
+
+ context 'with defaults defined' do
+ include_context 'with defaults'
+
+ it 'can be overridden' do
+ expect(minimal_test_class.defaults).to eq({ foo: 'a', bar: 'b', baz: 'c' })
+ end
+ end
+ end
+
+ describe '.build_from_defaults' do
+ include_context 'with defaults'
+
+ context 'without any attributes given' do
+ it 'initializes a new object with the defaults' do
+ expect(minimal_test_class.build_from_defaults).not_to be_persisted
+ end
+ end
+
+ context 'with attributes given' do
+ it 'initializes a new object with the given attributes merged into the defaults' do
+ expect(minimal_test_class.build_from_defaults(foo: 'd').attributes[:foo]).to eq('d')
+ end
+ end
+ end
+
+ describe '.current', :use_clean_rails_memory_store_caching do
+ context 'when Redis is unavailable' do
+ it 'returns an uncached record' do
+ allow(minimal_test_class).to receive(:last).and_return(:last)
+ expect(Rails.cache).to receive(:read).and_raise(Redis::BaseError)
+
+ expect(minimal_test_class.current).to eq(:last)
+ end
+ end
+
+ context 'when a record is not yet present' do
+ it 'does not cache a nil object' do
+ # when settings are missing, a nil object is returned but not cached
+ allow(minimal_test_class).to receive(:last).twice.and_return(nil)
+
+ expect(minimal_test_class.current).to be_nil
+ expect(Rails.cache.exist?(minimal_test_class.cache_key)).to be(false)
+ end
+
+ it 'caches a non-nil object' do
+ # when the settings are present, the method returns a valid object
+ allow(minimal_test_class).to receive(:last).and_call_original
+
+ expect(minimal_test_class.current).to eq(minimal_test_class.last)
+ expect(Rails.cache.exist?(minimal_test_class.cache_key)).to be(true)
+
+ # subsequent calls retrieve the record from the cache
+ last_record = minimal_test_class.last
+ expect(minimal_test_class).not_to receive(:last)
+ expect(minimal_test_class.current.attributes).to eq(last_record.attributes)
+ end
+ end
+ end
+
+ describe '.cached', :use_clean_rails_memory_store_caching do
+ context 'when cache is cold' do
+ it 'returns nil' do
+ expect(minimal_test_class.cached).to be_nil
+ end
+ end
+
+ context 'when cached settings do not include the latest defaults' do
+ before do
+ Rails.cache.write(minimal_test_class.cache_key, { bar: 'b', baz: 'c' }.to_json)
+ minimal_test_class.define_singleton_method(:defaults) do
+ { foo: 'a', bar: 'b', baz: 'c' }
+ end
+ end
+
+ it 'includes attributes from defaults' do
+ expect(minimal_test_class.cached.attributes[:foo]).to eq(minimal_test_class.defaults[:foo])
+ end
+ end
+ end
+
+ describe '#cache!', :use_clean_rails_memory_store_caching do
+ let(:appearance_record) { create(:appearance) }
+
+ it 'caches the attributes' do
+ appearance_record.cache!
+
+ expect(Rails.cache.read(Appearance.cache_key)).to eq(appearance_record.attributes.to_json)
+ end
+ end
+end
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index 3d3092b8ac9..bd6bf5b0712 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -266,6 +266,19 @@ describe Issuable do
end
end
+ describe '#time_estimate=' do
+ it 'coerces the value below Gitlab::Database::MAX_INT_VALUE' do
+ expect { issue.time_estimate = 100 }.to change { issue.time_estimate }.to(100)
+ expect { issue.time_estimate = Gitlab::Database::MAX_INT_VALUE + 100 }.to change { issue.time_estimate }.to(Gitlab::Database::MAX_INT_VALUE)
+ end
+
+ it 'skips coercion for non-Integer values' do
+ expect { issue.time_estimate = nil }.to change { issue.time_estimate }.to(nil)
+ expect { issue.time_estimate = 'invalid time' }.not_to raise_error(StandardError)
+ expect { issue.time_estimate = 22.33 }.not_to raise_error(StandardError)
+ end
+ end
+
describe '#to_hook_data' do
let(:builder) { double }
diff --git a/spec/models/concerns/redis_cacheable_spec.rb b/spec/models/concerns/redis_cacheable_spec.rb
index 3d7963120b6..23c6c6233e9 100644
--- a/spec/models/concerns/redis_cacheable_spec.rb
+++ b/spec/models/concerns/redis_cacheable_spec.rb
@@ -1,21 +1,36 @@
require 'spec_helper'
describe RedisCacheable do
- let(:model) { double }
+ let(:model) do
+ Struct.new(:id, :attributes) do
+ def read_attribute(attribute)
+ attributes[attribute]
+ end
+
+ def cast_value_from_cache(attribute, cached_value)
+ cached_value
+ end
+
+ def has_attribute?(attribute)
+ attributes.has_key?(attribute)
+ end
+ end
+ end
+
+ let(:payload) { { name: 'value', time: Time.zone.now } }
+ let(:instance) { model.new(1, payload) }
+ let(:cache_key) { instance.__send__(:cache_attribute_key) }
before do
- model.extend(described_class)
- allow(model).to receive(:cache_attribute_key).and_return('key')
+ model.include(described_class)
end
describe '#cached_attribute' do
- let(:payload) { { attribute: 'value' } }
-
- subject { model.cached_attribute(payload.keys.first) }
+ subject { instance.cached_attribute(payload.keys.first) }
it 'gets the cache attribute' do
Gitlab::Redis::SharedState.with do |redis|
- expect(redis).to receive(:get).with('key')
+ expect(redis).to receive(:get).with(cache_key)
.and_return(payload.to_json)
end
@@ -24,16 +39,62 @@ describe RedisCacheable do
end
describe '#cache_attributes' do
- let(:values) { { name: 'new_name' } }
-
- subject { model.cache_attributes(values) }
+ subject { instance.cache_attributes(payload) }
it 'sets the cache attributes' do
Gitlab::Redis::SharedState.with do |redis|
- expect(redis).to receive(:set).with('key', values.to_json, anything)
+ expect(redis).to receive(:set).with(cache_key, payload.to_json, anything)
end
subject
end
end
+
+ describe '#cached_attr_reader', :clean_gitlab_redis_shared_state do
+ subject { instance.name }
+
+ before do
+ model.cached_attr_reader(:name)
+ end
+
+ context 'when there is no cached value' do
+ it 'reads the attribute' do
+ expect(instance).to receive(:read_attribute).and_call_original
+
+ expect(subject).to eq(payload[:name])
+ end
+ end
+
+ context 'when there is a cached value' do
+ it 'reads the cached value' do
+ expect(instance).not_to receive(:read_attribute)
+
+ instance.cache_attributes(payload)
+
+ expect(subject).to eq(payload[:name])
+ end
+ end
+
+ it 'always returns the latest values' do
+ expect(instance.name).to eq(payload[:name])
+
+ instance.cache_attributes(name: 'new_value')
+
+ expect(instance.name).to eq('new_value')
+ end
+ end
+
+ describe '#cast_value_from_cache' do
+ subject { instance.__send__(:cast_value_from_cache, attribute, value) }
+
+ context 'with runner contacted_at' do
+ let(:instance) { Ci::Runner.new }
+ let(:attribute) { :contacted_at }
+ let(:value) { '2018-05-07 13:53:08 UTC' }
+
+ it 'converts the cached string to the appropriate type' do
+ expect(subject).to be_an_instance_of(ActiveSupport::TimeWithZone)
+ end
+ end
+ end
end
diff --git a/spec/models/concerns/resolvable_note_spec.rb b/spec/models/concerns/resolvable_note_spec.rb
index 91591017587..fcb5250278e 100644
--- a/spec/models/concerns/resolvable_note_spec.rb
+++ b/spec/models/concerns/resolvable_note_spec.rb
@@ -326,4 +326,12 @@ describe Note, ResolvableNote do
end
end
end
+
+ describe "#potentially_resolvable?" do
+ it 'returns false if noteable could not be found' do
+ allow(subject).to receive(:noteable).and_return(nil)
+
+ expect(subject.potentially_resolvable?).to be_falsey
+ end
+ end
end
diff --git a/spec/models/concerns/sortable_spec.rb b/spec/models/concerns/sortable_spec.rb
new file mode 100644
index 00000000000..b821a84d5e0
--- /dev/null
+++ b/spec/models/concerns/sortable_spec.rb
@@ -0,0 +1,108 @@
+require 'spec_helper'
+
+describe Sortable do
+ describe '.order_by' do
+ let(:relation) { Group.all }
+
+ describe 'ordering by id' do
+ it 'ascending' do
+ expect(relation).to receive(:reorder).with(id: :asc)
+
+ relation.order_by('id_asc')
+ end
+
+ it 'descending' do
+ expect(relation).to receive(:reorder).with(id: :desc)
+
+ relation.order_by('id_desc')
+ end
+ end
+
+ describe 'ordering by created date' do
+ it 'ascending' do
+ expect(relation).to receive(:reorder).with(created_at: :asc)
+
+ relation.order_by('created_asc')
+ end
+
+ it 'descending' do
+ expect(relation).to receive(:reorder).with(created_at: :desc)
+
+ relation.order_by('created_desc')
+ end
+
+ it 'orders by "date"' do
+ expect(relation).to receive(:reorder).with(created_at: :desc)
+
+ relation.order_by('created_date')
+ end
+ end
+
+ describe 'ordering by name' do
+ it 'ascending' do
+ expect(relation).to receive(:reorder).with("lower(name) asc")
+
+ relation.order_by('name_asc')
+ end
+
+ it 'descending' do
+ expect(relation).to receive(:reorder).with("lower(name) desc")
+
+ relation.order_by('name_desc')
+ end
+ end
+
+ describe 'ordering by updated time' do
+ it 'ascending' do
+ expect(relation).to receive(:reorder).with(updated_at: :asc)
+
+ relation.order_by('updated_asc')
+ end
+
+ it 'descending' do
+ expect(relation).to receive(:reorder).with(updated_at: :desc)
+
+ relation.order_by('updated_desc')
+ end
+ end
+
+ it 'does not call reorder in case of unrecognized ordering' do
+ expect(relation).not_to receive(:reorder)
+
+ relation.order_by('random_ordering')
+ end
+ end
+
+ describe 'sorting groups' do
+ def ordered_group_names(order)
+ Group.all.order_by(order).map(&:name)
+ end
+
+ let!(:ref_time) { Time.parse('2018-05-01 00:00:00') }
+ let!(:group1) { create(:group, name: 'aa', id: 1, created_at: ref_time - 15.seconds, updated_at: ref_time) }
+ let!(:group2) { create(:group, name: 'AAA', id: 2, created_at: ref_time - 10.seconds, updated_at: ref_time - 5.seconds) }
+ let!(:group3) { create(:group, name: 'BB', id: 3, created_at: ref_time - 5.seconds, updated_at: ref_time - 10.seconds) }
+ let!(:group4) { create(:group, name: 'bbb', id: 4, created_at: ref_time, updated_at: ref_time - 15.seconds) }
+
+ it 'sorts groups by id' do
+ expect(ordered_group_names('id_asc')).to eq(%w(aa AAA BB bbb))
+ expect(ordered_group_names('id_desc')).to eq(%w(bbb BB AAA aa))
+ end
+
+ it 'sorts groups by name via case-insensitive comparison' do
+ expect(ordered_group_names('name_asc')).to eq(%w(aa AAA BB bbb))
+ expect(ordered_group_names('name_desc')).to eq(%w(bbb BB AAA aa))
+ end
+
+ it 'sorts groups by created_at' do
+ expect(ordered_group_names('created_asc')).to eq(%w(aa AAA BB bbb))
+ expect(ordered_group_names('created_desc')).to eq(%w(bbb BB AAA aa))
+ expect(ordered_group_names('created_date')).to eq(%w(bbb BB AAA aa))
+ end
+
+ it 'sorts groups by updated_at' do
+ expect(ordered_group_names('updated_asc')).to eq(%w(bbb BB AAA aa))
+ expect(ordered_group_names('updated_desc')).to eq(%w(aa AAA BB bbb))
+ end
+ end
+end
diff --git a/spec/models/generic_commit_status_spec.rb b/spec/models/generic_commit_status_spec.rb
index 673049d1cc4..a3e68d2e646 100644
--- a/spec/models/generic_commit_status_spec.rb
+++ b/spec/models/generic_commit_status_spec.rb
@@ -78,4 +78,10 @@ describe GenericCommitStatus do
it { is_expected.not_to be_nil }
end
end
+
+ describe '#present' do
+ subject { generic_commit_status.present }
+
+ it { is_expected.to be_a(GenericCommitStatusPresenter) }
+ end
end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 0907d28d33b..f83b52e8975 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -15,7 +15,7 @@ describe Group do
it { is_expected.to have_many(:notification_settings).dependent(:destroy) }
it { is_expected.to have_many(:labels).class_name('GroupLabel') }
it { is_expected.to have_many(:variables).class_name('Ci::GroupVariable') }
- it { is_expected.to have_many(:uploads).dependent(:destroy) }
+ it { is_expected.to have_many(:uploads) }
it { is_expected.to have_one(:chat_team) }
it { is_expected.to have_many(:custom_attributes).class_name('GroupCustomAttribute') }
it { is_expected.to have_many(:badges).class_name('GroupBadge') }
@@ -691,4 +691,12 @@ describe Group do
end
end
end
+
+ context 'with uploads' do
+ it_behaves_like 'model with mounted uploader', true do
+ let(:model_object) { create(:group, :with_avatar) }
+ let(:upload_attribute) { :avatar }
+ let(:uploader_class) { AttachmentUploader }
+ end
+ end
end
diff --git a/spec/models/guest_spec.rb b/spec/models/guest_spec.rb
index 2afdd6751a4..fc30f3056e5 100644
--- a/spec/models/guest_spec.rb
+++ b/spec/models/guest_spec.rb
@@ -1,9 +1,9 @@
require 'spec_helper'
describe Guest do
- let(:public_project) { build_stubbed(:project, :public) }
- let(:private_project) { build_stubbed(:project, :private) }
- let(:internal_project) { build_stubbed(:project, :internal) }
+ set(:public_project) { create(:project, :public) }
+ set(:private_project) { create(:project, :private) }
+ set(:internal_project) { create(:project, :internal) }
describe '.can_pull?' do
context 'when project is private' do
diff --git a/spec/models/project_services/gemnasium_service_spec.rb b/spec/models/project_services/gemnasium_service_spec.rb
index 4c61bc0af95..6e417323e40 100644
--- a/spec/models/project_services/gemnasium_service_spec.rb
+++ b/spec/models/project_services/gemnasium_service_spec.rb
@@ -26,24 +26,49 @@ describe GemnasiumService do
end
end
+ describe "deprecated?" do
+ let(:project) { create(:project, :repository) }
+ let(:gemnasium_service) { described_class.new }
+
+ before do
+ allow(gemnasium_service).to receive_messages(
+ project_id: project.id,
+ project: project,
+ service_hook: true,
+ token: 'verySecret',
+ api_key: 'GemnasiumUserApiKey'
+ )
+ end
+
+ it "is true" do
+ expect(gemnasium_service.deprecated?).to be true
+ end
+
+ it "can't create a new service" do
+ expect(gemnasium_service.save).to be false
+ expect(gemnasium_service.errors[:base].first)
+ .to eq('Gemnasium has been acquired by GitLab in January 2018. Since May 15, 2018, the service provided by Gemnasium is no longer available.')
+ end
+ end
+
describe "Execute" do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
+ let(:gemnasium_service) { described_class.new }
+ let(:sample_data) { Gitlab::DataBuilder::Push.build_sample(project, user) }
before do
- @gemnasium_service = described_class.new
- allow(@gemnasium_service).to receive_messages(
+ allow(gemnasium_service).to receive_messages(
project_id: project.id,
project: project,
service_hook: true,
token: 'verySecret',
api_key: 'GemnasiumUserApiKey'
)
- @sample_data = Gitlab::DataBuilder::Push.build_sample(project, user)
end
it "calls Gemnasium service" do
expect(Gemnasium::GitlabService).to receive(:execute).with(an_instance_of(Hash)).once
- @gemnasium_service.execute(@sample_data)
+ gemnasium_service.execute(sample_data)
end
end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 5b452f17979..af2240f4f89 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -76,7 +76,7 @@ describe Project do
it { is_expected.to have_many(:project_group_links) }
it { is_expected.to have_many(:notification_settings).dependent(:delete_all) }
it { is_expected.to have_many(:forks).through(:forked_project_links) }
- it { is_expected.to have_many(:uploads).dependent(:destroy) }
+ it { is_expected.to have_many(:uploads) }
it { is_expected.to have_many(:pipeline_schedules) }
it { is_expected.to have_many(:members_and_requesters) }
it { is_expected.to have_many(:clusters) }
@@ -469,6 +469,34 @@ describe Project do
end
end
+ describe "#readme_url" do
+ let(:project) { create(:project, :repository, path: "somewhere") }
+
+ context 'with a non-existing repository' do
+ it 'returns nil' do
+ allow(project.repository).to receive(:tree).with(:head).and_return(nil)
+
+ expect(project.readme_url).to be_nil
+ end
+ end
+
+ context 'with an existing repository' do
+ context 'when no README exists' do
+ it 'returns nil' do
+ allow_any_instance_of(Tree).to receive(:readme).and_return(nil)
+
+ expect(project.readme_url).to be_nil
+ end
+ end
+
+ context 'when a README exists' do
+ it 'returns the README' do
+ expect(project.readme_url).to eql("#{Gitlab.config.gitlab.url}/#{project.namespace.full_path}/somewhere/blob/master/README.md")
+ end
+ end
+ end
+ end
+
describe "#new_issuable_address" do
let(:project) { create(:project, path: "somewhere") }
let(:user) { create(:user) }
@@ -3739,4 +3767,12 @@ describe Project do
it { is_expected.to be_nil }
end
end
+
+ context 'with uploads' do
+ it_behaves_like 'model with mounted uploader', true do
+ let(:model_object) { create(:project, :with_avatar) }
+ let(:upload_attribute) { :avatar }
+ let(:uploader_class) { AttachmentUploader }
+ end
+ end
end
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index a7755a505d8..0ccf55bd895 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -671,7 +671,7 @@ describe Repository do
end
end
- describe "search_files_by_content" do
+ shared_examples "search_files_by_content" do
let(:results) { repository.search_files_by_content('feature', 'master') }
subject { results }
@@ -718,7 +718,7 @@ describe Repository do
end
end
- describe "search_files_by_name" do
+ shared_examples "search_files_by_name" do
let(:results) { repository.search_files_by_name('files', 'master') }
it 'returns result' do
@@ -758,6 +758,16 @@ describe Repository do
end
end
+ describe 'with gitaly enabled' do
+ it_behaves_like 'search_files_by_content'
+ it_behaves_like 'search_files_by_name'
+ end
+
+ describe 'with gitaly disabled', :disable_gitaly do
+ it_behaves_like 'search_files_by_content'
+ it_behaves_like 'search_files_by_name'
+ end
+
describe '#async_remove_remote' do
before do
masterrev = repository.find_branch('master').dereferenced_target
@@ -990,65 +1000,25 @@ describe Repository do
subject { repository.add_branch(user, branch_name, target) }
- context 'with Gitaly enabled' do
- it "calls Gitaly's OperationService" do
- expect_any_instance_of(Gitlab::GitalyClient::OperationService)
- .to receive(:user_create_branch).with(branch_name, user, target)
- .and_return(nil)
-
- subject
- end
-
- it 'creates_the_branch' do
- expect(subject.name).to eq(branch_name)
- expect(repository.find_branch(branch_name)).not_to be_nil
- end
+ it "calls Gitaly's OperationService" do
+ expect_any_instance_of(Gitlab::GitalyClient::OperationService)
+ .to receive(:user_create_branch).with(branch_name, user, target)
+ .and_return(nil)
- context 'with a non-existing target' do
- let(:target) { 'fake-target' }
-
- it "returns false and doesn't create the branch" do
- expect(subject).to be(false)
- expect(repository.find_branch(branch_name)).to be_nil
- end
- end
+ subject
end
- context 'with Gitaly disabled', :disable_gitaly do
- context 'when pre hooks were successful' do
- it 'runs without errors' do
- hook = double(trigger: [true, nil])
- expect(Gitlab::Git::Hook).to receive(:new).exactly(3).times.and_return(hook)
-
- expect { subject }.not_to raise_error
- end
-
- it 'creates the branch' do
- allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([true, nil])
-
- expect(subject.name).to eq(branch_name)
- end
-
- it 'calls the after_create_branch hook' do
- expect(repository).to receive(:after_create_branch)
-
- subject
- end
- end
-
- context 'when pre hooks failed' do
- it 'gets an error' do
- allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([false, ''])
-
- expect { subject }.to raise_error(Gitlab::Git::HooksService::PreReceiveError)
- end
+ it 'creates the branch' do
+ expect(subject.name).to eq(branch_name)
+ expect(repository.find_branch(branch_name)).not_to be_nil
+ end
- it 'does not create the branch' do
- allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([false, ''])
+ context 'with a non-existing target' do
+ let(:target) { 'fake-target' }
- expect { subject }.to raise_error(Gitlab::Git::HooksService::PreReceiveError)
- expect(repository.find_branch(branch_name)).to be_nil
- end
+ it "returns false and doesn't create the branch" do
+ expect(subject).to be(false)
+ expect(repository.find_branch(branch_name)).to be_nil
end
end
end
@@ -2061,27 +2031,27 @@ describe Repository do
describe '#xcode_project?' do
before do
- allow(repository).to receive(:tree).with(:head).and_return(double(:tree, blobs: [blob]))
+ allow(repository).to receive(:tree).with(:head).and_return(double(:tree, trees: [tree]))
end
- context 'when the root contains a *.xcodeproj file' do
- let(:blob) { double(:blob, path: 'Foo.xcodeproj') }
+ context 'when the root contains a *.xcodeproj directory' do
+ let(:tree) { double(:tree, path: 'Foo.xcodeproj') }
it 'returns true' do
expect(repository.xcode_project?).to be_truthy
end
end
- context 'when the root contains a *.xcworkspace file' do
- let(:blob) { double(:blob, path: 'Foo.xcworkspace') }
+ context 'when the root contains a *.xcworkspace directory' do
+ let(:tree) { double(:tree, path: 'Foo.xcworkspace') }
it 'returns true' do
expect(repository.xcode_project?).to be_truthy
end
end
- context 'when the root contains no XCode config file' do
- let(:blob) { double(:blob, path: 'subdir/Foo.xcworkspace') }
+ context 'when the root contains no Xcode config directory' do
+ let(:tree) { double(:tree, path: 'Foo') }
it 'returns false' do
expect(repository.xcode_project?).to be_falsey
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index ad094b3ed48..6a2f4a39f09 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -39,7 +39,7 @@ describe User do
it { is_expected.to have_many(:builds).dependent(:nullify) }
it { is_expected.to have_many(:pipelines).dependent(:nullify) }
it { is_expected.to have_many(:chat_names).dependent(:destroy) }
- it { is_expected.to have_many(:uploads).dependent(:destroy) }
+ it { is_expected.to have_many(:uploads) }
it { is_expected.to have_many(:reported_abuse_reports).dependent(:destroy).class_name('AbuseReport') }
it { is_expected.to have_many(:custom_attributes).class_name('UserCustomAttribute') }
@@ -393,24 +393,6 @@ describe User do
end
describe 'after commit hook' do
- describe '.update_invalid_gpg_signatures' do
- let(:user) do
- create(:user, email: 'tula.torphy@abshire.ca').tap do |user|
- user.skip_reconfirmation!
- end
- end
-
- it 'does nothing when the name is updated' do
- expect(user).not_to receive(:update_invalid_gpg_signatures)
- user.update_attributes!(name: 'Bette')
- end
-
- it 'synchronizes the gpg keys when the email is updated' do
- expect(user).to receive(:update_invalid_gpg_signatures).at_most(:twice)
- user.update_attributes!(email: 'shawnee.ritchie@denesik.com')
- end
- end
-
describe '#update_emails_with_primary_email' do
before do
@user = create(:user, email: 'primary@example.com').tap do |user|
@@ -450,6 +432,76 @@ describe User do
expect(@user.emails.first.confirmed_at).not_to eq nil
end
end
+
+ describe '#update_notification_email' do
+ # Regression: https://gitlab.com/gitlab-org/gitlab-ce/issues/22846
+ context 'when changing :email' do
+ let(:user) { create(:user) }
+ let(:new_email) { 'new-email@example.com' }
+
+ it 'sets :unconfirmed_email' do
+ expect do
+ user.tap { |u| u.update!(email: new_email) }.reload
+ end.to change(user, :unconfirmed_email).to(new_email)
+ end
+
+ it 'does not change :notification_email' do
+ expect do
+ user.tap { |u| u.update!(email: new_email) }.reload
+ end.not_to change(user, :notification_email)
+ end
+
+ it 'updates :notification_email to the new email once confirmed' do
+ user.update!(email: new_email)
+
+ expect do
+ user.tap(&:confirm).reload
+ end.to change(user, :notification_email).to eq(new_email)
+ end
+
+ context 'and :notification_email is set to a secondary email' do
+ let!(:email_attrs) { attributes_for(:email, :confirmed, user: user) }
+ let(:secondary) { create(:email, :confirmed, email: 'secondary@example.com', user: user) }
+
+ before do
+ user.emails.create(email_attrs)
+ user.tap { |u| u.update!(notification_email: email_attrs[:email]) }.reload
+ end
+
+ it 'does not change :notification_email to :email' do
+ expect do
+ user.tap { |u| u.update!(email: new_email) }.reload
+ end.not_to change(user, :notification_email)
+ end
+
+ it 'does not change :notification_email to :email once confirmed' do
+ user.update!(email: new_email)
+
+ expect do
+ user.tap(&:confirm).reload
+ end.not_to change(user, :notification_email)
+ end
+ end
+ end
+ end
+
+ describe '#update_invalid_gpg_signatures' do
+ let(:user) do
+ create(:user, email: 'tula.torphy@abshire.ca').tap do |user|
+ user.skip_reconfirmation!
+ end
+ end
+
+ it 'does nothing when the name is updated' do
+ expect(user).not_to receive(:update_invalid_gpg_signatures)
+ user.update_attributes!(name: 'Bette')
+ end
+
+ it 'synchronizes the gpg keys when the email is updated' do
+ expect(user).to receive(:update_invalid_gpg_signatures).at_most(:twice)
+ user.update_attributes!(email: 'shawnee.ritchie@denesik.com')
+ end
+ end
end
describe '#update_tracked_fields!', :clean_gitlab_redis_shared_state do
@@ -1223,6 +1275,24 @@ describe User do
end
end
+ describe '#accept_pending_invitations!' do
+ let(:user) { create(:user, email: 'user@email.com') }
+ let!(:project_member_invite) { create(:project_member, :invited, invite_email: user.email) }
+ let!(:group_member_invite) { create(:group_member, :invited, invite_email: user.email) }
+ let!(:external_project_member_invite) { create(:project_member, :invited, invite_email: 'external@email.com') }
+ let!(:external_group_member_invite) { create(:group_member, :invited, invite_email: 'external@email.com') }
+
+ it "accepts all the user's pending member invitations and returns the accepted members" do
+ accepted_members = user.accept_pending_invitations!
+
+ expect(accepted_members).to match_array([project_member_invite, group_member_invite])
+ expect(group_member_invite.reload).not_to be_invite
+ expect(project_member_invite.reload).not_to be_invite
+ expect(external_project_member_invite.reload).to be_invite
+ expect(external_group_member_invite.reload).to be_invite
+ end
+ end
+
describe '#all_emails' do
let(:user) { create(:user) }
@@ -1786,28 +1856,54 @@ describe User do
end
end
- describe '#ci_authorized_runners' do
+ describe '#ci_owned_runners' do
let(:user) { create(:user) }
- let(:runner) { create(:ci_runner) }
+ let(:runner_1) { create(:ci_runner) }
+ let(:runner_2) { create(:ci_runner) }
- before do
- project.runners << runner
- end
-
- context 'without any projects' do
- let(:project) { create(:project) }
+ context 'without any projects nor groups' do
+ let!(:project) { create(:project, runners: [runner_1]) }
+ let!(:group) { create(:group) }
it 'does not load' do
- expect(user.ci_authorized_runners).to be_empty
+ expect(user.ci_owned_runners).to be_empty
end
end
context 'with personal projects runners' do
let(:namespace) { create(:namespace, owner: user) }
- let(:project) { create(:project, namespace: namespace) }
+ let!(:project) { create(:project, namespace: namespace, runners: [runner_1]) }
+
+ it 'loads' do
+ expect(user.ci_owned_runners).to contain_exactly(runner_1)
+ end
+ end
+
+ context 'with personal group runner' do
+ let!(:project) { create(:project, runners: [runner_1]) }
+ let!(:group) do
+ create(:group, runners: [runner_2]).tap do |group|
+ group.add_owner(user)
+ end
+ end
+
+ it 'loads' do
+ expect(user.ci_owned_runners).to contain_exactly(runner_2)
+ end
+ end
+
+ context 'with personal project and group runner' do
+ let(:namespace) { create(:namespace, owner: user) }
+ let!(:project) { create(:project, namespace: namespace, runners: [runner_1]) }
+
+ let!(:group) do
+ create(:group, runners: [runner_2]).tap do |group|
+ group.add_owner(user)
+ end
+ end
it 'loads' do
- expect(user.ci_authorized_runners).to contain_exactly(runner)
+ expect(user.ci_owned_runners).to contain_exactly(runner_1, runner_2)
end
end
@@ -1818,7 +1914,7 @@ describe User do
end
it 'loads' do
- expect(user.ci_authorized_runners).to contain_exactly(runner)
+ expect(user.ci_owned_runners).to contain_exactly(runner_1)
end
end
@@ -1828,14 +1924,28 @@ describe User do
end
it 'does not load' do
- expect(user.ci_authorized_runners).to be_empty
+ expect(user.ci_owned_runners).to be_empty
end
end
end
context 'with groups projects runners' do
let(:group) { create(:group) }
- let(:project) { create(:project, group: group) }
+ let!(:project) { create(:project, group: group, runners: [runner_1]) }
+
+ def add_user(access)
+ group.add_user(user, access)
+ end
+
+ it_behaves_like :member
+ end
+
+ context 'with groups runners' do
+ let!(:group) do
+ create(:group, runners: [runner_1]).tap do |group|
+ group.add_owner(user)
+ end
+ end
def add_user(access)
group.add_user(user, access)
@@ -1845,7 +1955,7 @@ describe User do
end
context 'with other projects runners' do
- let(:project) { create(:project) }
+ let!(:project) { create(:project, runners: [runner_1]) }
def add_user(access)
project.add_role(user, access)
@@ -1858,7 +1968,7 @@ describe User do
let(:group) { create(:group) }
let(:another_user) { create(:user) }
let(:subgroup) { create(:group, parent: group) }
- let(:project) { create(:project, group: subgroup) }
+ let!(:project) { create(:project, group: subgroup, runners: [runner_1]) }
def add_user(access)
group.add_user(user, access)
@@ -2755,4 +2865,26 @@ describe User do
it { is_expected.to be_truthy }
end
end
+
+ describe '#increment_failed_attempts!' do
+ subject(:user) { create(:user, failed_attempts: 0) }
+
+ it 'logs failed sign-in attempts' do
+ expect { user.increment_failed_attempts! }.to change(user, :failed_attempts).from(0).to(1)
+ end
+
+ it 'does not log failed sign-in attempts when in a GitLab read-only instance' do
+ allow(Gitlab::Database).to receive(:read_only?) { true }
+
+ expect { user.increment_failed_attempts! }.not_to change(user, :failed_attempts)
+ end
+ end
+
+ context 'with uploads' do
+ it_behaves_like 'model with mounted uploader', false do
+ let(:model_object) { create(:user, :with_avatar) }
+ let(:upload_attribute) { :avatar }
+ let(:uploader_class) { AttachmentUploader }
+ end
+ end
end
diff --git a/spec/policies/ci/build_policy_spec.rb b/spec/policies/ci/build_policy_spec.rb
index 41cf2ef7225..9ca156deaa0 100644
--- a/spec/policies/ci/build_policy_spec.rb
+++ b/spec/policies/ci/build_policy_spec.rb
@@ -94,6 +94,19 @@ describe Ci::BuildPolicy do
end
end
end
+
+ context 'when maintainer is allowed to push to pipeline branch' do
+ let(:project) { create(:project, :public) }
+ let(:owner) { user }
+
+ it 'enables update_build if user is maintainer' do
+ allow_any_instance_of(Project).to receive(:empty_repo?).and_return(false)
+ allow_any_instance_of(Project).to receive(:branch_allows_maintainer_push?).and_return(true)
+
+ expect(policy).to be_allowed :update_build
+ expect(policy).to be_allowed :update_commit_status
+ end
+ end
end
describe 'rules for protected ref' do
diff --git a/spec/policies/ci/pipeline_policy_spec.rb b/spec/policies/ci/pipeline_policy_spec.rb
index 48a8064c5fc..a5e509cfa0f 100644
--- a/spec/policies/ci/pipeline_policy_spec.rb
+++ b/spec/policies/ci/pipeline_policy_spec.rb
@@ -62,5 +62,17 @@ describe Ci::PipelinePolicy, :models do
end
end
end
+
+ context 'when maintainer is allowed to push to pipeline branch' do
+ let(:project) { create(:project, :public) }
+ let(:owner) { user }
+
+ it 'enables update_pipeline if user is maintainer' do
+ allow_any_instance_of(Project).to receive(:empty_repo?).and_return(false)
+ allow_any_instance_of(Project).to receive(:branch_allows_maintainer_push?).and_return(true)
+
+ expect(policy).to be_allowed :update_pipeline
+ end
+ end
end
end
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index 8b9c4ac0b4b..6609f5f7afd 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -404,7 +404,7 @@ describe ProjectPolicy do
)
end
let(:maintainer_abilities) do
- %w(create_build update_build create_pipeline update_pipeline)
+ %w(create_build create_pipeline)
end
subject { described_class.new(user, project) }
diff --git a/spec/presenters/ci/build_presenter_spec.rb b/spec/presenters/ci/build_presenter_spec.rb
index 4bc005df2fc..efd175247b5 100644
--- a/spec/presenters/ci/build_presenter_spec.rb
+++ b/spec/presenters/ci/build_presenter_spec.rb
@@ -10,7 +10,7 @@ describe Ci::BuildPresenter do
end
it 'inherits from Gitlab::View::Presenter::Delegated' do
- expect(described_class.superclass).to eq(Gitlab::View::Presenter::Delegated)
+ expect(described_class.ancestors).to include(Gitlab::View::Presenter::Delegated)
end
describe '#initialize' do
diff --git a/spec/presenters/commit_status_presenter_spec.rb b/spec/presenters/commit_status_presenter_spec.rb
new file mode 100644
index 00000000000..f81ee44e371
--- /dev/null
+++ b/spec/presenters/commit_status_presenter_spec.rb
@@ -0,0 +1,15 @@
+require 'spec_helper'
+
+describe CommitStatusPresenter do
+ let(:project) { create(:project) }
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
+
+ subject(:presenter) do
+ described_class.new(build)
+ end
+
+ it 'inherits from Gitlab::View::Presenter::Delegated' do
+ expect(described_class.superclass).to eq(Gitlab::View::Presenter::Delegated)
+ end
+end
diff --git a/spec/requests/api/environments_spec.rb b/spec/requests/api/environments_spec.rb
index 53d48a91007..fdddca5d0ef 100644
--- a/spec/requests/api/environments_spec.rb
+++ b/spec/requests/api/environments_spec.rb
@@ -15,7 +15,7 @@ describe API::Environments do
it 'returns project environments' do
project_data_keys = %w(
id description default_branch tag_list
- ssh_url_to_repo http_url_to_repo web_url
+ ssh_url_to_repo http_url_to_repo web_url readme_url
name name_with_namespace
path path_with_namespace
star_count forks_count
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index bb0034e3237..7d923932309 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -738,13 +738,16 @@ describe API::Groups do
describe "DELETE /groups/:id" do
context "when authenticated as user" do
it "removes group" do
- delete api("/groups/#{group1.id}", user1)
+ Sidekiq::Testing.fake! do
+ expect { delete api("/groups/#{group1.id}", user1) }.to change(GroupDestroyWorker.jobs, :size).by(1)
+ end
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(202)
end
it_behaves_like '412 response' do
let(:request) { api("/groups/#{group1.id}", user1) }
+ let(:success_status) { 202 }
end
it "does not remove a group if not an owner" do
@@ -773,7 +776,7 @@ describe API::Groups do
it "removes any existing group" do
delete api("/groups/#{group2.id}", admin)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(202)
end
it "does not remove a non existing group" do
diff --git a/spec/requests/api/issues_spec.rb b/spec/requests/api/issues_spec.rb
index 90f9c4ad214..3106083293f 100644
--- a/spec/requests/api/issues_spec.rb
+++ b/spec/requests/api/issues_spec.rb
@@ -64,12 +64,32 @@ describe API::Issues do
describe "GET /issues" do
context "when unauthenticated" do
- it "returns authentication error" do
+ it "returns an array of all issues" do
+ get api("/issues"), scope: 'all'
+
+ expect(response).to have_http_status(200)
+ expect(json_response).to be_an Array
+ end
+
+ it "returns authentication error without any scope" do
get api("/issues")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_http_status(401)
+ end
+
+ it "returns authentication error when scope is assigned-to-me" do
+ get api("/issues"), scope: 'assigned-to-me'
+
+ expect(response).to have_http_status(401)
+ end
+
+ it "returns authentication error when scope is created-by-me" do
+ get api("/issues"), scope: 'created-by-me'
+
+ expect(response).to have_http_status(401)
end
end
+
context "when authenticated" do
let(:first_issue) { json_response.first }
@@ -106,6 +126,15 @@ describe API::Issues do
it 'returns issues assigned to me' do
issue2 = create(:issue, assignees: [user2], project: project)
+ get api('/issues', user2), scope: 'assigned_to_me'
+
+ expect_paginated_array_response(size: 1)
+ expect(first_issue['id']).to eq(issue2.id)
+ end
+
+ it 'returns issues assigned to me (kebab-case)' do
+ issue2 = create(:issue, assignees: [user2], project: project)
+
get api('/issues', user2), scope: 'assigned-to-me'
expect_paginated_array_response(size: 1)
@@ -379,9 +408,6 @@ describe API::Issues do
end
let!(:group_note) { create(:note_on_issue, author: user, project: group_project, noteable: group_issue) }
- before do
- group_project.add_reporter(user)
- end
let(:base_url) { "/groups/#{group.id}/issues" }
context 'when group has subgroups', :nested_groups do
@@ -408,178 +434,201 @@ describe API::Issues do
end
end
- it 'returns all group issues (including opened and closed)' do
- get api(base_url, admin)
+ context 'when user is unauthenticated' do
+ it 'lists all issues in public projects' do
+ get api(base_url)
- expect_paginated_array_response(size: 3)
+ expect_paginated_array_response(size: 2)
+ end
end
- it 'returns group issues without confidential issues for non project members' do
- get api("#{base_url}?state=opened", non_member)
+ context 'when user is a group member' do
+ before do
+ group_project.add_reporter(user)
+ end
- expect_paginated_array_response(size: 1)
- expect(json_response.first['title']).to eq(group_issue.title)
- end
+ it 'returns all group issues (including opened and closed)' do
+ get api(base_url, admin)
- it 'returns group confidential issues for author' do
- get api("#{base_url}?state=opened", author)
+ expect_paginated_array_response(size: 3)
+ end
- expect_paginated_array_response(size: 2)
- end
+ it 'returns group issues without confidential issues for non project members' do
+ get api("#{base_url}?state=opened", non_member)
- it 'returns group confidential issues for assignee' do
- get api("#{base_url}?state=opened", assignee)
+ expect_paginated_array_response(size: 1)
+ expect(json_response.first['title']).to eq(group_issue.title)
+ end
- expect_paginated_array_response(size: 2)
- end
+ it 'returns group confidential issues for author' do
+ get api("#{base_url}?state=opened", author)
- it 'returns group issues with confidential issues for project members' do
- get api("#{base_url}?state=opened", user)
+ expect_paginated_array_response(size: 2)
+ end
- expect_paginated_array_response(size: 2)
- end
+ it 'returns group confidential issues for assignee' do
+ get api("#{base_url}?state=opened", assignee)
- it 'returns group confidential issues for admin' do
- get api("#{base_url}?state=opened", admin)
+ expect_paginated_array_response(size: 2)
+ end
- expect_paginated_array_response(size: 2)
- end
+ it 'returns group issues with confidential issues for project members' do
+ get api("#{base_url}?state=opened", user)
- it 'returns an array of labeled group issues' do
- get api("#{base_url}?labels=#{group_label.title}", user)
+ expect_paginated_array_response(size: 2)
+ end
- expect_paginated_array_response(size: 1)
- expect(json_response.first['labels']).to eq([group_label.title])
- end
+ it 'returns group confidential issues for admin' do
+ get api("#{base_url}?state=opened", admin)
- it 'returns an array of labeled group issues where all labels match' do
- get api("#{base_url}?labels=#{group_label.title},foo,bar", user)
+ expect_paginated_array_response(size: 2)
+ end
- expect_paginated_array_response(size: 0)
- end
+ it 'returns an array of labeled group issues' do
+ get api("#{base_url}?labels=#{group_label.title}", user)
- it 'returns issues matching given search string for title' do
- get api("#{base_url}?search=#{group_issue.title}", user)
+ expect_paginated_array_response(size: 1)
+ expect(json_response.first['labels']).to eq([group_label.title])
+ end
- expect_paginated_array_response(size: 1)
- expect(json_response.first['id']).to eq(group_issue.id)
- end
+ it 'returns an array of labeled group issues where all labels match' do
+ get api("#{base_url}?labels=#{group_label.title},foo,bar", user)
- it 'returns issues matching given search string for description' do
- get api("#{base_url}?search=#{group_issue.description}", user)
+ expect_paginated_array_response(size: 0)
+ end
- expect_paginated_array_response(size: 1)
- expect(json_response.first['id']).to eq(group_issue.id)
- end
+ it 'returns issues matching given search string for title' do
+ get api("#{base_url}?search=#{group_issue.title}", user)
- it 'returns an array of labeled issues when all labels matches' do
- label_b = create(:label, title: 'foo', project: group_project)
- label_c = create(:label, title: 'bar', project: group_project)
+ expect_paginated_array_response(size: 1)
+ expect(json_response.first['id']).to eq(group_issue.id)
+ end
- create(:label_link, label: label_b, target: group_issue)
- create(:label_link, label: label_c, target: group_issue)
+ it 'returns issues matching given search string for description' do
+ get api("#{base_url}?search=#{group_issue.description}", user)
- get api("#{base_url}", user), labels: "#{group_label.title},#{label_b.title},#{label_c.title}"
+ expect_paginated_array_response(size: 1)
+ expect(json_response.first['id']).to eq(group_issue.id)
+ end
- expect_paginated_array_response(size: 1)
- expect(json_response.first['labels']).to eq([label_c.title, label_b.title, group_label.title])
- end
+ it 'returns an array of labeled issues when all labels matches' do
+ label_b = create(:label, title: 'foo', project: group_project)
+ label_c = create(:label, title: 'bar', project: group_project)
- it 'returns an array of issues found by iids' do
- get api(base_url, user), iids: [group_issue.iid]
+ create(:label_link, label: label_b, target: group_issue)
+ create(:label_link, label: label_c, target: group_issue)
- expect_paginated_array_response(size: 1)
- expect(json_response.first['id']).to eq(group_issue.id)
- end
+ get api("#{base_url}", user), labels: "#{group_label.title},#{label_b.title},#{label_c.title}"
- it 'returns an empty array if iid does not exist' do
- get api(base_url, user), iids: [99999]
+ expect_paginated_array_response(size: 1)
+ expect(json_response.first['labels']).to eq([label_c.title, label_b.title, group_label.title])
+ end
- expect_paginated_array_response(size: 0)
- end
+ it 'returns an array of issues found by iids' do
+ get api(base_url, user), iids: [group_issue.iid]
- it 'returns an empty array if no group issue matches labels' do
- get api("#{base_url}?labels=foo,bar", user)
+ expect_paginated_array_response(size: 1)
+ expect(json_response.first['id']).to eq(group_issue.id)
+ end
- expect_paginated_array_response(size: 0)
- end
+ it 'returns an empty array if iid does not exist' do
+ get api(base_url, user), iids: [99999]
- it 'returns an empty array if no issue matches milestone' do
- get api("#{base_url}?milestone=#{group_empty_milestone.title}", user)
+ expect_paginated_array_response(size: 0)
+ end
- expect_paginated_array_response(size: 0)
- end
+ it 'returns an empty array if no group issue matches labels' do
+ get api("#{base_url}?labels=foo,bar", user)
- it 'returns an empty array if milestone does not exist' do
- get api("#{base_url}?milestone=foo", user)
+ expect_paginated_array_response(size: 0)
+ end
- expect_paginated_array_response(size: 0)
- end
+ it 'returns an empty array if no issue matches milestone' do
+ get api("#{base_url}?milestone=#{group_empty_milestone.title}", user)
- it 'returns an array of issues in given milestone' do
- get api("#{base_url}?state=opened&milestone=#{group_milestone.title}", user)
+ expect_paginated_array_response(size: 0)
+ end
- expect_paginated_array_response(size: 1)
- expect(json_response.first['id']).to eq(group_issue.id)
- end
+ it 'returns an empty array if milestone does not exist' do
+ get api("#{base_url}?milestone=foo", user)
- it 'returns an array of issues matching state in milestone' do
- get api("#{base_url}?milestone=#{group_milestone.title}"\
- '&state=closed', user)
+ expect_paginated_array_response(size: 0)
+ end
- expect_paginated_array_response(size: 1)
- expect(json_response.first['id']).to eq(group_closed_issue.id)
- end
+ it 'returns an array of issues in given milestone' do
+ get api("#{base_url}?state=opened&milestone=#{group_milestone.title}", user)
- it 'returns an array of issues with no milestone' do
- get api("#{base_url}?milestone=#{no_milestone_title}", user)
+ expect_paginated_array_response(size: 1)
+ expect(json_response.first['id']).to eq(group_issue.id)
+ end
- expect(response).to have_gitlab_http_status(200)
+ it 'returns an array of issues matching state in milestone' do
+ get api("#{base_url}?milestone=#{group_milestone.title}"\
+ '&state=closed', user)
- expect_paginated_array_response(size: 1)
- expect(json_response.first['id']).to eq(group_confidential_issue.id)
- end
+ expect_paginated_array_response(size: 1)
+ expect(json_response.first['id']).to eq(group_closed_issue.id)
+ end
- it 'sorts by created_at descending by default' do
- get api(base_url, user)
+ it 'returns an array of issues with no milestone' do
+ get api("#{base_url}?milestone=#{no_milestone_title}", user)
- response_dates = json_response.map { |issue| issue['created_at'] }
+ expect(response).to have_gitlab_http_status(200)
- expect_paginated_array_response(size: 3)
- expect(response_dates).to eq(response_dates.sort.reverse)
- end
+ expect_paginated_array_response(size: 1)
+ expect(json_response.first['id']).to eq(group_confidential_issue.id)
+ end
- it 'sorts ascending when requested' do
- get api("#{base_url}?sort=asc", user)
+ it 'sorts by created_at descending by default' do
+ get api(base_url, user)
- response_dates = json_response.map { |issue| issue['created_at'] }
+ response_dates = json_response.map { |issue| issue['created_at'] }
- expect_paginated_array_response(size: 3)
- expect(response_dates).to eq(response_dates.sort)
- end
+ expect_paginated_array_response(size: 3)
+ expect(response_dates).to eq(response_dates.sort.reverse)
+ end
- it 'sorts by updated_at descending when requested' do
- get api("#{base_url}?order_by=updated_at", user)
+ it 'sorts ascending when requested' do
+ get api("#{base_url}?sort=asc", user)
- response_dates = json_response.map { |issue| issue['updated_at'] }
+ response_dates = json_response.map { |issue| issue['created_at'] }
- expect_paginated_array_response(size: 3)
- expect(response_dates).to eq(response_dates.sort.reverse)
- end
+ expect_paginated_array_response(size: 3)
+ expect(response_dates).to eq(response_dates.sort)
+ end
- it 'sorts by updated_at ascending when requested' do
- get api("#{base_url}?order_by=updated_at&sort=asc", user)
+ it 'sorts by updated_at descending when requested' do
+ get api("#{base_url}?order_by=updated_at", user)
- response_dates = json_response.map { |issue| issue['updated_at'] }
+ response_dates = json_response.map { |issue| issue['updated_at'] }
- expect_paginated_array_response(size: 3)
- expect(response_dates).to eq(response_dates.sort)
+ expect_paginated_array_response(size: 3)
+ expect(response_dates).to eq(response_dates.sort.reverse)
+ end
+
+ it 'sorts by updated_at ascending when requested' do
+ get api("#{base_url}?order_by=updated_at&sort=asc", user)
+
+ response_dates = json_response.map { |issue| issue['updated_at'] }
+
+ expect_paginated_array_response(size: 3)
+ expect(response_dates).to eq(response_dates.sort)
+ end
end
end
describe "GET /projects/:id/issues" do
let(:base_url) { "/projects/#{project.id}" }
+ context 'when unauthenticated' do
+ it 'returns public project issues' do
+ get api("/projects/#{project.id}/issues")
+
+ expect_paginated_array_response(size: 2)
+ expect(json_response.first['title']).to eq(issue.title)
+ end
+ end
+
it 'avoids N+1 queries' do
control_count = ActiveRecord::QueryRecorder.new do
get api("/projects/#{project.id}/issues", user)
@@ -789,6 +838,14 @@ describe API::Issues do
end
describe "GET /projects/:id/issues/:issue_iid" do
+ context 'when unauthenticated' do
+ it 'returns public issues' do
+ get api("/projects/#{project.id}/issues/#{issue.iid}")
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+ end
+
it 'exposes known attributes' do
get api("/projects/#{project.id}/issues/#{issue.iid}", user)
@@ -1581,6 +1638,14 @@ describe API::Issues do
create(:merge_requests_closing_issues, issue: issue, merge_request: merge_request)
end
+ context 'when unauthenticated' do
+ it 'returns public project issues' do
+ get api("/projects/#{project.id}/issues/#{issue.iid}/closed_by")
+
+ expect_paginated_array_response(size: 1)
+ end
+ end
+
it 'returns merge requests that will close issue on merge' do
get api("/projects/#{project.id}/issues/#{issue.iid}/closed_by", user)
@@ -1605,6 +1670,14 @@ describe API::Issues do
describe "GET /projects/:id/issues/:issue_iid/user_agent_detail" do
let!(:user_agent_detail) { create(:user_agent_detail, subject: issue) }
+ context 'when unauthenticated' do
+ it "returns unautorized" do
+ get api("/projects/#{project.id}/issues/#{issue.iid}/user_agent_detail")
+
+ expect(response).to have_gitlab_http_status(401)
+ end
+ end
+
it 'exposes known attributes' do
get api("/projects/#{project.id}/issues/#{issue.iid}/user_agent_detail", admin)
diff --git a/spec/requests/api/markdown_spec.rb b/spec/requests/api/markdown_spec.rb
new file mode 100644
index 00000000000..a55796cf343
--- /dev/null
+++ b/spec/requests/api/markdown_spec.rb
@@ -0,0 +1,112 @@
+require "spec_helper"
+
+describe API::Markdown do
+ RSpec::Matchers.define_negated_matcher :exclude, :include
+
+ describe "POST /markdown" do
+ let(:user) {} # No-op. It gets overwritten in the contexts below.
+
+ before do
+ post api("/markdown", user), params
+ end
+
+ shared_examples "rendered markdown text without GFM" do
+ it "renders markdown text" do
+ expect(response).to have_http_status(201)
+ expect(response.headers["Content-Type"]).to eq("application/json")
+ expect(json_response).to be_a(Hash)
+ expect(json_response["html"]).to eq("<p>#{text}</p>")
+ end
+ end
+
+ shared_examples "404 Project Not Found" do
+ it "responses with 404 Not Found" do
+ expect(response).to have_http_status(404)
+ expect(response.headers["Content-Type"]).to eq("application/json")
+ expect(json_response).to be_a(Hash)
+ expect(json_response["message"]).to eq("404 Project Not Found")
+ end
+ end
+
+ context "when arguments are invalid" do
+ context "when text is missing" do
+ let(:params) { {} }
+
+ it "responses with 400 Bad Request" do
+ expect(response).to have_http_status(400)
+ expect(response.headers["Content-Type"]).to eq("application/json")
+ expect(json_response).to be_a(Hash)
+ expect(json_response["error"]).to eq("text is missing")
+ end
+ end
+
+ context "when project is not found" do
+ let(:params) { { text: "Hello world!", gfm: true, project: "Dummy project" } }
+
+ it_behaves_like "404 Project Not Found"
+ end
+ end
+
+ context "when arguments are valid" do
+ set(:project) { create(:project) }
+ set(:issue) { create(:issue, project: project) }
+ let(:text) { ":tada: Hello world! :100: #{issue.to_reference}" }
+
+ context "when not using gfm" do
+ context "without project" do
+ let(:params) { { text: text } }
+
+ it_behaves_like "rendered markdown text without GFM"
+ end
+
+ context "with project" do
+ let(:params) { { text: text, project: project.full_path } }
+
+ context "when not authorized" do
+ it_behaves_like "404 Project Not Found"
+ end
+
+ context "when authorized" do
+ let(:user) { project.owner }
+
+ it_behaves_like "rendered markdown text without GFM"
+ end
+ end
+ end
+
+ context "when using gfm" do
+ context "without project" do
+ let(:params) { { text: text, gfm: true } }
+
+ it "renders markdown text" do
+ expect(response).to have_http_status(201)
+ expect(response.headers["Content-Type"]).to eq("application/json")
+ expect(json_response).to be_a(Hash)
+ expect(json_response["html"]).to include("Hello world!")
+ .and include('data-name="tada"')
+ .and include('data-name="100"')
+ .and include("#1")
+ .and exclude("<a href=\"#{IssuesHelper.url_for_issue(issue.iid, project)}\"")
+ .and exclude("#1</a>")
+ end
+ end
+
+ context "with project" do
+ let(:params) { { text: text, gfm: true, project: project.full_path } }
+ let(:user) { project.owner }
+
+ it "renders markdown text" do
+ expect(response).to have_http_status(201)
+ expect(response.headers["Content-Type"]).to eq("application/json")
+ expect(json_response).to be_a(Hash)
+ expect(json_response["html"]).to include("Hello world!")
+ .and include('data-name="tada"')
+ .and include('data-name="100"')
+ .and include("<a href=\"#{IssuesHelper.url_for_issue(issue.iid, project)}\"")
+ .and include("#1</a>")
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index f64623d7018..1eeeb4f1045 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -34,8 +34,7 @@ describe API::MergeRequests do
it 'returns an array of all merge requests' do
get api('/merge_requests', user), scope: 'all'
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
+ expect_paginated_array_response
end
it "returns authentication error without any scope" do
@@ -50,11 +49,23 @@ describe API::MergeRequests do
expect(response).to have_gitlab_http_status(401)
end
+ it "returns authentication error when scope is assigned_to_me" do
+ get api("/merge_requests"), scope: 'assigned_to_me'
+
+ expect(response).to have_gitlab_http_status(401)
+ end
+
it "returns authentication error when scope is created-by-me" do
get api("/merge_requests"), scope: 'created-by-me'
expect(response).to have_gitlab_http_status(401)
end
+
+ it "returns authentication error when scope is created_by_me" do
+ get api("/merge_requests"), scope: 'created_by_me'
+
+ expect(response).to have_gitlab_http_status(401)
+ end
end
context 'when authenticated' do
@@ -62,27 +73,14 @@ describe API::MergeRequests do
let!(:merge_request2) { create(:merge_request, :simple, author: user, assignee: user, source_project: project2, target_project: project2) }
let(:user2) { create(:user) }
- it 'returns an array of all merge requests' do
- get api('/merge_requests', user), scope: :all
-
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.map { |mr| mr['id'] })
- .to contain_exactly(merge_request.id, merge_request_closed.id, merge_request_merged.id, merge_request2.id)
- end
-
- it 'does not return unauthorized merge requests' do
+ it 'returns an array of all merge requests except unauthorized ones' do
private_project = create(:project, :private)
merge_request3 = create(:merge_request, :simple, source_project: private_project, target_project: private_project, source_branch: 'other-branch')
get api('/merge_requests', user), scope: :all
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.map { |mr| mr['id'] })
- .not_to include(merge_request3.id)
+ expect_response_contain_exactly(merge_request2, merge_request_merged, merge_request_closed, merge_request)
+ expect(json_response.map { |mr| mr['id'] }).not_to include(merge_request3.id)
end
it 'returns an array of merge requests created by current user if no scope is given' do
@@ -90,10 +88,7 @@ describe API::MergeRequests do
get api('/merge_requests', user2)
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
- expect(json_response.first['id']).to eq(merge_request3.id)
+ expect_response_ordered_exactly(merge_request3)
end
it 'returns an array of merge requests authored by the given user' do
@@ -101,10 +96,7 @@ describe API::MergeRequests do
get api('/merge_requests', user), author_id: user2.id, scope: :all
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
- expect(json_response.first['id']).to eq(merge_request3.id)
+ expect_response_ordered_exactly(merge_request3)
end
it 'returns an array of merge requests assigned to the given user' do
@@ -112,32 +104,39 @@ describe API::MergeRequests do
get api('/merge_requests', user), assignee_id: user2.id, scope: :all
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
- expect(json_response.first['id']).to eq(merge_request3.id)
+ expect_response_ordered_exactly(merge_request3)
end
it 'returns an array of merge requests assigned to me' do
merge_request3 = create(:merge_request, :simple, author: user, assignee: user2, source_project: project2, target_project: project2, source_branch: 'other-branch')
+ get api('/merge_requests', user2), scope: 'assigned_to_me'
+
+ expect_response_ordered_exactly(merge_request3)
+ end
+
+ it 'returns an array of merge requests assigned to me (kebab-case)' do
+ merge_request3 = create(:merge_request, :simple, author: user, assignee: user2, source_project: project2, target_project: project2, source_branch: 'other-branch')
+
get api('/merge_requests', user2), scope: 'assigned-to-me'
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
- expect(json_response.first['id']).to eq(merge_request3.id)
+ expect_response_ordered_exactly(merge_request3)
end
it 'returns an array of merge requests created by me' do
merge_request3 = create(:merge_request, :simple, author: user2, assignee: user, source_project: project2, target_project: project2, source_branch: 'other-branch')
+ get api('/merge_requests', user2), scope: 'created_by_me'
+
+ expect_response_ordered_exactly(merge_request3)
+ end
+
+ it 'returns an array of merge requests created by me (kebab-case)' do
+ merge_request3 = create(:merge_request, :simple, author: user2, assignee: user, source_project: project2, target_project: project2, source_branch: 'other-branch')
+
get api('/merge_requests', user2), scope: 'created-by-me'
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
- expect(json_response.first['id']).to eq(merge_request3.id)
+ expect_response_ordered_exactly(merge_request3)
end
it 'returns merge requests reacted by the authenticated user by the given emoji' do
@@ -146,19 +145,14 @@ describe API::MergeRequests do
get api('/merge_requests', user2), my_reaction_emoji: award_emoji.name, scope: 'all'
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
- expect(json_response.first['id']).to eq(merge_request3.id)
+ expect_response_ordered_exactly(merge_request3)
end
context 'source_branch param' do
it 'returns merge requests with the given source branch' do
get api('/merge_requests', user), source_branch: merge_request_closed.source_branch, state: 'all'
- expect(json_response.length).to eq(2)
- expect(json_response.map { |mr| mr['id'] })
- .to contain_exactly(merge_request_closed.id, merge_request_merged.id)
+ expect_response_contain_exactly(merge_request_closed, merge_request_merged)
end
end
@@ -166,9 +160,7 @@ describe API::MergeRequests do
it 'returns merge requests with the given target branch' do
get api('/merge_requests', user), target_branch: merge_request_closed.target_branch, state: 'all'
- expect(json_response.length).to eq(2)
- expect(json_response.map { |mr| mr['id'] })
- .to contain_exactly(merge_request_closed.id, merge_request_merged.id)
+ expect_response_contain_exactly(merge_request_closed, merge_request_merged)
end
end
@@ -177,8 +169,7 @@ describe API::MergeRequests do
get api('/merge_requests?created_before=2000-01-02T00:00:00.060Z', user)
- expect(json_response.size).to eq(1)
- expect(json_response.first['id']).to eq(merge_request2.id)
+ expect_response_ordered_exactly(merge_request2)
end
it 'returns merge requests created after a specific date' do
@@ -186,8 +177,7 @@ describe API::MergeRequests do
get api("/merge_requests?created_after=#{merge_request2.created_at}", user)
- expect(json_response.size).to eq(1)
- expect(json_response.first['id']).to eq(merge_request2.id)
+ expect_response_ordered_exactly(merge_request2)
end
it 'returns merge requests updated before a specific date' do
@@ -195,8 +185,7 @@ describe API::MergeRequests do
get api('/merge_requests?updated_before=2000-01-02T00:00:00.060Z', user)
- expect(json_response.size).to eq(1)
- expect(json_response.first['id']).to eq(merge_request2.id)
+ expect_response_ordered_exactly(merge_request2)
end
it 'returns merge requests updated after a specific date' do
@@ -204,8 +193,7 @@ describe API::MergeRequests do
get api("/merge_requests?updated_after=#{merge_request2.updated_at}", user)
- expect(json_response.size).to eq(1)
- expect(json_response.first['id']).to eq(merge_request2.id)
+ expect_response_ordered_exactly(merge_request2)
end
context 'search params' do
@@ -216,15 +204,13 @@ describe API::MergeRequests do
it 'returns merge requests matching given search string for title' do
get api("/merge_requests", user), search: merge_request.title
- expect(json_response.length).to eq(1)
- expect(json_response.first['id']).to eq(merge_request.id)
+ expect_response_ordered_exactly(merge_request)
end
it 'returns merge requests for project matching given search string for description' do
get api("/merge_requests", user), project_id: project.id, search: merge_request.description
- expect(json_response.length).to eq(1)
- expect(json_response.first['id']).to eq(merge_request.id)
+ expect_response_ordered_exactly(merge_request)
end
end
end
@@ -235,8 +221,7 @@ describe API::MergeRequests do
it 'returns merge requests for public projects' do
get api("/projects/#{project.id}/merge_requests")
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
+ expect_paginated_array_response
end
it "returns 404 for non public projects" do
@@ -265,10 +250,7 @@ describe API::MergeRequests do
it "returns an array of all merge_requests" do
get api("/projects/#{project.id}/merge_requests", user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(3)
+ expect_response_ordered_exactly(merge_request_merged, merge_request_closed, merge_request)
expect(json_response.last['title']).to eq(merge_request.title)
expect(json_response.last).to have_key('web_url')
expect(json_response.last['sha']).to eq(merge_request.diff_head_sha)
@@ -286,11 +268,8 @@ describe API::MergeRequests do
it "returns an array of all merge_requests using simple mode" do
get api("/projects/#{project.id}/merge_requests?view=simple", user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
+ expect_response_ordered_exactly(merge_request_merged, merge_request_closed, merge_request)
expect(json_response.last.keys).to match_array(%w(id iid title web_url created_at description project_id state updated_at))
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(3)
expect(json_response.last['iid']).to eq(merge_request.iid)
expect(json_response.last['title']).to eq(merge_request.title)
expect(json_response.last).to have_key('web_url')
@@ -302,51 +281,36 @@ describe API::MergeRequests do
it "returns an array of all merge_requests" do
get api("/projects/#{project.id}/merge_requests?state", user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(3)
+ expect_response_ordered_exactly(merge_request_merged, merge_request_closed, merge_request)
expect(json_response.last['title']).to eq(merge_request.title)
end
it "returns an array of open merge_requests" do
get api("/projects/#{project.id}/merge_requests?state=opened", user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_response_ordered_exactly(merge_request)
expect(json_response.last['title']).to eq(merge_request.title)
end
it "returns an array of closed merge_requests" do
get api("/projects/#{project.id}/merge_requests?state=closed", user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_response_ordered_exactly(merge_request_closed)
expect(json_response.first['title']).to eq(merge_request_closed.title)
end
it "returns an array of merged merge_requests" do
get api("/projects/#{project.id}/merge_requests?state=merged", user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_response_ordered_exactly(merge_request_merged)
expect(json_response.first['title']).to eq(merge_request_merged.title)
end
it 'returns merge_request by "iids" array' do
get api("/projects/#{project.id}/merge_requests", user), iids: [merge_request.iid, merge_request_closed.iid]
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(2)
+ expect_response_ordered_exactly(merge_request_closed, merge_request)
expect(json_response.first['title']).to eq merge_request_closed.title
- expect(json_response.first['id']).to eq merge_request_closed.id
end
it 'matches V4 response schema' do
@@ -359,16 +323,14 @@ describe API::MergeRequests do
it 'returns an empty array if no issue matches milestone' do
get api("/projects/#{project.id}/merge_requests", user), milestone: '1.0.0'
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
+ expect_paginated_array_response
expect(json_response.length).to eq(0)
end
it 'returns an empty array if milestone does not exist' do
get api("/projects/#{project.id}/merge_requests", user), milestone: 'foo'
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
+ expect_paginated_array_response
expect(json_response.length).to eq(0)
end
@@ -382,17 +344,13 @@ describe API::MergeRequests do
it 'returns an array of merge requests matching state in milestone' do
get api("/projects/#{project.id}/merge_requests", user), milestone: '0.9', state: 'closed'
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
- expect(json_response.first['id']).to eq(merge_request_closed.id)
+ expect_response_ordered_exactly(merge_request_closed)
end
it 'returns an array of labeled merge requests' do
get api("/projects/#{project.id}/merge_requests?labels=#{label.title}", user)
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
+ expect_paginated_array_response
expect(json_response.length).to eq(1)
expect(json_response.first['labels']).to eq([label2.title, label.title])
end
@@ -400,16 +358,14 @@ describe API::MergeRequests do
it 'returns an array of labeled merge requests where all labels match' do
get api("/projects/#{project.id}/merge_requests?labels=#{label.title},foo,bar", user)
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
+ expect_paginated_array_response
expect(json_response.length).to eq(0)
end
it 'returns an empty array if no merge request matches labels' do
get api("/projects/#{project.id}/merge_requests?labels=foo,bar", user)
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
+ expect_paginated_array_response
expect(json_response.length).to eq(0)
end
@@ -427,13 +383,12 @@ describe API::MergeRequests do
get api("/projects/#{project.id}/merge_requests?labels=#{bug_label.title}&milestone=#{milestone1.title}&state=merged", user)
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
- expect(json_response.first['id']).to eq(mr2.id)
+ expect_response_ordered_exactly(mr2)
end
context "with ordering" do
+ let(:merge_requests) { [merge_request_merged, merge_request_closed, merge_request] }
+
before do
@mr_later = mr_with_later_created_and_updated_at_time
@mr_earlier = mr_with_earlier_created_and_updated_at_time
@@ -442,45 +397,25 @@ describe API::MergeRequests do
it "returns an array of merge_requests in ascending order" do
get api("/projects/#{project.id}/merge_requests?sort=asc", user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(3)
- response_dates = json_response.map { |merge_request| merge_request['created_at'] }
- expect(response_dates).to eq(response_dates.sort)
+ expect_response_ordered_exactly(*merge_requests.sort_by { |mr| mr['created_at'] })
end
it "returns an array of merge_requests in descending order" do
get api("/projects/#{project.id}/merge_requests?sort=desc", user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(3)
- response_dates = json_response.map { |merge_request| merge_request['created_at'] }
- expect(response_dates).to eq(response_dates.sort.reverse)
+ expect_response_ordered_exactly(*merge_requests.sort_by { |mr| mr['created_at'] }.reverse)
end
it "returns an array of merge_requests ordered by updated_at" do
get api("/projects/#{project.id}/merge_requests?order_by=updated_at", user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(3)
- response_dates = json_response.map { |merge_request| merge_request['updated_at'] }
- expect(response_dates).to eq(response_dates.sort.reverse)
+ expect_response_ordered_exactly(*merge_requests.sort_by { |mr| mr['updated_at'] }.reverse)
end
it "returns an array of merge_requests ordered by created_at" do
get api("/projects/#{project.id}/merge_requests?order_by=created_at&sort=asc", user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(3)
- response_dates = json_response.map { |merge_request| merge_request['created_at'] }
- expect(response_dates).to eq(response_dates.sort)
+ expect_response_ordered_exactly(*merge_requests.sort_by { |mr| mr['created_at'] })
end
end
@@ -488,9 +423,7 @@ describe API::MergeRequests do
it 'returns merge requests with the given source branch' do
get api('/merge_requests', user), source_branch: merge_request_closed.source_branch, state: 'all'
- expect(json_response.length).to eq(2)
- expect(json_response.map { |mr| mr['id'] })
- .to contain_exactly(merge_request_closed.id, merge_request_merged.id)
+ expect_response_contain_exactly(merge_request_closed, merge_request_merged)
end
end
@@ -498,9 +431,7 @@ describe API::MergeRequests do
it 'returns merge requests with the given target branch' do
get api('/merge_requests', user), target_branch: merge_request_closed.target_branch, state: 'all'
- expect(json_response.length).to eq(2)
- expect(json_response.map { |mr| mr['id'] })
- .to contain_exactly(merge_request_closed.id, merge_request_merged.id)
+ expect_response_contain_exactly(merge_request_closed, merge_request_merged)
end
end
end
@@ -1341,4 +1272,22 @@ describe API::MergeRequests do
merge_request_closed.save
merge_request_closed
end
+
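+ # Helpers shared by the examples above: each asserts a paginated array response,
+ # then compares the returned ids either as a set or in the exact given order.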
+ def expect_response_contain_exactly(*items)
+ expect_paginated_array_response
+ expect(json_response.length).to eq(items.size)
+ expect(json_response.map { |element| element['id'] }).to contain_exactly(*items.map(&:id))
+ end
+
+ def expect_response_ordered_exactly(*items)
+ expect_paginated_array_response
+ expect(json_response.length).to eq(items.size)
+ expect(json_response.map { |element| element['id'] }).to eq(items.map(&:id))
+ end
+
+ def expect_paginated_array_response
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ end
end
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 85a571b8f0e..9b7c3205c1f 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -18,7 +18,7 @@ describe API::Projects do
let(:user2) { create(:user) }
let(:user3) { create(:user) }
let(:admin) { create(:admin) }
- let(:project) { create(:project, namespace: user.namespace) }
+ let(:project) { create(:project, :repository, namespace: user.namespace) }
let(:project2) { create(:project, namespace: user.namespace) }
let(:snippet) { create(:project_snippet, :public, author: user, project: project, title: 'example') }
let(:project_member) { create(:project_member, :developer, user: user3, project: project) }
@@ -220,7 +220,7 @@ describe API::Projects do
it 'returns a simplified version of all the projects' do
expected_keys = %w(
id description default_branch tag_list
- ssh_url_to_repo http_url_to_repo web_url
+ ssh_url_to_repo http_url_to_repo web_url readme_url
name name_with_namespace
path path_with_namespace
star_count forks_count
@@ -854,6 +854,7 @@ describe API::Projects do
expect(json_response['only_allow_merge_if_pipeline_succeeds']).to eq(project.only_allow_merge_if_pipeline_succeeds)
expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to eq(project.only_allow_merge_if_all_discussions_are_resolved)
expect(json_response['merge_method']).to eq(project.merge_method.to_s)
+ expect(json_response['readme_url']).to eq(project.readme_url)
end
it 'returns a project by path name' do
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index da392c5ab81..efb9bddde44 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -918,6 +918,22 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
end
+ context 'when job is cancelled' do
+ before do
+ job.cancel
+ end
+
+ context 'when trace is patched' do
+ before do
+ patch_the_trace
+ end
+
+ it 'returns Forbidden' do
+ expect(response.status).to eq(403)
+ end
+ end
+ end
+
context 'when redis data are flushed' do
before do
redis_shared_state_cleanup!
diff --git a/spec/requests/api/runners_spec.rb b/spec/requests/api/runners_spec.rb
index 981ac768e3a..c7587c877fc 100644
--- a/spec/requests/api/runners_spec.rb
+++ b/spec/requests/api/runners_spec.rb
@@ -27,7 +27,7 @@ describe API::Runners do
end
end
- let!(:group_runner) { create(:ci_runner, description: 'Group runner', groups: [group]) }
+ let!(:group_runner) { create(:ci_runner, description: 'Group runner', groups: [group], runner_type: :group_type) }
before do
# Set project access for users
@@ -48,7 +48,7 @@ describe API::Runners do
expect(json_response).to be_an Array
expect(json_response[0]).to have_key('ip_address')
expect(descriptions).to contain_exactly(
- 'Project runner', 'Two projects runner'
+ 'Project runner', 'Two projects runner', 'Group runner'
)
expect(shared).to be_falsey
end
@@ -592,6 +592,15 @@ describe API::Runners do
end.to change { project.runners.count }.by(+1)
expect(response).to have_gitlab_http_status(201)
end
+
+ it 'enables a shared runner' do
+ expect do
+ post api("/projects/#{project.id}/runners", admin), runner_id: shared_runner.id
+ end.to change { project.runners.count }.by(1)
+
+ expect(shared_runner.reload).not_to be_shared
+ expect(response).to have_gitlab_http_status(201)
+ end
end
context 'user is not admin' do
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index e8196980a8c..05637eb0729 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -488,10 +488,6 @@ describe API::Users do
describe "PUT /users/:id" do
let!(:admin_user) { create(:admin) }
- before do
- admin
- end
-
it "updates user with new bio" do
put api("/users/#{user.id}", admin), { bio: 'new test bio' }
@@ -525,27 +521,28 @@ describe API::Users do
expect(json_response['avatar_url']).to include(user.avatar_path)
end
- it 'updates user with his own email' do
- put api("/users/#{user.id}", admin), email: user.email
-
- expect(response).to have_gitlab_http_status(200)
- expect(json_response['email']).to eq(user.email)
- expect(user.reload.email).to eq(user.email)
- end
-
it 'updates user with a new email' do
+ old_email = user.email
+ old_notification_email = user.notification_email
put api("/users/#{user.id}", admin), email: 'new@email.com'
+ user.reload
+
expect(response).to have_gitlab_http_status(200)
- expect(user.reload.notification_email).to eq('new@email.com')
+ expect(user).to be_confirmed
+ expect(user.email).to eq(old_email)
+ expect(user.notification_email).to eq(old_notification_email)
+ expect(user.unconfirmed_email).to eq('new@email.com')
end
it 'skips reconfirmation when requested' do
- put api("/users/#{user.id}", admin), { skip_reconfirmation: true }
+ put api("/users/#{user.id}", admin), email: 'new@email.com', skip_reconfirmation: true
user.reload
- expect(user.confirmed_at).to be_present
+ expect(response).to have_gitlab_http_status(200)
+ expect(user).to be_confirmed
+ expect(user.email).to eq('new@email.com')
end
it 'updates user with his own username' do
diff --git a/spec/requests/api/v3/groups_spec.rb b/spec/requests/api/v3/groups_spec.rb
index a1cdf583de3..34d4b8e9565 100644
--- a/spec/requests/api/v3/groups_spec.rb
+++ b/spec/requests/api/v3/groups_spec.rb
@@ -458,9 +458,11 @@ describe API::V3::Groups do
describe "DELETE /groups/:id" do
context "when authenticated as user" do
it "removes group" do
- delete v3_api("/groups/#{group1.id}", user1)
+ Sidekiq::Testing.fake! do
+ expect { delete v3_api("/groups/#{group1.id}", user1) }.to change(GroupDestroyWorker.jobs, :size).by(1)
+ end
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(202)
end
it "does not remove a group if not an owner" do
@@ -489,7 +491,7 @@ describe API::V3::Groups do
it "removes any existing group" do
delete v3_api("/groups/#{group2.id}", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(202)
end
it "does not remove a non existing group" do
diff --git a/spec/requests/api/v3/projects_spec.rb b/spec/requests/api/v3/projects_spec.rb
index 4c25bd935c6..158ddf171bc 100644
--- a/spec/requests/api/v3/projects_spec.rb
+++ b/spec/requests/api/v3/projects_spec.rb
@@ -82,7 +82,7 @@ describe API::V3::Projects do
it 'returns a simplified version of all the projects' do
expected_keys = %w(
id description default_branch tag_list
- ssh_url_to_repo http_url_to_repo web_url
+ ssh_url_to_repo http_url_to_repo web_url readme_url
name name_with_namespace
path path_with_namespace
star_count forks_count
diff --git a/spec/requests/openid_connect_spec.rb b/spec/requests/openid_connect_spec.rb
index cd1a6cfc427..be286c490fe 100644
--- a/spec/requests/openid_connect_spec.rb
+++ b/spec/requests/openid_connect_spec.rb
@@ -159,7 +159,9 @@ describe 'OpenID Connect requests' do
get '/.well-known/openid-configuration'
expect(response).to have_gitlab_http_status(200)
- expect(json_response).to have_key('issuer')
+ expect(json_response['issuer']).to eq('http://localhost')
+ expect(json_response['jwks_uri']).to eq('http://www.example.com/oauth/discovery/keys')
+ expect(json_response['scopes_supported']).to eq(%w[api read_user sudo read_repository openid])
end
end
end
diff --git a/spec/serializers/pipeline_entity_spec.rb b/spec/serializers/pipeline_entity_spec.rb
index 2473c561f4b..e67d12b7a89 100644
--- a/spec/serializers/pipeline_entity_spec.rb
+++ b/spec/serializers/pipeline_entity_spec.rb
@@ -26,6 +26,13 @@ describe PipelineEntity do
expect(subject).to include :updated_at, :created_at
end
+ it 'excludes coverage data when disabled' do
+ entity = described_class
+ .represent(pipeline, request: request, disable_coverage: true)
+
+ expect(entity.as_json).not_to include(:coverage)
+ end
+
it 'contains details' do
expect(subject).to include :details
expect(subject[:details])
diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb
index e88e86c2998..b741308e2c5 100644
--- a/spec/serializers/pipeline_serializer_spec.rb
+++ b/spec/serializers/pipeline_serializer_spec.rb
@@ -114,7 +114,9 @@ describe PipelineSerializer do
Gitlab::GitalyClient.reset_counts
end
- shared_examples 'no N+1 queries' do
+ context 'with the same ref' do
+ let(:ref) { 'feature' }
+
it 'verifies number of queries', :request_store do
recorded = ActiveRecord::QueryRecorder.new { subject }
@@ -123,12 +125,6 @@ describe PipelineSerializer do
end
end
- context 'with the same ref' do
- let(:ref) { 'feature' }
-
- it_behaves_like 'no N+1 queries'
- end
-
context 'with different refs' do
def ref
@sequence ||= 0
@@ -136,7 +132,16 @@ describe PipelineSerializer do
"feature-#{@sequence}"
end
- it_behaves_like 'no N+1 queries'
+ it 'verifies number of queries', :request_store do
+ recorded = ActiveRecord::QueryRecorder.new { subject }
+
+ # For each ref there is a permission check on whether the maintainer can
+ # update the pipeline. With the same ref this check is cached, but with
+ # different refs there is an extra query per ref.
+ # https://gitlab.com/gitlab-org/gitlab-ce/issues/46368
+ expect(recorded.count).to be_within(1).of(51)
+ expect(recorded.cached_count).to eq(0)
+ end
end
def create_pipeline(status)
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 9a0b6efd8a9..2b88fcc9a96 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -395,7 +395,27 @@ describe Ci::CreatePipelineService do
result = execute_service
expect(result).to be_persisted
- expect(Environment.find_by(name: "review/master")).not_to be_nil
+ expect(Environment.find_by(name: "review/master")).to be_present
+ end
+ end
+
+ context 'with environment name including persisted variables' do
+ before do
+ config = YAML.dump(
+ deploy: {
+ environment: { name: "review/id1$CI_PIPELINE_ID/id2$CI_BUILD_ID" },
+ script: 'ls'
+ }
+ )
+
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ it 'skips persisted variables in environment name' do
+ result = execute_service
+
+ expect(result).to be_persisted
+ expect(Environment.find_by(name: "review/id1/id2")).to be_present
end
end
diff --git a/spec/services/ci/retry_pipeline_service_spec.rb b/spec/services/ci/retry_pipeline_service_spec.rb
index f1acfc48468..a73bd7a0268 100644
--- a/spec/services/ci/retry_pipeline_service_spec.rb
+++ b/spec/services/ci/retry_pipeline_service_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe Ci::RetryPipelineService, '#execute' do
+ include ProjectForksHelper
+
let(:user) { create(:user) }
let(:project) { create(:project) }
let(:pipeline) { create(:ci_pipeline, project: project) }
@@ -266,6 +268,33 @@ describe Ci::RetryPipelineService, '#execute' do
end
end
+ context 'when maintainer is allowed to push to forked project' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :public) }
+ let(:forked_project) { fork_project(project) }
+ let(:pipeline) { create(:ci_pipeline, project: forked_project, ref: 'fixes') }
+
+ before do
+ project.add_master(user)
+ create(:merge_request,
+ source_project: forked_project,
+ target_project: project,
+ source_branch: 'fixes',
+ allow_maintainer_to_push: true)
+ create_build('rspec 1', :failed, 1)
+ end
+
+ it 'allows retrying a failed pipeline' do
+ allow_any_instance_of(Project).to receive(:fetch_branch_allows_maintainer_push?).and_return(true)
+ allow_any_instance_of(Project).to receive(:empty_repo?).and_return(false)
+
+ service.execute(pipeline)
+
+ expect(build('rspec 1')).to be_pending
+ expect(pipeline.reload).to be_running
+ end
+ end
+
def statuses
pipeline.reload.statuses
end
diff --git a/spec/services/clusters/create_service_spec.rb b/spec/services/clusters/create_service_spec.rb
index 1c2f9c5cf43..1685dc748bd 100644
--- a/spec/services/clusters/create_service_spec.rb
+++ b/spec/services/clusters/create_service_spec.rb
@@ -8,80 +8,22 @@ describe Clusters::CreateService do
subject { described_class.new(project, user, params).execute(access_token) }
context 'when provider is gcp' do
- shared_context 'valid params' do
- let(:params) do
- {
- name: 'test-cluster',
- provider_type: :gcp,
- provider_gcp_attributes: {
- gcp_project_id: 'gcp-project',
- zone: 'us-central1-a',
- num_nodes: 1,
- machine_type: 'machine_type-a'
- }
- }
- end
- end
-
- shared_context 'invalid params' do
- let(:params) do
- {
- name: 'test-cluster',
- provider_type: :gcp,
- provider_gcp_attributes: {
- gcp_project_id: '!!!!!!!',
- zone: 'us-central1-a',
- num_nodes: 1,
- machine_type: 'machine_type-a'
- }
- }
- end
- end
-
- shared_examples 'create cluster' do
- it 'creates a cluster object and performs a worker' do
- expect(ClusterProvisionWorker).to receive(:perform_async)
-
- expect { subject }
- .to change { Clusters::Cluster.count }.by(1)
- .and change { Clusters::Providers::Gcp.count }.by(1)
-
- expect(subject.name).to eq('test-cluster')
- expect(subject.user).to eq(user)
- expect(subject.project).to eq(project)
- expect(subject.provider.gcp_project_id).to eq('gcp-project')
- expect(subject.provider.zone).to eq('us-central1-a')
- expect(subject.provider.num_nodes).to eq(1)
- expect(subject.provider.machine_type).to eq('machine_type-a')
- expect(subject.provider.access_token).to eq(access_token)
- expect(subject.platform).to be_nil
- end
- end
-
- shared_examples 'error' do
- it 'returns an error' do
- expect(ClusterProvisionWorker).not_to receive(:perform_async)
- expect { subject }.to change { Clusters::Cluster.count }.by(0)
- expect(subject.errors[:"provider_gcp.gcp_project_id"]).to be_present
- end
- end
-
context 'when project has no clusters' do
context 'when correct params' do
- include_context 'valid params'
+ include_context 'valid cluster create params'
- include_examples 'create cluster'
+ include_examples 'create cluster service success'
end
context 'when invalid params' do
- include_context 'invalid params'
+ include_context 'invalid cluster create params'
- include_examples 'error'
+ include_examples 'create cluster service error'
end
end
context 'when project has a cluster' do
- include_context 'valid params'
+ include_context 'valid cluster create params'
let!(:cluster) { create(:cluster, :provided_by_gcp, :production_environment, projects: [project]) }
it 'does not create a cluster' do
diff --git a/spec/services/keys/destroy_service_spec.rb b/spec/services/keys/destroy_service_spec.rb
new file mode 100644
index 00000000000..28ac72ddd42
--- /dev/null
+++ b/spec/services/keys/destroy_service_spec.rb
@@ -0,0 +1,13 @@
+require 'spec_helper'
+
+describe Keys::DestroyService do
+ let(:user) { create(:user) }
+
+ subject { described_class.new(user) }
+
+ it 'destroys a key' do
+ key = create(:key)
+
+ expect { subject.execute(key) }.to change(Key, :count).by(-1)
+ end
+end
diff --git a/spec/services/projects/update_remote_mirror_service_spec.rb b/spec/services/projects/update_remote_mirror_service_spec.rb
index be09afd9f36..723cb374c37 100644
--- a/spec/services/projects/update_remote_mirror_service_spec.rb
+++ b/spec/services/projects/update_remote_mirror_service_spec.rb
@@ -343,7 +343,11 @@ describe Projects::UpdateRemoteMirrorService do
tag = repository.find_tag(name)
target = tag.try(:target)
target_commit = tag.try(:dereferenced_target)
- tags << Gitlab::Git::Tag.new(repository.raw_repository, name, target, target_commit)
+ tags << Gitlab::Git::Tag.new(repository.raw_repository, {
+ name: name,
+ target: target,
+ target_commit: target_commit
+ })
end
end
diff --git a/spec/support/helpers/rake_helpers.rb b/spec/support/helpers/rake_helpers.rb
index 86bfeed107c..acd9cce6a67 100644
--- a/spec/support/helpers/rake_helpers.rb
+++ b/spec/support/helpers/rake_helpers.rb
@@ -13,6 +13,10 @@ module RakeHelpers
allow(main_object).to receive(:print)
end
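+ # Silences the progress bar by stubbing its output stream with a null-object double.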
+ def silence_progress_bar
+ allow_any_instance_of(ProgressBar::Output).to receive(:stream).and_return(double().as_null_object)
+ end
+
def main_object
@main_object ||= TOPLEVEL_BINDING.eval('self')
end
diff --git a/spec/support/helpers/test_env.rb b/spec/support/helpers/test_env.rb
index 1dad39fdab3..57aa07cf4fa 100644
--- a/spec/support/helpers/test_env.rb
+++ b/spec/support/helpers/test_env.rb
@@ -159,7 +159,11 @@ module TestEnv
end
spawn_script = Rails.root.join('scripts/gitaly-test-spawn').to_s
- @gitaly_pid = Bundler.with_original_env { IO.popen([spawn_script], &:read).to_i }
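+ # Spawn gitaly via the script and read its pid from tmp/tests/gitaly.pid
+ # instead of parsing the script's stdout.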
+ Bundler.with_original_env do
+ raise "gitaly spawn failed" unless system(spawn_script)
+ end
+ @gitaly_pid = Integer(File.read('tmp/tests/gitaly.pid'))
+
Kernel.at_exit { stop_gitaly }
wait_gitaly
diff --git a/spec/support/services/clusters/create_service_shared.rb b/spec/support/services/clusters/create_service_shared.rb
new file mode 100644
index 00000000000..43a2fd05498
--- /dev/null
+++ b/spec/support/services/clusters/create_service_shared.rb
@@ -0,0 +1,57 @@
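+# Shared contexts and examples extracted from spec/services/clusters/create_service_spec.rb.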
+shared_context 'valid cluster create params' do
+ let(:params) do
+ {
+ name: 'test-cluster',
+ provider_type: :gcp,
+ provider_gcp_attributes: {
+ gcp_project_id: 'gcp-project',
+ zone: 'us-central1-a',
+ num_nodes: 1,
+ machine_type: 'machine_type-a'
+ }
+ }
+ end
+end
+
+shared_context 'invalid cluster create params' do
+ let(:params) do
+ {
+ name: 'test-cluster',
+ provider_type: :gcp,
+ provider_gcp_attributes: {
+ gcp_project_id: '!!!!!!!',
+ zone: 'us-central1-a',
+ num_nodes: 1,
+ machine_type: 'machine_type-a'
+ }
+ }
+ end
+end
+
+shared_examples 'create cluster service success' do
+ it 'creates a cluster object and performs a worker' do
+ expect(ClusterProvisionWorker).to receive(:perform_async)
+
+ expect { subject }
+ .to change { Clusters::Cluster.count }.by(1)
+ .and change { Clusters::Providers::Gcp.count }.by(1)
+
+ expect(subject.name).to eq('test-cluster')
+ expect(subject.user).to eq(user)
+ expect(subject.project).to eq(project)
+ expect(subject.provider.gcp_project_id).to eq('gcp-project')
+ expect(subject.provider.zone).to eq('us-central1-a')
+ expect(subject.provider.num_nodes).to eq(1)
+ expect(subject.provider.machine_type).to eq('machine_type-a')
+ expect(subject.provider.access_token).to eq(access_token)
+ expect(subject.platform).to be_nil
+ end
+end
+
+shared_examples 'create cluster service error' do
+ it 'returns an error' do
+ expect(ClusterProvisionWorker).not_to receive(:perform_async)
+ expect { subject }.to change { Clusters::Cluster.count }.by(0)
+ expect(subject.errors[:"provider_gcp.gcp_project_id"]).to be_present
+ end
+end
diff --git a/spec/support/shared_examples/malicious_regexp_shared_examples.rb b/spec/support/shared_examples/malicious_regexp_shared_examples.rb
index ac5d22298bb..65026f1d7c0 100644
--- a/spec/support/shared_examples/malicious_regexp_shared_examples.rb
+++ b/spec/support/shared_examples/malicious_regexp_shared_examples.rb
@@ -1,3 +1,5 @@
+require 'timeout'
+
shared_examples 'malicious regexp' do
let(:malicious_text) { 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!' }
let(:malicious_regexp) { '(?i)^(([a-z])+.)+[A-Z]([a-z])+$' }
diff --git a/spec/support/shared_examples/models/with_uploads_shared_examples.rb b/spec/support/shared_examples/models/with_uploads_shared_examples.rb
new file mode 100644
index 00000000000..47ad0c6345d
--- /dev/null
+++ b/spec/support/shared_examples/models/with_uploads_shared_examples.rb
@@ -0,0 +1,23 @@
+require 'spec_helper'
+
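+# Expects the including spec to define `model_object`, `upload_attribute` and `uploader_class`;
+# the boolean parameter controls whether the unmounted FileUploader example runs.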
+shared_examples_for 'model with mounted uploader' do |supports_fileuploads|
+ describe '.destroy' do
+ before do
+ stub_uploads_object_storage(uploader_class)
+
+ model_object.public_send(upload_attribute).migrate!(ObjectStorage::Store::REMOTE)
+ end
+
+ it 'deletes remote uploads' do
+ expect_any_instance_of(CarrierWave::Storage::Fog::File).to receive(:delete).and_call_original
+
+ expect { model_object.destroy }.to change { Upload.count }.by(-1)
+ end
+
+ it 'deletes any FileUploader uploads which are not mounted', skip: !supports_fileuploads do
+ create(:upload, uploader: FileUploader, model: model_object)
+
+ expect { model_object.destroy }.to change { Upload.count }.by(-2)
+ end
+ end
+end
diff --git a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
index f28bf430f02..98d4456b277 100644
--- a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
@@ -36,16 +36,17 @@ describe 'layouts/nav/sidebar/_project' do
expect(rendered).to have_text 'Registry'
end
- it 'highlights only one tab' do
+ it 'highlights sidebar item and flyout' do
render
- expect(rendered).to have_css('.active', count: 1)
+ expect(rendered).to have_css('.sidebar-top-level-items > li.active', count: 1)
+ expect(rendered).to have_css('.is-fly-out-only > li.active', count: 1)
end
- it 'highlights container registry tab only' do
+ it 'highlights container registry tab' do
render
- expect(rendered).to have_css('.active', text: 'Registry')
+ expect(rendered).to have_css('.sidebar-top-level-items > li.active', text: 'Registry')
end
end
end