author     Lin Jen-Shin <godfat@godfat.org>  2016-08-16 00:14:26 +0800
committer  Lin Jen-Shin <godfat@godfat.org>  2016-08-16 00:14:26 +0800
commit     567ef6c1755c13dfcda1c9cb06b1eb5fbca0ef1b (patch)
tree       8871b4bb3d8bd3f56205465273deb8d61b7274c6 /spec
parent     44b29b5b3b908ee5acd1d35fdf8e75333e7e50c1 (diff)
parent     06e3bb9f232329674bdc162dc8f973a19b03f3c4 (diff)
download   gitlab-ce-567ef6c1755c13dfcda1c9cb06b1eb5fbca0ef1b.tar.gz
Merge remote-tracking branch 'upstream/master' into artifacts-from-ref-and-build-name
* upstream/master: (123 commits)
Limit the size of SVGs when viewing them as blobs
Add a spec for ProjectsFinder project_ids_relation option
Fix ProjectsFinder spec
Pass project IDs relation to ProjectsFinder instead of using a block
Speed up todos queries by limiting the projects set we join with
Used phantomjs variable
Ability to specify branches for pivotal tracker integration
Fix a memory leak caused by Banzai::Filter::SanitizationFilter
Update phantomjs link
Remove sleeping and replace escaped text.
Filters test fix
Fixed filtering tests
Removed screenshot command :poop:
Updated failing tests
Used mirrored version on GitLab
Updated tests
Fix `U2fSpec` for PhantomJS versions > 2.
Fix file downloading
Install latest stable phantomjs
Use new PhantomJS version
...
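One of the commits merged above, "Limit the size of SVGs when viewing them as blobs", is exercised further down by a new Blob spec around size_within_svg_limits?. As a rough, self-contained sketch of the check that spec describes (the method name comes from the spec; the constant value below is made up for illustration and is not GitLab's real limit):

    # Illustrative only: a blob is "within SVG limits" while its size does not
    # exceed a maximum; the boundary value itself still counts as within limits.
    MAXIMUM_SVG_SIZE = 2 * 1024 * 1024 # hypothetical 2 MB limit

    def size_within_svg_limits?(size, limit = MAXIMUM_SVG_SIZE)
      size <= limit
    end

    size_within_svg_limits?(42)                 # => true
    size_within_svg_limits?(MAXIMUM_SVG_SIZE)   # => true, the boundary is allowed
    size_within_svg_limits?(10 * 1024 * 1024)   # => false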
Diffstat (limited to 'spec')
70 files changed, 2664 insertions, 1349 deletions
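Two of the commits above ("Pass project IDs relation to ProjectsFinder instead of using a block" and the related spec fixes) show up in spec/finders/projects_finder_spec.rb below, where the finder is now called as finder.execute(user, project_ids_relation), with project_ids_relation being a relation such as Project.where(id: ...). The following is a simplified, self-contained sketch of that calling convention only; the class name and internals here are assumptions, not the real implementation, and plain arrays stand in for ActiveRecord relations:

    class SimpleProjectsFinder
      def initialize(visible_projects)
        @visible_projects = visible_projects
      end

      # execute(user, project_ids_relation): when a relation is given,
      # the result is restricted to the projects it contains.
      def execute(_user = nil, project_ids_relation = nil)
        return @visible_projects unless project_ids_relation

        @visible_projects & project_ids_relation
      end
    end

    finder = SimpleProjectsFinder.new(%w[public_project internal_project])
    finder.execute(nil)                       # => ["public_project", "internal_project"]
    finder.execute(nil, %w[internal_project]) # => ["internal_project"]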
diff --git a/spec/controllers/admin/groups_controller_spec.rb b/spec/controllers/admin/groups_controller_spec.rb
new file mode 100644
index 00000000000..0239aea47fb
--- /dev/null
+++ b/spec/controllers/admin/groups_controller_spec.rb
@@ -0,0 +1,24 @@
+require 'spec_helper'
+
+describe Admin::GroupsController do
+  let(:group) { create(:group) }
+  let(:project) { create(:project, namespace: group) }
+  let(:admin) { create(:admin) }
+
+  before do
+    sign_in(admin)
+    Sidekiq::Testing.fake!
+  end
+
+  describe 'DELETE #destroy' do
+    it 'schedules a group destroy' do
+      expect { delete :destroy, id: project.group.path }.to change(GroupDestroyWorker.jobs, :size).by(1)
+    end
+
+    it 'redirects to the admin group path' do
+      delete :destroy, id: project.group.path
+
+      expect(response).to redirect_to(admin_groups_path)
+    end
+  end
+end
diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb
index cd98fecd0c7..4ae6364207b 100644
--- a/spec/controllers/groups_controller_spec.rb
+++ b/spec/controllers/groups_controller_spec.rb
@@ -75,4 +75,33 @@ describe GroupsController do
      end
    end
  end
+
+  describe 'DELETE #destroy' do
+    context 'as another user' do
+      it 'returns 404' do
+        sign_in(create(:user))
+
+        delete :destroy, id: group.path
+
+        expect(response.status).to eq(404)
+      end
+    end
+
+    context 'as the group owner' do
+      before do
+        Sidekiq::Testing.fake!
+        sign_in(user)
+      end
+
+      it 'schedules a group destroy' do
+        expect { delete :destroy, id: group.path }.to change(GroupDestroyWorker.jobs, :size).by(1)
+      end
+
+      it 'redirects to the root path' do
+        delete :destroy, id: group.path
+
+        expect(response).to redirect_to(root_path)
+      end
+    end
+  end
 end
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index 1b32d560b16..0c93bbdfe26 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -7,6 +7,7 @@ FactoryGirl.define do
     stage_idx 0
     ref 'master'
     tag false
+    status 'pending'
     created_at 'Di 29. Okt 09:50:00 CET 2013'
     started_at 'Di 29. Okt 09:51:28 CET 2013'
     finished_at 'Di 29. Okt 09:53:28 CET 2013'
@@ -45,6 +46,10 @@ FactoryGirl.define do
       status 'pending'
     end
 
+    trait :created do
+      status 'created'
+    end
+
     trait :manual do
       status 'skipped'
       self.when 'manual'
diff --git a/spec/factories/ci/pipelines.rb b/spec/factories/ci/pipelines.rb
index a039bef6f3c..04d66020c87 100644
--- a/spec/factories/ci/pipelines.rb
+++ b/spec/factories/ci/pipelines.rb
@@ -18,7 +18,9 @@ FactoryGirl.define do
 
   factory :ci_empty_pipeline, class: Ci::Pipeline do
+    ref 'master'
     sha '97de212e80737a608d939f648d959671fb0a0142'
+    status 'pending'
 
     project factory: :empty_project
diff --git a/spec/factories/commit_statuses.rb b/spec/factories/commit_statuses.rb
index 1e5c479616c..995f2080f10 100644
--- a/spec/factories/commit_statuses.rb
+++ b/spec/factories/commit_statuses.rb
@@ -7,6 +7,30 @@ FactoryGirl.define do
     started_at 'Tue, 26 Jan 2016 08:21:42 +0100'
     finished_at 'Tue, 26 Jan 2016 08:23:42 +0100'
 
+    trait :success do
+      status 'success'
+    end
+
+    trait :failed do
+      status 'failed'
+    end
+
+    trait :canceled do
+      status 'canceled'
+    end
+
+    trait :running do
+      status 'running'
+    end
+
+    trait :pending do
+      status 'pending'
+    end
+
+    trait :created do
+      status 'created'
+    end
+
     after(:build) do |build, evaluator|
       build.project = build.pipeline.project
     end
diff --git a/spec/features/issuables/default_sort_order_spec.rb b/spec/features/issuables/default_sort_order_spec.rb
index 0d495cd04aa..9114f751b55 100644
--- a/spec/features/issuables/default_sort_order_spec.rb
+++ b/spec/features/issuables/default_sort_order_spec.rb
@@ -55,7 +55,7 @@ describe 'Projects > Issuables > Default sort order', feature: true do
       it 'is "last updated"' do
         visit_merge_requests_with_state(project, 'merged')
 
-        expect(selected_sort_order).to eq('last updated')
+        expect(find('.issues-other-filters')).to have_content('Last updated')
         expect(first_merge_request).to include(last_updated_issuable.title)
         expect(last_merge_request).to include(first_updated_issuable.title)
       end
@@ -67,7 +67,7 @@ describe 'Projects > Issuables > Default sort order', feature: true do
       it 'is "last updated"' do
         visit_merge_requests_with_state(project, 'closed')
 
-        expect(selected_sort_order).to eq('last updated')
+        expect(find('.issues-other-filters')).to have_content('Last updated')
         expect(first_merge_request).to include(last_updated_issuable.title)
         expect(last_merge_request).to include(first_updated_issuable.title)
       end
@@ -79,7 +79,7 @@ describe 'Projects > Issuables > Default sort order', feature: true do
       it 'is "last created"' do
         visit_merge_requests_with_state(project, 'all')
 
-        expect(selected_sort_order).to eq('last created')
+        expect(find('.issues-other-filters')).to have_content('Last created')
         expect(first_merge_request).to include(last_created_issuable.title)
         expect(last_merge_request).to include(first_created_issuable.title)
       end
@@ -108,7 +108,7 @@ describe 'Projects > Issuables > Default sort order', feature: true do
       it 'is "last created"' do
         visit_issues project
 
-        expect(selected_sort_order).to eq('last created')
+        expect(find('.issues-other-filters')).to have_content('Last created')
         expect(first_issue).to include(last_created_issuable.title)
         expect(last_issue).to include(first_created_issuable.title)
       end
@@ -120,7 +120,7 @@ describe 'Projects > Issuables > Default sort order', feature: true do
       it 'is "last created"' do
         visit_issues_with_state(project, 'open')
 
-        expect(selected_sort_order).to eq('last created')
+        expect(find('.issues-other-filters')).to have_content('Last created')
         expect(first_issue).to include(last_created_issuable.title)
         expect(last_issue).to include(first_created_issuable.title)
       end
@@ -132,7 +132,7 @@ describe 'Projects > Issuables > Default sort order', feature: true do
       it 'is "last updated"' do
         visit_issues_with_state(project, 'closed')
 
-        expect(selected_sort_order).to eq('last updated')
+        expect(find('.issues-other-filters')).to have_content('Last updated')
         expect(first_issue).to include(last_updated_issuable.title)
         expect(last_issue).to include(first_updated_issuable.title)
       end
@@ -144,7 +144,7 @@ describe 'Projects > Issuables > Default sort order', feature: true do
       it 'is "last created"' do
         visit_issues_with_state(project, 'all')
 
-        expect(selected_sort_order).to eq('last created')
+        expect(find('.issues-other-filters')).to have_content('Last created')
         expect(first_issue).to include(last_created_issuable.title)
         expect(last_issue).to include(first_created_issuable.title)
       end
diff --git a/spec/features/issues/filter_issues_spec.rb b/spec/features/issues/filter_issues_spec.rb
index ea81ee54c90..e262f285868 100644
--- a/spec/features/issues/filter_issues_spec.rb
+++ b/spec/features/issues/filter_issues_spec.rb
@@ -117,7 +117,7 @@ describe 'Filter issues', feature: true do
 
       find('.dropdown-menu-user-link', text: user.username).click
 
-      wait_for_ajax
+      expect(page).not_to have_selector('.issues-list .issue')
 
       find('.js-label-select').click
diff --git a/spec/features/merge_requests/create_new_mr_spec.rb b/spec/features/merge_requests/create_new_mr_spec.rb
index e296078bad8..11c9de3c4bf 100644
--- a/spec/features/merge_requests/create_new_mr_spec.rb
+++ b/spec/features/merge_requests/create_new_mr_spec.rb
@@ -13,6 +13,8 @@ feature 'Create New Merge Request', feature: true, js: true do
   it 'generates a diff for an orphaned branch' do
     click_link 'New Merge Request'
 
+    expect(page).to have_content('Source branch')
+    expect(page).to have_content('Target branch')
     first('.js-source-branch').click
     first('.dropdown-source-branch .dropdown-content a', text: 'orphaned-branch').click
diff --git a/spec/features/merge_requests/created_from_fork_spec.rb b/spec/features/merge_requests/created_from_fork_spec.rb
index f676200ecf3..4d5d4aa121a 100644
--- a/spec/features/merge_requests/created_from_fork_spec.rb
+++ b/spec/features/merge_requests/created_from_fork_spec.rb
@@ -29,12 +29,16 @@ feature 'Merge request created from fork' do
   include WaitForAjax
 
   given(:pipeline) do
-    create(:ci_pipeline_with_two_job, project: fork_project,
-                                      sha: merge_request.diff_head_sha,
-                                      ref: merge_request.source_branch)
+    create(:ci_pipeline,
+           project: fork_project,
+           sha: merge_request.diff_head_sha,
+           ref: merge_request.source_branch)
   end
 
-  background { pipeline.create_builds(user) }
+  background do
+    create(:ci_build, pipeline: pipeline, name: 'rspec')
+    create(:ci_build, pipeline: pipeline, name: 'spinach')
+  end
 
   scenario 'user visits a pipelines page', js: true do
     visit_merge_request(merge_request)
diff --git a/spec/features/profiles/preferences_spec.rb b/spec/features/profiles/preferences_spec.rb
index 787bf42d048..d14a1158b67 100644
--- a/spec/features/profiles/preferences_spec.rb
+++ b/spec/features/profiles/preferences_spec.rb
@@ -68,10 +68,14 @@ describe 'Profile > Preferences', feature: true do
       allowing_for_delay do
         find('#logo').click
+
+        expect(page).to have_content("You don't have starred projects yet")
         expect(page.current_path).to eq starred_dashboard_projects_path
       end
 
       click_link 'Your Projects'
+
+      expect(page).not_to have_content("You don't have starred projects yet")
       expect(page.current_path).to eq dashboard_projects_path
     end
   end
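The factory changes above (the default status 'pending' and the new per-status traits) let a spec put a build or commit status into a given state with a one-liner such as create(:ci_build, :failed, pipeline: pipeline). Below is a minimal, runnable FactoryGirl sketch of that trait pattern; it uses a throwaway plain-Ruby class instead of the real Ci::Build/CommitStatus models, so the names here are stand-ins, not GitLab code:

    require 'factory_girl'

    # Throwaway stand-in for the app's CommitStatus / Ci::Build models.
    class FakeCommitStatus
      attr_accessor :status
    end

    FactoryGirl.define do
      factory :fake_commit_status do
        skip_create          # plain Ruby object, nothing to persist
        status 'pending'     # default state, as in the updated factories

        trait :failed do
          status 'failed'
        end

        trait :success do
          status 'success'
        end
      end
    end

    FactoryGirl.build(:fake_commit_status).status           # => "pending"
    FactoryGirl.build(:fake_commit_status, :failed).status  # => "failed"
    FactoryGirl.create(:fake_commit_status, :success).status # => "success"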
diff --git a/spec/features/projects/files/project_owner_creates_license_file_spec.rb b/spec/features/projects/files/project_owner_creates_license_file_spec.rb index e1e105e6bbe..dbd07464444 100644 --- a/spec/features/projects/files/project_owner_creates_license_file_spec.rb +++ b/spec/features/projects/files/project_owner_creates_license_file_spec.rb @@ -39,6 +39,7 @@ feature 'project owner creates a license file', feature: true, js: true do scenario 'project master creates a license file from the "Add license" link' do click_link 'Add License' + expect(page).to have_content('New File') expect(current_path).to eq( namespace_project_new_blob_path(project.namespace, project, 'master')) expect(find('#file_name').value).to eq('LICENSE') diff --git a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb index 67aac25e427..45bf0c0d038 100644 --- a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb +++ b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb @@ -14,6 +14,7 @@ feature 'project owner sees a link to create a license file in empty project', f visit namespace_project_path(project.namespace, project) click_link 'Create empty bare repository' click_on 'LICENSE' + expect(page).to have_content('New File') expect(current_path).to eq( namespace_project_new_blob_path(project.namespace, project, 'master')) diff --git a/spec/features/pipelines_spec.rb b/spec/features/projects/pipelines_spec.rb index eace76c370f..29d150bc597 100644 --- a/spec/features/pipelines_spec.rb +++ b/spec/features/projects/pipelines_spec.rb @@ -12,7 +12,7 @@ describe "Pipelines" do end describe 'GET /:project/pipelines' do - let!(:pipeline) { create(:ci_pipeline, project: project, ref: 'master', status: 'running') } + let!(:pipeline) { create(:ci_empty_pipeline, project: project, ref: 'master', status: 'running') } [:all, :running, :branches].each do |scope| context "displaying #{scope}" do @@ -31,9 +31,12 @@ describe "Pipelines" do end context 'cancelable pipeline' do - let!(:running) { create(:ci_build, :running, pipeline: pipeline, stage: 'test', commands: 'test') } + let!(:build) { create(:ci_build, pipeline: pipeline, stage: 'test', commands: 'test') } - before { visit namespace_project_pipelines_path(project.namespace, project) } + before do + build.run + visit namespace_project_pipelines_path(project.namespace, project) + end it { expect(page).to have_link('Cancel') } it { expect(page).to have_selector('.ci-running') } @@ -47,9 +50,12 @@ describe "Pipelines" do end context 'retryable pipelines' do - let!(:failed) { create(:ci_build, :failed, pipeline: pipeline, stage: 'test', commands: 'test') } + let!(:build) { create(:ci_build, pipeline: pipeline, stage: 'test', commands: 'test') } - before { visit namespace_project_pipelines_path(project.namespace, project) } + before do + build.drop + visit namespace_project_pipelines_path(project.namespace, project) + end it { expect(page).to have_link('Retry') } it { expect(page).to have_selector('.ci-failed') } @@ -58,7 +64,7 @@ describe "Pipelines" do before { click_link('Retry') } it { expect(page).not_to have_link('Retry') } - it { expect(page).to have_selector('.ci-pending') } + it { expect(page).to have_selector('.ci-running') } end end @@ -80,7 +86,9 @@ describe "Pipelines" do context 'when running' do let!(:running) { 
create(:generic_commit_status, status: 'running', pipeline: pipeline, stage: 'test') } - before { visit namespace_project_pipelines_path(project.namespace, project) } + before do + visit namespace_project_pipelines_path(project.namespace, project) + end it 'is not cancelable' do expect(page).not_to have_link('Cancel') @@ -92,9 +100,12 @@ describe "Pipelines" do end context 'when failed' do - let!(:running) { create(:generic_commit_status, status: 'failed', pipeline: pipeline, stage: 'test') } + let!(:status) { create(:generic_commit_status, :pending, pipeline: pipeline, stage: 'test') } - before { visit namespace_project_pipelines_path(project.namespace, project) } + before do + status.drop + visit namespace_project_pipelines_path(project.namespace, project) + end it 'is not retryable' do expect(page).not_to have_link('Retry') @@ -194,7 +205,7 @@ describe "Pipelines" do before { visit new_namespace_project_pipeline_path(project.namespace, project) } context 'for valid commit' do - before { fill_in('Create for', with: 'master') } + before { fill_in('pipeline[ref]', with: 'master') } context 'with gitlab-ci.yml' do before { stub_ci_pipeline_to_return_yaml_file } @@ -211,11 +222,37 @@ describe "Pipelines" do context 'for invalid commit' do before do - fill_in('Create for', with: 'invalid reference') + fill_in('pipeline[ref]', with: 'invalid-reference') click_on 'Create pipeline' end it { expect(page).to have_content('Reference not found') } end end + + describe 'Create pipelines', feature: true do + let(:project) { create(:project) } + + before do + visit new_namespace_project_pipeline_path(project.namespace, project) + end + + describe 'new pipeline page' do + it 'has field to add a new pipeline' do + expect(page).to have_field('pipeline[ref]') + expect(page).to have_content('Create for') + end + end + + describe 'find pipelines' do + it 'shows filtered pipelines', js: true do + fill_in('pipeline[ref]', with: 'fix') + find('input#ref').native.send_keys(:keydown) + + within('.ui-autocomplete') do + expect(page).to have_selector('li', text: 'fix') + end + end + end + end end diff --git a/spec/features/u2f_spec.rb b/spec/features/u2f_spec.rb index 9335f5bf120..d370f90f7d9 100644 --- a/spec/features/u2f_spec.rb +++ b/spec/features/u2f_spec.rb @@ -1,8 +1,16 @@ require 'spec_helper' feature 'Using U2F (Universal 2nd Factor) Devices for Authentication', feature: true, js: true do + include WaitForAjax + before { allow_any_instance_of(U2fHelper).to receive(:inject_u2f_api?).and_return(true) } + def manage_two_factor_authentication + click_on 'Manage Two-Factor Authentication' + expect(page).to have_content("Setup New U2F Device") + wait_for_ajax + end + def register_u2f_device(u2f_device = nil) u2f_device ||= FakeU2fDevice.new(page) u2f_device.respond_to_u2f_registration @@ -34,7 +42,7 @@ feature 'Using U2F (Universal 2nd Factor) Devices for Authentication', feature: describe 'when 2FA via OTP is enabled' do it 'allows registering a new device' do visit profile_account_path - click_on 'Manage Two-Factor Authentication' + manage_two_factor_authentication expect(page.body).to match("You've already enabled two-factor authentication using mobile") register_u2f_device @@ -46,15 +54,15 @@ feature 'Using U2F (Universal 2nd Factor) Devices for Authentication', feature: visit profile_account_path # First device - click_on 'Manage Two-Factor Authentication' + manage_two_factor_authentication register_u2f_device expect(page.body).to match('Your U2F device was registered') # Second device - click_on 'Manage 
Two-Factor Authentication' + manage_two_factor_authentication register_u2f_device expect(page.body).to match('Your U2F device was registered') - click_on 'Manage Two-Factor Authentication' + manage_two_factor_authentication expect(page.body).to match('You have 2 U2F devices registered') end end @@ -62,7 +70,7 @@ feature 'Using U2F (Universal 2nd Factor) Devices for Authentication', feature: it 'allows the same device to be registered for multiple users' do # First user visit profile_account_path - click_on 'Manage Two-Factor Authentication' + manage_two_factor_authentication u2f_device = register_u2f_device expect(page.body).to match('Your U2F device was registered') logout @@ -71,7 +79,7 @@ feature 'Using U2F (Universal 2nd Factor) Devices for Authentication', feature: user = login_as(:user) user.update_attribute(:otp_required_for_login, true) visit profile_account_path - click_on 'Manage Two-Factor Authentication' + manage_two_factor_authentication register_u2f_device(u2f_device) expect(page.body).to match('Your U2F device was registered') @@ -81,7 +89,7 @@ feature 'Using U2F (Universal 2nd Factor) Devices for Authentication', feature: context "when there are form errors" do it "doesn't register the device if there are errors" do visit profile_account_path - click_on 'Manage Two-Factor Authentication' + manage_two_factor_authentication # Have the "u2f device" respond with bad data page.execute_script("u2f.register = function(_,_,_,callback) { callback('bad response'); };") @@ -96,7 +104,7 @@ feature 'Using U2F (Universal 2nd Factor) Devices for Authentication', feature: it "allows retrying registration" do visit profile_account_path - click_on 'Manage Two-Factor Authentication' + manage_two_factor_authentication # Failed registration page.execute_script("u2f.register = function(_,_,_,callback) { callback('bad response'); };") @@ -122,7 +130,7 @@ feature 'Using U2F (Universal 2nd Factor) Devices for Authentication', feature: login_as(user) user.update_attribute(:otp_required_for_login, true) visit profile_account_path - click_on 'Manage Two-Factor Authentication' + manage_two_factor_authentication @u2f_device = register_u2f_device logout end @@ -161,7 +169,7 @@ feature 'Using U2F (Universal 2nd Factor) Devices for Authentication', feature: current_user = login_as(:user) current_user.update_attribute(:otp_required_for_login, true) visit profile_account_path - click_on 'Manage Two-Factor Authentication' + manage_two_factor_authentication register_u2f_device logout @@ -182,7 +190,7 @@ feature 'Using U2F (Universal 2nd Factor) Devices for Authentication', feature: current_user = login_as(:user) current_user.update_attribute(:otp_required_for_login, true) visit profile_account_path - click_on 'Manage Two-Factor Authentication' + manage_two_factor_authentication register_u2f_device(@u2f_device) logout @@ -248,7 +256,7 @@ feature 'Using U2F (Universal 2nd Factor) Devices for Authentication', feature: user = login_as(:user) user.update_attribute(:otp_required_for_login, true) visit profile_account_path - click_on 'Manage Two-Factor Authentication' + manage_two_factor_authentication expect(page).to have_content("Your U2F device needs to be set up.") register_u2f_device end diff --git a/spec/features/variables_spec.rb b/spec/features/variables_spec.rb index 61f2bc61e0c..d7880d5778f 100644 --- a/spec/features/variables_spec.rb +++ b/spec/features/variables_spec.rb @@ -42,6 +42,7 @@ describe 'Project variables', js: true do find('.btn-variable-edit').click end + expect(page).to have_content('Update 
variable') fill_in('variable_key', with: 'key') fill_in('variable_value', with: 'key value') click_button('Save variable') diff --git a/spec/finders/projects_finder_spec.rb b/spec/finders/projects_finder_spec.rb index 0a1cc3b3df7..7a3a74335e8 100644 --- a/spec/finders/projects_finder_spec.rb +++ b/spec/finders/projects_finder_spec.rb @@ -23,73 +23,36 @@ describe ProjectsFinder do let(:finder) { described_class.new } - describe 'without a group' do - describe 'without a user' do - subject { finder.execute } + describe 'without a user' do + subject { finder.execute } - it { is_expected.to eq([public_project]) } - end - - describe 'with a user' do - subject { finder.execute(user) } - - describe 'without private projects' do - it { is_expected.to eq([public_project, internal_project]) } - end - - describe 'with private projects' do - before do - private_project.team.add_user(user, Gitlab::Access::MASTER) - end - - it do - is_expected.to eq([public_project, internal_project, - private_project]) - end - end - end + it { is_expected.to eq([public_project]) } end - describe 'with a group' do - describe 'without a user' do - subject { finder.execute(nil, group: group) } + describe 'with a user' do + subject { finder.execute(user) } - it { is_expected.to eq([public_project]) } + describe 'without private projects' do + it { is_expected.to eq([public_project, internal_project]) } end - describe 'with a user' do - subject { finder.execute(user, group: group) } - - describe 'without shared projects' do - it { is_expected.to eq([public_project, internal_project]) } + describe 'with private projects' do + before do + private_project.team.add_user(user, Gitlab::Access::MASTER) end - describe 'with shared projects and group membership' do - before do - group.add_user(user, Gitlab::Access::DEVELOPER) - - shared_project.project_group_links. - create(group_access: Gitlab::Access::MASTER, group: group) - end - - it do - is_expected.to eq([shared_project, public_project, internal_project]) - end + it do + is_expected.to eq([public_project, internal_project, private_project]) end + end + end - describe 'with shared projects and project membership' do - before do - shared_project.team.add_user(user, Gitlab::Access::DEVELOPER) + describe 'with project_ids_relation' do + let(:project_ids_relation) { Project.where(id: internal_project.id) } - shared_project.project_group_links. 
- create(group_access: Gitlab::Access::MASTER, group: group) - end + subject { finder.execute(user, project_ids_relation) } - it do - is_expected.to eq([shared_project, public_project, internal_project]) - end - end - end + it { is_expected.to eq([internal_project]) } end end end diff --git a/spec/fixtures/project_services/campfire/rooms.json b/spec/fixtures/project_services/campfire/rooms.json new file mode 100644 index 00000000000..71e9645c955 --- /dev/null +++ b/spec/fixtures/project_services/campfire/rooms.json @@ -0,0 +1,22 @@ +{ + "rooms": [ + { + "name": "test-room", + "locked": false, + "created_at": "2009/01/07 20:43:11 +0000", + "updated_at": "2009/03/18 14:31:39 +0000", + "topic": "The room topic\n", + "id": 123, + "membership_limit": 4 + }, + { + "name": "another room", + "locked": true, + "created_at": "2009/03/18 14:30:42 +0000", + "updated_at": "2013/01/27 14:14:27 +0000", + "topic": "Comment, ideas, GitHub notifications for eCommittee App", + "id": 456, + "membership_limit": 4 + } + ] +} diff --git a/spec/fixtures/project_services/campfire/rooms2.json b/spec/fixtures/project_services/campfire/rooms2.json new file mode 100644 index 00000000000..3d5f635d8b3 --- /dev/null +++ b/spec/fixtures/project_services/campfire/rooms2.json @@ -0,0 +1,22 @@ +{ + "rooms": [ + { + "name": "test-room-not-found", + "locked": false, + "created_at": "2009/01/07 20:43:11 +0000", + "updated_at": "2009/03/18 14:31:39 +0000", + "topic": "The room topic\n", + "id": 123, + "membership_limit": 4 + }, + { + "name": "another room", + "locked": true, + "created_at": "2009/03/18 14:30:42 +0000", + "updated_at": "2013/01/27 14:14:27 +0000", + "topic": "Comment, ideas, GitHub notifications for eCommittee App", + "id": 456, + "membership_limit": 4 + } + ] +} diff --git a/spec/helpers/members_helper_spec.rb b/spec/helpers/members_helper_spec.rb index f75fdb739f6..7998209b7b0 100644 --- a/spec/helpers/members_helper_spec.rb +++ b/spec/helpers/members_helper_spec.rb @@ -9,54 +9,6 @@ describe MembersHelper do it { expect(action_member_permission(:admin, group_member)).to eq :admin_group_member } end - describe '#default_show_roles' do - let(:user) { double } - let(:member) { build(:project_member) } - - before do - allow(helper).to receive(:current_user).and_return(user) - allow(helper).to receive(:can?).with(user, :update_project_member, member).and_return(false) - allow(helper).to receive(:can?).with(user, :destroy_project_member, member).and_return(false) - allow(helper).to receive(:can?).with(user, :admin_project_member, member.source).and_return(false) - end - - context 'when the current cannot update, destroy or admin the passed member' do - it 'returns false' do - expect(helper.default_show_roles(member)).to be_falsy - end - end - - context 'when the current can update the passed member' do - before do - allow(helper).to receive(:can?).with(user, :update_project_member, member).and_return(true) - end - - it 'returns true' do - expect(helper.default_show_roles(member)).to be_truthy - end - end - - context 'when the current can destroy the passed member' do - before do - allow(helper).to receive(:can?).with(user, :destroy_project_member, member).and_return(true) - end - - it 'returns true' do - expect(helper.default_show_roles(member)).to be_truthy - end - end - - context 'when the current can admin the passed member source' do - before do - allow(helper).to receive(:can?).with(user, :admin_project_member, member.source).and_return(true) - end - - it 'returns true' do - 
expect(helper.default_show_roles(member)).to be_truthy - end - end - end - describe '#remove_member_message' do let(:requester) { build(:user) } let(:project) { create(:project) } diff --git a/spec/initializers/secret_token_spec.rb b/spec/initializers/secret_token_spec.rb new file mode 100644 index 00000000000..837b0de9a4c --- /dev/null +++ b/spec/initializers/secret_token_spec.rb @@ -0,0 +1,200 @@ +require 'spec_helper' +require_relative '../../config/initializers/secret_token' + +describe 'create_tokens', lib: true do + let(:secrets) { ActiveSupport::OrderedOptions.new } + + before do + allow(ENV).to receive(:[]).and_call_original + allow(File).to receive(:write) + allow(File).to receive(:delete) + allow(Rails).to receive_message_chain(:application, :secrets).and_return(secrets) + allow(Rails).to receive_message_chain(:root, :join) { |string| string } + allow(self).to receive(:warn) + allow(self).to receive(:exit) + end + + context 'setting secret_key_base and otp_key_base' do + context 'when none of the secrets exist' do + before do + allow(ENV).to receive(:[]).with('SECRET_KEY_BASE').and_return(nil) + allow(File).to receive(:exist?).with('.secret').and_return(false) + allow(File).to receive(:exist?).with('config/secrets.yml').and_return(false) + allow(self).to receive(:warn_missing_secret) + end + + it 'generates different secrets for secret_key_base, otp_key_base, and db_key_base' do + create_tokens + + keys = secrets.values_at(:secret_key_base, :otp_key_base, :db_key_base) + + expect(keys.uniq).to eq(keys) + expect(keys.map(&:length)).to all(eq(128)) + end + + it 'warns about the secrets to add to secrets.yml' do + expect(self).to receive(:warn_missing_secret).with('secret_key_base') + expect(self).to receive(:warn_missing_secret).with('otp_key_base') + expect(self).to receive(:warn_missing_secret).with('db_key_base') + + create_tokens + end + + it 'writes the secrets to secrets.yml' do + expect(File).to receive(:write).with('config/secrets.yml', any_args) do |filename, contents, options| + new_secrets = YAML.load(contents)[Rails.env] + + expect(new_secrets['secret_key_base']).to eq(secrets.secret_key_base) + expect(new_secrets['otp_key_base']).to eq(secrets.otp_key_base) + expect(new_secrets['db_key_base']).to eq(secrets.db_key_base) + end + + create_tokens + end + + it 'does not write a .secret file' do + expect(File).not_to receive(:write).with('.secret') + + create_tokens + end + end + + context 'when the other secrets all exist' do + before do + secrets.db_key_base = 'db_key_base' + + allow(File).to receive(:exist?).with('.secret').and_return(true) + allow(File).to receive(:read).with('.secret').and_return('file_key') + end + + context 'when secret_key_base exists in the environment and secrets.yml' do + before do + allow(ENV).to receive(:[]).with('SECRET_KEY_BASE').and_return('env_key') + secrets.secret_key_base = 'secret_key_base' + secrets.otp_key_base = 'otp_key_base' + end + + it 'does not issue a warning' do + expect(self).not_to receive(:warn) + + create_tokens + end + + it 'uses the environment variable' do + create_tokens + + expect(secrets.secret_key_base).to eq('env_key') + end + + it 'does not update secrets.yml' do + expect(File).not_to receive(:write) + + create_tokens + end + end + + context 'when secret_key_base and otp_key_base exist' do + before do + secrets.secret_key_base = 'secret_key_base' + secrets.otp_key_base = 'otp_key_base' + end + + it 'does not write any files' do + expect(File).not_to receive(:write) + + create_tokens + end + + it 'sets the the keys 
to the values from the environment and secrets.yml' do + create_tokens + + expect(secrets.secret_key_base).to eq('secret_key_base') + expect(secrets.otp_key_base).to eq('otp_key_base') + expect(secrets.db_key_base).to eq('db_key_base') + end + + it 'deletes the .secret file' do + expect(File).to receive(:delete).with('.secret') + + create_tokens + end + end + + context 'when secret_key_base and otp_key_base do not exist' do + before do + allow(File).to receive(:exist?).with('config/secrets.yml').and_return(true) + allow(YAML).to receive(:load_file).with('config/secrets.yml').and_return('test' => secrets.to_h.stringify_keys) + allow(self).to receive(:warn_missing_secret) + end + + it 'uses the file secret' do + expect(File).to receive(:write) do |filename, contents, options| + new_secrets = YAML.load(contents)[Rails.env] + + expect(new_secrets['secret_key_base']).to eq('file_key') + expect(new_secrets['otp_key_base']).to eq('file_key') + expect(new_secrets['db_key_base']).to eq('db_key_base') + end + + create_tokens + + expect(secrets.otp_key_base).to eq('file_key') + end + + it 'keeps the other secrets as they were' do + create_tokens + + expect(secrets.db_key_base).to eq('db_key_base') + end + + it 'warns about the missing secrets' do + expect(self).to receive(:warn_missing_secret).with('secret_key_base') + expect(self).to receive(:warn_missing_secret).with('otp_key_base') + + create_tokens + end + + it 'deletes the .secret file' do + expect(File).to receive(:delete).with('.secret') + + create_tokens + end + end + end + + context 'when db_key_base is blank but exists in secrets.yml' do + before do + secrets.otp_key_base = 'otp_key_base' + secrets.secret_key_base = 'secret_key_base' + yaml_secrets = secrets.to_h.stringify_keys.merge('db_key_base' => '<%= an_erb_expression %>') + + allow(File).to receive(:exist?).with('.secret').and_return(false) + allow(File).to receive(:exist?).with('config/secrets.yml').and_return(true) + allow(YAML).to receive(:load_file).with('config/secrets.yml').and_return('test' => yaml_secrets) + allow(self).to receive(:warn_missing_secret) + end + + it 'warns about updating db_key_base' do + expect(self).to receive(:warn_missing_secret).with('db_key_base') + + create_tokens + end + + it 'warns about the blank value existing in secrets.yml and exits' do + expect(self).to receive(:warn) do |warning| + expect(warning).to include('db_key_base') + expect(warning).to include('<%= an_erb_expression %>') + end + + create_tokens + end + + it 'does not update secrets.yml' do + expect(self).to receive(:exit).with(1).and_call_original + expect(File).not_to receive(:write) + + expect { create_tokens }.to raise_error(SystemExit) + end + end + end +end diff --git a/spec/lib/ci/charts_spec.rb b/spec/lib/ci/charts_spec.rb index 034ea098193..fb6cc398307 100644 --- a/spec/lib/ci/charts_spec.rb +++ b/spec/lib/ci/charts_spec.rb @@ -2,21 +2,23 @@ require 'spec_helper' describe Ci::Charts, lib: true do context "build_times" do + let(:project) { create(:empty_project) } + let(:chart) { Ci::Charts::BuildTime.new(project) } + + subject { chart.build_times } + before do - @pipeline = FactoryGirl.create(:ci_pipeline) - FactoryGirl.create(:ci_build, pipeline: @pipeline) + create(:ci_empty_pipeline, project: project, duration: 120) end it 'returns build times in minutes' do - chart = Ci::Charts::BuildTime.new(@pipeline.project) - expect(chart.build_times).to eq([2]) + is_expected.to contain_exactly(2) end it 'handles nil build times' do - create(:ci_pipeline, duration: nil, project: 
@pipeline.project) + create(:ci_empty_pipeline, project: project, duration: nil) - chart = Ci::Charts::BuildTime.new(@pipeline.project) - expect(chart.build_times).to eq([2, 0]) + is_expected.to contain_exactly(2, 0) end end end diff --git a/spec/lib/ci/gitlab_ci_yaml_processor_spec.rb b/spec/lib/ci/gitlab_ci_yaml_processor_spec.rb index 85374b8761d..be51d942af7 100644 --- a/spec/lib/ci/gitlab_ci_yaml_processor_spec.rb +++ b/spec/lib/ci/gitlab_ci_yaml_processor_spec.rb @@ -19,7 +19,7 @@ module Ci expect(config_processor.builds_for_stage_and_ref(type, "master").first).to eq({ stage: "test", stage_idx: 1, - name: :rspec, + name: "rspec", commands: "pwd\nrspec", tag_list: [], options: {}, @@ -433,7 +433,7 @@ module Ci expect(config_processor.builds_for_stage_and_ref("test", "master").first).to eq({ stage: "test", stage_idx: 1, - name: :rspec, + name: "rspec", commands: "pwd\nrspec", tag_list: [], options: { @@ -461,7 +461,7 @@ module Ci expect(config_processor.builds_for_stage_and_ref("test", "master").first).to eq({ stage: "test", stage_idx: 1, - name: :rspec, + name: "rspec", commands: "pwd\nrspec", tag_list: [], options: { @@ -700,7 +700,7 @@ module Ci expect(config_processor.builds_for_stage_and_ref("test", "master").first).to eq({ stage: "test", stage_idx: 1, - name: :rspec, + name: "rspec", commands: "pwd\nrspec", tag_list: [], options: { @@ -837,7 +837,7 @@ module Ci expect(subject.first).to eq({ stage: "test", stage_idx: 1, - name: :normal_job, + name: "normal_job", commands: "test", tag_list: [], options: {}, @@ -882,7 +882,7 @@ module Ci expect(subject.first).to eq({ stage: "build", stage_idx: 0, - name: :job1, + name: "job1", commands: "execute-script-for-job", tag_list: [], options: {}, @@ -894,7 +894,7 @@ module Ci expect(subject.second).to eq({ stage: "build", stage_idx: 0, - name: :job2, + name: "job2", commands: "execute-script-for-job", tag_list: [], options: {}, diff --git a/spec/lib/extracts_path_spec.rb b/spec/lib/extracts_path_spec.rb index b12a7b98d4d..36c77206a3f 100644 --- a/spec/lib/extracts_path_spec.rb +++ b/spec/lib/extracts_path_spec.rb @@ -30,15 +30,28 @@ describe ExtractsPath, lib: true do expect(@logs_path).to eq("/#{@project.path_with_namespace}/refs/#{ref}/logs_tree/files/ruby/popen.rb") end - context 'escaped sequences in ref' do - let(:ref) { "improve%2Fawesome" } + context 'escaped slash character in ref' do + let(:ref) { 'improve%2Fawesome' } - it "id has no escape sequences" do + it 'has no escape sequences in @ref or @logs_path' do assign_ref_vars + expect(@ref).to eq('improve/awesome') expect(@logs_path).to eq("/#{@project.path_with_namespace}/refs/#{ref}/logs_tree/files/ruby/popen.rb") end end + + context 'ref contains %20' do + let(:ref) { 'foo%20bar' } + + it 'is not converted to a space in @id' do + @project.repository.add_branch(@project.owner, 'foo%20bar', 'master') + + assign_ref_vars + + expect(@id).to start_with('foo%20bar/') + end + end end describe '#extract_ref' do diff --git a/spec/lib/gitlab/badge/build/metadata_spec.rb b/spec/lib/gitlab/badge/build/metadata_spec.rb new file mode 100644 index 00000000000..ad5388215c2 --- /dev/null +++ b/spec/lib/gitlab/badge/build/metadata_spec.rb @@ -0,0 +1,37 @@ +require 'spec_helper' + +describe Gitlab::Badge::Build::Metadata do + let(:project) { create(:project) } + let(:branch) { 'master' } + let(:badge) { described_class.new(project, branch) } + + describe '#to_html' do + let(:html) { Nokogiri::HTML.parse(badge.to_html) } + let(:a_href) { html.at('a') } + + it 'points to link' do + 
expect(a_href[:href]).to eq badge.link_url + end + + it 'contains clickable image' do + expect(a_href.children.first.name).to eq 'img' + end + end + + describe '#to_markdown' do + subject { badge.to_markdown } + + it { is_expected.to include badge.image_url } + it { is_expected.to include badge.link_url } + end + + describe '#image_url' do + subject { badge.image_url } + it { is_expected.to include "badges/#{branch}/build.svg" } + end + + describe '#link_url' do + subject { badge.link_url } + it { is_expected.to include "commits/#{branch}" } + end +end diff --git a/spec/lib/gitlab/badge/build/template_spec.rb b/spec/lib/gitlab/badge/build/template_spec.rb new file mode 100644 index 00000000000..86dead3c54e --- /dev/null +++ b/spec/lib/gitlab/badge/build/template_spec.rb @@ -0,0 +1,76 @@ +require 'spec_helper' + +describe Gitlab::Badge::Build::Template do + let(:status) { 'success' } + let(:template) { described_class.new(status) } + + describe '#key_text' do + it 'is always says build' do + expect(template.key_text).to eq 'build' + end + end + + describe '#value_text' do + it 'is status value' do + expect(template.value_text).to eq 'success' + end + end + + describe 'widths and text anchors' do + it 'has fixed width and text anchors' do + expect(template.width).to eq 92 + expect(template.key_width).to eq 38 + expect(template.value_width).to eq 54 + expect(template.key_text_anchor).to eq 19 + expect(template.value_text_anchor).to eq 65 + end + end + + describe '#key_color' do + it 'is always the same' do + expect(template.key_color).to eq '#555' + end + end + + describe '#value_color' do + context 'when status is success' do + let(:status) { 'success' } + + it 'has expected color' do + expect(template.value_color).to eq '#4c1' + end + end + + context 'when status is failed' do + let(:status) { 'failed' } + + it 'has expected color' do + expect(template.value_color).to eq '#e05d44' + end + end + + context 'when status is running' do + let(:status) { 'running' } + + it 'has expected color' do + expect(template.value_color).to eq '#dfb317' + end + end + + context 'when status is unknown' do + let(:status) { 'unknown' } + + it 'has expected color' do + expect(template.value_color).to eq '#9f9f9f' + end + end + + context 'when status does not match any known statuses' do + let(:status) { 'invalid status' } + + it 'has expected color' do + expect(template.value_color).to eq '#9f9f9f' + end + end + end +end diff --git a/spec/lib/gitlab/badge/build_spec.rb b/spec/lib/gitlab/badge/build_spec.rb index f3b522a02f5..bb8144d5122 100644 --- a/spec/lib/gitlab/badge/build_spec.rb +++ b/spec/lib/gitlab/badge/build_spec.rb @@ -6,39 +6,17 @@ describe Gitlab::Badge::Build do let(:branch) { 'master' } let(:badge) { described_class.new(project, branch) } - describe '#type' do - subject { badge.type } - it { is_expected.to eq 'image/svg+xml' } - end - - describe '#to_html' do - let(:html) { Nokogiri::HTML.parse(badge.to_html) } - let(:a_href) { html.at('a') } - - it 'points to link' do - expect(a_href[:href]).to eq badge.link_url - end - - it 'contains clickable image' do - expect(a_href.children.first.name).to eq 'img' + describe '#metadata' do + it 'returns badge metadata' do + expect(badge.metadata.image_url) + .to include 'badges/master/build.svg' end end - describe '#to_markdown' do - subject { badge.to_markdown } - - it { is_expected.to include badge.image_url } - it { is_expected.to include badge.link_url } - end - - describe '#image_url' do - subject { badge.image_url } - it { is_expected.to include 
"badges/#{branch}/build.svg" } - end - - describe '#link_url' do - subject { badge.link_url } - it { is_expected.to include "commits/#{branch}" } + describe '#key_text' do + it 'always says build' do + expect(badge.key_text).to eq 'build' + end end context 'build exists' do @@ -47,16 +25,15 @@ describe Gitlab::Badge::Build do context 'build success' do before { build.success! } - describe '#to_s' do - subject { badge.to_s } - it { is_expected.to eq 'build-success' } + describe '#status' do + it 'is successful' do + expect(badge.status).to eq 'success' + end end - describe '#data' do - let(:data) { badge.data } - - it 'contains information about success' do - expect(status_node(data, 'success')).to be_truthy + describe '#value_text' do + it 'returns correct value text' do + expect(badge.value_text).to eq 'success' end end end @@ -64,60 +41,66 @@ describe Gitlab::Badge::Build do context 'build failed' do before { build.drop! } - describe '#to_s' do - subject { badge.to_s } - it { is_expected.to eq 'build-failed' } + describe '#status' do + it 'failed' do + expect(badge.status).to eq 'failed' + end end - describe '#data' do - let(:data) { badge.data } - - it 'contains information about failure' do - expect(status_node(data, 'failed')).to be_truthy + describe '#value_text' do + it 'has correct value text' do + expect(badge.value_text).to eq 'failed' end end end - end - context 'build does not exist' do - describe '#to_s' do - subject { badge.to_s } - it { is_expected.to eq 'build-unknown' } + context 'when outdated pipeline for given ref exists' do + before do + build.success! + + old_build = create_build(project, '11eeffdd', branch) + old_build.drop! + end + + it 'does not take outdated pipeline into account' do + expect(badge.status).to eq 'success' + end end - describe '#data' do - let(:data) { badge.data } + context 'when multiple pipelines exist for given sha' do + before do + build.drop! + + new_build = create_build(project, sha, branch) + new_build.success! + end - it 'contains infromation about unknown build' do - expect(status_node(data, 'unknown')).to be_truthy + it 'reports the compound status' do + expect(badge.status).to eq 'failed' end end end - context 'when outdated pipeline for given ref exists' do - before do - build = create_build(project, sha, branch) - build.success! - - old_build = create_build(project, '11eeffdd', branch) - old_build.drop! 
+ context 'build does not exist' do + describe '#status' do + it 'is unknown' do + expect(badge.status).to eq 'unknown' + end end - it 'does not take outdated pipeline into account' do - expect(badge.to_s).to eq 'build-success' + describe '#value_text' do + it 'has correct value text' do + expect(badge.value_text).to eq 'unknown' + end end end def create_build(project, sha, branch) - pipeline = create(:ci_pipeline, project: project, - sha: sha, - ref: branch) + pipeline = create(:ci_empty_pipeline, + project: project, + sha: sha, + ref: branch) create(:ci_build, pipeline: pipeline, stage: 'notify') end - - def status_node(data, status) - xml = Nokogiri::XML.parse(data) - xml.at(%Q{text:contains("#{status}")}) - end end diff --git a/spec/lib/gitlab/changes_list_spec.rb b/spec/lib/gitlab/changes_list_spec.rb new file mode 100644 index 00000000000..69d86144e32 --- /dev/null +++ b/spec/lib/gitlab/changes_list_spec.rb @@ -0,0 +1,30 @@ +require "spec_helper" + +describe Gitlab::ChangesList do + let(:valid_changes_string) { "\n000000 570e7b2 refs/heads/my_branch\nd14d6c 6fd24d refs/heads/master" } + let(:invalid_changes) { 1 } + + context 'when changes is a valid string' do + let(:changes_list) { Gitlab::ChangesList.new(valid_changes_string) } + + it 'splits elements by newline character' do + expect(changes_list).to contain_exactly({ + oldrev: "000000", + newrev: "570e7b2", + ref: "refs/heads/my_branch" + }, { + oldrev: "d14d6c", + newrev: "6fd24d", + ref: "refs/heads/master" + }) + end + + it 'behaves like a list' do + expect(changes_list.first).to eq({ + oldrev: "000000", + newrev: "570e7b2", + ref: "refs/heads/my_branch" + }) + end + end +end diff --git a/spec/lib/gitlab/checks/change_access_spec.rb b/spec/lib/gitlab/checks/change_access_spec.rb new file mode 100644 index 00000000000..39069b49978 --- /dev/null +++ b/spec/lib/gitlab/checks/change_access_spec.rb @@ -0,0 +1,99 @@ +require 'spec_helper' + +describe Gitlab::Checks::ChangeAccess, lib: true do + describe '#exec' do + let(:user) { create(:user) } + let(:project) { create(:project) } + let(:user_access) { Gitlab::UserAccess.new(user, project: project) } + let(:changes) do + { + oldrev: 'be93687618e4b132087f430a4d8fc3a609c9b77c', + newrev: '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51', + ref: 'refs/heads/master' + } + end + + subject { described_class.new(changes, project: project, user_access: user_access).exec } + + before { allow(user_access).to receive(:can_do_action?).with(:push_code).and_return(true) } + + context 'without failed checks' do + it "doesn't return any error" do + expect(subject.status).to be(true) + end + end + + context 'when the user is not allowed to push code' do + it 'returns an error' do + expect(user_access).to receive(:can_do_action?).with(:push_code).and_return(false) + + expect(subject.status).to be(false) + expect(subject.message).to eq('You are not allowed to push code to this project.') + end + end + + context 'tags check' do + let(:changes) do + { + oldrev: 'be93687618e4b132087f430a4d8fc3a609c9b77c', + newrev: '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51', + ref: 'refs/tags/v1.0.0' + } + end + + it 'returns an error if the user is not allowed to update tags' do + expect(user_access).to receive(:can_do_action?).with(:admin_project).and_return(false) + + expect(subject.status).to be(false) + expect(subject.message).to eq('You are not allowed to change existing tags on this project.') + end + end + + context 'protected branches check' do + before do + allow(project).to 
receive(:protected_branch?).with('master').and_return(true) + end + + it 'returns an error if the user is not allowed to do forced pushes to protected branches' do + expect(Gitlab::Checks::ForcePush).to receive(:force_push?).and_return(true) + expect(user_access).to receive(:can_do_action?).with(:force_push_code_to_protected_branches).and_return(false) + + expect(subject.status).to be(false) + expect(subject.message).to eq('You are not allowed to force push code to a protected branch on this project.') + end + + it 'returns an error if the user is not allowed to merge to protected branches' do + expect_any_instance_of(Gitlab::Checks::MatchingMergeRequest).to receive(:match?).and_return(true) + expect(user_access).to receive(:can_merge_to_branch?).and_return(false) + expect(user_access).to receive(:can_push_to_branch?).and_return(false) + + expect(subject.status).to be(false) + expect(subject.message).to eq('You are not allowed to merge code into protected branches on this project.') + end + + it 'returns an error if the user is not allowed to push to protected branches' do + expect(user_access).to receive(:can_push_to_branch?).and_return(false) + + expect(subject.status).to be(false) + expect(subject.message).to eq('You are not allowed to push code to protected branches on this project.') + end + + context 'branch deletion' do + let(:changes) do + { + oldrev: 'be93687618e4b132087f430a4d8fc3a609c9b77c', + newrev: '0000000000000000000000000000000000000000', + ref: 'refs/heads/master' + } + end + + it 'returns an error if the user is not allowed to delete protected branches' do + expect(user_access).to receive(:can_do_action?).with(:remove_protected_branches).and_return(false) + + expect(subject.status).to be(false) + expect(subject.message).to eq('You are not allowed to delete protected branches from this project.') + end + end + end + end +end diff --git a/spec/models/blob_spec.rb b/spec/models/blob_spec.rb index 1e5d6a34f83..cee20234e1f 100644 --- a/spec/models/blob_spec.rb +++ b/spec/models/blob_spec.rb @@ -94,4 +94,26 @@ describe Blob do expect(blob.to_partial_path).to eq 'download' end end + + describe '#size_within_svg_limits?' 
do + let(:blob) { described_class.decorate(double(:blob)) } + + it 'returns true when the blob size is smaller than the SVG limit' do + expect(blob).to receive(:size).and_return(42) + + expect(blob.size_within_svg_limits?).to eq(true) + end + + it 'returns true when the blob size is equal to the SVG limit' do + expect(blob).to receive(:size).and_return(Blob::MAXIMUM_SVG_SIZE) + + expect(blob.size_within_svg_limits?).to eq(true) + end + + it 'returns false when the blob size is larger than the SVG limit' do + expect(blob).to receive(:size).and_return(1.terabyte) + + expect(blob.size_within_svg_limits?).to eq(false) + end + end end diff --git a/spec/models/build_spec.rb b/spec/models/build_spec.rb index 1812c5b9567..ef55c597642 100644 --- a/spec/models/build_spec.rb +++ b/spec/models/build_spec.rb @@ -764,6 +764,53 @@ describe Ci::Build, models: true do end end + describe '#when' do + subject { build.when } + + context 'if is undefined' do + before do + build.when = nil + end + + context 'use from gitlab-ci.yml' do + before do + stub_ci_pipeline_yaml_file(config) + end + + context 'if config is not found' do + let(:config) { nil } + + it { is_expected.to eq('on_success') } + end + + context 'if config does not have a questioned job' do + let(:config) do + YAML.dump({ + test_other: { + script: 'Hello World' + } + }) + end + + it { is_expected.to eq('on_success') } + end + + context 'if config has when' do + let(:config) do + YAML.dump({ + test: { + script: 'Hello World', + when: 'always' + } + }) + end + + it { is_expected.to eq('always') } + end + end + end + end + describe '#retryable?' do context 'when build is running' do before do @@ -839,8 +886,10 @@ describe Ci::Build, models: true do is_expected.to eq(build) end - context 'for success build' do - before { build.queue } + context 'for successful build' do + before do + build.update(status: 'success') + end it 'creates a new build' do is_expected.to be_pending diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb index 2dbc3d985b0..25c3f78c3e1 100644 --- a/spec/models/ci/pipeline_spec.rb +++ b/spec/models/ci/pipeline_spec.rb @@ -1,8 +1,8 @@ require 'spec_helper' describe Ci::Pipeline, models: true do - let(:project) { create(:empty_project) } - let(:pipeline) { create(:ci_pipeline, project: project) } + let(:project) { FactoryGirl.create :empty_project } + let(:pipeline) { FactoryGirl.create :ci_empty_pipeline, project: project } it { is_expected.to belong_to(:project) } it { is_expected.to belong_to(:user) } @@ -38,9 +38,6 @@ describe Ci::Pipeline, models: true do it { expect(pipeline.sha).to start_with(subject) } end - describe '#create_next_builds' do - end - describe '#retried' do subject { pipeline.retried } @@ -54,312 +51,9 @@ describe Ci::Pipeline, models: true do end end - describe '#create_builds' do - let!(:pipeline) { FactoryGirl.create :ci_pipeline, project: project, ref: 'master', tag: false } - - def create_builds(trigger_request = nil) - pipeline.create_builds(nil, trigger_request) - end - - def create_next_builds - pipeline.create_next_builds(pipeline.builds.order(:id).last) - end - - it 'creates builds' do - expect(create_builds).to be_truthy - pipeline.builds.update_all(status: "success") - expect(pipeline.builds.count(:all)).to eq(2) - - expect(create_next_builds).to be_truthy - pipeline.builds.update_all(status: "success") - expect(pipeline.builds.count(:all)).to eq(4) - - expect(create_next_builds).to be_truthy - pipeline.builds.update_all(status: "success") - 
expect(pipeline.builds.count(:all)).to eq(5) - - expect(create_next_builds).to be_falsey - end - - context 'custom stage with first job allowed to fail' do - let(:yaml) do - { - stages: ['clean', 'test'], - clean_job: { - stage: 'clean', - allow_failure: true, - script: 'BUILD', - }, - test_job: { - stage: 'test', - script: 'TEST', - }, - } - end - - before do - stub_ci_pipeline_yaml_file(YAML.dump(yaml)) - create_builds - end - - it 'properly schedules builds' do - expect(pipeline.builds.pluck(:status)).to contain_exactly('pending') - pipeline.builds.running_or_pending.each(&:drop) - expect(pipeline.builds.pluck(:status)).to contain_exactly('pending', 'failed') - end - end - - context 'properly creates builds when "when" is defined' do - let(:yaml) do - { - stages: ["build", "test", "test_failure", "deploy", "cleanup"], - build: { - stage: "build", - script: "BUILD", - }, - test: { - stage: "test", - script: "TEST", - }, - test_failure: { - stage: "test_failure", - script: "ON test failure", - when: "on_failure", - }, - deploy: { - stage: "deploy", - script: "PUBLISH", - }, - cleanup: { - stage: "cleanup", - script: "TIDY UP", - when: "always", - } - } - end - - before do - stub_ci_pipeline_yaml_file(YAML.dump(yaml)) - end - - context 'when builds are successful' do - it 'properly creates builds' do - expect(create_builds).to be_truthy - expect(pipeline.builds.pluck(:name)).to contain_exactly('build') - expect(pipeline.builds.pluck(:status)).to contain_exactly('pending') - pipeline.builds.running_or_pending.each(&:success) - - expect(pipeline.builds.pluck(:name)).to contain_exactly('build', 'test') - expect(pipeline.builds.pluck(:status)).to contain_exactly('success', 'pending') - pipeline.builds.running_or_pending.each(&:success) - - expect(pipeline.builds.pluck(:name)).to contain_exactly('build', 'test', 'deploy') - expect(pipeline.builds.pluck(:status)).to contain_exactly('success', 'success', 'pending') - pipeline.builds.running_or_pending.each(&:success) - - expect(pipeline.builds.pluck(:name)).to contain_exactly('build', 'test', 'deploy', 'cleanup') - expect(pipeline.builds.pluck(:status)).to contain_exactly('success', 'success', 'success', 'pending') - pipeline.builds.running_or_pending.each(&:success) - - expect(pipeline.builds.pluck(:status)).to contain_exactly('success', 'success', 'success', 'success') - pipeline.reload - expect(pipeline.status).to eq('success') - end - end - - context 'when test job fails' do - it 'properly creates builds' do - expect(create_builds).to be_truthy - expect(pipeline.builds.pluck(:name)).to contain_exactly('build') - expect(pipeline.builds.pluck(:status)).to contain_exactly('pending') - pipeline.builds.running_or_pending.each(&:success) - - expect(pipeline.builds.pluck(:name)).to contain_exactly('build', 'test') - expect(pipeline.builds.pluck(:status)).to contain_exactly('success', 'pending') - pipeline.builds.running_or_pending.each(&:drop) - - expect(pipeline.builds.pluck(:name)).to contain_exactly('build', 'test', 'test_failure') - expect(pipeline.builds.pluck(:status)).to contain_exactly('success', 'failed', 'pending') - pipeline.builds.running_or_pending.each(&:success) - - expect(pipeline.builds.pluck(:name)).to contain_exactly('build', 'test', 'test_failure', 'cleanup') - expect(pipeline.builds.pluck(:status)).to contain_exactly('success', 'failed', 'success', 'pending') - pipeline.builds.running_or_pending.each(&:success) - - expect(pipeline.builds.pluck(:status)).to contain_exactly('success', 'failed', 'success', 'success') - 
pipeline.reload - expect(pipeline.status).to eq('failed') - end - end - - context 'when test and test_failure jobs fail' do - it 'properly creates builds' do - expect(create_builds).to be_truthy - expect(pipeline.builds.pluck(:name)).to contain_exactly('build') - expect(pipeline.builds.pluck(:status)).to contain_exactly('pending') - pipeline.builds.running_or_pending.each(&:success) - - expect(pipeline.builds.pluck(:name)).to contain_exactly('build', 'test') - expect(pipeline.builds.pluck(:status)).to contain_exactly('success', 'pending') - pipeline.builds.running_or_pending.each(&:drop) - - expect(pipeline.builds.pluck(:name)).to contain_exactly('build', 'test', 'test_failure') - expect(pipeline.builds.pluck(:status)).to contain_exactly('success', 'failed', 'pending') - pipeline.builds.running_or_pending.each(&:drop) - - expect(pipeline.builds.pluck(:name)).to contain_exactly('build', 'test', 'test_failure', 'cleanup') - expect(pipeline.builds.pluck(:status)).to contain_exactly('success', 'failed', 'failed', 'pending') - pipeline.builds.running_or_pending.each(&:success) - - expect(pipeline.builds.pluck(:name)).to contain_exactly('build', 'test', 'test_failure', 'cleanup') - expect(pipeline.builds.pluck(:status)).to contain_exactly('success', 'failed', 'failed', 'success') - pipeline.reload - expect(pipeline.status).to eq('failed') - end - end - - context 'when deploy job fails' do - it 'properly creates builds' do - expect(create_builds).to be_truthy - expect(pipeline.builds.pluck(:name)).to contain_exactly('build') - expect(pipeline.builds.pluck(:status)).to contain_exactly('pending') - pipeline.builds.running_or_pending.each(&:success) - - expect(pipeline.builds.pluck(:name)).to contain_exactly('build', 'test') - expect(pipeline.builds.pluck(:status)).to contain_exactly('success', 'pending') - pipeline.builds.running_or_pending.each(&:success) - - expect(pipeline.builds.pluck(:name)).to contain_exactly('build', 'test', 'deploy') - expect(pipeline.builds.pluck(:status)).to contain_exactly('success', 'success', 'pending') - pipeline.builds.running_or_pending.each(&:drop) - - expect(pipeline.builds.pluck(:name)).to contain_exactly('build', 'test', 'deploy', 'cleanup') - expect(pipeline.builds.pluck(:status)).to contain_exactly('success', 'success', 'failed', 'pending') - pipeline.builds.running_or_pending.each(&:success) - - expect(pipeline.builds.pluck(:status)).to contain_exactly('success', 'success', 'failed', 'success') - pipeline.reload - expect(pipeline.status).to eq('failed') - end - end - - context 'when build is canceled in the second stage' do - it 'does not schedule builds after build has been canceled' do - expect(create_builds).to be_truthy - expect(pipeline.builds.pluck(:name)).to contain_exactly('build') - expect(pipeline.builds.pluck(:status)).to contain_exactly('pending') - pipeline.builds.running_or_pending.each(&:success) - - expect(pipeline.builds.running_or_pending).not_to be_empty - - expect(pipeline.builds.pluck(:name)).to contain_exactly('build', 'test') - expect(pipeline.builds.pluck(:status)).to contain_exactly('success', 'pending') - pipeline.builds.running_or_pending.each(&:cancel) - - expect(pipeline.builds.running_or_pending).to be_empty - expect(pipeline.reload.status).to eq('canceled') - end - end - - context 'when listing manual actions' do - let(:yaml) do - { - stages: ["build", "test", "staging", "production", "cleanup"], - build: { - stage: "build", - script: "BUILD", - }, - test: { - stage: "test", - script: "TEST", - }, - staging: { - stage: 
"staging", - script: "PUBLISH", - }, - production: { - stage: "production", - script: "PUBLISH", - when: "manual", - }, - cleanup: { - stage: "cleanup", - script: "TIDY UP", - when: "always", - }, - clear_cache: { - stage: "cleanup", - script: "CLEAR CACHE", - when: "manual", - } - } - end - - it 'returns only for skipped builds' do - # currently all builds are created - expect(create_builds).to be_truthy - expect(manual_actions).to be_empty - - # succeed stage build - pipeline.builds.running_or_pending.each(&:success) - expect(manual_actions).to be_empty - - # succeed stage test - pipeline.builds.running_or_pending.each(&:success) - expect(manual_actions).to be_empty - - # succeed stage staging and skip stage production - pipeline.builds.running_or_pending.each(&:success) - expect(manual_actions).to be_many # production and clear cache - - # succeed stage cleanup - pipeline.builds.running_or_pending.each(&:success) - - # after processing a pipeline we should have 6 builds, 5 succeeded - expect(pipeline.builds.count).to eq(6) - expect(pipeline.builds.success.count).to eq(4) - end - - def manual_actions - pipeline.manual_actions - end - end - end - - context 'when no builds created' do - let(:pipeline) { build(:ci_pipeline) } - - before do - stub_ci_pipeline_yaml_file(YAML.dump(before_script: ['ls'])) - end - - it 'returns false' do - expect(pipeline.create_builds(nil)).to be_falsey - expect(pipeline).not_to be_persisted - end - end - end - - describe "#finished_at" do - let(:pipeline) { FactoryGirl.create :ci_pipeline } - - it "returns finished_at of latest build" do - build = FactoryGirl.create :ci_build, pipeline: pipeline, finished_at: Time.now - 60 - FactoryGirl.create :ci_build, pipeline: pipeline, finished_at: Time.now - 120 - - expect(pipeline.finished_at.to_i).to eq(build.finished_at.to_i) - end - - it "returns nil if there is no finished build" do - FactoryGirl.create :ci_not_started_build, pipeline: pipeline - - expect(pipeline.finished_at).to be_nil - end - end - describe "coverage" do let(:project) { FactoryGirl.create :empty_project, build_coverage_regex: "/.*/" } - let(:pipeline) { FactoryGirl.create :ci_pipeline, project: project } + let(:pipeline) { FactoryGirl.create :ci_empty_pipeline, project: project } it "calculates average when there are two builds with coverage" do FactoryGirl.create :ci_build, name: "rspec", coverage: 30, pipeline: pipeline @@ -426,33 +120,47 @@ describe Ci::Pipeline, models: true do end end - describe '#update_state' do - it 'executes update_state after touching object' do - expect(pipeline).to receive(:update_state).and_return(true) - pipeline.touch + describe 'state machine' do + let(:current) { Time.now.change(usec: 0) } + let(:build) { create :ci_build, name: 'build1', pipeline: pipeline, started_at: current - 60, finished_at: current } + let(:build2) { create :ci_build, name: 'build2', pipeline: pipeline, started_at: current - 60, finished_at: current } + + describe '#duration' do + before do + build.skip + build2.skip + end + + it 'matches sum of builds duration' do + expect(pipeline.reload.duration).to eq(build.duration + build2.duration) + end end - context 'dependent objects' do - let(:commit_status) { build :commit_status, pipeline: pipeline } + describe '#started_at' do + it 'updates on transitioning to running' do + build.run - it 'executes update_state after saving dependent object' do - expect(pipeline).to receive(:update_state).and_return(true) - commit_status.save + expect(pipeline.reload.started_at).not_to be_nil + end + + it 'does 
not update on transitioning to success' do + build.success + + expect(pipeline.reload.started_at).to be_nil end end - context 'update state' do - let(:current) { Time.now.change(usec: 0) } - let(:build) { FactoryGirl.create :ci_build, :success, pipeline: pipeline, started_at: current - 120, finished_at: current - 60 } + describe '#finished_at' do + it 'updates on transitioning to success' do + build.success - before do - build + expect(pipeline.reload.finished_at).not_to be_nil end - [:status, :started_at, :finished_at, :duration].each do |param| - it "update #{param}" do - expect(pipeline.send(param)).to eq(build.send(param)) - end + it 'does not update on transitioning to running' do + build.run + + expect(pipeline.reload.finished_at).to be_nil end end end @@ -572,4 +280,64 @@ describe Ci::Pipeline, models: true do end end end + + describe '#status' do + let!(:build) { create(:ci_build, :created, pipeline: pipeline, name: 'test') } + + subject { pipeline.reload.status } + + context 'on queuing' do + before do + build.enqueue + end + + it { is_expected.to eq('pending') } + end + + context 'on run' do + before do + build.enqueue + build.run + end + + it { is_expected.to eq('running') } + end + + context 'on drop' do + before do + build.drop + end + + it { is_expected.to eq('failed') } + end + + context 'on success' do + before do + build.success + end + + it { is_expected.to eq('success') } + end + + context 'on cancel' do + before do + build.cancel + end + + it { is_expected.to eq('canceled') } + end + + context 'on failure and build retry' do + before do + build.drop + Ci::Build.retry(build) + end + + # We are changing a state: created > failed > running + # Instead of: created > failed > pending + # Since the pipeline already run, so it should not be pending anymore + + it { is_expected.to eq('running') } + end + end end diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb index 7df3df4bb9e..bfff639ad78 100644 --- a/spec/models/deployment_spec.rb +++ b/spec/models/deployment_spec.rb @@ -15,4 +15,28 @@ describe Deployment, models: true do it { is_expected.to validate_presence_of(:ref) } it { is_expected.to validate_presence_of(:sha) } + + describe '#includes_commit?' do + let(:project) { create(:project) } + let(:environment) { create(:environment, project: project) } + let(:deployment) do + create(:deployment, environment: environment, sha: project.commit.id) + end + + context 'when there is no project commit' do + it 'returns false' do + commit = project.commit('feature') + + expect(deployment.includes_commit?(commit)).to be false + end + end + + context 'when they share the same tree branch' do + it 'returns true' do + commit = project.commit + + expect(deployment.includes_commit?(commit)).to be true + end + end + end end diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb index 8a84ac0a7c7..c881897926e 100644 --- a/spec/models/environment_spec.rb +++ b/spec/models/environment_spec.rb @@ -30,4 +30,37 @@ describe Environment, models: true do expect(env.external_url).to be_nil end end + + describe '#includes_commit?' 
do + context 'without a last deployment' do + it "returns false" do + expect(environment.includes_commit?('HEAD')).to be false + end + end + + context 'with a last deployment' do + let(:project) { create(:project) } + let(:environment) { create(:environment, project: project) } + + let!(:deployment) do + create(:deployment, environment: environment, sha: project.commit('master').id) + end + + context 'in the same branch' do + it 'returns true' do + expect(environment.includes_commit?(RepoHelpers.sample_commit)).to be true + end + end + + context 'not in the same branch' do + before do + deployment.update(sha: project.commit('feature').id) + end + + it 'returns false' do + expect(environment.includes_commit?(RepoHelpers.sample_commit)).to be false + end + end + end + end end diff --git a/spec/models/hooks/system_hook_spec.rb b/spec/models/hooks/system_hook_spec.rb index 4078b9e4ff5..cbdf7eec082 100644 --- a/spec/models/hooks/system_hook_spec.rb +++ b/spec/models/hooks/system_hook_spec.rb @@ -38,7 +38,7 @@ describe SystemHook, models: true do end it "project_destroy hook" do - Projects::DestroyService.new(project, user, {}).pending_delete! + Projects::DestroyService.new(project, user, {}).async_execute expect(WebMock).to have_requested(:post, system_hook.url).with( body: /project_destroy/, diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb index 44cd3c08718..2277f4e13bf 100644 --- a/spec/models/member_spec.rb +++ b/spec/models/member_spec.rb @@ -10,7 +10,7 @@ describe Member, models: true do it { is_expected.to validate_presence_of(:user) } it { is_expected.to validate_presence_of(:source) } - it { is_expected.to validate_inclusion_of(:access_level).in_array(Gitlab::Access.values) } + it { is_expected.to validate_inclusion_of(:access_level).in_array(Gitlab::Access.all_values) } it_behaves_like 'an object with email-formated attributes', :invite_email do subject { build(:project_member) } diff --git a/spec/models/members/project_member_spec.rb b/spec/models/members/project_member_spec.rb index 28673de3189..913d74645a7 100644 --- a/spec/models/members/project_member_spec.rb +++ b/spec/models/members/project_member_spec.rb @@ -27,6 +27,7 @@ describe ProjectMember, models: true do describe 'validations' do it { is_expected.to allow_value('Project').for(:source_type) } it { is_expected.not_to allow_value('project').for(:source_type) } + it { is_expected.to validate_inclusion_of(:access_level).in_array(Gitlab::Access.values) } end describe 'modules' do @@ -40,7 +41,7 @@ describe ProjectMember, models: true do end describe "#destroy" do - let(:owner) { create(:project_member, access_level: ProjectMember::OWNER) } + let(:owner) { create(:project_member, access_level: ProjectMember::MASTER) } let(:project) { owner.project } let(:master) { create(:project_member, project: project) } diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb index 11cfc7cace1..811e474226f 100644 --- a/spec/models/merge_request_spec.rb +++ b/spec/models/merge_request_spec.rb @@ -674,6 +674,21 @@ describe MergeRequest, models: true do end end + describe "#environments" do + let(:project) { create(:project) } + let!(:environment) { create(:environment, project: project) } + let!(:environment1) { create(:environment, project: project) } + let!(:environment2) { create(:environment, project: project) } + let(:merge_request) { create(:merge_request, source_project: project) } + + it 'selects deployed environments' do + create(:deployment, environment: environment, sha: 
project.commit('master').id) + create(:deployment, environment: environment1, sha: project.commit('feature').id) + + expect(merge_request.environments).to eq [environment] + end + end + describe "#reload_diff" do let(:note) { create(:diff_note_on_merge_request, project: subject.project, noteable: subject) } diff --git a/spec/models/project_services/campfire_service_spec.rb b/spec/models/project_services/campfire_service_spec.rb index 3e6da42803b..1adf93258f3 100644 --- a/spec/models/project_services/campfire_service_spec.rb +++ b/spec/models/project_services/campfire_service_spec.rb @@ -39,4 +39,62 @@ describe CampfireService, models: true do it { is_expected.not_to validate_presence_of(:token) } end end + + describe "#execute" do + let(:user) { create(:user) } + let(:project) { create(:project) } + + before do + @campfire_service = CampfireService.new + allow(@campfire_service).to receive_messages( + project_id: project.id, + project: project, + service_hook: true, + token: 'verySecret', + subdomain: 'project-name', + room: 'test-room' + ) + @sample_data = Gitlab::PushDataBuilder.build_sample(project, user) + @rooms_url = 'https://verySecret:X@project-name.campfirenow.com/rooms.json' + @headers = { 'Content-Type' => 'application/json; charset=utf-8' } + end + + it "calls Campfire API to get a list of rooms and speak in a room" do + # make sure a valid list of rooms is returned + body = File.read(Rails.root + 'spec/fixtures/project_services/campfire/rooms.json') + WebMock.stub_request(:get, @rooms_url).to_return( + body: body, + status: 200, + headers: @headers + ) + # stub the speak request with the room id found in the previous request's response + speak_url = 'https://verySecret:X@project-name.campfirenow.com/room/123/speak.json' + WebMock.stub_request(:post, speak_url) + + @campfire_service.execute(@sample_data) + + expect(WebMock).to have_requested(:get, @rooms_url).once + expect(WebMock).to have_requested(:post, speak_url).with( + body: /#{project.path}.*#{@sample_data[:before]}.*#{@sample_data[:after]}/ + ).once + end + + it "calls Campfire API to get a list of rooms but shouldn't speak in a room" do + # return a list of rooms that do not contain a room named 'test-room' + body = File.read(Rails.root + 'spec/fixtures/project_services/campfire/rooms2.json') + WebMock.stub_request(:get, @rooms_url).to_return( + body: body, + status: 200, + headers: @headers + ) + # we want to make sure no request is sent to the /speak endpoint, here is a basic + # regexp that matches this endpoint + speak_url = 'https://verySecret:X@project-name.campfirenow.com/room/.*/speak.json' + + @campfire_service.execute(@sample_data) + + expect(WebMock).to have_requested(:get, @rooms_url).once + expect(WebMock).not_to have_requested(:post, /#{speak_url}/) + end + end end diff --git a/spec/models/project_services/hipchat_service_spec.rb b/spec/models/project_services/hipchat_service_spec.rb index bf438b26690..1b383219eb9 100644 --- a/spec/models/project_services/hipchat_service_spec.rb +++ b/spec/models/project_services/hipchat_service_spec.rb @@ -291,7 +291,8 @@ describe HipchatService, models: true do end context 'build events' do - let(:build) { create(:ci_build) } + let(:pipeline) { create(:ci_empty_pipeline) } + let(:build) { create(:ci_build, pipeline: pipeline) } let(:data) { Gitlab::BuildDataBuilder.build(build) } context 'for failed' do diff --git a/spec/models/project_services/pivotaltracker_service_spec.rb b/spec/models/project_services/pivotaltracker_service_spec.rb index f37edd4d970..d098d988521 
100644 --- a/spec/models/project_services/pivotaltracker_service_spec.rb +++ b/spec/models/project_services/pivotaltracker_service_spec.rb @@ -39,4 +39,75 @@ describe PivotaltrackerService, models: true do it { is_expected.not_to validate_presence_of(:token) } end end + + describe 'Execute' do + let(:service) do + PivotaltrackerService.new.tap do |service| + service.token = 'secret_api_token' + end + end + + let(:url) { PivotaltrackerService::API_ENDPOINT } + + def push_data(branch: 'master') + { + object_kind: 'push', + ref: "refs/heads/#{branch}", + commits: [ + { + id: '21c12ea', + author: { + name: 'Some User' + }, + url: 'https://example.com/commit', + message: 'commit message', + } + ] + } + end + + before do + WebMock.stub_request(:post, url) + end + + it 'should post correct message' do + service.execute(push_data) + expect(WebMock).to have_requested(:post, url).with( + body: { + 'source_commit' => { + 'commit_id' => '21c12ea', + 'author' => 'Some User', + 'url' => 'https://example.com/commit', + 'message' => 'commit message' + } + }, + headers: { + 'Content-Type' => 'application/json', + 'X-TrackerToken' => 'secret_api_token' + } + ).once + end + + context 'when allowed branches is specified' do + let(:service) do + super().tap do |service| + service.restrict_to_branch = 'master,v10' + end + end + + it 'should post message if branch is in the list' do + service.execute(push_data(branch: 'master')) + service.execute(push_data(branch: 'v10')) + + expect(WebMock).to have_requested(:post, url).twice + end + + it 'should not post message if branch is not in the list' do + service.execute(push_data(branch: 'mas')) + service.execute(push_data(branch: 'v11')) + + expect(WebMock).not_to have_requested(:post, url) + end + end + end end diff --git a/spec/requests/api/access_requests_spec.rb b/spec/requests/api/access_requests_spec.rb new file mode 100644 index 00000000000..d78494b76fa --- /dev/null +++ b/spec/requests/api/access_requests_spec.rb @@ -0,0 +1,246 @@ +require 'spec_helper' + +describe API::AccessRequests, api: true do + include ApiHelpers + + let(:master) { create(:user) } + let(:developer) { create(:user) } + let(:access_requester) { create(:user) } + let(:stranger) { create(:user) } + + let(:project) do + project = create(:project, :public, creator_id: master.id, namespace: master.namespace) + project.team << [developer, :developer] + project.team << [master, :master] + project.request_access(access_requester) + project + end + + let(:group) do + group = create(:group, :public) + group.add_developer(developer) + group.add_owner(master) + group.request_access(access_requester) + group + end + + shared_examples 'GET /:sources/:id/access_requests' do |source_type| + context "with :sources == #{source_type.pluralize}" do + it_behaves_like 'a 404 response when source is private' do + let(:route) { get api("/#{source_type.pluralize}/#{source.id}/access_requests", stranger) } + end + + context 'when authenticated as a non-master/owner' do + %i[developer access_requester stranger].each do |type| + context "as a #{type}" do + it 'returns 403' do + user = public_send(type) + get api("/#{source_type.pluralize}/#{source.id}/access_requests", user) + + expect(response).to have_http_status(403) + end + end + end + end + + context 'when authenticated as a master/owner' do + it 'returns access requesters' do + get api("/#{source_type.pluralize}/#{source.id}/access_requests", master) + + expect(response).to have_http_status(200) + expect(json_response).to be_an Array + 
expect(json_response.size).to eq(1) + end + end + end + end + + shared_examples 'POST /:sources/:id/access_requests' do |source_type| + context "with :sources == #{source_type.pluralize}" do + it_behaves_like 'a 404 response when source is private' do + let(:route) { post api("/#{source_type.pluralize}/#{source.id}/access_requests", stranger) } + end + + context 'when authenticated as a member' do + %i[developer master].each do |type| + context "as a #{type}" do + it 'returns 400' do + expect do + user = public_send(type) + post api("/#{source_type.pluralize}/#{source.id}/access_requests", user) + + expect(response).to have_http_status(400) + end.not_to change { source.requesters.count } + end + end + end + end + + context 'when authenticated as an access requester' do + it 'returns 400' do + expect do + post api("/#{source_type.pluralize}/#{source.id}/access_requests", access_requester) + + expect(response).to have_http_status(400) + end.not_to change { source.requesters.count } + end + end + + context 'when authenticated as a stranger' do + it 'returns 201' do + expect do + post api("/#{source_type.pluralize}/#{source.id}/access_requests", stranger) + + expect(response).to have_http_status(201) + end.to change { source.requesters.count }.by(1) + + # User attributes + expect(json_response['id']).to eq(stranger.id) + expect(json_response['name']).to eq(stranger.name) + expect(json_response['username']).to eq(stranger.username) + expect(json_response['state']).to eq(stranger.state) + expect(json_response['avatar_url']).to eq(stranger.avatar_url) + expect(json_response['web_url']).to eq(Gitlab::Routing.url_helpers.user_url(stranger)) + + # Member attributes + expect(json_response['requested_at']).to be_present + end + end + end + end + + shared_examples 'PUT /:sources/:id/access_requests/:user_id/approve' do |source_type| + context "with :sources == #{source_type.pluralize}" do + it_behaves_like 'a 404 response when source is private' do + let(:route) { put api("/#{source_type.pluralize}/#{source.id}/access_requests/#{access_requester.id}/approve", stranger) } + end + + context 'when authenticated as a non-master/owner' do + %i[developer access_requester stranger].each do |type| + context "as a #{type}" do + it 'returns 403' do + user = public_send(type) + put api("/#{source_type.pluralize}/#{source.id}/access_requests/#{access_requester.id}/approve", user) + + expect(response).to have_http_status(403) + end + end + end + end + + context 'when authenticated as a master/owner' do + it 'returns 201' do + expect do + put api("/#{source_type.pluralize}/#{source.id}/access_requests/#{access_requester.id}/approve", master), + access_level: Member::MASTER + + expect(response).to have_http_status(201) + end.to change { source.members.count }.by(1) + # User attributes + expect(json_response['id']).to eq(access_requester.id) + expect(json_response['name']).to eq(access_requester.name) + expect(json_response['username']).to eq(access_requester.username) + expect(json_response['state']).to eq(access_requester.state) + expect(json_response['avatar_url']).to eq(access_requester.avatar_url) + expect(json_response['web_url']).to eq(Gitlab::Routing.url_helpers.user_url(access_requester)) + + # Member attributes + expect(json_response['access_level']).to eq(Member::MASTER) + end + + context 'user_id does not match an existing access requester' do + it 'returns 404' do + expect do + put api("/#{source_type.pluralize}/#{source.id}/access_requests/#{stranger.id}/approve", master) + + expect(response).to 
have_http_status(404) + end.not_to change { source.members.count } + end + end + end + end + end + + shared_examples 'DELETE /:sources/:id/access_requests/:user_id' do |source_type| + context "with :sources == #{source_type.pluralize}" do + it_behaves_like 'a 404 response when source is private' do + let(:route) { delete api("/#{source_type.pluralize}/#{source.id}/access_requests/#{access_requester.id}", stranger) } + end + + context 'when authenticated as a non-master/owner' do + %i[developer stranger].each do |type| + context "as a #{type}" do + it 'returns 403' do + user = public_send(type) + delete api("/#{source_type.pluralize}/#{source.id}/access_requests/#{access_requester.id}", user) + + expect(response).to have_http_status(403) + end + end + end + end + + context 'when authenticated as the access requester' do + it 'returns 200' do + expect do + delete api("/#{source_type.pluralize}/#{source.id}/access_requests/#{access_requester.id}", access_requester) + + expect(response).to have_http_status(200) + end.to change { source.requesters.count }.by(-1) + end + end + + context 'when authenticated as a master/owner' do + it 'returns 200' do + expect do + delete api("/#{source_type.pluralize}/#{source.id}/access_requests/#{access_requester.id}", master) + + expect(response).to have_http_status(200) + end.to change { source.requesters.count }.by(-1) + end + + context 'user_id does not match an existing access requester' do + it 'returns 404' do + expect do + delete api("/#{source_type.pluralize}/#{source.id}/access_requests/#{stranger.id}", master) + + expect(response).to have_http_status(404) + end.not_to change { source.requesters.count } + end + end + end + end + end + + it_behaves_like 'GET /:sources/:id/access_requests', 'project' do + let(:source) { project } + end + + it_behaves_like 'GET /:sources/:id/access_requests', 'group' do + let(:source) { group } + end + + it_behaves_like 'POST /:sources/:id/access_requests', 'project' do + let(:source) { project } + end + + it_behaves_like 'POST /:sources/:id/access_requests', 'group' do + let(:source) { group } + end + + it_behaves_like 'PUT /:sources/:id/access_requests/:user_id/approve', 'project' do + let(:source) { project } + end + + it_behaves_like 'PUT /:sources/:id/access_requests/:user_id/approve', 'group' do + let(:source) { group } + end + + it_behaves_like 'DELETE /:sources/:id/access_requests/:user_id', 'project' do + let(:source) { project } + end + + it_behaves_like 'DELETE /:sources/:id/access_requests/:user_id', 'group' do + let(:source) { group } + end +end diff --git a/spec/requests/api/builds_spec.rb b/spec/requests/api/builds_spec.rb index 4da5e211727..73f44756b86 100644 --- a/spec/requests/api/builds_spec.rb +++ b/spec/requests/api/builds_spec.rb @@ -9,7 +9,7 @@ describe API::API, api: true do let!(:developer) { create(:project_member, :developer, user: user, project: project) } let(:reporter) { create(:project_member, :reporter, project: project) } let(:guest) { create(:project_member, :guest, project: project) } - let!(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit.id, ref: project.default_branch) } + let!(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: project.default_branch) } let!(:build) { create(:ci_build, pipeline: pipeline) } describe 'GET /projects/:id/builds ' do @@ -180,7 +180,11 @@ describe API::API, api: true do describe 'GET /projects/:id/artifacts/:ref_name/download?job=name' do let(:api_user) { reporter.user } - let(:build) { 
create(:ci_build, :success, :artifacts, pipeline: pipeline) } + let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) } + + before do + build.success + end def path_for_ref(ref = pipeline.ref, job = build.name) api("/projects/#{project.id}/builds/artifacts/#{ref}/download?job=#{job}", api_user) diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb index 60c2f14bd3c..7ca75d77673 100644 --- a/spec/requests/api/commits_spec.rb +++ b/spec/requests/api/commits_spec.rb @@ -89,16 +89,29 @@ describe API::API, api: true do it "returns nil for commit without CI" do get api("/projects/#{project.id}/repository/commits/#{project.repository.commit.id}", user) + expect(response).to have_http_status(200) expect(json_response['status']).to be_nil end it "returns status for CI" do - pipeline = project.ensure_pipeline('master', project.repository.commit.sha) + pipeline = project.ensure_pipeline(project.repository.commit.sha, 'master') + pipeline.update(status: 'success') + get api("/projects/#{project.id}/repository/commits/#{project.repository.commit.id}", user) + expect(response).to have_http_status(200) expect(json_response['status']).to eq(pipeline.status) end + + it "returns status for CI when pipeline is created" do + project.ensure_pipeline(project.repository.commit.sha, 'master') + + get api("/projects/#{project.id}/repository/commits/#{project.repository.commit.id}", user) + + expect(response).to have_http_status(200) + expect(json_response['status']).to be_nil + end end context "unauthorized user" do diff --git a/spec/requests/api/group_members_spec.rb b/spec/requests/api/group_members_spec.rb deleted file mode 100644 index 8bd6a8062ae..00000000000 --- a/spec/requests/api/group_members_spec.rb +++ /dev/null @@ -1,199 +0,0 @@ -require 'spec_helper' - -describe API::API, api: true do - include ApiHelpers - - let(:owner) { create(:user) } - let(:reporter) { create(:user) } - let(:developer) { create(:user) } - let(:master) { create(:user) } - let(:guest) { create(:user) } - let(:stranger) { create(:user) } - - let!(:group_with_members) do - group = create(:group, :private) - group.add_users([reporter.id], GroupMember::REPORTER) - group.add_users([developer.id], GroupMember::DEVELOPER) - group.add_users([master.id], GroupMember::MASTER) - group.add_users([guest.id], GroupMember::GUEST) - group - end - - let!(:group_no_members) { create(:group) } - - before do - group_with_members.add_owner owner - group_no_members.add_owner owner - end - - describe "GET /groups/:id/members" do - context "when authenticated as user that is part or the group" do - it "each user: returns an array of members groups of group3" do - [owner, master, developer, reporter, guest].each do |user| - get api("/groups/#{group_with_members.id}/members", user) - expect(response).to have_http_status(200) - expect(json_response).to be_an Array - expect(json_response.size).to eq(5) - expect(json_response.find { |e| e['id'] == owner.id }['access_level']).to eq(GroupMember::OWNER) - expect(json_response.find { |e| e['id'] == reporter.id }['access_level']).to eq(GroupMember::REPORTER) - expect(json_response.find { |e| e['id'] == developer.id }['access_level']).to eq(GroupMember::DEVELOPER) - expect(json_response.find { |e| e['id'] == master.id }['access_level']).to eq(GroupMember::MASTER) - expect(json_response.find { |e| e['id'] == guest.id }['access_level']).to eq(GroupMember::GUEST) - end - end - - it 'users not part of the group should get access error' do - get 
api("/groups/#{group_with_members.id}/members", stranger) - - expect(response).to have_http_status(404) - end - end - end - - describe "POST /groups/:id/members" do - context "when not a member of the group" do - it "does not add guest as member of group_no_members when adding being done by person outside the group" do - post api("/groups/#{group_no_members.id}/members", reporter), user_id: guest.id, access_level: GroupMember::MASTER - expect(response).to have_http_status(403) - end - end - - context "when a member of the group" do - it "returns ok and add new member" do - new_user = create(:user) - - expect do - post api("/groups/#{group_no_members.id}/members", owner), user_id: new_user.id, access_level: GroupMember::MASTER - end.to change { group_no_members.members.count }.by(1) - - expect(response).to have_http_status(201) - expect(json_response['name']).to eq(new_user.name) - expect(json_response['access_level']).to eq(GroupMember::MASTER) - end - - it "does not allow guest to modify group members" do - new_user = create(:user) - - expect do - post api("/groups/#{group_with_members.id}/members", guest), user_id: new_user.id, access_level: GroupMember::MASTER - end.not_to change { group_with_members.members.count } - - expect(response).to have_http_status(403) - end - - it "returns error if member already exists" do - post api("/groups/#{group_with_members.id}/members", owner), user_id: master.id, access_level: GroupMember::MASTER - expect(response).to have_http_status(409) - end - - it "returns a 400 error when user id is not given" do - post api("/groups/#{group_no_members.id}/members", owner), access_level: GroupMember::MASTER - expect(response).to have_http_status(400) - end - - it "returns a 400 error when access level is not given" do - post api("/groups/#{group_no_members.id}/members", owner), user_id: master.id - expect(response).to have_http_status(400) - end - - it "returns a 422 error when access level is not known" do - post api("/groups/#{group_no_members.id}/members", owner), user_id: master.id, access_level: 1234 - expect(response).to have_http_status(422) - end - end - end - - describe 'PUT /groups/:id/members/:user_id' do - context 'when not a member of the group' do - it 'returns a 409 error if the user is not a group member' do - put( - api("/groups/#{group_no_members.id}/members/#{developer.id}", - owner), access_level: GroupMember::MASTER - ) - expect(response).to have_http_status(404) - end - end - - context 'when a member of the group' do - it 'returns ok and update member access level' do - put( - api("/groups/#{group_with_members.id}/members/#{reporter.id}", - owner), - access_level: GroupMember::MASTER - ) - - expect(response).to have_http_status(200) - - get api("/groups/#{group_with_members.id}/members", owner) - json_reporter = json_response.find do |e| - e['id'] == reporter.id - end - - expect(json_reporter['access_level']).to eq(GroupMember::MASTER) - end - - it 'does not allow guest to modify group members' do - put( - api("/groups/#{group_with_members.id}/members/#{developer.id}", - guest), - access_level: GroupMember::MASTER - ) - - expect(response).to have_http_status(403) - - get api("/groups/#{group_with_members.id}/members", owner) - json_developer = json_response.find do |e| - e['id'] == developer.id - end - - expect(json_developer['access_level']).to eq(GroupMember::DEVELOPER) - end - - it 'returns a 400 error when access level is not given' do - put( - api("/groups/#{group_with_members.id}/members/#{master.id}", owner) - ) - expect(response).to 
have_http_status(400) - end - - it 'returns a 422 error when access level is not known' do - put( - api("/groups/#{group_with_members.id}/members/#{master.id}", owner), - access_level: 1234 - ) - expect(response).to have_http_status(422) - end - end - end - - describe 'DELETE /groups/:id/members/:user_id' do - context 'when not a member of the group' do - it "does not delete guest's membership of group_with_members" do - random_user = create(:user) - delete api("/groups/#{group_with_members.id}/members/#{owner.id}", random_user) - - expect(response).to have_http_status(404) - end - end - - context "when a member of the group" do - it "deletes guest's membership of group" do - expect do - delete api("/groups/#{group_with_members.id}/members/#{guest.id}", owner) - end.to change { group_with_members.members.count }.by(-1) - - expect(response).to have_http_status(200) - end - - it "returns a 404 error when user id is not known" do - delete api("/groups/#{group_with_members.id}/members/1328", owner) - expect(response).to have_http_status(404) - end - - it "does not allow guest to modify group members" do - delete api("/groups/#{group_with_members.id}/members/#{master.id}", guest) - expect(response).to have_http_status(403) - end - end - end -end diff --git a/spec/requests/api/internal_spec.rb b/spec/requests/api/internal_spec.rb index f6f85d6e95e..be52f88831f 100644 --- a/spec/requests/api/internal_spec.rb +++ b/spec/requests/api/internal_spec.rb @@ -275,6 +275,24 @@ describe API::API, api: true do end end + describe 'GET /internal/merge_request_urls' do + let(:repo_name) { "#{project.namespace.name}/#{project.path}" } + let(:changes) { URI.escape("#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/new_branch") } + + before do + project.team << [user, :developer] + get api("/internal/merge_request_urls?project=#{repo_name}&changes=#{changes}"), secret_token: secret_token + end + + it 'returns link to create new merge request' do + expect(json_response).to match [{ + "branch_name" => "new_branch", + "url" => "http://localhost/#{project.namespace.name}/#{project.path}/merge_requests/new?merge_request%5Bsource_branch%5D=new_branch", + "new_merge_request" => true + }] + end + end + def pull(key, project, protocol = 'ssh') post( api("/internal/allowed"), diff --git a/spec/requests/api/members_spec.rb b/spec/requests/api/members_spec.rb new file mode 100644 index 00000000000..a56ee30f7b1 --- /dev/null +++ b/spec/requests/api/members_spec.rb @@ -0,0 +1,312 @@ +require 'spec_helper' + +describe API::Members, api: true do + include ApiHelpers + + let(:master) { create(:user) } + let(:developer) { create(:user) } + let(:access_requester) { create(:user) } + let(:stranger) { create(:user) } + + let(:project) do + project = create(:project, :public, creator_id: master.id, namespace: master.namespace) + project.team << [developer, :developer] + project.team << [master, :master] + project.request_access(access_requester) + project + end + + let!(:group) do + group = create(:group, :public) + group.add_developer(developer) + group.add_owner(master) + group.request_access(access_requester) + group + end + + shared_examples 'GET /:sources/:id/members' do |source_type| + context "with :sources == #{source_type.pluralize}" do + it_behaves_like 'a 404 response when source is private' do + let(:route) { get api("/#{source_type.pluralize}/#{source.id}/members", stranger) } + end + + context 'when authenticated as a non-member' do + %i[access_requester stranger].each do |type| + context "as 
a #{type}" do + it 'returns 200' do + user = public_send(type) + get api("/#{source_type.pluralize}/#{source.id}/members", user) + + expect(response).to have_http_status(200) + expect(json_response.size).to eq(2) + end + end + end + end + + it 'finds members with query string' do + get api("/#{source_type.pluralize}/#{source.id}/members", developer), query: master.username + + expect(response).to have_http_status(200) + expect(json_response.count).to eq(1) + expect(json_response.first['username']).to eq(master.username) + end + end + end + + shared_examples 'GET /:sources/:id/members/:user_id' do |source_type| + context "with :sources == #{source_type.pluralize}" do + it_behaves_like 'a 404 response when source is private' do + let(:route) { get api("/#{source_type.pluralize}/#{source.id}/members/#{developer.id}", stranger) } + end + + context 'when authenticated as a non-member' do + %i[access_requester stranger].each do |type| + context "as a #{type}" do + it 'returns 200' do + user = public_send(type) + get api("/#{source_type.pluralize}/#{source.id}/members/#{developer.id}", user) + + expect(response).to have_http_status(200) + # User attributes + expect(json_response['id']).to eq(developer.id) + expect(json_response['name']).to eq(developer.name) + expect(json_response['username']).to eq(developer.username) + expect(json_response['state']).to eq(developer.state) + expect(json_response['avatar_url']).to eq(developer.avatar_url) + expect(json_response['web_url']).to eq(Gitlab::Routing.url_helpers.user_url(developer)) + + # Member attributes + expect(json_response['access_level']).to eq(Member::DEVELOPER) + end + end + end + end + end + end + + shared_examples 'POST /:sources/:id/members' do |source_type| + context "with :sources == #{source_type.pluralize}" do + it_behaves_like 'a 404 response when source is private' do + let(:route) { post api("/#{source_type.pluralize}/#{source.id}/members", stranger) } + end + + context 'when authenticated as a non-member or member with insufficient rights' do + %i[access_requester stranger developer].each do |type| + context "as a #{type}" do + it 'returns 403' do + user = public_send(type) + post api("/#{source_type.pluralize}/#{source.id}/members", user) + + expect(response).to have_http_status(403) + end + end + end + end + + context 'when authenticated as a master/owner' do + context 'and new member is already a requester' do + it 'transforms the requester into a proper member' do + expect do + post api("/#{source_type.pluralize}/#{source.id}/members", master), + user_id: access_requester.id, access_level: Member::MASTER + + expect(response).to have_http_status(201) + end.to change { source.members.count }.by(1) + expect(source.requesters.count).to eq(0) + expect(json_response['id']).to eq(access_requester.id) + expect(json_response['access_level']).to eq(Member::MASTER) + end + end + + it 'creates a new member' do + expect do + post api("/#{source_type.pluralize}/#{source.id}/members", master), + user_id: stranger.id, access_level: Member::DEVELOPER + + expect(response).to have_http_status(201) + end.to change { source.members.count }.by(1) + expect(json_response['id']).to eq(stranger.id) + expect(json_response['access_level']).to eq(Member::DEVELOPER) + end + end + + it "returns #{source_type == 'project' ? 201 : 409} if member already exists" do + post api("/#{source_type.pluralize}/#{source.id}/members", master), + user_id: master.id, access_level: Member::MASTER + + expect(response).to have_http_status(source_type == 'project' ? 
201 : 409) + end + + it 'returns 400 when user_id is not given' do + post api("/#{source_type.pluralize}/#{source.id}/members", master), + access_level: Member::MASTER + + expect(response).to have_http_status(400) + end + + it 'returns 400 when access_level is not given' do + post api("/#{source_type.pluralize}/#{source.id}/members", master), + user_id: stranger.id + + expect(response).to have_http_status(400) + end + + it 'returns 422 when access_level is not valid' do + post api("/#{source_type.pluralize}/#{source.id}/members", master), + user_id: stranger.id, access_level: 1234 + + expect(response).to have_http_status(422) + end + end + end + + shared_examples 'PUT /:sources/:id/members/:user_id' do |source_type| + context "with :sources == #{source_type.pluralize}" do + it_behaves_like 'a 404 response when source is private' do + let(:route) { put api("/#{source_type.pluralize}/#{source.id}/members/#{developer.id}", stranger) } + end + + context 'when authenticated as a non-member or member with insufficient rights' do + %i[access_requester stranger developer].each do |type| + context "as a #{type}" do + it 'returns 403' do + user = public_send(type) + put api("/#{source_type.pluralize}/#{source.id}/members/#{developer.id}", user) + + expect(response).to have_http_status(403) + end + end + end + end + + context 'when authenticated as a master/owner' do + it 'updates the member' do + put api("/#{source_type.pluralize}/#{source.id}/members/#{developer.id}", master), + access_level: Member::MASTER + + expect(response).to have_http_status(200) + expect(json_response['id']).to eq(developer.id) + expect(json_response['access_level']).to eq(Member::MASTER) + end + end + + it 'returns 409 if member does not exist' do + put api("/#{source_type.pluralize}/#{source.id}/members/123", master), + access_level: Member::MASTER + + expect(response).to have_http_status(404) + end + + it 'returns 400 when access_level is not given' do + put api("/#{source_type.pluralize}/#{source.id}/members/#{developer.id}", master) + + expect(response).to have_http_status(400) + end + + it 'returns 422 when access level is not valid' do + put api("/#{source_type.pluralize}/#{source.id}/members/#{developer.id}", master), + access_level: 1234 + + expect(response).to have_http_status(422) + end + end + end + + shared_examples 'DELETE /:sources/:id/members/:user_id' do |source_type| + context "with :sources == #{source_type.pluralize}" do + it_behaves_like 'a 404 response when source is private' do + let(:route) { delete api("/#{source_type.pluralize}/#{source.id}/members/#{developer.id}", stranger) } + end + + context 'when authenticated as a non-member or member with insufficient rights' do + %i[access_requester stranger].each do |type| + context "as a #{type}" do + it 'returns 403' do + user = public_send(type) + delete api("/#{source_type.pluralize}/#{source.id}/members/#{developer.id}", user) + + expect(response).to have_http_status(403) + end + end + end + end + + context 'when authenticated as a member and deleting themself' do + it 'deletes the member' do + expect do + delete api("/#{source_type.pluralize}/#{source.id}/members/#{developer.id}", developer) + + expect(response).to have_http_status(200) + end.to change { source.members.count }.by(-1) + end + end + + context 'when authenticated as a master/owner' do + context 'and member is a requester' do + it "returns #{source_type == 'project' ? 
200 : 404}" do + expect do + delete api("/#{source_type.pluralize}/#{source.id}/members/#{access_requester.id}", master) + + expect(response).to have_http_status(source_type == 'project' ? 200 : 404) + end.not_to change { source.requesters.count } + end + end + + it 'deletes the member' do + expect do + delete api("/#{source_type.pluralize}/#{source.id}/members/#{developer.id}", master) + + expect(response).to have_http_status(200) + end.to change { source.members.count }.by(-1) + end + end + + it "returns #{source_type == 'project' ? 200 : 404} if member does not exist" do + delete api("/#{source_type.pluralize}/#{source.id}/members/123", master) + + expect(response).to have_http_status(source_type == 'project' ? 200 : 404) + end + end + end + + it_behaves_like 'GET /:sources/:id/members', 'project' do + let(:source) { project } + end + + it_behaves_like 'GET /:sources/:id/members', 'group' do + let(:source) { group } + end + + it_behaves_like 'GET /:sources/:id/members/:user_id', 'project' do + let(:source) { project } + end + + it_behaves_like 'GET /:sources/:id/members/:user_id', 'group' do + let(:source) { group } + end + + it_behaves_like 'POST /:sources/:id/members', 'project' do + let(:source) { project } + end + + it_behaves_like 'POST /:sources/:id/members', 'group' do + let(:source) { group } + end + + it_behaves_like 'PUT /:sources/:id/members/:user_id', 'project' do + let(:source) { project } + end + + it_behaves_like 'PUT /:sources/:id/members/:user_id', 'group' do + let(:source) { group } + end + + it_behaves_like 'DELETE /:sources/:id/members/:user_id', 'project' do + let(:source) { project } + end + + it_behaves_like 'DELETE /:sources/:id/members/:user_id', 'group' do + let(:source) { group } + end +end diff --git a/spec/requests/api/project_members_spec.rb b/spec/requests/api/project_members_spec.rb deleted file mode 100644 index 13cc0d81ac8..00000000000 --- a/spec/requests/api/project_members_spec.rb +++ /dev/null @@ -1,166 +0,0 @@ -require 'spec_helper' - -describe API::API, api: true do - include ApiHelpers - let(:user) { create(:user) } - let(:user2) { create(:user) } - let(:user3) { create(:user) } - let(:project) { create(:project, creator_id: user.id, namespace: user.namespace) } - let(:project_member) { create(:project_member, :master, user: user, project: project) } - let(:project_member2) { create(:project_member, :developer, user: user3, project: project) } - - describe "GET /projects/:id/members" do - before { project_member } - before { project_member2 } - - it "returns project team members" do - get api("/projects/#{project.id}/members", user) - expect(response).to have_http_status(200) - expect(json_response).to be_an Array - expect(json_response.count).to eq(2) - expect(json_response.map { |u| u['username'] }).to include user.username - end - - it "finds team members with query string" do - get api("/projects/#{project.id}/members", user), query: user.username - expect(response).to have_http_status(200) - expect(json_response).to be_an Array - expect(json_response.count).to eq(1) - expect(json_response.first['username']).to eq(user.username) - end - - it "returns a 404 error if id not found" do - get api("/projects/9999/members", user) - expect(response).to have_http_status(404) - end - end - - describe "GET /projects/:id/members/:user_id" do - before { project_member } - - it "returns project team member" do - get api("/projects/#{project.id}/members/#{user.id}", user) - expect(response).to have_http_status(200) - expect(json_response['username']).to 
eq(user.username) - expect(json_response['access_level']).to eq(ProjectMember::MASTER) - end - - it "returns a 404 error if user id not found" do - get api("/projects/#{project.id}/members/1234", user) - expect(response).to have_http_status(404) - end - end - - describe "POST /projects/:id/members" do - it "adds user to project team" do - expect do - post api("/projects/#{project.id}/members", user), user_id: user2.id, access_level: ProjectMember::DEVELOPER - end.to change { ProjectMember.count }.by(1) - - expect(response).to have_http_status(201) - expect(json_response['username']).to eq(user2.username) - expect(json_response['access_level']).to eq(ProjectMember::DEVELOPER) - end - - it "returns a 201 status if user is already project member" do - post api("/projects/#{project.id}/members", user), - user_id: user2.id, - access_level: ProjectMember::DEVELOPER - expect do - post api("/projects/#{project.id}/members", user), user_id: user2.id, access_level: ProjectMember::DEVELOPER - end.not_to change { ProjectMember.count } - - expect(response).to have_http_status(201) - expect(json_response['username']).to eq(user2.username) - expect(json_response['access_level']).to eq(ProjectMember::DEVELOPER) - end - - it "returns a 400 error when user id is not given" do - post api("/projects/#{project.id}/members", user), access_level: ProjectMember::MASTER - expect(response).to have_http_status(400) - end - - it "returns a 400 error when access level is not given" do - post api("/projects/#{project.id}/members", user), user_id: user2.id - expect(response).to have_http_status(400) - end - - it "returns a 422 error when access level is not known" do - post api("/projects/#{project.id}/members", user), user_id: user2.id, access_level: 1234 - expect(response).to have_http_status(422) - end - end - - describe "PUT /projects/:id/members/:user_id" do - before { project_member2 } - - it "updates project team member" do - put api("/projects/#{project.id}/members/#{user3.id}", user), access_level: ProjectMember::MASTER - expect(response).to have_http_status(200) - expect(json_response['username']).to eq(user3.username) - expect(json_response['access_level']).to eq(ProjectMember::MASTER) - end - - it "returns a 404 error if user_id is not found" do - put api("/projects/#{project.id}/members/1234", user), access_level: ProjectMember::MASTER - expect(response).to have_http_status(404) - end - - it "returns a 400 error when access level is not given" do - put api("/projects/#{project.id}/members/#{user3.id}", user) - expect(response).to have_http_status(400) - end - - it "returns a 422 error when access level is not known" do - put api("/projects/#{project.id}/members/#{user3.id}", user), access_level: 123 - expect(response).to have_http_status(422) - end - end - - describe "DELETE /projects/:id/members/:user_id" do - before do - project_member - project_member2 - end - - it "removes user from project team" do - expect do - delete api("/projects/#{project.id}/members/#{user3.id}", user) - end.to change { ProjectMember.count }.by(-1) - end - - it "returns 200 if team member is not part of a project" do - delete api("/projects/#{project.id}/members/#{user3.id}", user) - expect do - delete api("/projects/#{project.id}/members/#{user3.id}", user) - end.not_to change { ProjectMember.count } - expect(response).to have_http_status(200) - end - - it "returns 200 if team member already removed" do - delete api("/projects/#{project.id}/members/#{user3.id}", user) - delete api("/projects/#{project.id}/members/#{user3.id}", 
user) - expect(response).to have_http_status(200) - end - - it "returns 200 OK when the user was not member" do - expect do - delete api("/projects/#{project.id}/members/1000000", user) - end.to change { ProjectMember.count }.by(0) - expect(response).to have_http_status(200) - expect(json_response['id']).to eq(1000000) - expect(json_response['message']).to eq('Access revoked') - end - - context 'when the user is not an admin or owner' do - it 'can leave the project' do - expect do - delete api("/projects/#{project.id}/members/#{user3.id}", user3) - end.to change { ProjectMember.count }.by(-1) - - expect(response).to have_http_status(200) - expect(json_response['id']).to eq(project_member2.id) - end - end - end -end diff --git a/spec/requests/api/todos_spec.rb b/spec/requests/api/todos_spec.rb index 3ccd0af652f..887a2ba5b84 100644 --- a/spec/requests/api/todos_spec.rb +++ b/spec/requests/api/todos_spec.rb @@ -117,6 +117,12 @@ describe API::Todos, api: true do expect(response.status).to eq(200) expect(pending_1.reload).to be_done end + + it 'updates todos cache' do + expect_any_instance_of(User).to receive(:update_todos_count_cache).and_call_original + + delete api("/todos/#{pending_1.id}", john_doe) + end end end @@ -139,6 +145,12 @@ describe API::Todos, api: true do expect(pending_2.reload).to be_done expect(pending_3.reload).to be_done end + + it 'updates todos cache' do + expect_any_instance_of(User).to receive(:update_todos_count_cache).and_call_original + + delete api("/todos", john_doe) + end end end diff --git a/spec/requests/api/triggers_spec.rb b/spec/requests/api/triggers_spec.rb index 5702682fc7d..82bba1ce8a4 100644 --- a/spec/requests/api/triggers_spec.rb +++ b/spec/requests/api/triggers_spec.rb @@ -50,7 +50,8 @@ describe API::API do post api("/projects/#{project.id}/trigger/builds"), options.merge(ref: 'master') expect(response).to have_http_status(201) pipeline.builds.reload - expect(pipeline.builds.size).to eq(2) + expect(pipeline.builds.pending.size).to eq(2) + expect(pipeline.builds.size).to eq(5) end it 'returns bad request with no builds created if there\'s no commit for that ref' do diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb index e0e041b4e15..0bbba64a6d5 100644 --- a/spec/requests/api/users_spec.rb +++ b/spec/requests/api/users_spec.rb @@ -564,12 +564,14 @@ describe API::API, api: true do end describe "DELETE /users/:id" do + let!(:namespace) { user.namespace } before { admin } it "deletes user" do delete api("/users/#{user.id}", admin) expect(response).to have_http_status(200) expect { User.find(user.id) }.to raise_error ActiveRecord::RecordNotFound + expect { Namespace.find(namespace.id) }.to raise_error ActiveRecord::RecordNotFound expect(json_response['email']).to eq(user.email) end diff --git a/spec/requests/ci/api/builds_spec.rb b/spec/requests/ci/api/builds_spec.rb index 05b309096cb..ca7932dc5da 100644 --- a/spec/requests/ci/api/builds_spec.rb +++ b/spec/requests/ci/api/builds_spec.rb @@ -6,112 +6,102 @@ describe Ci::API::API do let(:runner) { FactoryGirl.create(:ci_runner, tag_list: ["mysql", "ruby"]) } let(:project) { FactoryGirl.create(:empty_project) } - before do - stub_ci_pipeline_to_return_yaml_file - end - describe "Builds API for runners" do - let(:shared_runner) { FactoryGirl.create(:ci_runner, token: "SharedRunner") } - let(:shared_project) { FactoryGirl.create(:empty_project, name: "SharedProject") } + let(:pipeline) { create(:ci_pipeline_without_jobs, project: project, ref: 'master') } before do - 
FactoryGirl.create :ci_runner_project, project: project, runner: runner + project.runners << runner end describe "POST /builds/register" do - it "starts a build" do - pipeline = FactoryGirl.create(:ci_pipeline, project: project, ref: 'master') - pipeline.create_builds(nil) - build = pipeline.builds.first + let!(:build) { create(:ci_build, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) } - post ci_api("/builds/register"), token: runner.token, info: { platform: :darwin } + it "starts a build" do + register_builds info: { platform: :darwin } expect(response).to have_http_status(201) expect(json_response['sha']).to eq(build.sha) expect(runner.reload.platform).to eq("darwin") + expect(json_response["options"]).to eq({ "image" => "ruby:2.1", "services" => ["postgres"] }) + expect(json_response["variables"]).to include( + { "key" => "CI_BUILD_NAME", "value" => "spinach", "public" => true }, + { "key" => "CI_BUILD_STAGE", "value" => "test", "public" => true }, + { "key" => "DB_NAME", "value" => "postgres", "public" => true } + ) end - it "returns 404 error if no pending build found" do - post ci_api("/builds/register"), token: runner.token - - expect(response).to have_http_status(404) - end - - it "returns 404 error if no builds for specific runner" do - pipeline = FactoryGirl.create(:ci_pipeline, project: shared_project) - FactoryGirl.create(:ci_build, pipeline: pipeline, status: 'pending') + context 'when builds are finished' do + before do + build.success + end - post ci_api("/builds/register"), token: runner.token + it "returns 404 error if no builds for specific runner" do + register_builds - expect(response).to have_http_status(404) + expect(response).to have_http_status(404) + end end - it "returns 404 error if no builds for shared runner" do - pipeline = FactoryGirl.create(:ci_pipeline, project: project) - FactoryGirl.create(:ci_build, pipeline: pipeline, status: 'pending') + context 'for other project with builds' do + before do + build.success + create(:ci_build, :pending) + end - post ci_api("/builds/register"), token: shared_runner.token + it "returns 404 error if no builds for shared runner" do + register_builds - expect(response).to have_http_status(404) + expect(response).to have_http_status(404) + end end - it "returns options" do - pipeline = FactoryGirl.create(:ci_pipeline, project: project, ref: 'master') - pipeline.create_builds(nil) + context 'for shared runner' do + let(:shared_runner) { create(:ci_runner, token: "SharedRunner") } - post ci_api("/builds/register"), token: runner.token, info: { platform: :darwin } + it "should return 404 error if no builds for shared runner" do + register_builds shared_runner.token - expect(response).to have_http_status(201) - expect(json_response["options"]).to eq({ "image" => "ruby:2.1", "services" => ["postgres"] }) + expect(response).to have_http_status(404) + end end - it "returns variables" do - pipeline = FactoryGirl.create(:ci_pipeline, project: project, ref: 'master') - pipeline.create_builds(nil) - project.variables << Ci::Variable.new(key: "SECRET_KEY", value: "secret_value") - - post ci_api("/builds/register"), token: runner.token, info: { platform: :darwin } + context 'for triggered build' do + before do + trigger = create(:ci_trigger, project: project) + create(:ci_trigger_request_with_variables, pipeline: pipeline, builds: [build], trigger: trigger) + project.variables << Ci::Variable.new(key: "SECRET_KEY", value: "secret_value") + end - expect(response).to have_http_status(201) - 
expect(json_response["variables"]).to include( - { "key" => "CI_BUILD_NAME", "value" => "spinach", "public" => true }, - { "key" => "CI_BUILD_STAGE", "value" => "test", "public" => true }, - { "key" => "DB_NAME", "value" => "postgres", "public" => true }, - { "key" => "SECRET_KEY", "value" => "secret_value", "public" => false } - ) + it "returns variables for triggers" do + register_builds info: { platform: :darwin } + + expect(response).to have_http_status(201) + expect(json_response["variables"]).to include( + { "key" => "CI_BUILD_NAME", "value" => "spinach", "public" => true }, + { "key" => "CI_BUILD_STAGE", "value" => "test", "public" => true }, + { "key" => "CI_BUILD_TRIGGERED", "value" => "true", "public" => true }, + { "key" => "DB_NAME", "value" => "postgres", "public" => true }, + { "key" => "SECRET_KEY", "value" => "secret_value", "public" => false }, + { "key" => "TRIGGER_KEY_1", "value" => "TRIGGER_VALUE_1", "public" => false }, + ) + end end - it "returns variables for triggers" do - trigger = FactoryGirl.create(:ci_trigger, project: project) - pipeline = FactoryGirl.create(:ci_pipeline, project: project, ref: 'master') - - trigger_request = FactoryGirl.create(:ci_trigger_request_with_variables, pipeline: pipeline, trigger: trigger) - pipeline.create_builds(nil, trigger_request) - project.variables << Ci::Variable.new(key: "SECRET_KEY", value: "secret_value") - - post ci_api("/builds/register"), token: runner.token, info: { platform: :darwin } - - expect(response).to have_http_status(201) - expect(json_response["variables"]).to include( - { "key" => "CI_BUILD_NAME", "value" => "spinach", "public" => true }, - { "key" => "CI_BUILD_STAGE", "value" => "test", "public" => true }, - { "key" => "CI_BUILD_TRIGGERED", "value" => "true", "public" => true }, - { "key" => "DB_NAME", "value" => "postgres", "public" => true }, - { "key" => "SECRET_KEY", "value" => "secret_value", "public" => false }, - { "key" => "TRIGGER_KEY_1", "value" => "TRIGGER_VALUE_1", "public" => false } - ) - end + context 'with multiple builds' do + before do + build.success + end - it "returns dependent builds" do - pipeline = FactoryGirl.create(:ci_pipeline, project: project, ref: 'master') - pipeline.create_builds(nil, nil) - pipeline.builds.where(stage: 'test').each(&:success) + let!(:test_build) { create(:ci_build, pipeline: pipeline, name: 'deploy', stage: 'deploy', stage_idx: 1) } - post ci_api("/builds/register"), token: runner.token, info: { platform: :darwin } + it "returns dependent builds" do + register_builds info: { platform: :darwin } - expect(response).to have_http_status(201) - expect(json_response["depends_on_builds"].count).to eq(2) - expect(json_response["depends_on_builds"][0]["name"]).to eq("rspec") + expect(response).to have_http_status(201) + expect(json_response["id"]).to eq(test_build.id) + expect(json_response["depends_on_builds"].count).to eq(1) + expect(json_response["depends_on_builds"][0]).to include('id' => build.id, 'name' => 'spinach') + end end %w(name version revision platform architecture).each do |param| @@ -121,8 +111,9 @@ describe Ci::API::API do subject { runner.read_attribute(param.to_sym) } it do - post ci_api("/builds/register"), token: runner.token, info: { param => value } - expect(response).to have_http_status(404) + register_builds info: { param => value } + + expect(response).to have_http_status(201) runner.reload is_expected.to eq(value) end @@ -131,8 +122,7 @@ describe Ci::API::API do context 'when build has no tags' do before do - pipeline = create(:ci_pipeline, 
project: project) - create(:ci_build, pipeline: pipeline, tags: []) + build.update(tags: []) end context 'when runner is allowed to pick untagged builds' do @@ -154,17 +144,15 @@ describe Ci::API::API do expect(response).to have_http_status 404 end end + end - def register_builds - post ci_api("/builds/register"), token: runner.token, - info: { platform: :darwin } - end + def register_builds(token = runner.token, **params) + post ci_api("/builds/register"), params.merge(token: token) end end describe "PUT /builds/:id" do - let(:pipeline) {create(:ci_pipeline, project: project)} - let(:build) { create(:ci_build, :trace, pipeline: pipeline, runner_id: runner.id) } + let(:build) { create(:ci_build, :pending, :trace, pipeline: pipeline, runner_id: runner.id) } before do build.run! @@ -189,7 +177,7 @@ describe Ci::API::API do end describe 'PATCH /builds/:id/trace.txt' do - let(:build) { create(:ci_build, :trace, runner_id: runner.id) } + let(:build) { create(:ci_build, :pending, :trace, runner_id: runner.id) } let(:headers) { { Ci::API::Helpers::BUILD_TOKEN_HEADER => build.token, 'Content-Type' => 'text/plain' } } let(:headers_with_range) { headers.merge({ 'Content-Range' => '11-20' }) } @@ -237,8 +225,7 @@ describe Ci::API::API do context "Artifacts" do let(:file_upload) { fixture_file_upload(Rails.root + 'spec/fixtures/banana_sample.gif', 'image/gif') } let(:file_upload2) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/gif') } - let(:pipeline) { create(:ci_pipeline, project: project) } - let(:build) { create(:ci_build, pipeline: pipeline, runner_id: runner.id) } + let(:build) { create(:ci_build, :pending, pipeline: pipeline, runner_id: runner.id) } let(:authorize_url) { ci_api("/builds/#{build.id}/artifacts/authorize") } let(:post_url) { ci_api("/builds/#{build.id}/artifacts") } let(:delete_url) { ci_api("/builds/#{build.id}/artifacts") } diff --git a/spec/requests/ci/api/triggers_spec.rb b/spec/requests/ci/api/triggers_spec.rb index 3312bd11669..0a0f979f57d 100644 --- a/spec/requests/ci/api/triggers_spec.rb +++ b/spec/requests/ci/api/triggers_spec.rb @@ -42,7 +42,8 @@ describe Ci::API::API do post ci_api("/projects/#{project.ci_id}/refs/master/trigger"), options expect(response).to have_http_status(201) pipeline.builds.reload - expect(pipeline.builds.size).to eq(2) + expect(pipeline.builds.pending.size).to eq(2) + expect(pipeline.builds.size).to eq(5) end it 'returns bad request with no builds created if there\'s no commit for that ref' do diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb index 93d2bc160cc..4c9b4a8ba42 100644 --- a/spec/requests/lfs_http_spec.rb +++ b/spec/requests/lfs_http_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe Gitlab::Lfs::Router do +describe 'Git LFS API and storage' do let(:user) { create(:user) } let!(:lfs_object) { create(:lfs_object, :with_file) } @@ -31,10 +31,11 @@ describe Gitlab::Lfs::Router do 'operation' => 'upload' } end + let(:authorization) { authorize_user } before do allow(Gitlab.config.lfs).to receive(:enabled).and_return(false) - post_json "#{project.http_url_to_repo}/info/lfs/objects/batch", body, headers + post_lfs_json "#{project.http_url_to_repo}/info/lfs/objects/batch", body, headers end it 'responds with 501' do @@ -71,8 +72,9 @@ describe Gitlab::Lfs::Router do end context 'when handling lfs request using deprecated API' do + let(:authorization) { authorize_user } before do - post_json "#{project.http_url_to_repo}/info/lfs/objects", nil, headers + post_lfs_json 
"#{project.http_url_to_repo}/info/lfs/objects", nil, headers end it_behaves_like 'a deprecated' @@ -118,8 +120,8 @@ describe Gitlab::Lfs::Router do project.lfs_objects << lfs_object end - it 'responds with status 403' do - expect(response).to have_http_status(403) + it 'responds with status 404' do + expect(response).to have_http_status(404) end end @@ -147,8 +149,8 @@ describe Gitlab::Lfs::Router do context 'without required headers' do let(:authorization) { authorize_user } - it 'responds with status 403' do - expect(response).to have_http_status(403) + it 'responds with status 404' do + expect(response).to have_http_status(404) end end end @@ -162,7 +164,7 @@ describe Gitlab::Lfs::Router do enable_lfs update_lfs_permissions update_user_permissions - post_json "#{project.http_url_to_repo}/info/lfs/objects/batch", body, headers + post_lfs_json "#{project.http_url_to_repo}/info/lfs/objects/batch", body, headers end describe 'download' do @@ -304,10 +306,10 @@ describe Gitlab::Lfs::Router do end context 'when user does is not member of the project' do - let(:role) { :guest } + let(:update_user_permissions) { nil } - it 'responds with 403' do - expect(response).to have_http_status(403) + it 'responds with 404' do + expect(response).to have_http_status(404) end end @@ -510,6 +512,7 @@ describe Gitlab::Lfs::Router do describe 'unsupported' do let(:project) { create(:empty_project) } + let(:authorization) { authorize_user } let(:body) do { 'operation' => 'other', 'objects' => [ @@ -553,11 +556,11 @@ describe Gitlab::Lfs::Router do context 'and request is sent with a malformed headers' do before do - put_finalize('cat /etc/passwd') + put_finalize('/etc/passwd') end it 'does not recognize it as a valid lfs command' do - expect(response).to have_http_status(403) + expect(response).to have_http_status(401) end end end @@ -582,6 +585,16 @@ describe Gitlab::Lfs::Router do expect(response).to have_http_status(403) end end + + context 'and request is sent with a malformed headers' do + before do + put_finalize('/etc/passwd') + end + + it 'does not recognize it as a valid lfs command' do + expect(response).to have_http_status(403) + end + end end describe 'to one project' do @@ -624,9 +637,25 @@ describe Gitlab::Lfs::Router do expect(lfs_object.projects.pluck(:id)).to include(project.id) end end + + context 'invalid tempfiles' do + it 'rejects slashes in the tempfile name (path traversal' do + put_finalize('foo/bar') + expect(response).to have_http_status(403) + end + + it 'rejects tempfile names that do not start with the oid' do + put_finalize("foo#{sample_oid}") + expect(response).to have_http_status(403) + end + end end describe 'and user does not have push access' do + before do + project.team << [user, :reporter] + end + it_behaves_like 'forbidden' end end @@ -758,8 +787,8 @@ describe Gitlab::Lfs::Router do Projects::ForkService.new(project, user, {}).execute end - def post_json(url, body = nil, headers = nil) - post(url, body.try(:to_json), (headers || {}).merge('Content-Type' => 'application/json')) + def post_lfs_json(url, body = nil, headers = nil) + post(url, body.try(:to_json), (headers || {}).merge('Content-Type' => 'application/vnd.git-lfs+json')) end def json_response diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb index b941e78f983..77842057a10 100644 --- a/spec/routing/project_routing_spec.rb +++ b/spec/routing/project_routing_spec.rb @@ -60,7 +60,7 @@ end # project GET /:id(.:format) projects#show # PUT /:id(.:format) projects#update # DELETE 
/:id(.:format) projects#destroy -# markdown_preview_project POST /:id/markdown_preview(.:format) projects#markdown_preview +# preview_markdown_project POST /:id/preview_markdown(.:format) projects#preview_markdown describe ProjectsController, 'routing' do it 'to #create' do expect(post('/projects')).to route_to('projects#create') @@ -91,9 +91,9 @@ describe ProjectsController, 'routing' do expect(delete('/gitlab/gitlabhq')).to route_to('projects#destroy', namespace_id: 'gitlab', id: 'gitlabhq') end - it 'to #markdown_preview' do - expect(post('/gitlab/gitlabhq/markdown_preview')).to( - route_to('projects#markdown_preview', namespace_id: 'gitlab', id: 'gitlabhq') + it 'to #preview_markdown' do + expect(post('/gitlab/gitlabhq/preview_markdown')).to( + route_to('projects#preview_markdown', namespace_id: 'gitlab', id: 'gitlabhq') ) end end diff --git a/spec/services/ci/create_builds_service_spec.rb b/spec/services/ci/create_builds_service_spec.rb deleted file mode 100644 index 8b0becd83d3..00000000000 --- a/spec/services/ci/create_builds_service_spec.rb +++ /dev/null @@ -1,32 +0,0 @@ -require 'spec_helper' - -describe Ci::CreateBuildsService, services: true do - let(:pipeline) { create(:ci_pipeline, ref: 'master') } - let(:user) { create(:user) } - - describe '#execute' do - # Using stubbed .gitlab-ci.yml created in commit factory - # - - subject do - described_class.new(pipeline).execute('test', user, status, nil) - end - - context 'next builds available' do - let(:status) { 'success' } - - it { is_expected.to be_an_instance_of Array } - it { is_expected.to all(be_an_instance_of Ci::Build) } - - it 'does not persist created builds' do - expect(subject.first).not_to be_persisted - end - end - - context 'builds skipped' do - let(:status) { 'skipped' } - - it { is_expected.to be_empty } - end - end -end diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb new file mode 100644 index 00000000000..4aadd009f3e --- /dev/null +++ b/spec/services/ci/create_pipeline_service_spec.rb @@ -0,0 +1,214 @@ +require 'spec_helper' + +describe Ci::CreatePipelineService, services: true do + let(:project) { FactoryGirl.create(:project) } + let(:user) { create(:admin) } + + before do + stub_ci_pipeline_to_return_yaml_file + end + + describe '#execute' do + def execute(params) + described_class.new(project, user, params).execute + end + + context 'valid params' do + let(:pipeline) do + execute(ref: 'refs/heads/master', + before: '00000000', + after: project.commit.id, + commits: [{ message: "Message" }]) + end + + it { expect(pipeline).to be_kind_of(Ci::Pipeline) } + it { expect(pipeline).to be_valid } + it { expect(pipeline).to be_persisted } + it { expect(pipeline).to eq(project.pipelines.last) } + it { expect(pipeline).to have_attributes(user: user) } + it { expect(pipeline.builds.first).to be_kind_of(Ci::Build) } + end + + context "skip tag if there is no build for it" do + it "creates commit if there is appropriate job" do + result = execute(ref: 'refs/heads/master', + before: '00000000', + after: project.commit.id, + commits: [{ message: "Message" }]) + expect(result).to be_persisted + end + + it "creates commit if there is no appropriate job but deploy job has right ref setting" do + config = YAML.dump({ deploy: { script: "ls", only: ["master"] } }) + stub_ci_pipeline_yaml_file(config) + result = execute(ref: 'refs/heads/master', + before: '00000000', + after: project.commit.id, + commits: [{ message: "Message" }]) + + expect(result).to be_persisted + 
end + end + + it 'skips creating pipeline for refs without .gitlab-ci.yml' do + stub_ci_pipeline_yaml_file(nil) + result = execute(ref: 'refs/heads/master', + before: '00000000', + after: project.commit.id, + commits: [{ message: 'Message' }]) + + expect(result).not_to be_persisted + expect(Ci::Pipeline.count).to eq(0) + end + + it 'fails commits if yaml is invalid' do + message = 'message' + allow_any_instance_of(Ci::Pipeline).to receive(:git_commit_message) { message } + stub_ci_pipeline_yaml_file('invalid: file: file') + commits = [{ message: message }] + pipeline = execute(ref: 'refs/heads/master', + before: '00000000', + after: project.commit.id, + commits: commits) + + expect(pipeline).to be_persisted + expect(pipeline.builds.any?).to be false + expect(pipeline.status).to eq('failed') + expect(pipeline.yaml_errors).not_to be_nil + end + + context 'when commit contains a [ci skip] directive' do + let(:message) { "some message[ci skip]" } + let(:messageFlip) { "some message[skip ci]" } + let(:capMessage) { "some message[CI SKIP]" } + let(:capMessageFlip) { "some message[SKIP CI]" } + + before do + allow_any_instance_of(Ci::Pipeline).to receive(:git_commit_message) { message } + end + + it "skips builds creation if there is [ci skip] tag in commit message" do + commits = [{ message: message }] + pipeline = execute(ref: 'refs/heads/master', + before: '00000000', + after: project.commit.id, + commits: commits) + + expect(pipeline).to be_persisted + expect(pipeline.builds.any?).to be false + expect(pipeline.status).to eq("skipped") + end + + it "skips builds creation if there is [skip ci] tag in commit message" do + commits = [{ message: messageFlip }] + pipeline = execute(ref: 'refs/heads/master', + before: '00000000', + after: project.commit.id, + commits: commits) + + expect(pipeline).to be_persisted + expect(pipeline.builds.any?).to be false + expect(pipeline.status).to eq("skipped") + end + + it "skips builds creation if there is [CI SKIP] tag in commit message" do + commits = [{ message: capMessage }] + pipeline = execute(ref: 'refs/heads/master', + before: '00000000', + after: project.commit.id, + commits: commits) + + expect(pipeline).to be_persisted + expect(pipeline.builds.any?).to be false + expect(pipeline.status).to eq("skipped") + end + + it "skips builds creation if there is [SKIP CI] tag in commit message" do + commits = [{ message: capMessageFlip }] + pipeline = execute(ref: 'refs/heads/master', + before: '00000000', + after: project.commit.id, + commits: commits) + + expect(pipeline).to be_persisted + expect(pipeline.builds.any?).to be false + expect(pipeline.status).to eq("skipped") + end + + it "does not skips builds creation if there is no [ci skip] or [skip ci] tag in commit message" do + allow_any_instance_of(Ci::Pipeline).to receive(:git_commit_message) { "some message" } + + commits = [{ message: "some message" }] + pipeline = execute(ref: 'refs/heads/master', + before: '00000000', + after: project.commit.id, + commits: commits) + + expect(pipeline).to be_persisted + expect(pipeline.builds.first.name).to eq("rspec") + end + + it "fails builds creation if there is [ci skip] tag in commit message and yaml is invalid" do + stub_ci_pipeline_yaml_file('invalid: file: fiile') + commits = [{ message: message }] + pipeline = execute(ref: 'refs/heads/master', + before: '00000000', + after: project.commit.id, + commits: commits) + + expect(pipeline).to be_persisted + expect(pipeline.builds.any?).to be false + expect(pipeline.status).to eq("failed") + 
expect(pipeline.yaml_errors).not_to be_nil + end + end + + it "creates commit with failed status if yaml is invalid" do + stub_ci_pipeline_yaml_file('invalid: file') + commits = [{ message: "some message" }] + pipeline = execute(ref: 'refs/heads/master', + before: '00000000', + after: project.commit.id, + commits: commits) + + expect(pipeline).to be_persisted + expect(pipeline.status).to eq("failed") + expect(pipeline.builds.any?).to be false + end + + context 'when there are no jobs for this pipeline' do + before do + config = YAML.dump({ test: { script: 'ls', only: ['feature'] } }) + stub_ci_pipeline_yaml_file(config) + end + + it 'does not create a new pipeline' do + result = execute(ref: 'refs/heads/master', + before: '00000000', + after: project.commit.id, + commits: [{ message: 'some msg' }]) + + expect(result).not_to be_persisted + expect(Ci::Build.all).to be_empty + expect(Ci::Pipeline.count).to eq(0) + end + end + + context 'with manual actions' do + before do + config = YAML.dump({ deploy: { script: 'ls', when: 'manual' } }) + stub_ci_pipeline_yaml_file(config) + end + + it 'does not create a new pipeline' do + result = execute(ref: 'refs/heads/master', + before: '00000000', + after: project.commit.id, + commits: [{ message: 'some msg' }]) + + expect(result).to be_persisted + expect(result.manual_actions).not_to be_empty + end + end + end +end diff --git a/spec/services/ci/create_trigger_request_service_spec.rb b/spec/services/ci/create_trigger_request_service_spec.rb index b72e0bd3dbe..d8c443d29d5 100644 --- a/spec/services/ci/create_trigger_request_service_spec.rb +++ b/spec/services/ci/create_trigger_request_service_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' describe Ci::CreateTriggerRequestService, services: true do - let(:service) { Ci::CreateTriggerRequestService.new } + let(:service) { described_class.new } let(:project) { create(:project) } let(:trigger) { create(:ci_trigger, project: project) } @@ -27,8 +27,7 @@ describe Ci::CreateTriggerRequestService, services: true do subject { service.execute(project, trigger, 'master') } before do - stub_ci_pipeline_yaml_file('{}') - FactoryGirl.create :ci_pipeline, project: project + stub_ci_pipeline_yaml_file('script: { only: [develop], script: hello World }') end it { expect(subject).to be_nil } diff --git a/spec/services/ci/image_for_build_service_spec.rb b/spec/services/ci/image_for_build_service_spec.rb index 21e00b11a12..c931c3e4829 100644 --- a/spec/services/ci/image_for_build_service_spec.rb +++ b/spec/services/ci/image_for_build_service_spec.rb @@ -5,8 +5,8 @@ module Ci let(:service) { ImageForBuildService.new } let(:project) { FactoryGirl.create(:empty_project) } let(:commit_sha) { '01234567890123456789' } - let(:commit) { project.ensure_pipeline('master', commit_sha) } - let(:build) { FactoryGirl.create(:ci_build, pipeline: commit) } + let(:pipeline) { project.ensure_pipeline(commit_sha, 'master') } + let(:build) { FactoryGirl.create(:ci_build, pipeline: pipeline) } describe '#execute' do before { build } diff --git a/spec/services/ci/process_pipeline_service_spec.rb b/spec/services/ci/process_pipeline_service_spec.rb new file mode 100644 index 00000000000..ad8c2485888 --- /dev/null +++ b/spec/services/ci/process_pipeline_service_spec.rb @@ -0,0 +1,288 @@ +require 'spec_helper' + +describe Ci::ProcessPipelineService, services: true do + let(:pipeline) { create(:ci_pipeline, ref: 'master') } + let(:user) { create(:user) } + let(:all_builds) { pipeline.builds } + let(:builds) { all_builds.where.not(status: [:created, 
:skipped]) } + let(:config) { nil } + + before do + allow(pipeline).to receive(:ci_yaml_file).and_return(config) + end + + describe '#execute' do + def create_builds + described_class.new(pipeline.project, user).execute(pipeline) + end + + def succeed_pending + builds.pending.update_all(status: 'success') + end + + context 'start queuing next builds' do + before do + create(:ci_build, :created, pipeline: pipeline, name: 'linux', stage_idx: 0) + create(:ci_build, :created, pipeline: pipeline, name: 'mac', stage_idx: 0) + create(:ci_build, :created, pipeline: pipeline, name: 'rspec', stage_idx: 1) + create(:ci_build, :created, pipeline: pipeline, name: 'rubocop', stage_idx: 1) + create(:ci_build, :created, pipeline: pipeline, name: 'deploy', stage_idx: 2) + end + + it 'processes a pipeline' do + expect(create_builds).to be_truthy + succeed_pending + expect(builds.success.count).to eq(2) + + expect(create_builds).to be_truthy + succeed_pending + expect(builds.success.count).to eq(4) + + expect(create_builds).to be_truthy + succeed_pending + expect(builds.success.count).to eq(5) + + expect(create_builds).to be_falsey + end + + it 'does not process pipeline if existing stage is running' do + expect(create_builds).to be_truthy + expect(builds.pending.count).to eq(2) + + expect(create_builds).to be_falsey + expect(builds.pending.count).to eq(2) + end + end + + context 'custom stage with first job allowed to fail' do + before do + create(:ci_build, :created, pipeline: pipeline, name: 'clean_job', stage_idx: 0, allow_failure: true) + create(:ci_build, :created, pipeline: pipeline, name: 'test_job', stage_idx: 1, allow_failure: true) + end + + it 'automatically triggers a next stage when build finishes' do + expect(create_builds).to be_truthy + expect(builds.pluck(:status)).to contain_exactly('pending') + + pipeline.builds.running_or_pending.each(&:drop) + expect(builds.pluck(:status)).to contain_exactly('failed', 'pending') + end + end + + context 'properly creates builds when "when" is defined' do + before do + create(:ci_build, :created, pipeline: pipeline, name: 'build', stage_idx: 0) + create(:ci_build, :created, pipeline: pipeline, name: 'test', stage_idx: 1) + create(:ci_build, :created, pipeline: pipeline, name: 'test_failure', stage_idx: 2, when: 'on_failure') + create(:ci_build, :created, pipeline: pipeline, name: 'deploy', stage_idx: 3) + create(:ci_build, :created, pipeline: pipeline, name: 'production', stage_idx: 3, when: 'manual') + create(:ci_build, :created, pipeline: pipeline, name: 'cleanup', stage_idx: 4, when: 'always') + create(:ci_build, :created, pipeline: pipeline, name: 'clear cache', stage_idx: 4, when: 'manual') + end + + context 'when builds are successful' do + it 'properly creates builds' do + expect(create_builds).to be_truthy + expect(builds.pluck(:name)).to contain_exactly('build') + expect(builds.pluck(:status)).to contain_exactly('pending') + pipeline.builds.running_or_pending.each(&:success) + + expect(builds.pluck(:name)).to contain_exactly('build', 'test') + expect(builds.pluck(:status)).to contain_exactly('success', 'pending') + pipeline.builds.running_or_pending.each(&:success) + + expect(builds.pluck(:name)).to contain_exactly('build', 'test', 'deploy') + expect(builds.pluck(:status)).to contain_exactly('success', 'success', 'pending') + pipeline.builds.running_or_pending.each(&:success) + + expect(builds.pluck(:name)).to contain_exactly('build', 'test', 'deploy', 'cleanup') + expect(builds.pluck(:status)).to contain_exactly('success', 'success', 'success', 
'pending') + pipeline.builds.running_or_pending.each(&:success) + + expect(builds.pluck(:status)).to contain_exactly('success', 'success', 'success', 'success') + pipeline.reload + expect(pipeline.status).to eq('success') + end + end + + context 'when test job fails' do + it 'properly creates builds' do + expect(create_builds).to be_truthy + expect(builds.pluck(:name)).to contain_exactly('build') + expect(builds.pluck(:status)).to contain_exactly('pending') + pipeline.builds.running_or_pending.each(&:success) + + expect(builds.pluck(:name)).to contain_exactly('build', 'test') + expect(builds.pluck(:status)).to contain_exactly('success', 'pending') + pipeline.builds.running_or_pending.each(&:drop) + + expect(builds.pluck(:name)).to contain_exactly('build', 'test', 'test_failure') + expect(builds.pluck(:status)).to contain_exactly('success', 'failed', 'pending') + pipeline.builds.running_or_pending.each(&:success) + + expect(builds.pluck(:name)).to contain_exactly('build', 'test', 'test_failure', 'cleanup') + expect(builds.pluck(:status)).to contain_exactly('success', 'failed', 'success', 'pending') + pipeline.builds.running_or_pending.each(&:success) + + expect(builds.pluck(:status)).to contain_exactly('success', 'failed', 'success', 'success') + pipeline.reload + expect(pipeline.status).to eq('failed') + end + end + + context 'when test and test_failure jobs fail' do + it 'properly creates builds' do + expect(create_builds).to be_truthy + expect(builds.pluck(:name)).to contain_exactly('build') + expect(builds.pluck(:status)).to contain_exactly('pending') + pipeline.builds.running_or_pending.each(&:success) + + expect(builds.pluck(:name)).to contain_exactly('build', 'test') + expect(builds.pluck(:status)).to contain_exactly('success', 'pending') + pipeline.builds.running_or_pending.each(&:drop) + + expect(builds.pluck(:name)).to contain_exactly('build', 'test', 'test_failure') + expect(builds.pluck(:status)).to contain_exactly('success', 'failed', 'pending') + pipeline.builds.running_or_pending.each(&:drop) + + expect(builds.pluck(:name)).to contain_exactly('build', 'test', 'test_failure', 'cleanup') + expect(builds.pluck(:status)).to contain_exactly('success', 'failed', 'failed', 'pending') + pipeline.builds.running_or_pending.each(&:success) + + expect(builds.pluck(:name)).to contain_exactly('build', 'test', 'test_failure', 'cleanup') + expect(builds.pluck(:status)).to contain_exactly('success', 'failed', 'failed', 'success') + pipeline.reload + expect(pipeline.status).to eq('failed') + end + end + + context 'when deploy job fails' do + it 'properly creates builds' do + expect(create_builds).to be_truthy + expect(builds.pluck(:name)).to contain_exactly('build') + expect(builds.pluck(:status)).to contain_exactly('pending') + pipeline.builds.running_or_pending.each(&:success) + + expect(builds.pluck(:name)).to contain_exactly('build', 'test') + expect(builds.pluck(:status)).to contain_exactly('success', 'pending') + pipeline.builds.running_or_pending.each(&:success) + + expect(builds.pluck(:name)).to contain_exactly('build', 'test', 'deploy') + expect(builds.pluck(:status)).to contain_exactly('success', 'success', 'pending') + pipeline.builds.running_or_pending.each(&:drop) + + expect(builds.pluck(:name)).to contain_exactly('build', 'test', 'deploy', 'cleanup') + expect(builds.pluck(:status)).to contain_exactly('success', 'success', 'failed', 'pending') + pipeline.builds.running_or_pending.each(&:success) + + expect(builds.pluck(:status)).to contain_exactly('success', 'success', 'failed', 
'success') + pipeline.reload + expect(pipeline.status).to eq('failed') + end + end + + context 'when build is canceled in the second stage' do + it 'does not schedule builds after build has been canceled' do + expect(create_builds).to be_truthy + expect(builds.pluck(:name)).to contain_exactly('build') + expect(builds.pluck(:status)).to contain_exactly('pending') + pipeline.builds.running_or_pending.each(&:success) + + expect(builds.running_or_pending).not_to be_empty + + expect(builds.pluck(:name)).to contain_exactly('build', 'test') + expect(builds.pluck(:status)).to contain_exactly('success', 'pending') + pipeline.builds.running_or_pending.each(&:cancel) + + expect(builds.running_or_pending).to be_empty + expect(pipeline.reload.status).to eq('canceled') + end + end + + context 'when listing manual actions' do + it 'returns only for skipped builds' do + # currently all builds are created + expect(create_builds).to be_truthy + expect(manual_actions).to be_empty + + # succeed stage build + pipeline.builds.running_or_pending.each(&:success) + expect(manual_actions).to be_empty + + # succeed stage test + pipeline.builds.running_or_pending.each(&:success) + expect(manual_actions).to be_one # production + + # succeed stage deploy + pipeline.builds.running_or_pending.each(&:success) + expect(manual_actions).to be_many # production and clear cache + end + + def manual_actions + pipeline.manual_actions + end + end + end + + context 'creates a builds from .gitlab-ci.yml' do + let(:config) do + YAML.dump({ + rspec: { + stage: 'test', + script: 'rspec' + }, + rubocop: { + stage: 'test', + script: 'rubocop' + }, + deploy: { + stage: 'deploy', + script: 'deploy' + } + }) + end + + # Using stubbed .gitlab-ci.yml created in commit factory + # + + before do + stub_ci_pipeline_yaml_file(config) + create(:ci_build, :created, pipeline: pipeline, name: 'linux', stage: 'build', stage_idx: 0) + create(:ci_build, :created, pipeline: pipeline, name: 'mac', stage: 'build', stage_idx: 0) + end + + it 'when processing a pipeline' do + # Currently we have two builds with state created + expect(builds.count).to eq(0) + expect(all_builds.count).to eq(2) + + # Create builds will mark the created as pending + expect(create_builds).to be_truthy + expect(builds.count).to eq(2) + expect(all_builds.count).to eq(2) + + # When we builds succeed we will create a rest of pipeline from .gitlab-ci.yml + # We will have 2 succeeded, 2 pending (from stage test), total 5 (one more build from deploy) + succeed_pending + expect(create_builds).to be_truthy + expect(builds.success.count).to eq(2) + expect(builds.pending.count).to eq(2) + expect(all_builds.count).to eq(5) + + # When we succeed the 2 pending from stage test, + # We will queue a deploy stage, no new builds will be created + succeed_pending + expect(create_builds).to be_truthy + expect(builds.pending.count).to eq(1) + expect(builds.success.count).to eq(4) + expect(all_builds.count).to eq(5) + + # When we succeed last pending build, we will have a total of 5 succeeded builds, no new builds will be created + succeed_pending + expect(create_builds).to be_falsey + expect(builds.success.count).to eq(5) + expect(all_builds.count).to eq(5) + end + end + end +end diff --git a/spec/services/create_commit_builds_service_spec.rb b/spec/services/create_commit_builds_service_spec.rb deleted file mode 100644 index d4c5e584421..00000000000 --- a/spec/services/create_commit_builds_service_spec.rb +++ /dev/null @@ -1,241 +0,0 @@ -require 'spec_helper' - -describe CreateCommitBuildsService, 
services: true do - let(:service) { CreateCommitBuildsService.new } - let(:project) { FactoryGirl.create(:empty_project) } - let(:user) { create(:user) } - - before do - stub_ci_pipeline_to_return_yaml_file - end - - describe '#execute' do - context 'valid params' do - let(:pipeline) do - service.execute(project, user, - ref: 'refs/heads/master', - before: '00000000', - after: '31das312', - commits: [{ message: "Message" }] - ) - end - - it { expect(pipeline).to be_kind_of(Ci::Pipeline) } - it { expect(pipeline).to be_valid } - it { expect(pipeline).to be_persisted } - it { expect(pipeline).to eq(project.pipelines.last) } - it { expect(pipeline).to have_attributes(user: user) } - it { expect(pipeline.builds.first).to be_kind_of(Ci::Build) } - end - - context "skip tag if there is no build for it" do - it "creates commit if there is appropriate job" do - result = service.execute(project, user, - ref: 'refs/tags/0_1', - before: '00000000', - after: '31das312', - commits: [{ message: "Message" }] - ) - expect(result).to be_persisted - end - - it "creates commit if there is no appropriate job but deploy job has right ref setting" do - config = YAML.dump({ deploy: { script: "ls", only: ["0_1"] } }) - stub_ci_pipeline_yaml_file(config) - - result = service.execute(project, user, - ref: 'refs/heads/0_1', - before: '00000000', - after: '31das312', - commits: [{ message: "Message" }] - ) - expect(result).to be_persisted - end - end - - it 'skips creating pipeline for refs without .gitlab-ci.yml' do - stub_ci_pipeline_yaml_file(nil) - result = service.execute(project, user, - ref: 'refs/heads/0_1', - before: '00000000', - after: '31das312', - commits: [{ message: 'Message' }] - ) - expect(result).to be_falsey - expect(Ci::Pipeline.count).to eq(0) - end - - it 'fails commits if yaml is invalid' do - message = 'message' - allow_any_instance_of(Ci::Pipeline).to receive(:git_commit_message) { message } - stub_ci_pipeline_yaml_file('invalid: file: file') - commits = [{ message: message }] - pipeline = service.execute(project, user, - ref: 'refs/tags/0_1', - before: '00000000', - after: '31das312', - commits: commits - ) - expect(pipeline).to be_persisted - expect(pipeline.builds.any?).to be false - expect(pipeline.status).to eq('failed') - expect(pipeline.yaml_errors).not_to be_nil - end - - context 'when commit contains a [ci skip] directive' do - let(:message) { "some message[ci skip]" } - let(:messageFlip) { "some message[skip ci]" } - let(:capMessage) { "some message[CI SKIP]" } - let(:capMessageFlip) { "some message[SKIP CI]" } - - before do - allow_any_instance_of(Ci::Pipeline).to receive(:git_commit_message) { message } - end - - it "skips builds creation if there is [ci skip] tag in commit message" do - commits = [{ message: message }] - pipeline = service.execute(project, user, - ref: 'refs/tags/0_1', - before: '00000000', - after: '31das312', - commits: commits - ) - - expect(pipeline).to be_persisted - expect(pipeline.builds.any?).to be false - expect(pipeline.status).to eq("skipped") - end - - it "skips builds creation if there is [skip ci] tag in commit message" do - commits = [{ message: messageFlip }] - pipeline = service.execute(project, user, - ref: 'refs/tags/0_1', - before: '00000000', - after: '31das312', - commits: commits - ) - - expect(pipeline).to be_persisted - expect(pipeline.builds.any?).to be false - expect(pipeline.status).to eq("skipped") - end - - it "skips builds creation if there is [CI SKIP] tag in commit message" do - commits = [{ message: capMessage }] - pipeline = 
service.execute(project, user, - ref: 'refs/tags/0_1', - before: '00000000', - after: '31das312', - commits: commits - ) - - expect(pipeline).to be_persisted - expect(pipeline.builds.any?).to be false - expect(pipeline.status).to eq("skipped") - end - - it "skips builds creation if there is [SKIP CI] tag in commit message" do - commits = [{ message: capMessageFlip }] - pipeline = service.execute(project, user, - ref: 'refs/tags/0_1', - before: '00000000', - after: '31das312', - commits: commits - ) - - expect(pipeline).to be_persisted - expect(pipeline.builds.any?).to be false - expect(pipeline.status).to eq("skipped") - end - - it "does not skips builds creation if there is no [ci skip] or [skip ci] tag in commit message" do - allow_any_instance_of(Ci::Pipeline).to receive(:git_commit_message) { "some message" } - - commits = [{ message: "some message" }] - pipeline = service.execute(project, user, - ref: 'refs/tags/0_1', - before: '00000000', - after: '31das312', - commits: commits - ) - - expect(pipeline).to be_persisted - expect(pipeline.builds.first.name).to eq("staging") - end - - it "skips builds creation if there is [ci skip] tag in commit message and yaml is invalid" do - stub_ci_pipeline_yaml_file('invalid: file: fiile') - commits = [{ message: message }] - pipeline = service.execute(project, user, - ref: 'refs/tags/0_1', - before: '00000000', - after: '31das312', - commits: commits - ) - expect(pipeline).to be_persisted - expect(pipeline.builds.any?).to be false - expect(pipeline.status).to eq("skipped") - expect(pipeline.yaml_errors).to be_nil - end - end - - it "skips build creation if there are already builds" do - allow_any_instance_of(Ci::Pipeline).to receive(:ci_yaml_file) { gitlab_ci_yaml } - - commits = [{ message: "message" }] - pipeline = service.execute(project, user, - ref: 'refs/heads/master', - before: '00000000', - after: '31das312', - commits: commits - ) - expect(pipeline).to be_persisted - expect(pipeline.builds.count(:all)).to eq(2) - - pipeline = service.execute(project, user, - ref: 'refs/heads/master', - before: '00000000', - after: '31das312', - commits: commits - ) - expect(pipeline).to be_persisted - expect(pipeline.builds.count(:all)).to eq(2) - end - - it "creates commit with failed status if yaml is invalid" do - stub_ci_pipeline_yaml_file('invalid: file') - - commits = [{ message: "some message" }] - - pipeline = service.execute(project, user, - ref: 'refs/tags/0_1', - before: '00000000', - after: '31das312', - commits: commits - ) - - expect(pipeline).to be_persisted - expect(pipeline.status).to eq("failed") - expect(pipeline.builds.any?).to be false - end - - context 'when there are no jobs for this pipeline' do - before do - config = YAML.dump({ test: { script: 'ls', only: ['feature'] } }) - stub_ci_pipeline_yaml_file(config) - end - - it 'does not create a new pipeline' do - result = service.execute(project, user, - ref: 'refs/heads/master', - before: '00000000', - after: '31das312', - commits: [{ message: 'some msg' }]) - - expect(result).to be_falsey - expect(Ci::Build.all).to be_empty - expect(Ci::Pipeline.count).to eq(0) - end - end - end -end diff --git a/spec/services/delete_user_service_spec.rb b/spec/services/delete_user_service_spec.rb index a65938fa03b..418a12a83a9 100644 --- a/spec/services/delete_user_service_spec.rb +++ b/spec/services/delete_user_service_spec.rb @@ -9,13 +9,15 @@ describe DeleteUserService, services: true do context 'no options are given' do it 'deletes the user' do - 
DeleteUserService.new(current_user).execute(user) + user_data = DeleteUserService.new(current_user).execute(user) - expect { User.find(user.id) }.to raise_error(ActiveRecord::RecordNotFound) + expect { user_data['email'].to eq(user.email) } + expect { User.find(user.id) }.to raise_error(ActiveRecord::RecordNotFound) + expect { Namespace.with_deleted.find(user.namespace.id) }.to raise_error(ActiveRecord::RecordNotFound) end it 'will delete the project in the near future' do - expect_any_instance_of(Projects::DestroyService).to receive(:pending_delete!).once + expect_any_instance_of(Projects::DestroyService).to receive(:async_execute).once DeleteUserService.new(current_user).execute(user) end diff --git a/spec/services/destroy_group_service_spec.rb b/spec/services/destroy_group_service_spec.rb index eca8ddd8ea4..da724643604 100644 --- a/spec/services/destroy_group_service_spec.rb +++ b/spec/services/destroy_group_service_spec.rb @@ -7,38 +7,52 @@ describe DestroyGroupService, services: true do let!(:gitlab_shell) { Gitlab::Shell.new } let!(:remove_path) { group.path + "+#{group.id}+deleted" } - context 'database records' do - before do - destroy_group(group, user) + shared_examples 'group destruction' do |async| + context 'database records' do + before do + destroy_group(group, user, async) + end + + it { expect(Group.all).not_to include(group) } + it { expect(Project.all).not_to include(project) } end - it { expect(Group.all).not_to include(group) } - it { expect(Project.all).not_to include(project) } - end + context 'file system' do + context 'Sidekiq inline' do + before do + # Run sidekiq immediatly to check that renamed dir will be removed + Sidekiq::Testing.inline! { destroy_group(group, user, async) } + end - context 'file system' do - context 'Sidekiq inline' do - before do - # Run sidekiq immediatly to check that renamed dir will be removed - Sidekiq::Testing.inline! { destroy_group(group, user) } + it { expect(gitlab_shell.exists?(project.repository_storage_path, group.path)).to be_falsey } + it { expect(gitlab_shell.exists?(project.repository_storage_path, remove_path)).to be_falsey } end - it { expect(gitlab_shell.exists?(project.repository_storage_path, group.path)).to be_falsey } - it { expect(gitlab_shell.exists?(project.repository_storage_path, remove_path)).to be_falsey } - end + context 'Sidekiq fake' do + before do + # Dont run sidekiq to check if renamed repository exists + Sidekiq::Testing.fake! { destroy_group(group, user, async) } + end - context 'Sidekiq fake' do - before do - # Dont run sidekiq to check if renamed repository exists - Sidekiq::Testing.fake! 
{ destroy_group(group, user) } + it { expect(gitlab_shell.exists?(project.repository_storage_path, group.path)).to be_falsey } + it { expect(gitlab_shell.exists?(project.repository_storage_path, remove_path)).to be_truthy } end + end - it { expect(gitlab_shell.exists?(project.repository_storage_path, group.path)).to be_falsey } - it { expect(gitlab_shell.exists?(project.repository_storage_path, remove_path)).to be_truthy } + def destroy_group(group, user, async) + if async + DestroyGroupService.new(group, user).async_execute + else + DestroyGroupService.new(group, user).execute + end end end - def destroy_group(group, user) - DestroyGroupService.new(group, user).execute + describe 'asynchronous delete' do + it_behaves_like 'group destruction', true + end + + describe 'synchronous delete' do + it_behaves_like 'group destruction', false end end diff --git a/spec/services/merge_requests/get_urls_service_spec.rb b/spec/services/merge_requests/get_urls_service_spec.rb new file mode 100644 index 00000000000..8a4b76367e3 --- /dev/null +++ b/spec/services/merge_requests/get_urls_service_spec.rb @@ -0,0 +1,134 @@ +require "spec_helper" + +describe MergeRequests::GetUrlsService do + let(:project) { create(:project, :public) } + let(:service) { MergeRequests::GetUrlsService.new(project) } + let(:source_branch) { "my_branch" } + let(:new_merge_request_url) { "http://localhost/#{project.namespace.name}/#{project.path}/merge_requests/new?merge_request%5Bsource_branch%5D=#{source_branch}" } + let(:show_merge_request_url) { "http://localhost/#{project.namespace.name}/#{project.path}/merge_requests/#{merge_request.iid}" } + let(:new_branch_changes) { "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{source_branch}" } + let(:deleted_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 #{Gitlab::Git::BLANK_SHA} refs/heads/#{source_branch}" } + let(:existing_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{source_branch}" } + let(:default_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/master" } + + describe "#execute" do + shared_examples 'new_merge_request_link' do + it 'returns url to create new merge request' do + result = service.execute(changes) + expect(result).to match([{ + branch_name: source_branch, + url: new_merge_request_url, + new_merge_request: true + }]) + end + end + + shared_examples 'show_merge_request_url' do + it 'returns url to view merge request' do + result = service.execute(changes) + expect(result).to match([{ + branch_name: source_branch, + url: show_merge_request_url, + new_merge_request: false + }]) + end + end + + shared_examples 'no_merge_request_url' do + it 'returns no URL' do + result = service.execute(changes) + expect(result).to be_empty + end + end + + context 'pushing to default branch' do + let(:changes) { default_branch_changes } + it_behaves_like 'no_merge_request_url' + end + + context 'pushing to project with MRs disabled' do + let(:changes) { new_branch_changes } + + before do + project.merge_requests_enabled = false + end + + it_behaves_like 'no_merge_request_url' + end + + context 'pushing one completely new branch' do + let(:changes) { new_branch_changes } + it_behaves_like 'new_merge_request_link' + end + + context 'pushing to existing branch but no merge request' do + let(:changes) { existing_branch_changes } + it_behaves_like 'new_merge_request_link' + end + + context 'pushing to deleted branch' 
do + let(:changes) { deleted_branch_changes } + it_behaves_like 'no_merge_request_url' + end + + context 'pushing to existing branch and merge request opened' do + let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch) } + let(:changes) { existing_branch_changes } + it_behaves_like 'show_merge_request_url' + end + + context 'pushing to existing branch and merge request is reopened' do + let!(:merge_request) { create(:merge_request, :reopened, source_project: project, source_branch: source_branch) } + let(:changes) { existing_branch_changes } + it_behaves_like 'show_merge_request_url' + end + + context 'pushing to existing branch from forked project' do + let(:user) { create(:user) } + let!(:forked_project) { Projects::ForkService.new(project, user).execute } + let!(:merge_request) { create(:merge_request, source_project: forked_project, target_project: project, source_branch: source_branch) } + let(:changes) { existing_branch_changes } + # Source project is now the forked one + let(:service) { MergeRequests::GetUrlsService.new(forked_project) } + + before do + allow(forked_project).to receive(:empty_repo?).and_return(false) + end + + it_behaves_like 'show_merge_request_url' + end + + context 'pushing to existing branch and merge request is closed' do + let!(:merge_request) { create(:merge_request, :closed, source_project: project, source_branch: source_branch) } + let(:changes) { existing_branch_changes } + it_behaves_like 'new_merge_request_link' + end + + context 'pushing to existing branch and merge request is merged' do + let!(:merge_request) { create(:merge_request, :merged, source_project: project, source_branch: source_branch) } + let(:changes) { existing_branch_changes } + it_behaves_like 'new_merge_request_link' + end + + context 'pushing new branch and existing branch (with merge request created) at once' do + let!(:merge_request) { create(:merge_request, source_project: project, source_branch: "existing_branch") } + let(:new_branch_changes) { "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/new_branch" } + let(:existing_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/existing_branch" } + let(:changes) { "#{new_branch_changes}\n#{existing_branch_changes}" } + let(:new_merge_request_url) { "http://localhost/#{project.namespace.name}/#{project.path}/merge_requests/new?merge_request%5Bsource_branch%5D=new_branch" } + + it 'returns 2 urls for both creating new and showing merge request' do + result = service.execute(changes) + expect(result).to match([{ + branch_name: "new_branch", + url: new_merge_request_url, + new_merge_request: true + }, { + branch_name: "existing_branch", + url: show_merge_request_url, + new_merge_request: false + }]) + end + end + end +end diff --git a/spec/services/merge_requests/merge_when_build_succeeds_service_spec.rb b/spec/services/merge_requests/merge_when_build_succeeds_service_spec.rb index 4da8146e3d6..520e906b21f 100644 --- a/spec/services/merge_requests/merge_when_build_succeeds_service_spec.rb +++ b/spec/services/merge_requests/merge_when_build_succeeds_service_spec.rb @@ -110,19 +110,15 @@ describe MergeRequests::MergeWhenBuildSucceedsService do context 'properly handles multiple stages' do let(:ref) { mr_merge_if_green_enabled.source_branch } - let(:build) { create(:ci_build, pipeline: pipeline, ref: ref, name: 'build', stage: 'build') } - let(:test) { create(:ci_build, pipeline: pipeline, ref: ref, name: 
'test', stage: 'test') } + let!(:build) { create(:ci_build, :created, pipeline: pipeline, ref: ref, name: 'build', stage: 'build') } + let!(:test) { create(:ci_build, :created, pipeline: pipeline, ref: ref, name: 'test', stage: 'test') } + let(:pipeline) { create(:ci_empty_pipeline, ref: mr_merge_if_green_enabled.source_branch, project: project) } before do # This behavior of MergeRequest: we instantiate a new object allow_any_instance_of(MergeRequest).to receive(:pipeline).and_wrap_original do Ci::Pipeline.find(pipeline.id) end - - # We create test after the build - allow(pipeline).to receive(:create_next_builds).and_wrap_original do - test - end end it "doesn't merge if some stages failed" do diff --git a/spec/services/todo_service_spec.rb b/spec/services/todo_service_spec.rb index 34d8ea9090e..6c3cbeae13c 100644 --- a/spec/services/todo_service_spec.rb +++ b/spec/services/todo_service_spec.rb @@ -472,6 +472,42 @@ describe TodoService, services: true do expect(john_doe.todos_pending_count).to eq(1) end + describe '#mark_todos_as_done' do + let(:issue) { create(:issue, project: project, author: author, assignee: john_doe) } + + it 'marks a relation of todos as done' do + create(:todo, :mentioned, user: john_doe, target: issue, project: project) + + todos = TodosFinder.new(john_doe, {}).execute + expect { TodoService.new.mark_todos_as_done(todos, john_doe) } + .to change { john_doe.todos.done.count }.from(0).to(1) + end + + it 'marks an array of todos as done' do + todo = create(:todo, :mentioned, user: john_doe, target: issue, project: project) + + expect { TodoService.new.mark_todos_as_done([todo], john_doe) } + .to change { todo.reload.state }.from('pending').to('done') + end + + it 'returns the number of updated todos' do # Needed on API + todo = create(:todo, :mentioned, user: john_doe, target: issue, project: project) + + expect(TodoService.new.mark_todos_as_done([todo], john_doe)).to eq(1) + end + + it 'caches the number of todos of a user', :caching do + create(:todo, :mentioned, user: john_doe, target: issue, project: project) + todo = create(:todo, :mentioned, user: john_doe, target: issue, project: project) + TodoService.new.mark_todos_as_done([todo], john_doe) + + expect_any_instance_of(TodosFinder).not_to receive(:execute) + + expect(john_doe.todos_done_count).to eq(1) + expect(john_doe.todos_pending_count).to eq(1) + end + end + def should_create_todo(attributes = {}) attributes.reverse_merge!( project: project, diff --git a/spec/support/api/members_shared_examples.rb b/spec/support/api/members_shared_examples.rb new file mode 100644 index 00000000000..dab71a35a55 --- /dev/null +++ b/spec/support/api/members_shared_examples.rb @@ -0,0 +1,11 @@ +shared_examples 'a 404 response when source is private' do + before do + source.update_column(:visibility_level, Gitlab::VisibilityLevel::PRIVATE) + end + + it 'returns 404' do + route + + expect(response).to have_http_status(404) + end +end diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb index baf78208ec5..548e7780c36 100644 --- a/spec/tasks/gitlab/backup_rake_spec.rb +++ b/spec/tasks/gitlab/backup_rake_spec.rb @@ -42,7 +42,7 @@ describe 'gitlab:app namespace rake task' do before do allow(Dir).to receive(:glob).and_return([]) allow(Dir).to receive(:chdir) - allow(File).to receive(:exists?).and_return(true) + allow(File).to receive(:exist?).and_return(true) allow(Kernel).to receive(:system).and_return(true) allow(FileUtils).to receive(:cp_r).and_return(true) allow(FileUtils).to 
receive(:mv).and_return(true) diff --git a/spec/views/projects/merge_requests/_heading.html.haml_spec.rb b/spec/views/projects/merge_requests/_heading.html.haml_spec.rb new file mode 100644 index 00000000000..733b2dfa7ff --- /dev/null +++ b/spec/views/projects/merge_requests/_heading.html.haml_spec.rb @@ -0,0 +1,26 @@ +require 'spec_helper' + +describe 'projects/merge_requests/widget/_heading' do + include Devise::TestHelpers + + context 'when released to an environment' do + let(:project) { merge_request.target_project } + let(:merge_request) { create(:merge_request, :merged) } + let(:environment) { create(:environment, project: project) } + let!(:deployment) do + create(:deployment, environment: environment, sha: project.commit('master').id) + end + + before do + assign(:merge_request, merge_request) + assign(:project, project) + + render + end + + it 'displays that the environment is deployed' do + expect(rendered).to match("Deployed to") + expect(rendered).to match("#{environment.name}") + end + end +end diff --git a/spec/workers/group_destroy_worker_spec.rb b/spec/workers/group_destroy_worker_spec.rb new file mode 100644 index 00000000000..4e4eaf9b2f7 --- /dev/null +++ b/spec/workers/group_destroy_worker_spec.rb @@ -0,0 +1,19 @@ +require 'spec_helper' + +describe GroupDestroyWorker do + let(:group) { create(:group) } + let(:user) { create(:admin) } + let!(:project) { create(:project, namespace: group) } + + subject { GroupDestroyWorker.new } + + describe "#perform" do + it "deletes the project" do + subject.perform(group.id, user.id) + + expect(Group.all).not_to include(group) + expect(Project.all).not_to include(project) + expect(Dir.exist?(project.path)).to be_falsey + end + end +end diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb index 7f803a06902..1d2cf7acddd 100644 --- a/spec/workers/post_receive_spec.rb +++ b/spec/workers/post_receive_spec.rb @@ -53,7 +53,13 @@ describe PostReceive do subject { PostReceive.new.perform(pwd(project), key_id, base64_changes) } context "creates a Ci::Pipeline for every change" do - before { stub_ci_pipeline_to_return_yaml_file } + before do + allow_any_instance_of(Ci::CreatePipelineService).to receive(:commit) do + OpenStruct.new(id: '123456') + end + allow_any_instance_of(Ci::CreatePipelineService).to receive(:branch?).and_return(true) + stub_ci_pipeline_to_return_yaml_file + end it { expect{ subject }.to change{ Ci::Pipeline.count }.by(2) } end |
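
For reference, several of the new specs above (DestroyGroupService, GroupDestroyWorker, TodoService) rely on the same Sidekiq testing pattern: wrap the call in Sidekiq::Testing.fake! so jobs accumulate in the worker's in-memory queue, assert on that queue, and only then drain it. The snippet below is an illustrative sketch, not part of this commit: it assumes GitLab's spec_helper, the group and admin factories used in the specs above, and the GroupDestroyWorker#perform(group_id, user_id) signature shown in spec/workers/group_destroy_worker_spec.rb; fake!, .jobs and .drain are standard sidekiq/testing APIs.

require 'spec_helper'

describe GroupDestroyWorker, 'queueing behaviour' do
  let(:group) { create(:group) }
  let(:user)  { create(:admin) }

  around do |example|
    # Fake mode collects jobs in GroupDestroyWorker.jobs instead of pushing
    # them to Redis, so nothing runs until we drain the queue ourselves.
    Sidekiq::Testing.fake! { example.run }
  end

  it 'only records the job until the queue is drained' do
    expect { described_class.perform_async(group.id, user.id) }
      .to change(described_class.jobs, :size).by(1)

    # The group survives while the job is merely queued.
    expect(Group.all).to include(group)

    # Draining executes the queued perform(group_id, user_id) calls.
    described_class.drain

    expect(Group.all).not_to include(group)
  end
end

The same fake!/jobs idiom is what lets the asynchronous-delete examples in spec/services/destroy_group_service_spec.rb distinguish "work was scheduled" from "work was performed" without ever touching Redis.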