Diffstat (limited to 'spec')
92 files changed, 1904 insertions(+), 436 deletions(-)
diff --git a/spec/bin/storage_check_spec.rb b/spec/bin/storage_check_spec.rb new file mode 100644 index 00000000000..02f6fcb6e3a --- /dev/null +++ b/spec/bin/storage_check_spec.rb @@ -0,0 +1,13 @@ +require 'spec_helper' + +describe 'bin/storage_check' do + it 'is executable' do + command = %w[bin/storage_check -t unix://the/path/to/a/unix-socket.sock -i 10 -d] + expected_output = 'Checking unix://the/path/to/a/unix-socket.sock every 10 seconds' + + output, status = Gitlab::Popen.popen(command, Rails.root.to_s) + + expect(status).to eq(0) + expect(output).to include(expected_output) + end +end diff --git a/spec/controllers/admin/health_check_controller_spec.rb b/spec/controllers/admin/health_check_controller_spec.rb index 0b8e0c8a065..d15ee0021d9 100644 --- a/spec/controllers/admin/health_check_controller_spec.rb +++ b/spec/controllers/admin/health_check_controller_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe Admin::HealthCheckController, broken_storage: true do +describe Admin::HealthCheckController do let(:admin) { create(:admin) } before do @@ -17,7 +17,7 @@ describe Admin::HealthCheckController, broken_storage: true do describe 'POST reset_storage_health' do it 'resets all storage health information' do - expect(Gitlab::Git::Storage::CircuitBreaker).to receive(:reset_all!) + expect(Gitlab::Git::Storage::FailureInfo).to receive(:reset_all!) post :reset_storage_health end diff --git a/spec/controllers/groups/group_members_controller_spec.rb b/spec/controllers/groups/group_members_controller_spec.rb index 9c6d584f59b..362d5cc4514 100644 --- a/spec/controllers/groups/group_members_controller_spec.rb +++ b/spec/controllers/groups/group_members_controller_spec.rb @@ -62,6 +62,25 @@ describe Groups::GroupMembersController do end end + describe 'PUT update' do + let(:requester) { create(:group_member, :access_request, group: group) } + + before do + group.add_owner(user) + sign_in(user) + end + + Gitlab::Access.options.each do |label, value| + it "can change the access level to #{label}" do + xhr :put, :update, group_member: { access_level: value }, + group_id: group, + id: requester + + expect(requester.reload.human_access).to eq(label) + end + end + end + describe 'DELETE destroy' do let(:member) { create(:group_member, :developer, group: group) } diff --git a/spec/controllers/health_controller_spec.rb b/spec/controllers/health_controller_spec.rb index 9e9cf4f2c1f..95946def5f9 100644 --- a/spec/controllers/health_controller_spec.rb +++ b/spec/controllers/health_controller_spec.rb @@ -14,6 +14,48 @@ describe HealthController do stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false') end + describe '#storage_check' do + before do + allow(Gitlab::RequestContext).to receive(:client_ip).and_return(whitelisted_ip) + end + + subject { post :storage_check } + + it 'checks all the configured storages' do + expect(Gitlab::Git::Storage::Checker).to receive(:check_all).and_call_original + + subject + end + + it 'returns the check interval' do + stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'true') + stub_application_setting(circuitbreaker_check_interval: 10) + + subject + + expect(json_response['check_interval']).to eq(10) + end + + context 'with failing storages', :broken_storage do + before do + stub_storage_settings( + broken: { path: 'tmp/tests/non-existent-repositories' } + ) + end + + it 'includes the failure information' do + subject + + expected_results = [ + { 'storage' => 'broken', 'success' => false }, + { 'storage' => 'default', 'success' => true } + ] + + 
expect(json_response['results']).to eq(expected_results) + end + end + end + describe '#readiness' do shared_context 'endpoint responding with readiness data' do let(:request_params) { {} } diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb index 4dbbaecdd6d..c5d08cb0b9d 100644 --- a/spec/controllers/projects/issues_controller_spec.rb +++ b/spec/controllers/projects/issues_controller_spec.rb @@ -272,6 +272,20 @@ describe Projects::IssuesController do expect(response).to have_http_status(:ok) expect(issue.reload.title).to eq('New title') end + + context 'when Akismet is enabled and the issue is identified as spam' do + before do + stub_application_setting(recaptcha_enabled: true) + allow_any_instance_of(SpamService).to receive(:check_for_spam?).and_return(true) + allow_any_instance_of(AkismetService).to receive(:spam?).and_return(true) + end + + it 'renders json with recaptcha_html' do + subject + + expect(JSON.parse(response.body)).to have_key('recaptcha_html') + end + end end context 'when user does not have access to update issue' do @@ -504,17 +518,16 @@ describe Projects::IssuesController do expect(spam_logs.first.recaptcha_verified).to be_falsey end - it 'renders json errors' do + it 'renders recaptcha_html json response' do update_issue - expect(json_response) - .to eql("errors" => ["Your issue has been recognized as spam. Please, change the content or solve the reCAPTCHA to proceed."]) + expect(json_response).to have_key('recaptcha_html') end - it 'returns 422 status' do + it 'returns 200 status' do update_issue - expect(response).to have_gitlab_http_status(422) + expect(response).to have_gitlab_http_status(200) end end diff --git a/spec/controllers/projects/project_members_controller_spec.rb b/spec/controllers/projects/project_members_controller_spec.rb index a34dc27a5ed..290dba0610a 100644 --- a/spec/controllers/projects/project_members_controller_spec.rb +++ b/spec/controllers/projects/project_members_controller_spec.rb @@ -66,6 +66,26 @@ describe Projects::ProjectMembersController do end end + describe 'PUT update' do + let(:requester) { create(:project_member, :access_request, project: project) } + + before do + project.add_master(user) + sign_in(user) + end + + Gitlab::Access.options.each do |label, value| + it "can change the access level to #{label}" do + xhr :put, :update, project_member: { access_level: value }, + namespace_id: project.namespace, + project_id: project, + id: requester + + expect(requester.reload.human_access).to eq(label) + end + end + end + describe 'DELETE destroy' do let(:member) { create(:project_member, :developer, project: project) } diff --git a/spec/factories/users.rb b/spec/factories/users.rb index 4000cd085b7..8ace424f8af 100644 --- a/spec/factories/users.rb +++ b/spec/factories/users.rb @@ -58,6 +58,10 @@ FactoryGirl.define do end end + trait :readme do + project_view :readme + end + factory :omniauth_user do transient do extern_uid '123456' diff --git a/spec/features/admin/admin_health_check_spec.rb b/spec/features/admin/admin_health_check_spec.rb index 4430fc15501..ac3392b49f9 100644 --- a/spec/features/admin/admin_health_check_spec.rb +++ b/spec/features/admin/admin_health_check_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -feature "Admin Health Check", :feature, :broken_storage do +feature "Admin Health Check", :feature do include StubENV before do @@ -36,6 +36,7 @@ feature "Admin Health Check", :feature, :broken_storage do context 'when services are up' do before do + 
stub_storage_settings({}) # Hide the broken storage visit admin_health_check_path end @@ -56,10 +57,8 @@ feature "Admin Health Check", :feature, :broken_storage do end end - context 'with repository storage failures' do + context 'with repository storage failures', :broken_storage do before do - # Track a failure - Gitlab::Git::Storage::CircuitBreaker.for_storage('broken').perform { nil } rescue nil visit admin_health_check_path end @@ -67,9 +66,10 @@ feature "Admin Health Check", :feature, :broken_storage do hostname = Gitlab::Environment.hostname maximum_failures = Gitlab::CurrentSettings.current_application_settings .circuitbreaker_failure_count_threshold + number_of_failures = maximum_failures + 1 - expect(page).to have_content('broken: failed storage access attempt on host:') - expect(page).to have_content("#{hostname}: 1 of #{maximum_failures} failures.") + expect(page).to have_content("broken: #{number_of_failures} failed storage access attempts:") + expect(page).to have_content("#{hostname}: #{number_of_failures} of #{maximum_failures} failures.") end it 'allows resetting storage failures' do diff --git a/spec/features/groups/members/manage_members.rb b/spec/features/groups/members/manage_members.rb index da1e17225db..21f7b4999ad 100644 --- a/spec/features/groups/members/manage_members.rb +++ b/spec/features/groups/members/manage_members.rb @@ -38,6 +38,27 @@ feature 'Groups > Members > Manage members' do end end + scenario 'do not disclose email addresses', :js do + group.add_owner(user1) + create(:user, email: 'undisclosed_email@gitlab.com', name: "Jane 'invisible' Doe") + + visit group_group_members_path(group) + + find('.select2-container').click + select_input = find('.select2-input') + + select_input.send_keys('@gitlab.com') + wait_for_requests + + expect(page).to have_content('No matches found') + + select_input.native.clear + select_input.send_keys('undisclosed_email@gitlab.com') + wait_for_requests + + expect(page).to have_content("Jane 'invisible' Doe") + end + scenario 'remove user from group', :js do group.add_owner(user1) group.add_developer(user2) diff --git a/spec/features/merge_requests/image_diff_notes.rb b/spec/features/merge_requests/image_diff_notes.rb index 3c53b51e330..021c4e03428 100644 --- a/spec/features/merge_requests/image_diff_notes.rb +++ b/spec/features/merge_requests/image_diff_notes.rb @@ -185,6 +185,18 @@ feature 'image diff notes', :js do expect(page).to have_content(diff_note.note) end end + + describe 'image view modes' do + before do + visit project_commit_path(project, '2f63565e7aac07bcdadb654e253078b727143ec4') + end + + it 'resizes image in onion skin view mode' do + find('.view-modes-menu .onion-skin').click + + expect(find('.onion-skin-frame')['style']).to match('width: 228px; height: 240px;') + end + end end def create_image_diff_note diff --git a/spec/features/merge_requests/widget_spec.rb b/spec/features/merge_requests/widget_spec.rb index 2bad3b02250..3ee094c216e 100644 --- a/spec/features/merge_requests/widget_spec.rb +++ b/spec/features/merge_requests/widget_spec.rb @@ -63,6 +63,18 @@ describe 'Merge request', :js do expect(page).to have_selector('.accept-merge-request') expect(find('.accept-merge-request')['disabled']).not_to be(true) end + + it 'allows me to merge, see cherry-pick modal and load branches list' do + wait_for_requests + click_button 'Merge' + + wait_for_requests + click_link 'Cherry-pick' + page.find('.js-project-refs-dropdown').click + wait_for_requests + + expect(page.all('.js-cherry-pick-form .dropdown-content 
li').size).to be > 1 + end end context 'view merge request with external CI service' do diff --git a/spec/helpers/preferences_helper_spec.rb b/spec/helpers/preferences_helper_spec.rb index 8b8080563d3..749aa25e632 100644 --- a/spec/helpers/preferences_helper_spec.rb +++ b/spec/helpers/preferences_helper_spec.rb @@ -77,15 +77,6 @@ describe PreferencesHelper do end end - def stub_user(messages = {}) - if messages.empty? - allow(helper).to receive(:current_user).and_return(nil) - else - allow(helper).to receive(:current_user) - .and_return(double('user', messages)) - end - end - describe '#default_project_view' do context 'user not signed in' do before do @@ -125,5 +116,70 @@ describe PreferencesHelper do end end end + + context 'user signed in' do + let(:user) { create(:user, :readme) } + let(:project) { create(:project, :public, :repository) } + + before do + helper.instance_variable_set(:@project, project) + allow(helper).to receive(:current_user).and_return(user) + end + + context 'when the user is allowed to see the code' do + it 'returns the project view' do + allow(helper).to receive(:can?).with(user, :download_code, project).and_return(true) + + expect(helper.default_project_view).to eq('readme') + end + end + + context 'with wikis enabled and the right policy for the user' do + before do + project.project_feature.update_attribute(:issues_access_level, 0) + allow(helper).to receive(:can?).with(user, :download_code, project).and_return(false) + end + + it 'returns wiki if the user has the right policy' do + allow(helper).to receive(:can?).with(user, :read_wiki, project).and_return(true) + + expect(helper.default_project_view).to eq('wiki') + end + + it 'returns customize_workflow if the user does not have the right policy' do + allow(helper).to receive(:can?).with(user, :read_wiki, project).and_return(false) + + expect(helper.default_project_view).to eq('customize_workflow') + end + end + + context 'with issues as a feature available' do + it 'return issues' do + allow(helper).to receive(:can?).with(user, :download_code, project).and_return(false) + allow(helper).to receive(:can?).with(user, :read_wiki, project).and_return(false) + + expect(helper.default_project_view).to eq('projects/issues/issues') + end + end + + context 'with no activity, no wikies and no issues' do + it 'returns customize_workflow as default' do + project.project_feature.update_attribute(:issues_access_level, 0) + allow(helper).to receive(:can?).with(user, :download_code, project).and_return(false) + allow(helper).to receive(:can?).with(user, :read_wiki, project).and_return(false) + + expect(helper.default_project_view).to eq('customize_workflow') + end + end + end + end + + def stub_user(messages = {}) + if messages.empty? 
+ allow(helper).to receive(:current_user).and_return(nil) + else + allow(helper).to receive(:current_user) + .and_return(double('user', messages)) + end end end diff --git a/spec/javascripts/boards/boards_store_spec.js b/spec/javascripts/boards/boards_store_spec.js index 9e5b0bd3efe..0e656858182 100644 --- a/spec/javascripts/boards/boards_store_spec.js +++ b/spec/javascripts/boards/boards_store_spec.js @@ -9,7 +9,6 @@ import Vue from 'vue'; import Cookies from 'js-cookie'; -import '~/lib/utils/url_utility'; import '~/boards/models/issue'; import '~/boards/models/label'; import '~/boards/models/list'; diff --git a/spec/javascripts/boards/issue_spec.js b/spec/javascripts/boards/issue_spec.js index 10b88878c2a..41dcb19df3c 100644 --- a/spec/javascripts/boards/issue_spec.js +++ b/spec/javascripts/boards/issue_spec.js @@ -4,7 +4,6 @@ /* global mockBoardService */ import Vue from 'vue'; -import '~/lib/utils/url_utility'; import '~/boards/models/issue'; import '~/boards/models/label'; import '~/boards/models/list'; diff --git a/spec/javascripts/boards/list_spec.js b/spec/javascripts/boards/list_spec.js index d4627223a12..eead396ca7e 100644 --- a/spec/javascripts/boards/list_spec.js +++ b/spec/javascripts/boards/list_spec.js @@ -9,7 +9,6 @@ import Vue from 'vue'; -import '~/lib/utils/url_utility'; import '~/boards/models/issue'; import '~/boards/models/label'; import '~/boards/models/list'; diff --git a/spec/javascripts/deploy_keys/components/action_btn_spec.js b/spec/javascripts/deploy_keys/components/action_btn_spec.js index 5b93fbc5575..7025c3d836c 100644 --- a/spec/javascripts/deploy_keys/components/action_btn_spec.js +++ b/spec/javascripts/deploy_keys/components/action_btn_spec.js @@ -34,7 +34,7 @@ describe('Deploy keys action btn', () => { setTimeout(() => { expect( eventHub.$emit, - ).toHaveBeenCalledWith('enable.key', deployKey); + ).toHaveBeenCalledWith('enable.key', deployKey, jasmine.anything()); done(); }); diff --git a/spec/javascripts/deploy_keys/components/app_spec.js b/spec/javascripts/deploy_keys/components/app_spec.js index 700897f50b0..0ca9290d3d2 100644 --- a/spec/javascripts/deploy_keys/components/app_spec.js +++ b/spec/javascripts/deploy_keys/components/app_spec.js @@ -139,4 +139,18 @@ describe('Deploy keys app component', () => { it('hasKeys returns true when there are keys', () => { expect(vm.hasKeys).toEqual(3); }); + + it('resets remove button loading state', (done) => { + spyOn(window, 'confirm').and.returnValue(false); + + const btn = vm.$el.querySelector('.btn-warning'); + + btn.click(); + + Vue.nextTick(() => { + expect(btn.querySelector('.fa')).toBeNull(); + + done(); + }); + }); }); diff --git a/spec/javascripts/filtered_search/filtered_search_manager_spec.js b/spec/javascripts/filtered_search/filtered_search_manager_spec.js index 230c15e5de6..5111632d681 100644 --- a/spec/javascripts/filtered_search/filtered_search_manager_spec.js +++ b/spec/javascripts/filtered_search/filtered_search_manager_spec.js @@ -1,8 +1,8 @@ +import * as urlUtils from '~/lib/utils/url_utility'; import * as recentSearchesStoreSrc from '~/filtered_search/stores/recent_searches_store'; import RecentSearchesService from '~/filtered_search/services/recent_searches_service'; import RecentSearchesServiceError from '~/filtered_search/services/recent_searches_service_error'; import RecentSearchesRoot from '~/filtered_search/recent_searches_root'; -import '~/lib/utils/url_utility'; import '~/lib/utils/common_utils'; import '~/filtered_search/filtered_search_token_keys'; import 
'~/filtered_search/filtered_search_tokenizer'; @@ -162,7 +162,7 @@ describe('Filtered Search Manager', () => { it('should search with a single word', (done) => { input.value = 'searchTerm'; - spyOn(gl.utils, 'visitUrl').and.callFake((url) => { + spyOn(urlUtils, 'visitUrl').and.callFake((url) => { expect(url).toEqual(`${defaultParams}&search=searchTerm`); done(); }); @@ -173,7 +173,7 @@ describe('Filtered Search Manager', () => { it('should search with multiple words', (done) => { input.value = 'awesome search terms'; - spyOn(gl.utils, 'visitUrl').and.callFake((url) => { + spyOn(urlUtils, 'visitUrl').and.callFake((url) => { expect(url).toEqual(`${defaultParams}&search=awesome+search+terms`); done(); }); @@ -184,7 +184,7 @@ describe('Filtered Search Manager', () => { it('should search with special characters', (done) => { input.value = '~!@#$%^&*()_+{}:<>,.?/'; - spyOn(gl.utils, 'visitUrl').and.callFake((url) => { + spyOn(urlUtils, 'visitUrl').and.callFake((url) => { expect(url).toEqual(`${defaultParams}&search=~!%40%23%24%25%5E%26*()_%2B%7B%7D%3A%3C%3E%2C.%3F%2F`); done(); }); @@ -198,7 +198,7 @@ describe('Filtered Search Manager', () => { ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '~bug')} `); - spyOn(gl.utils, 'visitUrl').and.callFake((url) => { + spyOn(urlUtils, 'visitUrl').and.callFake((url) => { expect(url).toEqual(`${defaultParams}&label_name[]=bug`); done(); }); diff --git a/spec/javascripts/fly_out_nav_spec.js b/spec/javascripts/fly_out_nav_spec.js index 4f20e31f511..a3fa07d5bc2 100644 --- a/spec/javascripts/fly_out_nav_spec.js +++ b/spec/javascripts/fly_out_nav_spec.js @@ -253,7 +253,7 @@ describe('Fly out sidebar navigation', () => { it('shows collapsed only sub-items if icon only sidebar', () => { const subItems = el.querySelector('.sidebar-sub-level-items'); const sidebar = document.createElement('div'); - sidebar.classList.add('sidebar-icons-only'); + sidebar.classList.add('sidebar-collapsed-desktop'); subItems.classList.add('is-fly-out-only'); setSidebar(sidebar); @@ -343,7 +343,7 @@ describe('Fly out sidebar navigation', () => { it('returns true when active & collapsed sidebar', () => { const sidebar = document.createElement('div'); - sidebar.classList.add('sidebar-icons-only'); + sidebar.classList.add('sidebar-collapsed-desktop'); el.classList.add('active'); setSidebar(sidebar); diff --git a/spec/javascripts/gl_dropdown_spec.js b/spec/javascripts/gl_dropdown_spec.js index ca048123bf7..b13d1bf8dff 100644 --- a/spec/javascripts/gl_dropdown_spec.js +++ b/spec/javascripts/gl_dropdown_spec.js @@ -2,7 +2,7 @@ import '~/gl_dropdown'; import '~/lib/utils/common_utils'; -import '~/lib/utils/url_utility'; +import * as urlUtils from '~/lib/utils/url_utility'; describe('glDropdown', function describeDropdown() { preloadFixtures('static/gl_dropdown.html.raw'); @@ -137,13 +137,13 @@ describe('glDropdown', function describeDropdown() { expect(this.dropdownContainerElement).toHaveClass('open'); const randomIndex = Math.floor(Math.random() * (this.projectsData.length - 1)) + 0; navigateWithKeys('down', randomIndex, () => { - spyOn(gl.utils, 'visitUrl').and.stub(); + spyOn(urlUtils, 'visitUrl').and.stub(); navigateWithKeys('enter', null, () => { expect(this.dropdownContainerElement).not.toHaveClass('open'); const link = $(`${ITEM_SELECTOR}:eq(${randomIndex}) a`, this.$dropdownMenuElement); expect(link).toHaveClass('is-active'); const linkedLocation = link.attr('href'); - if (linkedLocation && linkedLocation !== '#') 
expect(gl.utils.visitUrl).toHaveBeenCalledWith(linkedLocation); + if (linkedLocation && linkedLocation !== '#') expect(urlUtils.visitUrl).toHaveBeenCalledWith(linkedLocation); }); }); }); diff --git a/spec/javascripts/groups/components/app_spec.js b/spec/javascripts/groups/components/app_spec.js index 59d4f7c45c6..97e39f6411b 100644 --- a/spec/javascripts/groups/components/app_spec.js +++ b/spec/javascripts/groups/components/app_spec.js @@ -1,9 +1,9 @@ import Vue from 'vue'; +import * as utils from '~/lib/utils/url_utility'; import appComponent from '~/groups/components/app.vue'; import groupFolderComponent from '~/groups/components/group_folder.vue'; import groupItemComponent from '~/groups/components/group_item.vue'; - import eventHub from '~/groups/event_hub'; import GroupsStore from '~/groups/store/groups_store'; import GroupsService from '~/groups/service/groups_service'; @@ -176,7 +176,7 @@ describe('AppComponent', () => { it('should fetch groups for provided page details and update window state', (done) => { spyOn(vm, 'fetchGroups').and.returnValue(returnServicePromise(mockGroups)); spyOn(vm, 'updateGroups').and.callThrough(); - spyOn(gl.utils, 'mergeUrlParams').and.callThrough(); + spyOn(utils, 'mergeUrlParams').and.callThrough(); spyOn(window.history, 'replaceState'); spyOn($, 'scrollTo'); @@ -192,7 +192,7 @@ describe('AppComponent', () => { setTimeout(() => { expect(vm.isLoading).toBeFalsy(); expect($.scrollTo).toHaveBeenCalledWith(0); - expect(gl.utils.mergeUrlParams).toHaveBeenCalledWith({ page: 2 }, jasmine.any(String)); + expect(utils.mergeUrlParams).toHaveBeenCalledWith({ page: 2 }, jasmine.any(String)); expect(window.history.replaceState).toHaveBeenCalledWith({ page: jasmine.any(String), }, jasmine.any(String), jasmine.any(String)); diff --git a/spec/javascripts/groups/components/group_item_spec.js b/spec/javascripts/groups/components/group_item_spec.js index 0f4fbdae445..618d0022e4f 100644 --- a/spec/javascripts/groups/components/group_item_spec.js +++ b/spec/javascripts/groups/components/group_item_spec.js @@ -1,5 +1,5 @@ import Vue from 'vue'; - +import * as urlUtils from '~/lib/utils/url_utility'; import groupItemComponent from '~/groups/components/group_item.vue'; import groupFolderComponent from '~/groups/components/group_folder.vue'; import eventHub from '~/groups/event_hub'; @@ -136,13 +136,13 @@ describe('GroupItemComponent', () => { const group = Object.assign({}, mockParentGroupItem); group.childrenCount = 0; const newVm = createComponent(group); - spyOn(gl.utils, 'visitUrl').and.stub(); + spyOn(urlUtils, 'visitUrl').and.stub(); spyOn(eventHub, '$emit'); newVm.onClickRowGroup(event); setTimeout(() => { expect(eventHub.$emit).not.toHaveBeenCalled(); - expect(gl.utils.visitUrl).toHaveBeenCalledWith(newVm.group.relativePath); + expect(urlUtils.visitUrl).toHaveBeenCalledWith(newVm.group.relativePath); done(); }, 0); }); diff --git a/spec/javascripts/issue_show/components/app_spec.js b/spec/javascripts/issue_show/components/app_spec.js index b47a8bf705f..729c3c29f22 100644 --- a/spec/javascripts/issue_show/components/app_spec.js +++ b/spec/javascripts/issue_show/components/app_spec.js @@ -1,9 +1,11 @@ import Vue from 'vue'; import '~/render_math'; import '~/render_gfm'; +import * as urlUtils from '~/lib/utils/url_utility'; import issuableApp from '~/issue_show/components/app.vue'; import eventHub from '~/issue_show/event_hub'; import issueShowData from '../mock_data'; +import setTimeoutPromise from '../../helpers/set_timeout_promise_helper'; function formatText(text) 
{ return text.trim().replace(/\s\s+/g, ' '); @@ -55,6 +57,8 @@ describe('Issuable output', () => { Vue.http.interceptors = _.without(Vue.http.interceptors, interceptor); vm.poll.stop(); + + vm.$destroy(); }); it('should render a title/description/edited and update title/description/edited on update', (done) => { @@ -177,7 +181,7 @@ describe('Issuable output', () => { }); it('does not redirect if issue has not moved', (done) => { - spyOn(gl.utils, 'visitUrl'); + spyOn(urlUtils, 'visitUrl'); spyOn(vm.service, 'updateIssuable').and.callFake(() => new Promise((resolve) => { resolve({ json() { @@ -193,7 +197,7 @@ describe('Issuable output', () => { setTimeout(() => { expect( - gl.utils.visitUrl, + urlUtils.visitUrl, ).not.toHaveBeenCalled(); done(); @@ -201,7 +205,7 @@ describe('Issuable output', () => { }); it('redirects if returned web_url has changed', (done) => { - spyOn(gl.utils, 'visitUrl'); + spyOn(urlUtils, 'visitUrl'); spyOn(vm.service, 'updateIssuable').and.callFake(() => new Promise((resolve) => { resolve({ json() { @@ -217,7 +221,7 @@ describe('Issuable output', () => { setTimeout(() => { expect( - gl.utils.visitUrl, + urlUtils.visitUrl, ).toHaveBeenCalledWith('/testing-issue-move'); done(); @@ -268,9 +272,55 @@ describe('Issuable output', () => { }); }); + it('opens recaptcha dialog if update rejected as spam', (done) => { + function mockScriptSrc() { + const recaptchaChild = vm.$children + .find(child => child.$options._componentTag === 'recaptcha-dialog'); // eslint-disable-line no-underscore-dangle + + recaptchaChild.scriptSrc = '//scriptsrc'; + } + + let modal; + const promise = new Promise((resolve) => { + resolve({ + json() { + return { + recaptcha_html: '<div class="g-recaptcha">recaptcha_html</div>', + }; + }, + }); + }); + + spyOn(vm.service, 'updateIssuable').and.returnValue(promise); + + vm.canUpdate = true; + vm.showForm = true; + + vm.$nextTick() + .then(() => mockScriptSrc()) + .then(() => vm.updateIssuable()) + .then(promise) + .then(() => setTimeoutPromise()) + .then(() => { + modal = vm.$el.querySelector('.js-recaptcha-dialog'); + + expect(modal.style.display).not.toEqual('none'); + expect(modal.querySelector('.g-recaptcha').textContent).toEqual('recaptcha_html'); + expect(document.body.querySelector('.js-recaptcha-script').src).toMatch('//scriptsrc'); + }) + .then(() => modal.querySelector('.close').click()) + .then(() => vm.$nextTick()) + .then(() => { + expect(modal.style.display).toEqual('none'); + expect(document.body.querySelector('.js-recaptcha-script')).toBeNull(); + }) + .then(done) + .catch(done.fail); + }); + describe('deleteIssuable', () => { it('changes URL when deleted', (done) => { - spyOn(gl.utils, 'visitUrl'); + spyOn(urlUtils, 'visitUrl'); spyOn(vm.service, 'deleteIssuable').and.callFake(() => new Promise((resolve) => { resolve({ json() { @@ -283,7 +333,7 @@ describe('Issuable output', () => { setTimeout(() => { expect( - gl.utils.visitUrl, + urlUtils.visitUrl, ).toHaveBeenCalledWith('/test'); done(); @@ -291,7 +341,7 @@ describe('Issuable output', () => { }); it('stops polling when deleting', (done) => { - spyOn(gl.utils, 'visitUrl'); + spyOn(urlUtils, 'visitUrl'); spyOn(vm.poll, 'stop').and.callThrough(); spyOn(vm.service, 'deleteIssuable').and.callFake(() => new Promise((resolve) => { resolve({ diff --git a/spec/javascripts/issue_show/components/description_spec.js b/spec/javascripts/issue_show/components/description_spec.js index 163e5cdd062..2e000a1063f 100644 --- a/spec/javascripts/issue_show/components/description_spec.js +++ 
b/spec/javascripts/issue_show/components/description_spec.js @@ -51,6 +51,35 @@ describe('Description component', () => { }); }); + it('opens recaptcha dialog if update rejected as spam', (done) => { + let modal; + const recaptchaChild = vm.$children + .find(child => child.$options._componentTag === 'recaptcha-dialog'); // eslint-disable-line no-underscore-dangle + + recaptchaChild.scriptSrc = '//scriptsrc'; + + vm.taskListUpdateSuccess({ + recaptcha_html: '<div class="g-recaptcha">recaptcha_html</div>', + }); + + vm.$nextTick() + .then(() => { + modal = vm.$el.querySelector('.js-recaptcha-dialog'); + + expect(modal.style.display).not.toEqual('none'); + expect(modal.querySelector('.g-recaptcha').textContent).toEqual('recaptcha_html'); + expect(document.body.querySelector('.js-recaptcha-script').src).toMatch('//scriptsrc'); + }) + .then(() => modal.querySelector('.close').click()) + .then(() => vm.$nextTick()) + .then(() => { + expect(modal.style.display).toEqual('none'); + expect(document.body.querySelector('.js-recaptcha-script')).toBeNull(); + }) + .then(done) + .catch(done.fail); + }); + describe('TaskList', () => { beforeEach(() => { vm = mountComponent(DescriptionComponent, Object.assign({}, props, { @@ -86,6 +115,7 @@ describe('Description component', () => { dataType: 'issuableType', fieldName: 'description', selector: '.detail-page-description', + onSuccess: jasmine.any(Function), }); done(); }); diff --git a/spec/javascripts/job_spec.js b/spec/javascripts/job_spec.js index 5e67911d338..4f06237deb5 100644 --- a/spec/javascripts/job_spec.js +++ b/spec/javascripts/job_spec.js @@ -1,6 +1,6 @@ import { bytesToKiB } from '~/lib/utils/number_utils'; +import * as urlUtils from '~/lib/utils/url_utility'; import '~/lib/utils/datetime_utility'; -import '~/lib/utils/url_utility'; import Job from '~/job'; import '~/breakpoints'; @@ -28,7 +28,7 @@ describe('Job', () => { }); it('copies build options', function () { - expect(this.job.pageUrl).toBe(JOB_URL); + expect(this.job.pagePath).toBe(JOB_URL); expect(this.job.buildStatus).toBe('success'); expect(this.job.buildStage).toBe('test'); expect(this.job.state).toBe(''); @@ -65,7 +65,7 @@ describe('Job', () => { const deferred2 = $.Deferred(); const deferred3 = $.Deferred(); spyOn($, 'ajax').and.returnValues(deferred1.promise(), deferred2.promise(), deferred3.promise()); - spyOn(gl.utils, 'visitUrl'); + spyOn(urlUtils, 'visitUrl'); deferred1.resolve({ html: '<span>Update<span>', @@ -103,7 +103,7 @@ describe('Job', () => { spyOn($, 'ajax').and.returnValues(deferred1.promise(), deferred2.promise(), deferred3.promise()); - spyOn(gl.utils, 'visitUrl'); + spyOn(urlUtils, 'visitUrl'); deferred1.resolve({ html: '<span>Update<span>', @@ -134,7 +134,7 @@ describe('Job', () => { describe('truncated information', () => { describe('when size is less than total', () => { it('shows information about truncated log', () => { - spyOn(gl.utils, 'visitUrl'); + spyOn(urlUtils, 'visitUrl'); const deferred = $.Deferred(); spyOn($, 'ajax').and.returnValue(deferred.promise()); @@ -153,7 +153,7 @@ describe('Job', () => { it('shows the size in KiB', () => { const size = 50; - spyOn(gl.utils, 'visitUrl'); + spyOn(urlUtils, 'visitUrl'); const deferred = $.Deferred(); spyOn($, 'ajax').and.returnValue(deferred.promise()); @@ -179,7 +179,7 @@ describe('Job', () => { spyOn($, 'ajax').and.returnValues(deferred1.promise(), deferred2.promise(), deferred3.promise()); - spyOn(gl.utils, 'visitUrl'); + spyOn(urlUtils, 'visitUrl'); deferred1.resolve({ html: '<span>Update</span>', @@ 
-214,7 +214,7 @@ describe('Job', () => { it('renders the raw link', () => { const deferred = $.Deferred(); - spyOn(gl.utils, 'visitUrl'); + spyOn(urlUtils, 'visitUrl'); spyOn($, 'ajax').and.returnValue(deferred.promise()); deferred.resolve({ @@ -236,7 +236,7 @@ describe('Job', () => { describe('when size is equal than total', () => { it('does not show the trunctated information', () => { const deferred = $.Deferred(); - spyOn(gl.utils, 'visitUrl'); + spyOn(urlUtils, 'visitUrl'); spyOn($, 'ajax').and.returnValue(deferred.promise()); deferred.resolve({ @@ -257,7 +257,7 @@ describe('Job', () => { describe('output trace', () => { beforeEach(() => { const deferred = $.Deferred(); - spyOn(gl.utils, 'visitUrl'); + spyOn(urlUtils, 'visitUrl'); spyOn($, 'ajax').and.returnValue(deferred.promise()); deferred.resolve({ diff --git a/spec/javascripts/lib/utils/datefix_spec.js b/spec/javascripts/lib/utils/datefix_spec.js index e58ac4300ba..a9f3abcf2a4 100644 --- a/spec/javascripts/lib/utils/datefix_spec.js +++ b/spec/javascripts/lib/utils/datefix_spec.js @@ -21,7 +21,7 @@ describe('datefix', () => { describe('pikadayToString', () => { it('should format a UTC date into yyyy-mm-dd format', () => { - expect(pikadayToString(new Date('2020-01-29'))).toEqual('2020-01-29'); + expect(pikadayToString(new Date('2020-01-29:00:00'))).toEqual('2020-01-29'); }); }); }); diff --git a/spec/javascripts/merge_request_tabs_spec.js b/spec/javascripts/merge_request_tabs_spec.js index e441d1153ed..5076435e7a8 100644 --- a/spec/javascripts/merge_request_tabs_spec.js +++ b/spec/javascripts/merge_request_tabs_spec.js @@ -1,6 +1,7 @@ /* eslint-disable no-var, comma-dangle, object-shorthand */ /* global Notes */ +import * as urlUtils from '~/lib/utils/url_utility'; import '~/merge_request_tabs'; import '~/commit/pipelines/pipelines_bundle'; import '~/breakpoints'; @@ -333,7 +334,7 @@ import 'vendor/jquery.scrollTo'; describe('with note fragment hash', () => { it('should expand and scroll to linked fragment hash #note_xxx', function () { - spyOn(window.gl.utils, 'getLocationHash').and.returnValue(noteId); + spyOn(urlUtils, 'getLocationHash').and.returnValue(noteId); this.class.loadDiff('/foo/bar/merge_requests/1/diffs'); expect(noteId.length).toBeGreaterThan(0); @@ -345,7 +346,7 @@ import 'vendor/jquery.scrollTo'; }); it('should gracefully ignore non-existant fragment hash', function () { - spyOn(window.gl.utils, 'getLocationHash').and.returnValue('note_something-that-does-not-exist'); + spyOn(urlUtils, 'getLocationHash').and.returnValue('note_something-that-does-not-exist'); this.class.loadDiff('/foo/bar/merge_requests/1/diffs'); expect(window.notes.toggleDiffNote).not.toHaveBeenCalled(); @@ -354,7 +355,7 @@ import 'vendor/jquery.scrollTo'; describe('with line number fragment hash', () => { it('should gracefully ignore line number fragment hash', function () { - spyOn(window.gl.utils, 'getLocationHash').and.returnValue(noteLineNumId); + spyOn(urlUtils, 'getLocationHash').and.returnValue(noteLineNumId); this.class.loadDiff('/foo/bar/merge_requests/1/diffs'); expect(noteLineNumId.length).toBeGreaterThan(0); @@ -387,7 +388,7 @@ import 'vendor/jquery.scrollTo'; describe('with note fragment hash', () => { it('should expand and scroll to linked fragment hash #note_xxx', function () { - spyOn(window.gl.utils, 'getLocationHash').and.returnValue(noteId); + spyOn(urlUtils, 'getLocationHash').and.returnValue(noteId); this.class.loadDiff('/foo/bar/merge_requests/1/diffs'); @@ -400,7 +401,7 @@ import 'vendor/jquery.scrollTo'; }); it('should 
gracefully ignore non-existant fragment hash', function () { - spyOn(window.gl.utils, 'getLocationHash').and.returnValue('note_something-that-does-not-exist'); + spyOn(urlUtils, 'getLocationHash').and.returnValue('note_something-that-does-not-exist'); this.class.loadDiff('/foo/bar/merge_requests/1/diffs'); expect(window.notes.toggleDiffNote).not.toHaveBeenCalled(); @@ -409,7 +410,7 @@ import 'vendor/jquery.scrollTo'; describe('with line number fragment hash', () => { it('should gracefully ignore line number fragment hash', function () { - spyOn(window.gl.utils, 'getLocationHash').and.returnValue(noteLineNumId); + spyOn(urlUtils, 'getLocationHash').and.returnValue(noteLineNumId); this.class.loadDiff('/foo/bar/merge_requests/1/diffs'); expect(noteLineNumId.length).toBeGreaterThan(0); diff --git a/spec/javascripts/monitoring/graph/deployment_spec.js b/spec/javascripts/monitoring/graph/deployment_spec.js index dea42d755d4..bf6ada8185e 100644 --- a/spec/javascripts/monitoring/graph/deployment_spec.js +++ b/spec/javascripts/monitoring/graph/deployment_spec.js @@ -118,7 +118,7 @@ describe('MonitoringDeployment', () => { ).not.toEqual('display: none;'); }); - it('shows the refText inside a text element with the deploy-info-text class', () => { + it('contains date, refs and the "deployed" text', () => { reducedDeploymentData[0].showDeploymentFlag = true; const component = createComponent({ showDeployInfo: true, @@ -129,8 +129,31 @@ describe('MonitoringDeployment', () => { }); expect( - component.$el.querySelector('.deploy-info-text').firstChild.nodeValue.trim(), - ).toEqual(component.refText(reducedDeploymentData[0])); + component.$el.querySelectorAll('.deploy-info-text'), + ).toContainText('Deployed'); + + expect( + component.$el.querySelectorAll('.deploy-info-text'), + ).toContainText('Wed, May 31'); + + expect( + component.$el.querySelectorAll('.deploy-info-text'), + ).toContainText(component.refText(reducedDeploymentData[0])); + }); + + it('contains a link to the commit contents', () => { + reducedDeploymentData[0].showDeploymentFlag = true; + const component = createComponent({ + showDeployInfo: true, + deploymentData: reducedDeploymentData, + graphHeight: 300, + graphWidth: 440, + graphHeightOffset: 120, + }); + + expect( + component.$el.querySelectorAll('.deploy-info-text-link')[0].parentElement.getAttribute('xlink:href'), + ).not.toEqual(''); }); it('should contain a hidden gradient', () => { diff --git a/spec/javascripts/monitoring/graph_spec.js b/spec/javascripts/monitoring/graph_spec.js index fd79abe241a..b1d69752bad 100644 --- a/spec/javascripts/monitoring/graph_spec.js +++ b/spec/javascripts/monitoring/graph_spec.js @@ -4,6 +4,8 @@ import MonitoringMixins from '~/monitoring/mixins/monitoring_mixins'; import eventHub from '~/monitoring/event_hub'; import { deploymentData, convertDatesMultipleSeries, singleRowMetricsMultipleSeries } from './mock_data'; +const tagsPath = 'http://test.host/frontend-fixtures/environments-project/tags'; +const projectPath = 'http://test.host/frontend-fixtures/environments-project'; const createComponent = (propsData) => { const Component = Vue.extend(Graph); @@ -25,6 +27,8 @@ describe('Graph', () => { classType: 'col-md-6', updateAspectRatio: false, deploymentData, + tagsPath, + projectPath, }); expect(component.$el.querySelector('.text-center').innerText.trim()).toBe(component.graphData.title); @@ -37,6 +41,8 @@ describe('Graph', () => { classType: 'col-md-6', updateAspectRatio: false, deploymentData, + tagsPath, + projectPath, }); const transformedHeight = 
`${component.graphHeight - 100}`; @@ -50,6 +56,8 @@ describe('Graph', () => { classType: 'col-md-6', updateAspectRatio: false, deploymentData, + tagsPath, + projectPath, }); const viewBoxArray = component.outerViewBox.split(' '); @@ -65,6 +73,8 @@ describe('Graph', () => { classType: 'col-md-6', updateAspectRatio: false, deploymentData, + tagsPath, + projectPath, }); spyOn(eventHub, '$emit'); @@ -81,6 +91,8 @@ describe('Graph', () => { classType: 'col-md-6', updateAspectRatio: false, deploymentData, + tagsPath, + projectPath, }); expect(component.yAxisLabel).toEqual(component.graphData.y_label); @@ -98,6 +110,8 @@ describe('Graph', () => { hoveredDate: new Date('Sun Aug 27 2017 06:11:51 GMT-0500 (CDT)'), currentDeployXPos: null, }, + tagsPath, + projectPath, }); component.positionFlag(); diff --git a/spec/javascripts/monitoring/mock_data.js b/spec/javascripts/monitoring/mock_data.js index 6b34855b8b2..1f4e858e731 100644 --- a/spec/javascripts/monitoring/mock_data.js +++ b/spec/javascripts/monitoring/mock_data.js @@ -2430,33 +2430,39 @@ export const deploymentData = [ id: 111, iid: 3, sha: 'f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187', + commitUrl: 'http://test.host/frontend-fixtures/environments-project/commit/f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187', ref: { name: 'master' }, created_at: '2017-05-31T21:23:37.881Z', tag: false, + tagUrl: 'http://test.host/frontend-fixtures/environments-project/tags/false', 'last?': true }, { id: 110, iid: 2, sha: 'f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187', + commitUrl: 'http://test.host/frontend-fixtures/environments-project/commit/f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187', ref: { name: 'master' }, created_at: '2017-05-30T20:08:04.629Z', tag: false, + tagUrl: 'http://test.host/frontend-fixtures/environments-project/tags/false', 'last?': false }, { id: 109, iid: 1, sha: '6511e58faafaa7ad2228990ec57f19d66f7db7c2', + commitUrl: 'http://test.host/frontend-fixtures/environments-project/commit/6511e58faafaa7ad2228990ec57f19d66f7db7c2', ref: { name: 'update2-readme' }, created_at: '2017-05-30T17:42:38.409Z', tag: false, + tagUrl: 'http://test.host/frontend-fixtures/environments-project/tags/false', 'last?': false } ]; diff --git a/spec/javascripts/notes/components/issue_note_spec.js b/spec/javascripts/notes/components/issue_note_spec.js index 73fd188dbe5..bd888b2cbae 100644 --- a/spec/javascripts/notes/components/issue_note_spec.js +++ b/spec/javascripts/notes/components/issue_note_spec.js @@ -41,4 +41,19 @@ describe('issue_note', () => { it('should render issue body', () => { expect(vm.$el.querySelector('.note-text').innerHTML).toEqual(note.note_html); }); + + it('prevents note preview xss', (done) => { + const imgSrc = 'data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7'; + const noteBody = `<img src="${imgSrc}" onload="alert(1)" />`; + const alertSpy = spyOn(window, 'alert'); + vm.updateNote = () => new Promise($.noop); + + vm.formUpdateHandler(noteBody, null, $.noop); + + setTimeout(() => { + expect(alertSpy).not.toHaveBeenCalled(); + expect(vm.note.note_html).toEqual(_.escape(noteBody)); + done(); + }, 0); + }); }); diff --git a/spec/javascripts/notes_spec.js b/spec/javascripts/notes_spec.js index 677a389b88f..2612c5fd7bc 100644 --- a/spec/javascripts/notes_spec.js +++ b/spec/javascripts/notes_spec.js @@ -1,6 +1,7 @@ /* eslint-disable space-before-function-paren, no-unused-expressions, no-var, object-shorthand, comma-dangle, max-len */ /* global Notes */ +import * as urlUtils from '~/lib/utils/url_utility'; import 'autosize'; import 
'~/gl_form'; import '~/lib/utils/text_utility'; @@ -168,8 +169,7 @@ import '~/notes'; }); it('sets target when hash matches', () => { - spyOn(gl.utils, 'getLocationHash'); - gl.utils.getLocationHash.and.returnValue(hash); + spyOn(urlUtils, 'getLocationHash').and.returnValue(hash); Notes.updateNoteTargetSelector($note); @@ -178,8 +178,7 @@ import '~/notes'; }); it('unsets target when hash does not match', () => { - spyOn(gl.utils, 'getLocationHash'); - gl.utils.getLocationHash.and.returnValue('note_doesnotexist'); + spyOn(urlUtils, 'getLocationHash').and.returnValue('note_doesnotexist'); Notes.updateNoteTargetSelector($note); @@ -187,8 +186,7 @@ import '~/notes'; }); it('unsets target when there is not a hash fragment anymore', () => { - spyOn(gl.utils, 'getLocationHash'); - gl.utils.getLocationHash.and.returnValue(null); + spyOn(urlUtils, 'getLocationHash').and.returnValue(null); Notes.updateNoteTargetSelector($note); diff --git a/spec/javascripts/pager_spec.js b/spec/javascripts/pager_spec.js index 1d3e1263371..fe3ea996eac 100644 --- a/spec/javascripts/pager_spec.js +++ b/spec/javascripts/pager_spec.js @@ -1,5 +1,6 @@ /* global fixture */ +import * as utils from '~/lib/utils/url_utility'; import '~/pager'; describe('pager', () => { @@ -30,7 +31,7 @@ describe('pager', () => { it('should use current url if data-href attribute not provided', () => { const href = `${gl.TEST_HOST}/some_list`; - spyOn(gl.utils, 'removeParams').and.returnValue(href); + spyOn(utils, 'removeParams').and.returnValue(href); Pager.init(); expect(Pager.url).toBe(href); }); @@ -44,9 +45,9 @@ describe('pager', () => { it('keeps extra query parameters from url', () => { window.history.replaceState({}, null, '?filter=test&offset=100'); const href = `${gl.TEST_HOST}/some_list?filter=test`; - spyOn(gl.utils, 'removeParams').and.returnValue(href); + spyOn(utils, 'removeParams').and.returnValue(href); Pager.init(); - expect(gl.utils.removeParams).toHaveBeenCalledWith(['limit', 'offset']); + expect(utils.removeParams).toHaveBeenCalledWith(['limit', 'offset']); expect(Pager.url).toEqual(href); }); }); diff --git a/spec/javascripts/repo/components/repo_commit_section_spec.js b/spec/javascripts/repo/components/repo_commit_section_spec.js index 1c794123095..72712e058e5 100644 --- a/spec/javascripts/repo/components/repo_commit_section_spec.js +++ b/spec/javascripts/repo/components/repo_commit_section_spec.js @@ -1,4 +1,5 @@ import Vue from 'vue'; +import * as urlUtils from '~/lib/utils/url_utility'; import store from '~/repo/stores'; import service from '~/repo/services'; import repoCommitSection from '~/repo/components/repo_commit_section.vue'; @@ -97,7 +98,7 @@ describe('RepoCommitSection', () => { }); it('redirects to MR creation page if start new MR checkbox checked', (done) => { - spyOn(gl.utils, 'visitUrl'); + spyOn(urlUtils, 'visitUrl'); vm.startNewMR = true; vm.makeCommit(); @@ -105,7 +106,7 @@ describe('RepoCommitSection', () => { getSetTimeoutPromise() .then(() => Vue.nextTick()) .then(() => { - expect(gl.utils.visitUrl).toHaveBeenCalled(); + expect(urlUtils.visitUrl).toHaveBeenCalled(); }) .then(done) .catch(done.fail); diff --git a/spec/javascripts/repo/stores/actions/tree_spec.js b/spec/javascripts/repo/stores/actions/tree_spec.js index 393a797c6a3..2bbc49d5a9f 100644 --- a/spec/javascripts/repo/stores/actions/tree_spec.js +++ b/spec/javascripts/repo/stores/actions/tree_spec.js @@ -1,4 +1,5 @@ import Vue from 'vue'; +import * as urlUtils from '~/lib/utils/url_utility'; import store from '~/repo/stores'; import service 
from '~/repo/services'; import { file, resetStore } from '../../helpers'; @@ -255,7 +256,7 @@ describe('Multi-file store tree actions', () => { let row; beforeEach(() => { - spyOn(gl.utils, 'visitUrl'); + spyOn(urlUtils, 'visitUrl'); row = { url: 'submoduleurl', @@ -276,7 +277,7 @@ describe('Multi-file store tree actions', () => { it('opens submodule URL', (done) => { store.dispatch('clickedTreeRow', row) .then(() => { - expect(gl.utils.visitUrl).toHaveBeenCalledWith('submoduleurl'); + expect(urlUtils.visitUrl).toHaveBeenCalledWith('submoduleurl'); done(); }).catch(done.fail); diff --git a/spec/javascripts/repo/stores/actions_spec.js b/spec/javascripts/repo/stores/actions_spec.js index f2a7a698912..21d87e46216 100644 --- a/spec/javascripts/repo/stores/actions_spec.js +++ b/spec/javascripts/repo/stores/actions_spec.js @@ -1,4 +1,5 @@ import Vue from 'vue'; +import * as urlUtils from '~/lib/utils/url_utility'; import store from '~/repo/stores'; import service from '~/repo/services'; import { resetStore, file } from '../helpers'; @@ -10,11 +11,11 @@ describe('Multi-file store actions', () => { describe('redirectToUrl', () => { it('calls visitUrl', (done) => { - spyOn(gl.utils, 'visitUrl'); + spyOn(urlUtils, 'visitUrl'); store.dispatch('redirectToUrl', 'test') .then(() => { - expect(gl.utils.visitUrl).toHaveBeenCalledWith('test'); + expect(urlUtils.visitUrl).toHaveBeenCalledWith('test'); done(); }) @@ -326,13 +327,13 @@ describe('Multi-file store actions', () => { }); it('redirects to new merge request page', (done) => { - spyOn(gl.utils, 'visitUrl'); + spyOn(urlUtils, 'visitUrl'); store.state.endpoints.newMergeRequestUrl = 'newMergeRequestUrl?branch='; store.dispatch('commitChanges', { payload, newMr: true }) .then(() => { - expect(gl.utils.visitUrl).toHaveBeenCalledWith('newMergeRequestUrl?branch=master'); + expect(urlUtils.visitUrl).toHaveBeenCalledWith('newMergeRequestUrl?branch=master'); done(); }).catch(done.fail); diff --git a/spec/javascripts/search_autocomplete_spec.js b/spec/javascripts/search_autocomplete_spec.js index fdfc59a6f12..1e4c2c9faad 100644 --- a/spec/javascripts/search_autocomplete_spec.js +++ b/spec/javascripts/search_autocomplete_spec.js @@ -3,6 +3,7 @@ import '~/gl_dropdown'; import '~/search_autocomplete'; import '~/lib/utils/common_utils'; +import * as urlUtils from '~/lib/utils/url_utility'; (function() { var assertLinks, dashboardIssuesPath, dashboardMRsPath, groupIssuesPath, groupMRsPath, groupName, mockDashboardOptions, mockGroupOptions, mockProjectOptions, projectIssuesPath, projectMRsPath, projectName, userId, widget; @@ -121,7 +122,7 @@ import '~/lib/utils/common_utils'; loadFixtures('static/search_autocomplete.html.raw'); // Prevent turbolinks from triggering within gl_dropdown - spyOn(window.gl.utils, 'visitUrl').and.returnValue(true); + spyOn(urlUtils, 'visitUrl').and.returnValue(true); window.gon = {}; window.gon.current_user_id = userId; diff --git a/spec/javascripts/sidebar/sidebar_mediator_spec.js b/spec/javascripts/sidebar/sidebar_mediator_spec.js index 14c34d5a78c..9efd109b996 100644 --- a/spec/javascripts/sidebar/sidebar_mediator_spec.js +++ b/spec/javascripts/sidebar/sidebar_mediator_spec.js @@ -1,4 +1,5 @@ import Vue from 'vue'; +import * as urlUtils from '~/lib/utils/url_utility'; import SidebarMediator from '~/sidebar/sidebar_mediator'; import SidebarStore from '~/sidebar/stores/sidebar_store'; import SidebarService from '~/sidebar/services/sidebar_service'; @@ -85,12 +86,12 @@ describe('Sidebar mediator', () => { const moveToProjectId = 7; 
this.mediator.store.setMoveToProjectId(moveToProjectId); spyOn(this.mediator.service, 'moveIssue').and.callThrough(); - spyOn(gl.utils, 'visitUrl'); + spyOn(urlUtils, 'visitUrl'); this.mediator.moveIssue() .then(() => { expect(this.mediator.service.moveIssue).toHaveBeenCalledWith(moveToProjectId); - expect(gl.utils.visitUrl).toHaveBeenCalledWith('/root/some-project/issues/5'); + expect(urlUtils.visitUrl).toHaveBeenCalledWith('/root/some-project/issues/5'); }) .then(done) .catch(done.fail); diff --git a/spec/javascripts/todos_spec.js b/spec/javascripts/todos_spec.js index 7d3c9319a11..59e16f0786e 100644 --- a/spec/javascripts/todos_spec.js +++ b/spec/javascripts/todos_spec.js @@ -1,3 +1,4 @@ +import * as urlUtils from '~/lib/utils/url_utility'; import Todos from '~/todos'; import '~/lib/utils/common_utils'; @@ -16,7 +17,7 @@ describe('Todos', () => { it('opens the todo url', (done) => { const todoLink = todoItem.dataset.url; - spyOn(gl.utils, 'visitUrl').and.callFake((url) => { + spyOn(urlUtils, 'visitUrl').and.callFake((url) => { expect(url).toEqual(todoLink); done(); }); @@ -31,7 +32,7 @@ describe('Todos', () => { beforeEach(() => { metakeyEvent = $.Event('click', { keyCode: 91, ctrlKey: true }); - visitUrlSpy = spyOn(gl.utils, 'visitUrl').and.callFake(() => {}); + visitUrlSpy = spyOn(urlUtils, 'visitUrl').and.callFake(() => {}); windowOpenSpy = spyOn(window, 'open').and.callFake(() => {}); }); diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_deployment_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_deployment_spec.js index 7ee998c8fce..d5b8947c86f 100644 --- a/spec/javascripts/vue_mr_widget/components/mr_widget_deployment_spec.js +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_deployment_spec.js @@ -1,4 +1,5 @@ import Vue from 'vue'; +import * as urlUtils from '~/lib/utils/url_utility'; import deploymentComponent from '~/vue_merge_request_widget/components/mr_widget_deployment'; import MRWidgetService from '~/vue_merge_request_widget/services/mr_widget_service'; @@ -108,13 +109,13 @@ describe('MRWidgetDeployment', () => { it('should show a confirm dialog and call service.stopEnvironment when confirmed', (done) => { spyOn(window, 'confirm').and.returnValue(true); spyOn(MRWidgetService, 'stopEnvironment').and.returnValue(returnPromise(true)); - spyOn(gl.utils, 'visitUrl').and.returnValue(true); + spyOn(urlUtils, 'visitUrl').and.returnValue(true); vm = mockStopEnvironment(); expect(window.confirm).toHaveBeenCalled(); expect(MRWidgetService.stopEnvironment).toHaveBeenCalledWith(deploymentMockData.stop_url); setTimeout(() => { - expect(gl.utils.visitUrl).toHaveBeenCalledWith(url); + expect(urlUtils.visitUrl).toHaveBeenCalledWith(url); done(); }, 333); }); diff --git a/spec/javascripts/vue_shared/components/popup_dialog_spec.js b/spec/javascripts/vue_shared/components/popup_dialog_spec.js new file mode 100644 index 00000000000..5c1d2a196f4 --- /dev/null +++ b/spec/javascripts/vue_shared/components/popup_dialog_spec.js @@ -0,0 +1,12 @@ +import Vue from 'vue'; +import PopupDialog from '~/vue_shared/components/popup_dialog.vue'; +import mountComponent from '../../helpers/vue_mount_component_helper'; + +describe('PopupDialog', () => { + it('does not render a primary button if no primaryButtonLabel', () => { + const popupDialog = Vue.extend(PopupDialog); + const vm = mountComponent(popupDialog); + + expect(vm.$el.querySelector('.js-primary-button')).toBeNull(); + }); +}); diff --git a/spec/lib/banzai/filter/table_of_contents_filter_spec.rb 
b/spec/lib/banzai/filter/table_of_contents_filter_spec.rb index 85eddde732e..0cfef4ff5bf 100644 --- a/spec/lib/banzai/filter/table_of_contents_filter_spec.rb +++ b/spec/lib/banzai/filter/table_of_contents_filter_spec.rb @@ -65,6 +65,13 @@ describe Banzai::Filter::TableOfContentsFilter do expect(doc.css('h2 a').first.attr('href')).to eq '#one-1' end + it 'prepends a prefix to digits-only ids' do + doc = filter(header(1, "123") + header(2, "1.0")) + + expect(doc.css('h1 a').first.attr('href')).to eq '#anchor-123' + expect(doc.css('h2 a').first.attr('href')).to eq '#anchor-10' + end + it 'supports Unicode' do doc = filter(header(1, '한글')) expect(doc.css('h1 a').first.attr('id')).to eq 'user-content-한글' diff --git a/spec/lib/gitlab/bare_repository_import/importer_spec.rb b/spec/lib/gitlab/bare_repository_import/importer_spec.rb index 7f3bf5fc41c..8a83e446935 100644 --- a/spec/lib/gitlab/bare_repository_import/importer_spec.rb +++ b/spec/lib/gitlab/bare_repository_import/importer_spec.rb @@ -132,6 +132,23 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do expect(File).to exist(File.join(project.repository_storage_path, project.disk_path + '.git')) end + + it 'moves an existing project to the correct path' do + # This is a quick way to get a valid repository instead of copying an + # existing one. Since it's not persisted, the importer will try to + # create the project. + project = build(:project, :repository) + original_commit_count = project.repository.commit_count + + bare_repo = Gitlab::BareRepositoryImport::Repository.new(project.repository_storage_path, project.repository.path) + gitlab_importer = described_class.new(admin, bare_repo) + + expect(gitlab_importer).to receive(:create_project).and_call_original + + new_project = gitlab_importer.create_project_if_needed + + expect(new_project.repository.commit_count).to eq(original_commit_count) + end end context 'with Wiki' do diff --git a/spec/lib/gitlab/bare_repository_import/repository_spec.rb b/spec/lib/gitlab/bare_repository_import/repository_spec.rb index 2db737f5fb6..61b73abcba4 100644 --- a/spec/lib/gitlab/bare_repository_import/repository_spec.rb +++ b/spec/lib/gitlab/bare_repository_import/repository_spec.rb @@ -46,6 +46,13 @@ describe ::Gitlab::BareRepositoryImport::Repository do describe '#project_full_path' do it 'returns the project full path' do expect(project_repo_path.repo_path).to eq('/full/path/to/repo.git') + expect(project_repo_path.project_full_path).to eq('to/repo') + end + + it 'with no trailing slash in the root path' do + repo_path = described_class.new('/full/path', '/full/path/to/repo.git') + + expect(repo_path.project_full_path).to eq('to/repo') end end end diff --git a/spec/lib/gitlab/checks/project_moved_spec.rb b/spec/lib/gitlab/checks/project_moved_spec.rb new file mode 100644 index 00000000000..fa1575e2177 --- /dev/null +++ b/spec/lib/gitlab/checks/project_moved_spec.rb @@ -0,0 +1,81 @@ +require 'rails_helper' + +describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do + let(:user) { create(:user) } + let(:project) { create(:project) } + + describe '.fetch_redirct_message' do + context 'with a redirect message queue' do + it 'should return the redirect message' do + project_moved = described_class.new(project, user, 'foo/bar', 'http') + project_moved.add_redirect_message + + expect(described_class.fetch_redirect_message(user.id, project.id)).to eq(project_moved.redirect_message) + end + + it 'should delete the redirect message from redis' do + project_moved = 
described_class.new(project, user, 'foo/bar', 'http') + project_moved.add_redirect_message + + expect(Gitlab::Redis::SharedState.with { |redis| redis.get("redirect_namespace:#{user.id}:#{project.id}") }).not_to be_nil + described_class.fetch_redirect_message(user.id, project.id) + expect(Gitlab::Redis::SharedState.with { |redis| redis.get("redirect_namespace:#{user.id}:#{project.id}") }).to be_nil + end + end + + context 'with no redirect message queue' do + it 'should return nil' do + expect(described_class.fetch_redirect_message(1, 2)).to be_nil + end + end + end + + describe '#add_redirect_message' do + it 'should queue a redirect message' do + project_moved = described_class.new(project, user, 'foo/bar', 'http') + expect(project_moved.add_redirect_message).to eq("OK") + end + end + + describe '#redirect_message' do + context 'when the push is rejected' do + it 'should return a redirect message telling the user to try again' do + project_moved = described_class.new(project, user, 'foo/bar', 'http') + message = "Project 'foo/bar' was moved to '#{project.full_path}'." + + "\n\nPlease update your Git remote:" + + "\n\n git remote set-url origin #{project.http_url_to_repo} and try again.\n" + + expect(project_moved.redirect_message(rejected: true)).to eq(message) + end + end + + context 'when the push is not rejected' do + it 'should return a redirect message' do + project_moved = described_class.new(project, user, 'foo/bar', 'http') + message = "Project 'foo/bar' was moved to '#{project.full_path}'." + + "\n\nPlease update your Git remote:" + + "\n\n git remote set-url origin #{project.http_url_to_repo}\n" + + expect(project_moved.redirect_message).to eq(message) + end + end + end + + describe '#permanent_redirect?' do + context 'with a permanent RedirectRoute' do + it 'should return true' do + project.route.create_redirect('foo/bar', permanent: true) + project_moved = described_class.new(project, user, 'foo/bar', 'http') + expect(project_moved.permanent_redirect?).to be_truthy + end + end + + context 'without a permanent RedirectRoute' do + it 'should return false' do + project.route.create_redirect('foo/bar') + project_moved = described_class.new(project, user, 'foo/bar', 'http') + expect(project_moved.permanent_redirect?).to be_falsy + end + end + end +end diff --git a/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb index 0f1d72080c5..3ae7053a995 100644 --- a/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb @@ -6,46 +6,81 @@ describe Gitlab::Ci::Pipeline::Chain::Build do let(:pipeline) { Ci::Pipeline.new } let(:command) do - double('command', source: :push, - origin_ref: 'master', - checkout_sha: project.commit.id, - after_sha: nil, - before_sha: nil, - trigger_request: nil, - schedule: nil, - project: project, - current_user: user) + Gitlab::Ci::Pipeline::Chain::Command.new( + source: :push, + origin_ref: 'master', + checkout_sha: project.commit.id, + after_sha: nil, + before_sha: nil, + trigger_request: nil, + schedule: nil, + project: project, + current_user: user) end let(:step) { described_class.new(pipeline, command) } before do stub_repository_ci_yaml_file(sha: anything) - - step.perform! end it 'never breaks the chain' do + step.perform! + expect(step.break?).to be false end it 'fills pipeline object with data' do + step.perform! 
+ expect(pipeline.sha).not_to be_empty expect(pipeline.sha).to eq project.commit.id expect(pipeline.ref).to eq 'master' + expect(pipeline.tag).to be false expect(pipeline.user).to eq user expect(pipeline.project).to eq project end it 'sets a valid config source' do + step.perform! + expect(pipeline.repository_source?).to be true end it 'returns a valid pipeline' do + step.perform! + expect(pipeline).to be_valid end it 'does not persist a pipeline' do + step.perform! + expect(pipeline).not_to be_persisted end + + context 'when pipeline is running for a tag' do + let(:command) do + Gitlab::Ci::Pipeline::Chain::Command.new( + source: :push, + origin_ref: 'mytag', + checkout_sha: project.commit.id, + after_sha: nil, + before_sha: nil, + trigger_request: nil, + schedule: nil, + project: project, + current_user: user) + end + + before do + allow_any_instance_of(Repository).to receive(:tag_exists?).with('mytag').and_return(true) + + step.perform! + end + + it 'correctly indicated that this is a tagged pipeline' do + expect(pipeline).to be_tag + end + end end diff --git a/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb new file mode 100644 index 00000000000..75a177d2d1f --- /dev/null +++ b/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb @@ -0,0 +1,185 @@ +require 'spec_helper' + +describe Gitlab::Ci::Pipeline::Chain::Command do + set(:project) { create(:project, :repository) } + + describe '#initialize' do + subject do + described_class.new(origin_ref: 'master') + end + + it 'properly initialises object from hash' do + expect(subject.origin_ref).to eq('master') + end + end + + context 'handling of origin_ref' do + let(:command) { described_class.new(project: project, origin_ref: origin_ref) } + + describe '#branch_exists?' do + subject { command.branch_exists? } + + context 'for existing branch' do + let(:origin_ref) { 'master' } + + it { is_expected.to eq(true) } + end + + context 'for invalid branch' do + let(:origin_ref) { 'something' } + + it { is_expected.to eq(false) } + end + end + + describe '#tag_exists?' do + subject { command.tag_exists? 
} + + context 'for existing ref' do + let(:origin_ref) { 'v1.0.0' } + + it { is_expected.to eq(true) } + end + + context 'for invalid ref' do + let(:origin_ref) { 'something' } + + it { is_expected.to eq(false) } + end + end + + describe '#ref' do + subject { command.ref } + + context 'for regular ref' do + let(:origin_ref) { 'master' } + + it { is_expected.to eq('master') } + end + + context 'for branch ref' do + let(:origin_ref) { 'refs/heads/master' } + + it { is_expected.to eq('master') } + end + + context 'for tag ref' do + let(:origin_ref) { 'refs/tags/1.0.0' } + + it { is_expected.to eq('1.0.0') } + end + + context 'for other refs' do + let(:origin_ref) { 'refs/merge-requests/11/head' } + + it { is_expected.to eq('refs/merge-requests/11/head') } + end + end + end + + describe '#sha' do + subject { command.sha } + + context 'when invalid checkout_sha is specified' do + let(:command) { described_class.new(project: project, checkout_sha: 'aaa') } + + it 'returns empty value' do + is_expected.to be_nil + end + end + + context 'when a valid checkout_sha is specified' do + let(:command) { described_class.new(project: project, checkout_sha: project.commit.id) } + + it 'returns checkout_sha' do + is_expected.to eq(project.commit.id) + end + end + + context 'when a valid after_sha is specified' do + let(:command) { described_class.new(project: project, after_sha: project.commit.id) } + + it 'returns after_sha' do + is_expected.to eq(project.commit.id) + end + end + + context 'when a valid origin_ref is specified' do + let(:command) { described_class.new(project: project, origin_ref: 'HEAD') } + + it 'returns SHA for given ref' do + is_expected.to eq(project.commit.id) + end + end + end + + describe '#origin_sha' do + subject { command.origin_sha } + + context 'when using checkout_sha and after_sha' do + let(:command) { described_class.new(project: project, checkout_sha: 'aaa', after_sha: 'bbb') } + + it 'uses checkout_sha' do + is_expected.to eq('aaa') + end + end + + context 'when using after_sha only' do + let(:command) { described_class.new(project: project, after_sha: 'bbb') } + + it 'uses after_sha' do + is_expected.to eq('bbb') + end + end + end + + describe '#before_sha' do + subject { command.before_sha } + + context 'when using checkout_sha and before_sha' do + let(:command) { described_class.new(project: project, checkout_sha: 'aaa', before_sha: 'bbb') } + + it 'uses before_sha' do + is_expected.to eq('bbb') + end + end + + context 'when using checkout_sha only' do + let(:command) { described_class.new(project: project, checkout_sha: 'aaa') } + + it 'uses checkout_sha' do + is_expected.to eq('aaa') + end + end + + context 'when checkout_sha and before_sha are empty' do + let(:command) { described_class.new(project: project) } + + it 'uses BLANK_SHA' do + is_expected.to eq(Gitlab::Git::BLANK_SHA) + end + end + end + + describe '#protected_ref?' do + let(:command) { described_class.new(project: project, origin_ref: 'my-branch') } + + subject { command.protected_ref? 
} + + context 'when a ref is protected' do + before do + expect_any_instance_of(Project).to receive(:protected_for?).with('my-branch').and_return(true) + end + + it { is_expected.to eq(true) } + end + + context 'when a ref is unprotected' do + before do + expect_any_instance_of(Project).to receive(:protected_for?).with('my-branch').and_return(false) + end + + it { is_expected.to eq(false) } + end + end +end diff --git a/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb index f54e2326b06..1b03227d67b 100644 --- a/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb @@ -10,9 +10,9 @@ describe Gitlab::Ci::Pipeline::Chain::Create do end let(:command) do - double('command', project: project, - current_user: user, - seeds_block: nil) + Gitlab::Ci::Pipeline::Chain::Command.new( + project: project, + current_user: user, seeds_block: nil) end let(:step) { described_class.new(pipeline, command) } diff --git a/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb index e165e0fac2a..eca23694a2b 100644 --- a/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb @@ -5,7 +5,7 @@ describe Gitlab::Ci::Pipeline::Chain::Sequence do set(:user) { create(:user) } let(:pipeline) { build_stubbed(:ci_pipeline) } - let(:command) { double('command' ) } + let(:command) { Gitlab::Ci::Pipeline::Chain::Command.new } let(:first_step) { spy('first step') } let(:second_step) { spy('second step') } let(:sequence) { [first_step, second_step] } diff --git a/spec/lib/gitlab/ci/pipeline/chain/skip_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/skip_spec.rb index 32bd5de829b..dc13cae961c 100644 --- a/spec/lib/gitlab/ci/pipeline/chain/skip_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/chain/skip_spec.rb @@ -6,10 +6,11 @@ describe Gitlab::Ci::Pipeline::Chain::Skip do set(:pipeline) { create(:ci_pipeline, project: project) } let(:command) do - double('command', project: project, - current_user: user, - ignore_skip_ci: false, - save_incompleted: true) + Gitlab::Ci::Pipeline::Chain::Command.new( + project: project, + current_user: user, + ignore_skip_ci: false, + save_incompleted: true) end let(:step) { described_class.new(pipeline, command) } diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb index 0bbdd23f4d6..a973ccda8de 100644 --- a/spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb @@ -5,11 +5,12 @@ describe Gitlab::Ci::Pipeline::Chain::Validate::Abilities do set(:user) { create(:user) } let(:pipeline) do - build_stubbed(:ci_pipeline, ref: ref, project: project) + build_stubbed(:ci_pipeline, project: project) end let(:command) do - double('command', project: project, current_user: user) + Gitlab::Ci::Pipeline::Chain::Command.new( + project: project, current_user: user, origin_ref: ref) end let(:step) { described_class.new(pipeline, command) } diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb index 8357af38f92..5c12c6e6392 100644 --- a/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb @@ -5,9 +5,10 @@ describe Gitlab::Ci::Pipeline::Chain::Validate::Config do set(:user) { create(:user) } let(:command) do - double('command', 
project: project, - current_user: user, - save_incompleted: true) + Gitlab::Ci::Pipeline::Chain::Command.new( + project: project, + current_user: user, + save_incompleted: true) end let!(:step) { described_class.new(pipeline, command) } diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb index bb356efe9ad..fb1b53fc55c 100644 --- a/spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb @@ -3,10 +3,7 @@ require 'spec_helper' describe Gitlab::Ci::Pipeline::Chain::Validate::Repository do set(:project) { create(:project, :repository) } set(:user) { create(:user) } - - let(:command) do - double('command', project: project, current_user: user) - end + let(:pipeline) { build_stubbed(:ci_pipeline) } let!(:step) { described_class.new(pipeline, command) } @@ -14,9 +11,10 @@ describe Gitlab::Ci::Pipeline::Chain::Validate::Repository do step.perform! end - context 'when pipeline ref and sha exists' do - let(:pipeline) do - build_stubbed(:ci_pipeline, ref: 'master', sha: '123', project: project) + context 'when ref and sha exists' do + let(:command) do + Gitlab::Ci::Pipeline::Chain::Command.new( + project: project, current_user: user, origin_ref: 'master', checkout_sha: project.commit.id) end it 'does not break the chain' do @@ -28,9 +26,10 @@ describe Gitlab::Ci::Pipeline::Chain::Validate::Repository do end end - context 'when pipeline ref does not exist' do - let(:pipeline) do - build_stubbed(:ci_pipeline, ref: 'something', project: project) + context 'when ref does not exist' do + let(:command) do + Gitlab::Ci::Pipeline::Chain::Command.new( + project: project, current_user: user, origin_ref: 'something') end it 'breaks the chain' do @@ -43,9 +42,10 @@ describe Gitlab::Ci::Pipeline::Chain::Validate::Repository do end end - context 'when pipeline does not have SHA set' do - let(:pipeline) do - build_stubbed(:ci_pipeline, ref: 'master', sha: nil, project: project) + context 'when does not have existing SHA set' do + let(:command) do + Gitlab::Ci::Pipeline::Chain::Command.new( + project: project, current_user: user, origin_ref: 'master', checkout_sha: 'something') end it 'breaks the chain' do diff --git a/spec/lib/gitlab/diff/inline_diff_spec.rb b/spec/lib/gitlab/diff/inline_diff_spec.rb index 15451c2cf99..0a41362f606 100644 --- a/spec/lib/gitlab/diff/inline_diff_spec.rb +++ b/spec/lib/gitlab/diff/inline_diff_spec.rb @@ -31,6 +31,10 @@ describe Gitlab::Diff::InlineDiff do expect(subject[7]).to eq([17..17]) expect(subject[8]).to be_nil end + + it 'can handle unchanged empty lines' do + expect { described_class.for_lines(['- bar', '+ baz', '']) }.not_to raise_error + end end describe "#inline_diffs" do diff --git a/spec/lib/gitlab/email/handler/create_merge_request_handler_spec.rb b/spec/lib/gitlab/email/handler/create_merge_request_handler_spec.rb index e361d1a7393..51ce3116880 100644 --- a/spec/lib/gitlab/email/handler/create_merge_request_handler_spec.rb +++ b/spec/lib/gitlab/email/handler/create_merge_request_handler_spec.rb @@ -10,14 +10,14 @@ describe Gitlab::Email::Handler::CreateMergeRequestHandler do stub_config_setting(host: 'localhost') end + after do + TestEnv.clean_test_path + end + let(:email_raw) { fixture_file('emails/valid_new_merge_request.eml') } let(:namespace) { create(:namespace, path: 'gitlabhq') } - # project's git repository is not deleted when project is deleted - # between tests. 
Then tests fail because re-creation of the project with - # the same name fails on existing git repository -> skip_disk_validation - # ignores repository existence on disk - let!(:project) { create(:project, :public, :repository, skip_disk_validation: true, namespace: namespace, path: 'gitlabhq') } + let!(:project) { create(:project, :public, :repository, namespace: namespace, path: 'gitlabhq') } let!(:user) do create( :user, diff --git a/spec/lib/gitlab/git/storage/checker_spec.rb b/spec/lib/gitlab/git/storage/checker_spec.rb new file mode 100644 index 00000000000..d74c3bcb04c --- /dev/null +++ b/spec/lib/gitlab/git/storage/checker_spec.rb @@ -0,0 +1,132 @@ +require 'spec_helper' + +describe Gitlab::Git::Storage::Checker, :clean_gitlab_redis_shared_state do + let(:storage_name) { 'default' } + let(:hostname) { Gitlab::Environment.hostname } + let(:cache_key) { "storage_accessible:#{storage_name}:#{hostname}" } + + subject(:checker) { described_class.new(storage_name) } + + def value_from_redis(name) + Gitlab::Git::Storage.redis.with do |redis| + redis.hmget(cache_key, name) + end.first + end + + def set_in_redis(name, value) + Gitlab::Git::Storage.redis.with do |redis| + redis.hmset(cache_key, name, value) + end.first + end + + describe '.check_all' do + it 'calls a check for each storage' do + fake_checker_default = double + fake_checker_broken = double + fake_logger = fake_logger + + expect(described_class).to receive(:new).with('default', fake_logger) { fake_checker_default } + expect(described_class).to receive(:new).with('broken', fake_logger) { fake_checker_broken } + expect(fake_checker_default).to receive(:check_with_lease) + expect(fake_checker_broken).to receive(:check_with_lease) + + described_class.check_all(fake_logger) + end + + context 'with broken storage', :broken_storage do + it 'returns the results' do + expected_result = [ + { storage: 'default', success: true }, + { storage: 'broken', success: false } + ] + + expect(described_class.check_all).to eq(expected_result) + end + end + end + + describe '#initialize' do + it 'assigns the settings' do + expect(checker.hostname).to eq(hostname) + expect(checker.storage).to eq('default') + expect(checker.storage_path).to eq(TestEnv.repos_path) + end + end + + describe '#check_with_lease' do + it 'only allows one check at a time' do + expect(checker).to receive(:check).once { sleep 1 } + + thread = Thread.new { checker.check_with_lease } + checker.check_with_lease + thread.join + end + + it 'returns a result hash' do + expect(checker.check_with_lease).to eq(storage: 'default', success: true) + end + end + + describe '#check' do + it 'tracks that the storage was accessible' do + set_in_redis(:failure_count, 10) + set_in_redis(:last_failure, Time.now.to_f) + + checker.check + + expect(value_from_redis(:failure_count).to_i).to eq(0) + expect(value_from_redis(:last_failure)).to be_empty + expect(value_from_redis(:first_failure)).to be_empty + end + + it 'calls the check with the correct arguments' do + stub_application_setting(circuitbreaker_storage_timeout: 30, + circuitbreaker_access_retries: 3) + + expect(Gitlab::Git::Storage::ForkedStorageCheck) + .to receive(:storage_available?).with(TestEnv.repos_path, 30, 3) + .and_call_original + + checker.check + end + + it 'returns `true`' do + expect(checker.check).to eq(true) + end + + it 'maintains known storage keys' do + Timecop.freeze do + # Insert an old key to expire + old_entry = Time.now.to_i - 3.days.to_i + Gitlab::Git::Storage.redis.with do |redis| + 
redis.zadd(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, old_entry, 'to_be_removed') + end + + checker.check + + known_keys = Gitlab::Git::Storage.redis.with do |redis| + redis.zrange(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, 0, -1) + end + + expect(known_keys).to contain_exactly(cache_key) + end + end + + context 'the storage is not available', :broken_storage do + let(:storage_name) { 'broken' } + + it 'tracks that the storage was inaccessible' do + Timecop.freeze do + expect { checker.check }.to change { value_from_redis(:failure_count).to_i }.by(1) + + expect(value_from_redis(:last_failure)).not_to be_empty + expect(value_from_redis(:first_failure)).not_to be_empty + end + end + + it 'returns `false`' do + expect(checker.check).to eq(false) + end + end + end +end diff --git a/spec/lib/gitlab/git/storage/circuit_breaker_spec.rb b/spec/lib/gitlab/git/storage/circuit_breaker_spec.rb index f34c9f09057..210b90bfba9 100644 --- a/spec/lib/gitlab/git/storage/circuit_breaker_spec.rb +++ b/spec/lib/gitlab/git/storage/circuit_breaker_spec.rb @@ -1,11 +1,18 @@ require 'spec_helper' -describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state: true, broken_storage: true do +describe Gitlab::Git::Storage::CircuitBreaker, :broken_storage do let(:storage_name) { 'default' } let(:circuit_breaker) { described_class.new(storage_name, hostname) } let(:hostname) { Gitlab::Environment.hostname } let(:cache_key) { "storage_accessible:#{storage_name}:#{hostname}" } + def set_in_redis(name, value) + Gitlab::Git::Storage.redis.with do |redis| + redis.zadd(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, 0, cache_key) + redis.hmset(cache_key, name, value) + end.first + end + before do # Override test-settings for the circuitbreaker with something more realistic # for these specs. @@ -19,36 +26,7 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state: ) end - def value_from_redis(name) - Gitlab::Git::Storage.redis.with do |redis| - redis.hmget(cache_key, name) - end.first - end - - def set_in_redis(name, value) - Gitlab::Git::Storage.redis.with do |redis| - redis.zadd(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, 0, cache_key) - redis.hmset(cache_key, name, value) - end.first - end - - describe '.reset_all!' do - it 'clears all entries form redis' do - set_in_redis(:failure_count, 10) - - described_class.reset_all! - - key_exists = Gitlab::Git::Storage.redis.with { |redis| redis.exists(cache_key) } - - expect(key_exists).to be_falsey - end - - it 'does not break when there are no keys in redis' do - expect { described_class.reset_all! 
}.not_to raise_error - end - end - - describe '.for_storage' do + describe '.for_storage', :request_store do it 'only builds a single circuitbreaker per storage' do expect(described_class).to receive(:new).once.and_call_original @@ -71,7 +49,6 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state: it 'assigns the settings' do expect(circuit_breaker.hostname).to eq(hostname) expect(circuit_breaker.storage).to eq('default') - expect(circuit_breaker.storage_path).to eq(TestEnv.repos_path) end end @@ -91,9 +68,9 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state: end end - describe '#failure_wait_time' do + describe '#check_interval' do it 'reads the value from settings' do - expect(circuit_breaker.failure_wait_time).to eq(1) + expect(circuit_breaker.check_interval).to eq(1) end end @@ -114,12 +91,6 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state: expect(circuit_breaker.access_retries).to eq(4) end end - - describe '#backoff_threshold' do - it 'reads the value from settings' do - expect(circuit_breaker.backoff_threshold).to eq(5) - end - end end describe '#perform' do @@ -134,19 +105,6 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state: end end - it 'raises the correct exception when backing off' do - Timecop.freeze do - set_in_redis(:last_failure, 1.second.ago.to_f) - set_in_redis(:failure_count, 90) - - expect { |b| circuit_breaker.perform(&b) } - .to raise_error do |exception| - expect(exception).to be_kind_of(Gitlab::Git::Storage::Failing) - expect(exception.retry_after).to eq(30) - end - end - end - it 'yields the block' do expect { |b| circuit_breaker.perform(&b) } .to yield_control @@ -170,54 +128,6 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state: .to raise_error(Rugged::OSError) end - it 'tracks that the storage was accessible' do - set_in_redis(:failure_count, 10) - set_in_redis(:last_failure, Time.now.to_f) - - circuit_breaker.perform { '' } - - expect(value_from_redis(:failure_count).to_i).to eq(0) - expect(value_from_redis(:last_failure)).to be_empty - expect(circuit_breaker.failure_count).to eq(0) - expect(circuit_breaker.last_failure).to be_nil - end - - it 'maintains known storage keys' do - Timecop.freeze do - # Insert an old key to expire - old_entry = Time.now.to_i - 3.days.to_i - Gitlab::Git::Storage.redis.with do |redis| - redis.zadd(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, old_entry, 'to_be_removed') - end - - circuit_breaker.perform { '' } - - known_keys = Gitlab::Git::Storage.redis.with do |redis| - redis.zrange(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, 0, -1) - end - - expect(known_keys).to contain_exactly(cache_key) - end - end - - it 'only performs the accessibility check once' do - expect(Gitlab::Git::Storage::ForkedStorageCheck) - .to receive(:storage_available?).once.and_call_original - - 2.times { circuit_breaker.perform { '' } } - end - - it 'calls the check with the correct arguments' do - stub_application_setting(circuitbreaker_storage_timeout: 30, - circuitbreaker_access_retries: 3) - - expect(Gitlab::Git::Storage::ForkedStorageCheck) - .to receive(:storage_available?).with(TestEnv.repos_path, 30, 3) - .and_call_original - - circuit_breaker.perform { '' } - end - context 'with the feature disabled' do before do stub_feature_flags(git_storage_circuit_breaker: false) @@ -240,31 +150,6 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state: expect(result).to eq('hello') end end - - 
context 'the storage is not available' do - let(:storage_name) { 'broken' } - - it 'raises the correct exception' do - expect(circuit_breaker).to receive(:track_storage_inaccessible) - - expect { circuit_breaker.perform { '' } } - .to raise_error do |exception| - expect(exception).to be_kind_of(Gitlab::Git::Storage::Inaccessible) - expect(exception.retry_after).to eq(30) - end - end - - it 'tracks that the storage was inaccessible' do - Timecop.freeze do - expect { circuit_breaker.perform { '' } }.to raise_error(Gitlab::Git::Storage::Inaccessible) - - expect(value_from_redis(:failure_count).to_i).to eq(1) - expect(value_from_redis(:last_failure)).not_to be_empty - expect(circuit_breaker.failure_count).to eq(1) - expect(circuit_breaker.last_failure).to be_within(1.second).of(Time.now) - end - end - end end describe '#circuit_broken?' do @@ -283,32 +168,6 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state: end end - describe '#backing_off?' do - it 'is true when there was a recent failure' do - Timecop.freeze do - set_in_redis(:last_failure, 1.second.ago.to_f) - set_in_redis(:failure_count, 90) - - expect(circuit_breaker.backing_off?).to be_truthy - end - end - - context 'the `failure_wait_time` is set to 0' do - before do - stub_application_setting(circuitbreaker_failure_wait_time: 0) - end - - it 'is working even when there are failures' do - Timecop.freeze do - set_in_redis(:last_failure, 0.seconds.ago.to_f) - set_in_redis(:failure_count, 90) - - expect(circuit_breaker.backing_off?).to be_falsey - end - end - end - end - describe '#last_failure' do it 'returns the last failure time' do time = Time.parse("2017-05-26 17:52:30") diff --git a/spec/lib/gitlab/git/storage/failure_info_spec.rb b/spec/lib/gitlab/git/storage/failure_info_spec.rb new file mode 100644 index 00000000000..bae88fdda86 --- /dev/null +++ b/spec/lib/gitlab/git/storage/failure_info_spec.rb @@ -0,0 +1,70 @@ +require 'spec_helper' + +describe Gitlab::Git::Storage::FailureInfo, :broken_storage do + let(:storage_name) { 'default' } + let(:hostname) { Gitlab::Environment.hostname } + let(:cache_key) { "storage_accessible:#{storage_name}:#{hostname}" } + + def value_from_redis(name) + Gitlab::Git::Storage.redis.with do |redis| + redis.hmget(cache_key, name) + end.first + end + + def set_in_redis(name, value) + Gitlab::Git::Storage.redis.with do |redis| + redis.zadd(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, 0, cache_key) + redis.hmset(cache_key, name, value) + end.first + end + + describe '.reset_all!' do + it 'clears all entries form redis' do + set_in_redis(:failure_count, 10) + + described_class.reset_all! + + key_exists = Gitlab::Git::Storage.redis.with { |redis| redis.exists(cache_key) } + + expect(key_exists).to be_falsey + end + + it 'does not break when there are no keys in redis' do + expect { described_class.reset_all! }.not_to raise_error + end + end + + describe '.load' do + it 'loads failure information for a storage on a host' do + first_failure = Time.parse("2017-11-14 17:52:30") + last_failure = Time.parse("2017-11-14 18:54:37") + failure_count = 11 + + set_in_redis(:first_failure, first_failure.to_i) + set_in_redis(:last_failure, last_failure.to_i) + set_in_redis(:failure_count, failure_count.to_i) + + info = described_class.load(cache_key) + + expect(info.first_failure).to eq(first_failure) + expect(info.last_failure).to eq(last_failure) + expect(info.failure_count).to eq(failure_count) + end + end + + describe '#no_failures?' 
do + it 'is true when there are no failures' do + info = described_class.new(nil, nil, 0) + + expect(info.no_failures?).to be_truthy + end + + it 'is false when there are failures' do + info = described_class.new(Time.parse("2017-11-14 17:52:30"), + Time.parse("2017-11-14 18:54:37"), + 20) + + expect(info.no_failures?).to be_falsy + end + end +end diff --git a/spec/lib/gitlab/git/storage/health_spec.rb b/spec/lib/gitlab/git/storage/health_spec.rb index d7a52a04fbb..bb670fc5d94 100644 --- a/spec/lib/gitlab/git/storage/health_spec.rb +++ b/spec/lib/gitlab/git/storage/health_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe Gitlab::Git::Storage::Health, clean_gitlab_redis_shared_state: true, broken_storage: true do +describe Gitlab::Git::Storage::Health, broken_storage: true do let(:host1_key) { 'storage_accessible:broken:web01' } let(:host2_key) { 'storage_accessible:default:kiq01' } diff --git a/spec/lib/gitlab/git/storage/null_circuit_breaker_spec.rb b/spec/lib/gitlab/git/storage/null_circuit_breaker_spec.rb index 5db37f55e03..93ad20011de 100644 --- a/spec/lib/gitlab/git/storage/null_circuit_breaker_spec.rb +++ b/spec/lib/gitlab/git/storage/null_circuit_breaker_spec.rb @@ -27,7 +27,7 @@ describe Gitlab::Git::Storage::NullCircuitBreaker do end describe '#failure_info' do - it { Timecop.freeze { expect(breaker.failure_info).to eq(Gitlab::Git::Storage::CircuitBreaker::FailureInfo.new(Time.now, breaker.failure_count_threshold)) } } + it { expect(breaker.failure_info.no_failures?).to be_falsy } end end @@ -49,7 +49,7 @@ describe Gitlab::Git::Storage::NullCircuitBreaker do end describe '#failure_info' do - it { expect(breaker.failure_info).to eq(Gitlab::Git::Storage::CircuitBreaker::FailureInfo.new(nil, 0)) } + it { expect(breaker.failure_info.no_failures?).to be_truthy } end end diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb index c9643c5da47..2db560c2cec 100644 --- a/spec/lib/gitlab/git_access_spec.rb +++ b/spec/lib/gitlab/git_access_spec.rb @@ -193,7 +193,15 @@ describe Gitlab::GitAccess do let(:actor) { build(:rsa_deploy_key_2048, user: user) } end - describe '#check_project_moved!' 
do + shared_examples 'check_project_moved' do + it 'enqueues a redirected message' do + push_access_check + + expect(Gitlab::Checks::ProjectMoved.fetch_redirect_message(user.id, project.id)).not_to be_nil + end + end + + describe '#check_project_moved!', :clean_gitlab_redis_shared_state do before do project.add_master(user) end @@ -207,7 +215,40 @@ describe Gitlab::GitAccess do end end - context 'when a redirect was followed to find the project' do + context 'when a permanent redirect and ssh protocol' do + let(:redirected_path) { 'some/other-path' } + + before do + allow_any_instance_of(Gitlab::Checks::ProjectMoved).to receive(:permanent_redirect?).and_return(true) + end + + it 'allows push and pull access' do + aggregate_failures do + expect { push_access_check }.not_to raise_error + end + end + + it_behaves_like 'check_project_moved' + end + + context 'with a permanent redirect and http protocol' do + let(:redirected_path) { 'some/other-path' } + let(:protocol) { 'http' } + + before do + allow_any_instance_of(Gitlab::Checks::ProjectMoved).to receive(:permanent_redirect?).and_return(true) + end + + it 'allows_push and pull access' do + aggregate_failures do + expect { push_access_check }.not_to raise_error + end + end + + it_behaves_like 'check_project_moved' + end + + context 'with a temporal redirect and ssh protocol' do let(:redirected_path) { 'some/other-path' } it 'blocks push and pull access' do @@ -219,16 +260,15 @@ describe Gitlab::GitAccess do expect { pull_access_check }.to raise_error(described_class::ProjectMovedError, /git remote set-url origin #{project.ssh_url_to_repo}/) end end + end - context 'http protocol' do - let(:protocol) { 'http' } + context 'with a temporal redirect and http protocol' do + let(:redirected_path) { 'some/other-path' } + let(:protocol) { 'http' } - it 'includes the path to the project using HTTP' do - aggregate_failures do - expect { push_access_check }.to raise_error(described_class::ProjectMovedError, /git remote set-url origin #{project.http_url_to_repo}/) - expect { pull_access_check }.to raise_error(described_class::ProjectMovedError, /git remote set-url origin #{project.http_url_to_repo}/) - end - end + it 'does not allow to push and pull access' do + expect { push_access_check }.to raise_error(described_class::ProjectMovedError, /git remote set-url origin #{project.http_url_to_repo}/) + expect { pull_access_check }.to raise_error(described_class::ProjectMovedError, /git remote set-url origin #{project.http_url_to_repo}/) end end end diff --git a/spec/lib/gitlab/identifier_spec.rb b/spec/lib/gitlab/identifier_spec.rb index cfaeb1f0d4f..0385dd762c2 100644 --- a/spec/lib/gitlab/identifier_spec.rb +++ b/spec/lib/gitlab/identifier_spec.rb @@ -70,6 +70,10 @@ describe Gitlab::Identifier do expect(identifier.identify_using_commit(project, '123')).to eq(user) end end + + it 'returns nil if the project & ref are not present' do + expect(identifier.identify_using_commit(nil, nil)).to be_nil + end end describe '#identify_using_user' do diff --git a/spec/lib/gitlab/metrics/samplers/influx_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/influx_sampler_spec.rb index 667e4747897..f66451c5188 100644 --- a/spec/lib/gitlab/metrics/samplers/influx_sampler_spec.rb +++ b/spec/lib/gitlab/metrics/samplers/influx_sampler_spec.rb @@ -21,7 +21,6 @@ describe Gitlab::Metrics::Samplers::InfluxSampler do it 'samples various statistics' do expect(sampler).to receive(:sample_memory_usage) expect(sampler).to receive(:sample_file_descriptors) - expect(sampler).to 
receive(:sample_objects) expect(sampler).to receive(:sample_gc) expect(sampler).to receive(:flush) @@ -72,28 +71,6 @@ describe Gitlab::Metrics::Samplers::InfluxSampler do end end - if Gitlab::Metrics.mri? - describe '#sample_objects' do - it 'adds a metric containing the amount of allocated objects' do - expect(sampler).to receive(:add_metric) - .with(/object_counts/, an_instance_of(Hash), an_instance_of(Hash)) - .at_least(:once) - .and_call_original - - sampler.sample_objects - end - - it 'ignores classes without a name' do - expect(Allocations).to receive(:to_hash).and_return({ Class.new => 4 }) - - expect(sampler).not_to receive(:add_metric) - .with('object_counts', an_instance_of(Hash), type: nil) - - sampler.sample_objects - end - end - end - describe '#sample_gc' do it 'adds a metric containing garbage collection statistics' do expect(GC::Profiler).to receive(:total_time).and_return(0.24) diff --git a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb index 53699327da1..375cbf8a9ca 100644 --- a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb +++ b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb @@ -11,7 +11,6 @@ describe Gitlab::Metrics::Samplers::RubySampler do it 'samples various statistics' do expect(Gitlab::Metrics::System).to receive(:memory_usage) expect(Gitlab::Metrics::System).to receive(:file_descriptor_count) - expect(sampler).to receive(:sample_objects) expect(sampler).to receive(:sample_gc) sampler.sample @@ -65,26 +64,4 @@ describe Gitlab::Metrics::Samplers::RubySampler do sampler.sample end end - - if Gitlab::Metrics.mri? - describe '#sample_objects' do - it 'adds a metric containing the amount of allocated objects' do - expect(sampler.metrics[:objects_total]).to receive(:set) - .with(include(class: anything), be > 0) - .at_least(:once) - .and_call_original - - sampler.sample - end - - it 'ignores classes without a name' do - expect(Allocations).to receive(:to_hash).and_return({ Class.new => 4 }) - - expect(sampler.metrics[:objects_total]).not_to receive(:set) - .with(include(class: 'object_counts'), anything) - - sampler.sample - end - end - end end diff --git a/spec/lib/gitlab/reference_extractor_spec.rb b/spec/lib/gitlab/reference_extractor_spec.rb index ef874368077..8ec3f55e6de 100644 --- a/spec/lib/gitlab/reference_extractor_spec.rb +++ b/spec/lib/gitlab/reference_extractor_spec.rb @@ -115,6 +115,15 @@ describe Gitlab::ReferenceExtractor do end end + it 'does not include anchors from table of contents in issue references' do + issue1 = create(:issue, project: project) + issue2 = create(:issue, project: project) + + subject.analyze("not real issue <h4>#{issue1.iid}</h4>, real issue #{issue2.to_reference}") + + expect(subject.issues).to match_array([issue2]) + end + it 'accesses valid issue objects' do @i0 = create(:issue, project: project) @i1 = create(:issue, project: project) diff --git a/spec/lib/gitlab/storage_check/cli_spec.rb b/spec/lib/gitlab/storage_check/cli_spec.rb new file mode 100644 index 00000000000..6db0925899c --- /dev/null +++ b/spec/lib/gitlab/storage_check/cli_spec.rb @@ -0,0 +1,19 @@ +require 'spec_helper' + +describe Gitlab::StorageCheck::CLI do + let(:options) { Gitlab::StorageCheck::Options.new('unix://tmp/socket.sock', nil, 1, false) } + subject(:runner) { described_class.new(options) } + + describe '#update_settings' do + it 'updates the interval when changed in a valid response and logs the change' do + fake_response = double + expect(fake_response).to 
receive(:valid?).and_return(true) + expect(fake_response).to receive(:check_interval).and_return(42) + expect(runner.logger).to receive(:info) + + runner.update_settings(fake_response) + + expect(options.interval).to eq(42) + end + end +end diff --git a/spec/lib/gitlab/storage_check/gitlab_caller_spec.rb b/spec/lib/gitlab/storage_check/gitlab_caller_spec.rb new file mode 100644 index 00000000000..d869022fd31 --- /dev/null +++ b/spec/lib/gitlab/storage_check/gitlab_caller_spec.rb @@ -0,0 +1,46 @@ +require 'spec_helper' + +describe Gitlab::StorageCheck::GitlabCaller do + let(:options) { Gitlab::StorageCheck::Options.new('unix://tmp/socket.sock', nil, nil, false) } + subject(:gitlab_caller) { described_class.new(options) } + + describe '#call!' do + context 'when a socket is given' do + it 'calls a socket' do + fake_connection = double + expect(fake_connection).to receive(:post) + expect(Excon).to receive(:new).with('unix://tmp/socket.sock', socket: "tmp/socket.sock") { fake_connection } + + gitlab_caller.call! + end + end + + context 'when a host is given' do + let(:options) { Gitlab::StorageCheck::Options.new('http://localhost:8080', nil, nil, false) } + + it 'it calls a http response' do + fake_connection = double + expect(Excon).to receive(:new).with('http://localhost:8080', socket: nil) { fake_connection } + expect(fake_connection).to receive(:post) + + gitlab_caller.call! + end + end + end + + describe '#headers' do + it 'Adds the JSON header' do + headers = gitlab_caller.headers + + expect(headers['Content-Type']).to eq('application/json') + end + + context 'when a token was provided' do + let(:options) { Gitlab::StorageCheck::Options.new('unix://tmp/socket.sock', 'atoken', nil, false) } + + it 'adds it to the headers' do + expect(gitlab_caller.headers['TOKEN']).to eq('atoken') + end + end + end +end diff --git a/spec/lib/gitlab/storage_check/option_parser_spec.rb b/spec/lib/gitlab/storage_check/option_parser_spec.rb new file mode 100644 index 00000000000..cad4dfbefcf --- /dev/null +++ b/spec/lib/gitlab/storage_check/option_parser_spec.rb @@ -0,0 +1,31 @@ +require 'spec_helper' + +describe Gitlab::StorageCheck::OptionParser do + describe '.parse!' 
do + it 'assigns all options' do + args = %w(--target unix://tmp/hello/world.sock --token thetoken --interval 42) + + options = described_class.parse!(args) + + expect(options.token).to eq('thetoken') + expect(options.interval).to eq(42) + expect(options.target).to eq('unix://tmp/hello/world.sock') + end + + it 'requires the interval to be a number' do + args = %w(--target unix://tmp/hello/world.sock --interval fortytwo) + + expect { described_class.parse!(args) }.to raise_error(OptionParser::InvalidArgument) + end + + it 'raises an error if the scheme is not included' do + args = %w(--target tmp/hello/world.sock) + + expect { described_class.parse!(args) }.to raise_error(OptionParser::InvalidArgument) + end + + it 'raises an error if both socket and host are missing' do + expect { described_class.parse!([]) }.to raise_error(OptionParser::InvalidArgument) + end + end +end diff --git a/spec/lib/gitlab/storage_check/response_spec.rb b/spec/lib/gitlab/storage_check/response_spec.rb new file mode 100644 index 00000000000..0ff2963e443 --- /dev/null +++ b/spec/lib/gitlab/storage_check/response_spec.rb @@ -0,0 +1,54 @@ +require 'spec_helper' + +describe Gitlab::StorageCheck::Response do + let(:fake_json) do + { + check_interval: 42, + results: [ + { storage: 'working', success: true }, + { storage: 'skipped', success: nil }, + { storage: 'failing', success: false } + ] + }.to_json + end + + let(:fake_http_response) do + fake_response = instance_double("Excon::Response - Status check") + allow(fake_response).to receive(:status).and_return(200) + allow(fake_response).to receive(:body).and_return(fake_json) + allow(fake_response).to receive(:headers).and_return('Content-Type' => 'application/json') + + fake_response + end + let(:response) { described_class.new(fake_http_response) } + + describe '#valid?' 
do + it 'is valid for a success response with parseable JSON' do + expect(response).to be_valid + end + end + + describe '#check_interval' do + it 'returns the result from the JSON' do + expect(response.check_interval).to eq(42) + end + end + + describe '#responsive_shards' do + it 'contains the names of working shards' do + expect(response.responsive_shards).to contain_exactly('working') + end + end + + describe '#skipped_shards' do + it 'contains the names of skipped shards' do + expect(response.skipped_shards).to contain_exactly('skipped') + end + end + + describe '#failing_shards' do + it 'contains the name of failing shards' do + expect(response.failing_shards).to contain_exactly('failing') + end + end +end diff --git a/spec/migrations/remove_assignee_id_from_issue_spec.rb b/spec/migrations/remove_assignee_id_from_issue_spec.rb new file mode 100644 index 00000000000..2c6f992d3ae --- /dev/null +++ b/spec/migrations/remove_assignee_id_from_issue_spec.rb @@ -0,0 +1,37 @@ +require 'spec_helper' +require Rails.root.join('db', 'post_migrate', '20170523073948_remove_assignee_id_from_issue.rb') + +describe RemoveAssigneeIdFromIssue, :migration do + let(:issues) { table(:issues) } + let(:issue_assignees) { table(:issue_assignees) } + let(:users) { table(:users) } + + let!(:user_1) { users.create(email: 'email1@example.com') } + let!(:user_2) { users.create(email: 'email2@example.com') } + let!(:user_3) { users.create(email: 'email3@example.com') } + + def create_issue(assignees:) + issues.create.tap do |issue| + assignees.each do |assignee| + issue_assignees.create(issue_id: issue.id, user_id: assignee.id) + end + end + end + + let!(:issue_single_assignee) { create_issue(assignees: [user_1]) } + let!(:issue_no_assignee) { create_issue(assignees: []) } + let!(:issue_multiple_assignees) { create_issue(assignees: [user_2, user_3]) } + + describe '#down' do + it 'sets the assignee_id to a random matching assignee from the assignees table' do + migrate! 
+ disable_migrations_output { described_class.new.down } + + expect(issue_single_assignee.reload.assignee_id).to eq(user_1.id) + expect(issue_no_assignee.reload.assignee_id).to be_nil + expect(issue_multiple_assignees.reload.assignee_id).to eq(user_2.id).or(user_3.id) + + disable_migrations_output { described_class.new.up } + end + end +end diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb index 0b7e16cc33c..ef480e7a80a 100644 --- a/spec/models/application_setting_spec.rb +++ b/spec/models/application_setting_spec.rb @@ -115,9 +115,8 @@ describe ApplicationSetting do end context 'circuitbreaker settings' do - [:circuitbreaker_backoff_threshold, - :circuitbreaker_failure_count_threshold, - :circuitbreaker_failure_wait_time, + [:circuitbreaker_failure_count_threshold, + :circuitbreaker_check_interval, :circuitbreaker_failure_reset_time, :circuitbreaker_storage_timeout].each do |field| it "Validates #{field} as number" do @@ -126,16 +125,6 @@ describe ApplicationSetting do .is_greater_than_or_equal_to(0) end end - - it 'requires the `backoff_threshold` to be lower than the `failure_count_threshold`' do - setting.circuitbreaker_failure_count_threshold = 10 - setting.circuitbreaker_backoff_threshold = 15 - failure_message = "The circuitbreaker backoff threshold should be lower "\ - "than the failure count threshold" - - expect(setting).not_to be_valid - expect(setting.errors[:circuitbreaker_backoff_threshold]).to include(failure_message) - end end context 'repository storages' do diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb index 26d33663dad..a6258676767 100644 --- a/spec/models/ci/build_spec.rb +++ b/spec/models/ci/build_spec.rb @@ -1868,6 +1868,94 @@ describe Ci::Build do end end + describe 'state transition: any => [:running]' do + shared_examples 'validation is active' do + context 'when depended job has not been completed yet' do + let!(:pre_stage_job) { create(:ci_build, :running, pipeline: pipeline, name: 'test', stage_idx: 0) } + + it { expect { job.run! }.to raise_error(Ci::Build::MissingDependenciesError) } + end + + context 'when artifacts of depended job has been expired' do + let!(:pre_stage_job) { create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0) } + + it { expect { job.run! }.to raise_error(Ci::Build::MissingDependenciesError) } + end + + context 'when artifacts of depended job has been erased' do + let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0, erased_at: 1.minute.ago) } + + before do + pre_stage_job.erase + end + + it { expect { job.run! }.to raise_error(Ci::Build::MissingDependenciesError) } + end + end + + shared_examples 'validation is not active' do + context 'when depended job has not been completed yet' do + let!(:pre_stage_job) { create(:ci_build, :running, pipeline: pipeline, name: 'test', stage_idx: 0) } + + it { expect { job.run! }.not_to raise_error } + end + + context 'when artifacts of depended job has been expired' do + let!(:pre_stage_job) { create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0) } + + it { expect { job.run! }.not_to raise_error } + end + + context 'when artifacts of depended job has been erased' do + let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0, erased_at: 1.minute.ago) } + + before do + pre_stage_job.erase + end + + it { expect { job.run! 
}.not_to raise_error } + end + end + + let!(:job) { create(:ci_build, :pending, pipeline: pipeline, stage_idx: 1, options: options) } + + context 'when validates for dependencies is enabled' do + before do + stub_feature_flags(ci_disable_validates_dependencies: false) + end + + let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0) } + + context 'when "dependencies" keyword is not defined' do + let(:options) { {} } + + it { expect { job.run! }.not_to raise_error } + end + + context 'when "dependencies" keyword is empty' do + let(:options) { { dependencies: [] } } + + it { expect { job.run! }.not_to raise_error } + end + + context 'when "dependencies" keyword is specified' do + let(:options) { { dependencies: ['test'] } } + + it_behaves_like 'validation is active' + end + end + + context 'when validates for dependencies is disabled' do + let(:options) { { dependencies: ['test'] } } + + before do + stub_feature_flags(ci_disable_validates_dependencies: true) + end + + it_behaves_like 'validation is not active' + end + end + describe 'state transition when build fails' do let(:service) { MergeRequests::AddTodoWhenBuildFailsService.new(project, user) } diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb index d4b1e7c8dd4..bb89e093890 100644 --- a/spec/models/ci/pipeline_spec.rb +++ b/spec/models/ci/pipeline_spec.rb @@ -1244,7 +1244,7 @@ describe Ci::Pipeline, :mailer do describe '#execute_hooks' do let!(:build_a) { create_build('a', 0) } - let!(:build_b) { create_build('b', 1) } + let!(:build_b) { create_build('b', 0) } let!(:hook) do create(:project_hook, project: project, pipeline_events: enabled) @@ -1300,6 +1300,8 @@ describe Ci::Pipeline, :mailer do end context 'when stage one failed' do + let!(:build_b) { create_build('b', 1) } + before do build_a.drop end diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb index 3817f20bfe7..b7c6286fd83 100644 --- a/spec/models/namespace_spec.rb +++ b/spec/models/namespace_spec.rb @@ -559,4 +559,34 @@ describe Namespace do end end end + + describe "#allowed_path_by_redirects" do + let(:namespace1) { create(:namespace, path: 'foo') } + + context "when the path has been taken before" do + before do + namespace1.path = 'bar' + namespace1.save! + end + + it 'should be invalid' do + namespace2 = build(:group, path: 'foo') + expect(namespace2).to be_invalid + end + + it 'should return an error on path' do + namespace2 = build(:group, path: 'foo') + namespace2.valid? + expect(namespace2.errors.messages[:path].first).to eq('foo has been taken before. 
Please use another one') + end + end + + context "when the path has not been taken before" do + it 'should be valid' do + expect(RedirectRoute.count).to eq(0) + namespace = build(:namespace) + expect(namespace).to be_valid + end + end + end end diff --git a/spec/models/personal_access_token_spec.rb b/spec/models/personal_access_token_spec.rb index 01440b15674..2bb1c49b740 100644 --- a/spec/models/personal_access_token_spec.rb +++ b/spec/models/personal_access_token_spec.rb @@ -1,6 +1,8 @@ require 'spec_helper' describe PersonalAccessToken do + subject { described_class } + describe '.build' do let(:personal_access_token) { build(:personal_access_token) } let(:invalid_personal_access_token) { build(:personal_access_token, :invalid) } @@ -45,6 +47,29 @@ describe PersonalAccessToken do end end + describe 'Redis storage' do + let(:user_id) { 123 } + let(:token) { 'abc000foo' } + + before do + subject.redis_store!(user_id, token) + end + + it 'returns stored data' do + expect(subject.redis_getdel(user_id)).to eq(token) + end + + context 'after deletion' do + before do + expect(subject.redis_getdel(user_id)).to eq(token) + end + + it 'token is removed' do + expect(subject.redis_getdel(user_id)).to be_nil + end + end + end + context "validations" do let(:personal_access_token) { build(:personal_access_token) } diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb index 82ed1ecee33..358bc3dfb94 100644 --- a/spec/models/repository_spec.rb +++ b/spec/models/repository_spec.rb @@ -29,7 +29,9 @@ describe Repository do def expect_to_raise_storage_error expect { yield }.to raise_error do |exception| storage_exceptions = [Gitlab::Git::Storage::Inaccessible, Gitlab::Git::CommandError, GRPC::Unavailable] - expect(exception.class).to be_in(storage_exceptions) + known_exception = storage_exceptions.select { |e| exception.is_a?(e) } + + expect(known_exception).not_to be_nil end end @@ -634,9 +636,7 @@ describe Repository do end describe '#fetch_ref' do - # Setting the var here, sidesteps the stub that makes gitaly raise an error - # before the actual test call - set(:broken_repository) { create(:project, :broken_storage).repository } + let(:broken_repository) { create(:project, :broken_storage).repository } describe 'when storage is broken', :broken_storage do it 'should raise a storage error' do diff --git a/spec/models/route_spec.rb b/spec/models/route_spec.rb index fece370c03f..ddad6862a63 100644 --- a/spec/models/route_spec.rb +++ b/spec/models/route_spec.rb @@ -87,6 +87,7 @@ describe Route do end context 'when conflicting redirects exist' do + let(:route) { create(:project).route } let!(:conflicting_redirect1) { route.create_redirect('bar/test') } let!(:conflicting_redirect2) { route.create_redirect('bar/test/foo') } let!(:conflicting_redirect3) { route.create_redirect('gitlab-org') } @@ -141,11 +142,50 @@ describe Route do expect(redirect_route.source).to eq(route.source) expect(redirect_route.path).to eq('foo') end + + context 'when the source is a Project' do + it 'creates a temporal RedirectRoute' do + project = create(:project) + route = project.route + redirect_route = route.create_redirect('foo') + expect(redirect_route.permanent?).to be_falsy + end + end + + context 'when the source is not a project' do + it 'creates a permanent RedirectRoute' do + redirect_route = route.create_redirect('foo', permanent: true) + expect(redirect_route.permanent?).to be_truthy + end + end end describe '#delete_conflicting_redirects' do + context 'with permanent redirect' do + it 'does not 
delete the redirect' do + route.create_redirect("#{route.path}/foo", permanent: true) + + expect do + route.delete_conflicting_redirects + end.not_to change { RedirectRoute.count } + end + end + + context 'with temporal redirect' do + let(:route) { create(:project).route } + + it 'deletes the redirect' do + route.create_redirect("#{route.path}/foo") + + expect do + route.delete_conflicting_redirects + end.to change { RedirectRoute.count }.by(-1) + end + end + context 'when a redirect route with the same path exists' do context 'when the redirect route has matching case' do + let(:route) { create(:project).route } let!(:redirect1) { route.create_redirect(route.path) } it 'deletes the redirect' do @@ -169,6 +209,7 @@ describe Route do end context 'when the redirect route is differently cased' do + let(:route) { create(:project).route } let!(:redirect1) { route.create_redirect(route.path.upcase) } it 'deletes the redirect' do @@ -185,7 +226,32 @@ describe Route do expect(route.conflicting_redirects).to be_an(ActiveRecord::Relation) end + context 'with permanent redirects' do + it 'does not return anything' do + route.create_redirect("#{route.path}/foo", permanent: true) + route.create_redirect("#{route.path}/foo/bar", permanent: true) + route.create_redirect("#{route.path}/baz/quz", permanent: true) + + expect(route.conflicting_redirects).to be_empty + end + end + + context 'with temporal redirects' do + let(:route) { create(:project).route } + + it 'returns the redirect routes' do + route = create(:project).route + redirect1 = route.create_redirect("#{route.path}/foo") + redirect2 = route.create_redirect("#{route.path}/foo/bar") + redirect3 = route.create_redirect("#{route.path}/baz/quz") + + expect(route.conflicting_redirects).to match_array([redirect1, redirect2, redirect3]) + end + end + context 'when a redirect route with the same path exists' do + let(:route) { create(:project).route } + context 'when the redirect route has matching case' do let!(:redirect1) { route.create_redirect(route.path) } @@ -214,4 +280,42 @@ describe Route do end end end + + describe "#conflicting_redirect_exists?" do + context 'when a conflicting redirect exists' do + let(:group1) { create(:group, path: 'foo') } + let(:group2) { create(:group, path: 'baz') } + + it 'should not be saved' do + group1.path = 'bar' + group1.save + + group2.path = 'foo' + + expect(group2.save).to be_falsy + end + + it 'should return an error on path' do + group1.path = 'bar' + group1.save + + group2.path = 'foo' + group2.valid? + expect(group2.errors["route.path"].first).to eq('foo has been taken before. 
Please use another one') + end + end + + context 'when a conflicting redirect does not exist' do + let(:project1) { create(:project, path: 'foo') } + let(:project2) { create(:project, path: 'baz') } + + it 'should be saved' do + project1.path = 'bar' + project1.save + + project2.path = 'foo' + expect(project2.save).to be_truthy + end + end + end end diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb index 03c96a8f5aa..4687d9dfa00 100644 --- a/spec/models/user_spec.rb +++ b/spec/models/user_spec.rb @@ -913,11 +913,11 @@ describe User do describe 'email matching' do it 'returns users with a matching Email' do - expect(described_class.search(user.email)).to eq([user, user2]) + expect(described_class.search(user.email)).to eq([user]) end - it 'returns users with a partially matching Email' do - expect(described_class.search(user.email[0..2])).to eq([user, user2]) + it 'does not return users with a partially matching Email' do + expect(described_class.search(user.email[0..2])).not_to include(user, user2) end it 'returns users with a matching Email regardless of the casing' do @@ -973,8 +973,8 @@ describe User do expect(search_with_secondary_emails(user.email)).to eq([user]) end - it 'returns users with a partially matching email' do - expect(search_with_secondary_emails(user.email[0..2])).to eq([user]) + it 'does not return users with a partially matching email' do + expect(search_with_secondary_emails(user.email[0..2])).not_to include([user]) end it 'returns users with a matching email regardless of the casing' do @@ -997,29 +997,8 @@ describe User do expect(search_with_secondary_emails(email.email)).to eq([email.user]) end - it 'returns users with a matching part of secondary email' do - expect(search_with_secondary_emails(email.email[1..4])).to eq([email.user]) - end - - it 'return users with a matching part of secondary email regardless of case' do - expect(search_with_secondary_emails(email.email[1..4].upcase)).to eq([email.user]) - expect(search_with_secondary_emails(email.email[1..4].downcase)).to eq([email.user]) - expect(search_with_secondary_emails(email.email[1..4].capitalize)).to eq([email.user]) - end - - it 'returns multiple users with matching secondary emails' do - email1 = create(:email, email: '1_testemail@example.com') - email2 = create(:email, email: '2_testemail@example.com') - email3 = create(:email, email: 'other@email.com') - email3.user.update_attributes!(email: 'another@mail.com') - - expect( - search_with_secondary_emails('testemail@example.com').map(&:id) - ).to include(email1.user.id, email2.user.id) - - expect( - search_with_secondary_emails('testemail@example.com').map(&:id) - ).not_to include(email3.user.id) + it 'does not return users with a matching part of secondary email' do + expect(search_with_secondary_emails(email.email[1..4])).not_to include([email.user]) end end @@ -2592,4 +2571,28 @@ describe User do include_examples 'max member access for groups' end end + + describe "#username_previously_taken?" do + let(:user1) { create(:user, username: 'foo') } + + context 'when the username has been taken before' do + before do + user1.username = 'bar' + user1.save! + end + + it 'should raise an ActiveRecord::RecordInvalid exception' do + user2 = build(:user, username: 'foo') + expect { user2.save! 
}.to raise_error(ActiveRecord::RecordInvalid, /Path foo has been taken before/) + end + end + + context 'when the username has not been taken before' do + it 'should be valid' do + expect(RedirectRoute.count).to eq(0) + user2 = build(:user, username: 'baz') + expect(user2).to be_valid + end + end + end end diff --git a/spec/requests/api/circuit_breakers_spec.rb b/spec/requests/api/circuit_breakers_spec.rb index 3b858c40fd6..fe76f057115 100644 --- a/spec/requests/api/circuit_breakers_spec.rb +++ b/spec/requests/api/circuit_breakers_spec.rb @@ -47,7 +47,7 @@ describe API::CircuitBreakers do describe 'DELETE circuit_breakers/repository_storage' do it 'clears all circuit_breakers' do - expect(Gitlab::Git::Storage::CircuitBreaker).to receive(:reset_all!) + expect(Gitlab::Git::Storage::FailureInfo).to receive(:reset_all!) delete api('/circuit_breakers/repository_storage', admin) diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb index 554723d6b1e..6330c140246 100644 --- a/spec/requests/api/groups_spec.rb +++ b/spec/requests/api/groups_spec.rb @@ -173,6 +173,28 @@ describe API::Groups do end describe "GET /groups/:id" do + # Given a group, create one project for each visibility level + # + # group - Group to add projects to + # share_with - If provided, each project will be shared with this Group + # + # Returns a Hash of visibility_level => Project pairs + def add_projects_to_group(group, share_with: nil) + projects = { + public: create(:project, :public, namespace: group), + internal: create(:project, :internal, namespace: group), + private: create(:project, :private, namespace: group) + } + + if share_with + create(:project_group_link, project: projects[:public], group: share_with) + create(:project_group_link, project: projects[:internal], group: share_with) + create(:project_group_link, project: projects[:private], group: share_with) + end + + projects + end + context 'when unauthenticated' do it 'returns 404 for a private group' do get api("/groups/#{group2.id}") @@ -183,6 +205,26 @@ describe API::Groups do get api("/groups/#{group1.id}") expect(response).to have_gitlab_http_status(200) end + + it 'returns only public projects in the group' do + public_group = create(:group, :public) + projects = add_projects_to_group(public_group) + + get api("/groups/#{public_group.id}") + + expect(json_response['projects'].map { |p| p['id'].to_i }) + .to contain_exactly(projects[:public].id) + end + + it 'returns only public projects shared with the group' do + public_group = create(:group, :public) + projects = add_projects_to_group(public_group, share_with: group1) + + get api("/groups/#{group1.id}") + + expect(json_response['shared_projects'].map { |p| p['id'].to_i }) + .to contain_exactly(projects[:public].id) + end end context "when authenticated as user" do @@ -222,6 +264,26 @@ describe API::Groups do expect(response).to have_gitlab_http_status(404) end + + it 'returns only public and internal projects in the group' do + public_group = create(:group, :public) + projects = add_projects_to_group(public_group) + + get api("/groups/#{public_group.id}", user2) + + expect(json_response['projects'].map { |p| p['id'].to_i }) + .to contain_exactly(projects[:public].id, projects[:internal].id) + end + + it 'returns only public and internal projects shared with the group' do + public_group = create(:group, :public) + projects = add_projects_to_group(public_group, share_with: group1) + + get api("/groups/#{group1.id}", user2) + + expect(json_response['shared_projects'].map { |p| 
p['id'].to_i }) + .to contain_exactly(projects[:public].id, projects[:internal].id) + end end context "when authenticated as admin" do diff --git a/spec/requests/api/internal_spec.rb b/spec/requests/api/internal_spec.rb index 67e1539cbc3..3c31980b273 100644 --- a/spec/requests/api/internal_spec.rb +++ b/spec/requests/api/internal_spec.rb @@ -537,16 +537,7 @@ describe API::Internal do context 'the project path was changed' do let!(:old_path_to_repo) { project.repository.path_to_repo } - let!(:old_full_path) { project.full_path } - let(:project_moved_message) do - <<-MSG.strip_heredoc - Project '#{old_full_path}' was moved to '#{project.full_path}'. - - Please update your Git remote and try again: - - git remote set-url origin #{project.ssh_url_to_repo} - MSG - end + let!(:repository) { project.repository } before do project.team << [user, :developer] @@ -555,19 +546,17 @@ describe API::Internal do end it 'rejects the push' do - push_with_path(key, old_path_to_repo) + push(key, project) expect(response).to have_gitlab_http_status(200) - expect(json_response['status']).to be_falsey - expect(json_response['message']).to eq(project_moved_message) + expect(json_response['status']).to be_falsy end it 'rejects the SSH pull' do - pull_with_path(key, old_path_to_repo) + pull(key, project) expect(response).to have_gitlab_http_status(200) - expect(json_response['status']).to be_falsey - expect(json_response['message']).to eq(project_moved_message) + expect(json_response['status']).to be_falsy end end end @@ -695,7 +684,7 @@ describe API::Internal do # end # end - describe 'POST /internal/post_receive' do + describe 'POST /internal/post_receive', :clean_gitlab_redis_shared_state do let(:identifier) { 'key-123' } let(:valid_params) do @@ -713,6 +702,8 @@ describe API::Internal do before do project.team << [user, :developer] + allow(described_class).to receive(:identify).and_return(user) + allow_any_instance_of(Gitlab::Identifier).to receive(:identify).and_return(user) end it 'enqueues a PostReceive worker job' do @@ -780,6 +771,19 @@ describe API::Internal do expect(json_response['broadcast_message']).to eq(nil) end end + + context 'with a redirected data' do + it 'returns redirected message on the response' do + project_moved = Gitlab::Checks::ProjectMoved.new(project, user, 'foo/baz', 'http') + project_moved.add_redirect_message + + post api("/internal/post_receive"), valid_params + + expect(response).to have_gitlab_http_status(200) + expect(json_response["redirected_message"]).to be_present + expect(json_response["redirected_message"]).to eq(project_moved.redirect_message) + end + end end describe 'POST /internal/pre_receive' do diff --git a/spec/requests/api/issues_spec.rb b/spec/requests/api/issues_spec.rb index 99525cd0a6a..3f5070a1fd2 100644 --- a/spec/requests/api/issues_spec.rb +++ b/spec/requests/api/issues_spec.rb @@ -860,6 +860,20 @@ describe API::Issues, :mailer do end end + context 'user does not have permissions to create issue' do + let(:not_member) { create(:user) } + + before do + project.project_feature.update(issues_access_level: ProjectFeature::PRIVATE) + end + + it 'renders 403' do + post api("/projects/#{project.id}/issues", not_member), title: 'new issue' + + expect(response).to have_gitlab_http_status(403) + end + end + it 'creates a new project issue' do post api("/projects/#{project.id}/issues", user), title: 'new issue', labels: 'label, label2', weight: 3, diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb index 63175c40a18..015d4b9a491 
100644 --- a/spec/requests/api/settings_spec.rb +++ b/spec/requests/api/settings_spec.rb @@ -54,7 +54,7 @@ describe API::Settings, 'Settings' do dsa_key_restriction: 2048, ecdsa_key_restriction: 384, ed25519_key_restriction: 256, - circuitbreaker_failure_wait_time: 2 + circuitbreaker_check_interval: 2 expect(response).to have_gitlab_http_status(200) expect(json_response['default_projects_limit']).to eq(3) @@ -75,7 +75,7 @@ describe API::Settings, 'Settings' do expect(json_response['dsa_key_restriction']).to eq(2048) expect(json_response['ecdsa_key_restriction']).to eq(384) expect(json_response['ed25519_key_restriction']).to eq(256) - expect(json_response['circuitbreaker_failure_wait_time']).to eq(2) + expect(json_response['circuitbreaker_check_interval']).to eq(2) end end diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb index a16f98bec36..fa02fffc82a 100644 --- a/spec/requests/git_http_spec.rb +++ b/spec/requests/git_http_spec.rb @@ -324,9 +324,9 @@ describe 'Git HTTP requests' do <<-MSG.strip_heredoc Project '#{redirect.path}' was moved to '#{project.full_path}'. - Please update your Git remote and try again: + Please update your Git remote: - git remote set-url origin #{project.http_url_to_repo} + git remote set-url origin #{project.http_url_to_repo} and try again. MSG end @@ -533,9 +533,9 @@ describe 'Git HTTP requests' do <<-MSG.strip_heredoc Project '#{redirect.path}' was moved to '#{project.full_path}'. - Please update your Git remote and try again: + Please update your Git remote: - git remote set-url origin #{project.http_url_to_repo} + git remote set-url origin #{project.http_url_to_repo} and try again. MSG end diff --git a/spec/rubocop/cop/migration/remove_column_spec.rb b/spec/rubocop/cop/migration/remove_column_spec.rb new file mode 100644 index 00000000000..89112f01723 --- /dev/null +++ b/spec/rubocop/cop/migration/remove_column_spec.rb @@ -0,0 +1,68 @@ +require 'spec_helper' + +require 'rubocop' +require 'rubocop/rspec/support' + +require_relative '../../../../rubocop/cop/migration/remove_column' + +describe RuboCop::Cop::Migration::RemoveColumn do + include CopHelper + + subject(:cop) { described_class.new } + + def source(meth = 'change') + "def #{meth}; remove_column :table, :column; end" + end + + context 'in a regular migration' do + before do + allow(cop).to receive(:in_migration?).and_return(true) + allow(cop).to receive(:in_post_deployment_migration?).and_return(false) + end + + it 'registers an offense when remove_column is used in the change method' do + inspect_source(cop, source('change')) + + aggregate_failures do + expect(cop.offenses.size).to eq(1) + expect(cop.offenses.map(&:line)).to eq([1]) + end + end + + it 'registers an offense when remove_column is used in the up method' do + inspect_source(cop, source('up')) + + aggregate_failures do + expect(cop.offenses.size).to eq(1) + expect(cop.offenses.map(&:line)).to eq([1]) + end + end + + it 'registers no offense when remove_column is used in the down method' do + inspect_source(cop, source('down')) + + expect(cop.offenses.size).to eq(0) + end + end + + context 'in a post-deployment migration' do + before do + allow(cop).to receive(:in_migration?).and_return(true) + allow(cop).to receive(:in_post_deployment_migration?).and_return(true) + end + + it 'registers no offense' do + inspect_source(cop, source) + + expect(cop.offenses.size).to eq(0) + end + end + + context 'outside of a migration' do + it 'registers no offense' do + inspect_source(cop, source) + + 
expect(cop.offenses.size).to eq(0) + end + end +end diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb index b0de8d447a2..41ce81e0651 100644 --- a/spec/services/ci/create_pipeline_service_spec.rb +++ b/spec/services/ci/create_pipeline_service_spec.rb @@ -518,5 +518,20 @@ describe Ci::CreatePipelineService do end end end + + context 'when pipeline is running for a tag' do + before do + config = YAML.dump(test: { script: 'test', only: ['branches'] }, + deploy: { script: 'deploy', only: ['tags'] }) + + stub_ci_pipeline_yaml_file(config) + end + + it 'creates a tagged pipeline' do + pipeline = execute_service(ref: 'v1.0.0') + + expect(pipeline.tag?).to be true + end + end end end diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb index decdd577226..3ee59014b5b 100644 --- a/spec/services/ci/register_job_service_spec.rb +++ b/spec/services/ci/register_job_service_spec.rb @@ -276,6 +276,89 @@ module Ci end end + context 'when "dependencies" keyword is specified' do + shared_examples 'not pick' do + it 'does not pick the build and drops the build' do + expect(subject).to be_nil + expect(pending_job.reload).to be_failed + expect(pending_job).to be_missing_dependency_failure + end + end + + shared_examples 'validation is active' do + context 'when depended job has not been completed yet' do + let!(:pre_stage_job) { create(:ci_build, :running, pipeline: pipeline, name: 'test', stage_idx: 0) } + + it_behaves_like 'not pick' + end + + context 'when artifacts of depended job has been expired' do + let!(:pre_stage_job) { create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0) } + + it_behaves_like 'not pick' + end + + context 'when artifacts of depended job has been erased' do + let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0, erased_at: 1.minute.ago) } + + before do + pre_stage_job.erase + end + + it_behaves_like 'not pick' + end + end + + shared_examples 'validation is not active' do + context 'when depended job has not been completed yet' do + let!(:pre_stage_job) { create(:ci_build, :running, pipeline: pipeline, name: 'test', stage_idx: 0) } + + it { expect(subject).to eq(pending_job) } + end + + context 'when artifacts of depended job has been expired' do + let!(:pre_stage_job) { create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0) } + + it { expect(subject).to eq(pending_job) } + end + + context 'when artifacts of depended job has been erased' do + let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0, erased_at: 1.minute.ago) } + + before do + pre_stage_job.erase + end + + it { expect(subject).to eq(pending_job) } + end + end + + before do + stub_feature_flags(ci_disable_validates_dependencies: false) + end + + let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0) } + let!(:pending_job) { create(:ci_build, :pending, pipeline: pipeline, stage_idx: 1, options: { dependencies: ['test'] } ) } + + subject { execute(specific_runner) } + + context 'when validates for dependencies is enabled' do + before do + stub_feature_flags(ci_disable_validates_dependencies: false) + end + + it_behaves_like 'validation is active' + end + + context 'when validates for dependencies is disabled' do + before do + stub_feature_flags(ci_disable_validates_dependencies: true) + end + + it_behaves_like 
'validation is not active' + end + end + def execute(runner) described_class.new(runner).execute.build end diff --git a/spec/services/members/authorized_destroy_service_spec.rb b/spec/services/members/authorized_destroy_service_spec.rb index 2d04d824180..d4ef31c0c74 100644 --- a/spec/services/members/authorized_destroy_service_spec.rb +++ b/spec/services/members/authorized_destroy_service_spec.rb @@ -45,7 +45,7 @@ describe Members::AuthorizedDestroyService do expect { described_class.new(member, member_user).execute } .to change { number_of_assigned_issuables(member_user) }.from(4).to(2) - expect(issue.reload.assignee_id).to be_nil + expect(issue.reload.assignee_ids).to be_empty expect(merge_request.reload.assignee_id).to be_nil end end diff --git a/spec/services/users/keys_count_service_spec.rb b/spec/services/users/keys_count_service_spec.rb index a188cf86772..bee8380e8b7 100644 --- a/spec/services/users/keys_count_service_spec.rb +++ b/spec/services/users/keys_count_service_spec.rb @@ -15,14 +15,12 @@ describe Users::KeysCountService, :use_clean_rails_memory_store_caching do expect(service.count).to eq(1) end - it 'caches the number of keys in Redis' do + it 'caches the number of keys in Redis', :request_store do + service.delete_cache + control_count = ActiveRecord::QueryRecorder.new { service.count }.count service.delete_cache - recorder = ActiveRecord::QueryRecorder.new do - 2.times { service.count } - end - - expect(recorder.count).to eq(1) + expect { 2.times { service.count } }.not_to exceed_query_limit(control_count) end end diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index 242a2230b67..f94fb8733d5 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -121,18 +121,6 @@ RSpec.configure do |config| reset_delivered_emails! end - # Stub the `ForkedStorageCheck.storage_available?` method unless - # `:broken_storage` metadata is defined - # - # This check can be slow and is unnecessary in a test environment where we - # know the storage is available, because we create it at runtime - config.before(:example) do |example| - unless example.metadata[:broken_storage] - allow(Gitlab::Git::Storage::ForkedStorageCheck) - .to receive(:storage_available?).and_return(true) - end - end - config.around(:each, :use_clean_rails_memory_store_caching) do |example| caching_store = Rails.cache Rails.cache = ActiveSupport::Cache::MemoryStore.new diff --git a/spec/support/stored_repositories.rb b/spec/support/stored_repositories.rb index f3deae0f455..f9121cce985 100644 --- a/spec/support/stored_repositories.rb +++ b/spec/support/stored_repositories.rb @@ -12,6 +12,25 @@ RSpec.configure do |config| raise GRPC::Unavailable.new('Gitaly broken in this spec') end - Gitlab::Git::Storage::CircuitBreaker.reset_all! 
+ # Track the maximum number of failures + first_failure = Time.parse("2017-11-14 17:52:30") + last_failure = Time.parse("2017-11-14 18:54:37") + failure_count = Gitlab::CurrentSettings + .current_application_settings + .circuitbreaker_failure_count_threshold + 1 + cache_key = "#{Gitlab::Git::Storage::REDIS_KEY_PREFIX}broken:#{Gitlab::Environment.hostname}" + + Gitlab::Git::Storage.redis.with do |redis| + redis.pipelined do + redis.zadd(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, 0, cache_key) + redis.hset(cache_key, :first_failure, first_failure.to_i) + redis.hset(cache_key, :last_failure, last_failure.to_i) + redis.hset(cache_key, :failure_count, failure_count.to_i) + end + end + end + + config.after(:each, :broken_storage) do + Gitlab::Git::Storage.redis.with(&:flushall) end end diff --git a/spec/support/stub_configuration.rb b/spec/support/stub_configuration.rb index 4ead78529c3..b36cf3c544c 100644 --- a/spec/support/stub_configuration.rb +++ b/spec/support/stub_configuration.rb @@ -43,6 +43,8 @@ module StubConfiguration end def stub_storage_settings(messages) + messages.deep_stringify_keys! + # Default storage is always required messages['default'] ||= Gitlab.config.repositories.storages.default messages.each do |storage_name, storage_settings| |
