Diffstat (limited to 'spec')
81 files changed, 2036 insertions, 305 deletions
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb index b73ca0c2346..768c7e99c96 100644 --- a/spec/controllers/application_controller_spec.rb +++ b/spec/controllers/application_controller_spec.rb @@ -6,6 +6,10 @@ describe ApplicationController do describe '#check_password_expiration' do let(:controller) { described_class.new } + before do + allow(controller).to receive(:session).and_return({}) + end + it 'redirects if the user is over their password expiry' do user.password_expires_at = Time.new(2002) diff --git a/spec/controllers/groups/children_controller_spec.rb b/spec/controllers/groups/children_controller_spec.rb index 4262d474e59..cb1b460fc0e 100644 --- a/spec/controllers/groups/children_controller_spec.rb +++ b/spec/controllers/groups/children_controller_spec.rb @@ -280,6 +280,17 @@ describe Groups::ChildrenController do expect(assigns(:children)).to contain_exactly(other_subgroup, *next_page_projects.take(per_page - 1)) end + + context 'with a mixed first page' do + let!(:first_page_subgroups) { [create(:group, :public, parent: group)] } + let!(:first_page_projects) { create_list(:project, per_page, :public, namespace: group) } + + it 'correctly calculates the counts' do + get :index, group_id: group.to_param, sort: 'id_asc', page: 2, format: :json + + expect(response).to have_gitlab_http_status(200) + end + end end end end diff --git a/spec/controllers/projects/commit_controller_spec.rb b/spec/controllers/projects/commit_controller_spec.rb index 5dc27e2bbba..fd90c0d8bad 100644 --- a/spec/controllers/projects/commit_controller_spec.rb +++ b/spec/controllers/projects/commit_controller_spec.rb @@ -1,15 +1,15 @@ require 'spec_helper' describe Projects::CommitController do - let(:project) { create(:project, :repository) } - let(:user) { create(:user) } + set(:project) { create(:project, :repository) } + set(:user) { create(:user) } let(:commit) { project.commit("master") } let(:master_pickable_sha) { '7d3b0f7cff5f37573aea97cebfd5692ea1689924' } let(:master_pickable_commit) { project.commit(master_pickable_sha) } before do sign_in(user) - project.team << [user, :master] + project.add_master(user) end describe 'GET show' do diff --git a/spec/controllers/projects/notes_controller_spec.rb b/spec/controllers/projects/notes_controller_spec.rb index 5f5a789d5cc..37e9f863fc4 100644 --- a/spec/controllers/projects/notes_controller_spec.rb +++ b/spec/controllers/projects/notes_controller_spec.rb @@ -336,6 +336,29 @@ describe Projects::NotesController do end end + describe 'PUT update' do + let(:request_params) do + { + namespace_id: project.namespace, + project_id: project, + id: note, + format: :json, + note: { + note: "New comment" + } + } + end + + before do + sign_in(note.author) + project.team << [note.author, :developer] + end + + it "updates the note" do + expect { put :update, request_params }.to change { note.reload.note } + end + end + describe 'DELETE destroy' do let(:request_params) do { diff --git a/spec/factories/fork_network_members.rb b/spec/factories/fork_network_members.rb new file mode 100644 index 00000000000..509c4e1fa1c --- /dev/null +++ b/spec/factories/fork_network_members.rb @@ -0,0 +1,8 @@ +FactoryGirl.define do + factory :fork_network_member do + association :project + association :fork_network + + forked_from_project { fork_network.root_project } + end +end diff --git a/spec/factories/notes.rb b/spec/factories/notes.rb index f0d05504b7e..ab4ae123429 100644 --- a/spec/factories/notes.rb +++ 
b/spec/factories/notes.rb @@ -130,6 +130,7 @@ FactoryGirl.define do before(:create) do |note, evaluator| discussion = evaluator.in_reply_to next unless discussion + discussion = discussion.to_discussion if discussion.is_a?(Note) next unless discussion diff --git a/spec/features/admin/admin_users_spec.rb b/spec/features/admin/admin_users_spec.rb index b47f9055d29..a69b428d117 100644 --- a/spec/features/admin/admin_users_spec.rb +++ b/spec/features/admin/admin_users_spec.rb @@ -167,19 +167,36 @@ describe "Admin::Users" do it 'sees impersonation log out icon' do icon = first('.fa.fa-user-secret') - expect(icon).not_to eql nil + expect(icon).not_to be nil end it 'logs out of impersonated user back to original user' do find(:css, 'li.impersonation a').click - expect(page.find(:css, '.header-user .profile-link')['data-user']).to eql(current_user.username) + expect(page.find(:css, '.header-user .profile-link')['data-user']).to eq(current_user.username) end it 'is redirected back to the impersonated users page in the admin after stopping' do find(:css, 'li.impersonation a').click - expect(current_path).to eql "/admin/users/#{another_user.username}" + expect(current_path).to eq("/admin/users/#{another_user.username}") + end + end + + context 'when impersonating a user with an expired password' do + before do + another_user.update(password_expires_at: Time.now - 5.minutes) + click_link 'Impersonate' + end + + it 'does not redirect to password change page' do + expect(current_path).to eq('/') + end + + it 'is redirected back to the impersonated users page in the admin after stopping' do + find(:css, 'li.impersonation a').click + + expect(current_path).to eq("/admin/users/#{another_user.username}") end end end diff --git a/spec/features/boards/sidebar_spec.rb b/spec/features/boards/sidebar_spec.rb index 9137ab82ff4..205900615c4 100644 --- a/spec/features/boards/sidebar_spec.rb +++ b/spec/features/boards/sidebar_spec.rb @@ -331,11 +331,29 @@ describe 'Issue Boards', :js do context 'subscription' do it 'changes issue subscription' do click_card(card) + wait_for_requests - page.within('.subscription') do + page.within('.subscriptions') do click_button 'Subscribe' wait_for_requests - expect(page).to have_content("Unsubscribe") + + expect(page).to have_content('Unsubscribe') + end + end + + it 'has "Unsubscribe" button when already subscribed' do + create(:subscription, user: user, project: project, subscribable: issue2, subscribed: true) + visit project_board_path(project, board) + wait_for_requests + + click_card(card) + wait_for_requests + + page.within('.subscriptions') do + click_button 'Unsubscribe' + wait_for_requests + + expect(page).to have_content('Subscribe') end end end diff --git a/spec/features/commits_spec.rb b/spec/features/commits_spec.rb index 479fb713297..98586ddbd81 100644 --- a/spec/features/commits_spec.rb +++ b/spec/features/commits_spec.rb @@ -1,8 +1,6 @@ require 'spec_helper' describe 'Commits' do - include CiStatusHelper - let(:project) { create(:project, :repository) } let(:user) { create(:user) } @@ -33,7 +31,7 @@ describe 'Commits' do describe 'Commit builds' do before do - visit ci_status_path(pipeline) + visit pipeline_path(pipeline) end it { expect(page).to have_content pipeline.sha[0..7] } @@ -79,7 +77,7 @@ describe 'Commits' do describe 'Commit builds', :js do before do - visit ci_status_path(pipeline) + visit pipeline_path(pipeline) end it 'shows pipeline`s data' do @@ -95,7 +93,7 @@ describe 'Commits' do end it do - visit ci_status_path(pipeline) + visit 
pipeline_path(pipeline) click_on 'Download artifacts' expect(page.response_headers['Content-Type']).to eq(artifacts_file.content_type) end @@ -103,7 +101,7 @@ describe 'Commits' do describe 'Cancel all builds' do it 'cancels commit', :js do - visit ci_status_path(pipeline) + visit pipeline_path(pipeline) click_on 'Cancel running' expect(page).to have_content 'canceled' end @@ -111,7 +109,7 @@ describe 'Commits' do describe 'Cancel build' do it 'cancels build', :js do - visit ci_status_path(pipeline) + visit pipeline_path(pipeline) find('.js-btn-cancel-pipeline').click expect(page).to have_content 'canceled' end @@ -120,13 +118,13 @@ describe 'Commits' do describe '.gitlab-ci.yml not found warning' do context 'ci builds enabled' do it "does not show warning" do - visit ci_status_path(pipeline) + visit pipeline_path(pipeline) expect(page).not_to have_content '.gitlab-ci.yml not found in this commit' end it 'shows warning' do stub_ci_pipeline_yaml_file(nil) - visit ci_status_path(pipeline) + visit pipeline_path(pipeline) expect(page).to have_content '.gitlab-ci.yml not found in this commit' end end @@ -135,7 +133,7 @@ describe 'Commits' do before do stub_ci_builds_disabled stub_ci_pipeline_yaml_file(nil) - visit ci_status_path(pipeline) + visit pipeline_path(pipeline) end it 'does not show warning' do @@ -149,7 +147,7 @@ describe 'Commits' do before do project.team << [user, :reporter] build.update_attributes(artifacts_file: artifacts_file) - visit ci_status_path(pipeline) + visit pipeline_path(pipeline) end it 'Renders header', :js do @@ -171,7 +169,7 @@ describe 'Commits' do visibility_level: Gitlab::VisibilityLevel::INTERNAL, public_builds: false) build.update_attributes(artifacts_file: artifacts_file) - visit ci_status_path(pipeline) + visit pipeline_path(pipeline) end it do @@ -202,5 +200,12 @@ describe 'Commits' do expect(page).to have_content("committed #{commit.committed_date.strftime("%b %d, %Y")}") end end + + it 'shows the ref switcher with the multi-file editor enabled', :js do + set_cookie('new_repo', 'true') + visit project_commits_path(project, branch_name) + + expect(find('.js-project-refs-dropdown')).to have_content branch_name + end end end diff --git a/spec/fixtures/api/schemas/issue.json b/spec/fixtures/api/schemas/issue.json index a55ecaa5697..b579e32c9aa 100644 --- a/spec/fixtures/api/schemas/issue.json +++ b/spec/fixtures/api/schemas/issue.json @@ -13,6 +13,8 @@ "confidential": { "type": "boolean" }, "due_date": { "type": ["date", "null"] }, "relative_position": { "type": "integer" }, + "issue_sidebar_endpoint": { "type": "string" }, + "toggle_subscription_endpoint": { "type": "string" }, "project": { "id": { "type": "integer" }, "path": { "type": "string" } diff --git a/spec/helpers/tree_helper_spec.rb b/spec/helpers/tree_helper_spec.rb index d7b66e6f078..c358ccae9c3 100644 --- a/spec/helpers/tree_helper_spec.rb +++ b/spec/helpers/tree_helper_spec.rb @@ -1,10 +1,36 @@ require 'spec_helper' describe TreeHelper do + let(:project) { create(:project, :repository) } + let(:repository) { project.repository } + let(:sha) { 'ce369011c189f62c815f5971d096b26759bab0d1' } + + describe '.render_tree' do + before do + @id = sha + @project = project + end + + it 'displays all entries without a warning' do + tree = repository.tree(sha, 'files') + + html = render_tree(tree) + + expect(html).not_to have_selector('.tree-truncated-warning') + end + + it 'truncates entries and adds a warning' do + stub_const('TreeHelper::FILE_LIMIT', 1) + tree = repository.tree(sha, 'files') + + html = 
render_tree(tree) + + expect(html).to have_selector('.tree-truncated-warning', count: 1) + expect(html).to have_selector('.tree-item-file-name', count: 1) + end + end + describe 'flatten_tree' do - let(:project) { create(:project, :repository) } - let(:repository) { project.repository } - let(:sha) { 'ce369011c189f62c815f5971d096b26759bab0d1' } let(:tree) { repository.tree(sha, 'files') } let(:root_path) { 'files' } let(:tree_item) { tree.entries.find { |entry| entry.path == path } } diff --git a/spec/javascripts/behaviors/gl_emoji/unicode_support_map_spec.js b/spec/javascripts/behaviors/gl_emoji/unicode_support_map_spec.js index ec2c549e032..f96f20ed4a5 100644 --- a/spec/javascripts/behaviors/gl_emoji/unicode_support_map_spec.js +++ b/spec/javascripts/behaviors/gl_emoji/unicode_support_map_spec.js @@ -21,13 +21,18 @@ describe('Unicode Support Map', () => { }); it('should call .getItem and .setItem', () => { - const allArgs = window.localStorage.setItem.calls.allArgs(); - - expect(window.localStorage.getItem).toHaveBeenCalledWith('gl-emoji-user-agent'); - expect(allArgs[0][0]).toBe('gl-emoji-user-agent'); - expect(allArgs[0][1]).toBe(navigator.userAgent); - expect(allArgs[1][0]).toBe('gl-emoji-unicode-support-map'); - expect(allArgs[1][1]).toBe(stringSupportMap); + const getArgs = window.localStorage.getItem.calls.allArgs(); + const setArgs = window.localStorage.setItem.calls.allArgs(); + + expect(getArgs[0][0]).toBe('gl-emoji-version'); + expect(getArgs[1][0]).toBe('gl-emoji-user-agent'); + + expect(setArgs[0][0]).toBe('gl-emoji-version'); + expect(setArgs[0][1]).toBe('0.2.0'); + expect(setArgs[1][0]).toBe('gl-emoji-user-agent'); + expect(setArgs[1][1]).toBe(navigator.userAgent); + expect(setArgs[2][0]).toBe('gl-emoji-unicode-support-map'); + expect(setArgs[2][1]).toBe(stringSupportMap); }); }); diff --git a/spec/javascripts/boards/board_card_spec.js b/spec/javascripts/boards/board_card_spec.js index 83b13b06dc1..8f607899b20 100644 --- a/spec/javascripts/boards/board_card_spec.js +++ b/spec/javascripts/boards/board_card_spec.js @@ -9,10 +9,11 @@ import Vue from 'vue'; import '~/boards/models/assignee'; +import eventHub from '~/boards/eventhub'; import '~/boards/models/list'; import '~/boards/models/label'; import '~/boards/stores/boards_store'; -import boardCard from '~/boards/components/board_card'; +import boardCard from '~/boards/components/board_card.vue'; import './mock_data'; describe('Board card', () => { @@ -157,33 +158,35 @@ describe('Board card', () => { }); it('sets detail issue to card issue on mouse up', () => { + spyOn(eventHub, '$emit'); + triggerEvent('mousedown'); triggerEvent('mouseup'); - expect(gl.issueBoards.BoardsStore.detail.issue).toEqual(vm.issue); + expect(eventHub.$emit).toHaveBeenCalledWith('newDetailIssue', vm.issue); expect(gl.issueBoards.BoardsStore.detail.list).toEqual(vm.list); }); it('adds active class if detail issue is set', (done) => { - triggerEvent('mousedown'); - triggerEvent('mouseup'); - - setTimeout(() => { - expect(vm.$el.classList.contains('is-active')).toBe(true); - done(); - }, 0); + vm.detailIssue.issue = vm.issue; + + Vue.nextTick() + .then(() => { + expect(vm.$el.classList.contains('is-active')).toBe(true); + }) + .then(done) + .catch(done.fail); }); it('resets detail issue to empty if already set', () => { - triggerEvent('mousedown'); - triggerEvent('mouseup'); + spyOn(eventHub, '$emit'); - expect(gl.issueBoards.BoardsStore.detail.issue).toEqual(vm.issue); + gl.issueBoards.BoardsStore.detail.issue = vm.issue; triggerEvent('mousedown'); 
triggerEvent('mouseup'); - expect(gl.issueBoards.BoardsStore.detail.issue).toEqual({}); + expect(eventHub.$emit).toHaveBeenCalledWith('clearDetailIssue'); }); }); }); diff --git a/spec/javascripts/boards/issue_spec.js b/spec/javascripts/boards/issue_spec.js index 022d286d5df..ccde657789a 100644 --- a/spec/javascripts/boards/issue_spec.js +++ b/spec/javascripts/boards/issue_spec.js @@ -133,6 +133,19 @@ describe('Issue model', () => { expect(relativePositionIssue.position).toBe(1); }); + it('updates data', () => { + issue.updateData({ subscribed: true }); + expect(issue.subscribed).toBe(true); + }); + + it('sets fetching state', () => { + expect(issue.isFetching.subscriptions).toBe(true); + + issue.setFetchingState('subscriptions', false); + + expect(issue.isFetching.subscriptions).toBe(false); + }); + describe('update', () => { it('passes assignee ids when there are assignees', (done) => { spyOn(Vue.http, 'patch').and.callFake((url, data) => { diff --git a/spec/javascripts/emoji_spec.js b/spec/javascripts/emoji_spec.js index fa11c602ec3..124d91f4477 100644 --- a/spec/javascripts/emoji_spec.js +++ b/spec/javascripts/emoji_spec.js @@ -1,6 +1,7 @@ import { glEmojiTag } from '~/emoji'; import isEmojiUnicodeSupported, { isFlagEmoji, + isRainbowFlagEmoji, isKeycapEmoji, isSkinToneComboEmoji, isHorceRacingSkinToneComboEmoji, @@ -217,6 +218,24 @@ describe('gl_emoji', () => { }); }); + describe('isRainbowFlagEmoji', () => { + it('should gracefully handle empty string', () => { + expect(isRainbowFlagEmoji('')).toBeFalsy(); + }); + it('should detect rainbow_flag', () => { + expect(isRainbowFlagEmoji('🏳🌈')).toBeTruthy(); + }); + it('should not detect flag_white on its\' own', () => { + expect(isRainbowFlagEmoji('🏳')).toBeFalsy(); + }); + it('should not detect rainbow on its\' own', () => { + expect(isRainbowFlagEmoji('🌈')).toBeFalsy(); + }); + it('should not detect flag_white with something else', () => { + expect(isRainbowFlagEmoji('🏳🔵')).toBeFalsy(); + }); + }); + describe('isKeycapEmoji', () => { it('should gracefully handle empty string', () => { expect(isKeycapEmoji('')).toBeFalsy(); diff --git a/spec/javascripts/issue_show/components/app_spec.js b/spec/javascripts/issue_show/components/app_spec.js index 2ea290108a4..5662c7387fb 100644 --- a/spec/javascripts/issue_show/components/app_spec.js +++ b/spec/javascripts/issue_show/components/app_spec.js @@ -223,23 +223,46 @@ describe('Issuable output', () => { }); }); - it('closes form on error', (done) => { - spyOn(window, 'Flash').and.callThrough(); - spyOn(vm.service, 'updateIssuable').and.callFake(() => new Promise((resolve, reject) => { - reject(); - })); + describe('error when updating', () => { + beforeEach(() => { + spyOn(window, 'Flash').and.callThrough(); + spyOn(vm.service, 'updateIssuable').and.callFake(() => new Promise((resolve, reject) => { + reject(); + })); + }); - vm.updateIssuable(); + it('closes form on error', (done) => { + vm.updateIssuable(); - setTimeout(() => { - expect( - eventHub.$emit, - ).toHaveBeenCalledWith('close.form'); - expect( - window.Flash, - ).toHaveBeenCalledWith('Error updating issue'); + setTimeout(() => { + expect( + eventHub.$emit, + ).toHaveBeenCalledWith('close.form'); + expect( + window.Flash, + ).toHaveBeenCalledWith('Error updating issue'); - done(); + done(); + }); + }); + + it('returns the correct error message for issuableType', (done) => { + vm.issuableType = 'merge request'; + + Vue.nextTick(() => { + vm.updateIssuable(); + + setTimeout(() => { + expect( + eventHub.$emit, + 
).toHaveBeenCalledWith('close.form'); + expect( + window.Flash, + ).toHaveBeenCalledWith('Error updating merge request'); + + done(); + }); + }); }); }); }); diff --git a/spec/javascripts/issue_show/components/edit_actions_spec.js b/spec/javascripts/issue_show/components/edit_actions_spec.js index f6625b748b6..d779ab7bb31 100644 --- a/spec/javascripts/issue_show/components/edit_actions_spec.js +++ b/spec/javascripts/issue_show/components/edit_actions_spec.js @@ -61,6 +61,15 @@ describe('Edit Actions components', () => { }); }); + it('should not show delete button if showDeleteButton is false', (done) => { + vm.showDeleteButton = false; + + Vue.nextTick(() => { + expect(vm.$el.querySelector('.btn-danger')).toBeNull(); + done(); + }); + }); + describe('updateIssuable', () => { it('sends update.issauble event when clicking save button', () => { vm.$el.querySelector('.btn-save').click(); diff --git a/spec/javascripts/jobs/job_details_mediator_spec.js b/spec/javascripts/jobs/job_details_mediator_spec.js index 1d7fa7e12fc..3069a0cd60e 100644 --- a/spec/javascripts/jobs/job_details_mediator_spec.js +++ b/spec/javascripts/jobs/job_details_mediator_spec.js @@ -1,39 +1,35 @@ -import Vue from 'vue'; +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; import JobMediator from '~/jobs/job_details_mediator'; import job from './mock_data'; describe('JobMediator', () => { let mediator; + let mock; beforeEach(() => { - mediator = new JobMediator({ endpoint: 'foo' }); + mediator = new JobMediator({ endpoint: 'jobs/40291672.json' }); + mock = new MockAdapter(axios); }); it('should set defaults', () => { expect(mediator.store).toBeDefined(); expect(mediator.service).toBeDefined(); - expect(mediator.options).toEqual({ endpoint: 'foo' }); + expect(mediator.options).toEqual({ endpoint: 'jobs/40291672.json' }); expect(mediator.state.isLoading).toEqual(false); }); describe('request and store data', () => { - const interceptor = (request, next) => { - next(request.respondWith(JSON.stringify(job), { - status: 200, - })); - }; - beforeEach(() => { - Vue.http.interceptors.push(interceptor); + mock.onGet().reply(200, job, {}); }); afterEach(() => { - Vue.http.interceptors = _.without(Vue.http.interceptor, interceptor); + mock.restore(); }); it('should store received data', (done) => { mediator.fetchJob(); - setTimeout(() => { expect(mediator.store.state.job).toEqual(job); done(); diff --git a/spec/javascripts/monitoring/graph_path_spec.js b/spec/javascripts/monitoring/graph_path_spec.js index 8ece913ada8..c83bd19345f 100644 --- a/spec/javascripts/monitoring/graph_path_spec.js +++ b/spec/javascripts/monitoring/graph_path_spec.js @@ -32,4 +32,21 @@ describe('Monitoring Paths', () => { expect(metricLine.getAttribute('stroke')).toBe('#1f78d1'); expect(metricLine.getAttribute('d')).toBe(firstTimeSeries.linePath); }); + + describe('Computed properties', () => { + it('strokeDashArray', () => { + const component = createComponent({ + generatedLinePath: firstTimeSeries.linePath, + generatedAreaPath: firstTimeSeries.areaPath, + lineColor: firstTimeSeries.lineColor, + areaColor: firstTimeSeries.areaColor, + }); + + component.lineStyle = 'dashed'; + expect(component.strokeDashArray).toBe('3, 1'); + + component.lineStyle = 'dotted'; + expect(component.strokeDashArray).toBe('1, 1'); + }); + }); }); diff --git a/spec/javascripts/new_branch_spec.js b/spec/javascripts/new_branch_spec.js index c57f44dae17..50a5e4ff056 100644 --- a/spec/javascripts/new_branch_spec.js +++ 
b/spec/javascripts/new_branch_spec.js @@ -1,7 +1,6 @@ /* eslint-disable space-before-function-paren, one-var, no-var, one-var-declaration-per-line, no-return-assign, quotes, max-len */ -/* global NewBranchForm */ -import '~/new_branch_form'; +import NewBranchForm from '~/new_branch_form'; (function() { describe('Branch', function() { diff --git a/spec/javascripts/vue_mr_widget/mock_data.js b/spec/javascripts/vue_mr_widget/mock_data.js index 0795d0aaa82..1ad7c2d8efa 100644 --- a/spec/javascripts/vue_mr_widget/mock_data.js +++ b/spec/javascripts/vue_mr_widget/mock_data.js @@ -202,7 +202,6 @@ export default { "revert_in_fork_path": "/root/acets-app/forks?continue%5Bnotice%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+has+been+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.+Try+to+cherry-pick+this+commit+again.&continue%5Bnotice_now%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+is+being+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.&continue%5Bto%5D=%2Froot%2Facets-app%2Fmerge_requests%2F22&namespace_key=1", "email_patches_path": "/root/acets-app/merge_requests/22.patch", "plain_diff_path": "/root/acets-app/merge_requests/22.diff", - "ci_status_path": "/root/acets-app/merge_requests/22/ci_status", "status_path": "/root/acets-app/merge_requests/22.json", "merge_check_path": "/root/acets-app/merge_requests/22/merge_check", "ci_environments_status_url": "/root/acets-app/merge_requests/22/ci_environments_status", diff --git a/spec/javascripts/vue_shared/components/issue/issue_warning_spec.js b/spec/javascripts/vue_shared/components/issue/issue_warning_spec.js index 2cf4d8e00ed..24484796bf1 100644 --- a/spec/javascripts/vue_shared/components/issue/issue_warning_spec.js +++ b/spec/javascripts/vue_shared/components/issue/issue_warning_spec.js @@ -16,7 +16,7 @@ describe('Issue Warning Component', () => { isLocked: true, }); - expect(vm.$el.querySelector('i').className).toEqual('fa icon fa-lock'); + expect(vm.$el.querySelector('.icon use').href.baseVal).toMatch(/lock$/); expect(formatWarning(vm.$el.querySelector('span').textContent)).toEqual('This issue is locked. Only project members can comment.'); }); }); @@ -27,7 +27,7 @@ describe('Issue Warning Component', () => { isConfidential: true, }); - expect(vm.$el.querySelector('i').className).toEqual('fa icon fa-eye-slash'); + expect(vm.$el.querySelector('.icon use').href.baseVal).toMatch(/eye-slash$/); expect(formatWarning(vm.$el.querySelector('span').textContent)).toEqual('This is a confidential issue. Your comment will not be visible to the public.'); }); }); @@ -39,7 +39,7 @@ describe('Issue Warning Component', () => { isConfidential: true, }); - expect(vm.$el.querySelector('i')).toBeFalsy(); + expect(vm.$el.querySelector('.icon')).toBeFalsy(); expect(formatWarning(vm.$el.querySelector('span').textContent)).toEqual('This issue is confidential and locked. 
People without permission will never get a notification and won\'t be able to comment.'); }); }); diff --git a/spec/javascripts/vue_shared/components/loading_button_spec.js b/spec/javascripts/vue_shared/components/loading_button_spec.js index 97c8a08fcdd..49bf8ee6f7c 100644 --- a/spec/javascripts/vue_shared/components/loading_button_spec.js +++ b/spec/javascripts/vue_shared/components/loading_button_spec.js @@ -66,6 +66,23 @@ describe('LoadingButton', function () { }); }); + describe('container class', () => { + it('should default to btn btn-align-content', () => { + vm = mountComponent(LoadingButton, {}); + expect(vm.$el.classList.contains('btn')).toEqual(true); + expect(vm.$el.classList.contains('btn-align-content')).toEqual(true); + }); + + it('should be configurable through props', () => { + vm = mountComponent(LoadingButton, { + containerClass: 'test-class', + }); + expect(vm.$el.classList.contains('btn')).toEqual(false); + expect(vm.$el.classList.contains('btn-align-content')).toEqual(false); + expect(vm.$el.classList.contains('test-class')).toEqual(true); + }); + }); + describe('click callback prop', () => { it('calls given callback when normal', () => { vm = mountComponent(LoadingButton, { @@ -81,7 +98,6 @@ describe('LoadingButton', function () { it('does not call given callback when disabled because of loading', () => { vm = mountComponent(LoadingButton, { loading: true, - indeterminate: true, }); spyOn(vm, '$emit'); diff --git a/spec/javascripts/vue_shared/components/markdown/toolbar_spec.js b/spec/javascripts/vue_shared/components/markdown/toolbar_spec.js new file mode 100644 index 00000000000..818ef0af3c2 --- /dev/null +++ b/spec/javascripts/vue_shared/components/markdown/toolbar_spec.js @@ -0,0 +1,37 @@ +import Vue from 'vue'; +import toolbar from '~/vue_shared/components/markdown/toolbar.vue'; +import mountComponent from '../../../helpers/vue_mount_component_helper'; + +describe('toolbar', () => { + let vm; + const Toolbar = Vue.extend(toolbar); + const props = { + markdownDocsPath: '', + }; + + afterEach(() => { + vm.$destroy(); + }); + + describe('user can attach file', () => { + beforeEach(() => { + vm = mountComponent(Toolbar, props); + }); + + it('should render uploading-container', () => { + expect(vm.$el.querySelector('.uploading-container')).not.toBeNull(); + }); + }); + + describe('user cannot attach file', () => { + beforeEach(() => { + vm = mountComponent(Toolbar, Object.assign({}, props, { + canAttachFile: false, + })); + }); + + it('should not render uploading-container', () => { + expect(vm.$el.querySelector('.uploading-container')).toBeNull(); + }); + }); +}); diff --git a/spec/lib/container_registry/path_spec.rb b/spec/lib/container_registry/path_spec.rb index 84cacdd3f0d..010deae822c 100644 --- a/spec/lib/container_registry/path_spec.rb +++ b/spec/lib/container_registry/path_spec.rb @@ -86,6 +86,24 @@ describe ContainerRegistry::Path do it { is_expected.to be_valid } end + + context 'when path contains double underscore' do + let(:path) { 'my/repository__name' } + + it { is_expected.to be_valid } + end + + context 'when path contains invalid separator with dot' do + let(:path) { 'some/registry-.name' } + + it { is_expected.not_to be_valid } + end + + context 'when path contains invalid separator with underscore' do + let(:path) { 'some/registry._name' } + + it { is_expected.not_to be_valid } + end end describe '#has_repository?' 
do diff --git a/spec/lib/gitlab/auth/request_authenticator_spec.rb b/spec/lib/gitlab/auth/request_authenticator_spec.rb new file mode 100644 index 00000000000..ffcd90b9fcb --- /dev/null +++ b/spec/lib/gitlab/auth/request_authenticator_spec.rb @@ -0,0 +1,67 @@ +require 'spec_helper' + +describe Gitlab::Auth::RequestAuthenticator do + let(:env) do + { + 'rack.input' => '', + 'REQUEST_METHOD' => 'GET' + } + end + let(:request) { ActionDispatch::Request.new(env) } + + subject { described_class.new(request) } + + describe '#user' do + let!(:sessionless_user) { build(:user) } + let!(:session_user) { build(:user) } + + it 'returns sessionless user first' do + allow_any_instance_of(described_class).to receive(:find_sessionless_user).and_return(sessionless_user) + allow_any_instance_of(described_class).to receive(:find_user_from_warden).and_return(session_user) + + expect(subject.user).to eq sessionless_user + end + + it 'returns session user if no sessionless user found' do + allow_any_instance_of(described_class).to receive(:find_user_from_warden).and_return(session_user) + + expect(subject.user).to eq session_user + end + + it 'returns nil if no user found' do + expect(subject.user).to be_blank + end + + it 'bubbles up exceptions' do + allow_any_instance_of(described_class).to receive(:find_user_from_warden).and_raise(Gitlab::Auth::UnauthorizedError) + end + end + + describe '#find_sessionless_user' do + let!(:access_token_user) { build(:user) } + let!(:rss_token_user) { build(:user) } + + it 'returns access_token user first' do + allow_any_instance_of(described_class).to receive(:find_user_from_access_token).and_return(access_token_user) + allow_any_instance_of(described_class).to receive(:find_user_from_rss_token).and_return(rss_token_user) + + expect(subject.find_sessionless_user).to eq access_token_user + end + + it 'returns rss_token user if no access_token user found' do + allow_any_instance_of(described_class).to receive(:find_user_from_rss_token).and_return(rss_token_user) + + expect(subject.find_sessionless_user).to eq rss_token_user + end + + it 'returns nil if no user found' do + expect(subject.find_sessionless_user).to be_blank + end + + it 'rescue Gitlab::Auth::AuthenticationError exceptions' do + allow_any_instance_of(described_class).to receive(:find_user_from_access_token).and_raise(Gitlab::Auth::UnauthorizedError) + + expect(subject.find_sessionless_user).to be_blank + end + end +end diff --git a/spec/lib/gitlab/auth/user_auth_finders_spec.rb b/spec/lib/gitlab/auth/user_auth_finders_spec.rb new file mode 100644 index 00000000000..4637816570c --- /dev/null +++ b/spec/lib/gitlab/auth/user_auth_finders_spec.rb @@ -0,0 +1,194 @@ +require 'spec_helper' + +describe Gitlab::Auth::UserAuthFinders do + include described_class + + let(:user) { create(:user) } + let(:env) do + { + 'rack.input' => '' + } + end + let(:request) { Rack::Request.new(env)} + + def set_param(key, value) + request.update_param(key, value) + end + + describe '#find_user_from_warden' do + context 'with CSRF token' do + before do + allow(Gitlab::RequestForgeryProtection).to receive(:verified?).and_return(true) + end + + context 'with invalid credentials' do + it 'returns nil' do + expect(find_user_from_warden).to be_nil + end + end + + context 'with valid credentials' do + it 'returns the user' do + env['warden'] = double("warden", authenticate: user) + + expect(find_user_from_warden).to eq user + end + end + end + + context 'without CSRF token' do + it 'returns nil' do + allow(Gitlab::RequestForgeryProtection).to 
receive(:verified?).and_return(false) + env['warden'] = double("warden", authenticate: user) + + expect(find_user_from_warden).to be_nil + end + end + end + + describe '#find_user_from_rss_token' do + context 'when the request format is atom' do + before do + env['HTTP_ACCEPT'] = 'application/atom+xml' + end + + it 'returns user if valid rss_token' do + set_param(:rss_token, user.rss_token) + + expect(find_user_from_rss_token).to eq user + end + + it 'returns nil if rss_token is blank' do + expect(find_user_from_rss_token).to be_nil + end + + it 'returns exception if invalid rss_token' do + set_param(:rss_token, 'invalid_token') + + expect { find_user_from_rss_token }.to raise_error(Gitlab::Auth::UnauthorizedError) + end + end + + context 'when the request format is not atom' do + it 'returns nil' do + set_param(:rss_token, user.rss_token) + + expect(find_user_from_rss_token).to be_nil + end + end + end + + describe '#find_user_from_access_token' do + let(:personal_access_token) { create(:personal_access_token, user: user) } + + it 'returns nil if no access_token present' do + expect(find_personal_access_token).to be_nil + end + + context 'when validate_access_token! returns valid' do + it 'returns user' do + env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = personal_access_token.token + + expect(find_user_from_access_token).to eq user + end + + it 'returns exception if token has no user' do + env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = personal_access_token.token + allow_any_instance_of(PersonalAccessToken).to receive(:user).and_return(nil) + + expect { find_user_from_access_token }.to raise_error(Gitlab::Auth::UnauthorizedError) + end + end + end + + describe '#find_personal_access_token' do + let(:personal_access_token) { create(:personal_access_token, user: user) } + + context 'passed as header' do + it 'returns token if valid personal_access_token' do + env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = personal_access_token.token + + expect(find_personal_access_token).to eq personal_access_token + end + end + + context 'passed as param' do + it 'returns token if valid personal_access_token' do + set_param(Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_PARAM, personal_access_token.token) + + expect(find_personal_access_token).to eq personal_access_token + end + end + + it 'returns nil if no personal_access_token' do + expect(find_personal_access_token).to be_nil + end + + it 'returns exception if invalid personal_access_token' do + env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = 'invalid_token' + + expect { find_personal_access_token }.to raise_error(Gitlab::Auth::UnauthorizedError) + end + end + + describe '#find_oauth_access_token' do + let(:application) { Doorkeeper::Application.create!(name: 'MyApp', redirect_uri: 'https://app.com', owner: user) } + let(:token) { Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: user.id, scopes: 'api') } + + context 'passed as header' do + it 'returns token if valid oauth_access_token' do + env['HTTP_AUTHORIZATION'] = "Bearer #{token.token}" + + expect(find_oauth_access_token.token).to eq token.token + end + end + + context 'passed as param' do + it 'returns user if valid oauth_access_token' do + set_param(:access_token, token.token) + + expect(find_oauth_access_token.token).to eq token.token + end + end + + it 'returns nil if no oauth_access_token' do + expect(find_oauth_access_token).to be_nil + end + + it 'returns exception if invalid oauth_access_token' do + 
env['HTTP_AUTHORIZATION'] = "Bearer invalid_token" + + expect { find_oauth_access_token }.to raise_error(Gitlab::Auth::UnauthorizedError) + end + end + + describe '#validate_access_token!' do + let(:personal_access_token) { create(:personal_access_token, user: user) } + + it 'returns nil if no access_token present' do + expect(validate_access_token!).to be_nil + end + + context 'token is not valid' do + before do + allow_any_instance_of(described_class).to receive(:access_token).and_return(personal_access_token) + end + + it 'returns Gitlab::Auth::ExpiredError if token expired' do + personal_access_token.expires_at = 1.day.ago + + expect { validate_access_token! }.to raise_error(Gitlab::Auth::ExpiredError) + end + + it 'returns Gitlab::Auth::RevokedError if token revoked' do + personal_access_token.revoke! + + expect { validate_access_token! }.to raise_error(Gitlab::Auth::RevokedError) + end + + it 'returns Gitlab::Auth::InsufficientScopeError if invalid token scope' do + expect { validate_access_token!(scopes: [:sudo]) }.to raise_error(Gitlab::Auth::InsufficientScopeError) + end + end + end +end diff --git a/spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb b/spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb index 2c2684a6fc9..994992f79d4 100644 --- a/spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb +++ b/spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb @@ -3,12 +3,9 @@ require 'spec_helper' describe Gitlab::BackgroundMigration::PopulateForkNetworksRange, :migration, schema: 20170929131201 do let(:migration) { described_class.new } let(:base1) { create(:project) } - let(:base1_fork1) { create(:project) } - let(:base1_fork2) { create(:project) } let(:base2) { create(:project) } let(:base2_fork1) { create(:project) } - let(:base2_fork2) { create(:project) } let!(:forked_project_links) { table(:forked_project_links) } let!(:fork_networks) { table(:fork_networks) } @@ -21,21 +18,24 @@ describe Gitlab::BackgroundMigration::PopulateForkNetworksRange, :migration, sch # A normal fork link forked_project_links.create(id: 1, forked_from_project_id: base1.id, - forked_to_project_id: base1_fork1.id) + forked_to_project_id: create(:project).id) forked_project_links.create(id: 2, forked_from_project_id: base1.id, - forked_to_project_id: base1_fork2.id) - + forked_to_project_id: create(:project).id) forked_project_links.create(id: 3, forked_from_project_id: base2.id, forked_to_project_id: base2_fork1.id) + + # create a fork of a fork forked_project_links.create(id: 4, forked_from_project_id: base2_fork1.id, forked_to_project_id: create(:project).id) - forked_project_links.create(id: 5, - forked_from_project_id: base2.id, - forked_to_project_id: base2_fork2.id) + forked_from_project_id: create(:project).id, + forked_to_project_id: create(:project).id) + + # Stub out the calls to the other migrations + allow(BackgroundMigrationWorker).to receive(:perform_in) migration.perform(1, 3) end @@ -80,11 +80,11 @@ describe Gitlab::BackgroundMigration::PopulateForkNetworksRange, :migration, sch end it 'only processes a single batch of links at a time' do - expect(fork_network_members.count).to eq(5) + expect(fork_networks.count).to eq(2) migration.perform(3, 5) - expect(fork_network_members.count).to eq(7) + expect(fork_networks.count).to eq(3) end it 'can be repeated without effect' do diff --git a/spec/migrations/populate_merge_requests_latest_merge_request_diff_id_spec.rb 
b/spec/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id_spec.rb index 4ea7f441f7c..0cb753c5853 100644 --- a/spec/migrations/populate_merge_requests_latest_merge_request_diff_id_spec.rb +++ b/spec/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id_spec.rb @@ -1,7 +1,6 @@ require 'spec_helper' -require Rails.root.join('db', 'post_migrate', '20171026082505_populate_merge_requests_latest_merge_request_diff_id') -describe PopulateMergeRequestsLatestMergeRequestDiffId, :migration do +describe Gitlab::BackgroundMigration::PopulateMergeRequestsLatestMergeRequestDiffId, :migration, schema: 20171026082505 do let(:projects_table) { table(:projects) } let(:merge_requests_table) { table(:merge_requests) } let(:merge_request_diffs_table) { table(:merge_request_diffs) } @@ -27,30 +26,32 @@ describe PopulateMergeRequestsLatestMergeRequestDiffId, :migration do merge_request_diffs_table.where(merge_request_id: merge_request.id) end - describe '#up' do + describe '#perform' do it 'ignores MRs without diffs' do merge_request_without_diff = create_mr!('without_diff') + mr_id = merge_request_without_diff.id expect(merge_request_without_diff.latest_merge_request_diff_id).to be_nil - expect { migrate! } + expect { subject.perform(mr_id, mr_id) } .not_to change { merge_request_without_diff.reload.latest_merge_request_diff_id } end it 'ignores MRs that have a diff ID already set' do merge_request_with_multiple_diffs = create_mr!('with_multiple_diffs', diffs: 3) diff_id = diffs_for(merge_request_with_multiple_diffs).minimum(:id) + mr_id = merge_request_with_multiple_diffs.id merge_request_with_multiple_diffs.update!(latest_merge_request_diff_id: diff_id) - expect { migrate! } + expect { subject.perform(mr_id, mr_id) } .not_to change { merge_request_with_multiple_diffs.reload.latest_merge_request_diff_id } end it 'migrates multiple MR diffs to the correct values' do merge_requests = Array.new(3).map.with_index { |_, i| create_mr!(i, diffs: 3) } - migrate! 
+ subject.perform(merge_requests.first.id, merge_requests.last.id) merge_requests.each do |merge_request| expect(merge_request.reload.latest_merge_request_diff_id) diff --git a/spec/lib/gitlab/bitbucket_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importer_spec.rb index a66347ead76..a6a1d9e619f 100644 --- a/spec/lib/gitlab/bitbucket_import/importer_spec.rb +++ b/spec/lib/gitlab/bitbucket_import/importer_spec.rb @@ -54,11 +54,13 @@ describe Gitlab::BitbucketImport::Importer do create( :project, import_source: project_identifier, + import_url: "https://bitbucket.org/#{project_identifier}.git", import_data_attributes: { credentials: data } ) end let(:importer) { described_class.new(project) } + let(:gitlab_shell) { double } let(:issues_statuses_sample_data) do { @@ -67,6 +69,10 @@ describe Gitlab::BitbucketImport::Importer do } end + before do + allow(importer).to receive(:gitlab_shell) { gitlab_shell } + end + context 'issues statuses' do before do # HACK: Bitbucket::Representation.const_get('Issue') seems to return ::Issue without this @@ -110,15 +116,36 @@ describe Gitlab::BitbucketImport::Importer do end it 'maps statuses to open or closed' do + allow(importer).to receive(:import_wiki) + importer.execute expect(project.issues.where(state: "closed").size).to eq(5) expect(project.issues.where(state: "opened").size).to eq(2) end - it 'calls import_wiki' do - expect(importer).to receive(:import_wiki) - importer.execute + describe 'wiki import' do + it 'is skipped when the wiki exists' do + expect(project.wiki).to receive(:repository_exists?) { true } + expect(importer.gitlab_shell).not_to receive(:import_repository) + + importer.execute + + expect(importer.errors).to be_empty + end + + it 'imports to the project disk_path' do + expect(project.wiki).to receive(:repository_exists?) { false } + expect(importer.gitlab_shell).to receive(:import_repository).with( + project.repository_storage_path, + project.wiki.disk_path, + project.import_url + '/wiki' + ) + + importer.execute + + expect(importer.errors).to be_empty + end end end end diff --git a/spec/lib/gitlab/conflict/file_spec.rb b/spec/lib/gitlab/conflict/file_spec.rb index bf981d2f6f6..92792144429 100644 --- a/spec/lib/gitlab/conflict/file_spec.rb +++ b/spec/lib/gitlab/conflict/file_spec.rb @@ -84,6 +84,13 @@ describe Gitlab::Conflict::File do expect(line.text).to eq(html_to_text(line.rich_text)) end end + + # This spec will break if Rouge's highlighting changes, but we need to + # ensure that the lines are actually highlighted. 
+ it 'highlights the lines correctly' do + expect(conflict_file.lines.first.rich_text) + .to eq("<span id=\"LC1\" class=\"line\" lang=\"ruby\"><span class=\"k\">module</span> <span class=\"nn\">Gitlab</span></span>\n") + end end describe '#sections' do diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb index 8922370b0a0..e850b5cd6a4 100644 --- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb +++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb @@ -87,6 +87,14 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :tr subject.move_project_folders(project, 'known-parent/the-path', 'known-parent/the-path0') end + it 'does not move the repositories when hashed storage is enabled' do + project.update!(storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) + + expect(subject).not_to receive(:move_repository) + + subject.move_project_folders(project, 'known-parent/the-path', 'known-parent/the-path0') + end + it 'moves uploads' do expect(subject).to receive(:move_uploads) .with('known-parent/the-path', 'known-parent/the-path0') @@ -94,6 +102,14 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :tr subject.move_project_folders(project, 'known-parent/the-path', 'known-parent/the-path0') end + it 'does not move uploads when hashed storage is enabled for attachments' do + project.update!(storage_version: Project::HASHED_STORAGE_FEATURES[:attachments]) + + expect(subject).not_to receive(:move_uploads) + + subject.move_project_folders(project, 'known-parent/the-path', 'known-parent/the-path0') + end + it 'moves pages' do expect(subject).to receive(:move_pages) .with('known-parent/the-path', 'known-parent/the-path0') diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb index c91895cedc3..ff9acfd08b9 100644 --- a/spec/lib/gitlab/diff/file_spec.rb +++ b/spec/lib/gitlab/diff/file_spec.rb @@ -116,12 +116,8 @@ describe Gitlab::Diff::File do end context 'when renamed' do - let(:commit) { project.commit('6907208d755b60ebeacb2e9dfea74c92c3449a1f') } - let(:diff_file) { commit.diffs.diff_file_with_new_path('files/js/commit.coffee') } - - before do - allow(diff_file.new_blob).to receive(:id).and_return(diff_file.old_blob.id) - end + let(:commit) { project.commit('94bb47ca1297b7b3731ff2a36923640991e9236f') } + let(:diff_file) { commit.diffs.diff_file_with_new_path('CHANGELOG.md') } it 'returns false' do expect(diff_file.content_changed?).to be_falsey diff --git a/spec/lib/gitlab/git/diff_collection_spec.rb b/spec/lib/gitlab/git/diff_collection_spec.rb index ee657101f4c..65edc750f39 100644 --- a/spec/lib/gitlab/git/diff_collection_spec.rb +++ b/spec/lib/gitlab/git/diff_collection_spec.rb @@ -487,6 +487,7 @@ describe Gitlab::Git::DiffCollection, seed_helper: true do loop do break if @count.zero? + # It is critical to decrement before yielding. We may never reach the lines after 'yield'. 
@count -= 1 yield @value diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb index 5d990b42c24..f0da77c61bb 100644 --- a/spec/lib/gitlab/git/repository_spec.rb +++ b/spec/lib/gitlab/git/repository_spec.rb @@ -629,16 +629,29 @@ describe Gitlab::Git::Repository, seed_helper: true do end describe '#remote_tags' do + let(:remote_name) { 'upstream' } let(:target_commit_id) { SeedRepo::Commit::ID } + let(:user) { create(:user) } + let(:tag_name) { 'v0.0.1' } + let(:tag_message) { 'My tag' } + let(:remote_repository) do + Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '') + end - subject { repository.remote_tags('upstream') } + subject { repository.remote_tags(remote_name) } - it 'gets the remote tags' do - expect(repository).to receive(:list_remote_tags).with('upstream') - .and_return(["#{target_commit_id}\trefs/tags/v0.0.1\n"]) + before do + repository.add_remote(remote_name, remote_repository.path) + remote_repository.add_tag(tag_name, user: user, target: target_commit_id) + end + + after do + ensure_seeds + end + it 'gets the remote tags' do expect(subject.first).to be_an_instance_of(Gitlab::Git::Tag) - expect(subject.first.name).to eq('v0.0.1') + expect(subject.first.name).to eq(tag_name) expect(subject.first.dereferenced_target.id).to eq(target_commit_id) end end @@ -1770,6 +1783,32 @@ describe Gitlab::Git::Repository, seed_helper: true do end end + describe '#delete_all_refs_except' do + let(:repository) do + Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '') + end + + before do + repository.write_ref("refs/delete/a", "0b4bc9a49b562e85de7cc9e834518ea6828729b9") + repository.write_ref("refs/also-delete/b", "12d65c8dd2b2676fa3ac47d955accc085a37a9c1") + repository.write_ref("refs/keep/c", "6473c90867124755509e100d0d35ebdc85a0b6ae") + repository.write_ref("refs/also-keep/d", "0b4bc9a49b562e85de7cc9e834518ea6828729b9") + end + + after do + ensure_seeds + end + + it 'deletes all refs except those with the specified prefixes' do + repository.delete_all_refs_except(%w(refs/keep refs/also-keep refs/heads)) + expect(repository.ref_exists?("refs/delete/a")).to be(false) + expect(repository.ref_exists?("refs/also-delete/b")).to be(false) + expect(repository.ref_exists?("refs/keep/c")).to be(true) + expect(repository.ref_exists?("refs/also-keep/d")).to be(true) + expect(repository.ref_exists?("refs/heads/master")).to be(true) + end + end + def create_remote_branch(repository, remote_name, branch_name, source_branch_name) source_branch = repository.branches.find { |branch| branch.name == source_branch_name } rugged = repository.rugged diff --git a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb index 8127b4842b7..951e146a30a 100644 --- a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb +++ b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb @@ -104,4 +104,17 @@ describe Gitlab::GitalyClient::RefService do expect { client.ref_exists?('reXXXXX') }.to raise_error(ArgumentError) end end + + describe '#delete_refs' do + let(:prefixes) { %w(refs/heads refs/keep-around) } + + it 'sends a delete_refs message' do + expect_any_instance_of(Gitaly::RefService::Stub) + .to receive(:delete_refs) + .with(gitaly_request_with_params(except_with_prefix: prefixes), kind_of(Hash)) + .and_return(double('delete_refs_response')) + + client.delete_refs(except_with_prefixes: prefixes) + end + end end diff --git a/spec/lib/gitlab/hook_data/issuable_builder_spec.rb 
b/spec/lib/gitlab/hook_data/issuable_builder_spec.rb index 30da56bec16..26529c4759d 100644 --- a/spec/lib/gitlab/hook_data/issuable_builder_spec.rb +++ b/spec/lib/gitlab/hook_data/issuable_builder_spec.rb @@ -41,7 +41,8 @@ describe Gitlab::HookData::IssuableBuilder do labels: [ [{ id: 1, title: 'foo' }], [{ id: 1, title: 'foo' }, { id: 2, title: 'bar' }] - ] + ], + total_time_spent: [1, 2] } end let(:data) { builder.build(user: user, changes: changes) } @@ -53,6 +54,10 @@ describe Gitlab::HookData::IssuableBuilder do labels: { previous: [{ id: 1, title: 'foo' }], current: [{ id: 1, title: 'foo' }, { id: 2, title: 'bar' }] + }, + total_time_spent: { + previous: 1, + current: 2 } })) end diff --git a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb index e4b4cf5ba85..c2bda6f8821 100644 --- a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb @@ -155,7 +155,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do end it 'has no source if source/target differ' do - expect(MergeRequest.find_by_title('MR2').source_project_id).to eq(-1) + expect(MergeRequest.find_by_title('MR2').source_project_id).to be_nil end end diff --git a/spec/lib/gitlab/middleware/go_spec.rb b/spec/lib/gitlab/middleware/go_spec.rb index 67121937398..60a134be939 100644 --- a/spec/lib/gitlab/middleware/go_spec.rb +++ b/spec/lib/gitlab/middleware/go_spec.rb @@ -127,6 +127,14 @@ describe Gitlab::Middleware::Go do include_examples 'go-get=1', enabled_protocol: nil end + + context 'with nothing disabled (blank string)' do + before do + stub_application_setting(enabled_git_access_protocol: '') + end + + include_examples 'go-get=1', enabled_protocol: nil + end end def go diff --git a/spec/lib/gitlab/o_auth/user_spec.rb b/spec/lib/gitlab/o_auth/user_spec.rb index c7471a21fda..2f19fb7312d 100644 --- a/spec/lib/gitlab/o_auth/user_spec.rb +++ b/spec/lib/gitlab/o_auth/user_spec.rb @@ -662,4 +662,13 @@ describe Gitlab::OAuth::User do end end end + + describe '.find_by_uid_and_provider' do + let!(:existing_user) { create(:omniauth_user, extern_uid: 'my-uid', provider: 'my-provider') } + + it 'normalizes extern_uid' do + allow(oauth_user.auth_hash).to receive(:uid).and_return('MY-UID') + expect(oauth_user.find_user).to eql gl_user + end + end end diff --git a/spec/migrations/remove_empty_fork_networks_spec.rb b/spec/migrations/remove_empty_fork_networks_spec.rb new file mode 100644 index 00000000000..cf6ae5cda74 --- /dev/null +++ b/spec/migrations/remove_empty_fork_networks_spec.rb @@ -0,0 +1,24 @@ +require 'spec_helper' +require Rails.root.join('db', 'post_migrate', '20171114104051_remove_empty_fork_networks.rb') + +describe RemoveEmptyForkNetworks, :migration do + let!(:fork_networks) { table(:fork_networks) } + + let(:deleted_project) { create(:project) } + let!(:empty_network) { create(:fork_network, id: 1, root_project_id: deleted_project.id) } + let!(:other_network) { create(:fork_network, id: 2, root_project_id: create(:project).id) } + + before do + deleted_project.destroy! + end + + it 'deletes only the fork network without members' do + expect(fork_networks.count).to eq(2) + + migrate! 
+ + expect(fork_networks.find_by(id: empty_network.id)).to be_nil + expect(fork_networks.find_by(id: other_network.id)).not_to be_nil + expect(fork_networks.count).to eq(1) + end +end diff --git a/spec/migrations/schedule_merge_request_diff_migrations_spec.rb b/spec/migrations/schedule_merge_request_diff_migrations_spec.rb index f95bd6e3511..76afb6c19cf 100644 --- a/spec/migrations/schedule_merge_request_diff_migrations_spec.rb +++ b/spec/migrations/schedule_merge_request_diff_migrations_spec.rb @@ -2,19 +2,6 @@ require 'spec_helper' require Rails.root.join('db', 'post_migrate', '20170703130158_schedule_merge_request_diff_migrations') describe ScheduleMergeRequestDiffMigrations, :migration, :sidekiq do - matcher :be_scheduled_migration do |time, *expected| - match do |migration| - BackgroundMigrationWorker.jobs.any? do |job| - job['args'] == [migration, expected] && - job['at'].to_i == time.to_i - end - end - - failure_message do |migration| - "Migration `#{migration}` with args `#{expected.inspect}` not scheduled!" - end - end - let(:merge_request_diffs) { table(:merge_request_diffs) } let(:merge_requests) { table(:merge_requests) } let(:projects) { table(:projects) } @@ -37,9 +24,9 @@ describe ScheduleMergeRequestDiffMigrations, :migration, :sidekiq do Timecop.freeze do migrate! - expect(described_class::MIGRATION).to be_scheduled_migration(5.minutes.from_now, 1, 1) - expect(described_class::MIGRATION).to be_scheduled_migration(10.minutes.from_now, 2, 2) - expect(described_class::MIGRATION).to be_scheduled_migration(15.minutes.from_now, 4, 4) + expect(described_class::MIGRATION).to be_scheduled_migration(5.minutes, 1, 1) + expect(described_class::MIGRATION).to be_scheduled_migration(10.minutes, 2, 2) + expect(described_class::MIGRATION).to be_scheduled_migration(15.minutes, 4, 4) expect(BackgroundMigrationWorker.jobs.size).to eq 3 end end diff --git a/spec/migrations/schedule_merge_request_diff_migrations_take_two_spec.rb b/spec/migrations/schedule_merge_request_diff_migrations_take_two_spec.rb index 4ab1bb67058..cf323973384 100644 --- a/spec/migrations/schedule_merge_request_diff_migrations_take_two_spec.rb +++ b/spec/migrations/schedule_merge_request_diff_migrations_take_two_spec.rb @@ -2,19 +2,6 @@ require 'spec_helper' require Rails.root.join('db', 'post_migrate', '20170926150348_schedule_merge_request_diff_migrations_take_two') describe ScheduleMergeRequestDiffMigrationsTakeTwo, :migration, :sidekiq do - matcher :be_scheduled_migration do |time, *expected| - match do |migration| - BackgroundMigrationWorker.jobs.any? do |job| - job['args'] == [migration, expected] && - job['at'].to_i == time.to_i - end - end - - failure_message do |migration| - "Migration `#{migration}` with args `#{expected.inspect}` not scheduled!" - end - end - let(:merge_request_diffs) { table(:merge_request_diffs) } let(:merge_requests) { table(:merge_requests) } let(:projects) { table(:projects) } @@ -37,9 +24,9 @@ describe ScheduleMergeRequestDiffMigrationsTakeTwo, :migration, :sidekiq do Timecop.freeze do migrate! 
- expect(described_class::MIGRATION).to be_scheduled_migration(10.minutes.from_now, 1, 1) - expect(described_class::MIGRATION).to be_scheduled_migration(20.minutes.from_now, 2, 2) - expect(described_class::MIGRATION).to be_scheduled_migration(30.minutes.from_now, 4, 4) + expect(described_class::MIGRATION).to be_scheduled_migration(10.minutes, 1, 1) + expect(described_class::MIGRATION).to be_scheduled_migration(20.minutes, 2, 2) + expect(described_class::MIGRATION).to be_scheduled_migration(30.minutes, 4, 4) expect(BackgroundMigrationWorker.jobs.size).to eq 3 end end diff --git a/spec/migrations/schedule_merge_request_latest_merge_request_diff_id_migrations_spec.rb b/spec/migrations/schedule_merge_request_latest_merge_request_diff_id_migrations_spec.rb new file mode 100644 index 00000000000..158d0bc02ed --- /dev/null +++ b/spec/migrations/schedule_merge_request_latest_merge_request_diff_id_migrations_spec.rb @@ -0,0 +1,64 @@ +require 'spec_helper' +require Rails.root.join('db', 'post_migrate', '20171026082505_schedule_merge_request_latest_merge_request_diff_id_migrations') + +describe ScheduleMergeRequestLatestMergeRequestDiffIdMigrations, :migration, :sidekiq do + let(:projects_table) { table(:projects) } + let(:merge_requests_table) { table(:merge_requests) } + let(:merge_request_diffs_table) { table(:merge_request_diffs) } + + let(:project) { projects_table.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce') } + + let!(:merge_request_1) { create_mr!('mr_1', diffs: 1) } + let!(:merge_request_2) { create_mr!('mr_2', diffs: 2) } + let!(:merge_request_migrated) { create_mr!('merge_request_migrated', diffs: 3) } + let!(:merge_request_4) { create_mr!('mr_4', diffs: 3) } + + def create_mr!(name, diffs: 0) + merge_request = + merge_requests_table.create!(target_project_id: project.id, + target_branch: 'master', + source_project_id: project.id, + source_branch: name, + title: name) + + diffs.times do + merge_request_diffs_table.create!(merge_request_id: merge_request.id) + end + + merge_request + end + + def diffs_for(merge_request) + merge_request_diffs_table.where(merge_request_id: merge_request.id) + end + + before do + stub_const("#{described_class.name}::BATCH_SIZE", 1) + + diff_id = diffs_for(merge_request_migrated).minimum(:id) + merge_request_migrated.update!(latest_merge_request_diff_id: diff_id) + end + + it 'correctly schedules background migrations' do + Sidekiq::Testing.fake! do + Timecop.freeze do + migrate! + + expect(described_class::MIGRATION).to be_scheduled_migration(5.minutes, merge_request_1.id, merge_request_1.id) + expect(described_class::MIGRATION).to be_scheduled_migration(10.minutes, merge_request_2.id, merge_request_2.id) + expect(described_class::MIGRATION).to be_scheduled_migration(15.minutes, merge_request_4.id, merge_request_4.id) + expect(BackgroundMigrationWorker.jobs.size).to eq 3 + end + end + end + + it 'schedules background migrations' do + Sidekiq::Testing.inline! do + expect(merge_requests_table.where(latest_merge_request_diff_id: nil).count).to eq 3 + + migrate! 
+ + expect(merge_requests_table.where(latest_merge_request_diff_id: nil).count).to eq 0 + end + end +end diff --git a/spec/models/blob_spec.rb b/spec/models/blob_spec.rb index 47342f98283..81e35e6c931 100644 --- a/spec/models/blob_spec.rb +++ b/spec/models/blob_spec.rb @@ -16,6 +16,23 @@ describe Blob do end end + describe '.lazy' do + let(:project) { create(:project, :repository) } + let(:commit) { project.commit_by(oid: 'e63f41fe459e62e1228fcef60d7189127aeba95a') } + + it 'fetches all blobs when the first is accessed' do + changelog = described_class.lazy(project, commit.id, 'CHANGELOG') + contributing = described_class.lazy(project, commit.id, 'CONTRIBUTING.md') + + expect(Gitlab::Git::Blob).to receive(:batch).once.and_call_original + expect(Gitlab::Git::Blob).not_to receive(:find) + + # Access property so the values are loaded + changelog.id + contributing.id + end + end + describe '#data' do context 'using a binary blob' do it 'returns the data as-is' do diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb index 2c9e7013b77..b89b0e555d9 100644 --- a/spec/models/ci/pipeline_spec.rb +++ b/spec/models/ci/pipeline_spec.rb @@ -625,38 +625,29 @@ describe Ci::Pipeline, :mailer do shared_context 'with some outdated pipelines' do before do - create_pipeline(:canceled, 'ref', 'A') - create_pipeline(:success, 'ref', 'A') - create_pipeline(:failed, 'ref', 'B') - create_pipeline(:skipped, 'feature', 'C') + create_pipeline(:canceled, 'ref', 'A', project) + create_pipeline(:success, 'ref', 'A', project) + create_pipeline(:failed, 'ref', 'B', project) + create_pipeline(:skipped, 'feature', 'C', project) end - def create_pipeline(status, ref, sha) - create(:ci_empty_pipeline, status: status, ref: ref, sha: sha) + def create_pipeline(status, ref, sha, project) + create( + :ci_empty_pipeline, + status: status, + ref: ref, + sha: sha, + project: project + ) end end - describe '.latest' do + describe '.newest_first' do include_context 'with some outdated pipelines' - context 'when no ref is specified' do - let(:pipelines) { described_class.latest.all } - - it 'returns the latest pipeline for the same ref and different sha' do - expect(pipelines.map(&:sha)).to contain_exactly('A', 'B', 'C') - expect(pipelines.map(&:status)) - .to contain_exactly('success', 'failed', 'skipped') - end - end - - context 'when ref is specified' do - let(:pipelines) { described_class.latest('ref').all } - - it 'returns the latest pipeline for ref and different sha' do - expect(pipelines.map(&:sha)).to contain_exactly('A', 'B') - expect(pipelines.map(&:status)) - .to contain_exactly('success', 'failed') - end + it 'returns the pipelines from new to old' do + expect(described_class.newest_first.pluck(:status)) + .to eq(%w[skipped failed success canceled]) end end @@ -664,20 +655,14 @@ describe Ci::Pipeline, :mailer do include_context 'with some outdated pipelines' context 'when no ref is specified' do - let(:latest_status) { described_class.latest_status } - - it 'returns the latest status for the same ref and different sha' do - expect(latest_status).to eq(described_class.latest.status) - expect(latest_status).to eq('failed') + it 'returns the status of the latest pipeline' do + expect(described_class.latest_status).to eq('skipped') end end context 'when ref is specified' do - let(:latest_status) { described_class.latest_status('ref') } - - it 'returns the latest status for ref and different sha' do - expect(latest_status).to eq(described_class.latest_status('ref')) - expect(latest_status).to 
eq('failed') + it 'returns the status of the latest pipeline for the given ref' do + expect(described_class.latest_status('ref')).to eq('failed') end end end @@ -686,7 +671,7 @@ describe Ci::Pipeline, :mailer do include_context 'with some outdated pipelines' let!(:latest_successful_pipeline) do - create_pipeline(:success, 'ref', 'D') + create_pipeline(:success, 'ref', 'D', project) end it 'returns the latest successful pipeline' do @@ -698,8 +683,13 @@ describe Ci::Pipeline, :mailer do describe '.latest_successful_for_refs' do include_context 'with some outdated pipelines' - let!(:latest_successful_pipeline1) { create_pipeline(:success, 'ref1', 'D') } - let!(:latest_successful_pipeline2) { create_pipeline(:success, 'ref2', 'D') } + let!(:latest_successful_pipeline1) do + create_pipeline(:success, 'ref1', 'D', project) + end + + let!(:latest_successful_pipeline2) do + create_pipeline(:success, 'ref2', 'D', project) + end it 'returns the latest successful pipeline for both refs' do refs = %w(ref1 ref2 ref3) @@ -708,6 +698,62 @@ describe Ci::Pipeline, :mailer do end end + describe '.latest_status_per_commit' do + let(:project) { create(:project) } + + before do + pairs = [ + %w[success ref1 123], + %w[manual master 123], + %w[failed ref 456] + ] + + pairs.each do |(status, ref, sha)| + create( + :ci_empty_pipeline, + status: status, + ref: ref, + sha: sha, + project: project + ) + end + end + + context 'without a ref' do + it 'returns a Hash containing the latest status per commit for all refs' do + expect(described_class.latest_status_per_commit(%w[123 456])) + .to eq({ '123' => 'manual', '456' => 'failed' }) + end + + it 'only includes the status of the given commit SHAs' do + expect(described_class.latest_status_per_commit(%w[123])) + .to eq({ '123' => 'manual' }) + end + + context 'when there are two pipelines for a ref and SHA' do + it 'returns the status of the latest pipeline' do + create( + :ci_empty_pipeline, + status: 'failed', + ref: 'master', + sha: '123', + project: project + ) + + expect(described_class.latest_status_per_commit(%w[123])) + .to eq({ '123' => 'failed' }) + end + end + end + + context 'with a ref' do + it 'only includes the pipelines for the given ref' do + expect(described_class.latest_status_per_commit(%w[123 456], 'master')) + .to eq({ '123' => 'manual' }) + end + end + end + describe '.internal_sources' do subject { described_class.internal_sources } diff --git a/spec/models/commit_collection_spec.rb b/spec/models/commit_collection_spec.rb new file mode 100644 index 00000000000..066fe7d154e --- /dev/null +++ b/spec/models/commit_collection_spec.rb @@ -0,0 +1,59 @@ +require 'spec_helper' + +describe CommitCollection do + let(:project) { create(:project, :repository) } + let(:commit) { project.commit } + + describe '#each' do + it 'yields every commit' do + collection = described_class.new(project, [commit]) + + expect { |b| collection.each(&b) }.to yield_with_args(commit) + end + end + + describe '#with_pipeline_status' do + it 'sets the pipeline status for every commit so no additional queries are necessary' do + create( + :ci_empty_pipeline, + ref: 'master', + sha: commit.id, + status: 'success', + project: project + ) + + collection = described_class.new(project, [commit]) + collection.with_pipeline_status + + recorder = ActiveRecord::QueryRecorder.new do + expect(commit.status).to eq('success') + end + + expect(recorder.count).to be_zero + end + end + + describe '#respond_to_missing?' 
do + it 'returns true when the underlying Array responds to the message' do + collection = described_class.new(project, []) + + expect(collection.respond_to?(:last)).to eq(true) + end + + it 'returns false when the underlying Array does not respond to the message' do + collection = described_class.new(project, []) + + expect(collection.respond_to?(:foo)).to eq(false) + end + end + + describe '#method_missing' do + it 'delegates undefined methods to the underlying Array' do + collection = described_class.new(project, [commit]) + + expect(collection.length).to eq(1) + expect(collection.last).to eq(commit) + expect(collection).not_to be_empty + end + end +end diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb index e3cfa149e3a..d18a5c9dfa6 100644 --- a/spec/models/commit_spec.rb +++ b/spec/models/commit_spec.rb @@ -351,12 +351,19 @@ eos end it 'gives compound status from latest pipelines if ref is nil' do - expect(commit.status(nil)).to eq(Ci::Pipeline.latest_status) - expect(commit.status(nil)).to eq('failed') + expect(commit.status(nil)).to eq(pipeline_from_fix.status) end end end + describe '#set_status_for_ref' do + it 'sets the status for a given reference' do + commit.set_status_for_ref('master', 'failed') + + expect(commit.status('master')).to eq('failed') + end + end + describe '#participants' do let(:user1) { build(:user) } let(:user2) { build(:user) } diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb index ba57301a3c9..4dfbb14952e 100644 --- a/spec/models/concerns/issuable_spec.rb +++ b/spec/models/concerns/issuable_spec.rb @@ -265,25 +265,44 @@ describe Issuable do end describe '#to_hook_data' do + let(:builder) { double } + context 'labels are updated' do let(:labels) { create_list(:label, 2) } before do issue.update(labels: [labels[1]]) + expect(Gitlab::HookData::IssuableBuilder) + .to receive(:new).with(issue).and_return(builder) end it 'delegates to Gitlab::HookData::IssuableBuilder#build' do - builder = double + expect(builder).to receive(:build).with( + user: user, + changes: hash_including( + 'labels' => [[labels[0].hook_attrs], [labels[1].hook_attrs]] + )) + issue.to_hook_data(user, old_labels: [labels[0]]) + end + end + + context 'total_time_spent is updated' do + before do + issue.spend_time(duration: 2, user: user, spent_at: Time.now) + issue.save expect(Gitlab::HookData::IssuableBuilder) .to receive(:new).with(issue).and_return(builder) + end + + it 'delegates to Gitlab::HookData::IssuableBuilder#build' do expect(builder).to receive(:build).with( user: user, changes: hash_including( - 'labels' => [[labels[0].hook_attrs], [labels[1].hook_attrs]] + 'total_time_spent' => [1, 2] )) - issue.to_hook_data(user, old_labels: [labels[0]]) + issue.to_hook_data(user, old_total_time_spent: 1) end end @@ -292,13 +311,11 @@ describe Issuable do before do issue.assignees << user << user2 + expect(Gitlab::HookData::IssuableBuilder) + .to receive(:new).with(issue).and_return(builder) end it 'delegates to Gitlab::HookData::IssuableBuilder#build' do - builder = double - - expect(Gitlab::HookData::IssuableBuilder) - .to receive(:new).with(issue).and_return(builder) expect(builder).to receive(:build).with( user: user, changes: hash_including( @@ -316,13 +333,11 @@ describe Issuable do before do merge_request.update(assignee: user) merge_request.update(assignee: user2) + expect(Gitlab::HookData::IssuableBuilder) + .to receive(:new).with(merge_request).and_return(builder) end it 'delegates to Gitlab::HookData::IssuableBuilder#build' do 
- builder = double - - expect(Gitlab::HookData::IssuableBuilder) - .to receive(:new).with(merge_request).and_return(builder) expect(builder).to receive(:build).with( user: user, changes: hash_including( diff --git a/spec/models/diff_viewer/base_spec.rb b/spec/models/diff_viewer/base_spec.rb index b26de3f3b97..c90b32c5d77 100644 --- a/spec/models/diff_viewer/base_spec.rb +++ b/spec/models/diff_viewer/base_spec.rb @@ -32,10 +32,8 @@ describe DiffViewer::Base do end context 'when the binaryness does not match' do - before do - allow(diff_file.old_blob).to receive(:binary?).and_return(false) - allow(diff_file.new_blob).to receive(:binary?).and_return(false) - end + let(:commit) { project.commit_by(oid: 'ae73cb07c9eeaf35924a10f713b364d32b2dd34f') } + let(:diff_file) { commit.diffs.diff_file_with_new_path('Gemfile.zip') } it 'returns false' do expect(viewer_class.can_render?(diff_file)).to be_falsey @@ -60,8 +58,7 @@ describe DiffViewer::Base do context 'when the binaryness does not match' do before do - allow(diff_file.old_blob).to receive(:binary?).and_return(true) - allow(diff_file.new_blob).to receive(:binary?).and_return(true) + allow_any_instance_of(Blob).to receive(:binary?).and_return(true) end it 'returns false' do @@ -77,12 +74,12 @@ describe DiffViewer::Base do end context 'when the file was renamed and only the old blob is supported' do - let(:commit) { project.commit('2f63565e7aac07bcdadb654e253078b727143ec4') } + let(:commit) { project.commit_by(oid: '2f63565e7aac07bcdadb654e253078b727143ec4') } let(:diff_file) { commit.diffs.diff_file_with_new_path('files/images/6049019_460s.jpg') } before do allow(diff_file).to receive(:renamed_file?).and_return(true) - allow(diff_file.new_blob).to receive(:extension).and_return('jpeg') + viewer_class.extensions = %w(notjpg) end it 'returns false' do @@ -94,8 +91,7 @@ describe DiffViewer::Base do describe '#collapsed?' do context 'when the combined blob size is larger than the collapse limit' do before do - allow(diff_file.old_blob).to receive(:raw_size).and_return(512.kilobytes) - allow(diff_file.new_blob).to receive(:raw_size).and_return(513.kilobytes) + allow(diff_file).to receive(:raw_size).and_return(1025.kilobytes) end it 'returns true' do @@ -113,8 +109,7 @@ describe DiffViewer::Base do describe '#too_large?' 
do context 'when the combined blob size is larger than the size limit' do before do - allow(diff_file.old_blob).to receive(:raw_size).and_return(2.megabytes) - allow(diff_file.new_blob).to receive(:raw_size).and_return(4.megabytes) + allow(diff_file).to receive(:raw_size).and_return(6.megabytes) end it 'returns true' do @@ -132,8 +127,7 @@ describe DiffViewer::Base do describe '#render_error' do context 'when the combined blob size is larger than the size limit' do before do - allow(diff_file.old_blob).to receive(:raw_size).and_return(2.megabytes) - allow(diff_file.new_blob).to receive(:raw_size).and_return(4.megabytes) + allow(diff_file).to receive(:raw_size).and_return(6.megabytes) end it 'returns :too_large' do diff --git a/spec/models/diff_viewer/server_side_spec.rb b/spec/models/diff_viewer/server_side_spec.rb index 92e613f92de..98a8f6d4cc9 100644 --- a/spec/models/diff_viewer/server_side_spec.rb +++ b/spec/models/diff_viewer/server_side_spec.rb @@ -1,9 +1,9 @@ require 'spec_helper' describe DiffViewer::ServerSide do - let(:project) { create(:project, :repository) } - let(:commit) { project.commit('570e7b2abdd848b95f2f578043fc23bd6f6fd24d') } - let(:diff_file) { commit.diffs.diff_file_with_new_path('files/ruby/popen.rb') } + set(:project) { create(:project, :repository) } + let(:commit) { project.commit_by(oid: '570e7b2abdd848b95f2f578043fc23bd6f6fd24d') } + let!(:diff_file) { commit.diffs.diff_file_with_new_path('files/ruby/popen.rb') } let(:viewer_class) do Class.new(DiffViewer::Base) do @@ -15,8 +15,7 @@ describe DiffViewer::ServerSide do describe '#prepare!' do it 'loads all diff file data' do - expect(diff_file.old_blob).to receive(:load_all_data!) - expect(diff_file.new_blob).to receive(:load_all_data!) + expect(Blob).to receive(:lazy).at_least(:twice) subject.prepare! end diff --git a/spec/models/fork_network_member_spec.rb b/spec/models/fork_network_member_spec.rb index 532ca1fca8c..25bf596fddc 100644 --- a/spec/models/fork_network_member_spec.rb +++ b/spec/models/fork_network_member_spec.rb @@ -5,4 +5,22 @@ describe ForkNetworkMember do it { is_expected.to validate_presence_of(:project) } it { is_expected.to validate_presence_of(:fork_network) } end + + describe 'destroying a ForkNetworkMember' do + let(:fork_network_member) { create(:fork_network_member) } + let(:fork_network) { fork_network_member.fork_network } + + it 'removes the fork network if it was the last member' do + fork_network.fork_network_members.destroy_all + + expect(ForkNetwork.count).to eq(0) + end + + it 'does not destroy the fork network if there are members left' do + fork_network_member.destroy! 
+ + # The root of the fork network is left + expect(ForkNetwork.count).to eq(1) + end + end end diff --git a/spec/models/identity_spec.rb b/spec/models/identity_spec.rb index 3ed048744de..a45a6088831 100644 --- a/spec/models/identity_spec.rb +++ b/spec/models/identity_spec.rb @@ -33,5 +33,15 @@ describe Identity do expect(identity).to eq(ldap_identity) end end + + context 'any other provider' do + let!(:test_entity) { create(:identity, provider: 'test_provider', extern_uid: 'test_uid') } + + it 'the extern_uid lookup is case insensitive' do + identity = described_class.with_extern_uid('test_provider', 'TEST_UID').first + + expect(identity).to eq(test_entity) + end + end end end diff --git a/spec/models/key_spec.rb b/spec/models/key_spec.rb index 81c2057e175..4cd9e3f4f1d 100644 --- a/spec/models/key_spec.rb +++ b/spec/models/key_spec.rb @@ -166,4 +166,27 @@ describe Key, :mailer do expect(key.public_key.key_text).to eq(valid_key) end end + + describe '#refresh_user_cache', :use_clean_rails_memory_store_caching do + context 'when the key belongs to a user' do + it 'refreshes the keys count cache for the user' do + expect_any_instance_of(Users::KeysCountService) + .to receive(:refresh_cache) + .and_call_original + + key = create(:personal_key) + + expect(Users::KeysCountService.new(key.user).count).to eq(1) + end + end + + context 'when the key does not belong to a user' do + it 'does nothing' do + expect_any_instance_of(Users::KeysCountService) + .not_to receive(:refresh_cache) + + create(:key) + end + end + end end diff --git a/spec/models/milestone_spec.rb b/spec/models/milestone_spec.rb index 13e37fffa4e..47f4a792e5c 100644 --- a/spec/models/milestone_spec.rb +++ b/spec/models/milestone_spec.rb @@ -11,7 +11,7 @@ describe Milestone do milestone = build(:milestone, start_date: Date.tomorrow, due_date: Date.yesterday) expect(milestone).not_to be_valid - expect(milestone.errors[:start_date]).to include("Can't be greater than due date") + expect(milestone.errors[:due_date]).to include("must be greater than start date") end end end diff --git a/spec/models/project_services/flowdock_service_spec.rb b/spec/models/project_services/flowdock_service_spec.rb index 5e8e880985e..fabcb142858 100644 --- a/spec/models/project_services/flowdock_service_spec.rb +++ b/spec/models/project_services/flowdock_service_spec.rb @@ -46,6 +46,7 @@ describe FlowdockService do @sample_data[:commits].each do |commit| # One request to Flowdock per new commit next if commit[:id] == @sample_data[:before] + expect(WebMock).to have_requested(:post, @api_url).with( body: /#{commit[:id]}.*#{project.path}/ ).once diff --git a/spec/models/project_wiki_spec.rb b/spec/models/project_wiki_spec.rb index 3d46434fc27..929086305ba 100644 --- a/spec/models/project_wiki_spec.rb +++ b/spec/models/project_wiki_spec.rb @@ -10,6 +10,10 @@ describe ProjectWiki do subject { project_wiki } + it { is_expected.to delegate_method(:empty?).to :pages } + it { is_expected.to delegate_method(:repository_storage_path).to :project } + it { is_expected.to delegate_method(:hashed_storage?).to :project } + describe "#path_with_namespace" do it "returns the project path with namespace with the .wiki extension" do expect(subject.path_with_namespace).to eq(project.full_path + '.wiki') diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb index 88732962071..86647ddf6ce 100644 --- a/spec/models/user_spec.rb +++ b/spec/models/user_spec.rb @@ -828,7 +828,7 @@ describe User do end end - describe '#require_ssh_key?' 
do + describe '#require_ssh_key?', :use_clean_rails_memory_store_caching do protocol_and_expectation = { 'http' => false, 'ssh' => true, @@ -843,6 +843,12 @@ describe User do expect(user.require_ssh_key?).to eq(expected) end end + + it 'returns false when the user has 1 or more SSH keys' do + key = create(:personal_key) + + expect(key.user.require_ssh_key?).to eq(false) + end end end diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb index a7227b38850..ea75434e399 100644 --- a/spec/models/wiki_page_spec.rb +++ b/spec/models/wiki_page_spec.rb @@ -373,7 +373,7 @@ describe WikiPage do end it 'returns commit sha' do - expect(@page.last_commit_sha).to eq @page.commit.sha + expect(@page.last_commit_sha).to eq @page.last_version.sha end it 'is changed after page updated' do diff --git a/spec/requests/api/helpers_spec.rb b/spec/requests/api/helpers_spec.rb index 6c0996c543d..0462f494e15 100644 --- a/spec/requests/api/helpers_spec.rb +++ b/spec/requests/api/helpers_spec.rb @@ -11,7 +11,6 @@ describe API::Helpers do let(:admin) { create(:admin) } let(:key) { create(:key, user: user) } - let(:params) { {} } let(:csrf_token) { SecureRandom.base64(ActionController::RequestForgeryProtection::AUTHENTICITY_TOKEN_LENGTH) } let(:env) do { @@ -19,10 +18,13 @@ describe API::Helpers do 'rack.session' => { _csrf_token: csrf_token }, - 'REQUEST_METHOD' => 'GET' + 'REQUEST_METHOD' => 'GET', + 'CONTENT_TYPE' => 'text/plain;charset=utf-8' } end let(:header) { } + let(:request) { Grape::Request.new(env)} + let(:params) { request.params } before do allow_any_instance_of(self.class).to receive(:options).and_return({}) @@ -37,6 +39,10 @@ describe API::Helpers do raise Exception.new("#{status} - #{message}") end + def set_param(key, value) + request.update_param(key, value) + end + describe ".current_user" do subject { current_user } @@ -132,13 +138,13 @@ describe API::Helpers do let(:personal_access_token) { create(:personal_access_token, user: user) } it "returns a 401 response for an invalid token" do - env[API::APIGuard::PRIVATE_TOKEN_HEADER] = 'invalid token' + env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = 'invalid token' expect { current_user }.to raise_error /401/ end it "returns a 403 response for a user without access" do - env[API::APIGuard::PRIVATE_TOKEN_HEADER] = personal_access_token.token + env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = personal_access_token.token allow_any_instance_of(Gitlab::UserAccess).to receive(:allowed?).and_return(false) expect { current_user }.to raise_error /403/ @@ -146,35 +152,35 @@ describe API::Helpers do it 'returns a 403 response for a user who is blocked' do user.block! 
- env[API::APIGuard::PRIVATE_TOKEN_HEADER] = personal_access_token.token + env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = personal_access_token.token expect { current_user }.to raise_error /403/ end it "sets current_user" do - env[API::APIGuard::PRIVATE_TOKEN_HEADER] = personal_access_token.token + env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = personal_access_token.token expect(current_user).to eq(user) end it "does not allow tokens without the appropriate scope" do personal_access_token = create(:personal_access_token, user: user, scopes: ['read_user']) - env[API::APIGuard::PRIVATE_TOKEN_HEADER] = personal_access_token.token + env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = personal_access_token.token - expect { current_user }.to raise_error API::APIGuard::InsufficientScopeError + expect { current_user }.to raise_error Gitlab::Auth::InsufficientScopeError end it 'does not allow revoked tokens' do personal_access_token.revoke! - env[API::APIGuard::PRIVATE_TOKEN_HEADER] = personal_access_token.token + env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = personal_access_token.token - expect { current_user }.to raise_error API::APIGuard::RevokedError + expect { current_user }.to raise_error Gitlab::Auth::RevokedError end it 'does not allow expired tokens' do personal_access_token.update_attributes!(expires_at: 1.day.ago) - env[API::APIGuard::PRIVATE_TOKEN_HEADER] = personal_access_token.token + env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = personal_access_token.token - expect { current_user }.to raise_error API::APIGuard::ExpiredError + expect { current_user }.to raise_error Gitlab::Auth::ExpiredError end end end @@ -350,7 +356,7 @@ describe API::Helpers do context 'when using param' do context 'when providing username' do before do - params[API::Helpers::SUDO_PARAM] = user.username + set_param(API::Helpers::SUDO_PARAM, user.username) end it_behaves_like 'successful sudo' @@ -358,7 +364,7 @@ describe API::Helpers do context 'when providing user ID' do before do - params[API::Helpers::SUDO_PARAM] = user.id.to_s + set_param(API::Helpers::SUDO_PARAM, user.id.to_s) end it_behaves_like 'successful sudo' @@ -368,7 +374,7 @@ describe API::Helpers do context 'when user does not exist' do before do - params[API::Helpers::SUDO_PARAM] = 'nonexistent' + set_param(API::Helpers::SUDO_PARAM, 'nonexistent') end it 'raises an error' do @@ -382,11 +388,11 @@ describe API::Helpers do token.scopes = %w[api] token.save! - params[API::Helpers::SUDO_PARAM] = user.id.to_s + set_param(API::Helpers::SUDO_PARAM, user.id.to_s) end it 'raises an error' do - expect { current_user }.to raise_error API::APIGuard::InsufficientScopeError + expect { current_user }.to raise_error Gitlab::Auth::InsufficientScopeError end end end @@ -396,7 +402,7 @@ describe API::Helpers do token.user = user token.save! 
- params[API::Helpers::SUDO_PARAM] = user.id.to_s + set_param(API::Helpers::SUDO_PARAM, user.id.to_s) end it 'raises an error' do @@ -420,7 +426,7 @@ describe API::Helpers do context 'passed as param' do before do - params[API::APIGuard::PRIVATE_TOKEN_PARAM] = token.token + set_param(Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_PARAM, token.token) end it_behaves_like 'sudo' @@ -428,7 +434,7 @@ describe API::Helpers do context 'passed as header' do before do - env[API::APIGuard::PRIVATE_TOKEN_HEADER] = token.token + env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = token.token end it_behaves_like 'sudo' diff --git a/spec/requests/api/internal_spec.rb b/spec/requests/api/internal_spec.rb index d919899282d..34ecdd1e164 100644 --- a/spec/requests/api/internal_spec.rb +++ b/spec/requests/api/internal_spec.rb @@ -203,18 +203,44 @@ describe API::Internal do end context 'with env passed as a JSON' do - it 'sets env in RequestStore' do - expect(Gitlab::Git::Env).to receive(:set).with({ - 'GIT_OBJECT_DIRECTORY' => 'foo', - 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => 'bar' - }) + context 'when relative path envs are not set' do + it 'sets env in RequestStore' do + expect(Gitlab::Git::Env).to receive(:set).with({ + 'GIT_OBJECT_DIRECTORY' => 'foo', + 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => 'bar' + }) + + push(key, project.wiki, env: { + GIT_OBJECT_DIRECTORY: 'foo', + GIT_ALTERNATE_OBJECT_DIRECTORIES: 'bar' + }.to_json) - push(key, project.wiki, env: { - GIT_OBJECT_DIRECTORY: 'foo', - GIT_ALTERNATE_OBJECT_DIRECTORIES: 'bar' - }.to_json) + expect(response).to have_gitlab_http_status(200) + end + end - expect(response).to have_gitlab_http_status(200) + context 'when relative path envs are set' do + it 'sets env in RequestStore' do + obj_dir_relative = './objects' + alt_obj_dirs_relative = ['./alt-objects-1', './alt-objects-2'] + repo_path = project.wiki.repository.path_to_repo + + expect(Gitlab::Git::Env).to receive(:set).with({ + 'GIT_OBJECT_DIRECTORY' => File.join(repo_path, obj_dir_relative), + 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => alt_obj_dirs_relative.map { |d| File.join(repo_path, d) }, + 'GIT_OBJECT_DIRECTORY_RELATIVE' => obj_dir_relative, + 'GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE' => alt_obj_dirs_relative + }) + + push(key, project.wiki, env: { + GIT_OBJECT_DIRECTORY: 'foo', + GIT_ALTERNATE_OBJECT_DIRECTORIES: 'bar', + GIT_OBJECT_DIRECTORY_RELATIVE: obj_dir_relative, + GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE: alt_obj_dirs_relative + }.to_json) + + expect(response).to have_gitlab_http_status(200) + end end end diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb index 50f6c8b7d64..a41345da05b 100644 --- a/spec/requests/api/projects_spec.rb +++ b/spec/requests/api/projects_spec.rb @@ -437,6 +437,7 @@ describe API::Projects do project.each_pair do |k, v| next if %i[has_external_issue_tracker issues_enabled merge_requests_enabled wiki_enabled].include?(k) + expect(json_response[k.to_s]).to eq(v) end @@ -643,6 +644,7 @@ describe API::Projects do expect(response).to have_gitlab_http_status(201) project.each_pair do |k, v| next if %i[has_external_issue_tracker path].include?(k) + expect(json_response[k.to_s]).to eq(v) end end diff --git a/spec/requests/api/v3/projects_spec.rb b/spec/requests/api/v3/projects_spec.rb index f62ad747c73..27288b98d1c 100644 --- a/spec/requests/api/v3/projects_spec.rb +++ b/spec/requests/api/v3/projects_spec.rb @@ -404,6 +404,7 @@ describe API::V3::Projects do project.each_pair do |k, v| next if %i[has_external_issue_tracker issues_enabled 
merge_requests_enabled wiki_enabled].include?(k) + expect(json_response[k.to_s]).to eq(v) end @@ -547,6 +548,7 @@ describe API::V3::Projects do expect(response).to have_gitlab_http_status(201) project.each_pair do |k, v| next if %i[has_external_issue_tracker path].include?(k) + expect(json_response[k.to_s]).to eq(v) end end diff --git a/spec/requests/openid_connect_spec.rb b/spec/requests/openid_connect_spec.rb index 0b1f8ce6f6d..1a5ad9b04e4 100644 --- a/spec/requests/openid_connect_spec.rb +++ b/spec/requests/openid_connect_spec.rb @@ -107,6 +107,15 @@ describe 'OpenID Connect requests' do end end + # These 2 calls shouldn't actually throw, they should be handled as an + # unauthorized request, so we should be able to check the response. + # + # This was not possible due to an issue with Warden: + # https://github.com/hassox/warden/pull/162 + # + # When the patch gets merged and we update Warden, these specs will need to + # be updated to check the response instead of a raised exception. + # https://gitlab.com/gitlab-org/gitlab-ce/issues/40218 context 'when user is blocked' do it 'returns authentication error' do access_grant @@ -114,7 +123,7 @@ describe 'OpenID Connect requests' do expect do request_access_token - end.to throw_symbol :warden + end.to raise_error UncaughtThrowError end end @@ -125,7 +134,7 @@ describe 'OpenID Connect requests' do expect do request_access_token - end.to throw_symbol :warden + end.to raise_error UncaughtThrowError end end end diff --git a/spec/requests/rack_attack_global_spec.rb b/spec/requests/rack_attack_global_spec.rb new file mode 100644 index 00000000000..0fec14d0cce --- /dev/null +++ b/spec/requests/rack_attack_global_spec.rb @@ -0,0 +1,362 @@ +require 'spec_helper' + +describe 'Rack Attack global throttles' do + let(:settings) { Gitlab::CurrentSettings.current_application_settings } + + # Start with really high limits and override them with low limits to ensure + # the right settings are being exercised + let(:settings_to_set) do + { + throttle_unauthenticated_requests_per_period: 100, + throttle_unauthenticated_period_in_seconds: 1, + throttle_authenticated_api_requests_per_period: 100, + throttle_authenticated_api_period_in_seconds: 1, + throttle_authenticated_web_requests_per_period: 100, + throttle_authenticated_web_period_in_seconds: 1 + } + end + + let(:requests_per_period) { 1 } + let(:period_in_seconds) { 10000 } + let(:period) { period_in_seconds.seconds } + + let(:url_that_does_not_require_authentication) { '/users/sign_in' } + let(:url_that_requires_authentication) { '/dashboard/snippets' } + let(:api_partial_url) { '/todos' } + + around do |example| + # Instead of test environment's :null_store so the throttles can increment + Rack::Attack.cache.store = ActiveSupport::Cache::MemoryStore.new + + # Make time-dependent tests deterministic + Timecop.freeze { example.run } + + Rack::Attack.cache.store = Rails.cache + end + + # Requires let variables: + # * throttle_setting_prefix (e.g. 
"throttle_authenticated_api" or "throttle_authenticated_web") + # * get_args + # * other_user_get_args + shared_examples_for 'rate-limited token-authenticated requests' do + before do + # Set low limits + settings_to_set[:"#{throttle_setting_prefix}_requests_per_period"] = requests_per_period + settings_to_set[:"#{throttle_setting_prefix}_period_in_seconds"] = period_in_seconds + end + + context 'when the throttle is enabled' do + before do + settings_to_set[:"#{throttle_setting_prefix}_enabled"] = true + stub_application_setting(settings_to_set) + end + + it 'rejects requests over the rate limit' do + # At first, allow requests under the rate limit. + requests_per_period.times do + get(*get_args) + expect(response).to have_http_status 200 + end + + # the last straw + expect_rejection { get(*get_args) } + end + + it 'allows requests after throttling and then waiting for the next period' do + requests_per_period.times do + get(*get_args) + expect(response).to have_http_status 200 + end + + expect_rejection { get(*get_args) } + + Timecop.travel(period.from_now) do + requests_per_period.times do + get(*get_args) + expect(response).to have_http_status 200 + end + + expect_rejection { get(*get_args) } + end + end + + it 'counts requests from different users separately, even from the same IP' do + requests_per_period.times do + get(*get_args) + expect(response).to have_http_status 200 + end + + # would be over the limit if this wasn't a different user + get(*other_user_get_args) + expect(response).to have_http_status 200 + end + + it 'counts all requests from the same user, even via different IPs' do + requests_per_period.times do + get(*get_args) + expect(response).to have_http_status 200 + end + + expect_any_instance_of(Rack::Attack::Request).to receive(:ip).and_return('1.2.3.4') + + expect_rejection { get(*get_args) } + end + end + + context 'when the throttle is disabled' do + before do + settings_to_set[:"#{throttle_setting_prefix}_enabled"] = false + stub_application_setting(settings_to_set) + end + + it 'allows requests over the rate limit' do + (1 + requests_per_period).times do + get(*get_args) + expect(response).to have_http_status 200 + end + end + end + end + + describe 'unauthenticated requests' do + before do + # Set low limits + settings_to_set[:throttle_unauthenticated_requests_per_period] = requests_per_period + settings_to_set[:throttle_unauthenticated_period_in_seconds] = period_in_seconds + end + + context 'when the throttle is enabled' do + before do + settings_to_set[:throttle_unauthenticated_enabled] = true + stub_application_setting(settings_to_set) + end + + it 'rejects requests over the rate limit' do + # At first, allow requests under the rate limit. 
+ requests_per_period.times do + get url_that_does_not_require_authentication + expect(response).to have_http_status 200 + end + + # the last straw + expect_rejection { get url_that_does_not_require_authentication } + end + + it 'allows requests after throttling and then waiting for the next period' do + requests_per_period.times do + get url_that_does_not_require_authentication + expect(response).to have_http_status 200 + end + + expect_rejection { get url_that_does_not_require_authentication } + + Timecop.travel(period.from_now) do + requests_per_period.times do + get url_that_does_not_require_authentication + expect(response).to have_http_status 200 + end + + expect_rejection { get url_that_does_not_require_authentication } + end + end + + it 'counts requests from different IPs separately' do + requests_per_period.times do + get url_that_does_not_require_authentication + expect(response).to have_http_status 200 + end + + expect_any_instance_of(Rack::Attack::Request).to receive(:ip).and_return('1.2.3.4') + + # would be over limit for the same IP + get url_that_does_not_require_authentication + expect(response).to have_http_status 200 + end + end + + context 'when the throttle is disabled' do + before do + settings_to_set[:throttle_unauthenticated_enabled] = false + stub_application_setting(settings_to_set) + end + + it 'allows requests over the rate limit' do + (1 + requests_per_period).times do + get url_that_does_not_require_authentication + expect(response).to have_http_status 200 + end + end + end + end + + describe 'API requests authenticated with personal access token', :api do + let(:user) { create(:user) } + let(:token) { create(:personal_access_token, user: user) } + let(:other_user) { create(:user) } + let(:other_user_token) { create(:personal_access_token, user: other_user) } + let(:throttle_setting_prefix) { 'throttle_authenticated_api' } + + context 'with the token in the query string' do + let(:get_args) { [api(api_partial_url, personal_access_token: token)] } + let(:other_user_get_args) { [api(api_partial_url, personal_access_token: other_user_token)] } + + it_behaves_like 'rate-limited token-authenticated requests' + end + + context 'with the token in the headers' do + let(:get_args) { api_get_args_with_token_headers(api_partial_url, personal_access_token_headers(token)) } + let(:other_user_get_args) { api_get_args_with_token_headers(api_partial_url, personal_access_token_headers(other_user_token)) } + + it_behaves_like 'rate-limited token-authenticated requests' + end + end + + describe 'API requests authenticated with OAuth token', :api do + let(:user) { create(:user) } + let(:application) { Doorkeeper::Application.create!(name: "MyApp", redirect_uri: "https://app.com", owner: user) } + let(:token) { Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: user.id, scopes: "api") } + let(:other_user) { create(:user) } + let(:other_user_application) { Doorkeeper::Application.create!(name: "MyApp", redirect_uri: "https://app.com", owner: other_user) } + let(:other_user_token) { Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: other_user.id, scopes: "api") } + let(:throttle_setting_prefix) { 'throttle_authenticated_api' } + + context 'with the token in the query string' do + let(:get_args) { [api(api_partial_url, oauth_access_token: token)] } + let(:other_user_get_args) { [api(api_partial_url, oauth_access_token: other_user_token)] } + + it_behaves_like 'rate-limited token-authenticated requests' + end + + context 
'with the token in the headers' do + let(:get_args) { api_get_args_with_token_headers(api_partial_url, oauth_token_headers(token)) } + let(:other_user_get_args) { api_get_args_with_token_headers(api_partial_url, oauth_token_headers(other_user_token)) } + + it_behaves_like 'rate-limited token-authenticated requests' + end + end + + describe '"web" (non-API) requests authenticated with RSS token' do + let(:user) { create(:user) } + let(:other_user) { create(:user) } + let(:throttle_setting_prefix) { 'throttle_authenticated_web' } + + context 'with the token in the query string' do + let(:get_args) { [rss_url(user), nil] } + let(:other_user_get_args) { [rss_url(other_user), nil] } + + it_behaves_like 'rate-limited token-authenticated requests' + end + end + + describe 'web requests authenticated with regular login' do + let(:user) { create(:user) } + + before do + login_as(user) + + # Set low limits + settings_to_set[:throttle_authenticated_web_requests_per_period] = requests_per_period + settings_to_set[:throttle_authenticated_web_period_in_seconds] = period_in_seconds + end + + context 'when the throttle is enabled' do + before do + settings_to_set[:throttle_authenticated_web_enabled] = true + stub_application_setting(settings_to_set) + end + + it 'rejects requests over the rate limit' do + # At first, allow requests under the rate limit. + requests_per_period.times do + get url_that_requires_authentication + expect(response).to have_http_status 200 + end + + # the last straw + expect_rejection { get url_that_requires_authentication } + end + + it 'allows requests after throttling and then waiting for the next period' do + requests_per_period.times do + get url_that_requires_authentication + expect(response).to have_http_status 200 + end + + expect_rejection { get url_that_requires_authentication } + + Timecop.travel(period.from_now) do + requests_per_period.times do + get url_that_requires_authentication + expect(response).to have_http_status 200 + end + + expect_rejection { get url_that_requires_authentication } + end + end + + it 'counts requests from different users separately, even from the same IP' do + requests_per_period.times do + get url_that_requires_authentication + expect(response).to have_http_status 200 + end + + # would be over the limit if this wasn't a different user + login_as(create(:user)) + + get url_that_requires_authentication + expect(response).to have_http_status 200 + end + + it 'counts all requests from the same user, even via different IPs' do + requests_per_period.times do + get url_that_requires_authentication + expect(response).to have_http_status 200 + end + + expect_any_instance_of(Rack::Attack::Request).to receive(:ip).and_return('1.2.3.4') + + expect_rejection { get url_that_requires_authentication } + end + end + + context 'when the throttle is disabled' do + before do + settings_to_set[:throttle_authenticated_web_enabled] = false + stub_application_setting(settings_to_set) + end + + it 'allows requests over the rate limit' do + (1 + requests_per_period).times do + get url_that_requires_authentication + expect(response).to have_http_status 200 + end + end + end + end + + def api_get_args_with_token_headers(partial_url, token_headers) + ["/api/#{API::API.version}#{partial_url}", nil, token_headers] + end + + def rss_url(user) + "/dashboard/projects.atom?rss_token=#{user.rss_token}" + end + + def private_token_headers(user) + { 'HTTP_PRIVATE_TOKEN' => user.private_token } + end + + def personal_access_token_headers(personal_access_token) + { 
'HTTP_PRIVATE_TOKEN' => personal_access_token.token } + end + + def oauth_token_headers(oauth_access_token) + { 'AUTHORIZATION' => "Bearer #{oauth_access_token.token}" } + end + + def expect_rejection(&block) + yield + + expect(response).to have_http_status(429) + end +end diff --git a/spec/routing/group_routing_spec.rb b/spec/routing/group_routing_spec.rb index 7a4c8304e62..71788028cbf 100644 --- a/spec/routing/group_routing_spec.rb +++ b/spec/routing/group_routing_spec.rb @@ -39,13 +39,19 @@ describe "Groups", "routing" do describe 'legacy redirection' do describe 'labels' do - it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/labels", "/groups/complex.group-namegit/-/labels/" do + it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/labels", "/groups/complex.group-namegit/-/labels" do let(:resource) { create(:group, parent: group, path: 'labels') } end + + context 'when requesting JSON' do + it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/labels.json", "/groups/complex.group-namegit/-/labels.json" do + let(:resource) { create(:group, parent: group, path: 'labels') } + end + end end describe 'group_members' do - it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/group_members", "/groups/complex.group-namegit/-/group_members/" do + it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/group_members", "/groups/complex.group-namegit/-/group_members" do let(:resource) { create(:group, parent: group, path: 'group_members') } end end @@ -60,7 +66,7 @@ describe "Groups", "routing" do end describe 'milestones' do - it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/milestones", "/groups/complex.group-namegit/-/milestones/" do + it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/milestones", "/groups/complex.group-namegit/-/milestones" do let(:resource) { create(:group, parent: group, path: 'milestones') } end @@ -76,18 +82,18 @@ describe "Groups", "routing" do end context 'with a query string' do - it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/milestones?hello=world", "/groups/complex.group-namegit/-/milestones/?hello=world" do + it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/milestones?hello=world", "/groups/complex.group-namegit/-/milestones?hello=world" do let(:resource) { create(:group, parent: group, path: 'milestones') } end - it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/milestones?milestones=/milestones", "/groups/complex.group-namegit/-/milestones/?milestones=/milestones" do + it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/milestones?milestones=/milestones", "/groups/complex.group-namegit/-/milestones?milestones=/milestones" do let(:resource) { create(:group, parent: group, path: 'milestones') } end end end describe 'edit' do - it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/edit", "/groups/complex.group-namegit/-/edit/" do + it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/edit", "/groups/complex.group-namegit/-/edit" do let(:resource) do pending('still rejected because of the wildcard reserved word') create(:group, parent: group, path: 'edit') @@ -96,29 +102,29 @@ describe "Groups", "routing" do end describe 'issues' do - it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/issues", 
"/groups/complex.group-namegit/-/issues/" do + it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/issues", "/groups/complex.group-namegit/-/issues" do let(:resource) { create(:group, parent: group, path: 'issues') } end end describe 'merge_requests' do - it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/merge_requests", "/groups/complex.group-namegit/-/merge_requests/" do + it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/merge_requests", "/groups/complex.group-namegit/-/merge_requests" do let(:resource) { create(:group, parent: group, path: 'merge_requests') } end end describe 'projects' do - it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/projects", "/groups/complex.group-namegit/-/projects/" do + it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/projects", "/groups/complex.group-namegit/-/projects" do let(:resource) { create(:group, parent: group, path: 'projects') } end end describe 'activity' do - it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/activity", "/groups/complex.group-namegit/-/activity/" do + it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/activity", "/groups/complex.group-namegit/-/activity" do let(:resource) { create(:group, parent: group, path: 'activity') } end - it_behaves_like 'redirecting a legacy path', "/groups/activity/activity", "/groups/activity/-/activity/" do + it_behaves_like 'redirecting a legacy path', "/groups/activity/activity", "/groups/activity/-/activity" do let!(:parent) { create(:group, path: 'activity') } let(:resource) { create(:group, parent: parent, path: 'activity') } end diff --git a/spec/rubocop/cop/line_break_after_guard_clauses_spec.rb b/spec/rubocop/cop/line_break_after_guard_clauses_spec.rb new file mode 100644 index 00000000000..8899dc85384 --- /dev/null +++ b/spec/rubocop/cop/line_break_after_guard_clauses_spec.rb @@ -0,0 +1,160 @@ +require 'spec_helper' +require 'rubocop' +require 'rubocop/rspec/support' +require_relative '../../../rubocop/cop/line_break_after_guard_clauses' + +describe RuboCop::Cop::LineBreakAfterGuardClauses do + include CopHelper + + subject(:cop) { described_class.new } + + shared_examples 'examples with guard clause' do |title| + %w[if unless].each do |conditional| + it "flags violation for #{title} #{conditional} without line breaks" do + source = <<~RUBY + #{title} #{conditional} condition + do_stuff + RUBY + inspect_source(cop, source) + + expect(cop.offenses.size).to eq(1) + offense = cop.offenses.first + + expect(offense.line).to eq(1) + expect(cop.highlights).to eq(["#{title} #{conditional} condition"]) + expect(offense.message).to eq('Add a line break after guard clauses') + end + + it "doesn't flag violation for #{title} #{conditional} with line break" do + source = <<~RUBY + #{title} #{conditional} condition + + do_stuff + RUBY + inspect_source(cop, source) + + expect(cop.offenses).to be_empty + end + + it "doesn't flag violation for #{title} #{conditional} on multiple lines without line break" do + source = <<~RUBY + #{conditional} condition + #{title} + end + do_stuff + RUBY + inspect_source(cop, source) + + expect(cop.offenses).to be_empty + end + + it "doesn't flag violation for #{title} #{conditional} without line breaks when followed by end keyword" do + source = <<~RUBY + def test + #{title} #{conditional} condition + end + RUBY + inspect_source(cop, source) + + expect(cop.offenses).to be_empty + end + + it 
"doesn't flag violation for #{title} #{conditional} without line breaks when followed by elsif keyword" do + source = <<~RUBY + if model + #{title} #{conditional} condition + elsif + do_something + end + RUBY + inspect_source(cop, source) + + expect(cop.offenses).to be_empty + end + + it "doesn't flag violation for #{title} #{conditional} without line breaks when followed by else keyword" do + source = <<~RUBY + if model + #{title} #{conditional} condition + else + do_something + end + RUBY + inspect_source(cop, source) + + expect(cop.offenses).to be_empty + end + + it "doesn't flag violation for #{title} #{conditional} without line breaks when followed by when keyword" do + source = <<~RUBY + case model + when condition_a + #{title} #{conditional} condition + when condition_b + do_something + end + RUBY + inspect_source(cop, source) + + expect(cop.offenses).to be_empty + end + + it "doesn't flag violation for #{title} #{conditional} without line breaks when followed by rescue keyword" do + source = <<~RUBY + begin + #{title} #{conditional} condition + rescue StandardError + do_something + end + RUBY + inspect_source(cop, source) + + expect(cop.offenses).to be_empty + end + + it "doesn't flag violation for #{title} #{conditional} without line breaks when followed by ensure keyword" do + source = <<~RUBY + def foo + #{title} #{conditional} condition + ensure + do_something + end + RUBY + inspect_source(cop, source) + + expect(cop.offenses).to be_empty + end + + it "doesn't flag violation for #{title} #{conditional} without line breaks when followed by another guard clause" do + source = <<~RUBY + #{title} #{conditional} condition + #{title} #{conditional} condition + + do_stuff + RUBY + inspect_source(cop, source) + + expect(cop.offenses).to be_empty + end + + it "autocorrects #{title} #{conditional} guard clauses without line break" do + source = <<~RUBY + #{title} #{conditional} condition + do_stuff + RUBY + autocorrected = autocorrect_source(cop, source) + + expected_source = <<~RUBY + #{title} #{conditional} condition + + do_stuff + RUBY + expect(autocorrected).to eql(expected_source) + end + end + end + + %w[return fail raise next break throw].each do |example| + it_behaves_like 'examples with guard clause', example + end +end diff --git a/spec/rubocop/cop/migration/add_column_with_default_to_large_table_spec.rb b/spec/rubocop/cop/migration/add_column_with_default_to_large_table_spec.rb deleted file mode 100644 index 07cb3fc4a2e..00000000000 --- a/spec/rubocop/cop/migration/add_column_with_default_to_large_table_spec.rb +++ /dev/null @@ -1,44 +0,0 @@ -require 'spec_helper' - -require 'rubocop' -require 'rubocop/rspec/support' - -require_relative '../../../../rubocop/cop/migration/add_column_with_default_to_large_table' - -describe RuboCop::Cop::Migration::AddColumnWithDefaultToLargeTable do - include CopHelper - - subject(:cop) { described_class.new } - - context 'in migration' do - before do - allow(cop).to receive(:in_migration?).and_return(true) - end - - described_class::LARGE_TABLES.each do |table| - it "registers an offense for the #{table} table" do - inspect_source(cop, "add_column_with_default :#{table}, :column, default: true") - - aggregate_failures do - expect(cop.offenses.size).to eq(1) - expect(cop.offenses.map(&:line)).to eq([1]) - end - end - end - - it 'registers no offense for non-blacklisted tables' do - inspect_source(cop, "add_column_with_default :table, :column, default: true") - - expect(cop.offenses).to be_empty - end - end - - context 'outside of migration' do 
- it 'registers no offense' do - table = described_class::LARGE_TABLES.sample - inspect_source(cop, "add_column_with_default :#{table}, :column, default: true") - - expect(cop.offenses).to be_empty - end - end -end diff --git a/spec/rubocop/cop/migration/update_large_table_spec.rb b/spec/rubocop/cop/migration/update_large_table_spec.rb new file mode 100644 index 00000000000..17b19e139e4 --- /dev/null +++ b/spec/rubocop/cop/migration/update_large_table_spec.rb @@ -0,0 +1,69 @@ +require 'spec_helper' + +require 'rubocop' +require 'rubocop/rspec/support' + +require_relative '../../../../rubocop/cop/migration/update_large_table' + +describe RuboCop::Cop::Migration::UpdateLargeTable do + include CopHelper + + subject(:cop) { described_class.new } + + context 'in migration' do + before do + allow(cop).to receive(:in_migration?).and_return(true) + end + + shared_examples 'large tables' do |update_method| + described_class::LARGE_TABLES.each do |table| + it "registers an offense for the #{table} table" do + inspect_source(cop, "#{update_method} :#{table}, :column, default: true") + + aggregate_failures do + expect(cop.offenses.size).to eq(1) + expect(cop.offenses.map(&:line)).to eq([1]) + end + end + end + end + + context 'for the add_column_with_default method' do + include_examples 'large tables', 'add_column_with_default' + end + + context 'for the update_column_in_batches method' do + include_examples 'large tables', 'update_column_in_batches' + end + + it 'registers no offense for non-blacklisted tables' do + inspect_source(cop, "add_column_with_default :table, :column, default: true") + + expect(cop.offenses).to be_empty + end + + it 'registers no offense for non-blacklisted methods' do + table = described_class::LARGE_TABLES.sample + + inspect_source(cop, "some_other_method :#{table}, :column, default: true") + + expect(cop.offenses).to be_empty + end + end + + context 'outside of migration' do + let(:table) { described_class::LARGE_TABLES.sample } + + it 'registers no offense for add_column_with_default' do + inspect_source(cop, "add_column_with_default :#{table}, :column, default: true") + + expect(cop.offenses).to be_empty + end + + it 'registers no offense for update_column_in_batches' do + inspect_source(cop, "update_column_in_batches :#{table}, :column, default: true") + + expect(cop.offenses).to be_empty + end + end +end diff --git a/spec/services/base_count_service_spec.rb b/spec/services/base_count_service_spec.rb new file mode 100644 index 00000000000..5ec8ed0976d --- /dev/null +++ b/spec/services/base_count_service_spec.rb @@ -0,0 +1,80 @@ +require 'spec_helper' + +describe BaseCountService, :use_clean_rails_memory_store_caching do + let(:service) { described_class.new } + + describe '#relation_for_count' do + it 'raises NotImplementedError' do + expect { service.relation_for_count }.to raise_error(NotImplementedError) + end + end + + describe '#count' do + it 'returns the number of values' do + expect(service) + .to receive(:cache_key) + .and_return('foo') + + expect(service) + .to receive(:uncached_count) + .and_return(5) + + expect(service.count).to eq(5) + end + end + + describe '#uncached_count' do + it 'returns the uncached number of values' do + expect(service) + .to receive(:relation_for_count) + .and_return(double(:relation, count: 5)) + + expect(service.uncached_count).to eq(5) + end + end + + describe '#refresh_cache' do + it 'refreshes the cache' do + allow(service) + .to receive(:cache_key) + .and_return('foo') + + allow(service) + .to receive(:uncached_count) + 
.and_return(4) + + service.refresh_cache + + expect(Rails.cache.fetch(service.cache_key, raw: service.raw?)).to eq(4) + end + end + + describe '#delete_cache' do + it 'deletes the cache' do + allow(service) + .to receive(:cache_key) + .and_return('foo') + + allow(service) + .to receive(:uncached_count) + .and_return(4) + + service.refresh_cache + service.delete_cache + + expect(Rails.cache.fetch(service.cache_key, raw: service.raw?)).to be_nil + end + end + + describe '#raw?' do + it 'returns false' do + expect(service.raw?).to eq(false) + end + end + + describe '#cache_key' do + it 'raises NotImplementedError' do + expect { service.cache_key }.to raise_error(NotImplementedError) + end + end +end diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb index 98409be4236..5ce6ca70c83 100644 --- a/spec/services/merge_requests/update_service_spec.rb +++ b/spec/services/merge_requests/update_service_spec.rb @@ -80,7 +80,7 @@ describe MergeRequests::UpdateService, :mailer do it 'executes hooks with update action' do expect(service) .to have_received(:execute_hooks) - .with(@merge_request, 'update', old_labels: [], old_assignees: [user3]) + .with(@merge_request, 'update', old_labels: [], old_assignees: [user3], old_total_time_spent: 0) end it 'sends email to user2 about assign of new merge request and email to user3 about merge request unassignment' do diff --git a/spec/services/milestones/destroy_service_spec.rb b/spec/services/milestones/destroy_service_spec.rb index 16e288b3148..af35e17bfa7 100644 --- a/spec/services/milestones/destroy_service_spec.rb +++ b/spec/services/milestones/destroy_service_spec.rb @@ -5,7 +5,7 @@ describe Milestones::DestroyService do let(:project) { create(:project) } let(:milestone) { create(:milestone, title: 'Milestone v1.0', project: project) } let!(:issue) { create(:issue, project: project, milestone: milestone) } - let(:merge_request) { create(:merge_request, source_project: project, milestone: milestone) } + let!(:merge_request) { create(:merge_request, source_project: project, milestone: milestone) } before do project.team << [user, :master] diff --git a/spec/services/milestones/promote_service_spec.rb b/spec/services/milestones/promote_service_spec.rb index 9f2df6d6d19..a0a2843b676 100644 --- a/spec/services/milestones/promote_service_spec.rb +++ b/spec/services/milestones/promote_service_spec.rb @@ -25,6 +25,18 @@ describe Milestones::PromoteService do expect { service.execute(milestone) }.to raise_error(described_class::PromoteMilestoneError) end + + it 'does not promote milestone and update issuables if promoted milestone is not valid' do + issue = create(:issue, milestone: milestone, project: project) + merge_request = create(:merge_request, milestone: milestone, source_project: project) + allow_any_instance_of(Milestone).to receive(:valid?).and_return(false) + + expect { service.execute(milestone) }.to raise_error(described_class::PromoteMilestoneError) + + expect(milestone.reload).to be_persisted + expect(issue.reload.milestone).to eq(milestone) + expect(merge_request.reload.milestone).to eq(milestone) + end end context 'without duplicated milestone titles across projects' do @@ -34,6 +46,16 @@ describe Milestones::PromoteService do expect(promoted_milestone).to be_group_milestone end + it 'does not update issuables without milestone with the new promoted milestone' do + issue_without_milestone = create(:issue, project: project, milestone: nil) + merge_request_without_milestone = 
create(:merge_request, milestone: nil, source_project: project) + + service.execute(milestone) + + expect(issue_without_milestone.reload.milestone).to be_nil + expect(merge_request_without_milestone.reload.milestone).to be_nil + end + it 'sets issuables with new promoted milestone' do issue = create(:issue, milestone: milestone, project: project) merge_request = create(:merge_request, milestone: milestone, source_project: project) @@ -59,6 +81,20 @@ describe Milestones::PromoteService do expect(Milestone.exists?(milestone_2.id)).to be_falsy end + it 'does not update issuables without milestone with the new promoted milestone' do + issue_without_milestone_1 = create(:issue, project: project, milestone: nil) + issue_without_milestone_2 = create(:issue, project: project_2, milestone: nil) + merge_request_without_milestone_1 = create(:merge_request, milestone: nil, source_project: project) + merge_request_without_milestone_2 = create(:merge_request, milestone: nil, source_project: project_2) + + service.execute(milestone) + + expect(issue_without_milestone_1.reload.milestone).to be_nil + expect(issue_without_milestone_2.reload.milestone).to be_nil + expect(merge_request_without_milestone_1.reload.milestone).to be_nil + expect(merge_request_without_milestone_2.reload.milestone).to be_nil + end + it 'sets all issuables with new promoted milestone' do issue = create(:issue, milestone: milestone, project: project) issue_2 = create(:issue, milestone: milestone_2, project: project_2) diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb index b13e12e7c94..db5de572b6d 100644 --- a/spec/services/notification_service_spec.rb +++ b/spec/services/notification_service_spec.rb @@ -280,6 +280,7 @@ describe NotificationService, :mailer do next if member.id == @u_disabled.id # Author should not be notified next if member.id == note.author.id + should_email(member) end @@ -327,6 +328,7 @@ describe NotificationService, :mailer do next if member.id == @u_disabled.id # Author should not be notified next if member.id == note.author.id + should_email(member) end diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb index 2459f371a91..2b1337bee7e 100644 --- a/spec/services/projects/transfer_service_spec.rb +++ b/spec/services/projects/transfer_service_spec.rb @@ -42,6 +42,18 @@ describe Projects::TransferService do expect(service).to receive(:execute_system_hooks) end end + + it 'disk path has moved' do + old_path = project.repository.disk_path + old_full_path = project.repository.full_path + + transfer_project(project, user, group) + + expect(project.repository.disk_path).not_to eq(old_path) + expect(project.repository.full_path).not_to eq(old_full_path) + expect(project.disk_path).not_to eq(old_path) + expect(project.disk_path).to start_with(group.path) + end end context 'when transfer fails' do @@ -188,6 +200,26 @@ describe Projects::TransferService do end end + context 'when hashed storage in use' do + let(:hashed_project) { create(:project, :repository, :hashed, namespace: user.namespace) } + + before do + group.add_owner(user) + end + + it 'does not move the directory' do + old_path = hashed_project.repository.disk_path + old_full_path = hashed_project.repository.full_path + + transfer_project(hashed_project, user, group) + project.reload + + expect(hashed_project.repository.disk_path).to eq(old_path) + expect(hashed_project.repository.full_path).to eq(old_full_path) + expect(hashed_project.disk_path).to 
eq(old_path) + end + end + describe 'refreshing project authorizations' do let(:group) { create(:group) } let(:owner) { project.namespace.owner } diff --git a/spec/services/users/keys_count_service_spec.rb b/spec/services/users/keys_count_service_spec.rb new file mode 100644 index 00000000000..a188cf86772 --- /dev/null +++ b/spec/services/users/keys_count_service_spec.rb @@ -0,0 +1,66 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Users::KeysCountService, :use_clean_rails_memory_store_caching do + let(:user) { create(:user) } + let(:service) { described_class.new(user) } + + describe '#count' do + before do + create(:personal_key, user: user) + end + + it 'returns the number of SSH keys as an Integer' do + expect(service.count).to eq(1) + end + + it 'caches the number of keys in Redis' do + service.delete_cache + + recorder = ActiveRecord::QueryRecorder.new do + 2.times { service.count } + end + + expect(recorder.count).to eq(1) + end + end + + describe '#refresh_cache' do + it 'refreshes the Redis cache' do + Rails.cache.write(service.cache_key, 10) + service.refresh_cache + + expect(Rails.cache.fetch(service.cache_key, raw: true)).to be_zero + end + end + + describe '#delete_cache' do + it 'removes the cache' do + service.count + service.delete_cache + + expect(Rails.cache.fetch(service.cache_key, raw: true)).to be_nil + end + end + + describe '#uncached_count' do + it 'returns the number of SSH keys' do + expect(service.uncached_count).to be_zero + end + + it 'does not cache the number of keys' do + recorder = ActiveRecord::QueryRecorder.new do + 2.times { service.uncached_count } + end + + expect(recorder.count).to be > 0 + end + end + + describe '#cache_key' do + it 'returns the cache key' do + expect(service.cache_key).to eq("users/key-count-service/#{user.id}") + end + end +end diff --git a/spec/support/fixture_helpers.rb b/spec/support/fixture_helpers.rb index 5515c355cea..128aaaf25fe 100644 --- a/spec/support/fixture_helpers.rb +++ b/spec/support/fixture_helpers.rb @@ -1,6 +1,7 @@ module FixtureHelpers def fixture_file(filename) return '' if filename.blank? + File.read(expand_fixture_path(filename)) end diff --git a/spec/support/generate-seed-repo-rb b/spec/support/generate-seed-repo-rb index ef3c8e7087f..4ee33f9725b 100755 --- a/spec/support/generate-seed-repo-rb +++ b/spec/support/generate-seed-repo-rb @@ -33,6 +33,7 @@ end def capture!(cmd, dir) output = IO.popen(cmd, 'r', chdir: dir) { |io| io.read } raise "command failed with #{$?}: #{cmd.join(' ')}" unless $?.success? 
+ output.chomp end diff --git a/spec/support/gitaly.rb b/spec/support/gitaly.rb index 1512b3e0620..c7e8a39a617 100644 --- a/spec/support/gitaly.rb +++ b/spec/support/gitaly.rb @@ -4,6 +4,7 @@ RSpec.configure do |config| allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_return(false) else next if example.metadata[:skip_gitaly_mock] + allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_return(true) end end diff --git a/spec/tasks/gitlab/gitaly_rake_spec.rb b/spec/tasks/gitlab/gitaly_rake_spec.rb index 5dd8fe8eaa5..255f0a37ec8 100644 --- a/spec/tasks/gitlab/gitaly_rake_spec.rb +++ b/spec/tasks/gitlab/gitaly_rake_spec.rb @@ -57,7 +57,7 @@ describe 'gitlab:gitaly namespace rake task' do it 'calls gmake in the gitaly directory' do expect(Gitlab::Popen).to receive(:popen).with(%w[which gmake]).and_return(['/usr/bin/gmake', 0]) - expect(main_object).to receive(:run_command!).with(command_preamble + %w[gmake BUNDLE_PATH=/fake/bundle_path]).and_return(true) + expect(main_object).to receive(:run_command!).with(command_preamble + %w[gmake BUNDLE_FLAGS=--no-deployment]).and_return(true) run_rake_task('gitlab:gitaly:install', clone_path) end @@ -70,7 +70,7 @@ describe 'gitlab:gitaly namespace rake task' do end it 'calls make in the gitaly directory' do - expect(main_object).to receive(:run_command!).with(command_preamble + %w[make BUNDLE_PATH=/fake/bundle_path]).and_return(true) + expect(main_object).to receive(:run_command!).with(command_preamble + %w[make BUNDLE_FLAGS=--no-deployment]).and_return(true) run_rake_task('gitlab:gitaly:install', clone_path) end diff --git a/spec/unicorn/unicorn_spec.rb b/spec/unicorn/unicorn_spec.rb index 41de94d35c2..79a566975df 100644 --- a/spec/unicorn/unicorn_spec.rb +++ b/spec/unicorn/unicorn_spec.rb @@ -71,6 +71,7 @@ describe 'Unicorn' do timeout = 5 * 60 timeout.times do return if File.exist?(ready_file) + pid = Process.waitpid(master_pid, Process::WNOHANG) raise "unicorn failed to boot: #{$?}" unless pid.nil? |
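
Editor's note on the new counting services: base_count_service_spec.rb and users/keys_count_service_spec.rb above only describe behaviour (count, uncached_count, refresh_cache, delete_cache, raw?, cache_key), not the implementation. The Ruby below is a rough sketch of classes that would satisfy those examples; it is inferred from the assertions, not taken from this commit, and details such as the user.keys relation are assumptions.

# Sketch only: inferred from the spec behaviour above, not the shipped code.
class BaseCountService
  def relation_for_count
    raise NotImplementedError, 'relation_for_count must be implemented by a subclass'
  end

  def count
    # Serve from the cache when present, otherwise compute and store the count.
    Rails.cache.fetch(cache_key, raw: raw?) { uncached_count }.to_i
  end

  def uncached_count
    relation_for_count.count
  end

  def refresh_cache
    Rails.cache.write(cache_key, uncached_count, raw: raw?)
  end

  def delete_cache
    Rails.cache.delete(cache_key)
  end

  # Subclasses that store plain integer values override this to return true.
  def raw?
    false
  end

  def cache_key
    raise NotImplementedError, 'cache_key must be implemented by a subclass'
  end
end

module Users
  # Sketch of a subclass matching users/keys_count_service_spec.rb: counts a user's SSH keys.
  class KeysCountService < BaseCountService
    def initialize(user)
      @user = user
    end

    def relation_for_count
      @user.keys # assumes a has_many :keys association on User
    end

    def raw?
      true
    end

    def cache_key
      "users/key-count-service/#{@user.id}"
    end
  end
end

With this shape, Users::KeysCountService.new(user).count would hit the database once and then answer from the cache until refresh_cache or delete_cache runs, which is what the QueryRecorder examples assert.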
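
Similarly, update_large_table_spec.rb above fixes only the cop's name, its LARGE_TABLES constant and its observable behaviour: flag add_column_with_default and update_column_in_batches calls against very large tables, but only inside migrations. The following is a minimal sketch of a cop with that behaviour, not the cop from this commit; the table list is illustrative and in_migration? is assumed to come from a shared helper mixin (the spec stubs it).

# Sketch only: one way to satisfy the examples above.
module RuboCop
  module Cop
    module Migration
      class UpdateLargeTable < RuboCop::Cop::Cop
        MSG = 'Updating a large table in a regular migration is discouraged'.freeze

        # Illustrative list; the real constant ships with the cop.
        LARGE_TABLES = %i[ci_builds events issues merge_requests notes projects users].freeze

        BATCH_UPDATE_METHODS = %i[add_column_with_default update_column_in_batches].freeze

        def on_send(node)
          return unless in_migration?(node) # assumed helper from a migration mixin

          _receiver, method_name, table_arg = *node
          return unless BATCH_UPDATE_METHODS.include?(method_name)
          return unless table_arg && table_arg.sym_type?
          return unless LARGE_TABLES.include?(table_arg.children.first)

          add_offense(node)
        end
      end
    end
  end
end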