From 6f7881ee9dcec34141a8f34fc814b56b366d2b48 Mon Sep 17 00:00:00 2001 From: GitLab Bot Date: Tue, 24 Mar 2020 09:09:25 +0000 Subject: Add latest changes from gitlab-org/gitlab@master --- spec/factories/user_canonical_emails.rb | 8 + spec/frontend/notes/components/note_app_spec.js | 349 --------------------- spec/frontend/notes/components/notes_app_spec.js | 349 +++++++++++++++++++++ spec/frontend/u2f/authenticate_spec.js | 109 +++++++ spec/frontend/u2f/mock_u2f_device.js | 23 ++ spec/frontend/u2f/register_spec.js | 83 +++++ spec/javascripts/u2f/authenticate_spec.js | 102 ------ spec/javascripts/u2f/mock_u2f_device.js | 23 -- spec/javascripts/u2f/register_spec.js | 79 ----- spec/models/clusters/applications/ingress_spec.rb | 58 +++- .../clusters/applications/prometheus_spec.rb | 45 +++ spec/models/project_spec.rb | 18 ++ spec/models/user_canonical_email_spec.rb | 20 ++ .../check_upgrade_progress_service_spec.rb | 94 ++++++ .../applications/prometheus_config_service_spec.rb | 158 ++++++++++ .../applications/prometheus_update_service_spec.rb | 92 ++++++ .../applications/schedule_update_service_spec.rb | 37 +++ spec/services/users/build_service_spec.rb | 8 + spec/services/users/create_service_spec.rb | 3 +- .../users/update_canonical_email_service_spec.rb | 116 +++++++ spec/services/users/update_service_spec.rb | 26 ++ spec/workers/cluster_update_app_worker_spec.rb | 98 ++++++ .../cluster_wait_for_app_update_worker_spec.rb | 27 ++ 23 files changed, 1359 insertions(+), 566 deletions(-) create mode 100644 spec/factories/user_canonical_emails.rb delete mode 100644 spec/frontend/notes/components/note_app_spec.js create mode 100644 spec/frontend/notes/components/notes_app_spec.js create mode 100644 spec/frontend/u2f/authenticate_spec.js create mode 100644 spec/frontend/u2f/mock_u2f_device.js create mode 100644 spec/frontend/u2f/register_spec.js delete mode 100644 spec/javascripts/u2f/authenticate_spec.js delete mode 100644 spec/javascripts/u2f/mock_u2f_device.js delete mode 100644 spec/javascripts/u2f/register_spec.js create mode 100644 spec/models/user_canonical_email_spec.rb create mode 100644 spec/services/clusters/applications/check_upgrade_progress_service_spec.rb create mode 100644 spec/services/clusters/applications/prometheus_config_service_spec.rb create mode 100644 spec/services/clusters/applications/prometheus_update_service_spec.rb create mode 100644 spec/services/clusters/applications/schedule_update_service_spec.rb create mode 100644 spec/services/users/update_canonical_email_service_spec.rb create mode 100644 spec/workers/cluster_update_app_worker_spec.rb create mode 100644 spec/workers/cluster_wait_for_app_update_worker_spec.rb (limited to 'spec') diff --git a/spec/factories/user_canonical_emails.rb b/spec/factories/user_canonical_emails.rb new file mode 100644 index 00000000000..0161d25c525 --- /dev/null +++ b/spec/factories/user_canonical_emails.rb @@ -0,0 +1,8 @@ +# frozen_string_literal: true + +FactoryBot.define do + factory :user_canonical_email do + user + canonical_email { user.email } + end +end diff --git a/spec/frontend/notes/components/note_app_spec.js b/spec/frontend/notes/components/note_app_spec.js deleted file mode 100644 index 2d0cca18647..00000000000 --- a/spec/frontend/notes/components/note_app_spec.js +++ /dev/null @@ -1,349 +0,0 @@ -import $ from 'helpers/jquery'; -import AxiosMockAdapter from 'axios-mock-adapter'; -import Vue from 'vue'; -import { mount } from '@vue/test-utils'; -import { setTestTimeout } from 'helpers/timeout'; -import axios from 
'~/lib/utils/axios_utils'; -import NotesApp from '~/notes/components/notes_app.vue'; -import createStore from '~/notes/stores'; -import '~/behaviors/markdown/render_gfm'; -// TODO: use generated fixture (https://gitlab.com/gitlab-org/gitlab-foss/issues/62491) -import * as mockData from '../../notes/mock_data'; -import * as urlUtility from '~/lib/utils/url_utility'; - -jest.mock('~/user_popovers', () => jest.fn()); - -setTestTimeout(1000); - -describe('note_app', () => { - let axiosMock; - let mountComponent; - let wrapper; - let store; - - /** - * waits for fetchNotes() to complete - */ - const waitForDiscussionsRequest = () => - new Promise(resolve => { - const { vm } = wrapper.find(NotesApp); - const unwatch = vm.$watch('isFetching', isFetching => { - if (isFetching) { - return; - } - - unwatch(); - resolve(); - }); - }); - - beforeEach(() => { - $('body').attr('data-page', 'projects:merge_requests:show'); - - axiosMock = new AxiosMockAdapter(axios); - - store = createStore(); - mountComponent = data => { - const propsData = data || { - noteableData: mockData.noteableDataMock, - notesData: mockData.notesDataMock, - userData: mockData.userDataMock, - }; - - return mount( - { - components: { - NotesApp, - }, - template: `
- -
`, - }, - { - propsData, - store, - }, - ); - }; - }); - - afterEach(() => { - wrapper.destroy(); - axiosMock.restore(); - }); - - describe('set data', () => { - beforeEach(() => { - setFixtures('
'); - - axiosMock.onAny().reply(200, []); - wrapper = mountComponent(); - return waitForDiscussionsRequest(); - }); - - it('should set notes data', () => { - expect(store.state.notesData).toEqual(mockData.notesDataMock); - }); - - it('should set issue data', () => { - expect(store.state.noteableData).toEqual(mockData.noteableDataMock); - }); - - it('should set user data', () => { - expect(store.state.userData).toEqual(mockData.userDataMock); - }); - - it('should fetch discussions', () => { - expect(store.state.discussions).toEqual([]); - }); - - it('updates discussions badge', () => { - expect(document.querySelector('.js-discussions-count').textContent).toEqual('0'); - }); - }); - - describe('render', () => { - beforeEach(() => { - setFixtures('
'); - - axiosMock.onAny().reply(mockData.getIndividualNoteResponse); - wrapper = mountComponent(); - return waitForDiscussionsRequest(); - }); - - it('should render list of notes', () => { - const note = - mockData.INDIVIDUAL_NOTE_RESPONSE_MAP.GET[ - '/gitlab-org/gitlab-foss/issues/26/discussions.json' - ][0].notes[0]; - - expect( - wrapper - .find('.main-notes-list .note-header-author-name') - .text() - .trim(), - ).toEqual(note.author.name); - - expect(wrapper.find('.main-notes-list .note-text').html()).toContain(note.note_html); - }); - - it('should render form', () => { - expect(wrapper.find('.js-main-target-form').name()).toEqual('form'); - expect(wrapper.find('.js-main-target-form textarea').attributes('placeholder')).toEqual( - 'Write a comment or drag your files here…', - ); - }); - - it('should render form comment button as disabled', () => { - expect(wrapper.find('.js-note-new-discussion').attributes('disabled')).toEqual('disabled'); - }); - - it('updates discussions badge', () => { - expect(document.querySelector('.js-discussions-count').textContent).toEqual('2'); - }); - }); - - describe('render with comments disabled', () => { - beforeEach(() => { - setFixtures('
'); - - axiosMock.onAny().reply(mockData.getIndividualNoteResponse); - store.state.commentsDisabled = true; - wrapper = mountComponent(); - return waitForDiscussionsRequest(); - }); - - it('should not render form when commenting is disabled', () => { - expect(wrapper.find('.js-main-target-form').exists()).toBe(false); - }); - - it('should render discussion filter note `commentsDisabled` is true', () => { - expect(wrapper.find('.js-discussion-filter-note').exists()).toBe(true); - }); - }); - - describe('while fetching data', () => { - beforeEach(() => { - setFixtures('
'); - axiosMock.onAny().reply(200, []); - wrapper = mountComponent(); - }); - - afterEach(() => waitForDiscussionsRequest()); - - it('renders skeleton notes', () => { - expect(wrapper.find('.animation-container').exists()).toBe(true); - }); - - it('should render form', () => { - expect(wrapper.find('.js-main-target-form').name()).toEqual('form'); - expect(wrapper.find('.js-main-target-form textarea').attributes('placeholder')).toEqual( - 'Write a comment or drag your files here…', - ); - }); - - it('should not update discussions badge (it should be blank)', () => { - expect(document.querySelector('.js-discussions-count').textContent).toEqual(''); - }); - }); - - describe('update note', () => { - describe('individual note', () => { - beforeEach(() => { - axiosMock.onAny().reply(mockData.getIndividualNoteResponse); - wrapper = mountComponent(); - return waitForDiscussionsRequest().then(() => { - wrapper.find('.js-note-edit').trigger('click'); - }); - }); - - it('renders edit form', () => { - expect(wrapper.find('.js-vue-issue-note-form').exists()).toBe(true); - }); - - it('calls the store action to update the note', () => { - jest.spyOn(axios, 'put').mockImplementation(() => Promise.resolve({ data: {} })); - wrapper.find('.js-vue-issue-note-form').value = 'this is a note'; - wrapper.find('.js-vue-issue-save').trigger('click'); - - expect(axios.put).toHaveBeenCalled(); - }); - }); - - describe('discussion note', () => { - beforeEach(() => { - axiosMock.onAny().reply(mockData.getDiscussionNoteResponse); - wrapper = mountComponent(); - return waitForDiscussionsRequest().then(() => { - wrapper.find('.js-note-edit').trigger('click'); - }); - }); - - it('renders edit form', () => { - expect(wrapper.find('.js-vue-issue-note-form').exists()).toBe(true); - }); - - it('updates the note and resets the edit form', () => { - jest.spyOn(axios, 'put').mockImplementation(() => Promise.resolve({ data: {} })); - wrapper.find('.js-vue-issue-note-form').value = 'this is a note'; - wrapper.find('.js-vue-issue-save').trigger('click'); - - expect(axios.put).toHaveBeenCalled(); - }); - }); - }); - - describe('new note form', () => { - beforeEach(() => { - axiosMock.onAny().reply(mockData.getIndividualNoteResponse); - wrapper = mountComponent(); - return waitForDiscussionsRequest(); - }); - - it('should render markdown docs url', () => { - const { markdownDocsPath } = mockData.notesDataMock; - - expect( - wrapper - .find(`a[href="${markdownDocsPath}"]`) - .text() - .trim(), - ).toEqual('Markdown'); - }); - - it('should render quick action docs url', () => { - const { quickActionsDocsPath } = mockData.notesDataMock; - - expect( - wrapper - .find(`a[href="${quickActionsDocsPath}"]`) - .text() - .trim(), - ).toEqual('quick actions'); - }); - }); - - describe('edit form', () => { - beforeEach(() => { - axiosMock.onAny().reply(mockData.getIndividualNoteResponse); - wrapper = mountComponent(); - return waitForDiscussionsRequest(); - }); - - it('should render markdown docs url', () => { - wrapper.find('.js-note-edit').trigger('click'); - const { markdownDocsPath } = mockData.notesDataMock; - - return Vue.nextTick().then(() => { - expect( - wrapper - .find(`.edit-note a[href="${markdownDocsPath}"]`) - .text() - .trim(), - ).toEqual('Markdown is supported'); - }); - }); - - it('should not render quick actions docs url', () => { - wrapper.find('.js-note-edit').trigger('click'); - const { quickActionsDocsPath } = mockData.notesDataMock; - - return wrapper.vm.$nextTick().then(() => { - expect(wrapper.find(`.edit-note 
a[href="${quickActionsDocsPath}"]`).exists()).toBe(false); - }); - }); - }); - - describe('emoji awards', () => { - beforeEach(() => { - axiosMock.onAny().reply(200, []); - wrapper = mountComponent(); - return waitForDiscussionsRequest(); - }); - - it('dispatches toggleAward after toggleAward event', () => { - const toggleAwardEvent = new CustomEvent('toggleAward', { - detail: { - awardName: 'test', - noteId: 1, - }, - }); - const toggleAwardAction = jest.fn().mockName('toggleAward'); - wrapper.vm.$store.hotUpdate({ - actions: { - toggleAward: toggleAwardAction, - stopPolling() {}, - }, - }); - - wrapper.vm.$parent.$el.dispatchEvent(toggleAwardEvent); - - expect(toggleAwardAction).toHaveBeenCalledTimes(1); - const [, payload] = toggleAwardAction.mock.calls[0]; - - expect(payload).toEqual({ - awardName: 'test', - noteId: 1, - }); - }); - }); - - describe('mounted', () => { - beforeEach(() => { - axiosMock.onAny().reply(mockData.getIndividualNoteResponse); - wrapper = mountComponent(); - return waitForDiscussionsRequest(); - }); - - it('should listen hashchange event', () => { - const notesApp = wrapper.find(NotesApp); - const hash = 'some dummy hash'; - jest.spyOn(urlUtility, 'getLocationHash').mockReturnValueOnce(hash); - const setTargetNoteHash = jest.spyOn(notesApp.vm, 'setTargetNoteHash'); - - window.dispatchEvent(new Event('hashchange'), hash); - - expect(setTargetNoteHash).toHaveBeenCalled(); - }); - }); -}); diff --git a/spec/frontend/notes/components/notes_app_spec.js b/spec/frontend/notes/components/notes_app_spec.js new file mode 100644 index 00000000000..2d0cca18647 --- /dev/null +++ b/spec/frontend/notes/components/notes_app_spec.js @@ -0,0 +1,349 @@ +import $ from 'helpers/jquery'; +import AxiosMockAdapter from 'axios-mock-adapter'; +import Vue from 'vue'; +import { mount } from '@vue/test-utils'; +import { setTestTimeout } from 'helpers/timeout'; +import axios from '~/lib/utils/axios_utils'; +import NotesApp from '~/notes/components/notes_app.vue'; +import createStore from '~/notes/stores'; +import '~/behaviors/markdown/render_gfm'; +// TODO: use generated fixture (https://gitlab.com/gitlab-org/gitlab-foss/issues/62491) +import * as mockData from '../../notes/mock_data'; +import * as urlUtility from '~/lib/utils/url_utility'; + +jest.mock('~/user_popovers', () => jest.fn()); + +setTestTimeout(1000); + +describe('note_app', () => { + let axiosMock; + let mountComponent; + let wrapper; + let store; + + /** + * waits for fetchNotes() to complete + */ + const waitForDiscussionsRequest = () => + new Promise(resolve => { + const { vm } = wrapper.find(NotesApp); + const unwatch = vm.$watch('isFetching', isFetching => { + if (isFetching) { + return; + } + + unwatch(); + resolve(); + }); + }); + + beforeEach(() => { + $('body').attr('data-page', 'projects:merge_requests:show'); + + axiosMock = new AxiosMockAdapter(axios); + + store = createStore(); + mountComponent = data => { + const propsData = data || { + noteableData: mockData.noteableDataMock, + notesData: mockData.notesDataMock, + userData: mockData.userDataMock, + }; + + return mount( + { + components: { + NotesApp, + }, + template: `
+ +
`, + }, + { + propsData, + store, + }, + ); + }; + }); + + afterEach(() => { + wrapper.destroy(); + axiosMock.restore(); + }); + + describe('set data', () => { + beforeEach(() => { + setFixtures('
'); + + axiosMock.onAny().reply(200, []); + wrapper = mountComponent(); + return waitForDiscussionsRequest(); + }); + + it('should set notes data', () => { + expect(store.state.notesData).toEqual(mockData.notesDataMock); + }); + + it('should set issue data', () => { + expect(store.state.noteableData).toEqual(mockData.noteableDataMock); + }); + + it('should set user data', () => { + expect(store.state.userData).toEqual(mockData.userDataMock); + }); + + it('should fetch discussions', () => { + expect(store.state.discussions).toEqual([]); + }); + + it('updates discussions badge', () => { + expect(document.querySelector('.js-discussions-count').textContent).toEqual('0'); + }); + }); + + describe('render', () => { + beforeEach(() => { + setFixtures('
'); + + axiosMock.onAny().reply(mockData.getIndividualNoteResponse); + wrapper = mountComponent(); + return waitForDiscussionsRequest(); + }); + + it('should render list of notes', () => { + const note = + mockData.INDIVIDUAL_NOTE_RESPONSE_MAP.GET[ + '/gitlab-org/gitlab-foss/issues/26/discussions.json' + ][0].notes[0]; + + expect( + wrapper + .find('.main-notes-list .note-header-author-name') + .text() + .trim(), + ).toEqual(note.author.name); + + expect(wrapper.find('.main-notes-list .note-text').html()).toContain(note.note_html); + }); + + it('should render form', () => { + expect(wrapper.find('.js-main-target-form').name()).toEqual('form'); + expect(wrapper.find('.js-main-target-form textarea').attributes('placeholder')).toEqual( + 'Write a comment or drag your files here…', + ); + }); + + it('should render form comment button as disabled', () => { + expect(wrapper.find('.js-note-new-discussion').attributes('disabled')).toEqual('disabled'); + }); + + it('updates discussions badge', () => { + expect(document.querySelector('.js-discussions-count').textContent).toEqual('2'); + }); + }); + + describe('render with comments disabled', () => { + beforeEach(() => { + setFixtures('
'); + + axiosMock.onAny().reply(mockData.getIndividualNoteResponse); + store.state.commentsDisabled = true; + wrapper = mountComponent(); + return waitForDiscussionsRequest(); + }); + + it('should not render form when commenting is disabled', () => { + expect(wrapper.find('.js-main-target-form').exists()).toBe(false); + }); + + it('should render discussion filter note `commentsDisabled` is true', () => { + expect(wrapper.find('.js-discussion-filter-note').exists()).toBe(true); + }); + }); + + describe('while fetching data', () => { + beforeEach(() => { + setFixtures('
'); + axiosMock.onAny().reply(200, []); + wrapper = mountComponent(); + }); + + afterEach(() => waitForDiscussionsRequest()); + + it('renders skeleton notes', () => { + expect(wrapper.find('.animation-container').exists()).toBe(true); + }); + + it('should render form', () => { + expect(wrapper.find('.js-main-target-form').name()).toEqual('form'); + expect(wrapper.find('.js-main-target-form textarea').attributes('placeholder')).toEqual( + 'Write a comment or drag your files here…', + ); + }); + + it('should not update discussions badge (it should be blank)', () => { + expect(document.querySelector('.js-discussions-count').textContent).toEqual(''); + }); + }); + + describe('update note', () => { + describe('individual note', () => { + beforeEach(() => { + axiosMock.onAny().reply(mockData.getIndividualNoteResponse); + wrapper = mountComponent(); + return waitForDiscussionsRequest().then(() => { + wrapper.find('.js-note-edit').trigger('click'); + }); + }); + + it('renders edit form', () => { + expect(wrapper.find('.js-vue-issue-note-form').exists()).toBe(true); + }); + + it('calls the store action to update the note', () => { + jest.spyOn(axios, 'put').mockImplementation(() => Promise.resolve({ data: {} })); + wrapper.find('.js-vue-issue-note-form').value = 'this is a note'; + wrapper.find('.js-vue-issue-save').trigger('click'); + + expect(axios.put).toHaveBeenCalled(); + }); + }); + + describe('discussion note', () => { + beforeEach(() => { + axiosMock.onAny().reply(mockData.getDiscussionNoteResponse); + wrapper = mountComponent(); + return waitForDiscussionsRequest().then(() => { + wrapper.find('.js-note-edit').trigger('click'); + }); + }); + + it('renders edit form', () => { + expect(wrapper.find('.js-vue-issue-note-form').exists()).toBe(true); + }); + + it('updates the note and resets the edit form', () => { + jest.spyOn(axios, 'put').mockImplementation(() => Promise.resolve({ data: {} })); + wrapper.find('.js-vue-issue-note-form').value = 'this is a note'; + wrapper.find('.js-vue-issue-save').trigger('click'); + + expect(axios.put).toHaveBeenCalled(); + }); + }); + }); + + describe('new note form', () => { + beforeEach(() => { + axiosMock.onAny().reply(mockData.getIndividualNoteResponse); + wrapper = mountComponent(); + return waitForDiscussionsRequest(); + }); + + it('should render markdown docs url', () => { + const { markdownDocsPath } = mockData.notesDataMock; + + expect( + wrapper + .find(`a[href="${markdownDocsPath}"]`) + .text() + .trim(), + ).toEqual('Markdown'); + }); + + it('should render quick action docs url', () => { + const { quickActionsDocsPath } = mockData.notesDataMock; + + expect( + wrapper + .find(`a[href="${quickActionsDocsPath}"]`) + .text() + .trim(), + ).toEqual('quick actions'); + }); + }); + + describe('edit form', () => { + beforeEach(() => { + axiosMock.onAny().reply(mockData.getIndividualNoteResponse); + wrapper = mountComponent(); + return waitForDiscussionsRequest(); + }); + + it('should render markdown docs url', () => { + wrapper.find('.js-note-edit').trigger('click'); + const { markdownDocsPath } = mockData.notesDataMock; + + return Vue.nextTick().then(() => { + expect( + wrapper + .find(`.edit-note a[href="${markdownDocsPath}"]`) + .text() + .trim(), + ).toEqual('Markdown is supported'); + }); + }); + + it('should not render quick actions docs url', () => { + wrapper.find('.js-note-edit').trigger('click'); + const { quickActionsDocsPath } = mockData.notesDataMock; + + return wrapper.vm.$nextTick().then(() => { + expect(wrapper.find(`.edit-note 
a[href="${quickActionsDocsPath}"]`).exists()).toBe(false); + }); + }); + }); + + describe('emoji awards', () => { + beforeEach(() => { + axiosMock.onAny().reply(200, []); + wrapper = mountComponent(); + return waitForDiscussionsRequest(); + }); + + it('dispatches toggleAward after toggleAward event', () => { + const toggleAwardEvent = new CustomEvent('toggleAward', { + detail: { + awardName: 'test', + noteId: 1, + }, + }); + const toggleAwardAction = jest.fn().mockName('toggleAward'); + wrapper.vm.$store.hotUpdate({ + actions: { + toggleAward: toggleAwardAction, + stopPolling() {}, + }, + }); + + wrapper.vm.$parent.$el.dispatchEvent(toggleAwardEvent); + + expect(toggleAwardAction).toHaveBeenCalledTimes(1); + const [, payload] = toggleAwardAction.mock.calls[0]; + + expect(payload).toEqual({ + awardName: 'test', + noteId: 1, + }); + }); + }); + + describe('mounted', () => { + beforeEach(() => { + axiosMock.onAny().reply(mockData.getIndividualNoteResponse); + wrapper = mountComponent(); + return waitForDiscussionsRequest(); + }); + + it('should listen hashchange event', () => { + const notesApp = wrapper.find(NotesApp); + const hash = 'some dummy hash'; + jest.spyOn(urlUtility, 'getLocationHash').mockReturnValueOnce(hash); + const setTargetNoteHash = jest.spyOn(notesApp.vm, 'setTargetNoteHash'); + + window.dispatchEvent(new Event('hashchange'), hash); + + expect(setTargetNoteHash).toHaveBeenCalled(); + }); + }); +}); diff --git a/spec/frontend/u2f/authenticate_spec.js b/spec/frontend/u2f/authenticate_spec.js new file mode 100644 index 00000000000..1d39c4857ae --- /dev/null +++ b/spec/frontend/u2f/authenticate_spec.js @@ -0,0 +1,109 @@ +import $ from 'jquery'; +import U2FAuthenticate from '~/u2f/authenticate'; +import 'vendor/u2f'; +import MockU2FDevice from './mock_u2f_device'; + +describe('U2FAuthenticate', () => { + let u2fDevice; + let container; + let component; + + preloadFixtures('u2f/authenticate.html'); + + beforeEach(() => { + loadFixtures('u2f/authenticate.html'); + u2fDevice = new MockU2FDevice(); + container = $('#js-authenticate-u2f'); + component = new U2FAuthenticate( + container, + '#js-login-u2f-form', + { + sign_requests: [], + }, + document.querySelector('#js-login-2fa-device'), + document.querySelector('.js-2fa-form'), + ); + }); + + describe('with u2f unavailable', () => { + let oldu2f; + + beforeEach(() => { + jest.spyOn(component, 'switchToFallbackUI').mockImplementation(() => {}); + oldu2f = window.u2f; + window.u2f = null; + }); + + afterEach(() => { + window.u2f = oldu2f; + }); + + it('falls back to normal 2fa', done => { + component + .start() + .then(() => { + expect(component.switchToFallbackUI).toHaveBeenCalled(); + done(); + }) + .catch(done.fail); + }); + }); + + describe('with u2f available', () => { + beforeEach(done => { + // bypass automatic form submission within renderAuthenticated + jest.spyOn(component, 'renderAuthenticated').mockReturnValue(true); + u2fDevice = new MockU2FDevice(); + + component + .start() + .then(done) + .catch(done.fail); + }); + + it('allows authenticating via a U2F device', () => { + const inProgressMessage = container.find('p'); + + expect(inProgressMessage.text()).toContain('Trying to communicate with your device'); + u2fDevice.respondToAuthenticateRequest({ + deviceData: 'this is data from the device', + }); + + expect(component.renderAuthenticated).toHaveBeenCalledWith( + '{"deviceData":"this is data from the device"}', + ); + }); + + describe('errors', () => { + it('displays an error message', () => { + const setupButton = 
container.find('#js-login-u2f-device');
+        setupButton.trigger('click');
+        u2fDevice.respondToAuthenticateRequest({
+          errorCode: 'error!',
+        });
+        const errorMessage = container.find('p');
+
+        expect(errorMessage.text()).toContain('There was a problem communicating with your device');
+      });
+
+      it('allows retrying authentication after an error', () => {
+        let setupButton = container.find('#js-login-u2f-device');
+        setupButton.trigger('click');
+        u2fDevice.respondToAuthenticateRequest({
+          errorCode: 'error!',
+        });
+        const retryButton = container.find('#js-u2f-try-again');
+        retryButton.trigger('click');
+        setupButton = container.find('#js-login-u2f-device');
+        setupButton.trigger('click');
+        u2fDevice.respondToAuthenticateRequest({
+          deviceData: 'this is data from the device',
+        });
+
+        expect(component.renderAuthenticated).toHaveBeenCalledWith(
+          '{"deviceData":"this is data from the device"}',
+        );
+      });
+    });
+  });
+});
diff --git a/spec/frontend/u2f/mock_u2f_device.js b/spec/frontend/u2f/mock_u2f_device.js
new file mode 100644
index 00000000000..ec8425a4e3e
--- /dev/null
+++ b/spec/frontend/u2f/mock_u2f_device.js
@@ -0,0 +1,23 @@
+/* eslint-disable no-unused-expressions */
+
+export default class MockU2FDevice {
+  constructor() {
+    this.respondToAuthenticateRequest = this.respondToAuthenticateRequest.bind(this);
+    this.respondToRegisterRequest = this.respondToRegisterRequest.bind(this);
+    window.u2f || (window.u2f = {});
+    window.u2f.register = (appId, registerRequests, signRequests, callback) => {
+      this.registerCallback = callback;
+    };
+    window.u2f.sign = (appId, challenges, signRequests, callback) => {
+      this.authenticateCallback = callback;
+    };
+  }
+
+  respondToRegisterRequest(params) {
+    return this.registerCallback(params);
+  }
+
+  respondToAuthenticateRequest(params) {
+    return this.authenticateCallback(params);
+  }
+}
diff --git a/spec/frontend/u2f/register_spec.js b/spec/frontend/u2f/register_spec.js
new file mode 100644
index 00000000000..a4395a2123a
--- /dev/null
+++ b/spec/frontend/u2f/register_spec.js
@@ -0,0 +1,83 @@
+import $ from 'jquery';
+import U2FRegister from '~/u2f/register';
+import 'vendor/u2f';
+import MockU2FDevice from './mock_u2f_device';
+
+describe('U2FRegister', () => {
+  let u2fDevice;
+  let container;
+  let component;
+
+  preloadFixtures('u2f/register.html');
+
+  beforeEach(done => {
+    loadFixtures('u2f/register.html');
+    u2fDevice = new MockU2FDevice();
+    container = $('#js-register-u2f');
+    component = new U2FRegister(container, $('#js-register-u2f-templates'), {}, 'token');
+    component
+      .start()
+      .then(done)
+      .catch(done.fail);
+  });
+
+  it('allows registering a U2F device', () => {
+    const setupButton = container.find('#js-setup-u2f-device');
+
+    expect(setupButton.text()).toBe('Set up new U2F device');
+    setupButton.trigger('click');
+    const inProgressMessage = container.children('p');
+
+    expect(inProgressMessage.text()).toContain('Trying to communicate with your device');
+    u2fDevice.respondToRegisterRequest({
+      deviceData: 'this is data from the device',
+    });
+    const registeredMessage = container.find('p');
+    const deviceResponse = container.find('#js-device-response');
+
+    expect(registeredMessage.text()).toContain('Your device was successfully set up!');
+    expect(deviceResponse.val()).toBe('{"deviceData":"this is data from the device"}');
+  });
+
+  describe('errors', () => {
+    it("doesn't allow the same device to be registered twice (for the same user)", () => {
+      const setupButton = container.find('#js-setup-u2f-device');
+
setupButton.trigger('click'); + u2fDevice.respondToRegisterRequest({ + errorCode: 4, + }); + const errorMessage = container.find('p'); + + expect(errorMessage.text()).toContain('already been registered with us'); + }); + + it('displays an error message for other errors', () => { + const setupButton = container.find('#js-setup-u2f-device'); + setupButton.trigger('click'); + u2fDevice.respondToRegisterRequest({ + errorCode: 'error!', + }); + const errorMessage = container.find('p'); + + expect(errorMessage.text()).toContain('There was a problem communicating with your device'); + }); + + it('allows retrying registration after an error', () => { + let setupButton = container.find('#js-setup-u2f-device'); + setupButton.trigger('click'); + u2fDevice.respondToRegisterRequest({ + errorCode: 'error!', + }); + const retryButton = container.find('#U2FTryAgain'); + retryButton.trigger('click'); + setupButton = container.find('#js-setup-u2f-device'); + setupButton.trigger('click'); + u2fDevice.respondToRegisterRequest({ + deviceData: 'this is data from the device', + }); + const registeredMessage = container.find('p'); + + expect(registeredMessage.text()).toContain('Your device was successfully set up!'); + }); + }); +}); diff --git a/spec/javascripts/u2f/authenticate_spec.js b/spec/javascripts/u2f/authenticate_spec.js deleted file mode 100644 index 8f9cb270729..00000000000 --- a/spec/javascripts/u2f/authenticate_spec.js +++ /dev/null @@ -1,102 +0,0 @@ -import $ from 'jquery'; -import U2FAuthenticate from '~/u2f/authenticate'; -import 'vendor/u2f'; -import MockU2FDevice from './mock_u2f_device'; - -describe('U2FAuthenticate', function() { - preloadFixtures('u2f/authenticate.html'); - - beforeEach(() => { - loadFixtures('u2f/authenticate.html'); - this.u2fDevice = new MockU2FDevice(); - this.container = $('#js-authenticate-u2f'); - this.component = new U2FAuthenticate( - this.container, - '#js-login-u2f-form', - { - sign_requests: [], - }, - document.querySelector('#js-login-2fa-device'), - document.querySelector('.js-2fa-form'), - ); - }); - - describe('with u2f unavailable', () => { - beforeEach(() => { - spyOn(this.component, 'switchToFallbackUI'); - this.oldu2f = window.u2f; - window.u2f = null; - }); - - afterEach(() => { - window.u2f = this.oldu2f; - }); - - it('falls back to normal 2fa', done => { - this.component - .start() - .then(() => { - expect(this.component.switchToFallbackUI).toHaveBeenCalled(); - done(); - }) - .catch(done.fail); - }); - }); - - describe('with u2f available', () => { - beforeEach(done => { - // bypass automatic form submission within renderAuthenticated - spyOn(this.component, 'renderAuthenticated').and.returnValue(true); - this.u2fDevice = new MockU2FDevice(); - - this.component - .start() - .then(done) - .catch(done.fail); - }); - - it('allows authenticating via a U2F device', () => { - const inProgressMessage = this.container.find('p'); - - expect(inProgressMessage.text()).toContain('Trying to communicate with your device'); - this.u2fDevice.respondToAuthenticateRequest({ - deviceData: 'this is data from the device', - }); - - expect(this.component.renderAuthenticated).toHaveBeenCalledWith( - '{"deviceData":"this is data from the device"}', - ); - }); - - describe('errors', () => { - it('displays an error message', () => { - const setupButton = this.container.find('#js-login-u2f-device'); - setupButton.trigger('click'); - this.u2fDevice.respondToAuthenticateRequest({ - errorCode: 'error!', - }); - const errorMessage = this.container.find('p'); - - 
expect(errorMessage.text()).toContain('There was a problem communicating with your device'); - }); - return it('allows retrying authentication after an error', () => { - let setupButton = this.container.find('#js-login-u2f-device'); - setupButton.trigger('click'); - this.u2fDevice.respondToAuthenticateRequest({ - errorCode: 'error!', - }); - const retryButton = this.container.find('#js-u2f-try-again'); - retryButton.trigger('click'); - setupButton = this.container.find('#js-login-u2f-device'); - setupButton.trigger('click'); - this.u2fDevice.respondToAuthenticateRequest({ - deviceData: 'this is data from the device', - }); - - expect(this.component.renderAuthenticated).toHaveBeenCalledWith( - '{"deviceData":"this is data from the device"}', - ); - }); - }); - }); -}); diff --git a/spec/javascripts/u2f/mock_u2f_device.js b/spec/javascripts/u2f/mock_u2f_device.js deleted file mode 100644 index ec8425a4e3e..00000000000 --- a/spec/javascripts/u2f/mock_u2f_device.js +++ /dev/null @@ -1,23 +0,0 @@ -/* eslint-disable no-unused-expressions */ - -export default class MockU2FDevice { - constructor() { - this.respondToAuthenticateRequest = this.respondToAuthenticateRequest.bind(this); - this.respondToRegisterRequest = this.respondToRegisterRequest.bind(this); - window.u2f || (window.u2f = {}); - window.u2f.register = (appId, registerRequests, signRequests, callback) => { - this.registerCallback = callback; - }; - window.u2f.sign = (appId, challenges, signRequests, callback) => { - this.authenticateCallback = callback; - }; - } - - respondToRegisterRequest(params) { - return this.registerCallback(params); - } - - respondToAuthenticateRequest(params) { - return this.authenticateCallback(params); - } -} diff --git a/spec/javascripts/u2f/register_spec.js b/spec/javascripts/u2f/register_spec.js deleted file mode 100644 index a75ceca9f4c..00000000000 --- a/spec/javascripts/u2f/register_spec.js +++ /dev/null @@ -1,79 +0,0 @@ -import $ from 'jquery'; -import U2FRegister from '~/u2f/register'; -import 'vendor/u2f'; -import MockU2FDevice from './mock_u2f_device'; - -describe('U2FRegister', function() { - preloadFixtures('u2f/register.html'); - - beforeEach(done => { - loadFixtures('u2f/register.html'); - this.u2fDevice = new MockU2FDevice(); - this.container = $('#js-register-u2f'); - this.component = new U2FRegister(this.container, $('#js-register-u2f-templates'), {}, 'token'); - this.component - .start() - .then(done) - .catch(done.fail); - }); - - it('allows registering a U2F device', () => { - const setupButton = this.container.find('#js-setup-u2f-device'); - - expect(setupButton.text()).toBe('Set up new U2F device'); - setupButton.trigger('click'); - const inProgressMessage = this.container.children('p'); - - expect(inProgressMessage.text()).toContain('Trying to communicate with your device'); - this.u2fDevice.respondToRegisterRequest({ - deviceData: 'this is data from the device', - }); - const registeredMessage = this.container.find('p'); - const deviceResponse = this.container.find('#js-device-response'); - - expect(registeredMessage.text()).toContain('Your device was successfully set up!'); - expect(deviceResponse.val()).toBe('{"deviceData":"this is data from the device"}'); - }); - - describe('errors', () => { - it("doesn't allow the same device to be registered twice (for the same user", () => { - const setupButton = this.container.find('#js-setup-u2f-device'); - setupButton.trigger('click'); - this.u2fDevice.respondToRegisterRequest({ - errorCode: 4, - }); - const errorMessage = 
this.container.find('p'); - - expect(errorMessage.text()).toContain('already been registered with us'); - }); - - it('displays an error message for other errors', () => { - const setupButton = this.container.find('#js-setup-u2f-device'); - setupButton.trigger('click'); - this.u2fDevice.respondToRegisterRequest({ - errorCode: 'error!', - }); - const errorMessage = this.container.find('p'); - - expect(errorMessage.text()).toContain('There was a problem communicating with your device'); - }); - - it('allows retrying registration after an error', () => { - let setupButton = this.container.find('#js-setup-u2f-device'); - setupButton.trigger('click'); - this.u2fDevice.respondToRegisterRequest({ - errorCode: 'error!', - }); - const retryButton = this.container.find('#U2FTryAgain'); - retryButton.trigger('click'); - setupButton = this.container.find('#js-setup-u2f-device'); - setupButton.trigger('click'); - this.u2fDevice.respondToRegisterRequest({ - deviceData: 'this is data from the device', - }); - const registeredMessage = this.container.find('p'); - - expect(registeredMessage.text()).toContain('Your device was successfully set up!'); - }); - }); -}); diff --git a/spec/models/clusters/applications/ingress_spec.rb b/spec/models/clusters/applications/ingress_spec.rb index ba5f48ce6b3..64d667f40f6 100644 --- a/spec/models/clusters/applications/ingress_spec.rb +++ b/spec/models/clusters/applications/ingress_spec.rb @@ -21,26 +21,60 @@ describe Clusters::Applications::Ingress do describe '#can_uninstall?' do subject { ingress.can_uninstall? } - it 'returns true if external ip is set and no application exists' do - ingress.external_ip = 'IP' + context 'with jupyter installed' do + before do + create(:clusters_applications_jupyter, :installed, cluster: ingress.cluster) + end - is_expected.to be_truthy - end + it 'returns false if external_ip_or_hostname? is true' do + ingress.external_ip = 'IP' - it 'returns false if application_jupyter_nil_or_installable? is false' do - create(:clusters_applications_jupyter, :installed, cluster: ingress.cluster) + is_expected.to be_falsey + end - is_expected.to be_falsey + it 'returns false if external_ip_or_hostname? is false' do + is_expected.to be_falsey + end end - it 'returns false if application_elastic_stack_nil_or_installable? is false' do - create(:clusters_applications_elastic_stack, :installed, cluster: ingress.cluster) + context 'with jupyter installable' do + before do + create(:clusters_applications_jupyter, :installable, cluster: ingress.cluster) + end + + it 'returns true if external_ip_or_hostname? is true' do + ingress.external_ip = 'IP' + + is_expected.to be_truthy + end - is_expected.to be_falsey + it 'returns false if external_ip_or_hostname? is false' do + is_expected.to be_falsey + end end - it 'returns false if external_ip_or_hostname? is false' do - is_expected.to be_falsey + context 'with jupyter nil' do + it 'returns false if external_ip_or_hostname? is false' do + is_expected.to be_falsey + end + + context 'if external_ip_or_hostname? 
is true' do + context 'with IP' do + before do + ingress.external_ip = 'IP' + end + + it { is_expected.to be_truthy } + end + + context 'with hostname' do + before do + ingress.external_hostname = 'example.com' + end + + it { is_expected.to be_truthy } + end + end end end diff --git a/spec/models/clusters/applications/prometheus_spec.rb b/spec/models/clusters/applications/prometheus_spec.rb index ecb87910d2d..ce341e67c14 100644 --- a/spec/models/clusters/applications/prometheus_spec.rb +++ b/spec/models/clusters/applications/prometheus_spec.rb @@ -39,6 +39,19 @@ describe Clusters::Applications::Prometheus do end end + describe 'transition to updating' do + let(:project) { create(:project) } + let(:cluster) { create(:cluster, projects: [project]) } + + subject { create(:clusters_applications_prometheus, :installed, cluster: cluster) } + + it 'sets last_update_started_at to now' do + Timecop.freeze do + expect { subject.make_updating }.to change { subject.reload.last_update_started_at }.to be_within(1.second).of(Time.now) + end + end + end + describe '#can_uninstall?' do let(:prometheus) { create(:clusters_applications_prometheus) } @@ -331,6 +344,38 @@ describe Clusters::Applications::Prometheus do end end + describe '#updated_since?' do + let(:cluster) { create(:cluster) } + let(:prometheus_app) { build(:clusters_applications_prometheus, cluster: cluster) } + let(:timestamp) { Time.now - 5.minutes } + + around do |example| + Timecop.freeze { example.run } + end + + before do + prometheus_app.last_update_started_at = Time.now + end + + context 'when app does not have status failed' do + it 'returns true when last update started after the timestamp' do + expect(prometheus_app.updated_since?(timestamp)).to be true + end + + it 'returns false when last update started before the timestamp' do + expect(prometheus_app.updated_since?(Time.now + 5.minutes)).to be false + end + end + + context 'when app has status failed' do + it 'returns false when last update started after the timestamp' do + prometheus_app.status = 6 + + expect(prometheus_app.updated_since?(timestamp)).to be false + end + end + end + describe 'alert manager token' do subject { create(:clusters_applications_prometheus) } diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb index ceb6382eb6c..f0423937710 100644 --- a/spec/models/project_spec.rb +++ b/spec/models/project_spec.rb @@ -5938,6 +5938,24 @@ describe Project do end end + describe '#environments_for_scope' do + let_it_be(:project, reload: true) { create(:project) } + + before do + create_list(:environment, 2, project: project) + end + + it 'retrieves all project environments when using the * wildcard' do + expect(project.environments_for_scope("*")).to eq(project.environments) + end + + it 'retrieves a specific project environment when using the name of that environment' do + environment = project.environments.first + + expect(project.environments_for_scope(environment.name)).to eq([environment]) + end + end + def finish_job(export_job) export_job.start export_job.finish diff --git a/spec/models/user_canonical_email_spec.rb b/spec/models/user_canonical_email_spec.rb new file mode 100644 index 00000000000..54a4e968033 --- /dev/null +++ b/spec/models/user_canonical_email_spec.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe UserCanonicalEmail do + it { is_expected.to belong_to(:user) } + + describe 'validations' do + describe 'canonical_email' do + it { is_expected.to validate_presence_of(:canonical_email) } + + it 
'validates email address', :aggregate_failures do + expect(build(:user_canonical_email, canonical_email: 'nonsense')).not_to be_valid + expect(build(:user_canonical_email, canonical_email: '@nonsense')).not_to be_valid + expect(build(:user_canonical_email, canonical_email: '@nonsense@')).not_to be_valid + expect(build(:user_canonical_email, canonical_email: 'nonsense@')).not_to be_valid + end + end + end +end diff --git a/spec/services/clusters/applications/check_upgrade_progress_service_spec.rb b/spec/services/clusters/applications/check_upgrade_progress_service_spec.rb new file mode 100644 index 00000000000..c08b618fe6a --- /dev/null +++ b/spec/services/clusters/applications/check_upgrade_progress_service_spec.rb @@ -0,0 +1,94 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Clusters::Applications::CheckUpgradeProgressService do + RESCHEDULE_PHASES = ::Gitlab::Kubernetes::Pod::PHASES - + [::Gitlab::Kubernetes::Pod::SUCCEEDED, ::Gitlab::Kubernetes::Pod::FAILED, ::Gitlab].freeze + + let(:application) { create(:clusters_applications_prometheus, :updating) } + let(:service) { described_class.new(application) } + let(:phase) { ::Gitlab::Kubernetes::Pod::UNKNOWN } + let(:errors) { nil } + + shared_examples 'a terminated upgrade' do + it 'removes the POD' do + expect(service).to receive(:remove_pod).once + + service.execute + end + end + + shared_examples 'a not yet terminated upgrade' do |a_phase| + let(:phase) { a_phase } + + context "when phase is #{a_phase}" do + context 'when not timed out' do + it 'reschedule a new check' do + expect(::ClusterWaitForAppUpdateWorker).to receive(:perform_in).once + expect(service).not_to receive(:remove_pod) + + service.execute + + expect(application).to be_updating + expect(application.status_reason).to be_nil + end + end + + context 'when timed out' do + let(:application) { create(:clusters_applications_prometheus, :timed_out, :updating) } + + it_behaves_like 'a terminated upgrade' + + it 'make the application update errored' do + expect(::ClusterWaitForAppUpdateWorker).not_to receive(:perform_in) + + service.execute + + expect(application).to be_update_errored + expect(application.status_reason).to eq("Update timed out") + end + end + end + end + + before do + allow(service).to receive(:phase).once.and_return(phase) + + allow(service).to receive(:errors).and_return(errors) + allow(service).to receive(:remove_pod).and_return(nil) + end + + describe '#execute' do + context 'when upgrade pod succeeded' do + let(:phase) { ::Gitlab::Kubernetes::Pod::SUCCEEDED } + + it_behaves_like 'a terminated upgrade' + + it 'make the application upgraded' do + expect(::ClusterWaitForAppUpdateWorker).not_to receive(:perform_in) + + service.execute + + expect(application).to be_updated + expect(application.status_reason).to be_nil + end + end + + context 'when upgrade pod failed' do + let(:phase) { ::Gitlab::Kubernetes::Pod::FAILED } + let(:errors) { 'test installation failed' } + + it_behaves_like 'a terminated upgrade' + + it 'make the application update errored' do + service.execute + + expect(application).to be_update_errored + expect(application.status_reason).to eq(errors) + end + end + + RESCHEDULE_PHASES.each { |phase| it_behaves_like 'a not yet terminated upgrade', phase } + end +end diff --git a/spec/services/clusters/applications/prometheus_config_service_spec.rb b/spec/services/clusters/applications/prometheus_config_service_spec.rb new file mode 100644 index 00000000000..993a697b543 --- /dev/null +++ 
b/spec/services/clusters/applications/prometheus_config_service_spec.rb @@ -0,0 +1,158 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Clusters::Applications::PrometheusConfigService do + include Gitlab::Routing.url_helpers + + let_it_be(:project) { create(:project) } + let_it_be(:production) { create(:environment, project: project) } + let_it_be(:cluster) { create(:cluster, :provided_by_user, projects: [project]) } + + let(:application) do + create(:clusters_applications_prometheus, :installed, cluster: cluster) + end + + subject { described_class.new(project, cluster, application).execute(input) } + + describe '#execute' do + let(:input) do + YAML.load_file(Rails.root.join('vendor/prometheus/values.yaml')) + end + + context 'with alerts' do + let!(:alert) do + create(:prometheus_alert, project: project, environment: production) + end + + it 'enables alertmanager' do + expect(subject.dig('alertmanager', 'enabled')).to eq(true) + end + + describe 'alertmanagerFiles' do + let(:alertmanager) do + subject.dig('alertmanagerFiles', 'alertmanager.yml') + end + + it 'contains receivers and route' do + expect(alertmanager.keys).to contain_exactly('receivers', 'route') + end + + describe 'receivers' do + let(:receiver) { alertmanager.dig('receivers', 0) } + let(:webhook_config) { receiver.dig('webhook_configs', 0) } + + let(:notify_url) do + notify_project_prometheus_alerts_url(project, format: :json) + end + + it 'sets receiver' do + expect(receiver['name']).to eq('gitlab') + end + + it 'sets webhook_config' do + expect(webhook_config).to eq( + 'url' => notify_url, + 'send_resolved' => true, + 'http_config' => { + 'bearer_token' => application.alert_manager_token + } + ) + end + end + + describe 'route' do + let(:route) { alertmanager.fetch('route') } + + it 'sets route' do + expect(route).to eq( + 'receiver' => 'gitlab', + 'group_wait' => '30s', + 'group_interval' => '5m', + 'repeat_interval' => '4h' + ) + end + end + end + + describe 'serverFiles' do + let(:groups) { subject.dig('serverFiles', 'alerts', 'groups') } + + it 'sets the alerts' do + rules = groups.dig(0, 'rules') + expect(rules.size).to eq(1) + + expect(rules.first['alert']).to eq(alert.title) + end + + context 'with parameterized queries' do + let!(:alert) do + create(:prometheus_alert, + project: project, + environment: production, + prometheus_metric: metric) + end + + let(:metric) do + create(:prometheus_metric, query: query, project: project) + end + + let(:query) { '%{ci_environment_slug}' } + + it 'substitutes query variables' do + expect(Gitlab::Prometheus::QueryVariables) + .to receive(:call) + .with(production) + .and_call_original + + expr = groups.dig(0, 'rules', 0, 'expr') + expect(expr).to include(production.name) + end + end + + context 'with multiple environments' do + let(:staging) { create(:environment, project: project) } + + before do + create(:prometheus_alert, project: project, environment: production) + create(:prometheus_alert, project: project, environment: staging) + end + + it 'sets alerts for multiple environment' do + env_names = groups.map { |group| group['name'] } + expect(env_names).to contain_exactly( + "#{production.name}.rules", + "#{staging.name}.rules" + ) + end + + it 'substitutes query variables once per environment' do + expect(Gitlab::Prometheus::QueryVariables) + .to receive(:call) + .with(production) + + expect(Gitlab::Prometheus::QueryVariables) + .to receive(:call) + .with(staging) + + subject + end + end + end + end + + context 'without alerts' do + it 'disables 
alertmanager' do + expect(subject.dig('alertmanager', 'enabled')).to eq(false) + end + + it 'removes alertmanagerFiles' do + expect(subject).not_to include('alertmanagerFiles') + end + + it 'removes alerts' do + expect(subject.dig('serverFiles', 'alerts')).to eq({}) + end + end + end +end diff --git a/spec/services/clusters/applications/prometheus_update_service_spec.rb b/spec/services/clusters/applications/prometheus_update_service_spec.rb new file mode 100644 index 00000000000..078b01d2777 --- /dev/null +++ b/spec/services/clusters/applications/prometheus_update_service_spec.rb @@ -0,0 +1,92 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Clusters::Applications::PrometheusUpdateService do + describe '#execute' do + let(:project) { create(:project) } + let(:environment) { create(:environment, project: project) } + let(:cluster) { create(:cluster, :provided_by_user, :with_installed_helm, projects: [project]) } + let(:application) { create(:clusters_applications_prometheus, :installed, cluster: cluster) } + let(:empty_alerts_values_update_yaml) { "---\nalertmanager:\n enabled: false\nserverFiles:\n alerts: {}\n" } + let!(:patch_command) { application.patch_command(empty_alerts_values_update_yaml) } + let(:helm_client) { instance_double(::Gitlab::Kubernetes::Helm::API) } + + subject(:service) { described_class.new(application, project) } + + before do + allow(service).to receive(:patch_command).with(empty_alerts_values_update_yaml).and_return(patch_command) + allow(service).to receive(:helm_api).and_return(helm_client) + end + + context 'when there are no errors' do + before do + expect(helm_client).to receive(:update).with(patch_command) + + allow(::ClusterWaitForAppUpdateWorker) + .to receive(:perform_in) + .and_return(nil) + end + + it 'make the application updating' do + expect(application.cluster).not_to be_nil + + service.execute + + expect(application).to be_updating + end + + it 'updates current config' do + prometheus_config_service = spy(:prometheus_config_service) + + expect(Clusters::Applications::PrometheusConfigService) + .to receive(:new) + .with(project, cluster, application) + .and_return(prometheus_config_service) + + expect(prometheus_config_service) + .to receive(:execute) + .and_return(YAML.safe_load(empty_alerts_values_update_yaml)) + + service.execute + end + + it 'schedules async update status check' do + expect(::ClusterWaitForAppUpdateWorker).to receive(:perform_in).once + + service.execute + end + end + + context 'when k8s cluster communication fails' do + before do + error = ::Kubeclient::HttpError.new(500, 'system failure', nil) + allow(helm_client).to receive(:update).and_raise(error) + end + + it 'make the application update errored' do + service.execute + + expect(application).to be_update_errored + expect(application.status_reason).to match(/kubernetes error:/i) + end + end + + context 'when application cannot be persisted' do + let(:application) { build(:clusters_applications_prometheus, :installed) } + + before do + allow(application).to receive(:make_updating!).once + .and_raise(ActiveRecord::RecordInvalid.new(application)) + end + + it 'make the application update errored' do + expect(helm_client).not_to receive(:update) + + service.execute + + expect(application).to be_update_errored + end + end + end +end diff --git a/spec/services/clusters/applications/schedule_update_service_spec.rb b/spec/services/clusters/applications/schedule_update_service_spec.rb new file mode 100644 index 00000000000..0764f5b6a97 --- /dev/null +++ 
b/spec/services/clusters/applications/schedule_update_service_spec.rb @@ -0,0 +1,37 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Clusters::Applications::ScheduleUpdateService do + describe '#execute' do + let(:project) { create(:project) } + + around do |example| + Timecop.freeze { example.run } + end + + context 'when application is able to be updated' do + context 'when the application was recently scheduled' do + it 'schedules worker with a backoff delay' do + application = create(:clusters_applications_prometheus, :installed, last_update_started_at: Time.now + 5.minutes) + service = described_class.new(application, project) + + expect(::ClusterUpdateAppWorker).to receive(:perform_in).with(described_class::BACKOFF_DELAY, application.name, application.id, project.id, Time.now).once + + service.execute + end + end + + context 'when the application has not been recently updated' do + it 'schedules worker' do + application = create(:clusters_applications_prometheus, :installed) + service = described_class.new(application, project) + + expect(::ClusterUpdateAppWorker).to receive(:perform_async).with(application.name, application.id, project.id, Time.now).once + + service.execute + end + end + end + end +end diff --git a/spec/services/users/build_service_spec.rb b/spec/services/users/build_service_spec.rb index aed5d2598ef..146819c7f44 100644 --- a/spec/services/users/build_service_spec.rb +++ b/spec/services/users/build_service_spec.rb @@ -16,6 +16,14 @@ describe Users::BuildService do expect(service.execute).to be_valid end + context 'calls the UpdateCanonicalEmailService' do + specify do + expect(Users::UpdateCanonicalEmailService).to receive(:new).and_call_original + + service.execute + end + end + context 'allowed params' do let(:params) do { diff --git a/spec/services/users/create_service_spec.rb b/spec/services/users/create_service_spec.rb index a139dc01314..c783a1403df 100644 --- a/spec/services/users/create_service_spec.rb +++ b/spec/services/users/create_service_spec.rb @@ -8,10 +8,11 @@ describe Users::CreateService do context 'with an admin user' do let(:service) { described_class.new(admin_user, params) } + let(:email) { 'jd@example.com' } context 'when required parameters are provided' do let(:params) do - { name: 'John Doe', username: 'jduser', email: 'jd@example.com', password: 'mydummypass' } + { name: 'John Doe', username: 'jduser', email: email, password: 'mydummypass' } end it 'returns a persisted user' do diff --git a/spec/services/users/update_canonical_email_service_spec.rb b/spec/services/users/update_canonical_email_service_spec.rb new file mode 100644 index 00000000000..68ba1b75b6c --- /dev/null +++ b/spec/services/users/update_canonical_email_service_spec.rb @@ -0,0 +1,116 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Users::UpdateCanonicalEmailService do + let(:other_email) { "differentaddress@includeddomain.com" } + + before do + stub_const("Users::UpdateCanonicalEmailService::INCLUDED_DOMAINS_PATTERN", [/includeddomain/]) + end + + describe '#initialize' do + context 'unsuccessful' do + it 'raises an error if there is no user' do + expect { described_class.new(user: nil) }.to raise_error(ArgumentError, /Please provide a user/) + end + + it 'raises an error if the object is not a User' do + expect { described_class.new(user: 123) }.to raise_error(ArgumentError, /Please provide a user/) + end + end + + context 'when a user is provided' do + it 'does not error' do + user = build(:user) + + expect { 
described_class.new(user: user) }.not_to raise_error + end + end + end + + describe "#canonicalize_email" do + let(:user) { build(:user) } + let(:subject) { described_class.new(user: user) } + + context 'when the email domain is included' do + context 'strips out any . or anything after + in the agent for included domains' do + using RSpec::Parameterized::TableSyntax + + let(:expected_result) { 'user@includeddomain.com' } + + where(:raw_email, :expected_result) do + 'user@includeddomain.com' | 'user@includeddomain.com' + 'u.s.e.r@includeddomain.com' | 'user@includeddomain.com' + 'user+123@includeddomain.com' | 'user@includeddomain.com' + 'us.er+123@includeddomain.com' | 'user@includeddomain.com' + end + + with_them do + before do + user.email = raw_email + end + + specify do + subject.execute + + expect(user.user_canonical_email).not_to be_nil + expect(user.user_canonical_email.canonical_email).to eq expected_result + end + end + end + + context 'when the user has an existing canonical email' do + it 'updates the user canonical email record' do + user.user_canonical_email = build(:user_canonical_email, canonical_email: other_email) + user.email = "us.er+123@includeddomain.com" + + subject.execute + + expect(user.user_canonical_email.canonical_email).to eq "user@includeddomain.com" + end + end + end + + context 'when the email domain is not included' do + it 'returns nil' do + user.email = "u.s.er+343@excludeddomain.com" + + subject.execute + + expect(user.user_canonical_email).to be_nil + end + + it 'destroys any existing UserCanonicalEmail record' do + user.email = "u.s.er+343@excludeddomain.com" + user.user_canonical_email = build(:user_canonical_email, canonical_email: other_email) + expect(user.user_canonical_email).to receive(:delete) + + subject.execute + end + end + + context 'when the user email is not processable' do + [nil, 'nonsense'].each do |invalid_address| + before do + user.email = invalid_address + end + + specify do + subject.execute + + expect(user.user_canonical_email).to be_nil + end + + it 'preserves any existing record' do + user.email = nil + user.user_canonical_email = build(:user_canonical_email, canonical_email: other_email) + + subject.execute + + expect(user.user_canonical_email.canonical_email).to eq other_email + end + end + end + end +end diff --git a/spec/services/users/update_service_spec.rb b/spec/services/users/update_service_spec.rb index 24738a79045..bd54ca97431 100644 --- a/spec/services/users/update_service_spec.rb +++ b/spec/services/users/update_service_spec.rb @@ -71,6 +71,32 @@ describe Users::UpdateService do expect(user.job_title).to eq('Backend Engineer') end + context 'updating canonical email' do + context 'if email was changed' do + subject do + update_user(user, email: 'user+extrastuff@example.com') + end + + it 'calls canonicalize_email' do + expect_next_instance_of(Users::UpdateCanonicalEmailService) do |service| + expect(service).to receive(:execute) + end + + subject + end + end + + context 'if email was NOT changed' do + subject do + update_user(user, job_title: 'supreme leader of the universe') + end + + it 'skips update canonicalize email service call' do + expect { subject }.not_to change { user.user_canonical_email } + end + end + end + def update_user(user, opts) described_class.new(user, opts.merge(user: user)).execute end diff --git a/spec/workers/cluster_update_app_worker_spec.rb b/spec/workers/cluster_update_app_worker_spec.rb new file mode 100644 index 00000000000..e540ede4bc0 --- /dev/null +++ 
b/spec/workers/cluster_update_app_worker_spec.rb @@ -0,0 +1,98 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe ClusterUpdateAppWorker do + include ExclusiveLeaseHelpers + + let_it_be(:project) { create(:project) } + + let(:prometheus_update_service) { spy } + + subject { described_class.new } + + around do |example| + Timecop.freeze(Time.now) { example.run } + end + + before do + allow(::Clusters::Applications::PrometheusUpdateService).to receive(:new).and_return(prometheus_update_service) + end + + describe '#perform' do + context 'when the application last_update_started_at is higher than the time the job was scheduled in' do + it 'does nothing' do + application = create(:clusters_applications_prometheus, :updated, last_update_started_at: Time.now) + + expect(prometheus_update_service).not_to receive(:execute) + + expect(subject.perform(application.name, application.id, project.id, Time.now - 5.minutes)).to be_nil + end + end + + context 'when another worker is already running' do + it 'returns nil' do + application = create(:clusters_applications_prometheus, :updating) + + expect(subject.perform(application.name, application.id, project.id, Time.now)).to be_nil + end + end + + it 'executes PrometheusUpdateService' do + application = create(:clusters_applications_prometheus, :installed) + + expect(prometheus_update_service).to receive(:execute) + + subject.perform(application.name, application.id, project.id, Time.now) + end + + context 'with exclusive lease' do + let(:application) { create(:clusters_applications_prometheus, :installed) } + let(:lease_key) { "#{described_class.name.underscore}-#{application.id}" } + + before do + allow(Gitlab::ExclusiveLease).to receive(:new) + stub_exclusive_lease_taken(lease_key) + end + + it 'does not allow same app to be updated concurrently by same project' do + expect(Clusters::Applications::PrometheusUpdateService).not_to receive(:new) + + subject.perform(application.name, application.id, project.id, Time.now) + end + + it 'does not allow same app to be updated concurrently by different project' do + project1 = create(:project) + + expect(Clusters::Applications::PrometheusUpdateService).not_to receive(:new) + + subject.perform(application.name, application.id, project1.id, Time.now) + end + + it 'allows different app to be updated concurrently by same project' do + application2 = create(:clusters_applications_prometheus, :installed) + lease_key2 = "#{described_class.name.underscore}-#{application2.id}" + + stub_exclusive_lease(lease_key2) + + expect(Clusters::Applications::PrometheusUpdateService).to receive(:new) + .with(application2, project) + + subject.perform(application2.name, application2.id, project.id, Time.now) + end + + it 'allows different app to be updated by different project' do + application2 = create(:clusters_applications_prometheus, :installed) + lease_key2 = "#{described_class.name.underscore}-#{application2.id}" + project2 = create(:project) + + stub_exclusive_lease(lease_key2) + + expect(Clusters::Applications::PrometheusUpdateService).to receive(:new) + .with(application2, project2) + + subject.perform(application2.name, application2.id, project2.id, Time.now) + end + end + end +end diff --git a/spec/workers/cluster_wait_for_app_update_worker_spec.rb b/spec/workers/cluster_wait_for_app_update_worker_spec.rb new file mode 100644 index 00000000000..f1206bd85cb --- /dev/null +++ b/spec/workers/cluster_wait_for_app_update_worker_spec.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +require 
'spec_helper'
+
+describe ClusterWaitForAppUpdateWorker do
+  let(:check_upgrade_progress_service) { spy }
+
+  before do
+    allow(::Clusters::Applications::CheckUpgradeProgressService).to receive(:new).and_return(check_upgrade_progress_service)
+  end
+
+  it 'runs CheckUpgradeProgressService when application is found' do
+    application = create(:clusters_applications_prometheus)
+
+    expect(check_upgrade_progress_service).to receive(:execute)
+
+    subject.perform(application.name, application.id)
+  end
+
+  it 'does not run CheckUpgradeProgressService when application is not found' do
+    expect(check_upgrade_progress_service).not_to receive(:execute)
+
+    expect do
+      subject.perform("prometheus", -1)
+    end.to raise_error(ActiveRecord::RecordNotFound)
+  end
+end
-- 
cgit v1.2.1