author     Stan Hu <stanhu@gmail.com>  2018-07-07 14:53:35 -0700
committer  Stan Hu <stanhu@gmail.com>  2018-07-07 14:53:35 -0700
commit     b3deab852bcbfe1fd86475d2881cd328c5374b05 (patch)
tree       d5d0a16918d6b270ddb2b42758dfca71949a808f /spec
parent     7967793c5516d1966ed9e3468211529a48b18d9e (diff)
parent     e0c0ce28ea786a23d15eee95d56560079b0c6dfe (diff)
Merge branch 'master' into sh-support-bitbucket-server-import
Diffstat (limited to 'spec')
82 files changed, 3163 insertions, 1446 deletions
diff --git a/spec/controllers/concerns/group_tree_spec.rb b/spec/controllers/concerns/group_tree_spec.rb
index ba84fbf8564..503eb416962 100644
--- a/spec/controllers/concerns/group_tree_spec.rb
+++ b/spec/controllers/concerns/group_tree_spec.rb
@@ -63,6 +63,17 @@ describe GroupTree do
         expect(assigns(:groups)).to contain_exactly(parent, subgroup)
       end
+
+      it 'preloads parents regardless of pagination' do
+        allow(Kaminari.config).to receive(:default_per_page).and_return(1)
+        group = create(:group, :public)
+        subgroup = create(:group, :public, parent: group)
+        search_result = create(:group, :public, name: 'result', parent: subgroup)
+
+        get :index, filter: 'resu', format: :json
+
+        expect(assigns(:groups)).to contain_exactly(group, subgroup, search_result)
+      end
     end

     context 'json content' do
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index 27f04be3fdf..a2dfc43e9f7 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -616,13 +616,40 @@ describe ProjectsController do
   end

   describe 'POST #preview_markdown' do
-    it 'renders json in a correct format' do
+    before do
       sign_in(user)
+    end

+    it 'renders json in a correct format' do
       post :preview_markdown, namespace_id: public_project.namespace, id: public_project, text: '*Markdown* text'

       expect(JSON.parse(response.body).keys).to match_array(%w(body references))
     end
+
+    context 'state filter on references' do
+      let(:issue) { create(:issue, :closed, project: public_project) }
+      let(:merge_request) { create(:merge_request, :closed, target_project: public_project) }
+
+      it 'renders JSON body with state filter for issues' do
+        post :preview_markdown, namespace_id: public_project.namespace,
+                                id: public_project,
+                                text: issue.to_reference
+
+        json_response = JSON.parse(response.body)
+
+        expect(json_response['body']).to match(/\##{issue.iid} \(closed\)/)
+      end
+
+      it 'renders JSON body with state filter for MRs' do
+        post :preview_markdown, namespace_id: public_project.namespace,
+                                id: public_project,
+                                text: merge_request.to_reference
+
+        json_response = JSON.parse(response.body)
+
+        expect(json_response['body']).to match(/\!#{merge_request.iid} \(closed\)/)
+      end
+    end
   end

   describe '#ensure_canonical_path' do
@@ -763,23 +790,55 @@ describe ProjectsController do
       project.add_master(user)
     end

-    context 'when project export is enabled' do
-      it 'returns 302' do
-        get :download_export, namespace_id: project.namespace, id: project
+    context 'object storage disabled' do
+      before do
+        stub_feature_flags(import_export_object_storage: false)
+      end

-        expect(response).to have_gitlab_http_status(302)
+      context 'when project export is enabled' do
+        it 'returns 302' do
+          get :download_export, namespace_id: project.namespace, id: project
+
+          expect(response).to have_gitlab_http_status(302)
+        end
+      end
+
+      context 'when project export is disabled' do
+        before do
+          stub_application_setting(project_export_enabled?: false)
+        end
+
+        it 'returns 404' do
+          get :download_export, namespace_id: project.namespace, id: project
+
+          expect(response).to have_gitlab_http_status(404)
+        end
+      end
     end

-    context 'when project export is disabled' do
+    context 'object storage enabled' do
       before do
-        stub_application_setting(project_export_enabled?: false)
+        stub_feature_flags(import_export_object_storage: true)
       end

-      it 'returns 404' do
-        get :download_export, namespace_id: project.namespace, id: project
+      context 'when project export is enabled' do
+        it 'returns 302' do
+          get :download_export, namespace_id: project.namespace, id: project

-        expect(response).to have_gitlab_http_status(404)
+          expect(response).to have_gitlab_http_status(302)
+        end
+      end
+
+      context 'when project export is disabled' do
+        before do
+          stub_application_setting(project_export_enabled?: false)
+        end
+
+        it 'returns 404' do
+          get :download_export, namespace_id: project.namespace, id: project
+
+          expect(response).to have_gitlab_http_status(404)
+        end
+      end
     end
   end
diff --git a/spec/factories/ci/build_trace_chunks.rb b/spec/factories/ci/build_trace_chunks.rb
index c0b9a25bfe8..3e8e2736423 100644
--- a/spec/factories/ci/build_trace_chunks.rb
+++ b/spec/factories/ci/build_trace_chunks.rb
@@ -3,5 +3,53 @@ FactoryBot.define do
     build factory: :ci_build
     chunk_index 0
     data_store :redis
+
+    trait :redis_with_data do
+      data_store :redis
+
+      transient do
+        initial_data 'test data'
+      end
+
+      after(:create) do |build_trace_chunk, evaluator|
+        Ci::BuildTraceChunks::Redis.new.set_data(build_trace_chunk, evaluator.initial_data)
+      end
+    end
+
+    trait :redis_without_data do
+      data_store :redis
+    end
+
+    trait :database_with_data do
+      data_store :database
+
+      transient do
+        initial_data 'test data'
+      end
+
+      after(:build) do |build_trace_chunk, evaluator|
+        Ci::BuildTraceChunks::Database.new.set_data(build_trace_chunk, evaluator.initial_data)
+      end
+    end
+
+    trait :database_without_data do
+      data_store :database
+    end
+
+    trait :fog_with_data do
+      data_store :fog
+
+      transient do
+        initial_data 'test data'
+      end
+
+      after(:create) do |build_trace_chunk, evaluator|
+        Ci::BuildTraceChunks::Fog.new.set_data(build_trace_chunk, evaluator.initial_data)
+      end
+    end
+
+    trait :fog_without_data do
+      data_store :fog
+    end
   end
 end
diff --git a/spec/factories/import_export_uploads.rb b/spec/factories/import_export_uploads.rb
new file mode 100644
index 00000000000..7750d49b1d0
--- /dev/null
+++ b/spec/factories/import_export_uploads.rb
@@ -0,0 +1,5 @@
+FactoryBot.define do
+  factory :import_export_upload do
+    project { create(:project) }
+  end
+end
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index f6b05bac0e8..f77ded23b18 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -103,6 +103,22 @@ FactoryBot.define do
     end

     trait :with_export do
+      before(:create) do |_project, _evaluator|
+        allow(Feature).to receive(:enabled?).with(:import_export_object_storage) { false }
+        allow(Feature).to receive(:enabled?).with('import_export_object_storage') { false }
+      end
+
+      after(:create) do |project, _evaluator|
+        ProjectExportWorker.new.perform(project.creator.id, project.id)
+      end
+    end
+
+    trait :with_object_export do
+      before(:create) do |_project, _evaluator|
+        allow(Feature).to receive(:enabled?).with(:import_export_object_storage) { true }
+        allow(Feature).to receive(:enabled?).with('import_export_object_storage') { true }
+      end
+
       after(:create) do |project, evaluator|
         ProjectExportWorker.new.perform(project.creator.id, project.id)
       end
diff --git a/spec/features/dashboard/projects_spec.rb b/spec/features/dashboard/projects_spec.rb
index 46935662288..8647616101b 100644
--- a/spec/features/dashboard/projects_spec.rb
+++ b/spec/features/dashboard/projects_spec.rb
@@ -29,6 +29,34 @@ describe 'Dashboard Projects' do
     end
   end

+  context 'when user has access to the project' do
+    it 'shows role badge' do
+      visit dashboard_projects_path
+
+      page.within '.user-access-role' do
+        expect(page).to have_content('Developer')
+      end
+    end
+
+    context 'when role changes', :use_clean_rails_memory_store_fragment_caching do
+      it 'displays the right role' do
+        visit dashboard_projects_path
+
+        page.within '.user-access-role' do
+          expect(page).to have_content('Developer')
+        end
+
+        project.members.last.update(access_level: 40)
+
+        visit dashboard_projects_path
+
+        page.within '.user-access-role' do
+          expect(page).to have_content('Maintainer')
+        end
+      end
+    end
+  end
+
   context 'when last_repository_updated_at, last_activity_at and update_at are present' do
     it 'shows the last_repository_updated_at attribute as the update date' do
       project.update_attributes!(last_repository_updated_at: Time.now, last_activity_at: 1.hour.ago)
diff --git a/spec/features/profiles/keys_spec.rb b/spec/features/profiles/keys_spec.rb
index bfb17a56613..e6586fc8a0a 100644
--- a/spec/features/profiles/keys_spec.rb
+++ b/spec/features/profiles/keys_spec.rb
@@ -30,6 +30,20 @@ describe 'Profile > SSH Keys' do
       expect(find('.breadcrumbs-sub-title')).to have_link(attrs[:title])
     end

+    it 'shows a confirmable warning if the key does not start with ssh-' do
+      attrs = attributes_for(:key)
+
+      fill_in('Key', with: 'invalid-key')
+      fill_in('Title', with: attrs[:title])
+      click_button('Add key')
+
+      expect(page).to have_selector('.js-add-ssh-key-validation-warning')
+
+      find('.js-add-ssh-key-validation-confirm-submit').click
+
+      expect(page).to have_content('Key is invalid')
+    end
+
     context 'when only DSA and ECDSA keys are allowed' do
       before do
         forbidden = ApplicationSetting::FORBIDDEN_KEY_VALUE
diff --git a/spec/features/projects/import_export/export_file_spec.rb b/spec/features/projects/import_export/export_file_spec.rb
index 8a418356541..eb281cd2122 100644
--- a/spec/features/projects/import_export/export_file_spec.rb
+++ b/spec/features/projects/import_export/export_file_spec.rb
@@ -25,6 +25,7 @@ describe 'Import/Export - project export integration test', :js do
   before do
     allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
+    stub_feature_flags(import_export_object_storage: false)
   end

   after do
diff --git a/spec/features/projects/import_export/namespace_export_file_spec.rb b/spec/features/projects/import_export/namespace_export_file_spec.rb
index 7d056b0c140..9bb8a2063b5 100644
--- a/spec/features/projects/import_export/namespace_export_file_spec.rb
+++ b/spec/features/projects/import_export/namespace_export_file_spec.rb
@@ -5,6 +5,7 @@ describe 'Import/Export - Namespace export file cleanup', :js do
   before do
     allow(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
+    stub_feature_flags(import_export_object_storage: false)
   end

   after do
diff --git a/spec/features/projects/wiki/markdown_preview_spec.rb b/spec/features/projects/wiki/markdown_preview_spec.rb
index ef15e7e1ff9..0bec5f185d6 100644
--- a/spec/features/projects/wiki/markdown_preview_spec.rb
+++ b/spec/features/projects/wiki/markdown_preview_spec.rb
@@ -9,6 +9,7 @@ describe 'Projects > Wiki > User previews markdown changes', :js do
 [relative link 1](../relative)
 [relative link 2](./relative)
 [relative link 3](./e/f/relative)
+[spaced link](title with spaces)
     HEREDOC
   end

@@ -42,6 +43,7 @@ describe 'Projects > Wiki > User previews markdown changes', :js do
       expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a/b/relative\">relative link 1</a>")
       expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a/b/c/relative\">relative link 2</a>")
       expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a/b/c/e/f/relative\">relative link 3</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/title%20with%20spaces\">spaced link</a>") end end @@ -64,6 +66,7 @@ describe 'Projects > Wiki > User previews markdown changes', :js do expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a-page/b-page/relative\">relative link 1</a>") expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a-page/b-page/c-page/relative\">relative link 2</a>") expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a-page/b-page/c-page/e/f/relative\">relative link 3</a>") + expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/title%20with%20spaces\">spaced link</a>") end end @@ -86,6 +89,7 @@ describe 'Projects > Wiki > User previews markdown changes', :js do expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a-page/b-page/relative\">relative link 1</a>") expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a-page/b-page/c-page/relative\">relative link 2</a>") expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a-page/b-page/c-page/e/f/relative\">relative link 3</a>") + expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/title%20with%20spaces\">spaced link</a>") end end end @@ -119,6 +123,7 @@ describe 'Projects > Wiki > User previews markdown changes', :js do expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a/b/relative\">relative link 1</a>") expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a/b/c/relative\">relative link 2</a>") expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a/b/c/e/f/relative\">relative link 3</a>") + expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/title%20with%20spaces\">spaced link</a>") end end @@ -136,6 +141,7 @@ describe 'Projects > Wiki > User previews markdown changes', :js do expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a-page/b-page/relative\">relative link 1</a>") expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a-page/b-page/c-page/relative\">relative link 2</a>") expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a-page/b-page/c-page/e/f/relative\">relative link 3</a>") + expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/title%20with%20spaces\">spaced link</a>") end end @@ -153,6 +159,7 @@ describe 'Projects > Wiki > User previews markdown changes', :js do expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a-page/b-page/relative\">relative link 1</a>") expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a-page/b-page/c-page/relative\">relative link 2</a>") expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a-page/b-page/c-page/e/f/relative\">relative link 3</a>") + expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/title%20with%20spaces\">spaced link</a>") end end end diff --git a/spec/features/snippets/show_spec.rb b/spec/features/snippets/show_spec.rb index 74b693f3eff..f31457db92f 100644 --- a/spec/features/snippets/show_spec.rb +++ b/spec/features/snippets/show_spec.rb @@ -68,6 +68,26 @@ describe 'Snippet', :js do end end + context 'with cached Redcarpet html' do + let(:snippet) { create(:personal_snippet, :public, file_name: file_name, content: content, cached_markdown_version: CacheMarkdownField::CACHE_REDCARPET_VERSION) } + let(:file_name) { 'test.md' } + let(:content) { "1. 
+    let(:content) { "1. one\n - sublist\n" }
+
+    it 'renders correctly' do
+      expect(page).to have_xpath("//ol//li//ul")
+    end
+  end
+
+  context 'with cached CommonMark html' do
+    let(:snippet) { create(:personal_snippet, :public, file_name: file_name, content: content, cached_markdown_version: CacheMarkdownField::CACHE_COMMONMARK_VERSION) }
+    let(:file_name) { 'test.md' }
+    let(:content) { "1. one\n - sublist\n" }
+
+    it 'renders correctly' do
+      expect(page).not_to have_xpath("//ol//li//ul")
+    end
+  end
+
   context 'switching to the simple viewer' do
     before do
       find('.js-blob-viewer-switch-btn[data-viewer=simple]').click
diff --git a/spec/fixtures/project_export.tar.gz b/spec/fixtures/project_export.tar.gz
new file mode 100644
index 00000000000..72ab2d71f35
--- /dev/null
+++ b/spec/fixtures/project_export.tar.gz
Binary files differ
diff --git a/spec/helpers/groups_helper_spec.rb b/spec/helpers/groups_helper_spec.rb
index 6c94bd4e504..115807f954b 100644
--- a/spec/helpers/groups_helper_spec.rb
+++ b/spec/helpers/groups_helper_spec.rb
@@ -206,8 +206,9 @@ describe GroupsHelper do
     let(:group) { create(:group, :public) }
     let(:user) { create(:user) }

     before do
+      group.add_owner(user)
       allow(helper).to receive(:current_user) { user }
-      allow(helper).to receive(:can?) { true }
+      allow(helper).to receive(:can?) { |*args| Ability.allowed?(*args) }
       helper.instance_variable_set(:@group, group)
     end

@@ -231,7 +232,10 @@ describe GroupsHelper do
       cross_project_features = [:activity, :issues, :labels, :milestones,
                                 :merge_requests]

-      expect(helper).to receive(:can?).with(user, :read_cross_project) { false }
+      allow(Ability).to receive(:allowed?).and_call_original
+      cross_project_features.each do |feature|
+        expect(Ability).to receive(:allowed?).with(user, "read_group_#{feature}".to_sym, group) { false }
+      end

       expect(helper.group_sidebar_links).not_to include(*cross_project_features)
     end
diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb
index d246beb9888..f76ed4bfda4 100644
--- a/spec/helpers/issuables_helper_spec.rb
+++ b/spec/helpers/issuables_helper_spec.rb
@@ -184,6 +184,7 @@ describe IssuablesHelper do
         issuableRef: "##{issue.iid}",
         markdownPreviewPath: "/#{@project.full_path}/preview_markdown",
         markdownDocsPath: '/help/user/markdown',
+        markdownVersion: 11,
         issuableTemplates: [],
         projectPath: @project.path,
         projectNamespace: @project.namespace.path,
diff --git a/spec/helpers/markup_helper_spec.rb b/spec/helpers/markup_helper_spec.rb
index 1a720aae55c..d5ed5c59c61 100644
--- a/spec/helpers/markup_helper_spec.rb
+++ b/spec/helpers/markup_helper_spec.rb
@@ -205,7 +205,9 @@ describe MarkupHelper do
     it "uses Wiki pipeline for markdown files" do
       allow(@wiki).to receive(:format).and_return(:markdown)

-      expect(helper).to receive(:markdown_unsafe).with('wiki content', pipeline: :wiki, project: project, project_wiki: @wiki, page_slug: "nested/page", issuable_state_filter_enabled: true)
+      expect(helper).to receive(:markdown_unsafe).with('wiki content',
+        pipeline: :wiki, project: project, project_wiki: @wiki, page_slug: "nested/page",
+        issuable_state_filter_enabled: true, markdown_engine: :redcarpet)

       helper.render_wiki_content(@wiki)
     end
@@ -236,19 +238,32 @@ describe MarkupHelper do
     expect(helper.markup('foo.rst', content).encoding.name).to eq('UTF-8')
   end

-  it "delegates to #markdown_unsafe when file name corresponds to Markdown" do
+  it 'delegates to #markdown_unsafe when file name corresponds to Markdown' do
     expect(helper).to receive(:gitlab_markdown?).with('foo.md').and_return(true)
     expect(helper).to receive(:markdown_unsafe).and_return('NOEL')

     expect(helper.markup('foo.md', content)).to eq('NOEL')
   end

-  it "delegates to #asciidoc_unsafe when file name corresponds to AsciiDoc" do
+  it 'delegates to #asciidoc_unsafe when file name corresponds to AsciiDoc' do
     expect(helper).to receive(:asciidoc?).with('foo.adoc').and_return(true)
     expect(helper).to receive(:asciidoc_unsafe).and_return('NOEL')

     expect(helper.markup('foo.adoc', content)).to eq('NOEL')
   end
+
+  it 'uses passed in rendered content' do
+    expect(helper).not_to receive(:gitlab_markdown?)
+    expect(helper).not_to receive(:markdown_unsafe)
+
+    expect(helper.markup('foo.md', content, rendered: '<p>NOEL</p>')).to eq('<p>NOEL</p>')
+  end
+
+  it 'defaults to Redcarpet' do
+    expect(helper).to receive(:markdown_unsafe).with(content, hash_including(markdown_engine: :redcarpet)).and_return('NOEL')
+
+    expect(helper.markup('foo.md', content)).to eq('NOEL')
+  end
 end

 describe '#first_line_in_markdown' do
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index 80147b13739..beb6e8ea273 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -80,6 +80,7 @@ describe ProjectsHelper do
     before do
       allow(helper).to receive(:current_user).and_return(user)
       allow(helper).to receive(:can?).with(user, :read_cross_project) { true }
+      allow(user).to receive(:max_member_access_for_project).and_return(40)
     end

     it "includes the route" do
@@ -125,6 +126,10 @@ describe ProjectsHelper do
       expect(helper.project_list_cache_key(project)).to include("pipeline-status/#{project.commit.sha}-success")
     end
+
+    it "includes the user max member access" do
+      expect(helper.project_list_cache_key(project)).to include('access:40')
+    end
   end

   describe '#load_pipeline_status' do
diff --git a/spec/javascripts/diffs/store/getters_spec.js b/spec/javascripts/diffs/store/getters_spec.js
index 7945ddea911..7a94f18778b 100644
--- a/spec/javascripts/diffs/store/getters_spec.js
+++ b/spec/javascripts/diffs/store/getters_spec.js
@@ -1,24 +1,66 @@
-import getters from '~/diffs/store/getters';
+import * as getters from '~/diffs/store/getters';
+import state from '~/diffs/store/modules/diff_state';
 import { PARALLEL_DIFF_VIEW_TYPE, INLINE_DIFF_VIEW_TYPE } from '~/diffs/constants';

 describe('DiffsStoreGetters', () => {
+  let localState;
+
+  beforeEach(() => {
+    localState = state();
+  });
+
   describe('isParallelView', () => {
     it('should return true if view set to parallel view', () => {
-      expect(getters.isParallelView({ diffViewType: PARALLEL_DIFF_VIEW_TYPE })).toBeTruthy();
+      localState.diffViewType = PARALLEL_DIFF_VIEW_TYPE;
+
+      expect(getters.isParallelView(localState)).toEqual(true);
     });

     it('should return false if view not to parallel view', () => {
-      expect(getters.isParallelView({ diffViewType: 'foo' })).toBeFalsy();
+      localState.diffViewType = INLINE_DIFF_VIEW_TYPE;
+
+      expect(getters.isParallelView(localState)).toEqual(false);
     });
   });

   describe('isInlineView', () => {
     it('should return true if view set to inline view', () => {
-      expect(getters.isInlineView({ diffViewType: INLINE_DIFF_VIEW_TYPE })).toBeTruthy();
+      localState.diffViewType = INLINE_DIFF_VIEW_TYPE;
+
+      expect(getters.isInlineView(localState)).toEqual(true);
     });

     it('should return false if view not to inline view', () => {
-      expect(getters.isInlineView({ diffViewType: PARALLEL_DIFF_VIEW_TYPE })).toBeFalsy();
+      localState.diffViewType = PARALLEL_DIFF_VIEW_TYPE;
+
+      expect(getters.isInlineView(localState)).toEqual(false);
+    });
+  });
+
+  describe('areAllFilesCollapsed', () => {
+    it('returns true when all files are collapsed', () => {
+      localState.diffFiles = [{ collapsed: true }, { collapsed: true }];
+      expect(getters.areAllFilesCollapsed(localState)).toEqual(true);
+    });
+
+    it('returns false when at least one file is not collapsed', () => {
+      localState.diffFiles = [{ collapsed: false }, { collapsed: true }];
+      expect(getters.areAllFilesCollapsed(localState)).toEqual(false);
+    });
+  });
+
+  describe('commitId', () => {
+    it('returns commit id when is set', () => {
+      const commitID = '800f7a91';
+      localState.commit = {
+        id: commitID,
+      };
+
+      expect(getters.commitId(localState)).toEqual(commitID);
+    });
+
+    it('returns null when no commit is set', () => {
+      expect(getters.commitId(localState)).toEqual(null);
     });
   });
 });
diff --git a/spec/javascripts/fixtures/merge_requests.rb b/spec/javascripts/fixtures/merge_requests.rb
index ee60489eb7c..7257d0c8556 100644
--- a/spec/javascripts/fixtures/merge_requests.rb
+++ b/spec/javascripts/fixtures/merge_requests.rb
@@ -80,6 +80,13 @@ describe Projects::MergeRequestsController, '(JavaScript fixtures)', type: :cont
     render_discussions_json(merge_request, example.description)
   end

+  it 'merge_requests/resolved_diff_discussion.json' do |example|
+    note = create(:discussion_note_on_merge_request, :resolved, project: project, author: admin, position: position, noteable: merge_request)
+    create(:system_note, project: project, author: admin, noteable: merge_request, discussion_id: note.discussion.id)
+
+    render_discussions_json(merge_request, example.description)
+  end
+
   context 'with image diff' do
     let(:merge_request2) { create(:merge_request_with_diffs, :with_image_diffs, source_project: project, title: "Added images") }
     let(:image_path) { "files/images/ee_repo_logo.png" }
diff --git a/spec/javascripts/frequent_items/components/app_spec.js b/spec/javascripts/frequent_items/components/app_spec.js
new file mode 100644
index 00000000000..834f919524d
--- /dev/null
+++ b/spec/javascripts/frequent_items/components/app_spec.js
@@ -0,0 +1,251 @@
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import Vue from 'vue';
+import appComponent from '~/frequent_items/components/app.vue';
+import eventHub from '~/frequent_items/event_hub';
+import store from '~/frequent_items/store';
+import { FREQUENT_ITEMS, HOUR_IN_MS } from '~/frequent_items/constants';
+import { getTopFrequentItems } from '~/frequent_items/utils';
+import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
+import { currentSession, mockFrequentProjects, mockSearchedProjects } from '../mock_data';
+
+let session;
+const createComponentWithStore = (namespace = 'projects') => {
+  session = currentSession[namespace];
+  gon.api_version = session.apiVersion;
+  const Component = Vue.extend(appComponent);
+
+  return mountComponentWithStore(Component, {
+    store,
+    props: {
+      namespace,
+      currentUserName: session.username,
+      currentItem: session.project || session.group,
+    },
+  });
+};
+
+describe('Frequent Items App Component', () => {
+  let vm;
+  let mock;
+
+  beforeEach(() => {
+    mock = new MockAdapter(axios);
+    vm = createComponentWithStore();
+  });
+
+  afterEach(() => {
+    mock.restore();
+    vm.$destroy();
+  });
+
+  describe('methods', () => {
+    describe('dropdownOpenHandler', () => {
+      it('should fetch frequent items when no search has been previously made on desktop', () => {
+        spyOn(vm, 'fetchFrequentItems');
+
+        vm.dropdownOpenHandler();
+
+        expect(vm.fetchFrequentItems).toHaveBeenCalledWith();
+      });
+    });
+
+    describe('logItemAccess', () => {
+      let storage;
+
+      beforeEach(() => {
+        storage = {};
+
+        spyOn(window.localStorage, 'setItem').and.callFake((storageKey, value) => {
+          storage[storageKey] = value;
+        });
+
+        spyOn(window.localStorage, 'getItem').and.callFake(storageKey => {
+          if (storage[storageKey]) {
+            return storage[storageKey];
+          }
+
+          return null;
+        });
+      });
+
+      it('should create a project store if it does not exist and adds a project', () => {
+        vm.logItemAccess(session.storageKey, session.project);
+
+        const projects = JSON.parse(storage[session.storageKey]);
+
+        expect(projects.length).toBe(1);
+        expect(projects[0].frequency).toBe(1);
+        expect(projects[0].lastAccessedOn).toBeDefined();
+      });
+
+      it('should prevent inserting same report multiple times into store', () => {
+        vm.logItemAccess(session.storageKey, session.project);
+        vm.logItemAccess(session.storageKey, session.project);
+
+        const projects = JSON.parse(storage[session.storageKey]);
+
+        expect(projects.length).toBe(1);
+      });
+
+      it('should increase frequency of report if it was logged multiple times over the course of an hour', () => {
+        let projects;
+        const newTimestamp = Date.now() + HOUR_IN_MS + 1;
+
+        vm.logItemAccess(session.storageKey, session.project);
+        projects = JSON.parse(storage[session.storageKey]);
+
+        expect(projects[0].frequency).toBe(1);
+
+        vm.logItemAccess(session.storageKey, {
+          ...session.project,
+          lastAccessedOn: newTimestamp,
+        });
+        projects = JSON.parse(storage[session.storageKey]);
+
+        expect(projects[0].frequency).toBe(2);
+        expect(projects[0].lastAccessedOn).not.toBe(session.project.lastAccessedOn);
+      });
+
+      it('should always update project metadata', () => {
+        let projects;
+        const oldProject = {
+          ...session.project,
+        };
+
+        const newProject = {
+          ...session.project,
+          name: 'New Name',
+          avatarUrl: 'new/avatar.png',
+          namespace: 'New / Namespace',
+          webUrl: 'http://localhost/new/web/url',
+        };
+
+        vm.logItemAccess(session.storageKey, oldProject);
+        projects = JSON.parse(storage[session.storageKey]);
+
+        expect(projects[0].name).toBe(oldProject.name);
+        expect(projects[0].avatarUrl).toBe(oldProject.avatarUrl);
+        expect(projects[0].namespace).toBe(oldProject.namespace);
+        expect(projects[0].webUrl).toBe(oldProject.webUrl);
+
+        vm.logItemAccess(session.storageKey, newProject);
+        projects = JSON.parse(storage[session.storageKey]);
+
+        expect(projects[0].name).toBe(newProject.name);
+        expect(projects[0].avatarUrl).toBe(newProject.avatarUrl);
+        expect(projects[0].namespace).toBe(newProject.namespace);
+        expect(projects[0].webUrl).toBe(newProject.webUrl);
+      });
+
+      it('should not add more than 20 projects in store', () => {
+        for (let id = 0; id < FREQUENT_ITEMS.MAX_COUNT; id += 1) {
+          const project = {
+            ...session.project,
+            id,
+          };
+          vm.logItemAccess(session.storageKey, project);
+        }
+
+        const projects = JSON.parse(storage[session.storageKey]);
+
+        expect(projects.length).toBe(FREQUENT_ITEMS.MAX_COUNT);
+      });
+    });
+  });
+
+  describe('created', () => {
+    it('should bind event listeners on eventHub', done => {
+      spyOn(eventHub, '$on');
+
+      createComponentWithStore().$mount();
+
+      Vue.nextTick(() => {
+        expect(eventHub.$on).toHaveBeenCalledWith('projects-dropdownOpen', jasmine.any(Function));
+        done();
+      });
+    });
+  });
+
+  describe('beforeDestroy', () => {
+    it('should unbind event listeners on eventHub', done => {
+      spyOn(eventHub, '$off');
+
+      vm.$mount();
+      vm.$destroy();
+
+      Vue.nextTick(() => {
+        expect(eventHub.$off).toHaveBeenCalledWith('projects-dropdownOpen', jasmine.any(Function));
+        done();
+      });
+    });
+  });
+
+  describe('template', () => {
+    it('should render search input', () => {
+      expect(vm.$el.querySelector('.search-input-container')).toBeDefined();
+    });
+
+    it('should render loading animation', done => {
+      vm.$store.dispatch('fetchSearchedItems');
+
+      Vue.nextTick(() => {
+        const loadingEl = vm.$el.querySelector('.loading-animation');
+
+        expect(loadingEl).toBeDefined();
+        expect(loadingEl.classList.contains('prepend-top-20')).toBe(true);
+        expect(loadingEl.querySelector('i').getAttribute('aria-label')).toBe('Loading projects');
+        done();
+      });
+    });
+
+    it('should render frequent projects list header', done => {
+      Vue.nextTick(() => {
+        const sectionHeaderEl = vm.$el.querySelector('.section-header');
+
+        expect(sectionHeaderEl).toBeDefined();
+        expect(sectionHeaderEl.innerText.trim()).toBe('Frequently visited');
+        done();
+      });
+    });
+
+    it('should render frequent projects list', done => {
+      const expectedResult = getTopFrequentItems(mockFrequentProjects);
+      spyOn(window.localStorage, 'getItem').and.callFake(() =>
+        JSON.stringify(mockFrequentProjects),
+      );
+
+      expect(vm.$el.querySelectorAll('.frequent-items-list-container li').length).toBe(1);
+
+      vm.fetchFrequentItems();
+      Vue.nextTick(() => {
+        expect(vm.$el.querySelectorAll('.frequent-items-list-container li').length).toBe(
+          expectedResult.length,
+        );
+        done();
+      });
+    });
+
+    it('should render searched projects list', done => {
+      mock.onGet(/\/api\/v4\/projects.json(.*)$/).replyOnce(200, mockSearchedProjects);
+
+      expect(vm.$el.querySelectorAll('.frequent-items-list-container li').length).toBe(1);
+
+      vm.$store.dispatch('setSearchQuery', 'gitlab');
+      vm
+        .$nextTick()
+        .then(() => {
+          expect(vm.$el.querySelector('.loading-animation')).toBeDefined();
+        })
+        .then(vm.$nextTick)
+        .then(vm.$nextTick)
+        .then(() => {
+          expect(vm.$el.querySelectorAll('.frequent-items-list-container li').length).toBe(
+            mockSearchedProjects.length,
+          );
+        })
+        .then(done)
+        .catch(done.fail);
+    });
+  });
+});
diff --git a/spec/javascripts/projects_dropdown/components/projects_list_item_spec.js b/spec/javascripts/frequent_items/components/frequent_items_list_item_spec.js
index c193258474e..201aca77b10 100644
--- a/spec/javascripts/projects_dropdown/components/projects_list_item_spec.js
+++ b/spec/javascripts/frequent_items/components/frequent_items_list_item_spec.js
@@ -1,23 +1,21 @@
 import Vue from 'vue';
-
-import projectsListItemComponent from '~/projects_dropdown/components/projects_list_item.vue';
-
+import frequentItemsListItemComponent from '~/frequent_items/components/frequent_items_list_item.vue';
 import mountComponent from 'spec/helpers/vue_mount_component_helper';
-import { mockProject } from '../mock_data';
+import { mockProject } from '../mock_data'; // can also use 'mockGroup', but not useful to test here

 const createComponent = () => {
-  const Component = Vue.extend(projectsListItemComponent);
+  const Component = Vue.extend(frequentItemsListItemComponent);

   return mountComponent(Component, {
-    projectId: mockProject.id,
-    projectName: mockProject.name,
+    itemId: mockProject.id,
+    itemName: mockProject.name,
     namespace: mockProject.namespace,
     webUrl: mockProject.webUrl,
     avatarUrl: mockProject.avatarUrl,
   });
 };

-describe('ProjectsListItemComponent', () => {
+describe('FrequentItemsListItemComponent', () => {
   let vm;

   beforeEach(() => {
@@ -32,22 +30,22 @@
   describe('hasAvatar', () => {
     it('should return `true` or `false` if whether avatar is present or not', () => {
       vm.avatarUrl = 'path/to/avatar.png';
-      expect(vm.hasAvatar).toBeTruthy();
+      expect(vm.hasAvatar).toBe(true);

       vm.avatarUrl = null;
-      expect(vm.hasAvatar).toBeFalsy();
+      expect(vm.hasAvatar).toBe(false);
     });
   });

-  describe('highlightedProjectName', () => {
+  describe('highlightedItemName', () => {
     it('should enclose part of project name in <b> & </b> which matches with `matcher` prop', () => {
       vm.matcher = 'lab';
-      expect(vm.highlightedProjectName).toContain('<b>Lab</b>');
+      expect(vm.highlightedItemName).toContain('<b>Lab</b>');
     });

     it('should return project name as it is if `matcher` is not available', () => {
       vm.matcher = null;
-      expect(vm.highlightedProjectName).toBe(mockProject.name);
+      expect(vm.highlightedItemName).toBe(mockProject.name);
     });
   });

@@ -66,12 +64,12 @@
   describe('template', () => {
     it('should render component element', () => {
-      expect(vm.$el.classList.contains('projects-list-item-container')).toBeTruthy();
+      expect(vm.$el.classList.contains('frequent-items-list-item-container')).toBeTruthy();
       expect(vm.$el.querySelectorAll('a').length).toBe(1);
-      expect(vm.$el.querySelectorAll('.project-item-avatar-container').length).toBe(1);
-      expect(vm.$el.querySelectorAll('.project-item-metadata-container').length).toBe(1);
-      expect(vm.$el.querySelectorAll('.project-title').length).toBe(1);
-      expect(vm.$el.querySelectorAll('.project-namespace').length).toBe(1);
+      expect(vm.$el.querySelectorAll('.frequent-items-item-avatar-container').length).toBe(1);
+      expect(vm.$el.querySelectorAll('.frequent-items-item-metadata-container').length).toBe(1);
+      expect(vm.$el.querySelectorAll('.frequent-items-item-title').length).toBe(1);
+      expect(vm.$el.querySelectorAll('.frequent-items-item-namespace').length).toBe(1);
     });
   });
 });
diff --git a/spec/javascripts/frequent_items/components/frequent_items_list_spec.js b/spec/javascripts/frequent_items/components/frequent_items_list_spec.js
new file mode 100644
index 00000000000..3003b7ee000
--- /dev/null
+++ b/spec/javascripts/frequent_items/components/frequent_items_list_spec.js
@@ -0,0 +1,84 @@
+import Vue from 'vue';
+import frequentItemsListComponent from '~/frequent_items/components/frequent_items_list.vue';
+import mountComponent from 'spec/helpers/vue_mount_component_helper';
+import { mockFrequentProjects } from '../mock_data';
+
+const createComponent = (namespace = 'projects') => {
+  const Component = Vue.extend(frequentItemsListComponent);
+
+  return mountComponent(Component, {
+    namespace,
+    items: mockFrequentProjects,
+    isFetchFailed: false,
+    hasSearchQuery: false,
+    matcher: 'lab',
+  });
+};
+
+describe('FrequentItemsListComponent', () => {
+  let vm;
+
+  beforeEach(() => {
+    vm = createComponent();
+  });
+
+  afterEach(() => {
+    vm.$destroy();
+  });
+
+  describe('computed', () => {
+    describe('isListEmpty', () => {
+      it('should return `true` or `false` representing whether if `items` is empty or not with projects', () => {
+        vm.items = [];
+        expect(vm.isListEmpty).toBe(true);
+
+        vm.items = mockFrequentProjects;
+        expect(vm.isListEmpty).toBe(false);
+      });
+    });
+
+    describe('fetched item messages', () => {
+      it('should return appropriate empty list message based on value of `localStorageFailed` prop with projects', () => {
+        vm.isFetchFailed = true;
+        expect(vm.listEmptyMessage).toBe('This feature requires browser localStorage support');
+
+        vm.isFetchFailed = false;
+        expect(vm.listEmptyMessage).toBe('Projects you visit often will appear here');
+      });
+    });
+
+    describe('searched item messages', () => {
+      it('should return appropriate empty list message based on value of `searchFailed` prop with projects', () => {
+        vm.hasSearchQuery = true;
+        vm.isFetchFailed = true;
+        expect(vm.listEmptyMessage).toBe('Something went wrong on our end.');
+
+        vm.isFetchFailed = false;
+        expect(vm.listEmptyMessage).toBe('Sorry, no projects matched your search');
+      });
+    });
+  });
+
+  describe('template', () => {
+    it('should render component element with list of projects', done => {
+      vm.items = mockFrequentProjects;
+
+      Vue.nextTick(() => {
+        expect(vm.$el.classList.contains('frequent-items-list-container')).toBe(true);
+        expect(vm.$el.querySelectorAll('ul.list-unstyled').length).toBe(1);
+        expect(vm.$el.querySelectorAll('li.frequent-items-list-item-container').length).toBe(5);
+        done();
+      });
+    });
+
+    it('should render component element with empty message', done => {
+      vm.items = [];
+
+      Vue.nextTick(() => {
+        expect(vm.$el.querySelectorAll('li.section-empty').length).toBe(1);
+        expect(vm.$el.querySelectorAll('li.frequent-items-list-item-container').length).toBe(0);
+        done();
+      });
+    });
+  });
+});
diff --git a/spec/javascripts/frequent_items/components/frequent_items_search_input_spec.js b/spec/javascripts/frequent_items/components/frequent_items_search_input_spec.js
new file mode 100644
index 00000000000..6a11038e70a
--- /dev/null
+++ b/spec/javascripts/frequent_items/components/frequent_items_search_input_spec.js
@@ -0,0 +1,77 @@
+import Vue from 'vue';
+import searchComponent from '~/frequent_items/components/frequent_items_search_input.vue';
+import eventHub from '~/frequent_items/event_hub';
+import mountComponent from 'spec/helpers/vue_mount_component_helper';
+
+const createComponent = (namespace = 'projects') => {
+  const Component = Vue.extend(searchComponent);
+
+  return mountComponent(Component, { namespace });
+};
+
+describe('FrequentItemsSearchInputComponent', () => {
+  let vm;
+
+  beforeEach(() => {
+    vm = createComponent();
+  });
+
+  afterEach(() => {
+    vm.$destroy();
+  });
+
+  describe('methods', () => {
+    describe('setFocus', () => {
+      it('should set focus to search input', () => {
+        spyOn(vm.$refs.search, 'focus');
+
+        vm.setFocus();
+        expect(vm.$refs.search.focus).toHaveBeenCalled();
+      });
+    });
+  });
+
+  describe('mounted', () => {
+    it('should listen `dropdownOpen` event', done => {
+      spyOn(eventHub, '$on');
+      const vmX = createComponent();
+
+      Vue.nextTick(() => {
+        expect(eventHub.$on).toHaveBeenCalledWith(
+          `${vmX.namespace}-dropdownOpen`,
+          jasmine.any(Function),
+        );
+        done();
+      });
+    });
+  });
+
+  describe('beforeDestroy', () => {
+    it('should unbind event listeners on eventHub', done => {
+      const vmX = createComponent();
+      spyOn(eventHub, '$off');
+
+      vmX.$mount();
+      vmX.$destroy();
+
+      Vue.nextTick(() => {
+        expect(eventHub.$off).toHaveBeenCalledWith(
+          `${vmX.namespace}-dropdownOpen`,
+          jasmine.any(Function),
+        );
+        done();
+      });
+    });
+  });
+
+  describe('template', () => {
+    it('should render component element', () => {
+      const inputEl = vm.$el.querySelector('input.form-control');
+
+      expect(vm.$el.classList.contains('search-input-container')).toBeTruthy();
+      expect(inputEl).not.toBe(null);
+      expect(inputEl.getAttribute('placeholder')).toBe('Search your projects');
+      expect(vm.$el.querySelector('.search-icon')).toBeDefined();
+    });
+  });
+});
diff --git a/spec/javascripts/frequent_items/mock_data.js b/spec/javascripts/frequent_items/mock_data.js
new file mode 100644
index 00000000000..cf3602f42d6
--- /dev/null
+++ b/spec/javascripts/frequent_items/mock_data.js
@@ -0,0 +1,168 @@
+export const currentSession = {
+  groups: {
+    username: 'root',
+    storageKey: 'root/frequent-groups',
+    apiVersion: 'v4',
+    group: {
+      id: 1,
+      name: 'dummy-group',
+      full_name: 'dummy-parent-group',
+      webUrl: `${gl.TEST_HOST}/dummy-group`,
+      avatarUrl: null,
+      lastAccessedOn: Date.now(),
+    },
+  },
+  projects: {
+    username: 'root',
+    storageKey: 'root/frequent-projects',
+    apiVersion: 'v4',
+    project: {
+      id: 1,
+      name: 'dummy-project',
+      namespace: 'SampleGroup / Dummy-Project',
+      webUrl: `${gl.TEST_HOST}/samplegroup/dummy-project`,
+      avatarUrl: null,
+      lastAccessedOn: Date.now(),
+    },
+  },
+};
+
+export const mockNamespace = 'projects';
+
+export const mockStorageKey = 'test-user/frequent-projects';
+
+export const mockGroup = {
+  id: 1,
+  name: 'Sub451',
+  namespace: 'Commit451 / Sub451',
+  webUrl: `${gl.TEST_HOST}/Commit451/Sub451`,
+  avatarUrl: null,
+};
+
+export const mockRawGroup = {
+  id: 1,
+  name: 'Sub451',
+  full_name: 'Commit451 / Sub451',
+  web_url: `${gl.TEST_HOST}/Commit451/Sub451`,
+  avatar_url: null,
+};
+
+export const mockFrequentGroups = [
+  {
+    id: 3,
+    name: 'Subgroup451',
+    full_name: 'Commit451 / Subgroup451',
+    webUrl: '/Commit451/Subgroup451',
+    avatarUrl: null,
+    frequency: 7,
+    lastAccessedOn: 1497979281815,
+  },
+  {
+    id: 1,
+    name: 'Commit451',
+    full_name: 'Commit451',
+    webUrl: '/Commit451',
+    avatarUrl: null,
+    frequency: 3,
+    lastAccessedOn: 1497979281815,
+  },
+];
+
+export const mockSearchedGroups = [mockRawGroup];
+export const mockProcessedSearchedGroups = [mockGroup];
+
+export const mockProject = {
+  id: 1,
+  name: 'GitLab Community Edition',
+  namespace: 'gitlab-org / gitlab-ce',
+  webUrl: `${gl.TEST_HOST}/gitlab-org/gitlab-ce`,
+  avatarUrl: null,
+};
+
+export const mockRawProject = {
+  id: 1,
+  name: 'GitLab Community Edition',
+  name_with_namespace: 'gitlab-org / gitlab-ce',
+  web_url: `${gl.TEST_HOST}/gitlab-org/gitlab-ce`,
+  avatar_url: null,
+};
+
+export const mockFrequentProjects = [
+  {
+    id: 1,
+    name: 'GitLab Community Edition',
+    namespace: 'gitlab-org / gitlab-ce',
+    webUrl: `${gl.TEST_HOST}/gitlab-org/gitlab-ce`,
+    avatarUrl: null,
+    frequency: 1,
+    lastAccessedOn: Date.now(),
+  },
+  {
+    id: 2,
+    name: 'GitLab CI',
+    namespace: 'gitlab-org / gitlab-ci',
+    webUrl: `${gl.TEST_HOST}/gitlab-org/gitlab-ci`,
+    avatarUrl: null,
+    frequency: 9,
+    lastAccessedOn: Date.now(),
+  },
+  {
+    id: 3,
+    name: 'Typeahead.Js',
+    namespace: 'twitter / typeahead-js',
+    webUrl: `${gl.TEST_HOST}/twitter/typeahead-js`,
+    avatarUrl: '/uploads/-/system/project/avatar/7/TWBS.png',
+    frequency: 2,
+    lastAccessedOn: Date.now(),
+  },
+  {
+    id: 4,
+    name: 'Intel',
+    namespace: 'platform / hardware / bsp / intel',
+    webUrl: `${gl.TEST_HOST}/platform/hardware/bsp/intel`,
+    avatarUrl: null,
+    frequency: 3,
+    lastAccessedOn: Date.now(),
+  },
+  {
+    id: 5,
+    name: 'v4.4',
+    namespace: 'platform / hardware / bsp / kernel / common / v4.4',
+    webUrl: `${gl.TEST_HOST}/platform/hardware/bsp/kernel/common/v4.4`,
+    avatarUrl: null,
+    frequency: 8,
+    lastAccessedOn: Date.now(),
+  },
+];
+
+export const mockSearchedProjects = [mockRawProject];
+export const mockProcessedSearchedProjects = [mockProject];
+
+export const unsortedFrequentItems = [
+  { id: 1, frequency: 12, lastAccessedOn: 1491400843391 },
+  { id: 2, frequency: 14, lastAccessedOn: 1488240890738 },
+  { id: 3, frequency: 44, lastAccessedOn: 1497675908472 },
+  { id: 4, frequency: 8, lastAccessedOn: 1497979281815 },
+  { id: 5, frequency: 34, lastAccessedOn: 1488089211943 },
+  { id: 6, frequency: 14, lastAccessedOn: 1493517292488 },
+  { id: 7, frequency: 42, lastAccessedOn: 1486815299875 },
+  { id: 8, frequency: 33, lastAccessedOn: 1500762279114 },
+  { id: 10, frequency: 46, lastAccessedOn: 1483251641543 },
+];
+
+/**
+ * This const has a specific order which tests authenticity
+ * of `getTopFrequentItems` method so
+ * DO NOT change order of items in this const.
+ */
+export const sortedFrequentItems = [
+  { id: 10, frequency: 46, lastAccessedOn: 1483251641543 },
+  { id: 3, frequency: 44, lastAccessedOn: 1497675908472 },
+  { id: 7, frequency: 42, lastAccessedOn: 1486815299875 },
+  { id: 5, frequency: 34, lastAccessedOn: 1488089211943 },
+  { id: 8, frequency: 33, lastAccessedOn: 1500762279114 },
+  { id: 6, frequency: 14, lastAccessedOn: 1493517292488 },
+  { id: 2, frequency: 14, lastAccessedOn: 1488240890738 },
+  { id: 1, frequency: 12, lastAccessedOn: 1491400843391 },
+  { id: 4, frequency: 8, lastAccessedOn: 1497979281815 },
+];
diff --git a/spec/javascripts/frequent_items/store/actions_spec.js b/spec/javascripts/frequent_items/store/actions_spec.js
new file mode 100644
index 00000000000..0cdd033d38f
--- /dev/null
+++ b/spec/javascripts/frequent_items/store/actions_spec.js
@@ -0,0 +1,225 @@
+import testAction from 'spec/helpers/vuex_action_helper';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import AccessorUtilities from '~/lib/utils/accessor';
+import * as actions from '~/frequent_items/store/actions';
+import * as types from '~/frequent_items/store/mutation_types';
+import state from '~/frequent_items/store/state';
+import {
+  mockNamespace,
+  mockStorageKey,
+  mockFrequentProjects,
+  mockSearchedProjects,
+} from '../mock_data';
+
+describe('Frequent Items Dropdown Store Actions', () => {
+  let mockedState;
+  let mock;
+
+  beforeEach(() => {
+    mockedState = state();
+    mock = new MockAdapter(axios);
+
+    mockedState.namespace = mockNamespace;
+    mockedState.storageKey = mockStorageKey;
+  });
+
+  afterEach(() => {
+    mock.restore();
+  });
+
+  describe('setNamespace', () => {
+    it('should set namespace', done => {
+      testAction(
+        actions.setNamespace,
+        mockNamespace,
+        mockedState,
+        [{ type: types.SET_NAMESPACE, payload: mockNamespace }],
+        [],
+        done,
+      );
+    });
+  });
+
+  describe('setStorageKey', () => {
+    it('should set storage key', done => {
+      testAction(
+        actions.setStorageKey,
+        mockStorageKey,
+        mockedState,
+        [{ type: types.SET_STORAGE_KEY, payload: mockStorageKey }],
+        [],
+        done,
+      );
+    });
+  });
+
+  describe('requestFrequentItems', () => {
+    it('should request frequent items', done => {
+      testAction(
+        actions.requestFrequentItems,
+        null,
+        mockedState,
+        [{ type: types.REQUEST_FREQUENT_ITEMS }],
+        [],
+        done,
+      );
+    });
+  });
+
+  describe('receiveFrequentItemsSuccess', () => {
+    it('should set frequent items', done => {
+      testAction(
+        actions.receiveFrequentItemsSuccess,
+        mockFrequentProjects,
+        mockedState,
+        [{ type: types.RECEIVE_FREQUENT_ITEMS_SUCCESS, payload: mockFrequentProjects }],
+        [],
+        done,
+      );
+    });
+  });
+
+  describe('receiveFrequentItemsError', () => {
+    it('should set frequent items error state', done => {
+      testAction(
+        actions.receiveFrequentItemsError,
+        null,
+        mockedState,
+        [{ type: types.RECEIVE_FREQUENT_ITEMS_ERROR }],
+        [],
+        done,
+      );
+    });
+  });
+
+  describe('fetchFrequentItems', () => {
+    it('should dispatch `receiveFrequentItemsSuccess`', done => {
+      mockedState.namespace = mockNamespace;
+      mockedState.storageKey = mockStorageKey;
+
+      testAction(
+        actions.fetchFrequentItems,
+        null,
+        mockedState,
+        [],
+        [{ type: 'requestFrequentItems' }, { type: 'receiveFrequentItemsSuccess', payload: [] }],
+        done,
+      );
+    });
+
+    it('should dispatch `receiveFrequentItemsError`', done => {
+      spyOn(AccessorUtilities, 'isLocalStorageAccessSafe').and.returnValue(false);
+      mockedState.namespace = mockNamespace;
+      mockedState.storageKey = mockStorageKey;
+
+      testAction(
+        actions.fetchFrequentItems,
+        null,
+        mockedState,
+        [],
+        [{ type: 'requestFrequentItems' }, { type: 'receiveFrequentItemsError' }],
+        done,
+      );
+    });
+  });
+
+  describe('requestSearchedItems', () => {
+    it('should request searched items', done => {
+      testAction(
+        actions.requestSearchedItems,
+        null,
+        mockedState,
+        [{ type: types.REQUEST_SEARCHED_ITEMS }],
+        [],
+        done,
+      );
+    });
+  });
+
+  describe('receiveSearchedItemsSuccess', () => {
+    it('should set searched items', done => {
+      testAction(
+        actions.receiveSearchedItemsSuccess,
+        mockSearchedProjects,
+        mockedState,
+        [{ type: types.RECEIVE_SEARCHED_ITEMS_SUCCESS, payload: mockSearchedProjects }],
+        [],
+        done,
+      );
+    });
+  });
+
+  describe('receiveSearchedItemsError', () => {
+    it('should set searched items error state', done => {
+      testAction(
+        actions.receiveSearchedItemsError,
+        null,
+        mockedState,
+        [{ type: types.RECEIVE_SEARCHED_ITEMS_ERROR }],
+        [],
+        done,
+      );
+    });
+  });
+
+  describe('fetchSearchedItems', () => {
+    beforeEach(() => {
+      gon.api_version = 'v4';
+    });
+
+    it('should dispatch `receiveSearchedItemsSuccess`', done => {
+      mock.onGet(/\/api\/v4\/projects.json(.*)$/).replyOnce(200, mockSearchedProjects);
+
+      testAction(
+        actions.fetchSearchedItems,
+        null,
+        mockedState,
+        [],
+        [
+          { type: 'requestSearchedItems' },
+          { type: 'receiveSearchedItemsSuccess', payload: mockSearchedProjects },
+        ],
+        done,
+      );
+    });
+
+    it('should dispatch `receiveSearchedItemsError`', done => {
+      gon.api_version = 'v4';
+      mock.onGet(/\/api\/v4\/projects.json(.*)$/).replyOnce(500);
+
+      testAction(
+        actions.fetchSearchedItems,
+        null,
+        mockedState,
+        [],
+        [{ type: 'requestSearchedItems' }, { type: 'receiveSearchedItemsError' }],
+        done,
+      );
+    });
+  });
+
+  describe('setSearchQuery', () => {
+    it('should commit query and dispatch `fetchSearchedItems` when query is present', done => {
+      testAction(
+        actions.setSearchQuery,
+        { query: 'test' },
+        mockedState,
+        [{ type: types.SET_SEARCH_QUERY }],
+        [{ type: 'fetchSearchedItems', payload: { query: 'test' } }],
+        done,
+      );
+    });
+
+    it('should commit query and dispatch `fetchFrequentItems` when query is empty', done => {
+      testAction(
+        actions.setSearchQuery,
+        null,
+        mockedState,
+        [{ type: types.SET_SEARCH_QUERY }],
+        [{ type: 'fetchFrequentItems' }],
+        done,
+      );
+    });
+  });
+});
diff --git a/spec/javascripts/frequent_items/store/getters_spec.js b/spec/javascripts/frequent_items/store/getters_spec.js
new file mode 100644
index 00000000000..1cd12eb6832
--- /dev/null
+++ b/spec/javascripts/frequent_items/store/getters_spec.js
@@ -0,0 +1,24 @@
+import state from '~/frequent_items/store/state';
+import * as getters from '~/frequent_items/store/getters';
+
+describe('Frequent Items Dropdown Store Getters', () => {
+  let mockedState;
+
+  beforeEach(() => {
+    mockedState = state();
+  });
+
+  describe('hasSearchQuery', () => {
+    it('should return `true` when search query is present', () => {
+      mockedState.searchQuery = 'test';
+
+      expect(getters.hasSearchQuery(mockedState)).toBe(true);
+    });
+
+    it('should return `false` when search query is empty', () => {
+      mockedState.searchQuery = '';
+
+      expect(getters.hasSearchQuery(mockedState)).toBe(false);
+    });
+  });
+});
diff --git a/spec/javascripts/frequent_items/store/mutations_spec.js b/spec/javascripts/frequent_items/store/mutations_spec.js
new file mode 100644
index 00000000000..d36964b2600
--- /dev/null
+++ b/spec/javascripts/frequent_items/store/mutations_spec.js
@@ -0,0 +1,117 @@
+import state from '~/frequent_items/store/state';
+import mutations from '~/frequent_items/store/mutations';
+import * as types from '~/frequent_items/store/mutation_types';
+import {
+  mockNamespace,
+  mockStorageKey,
+  mockFrequentProjects,
+  mockSearchedProjects,
+  mockProcessedSearchedProjects,
+  mockSearchedGroups,
+  mockProcessedSearchedGroups,
+} from '../mock_data';
+
+describe('Frequent Items dropdown mutations', () => {
+  let stateCopy;
+
+  beforeEach(() => {
+    stateCopy = state();
+  });
+
+  describe('SET_NAMESPACE', () => {
+    it('should set namespace', () => {
+      mutations[types.SET_NAMESPACE](stateCopy, mockNamespace);
+
+      expect(stateCopy.namespace).toEqual(mockNamespace);
+    });
+  });
+
+  describe('SET_STORAGE_KEY', () => {
+    it('should set storage key', () => {
+      mutations[types.SET_STORAGE_KEY](stateCopy, mockStorageKey);
+
+      expect(stateCopy.storageKey).toEqual(mockStorageKey);
+    });
+  });
+
+  describe('SET_SEARCH_QUERY', () => {
+    it('should set search query', () => {
+      const searchQuery = 'gitlab-ce';
+
+      mutations[types.SET_SEARCH_QUERY](stateCopy, searchQuery);
+
+      expect(stateCopy.searchQuery).toEqual(searchQuery);
+    });
+  });
+
+  describe('REQUEST_FREQUENT_ITEMS', () => {
+    it('should set view states when requesting frequent items', () => {
+      mutations[types.REQUEST_FREQUENT_ITEMS](stateCopy);
+
+      expect(stateCopy.isLoadingItems).toEqual(true);
+      expect(stateCopy.hasSearchQuery).toEqual(false);
+    });
+  });
+
+  describe('RECEIVE_FREQUENT_ITEMS_SUCCESS', () => {
+    it('should set view states when receiving frequent items', () => {
+      mutations[types.RECEIVE_FREQUENT_ITEMS_SUCCESS](stateCopy, mockFrequentProjects);
+
+      expect(stateCopy.items).toEqual(mockFrequentProjects);
+      expect(stateCopy.isLoadingItems).toEqual(false);
+      expect(stateCopy.hasSearchQuery).toEqual(false);
+      expect(stateCopy.isFetchFailed).toEqual(false);
+    });
+  });
+
+  describe('RECEIVE_FREQUENT_ITEMS_ERROR', () => {
+    it('should set items and view states when error occurs retrieving frequent items', () => {
+      mutations[types.RECEIVE_FREQUENT_ITEMS_ERROR](stateCopy);
+
+      expect(stateCopy.items).toEqual([]);
+      expect(stateCopy.isLoadingItems).toEqual(false);
+      expect(stateCopy.hasSearchQuery).toEqual(false);
+      expect(stateCopy.isFetchFailed).toEqual(true);
+    });
+  });
+
+  describe('REQUEST_SEARCHED_ITEMS', () => {
+    it('should set view states when requesting searched items', () => {
+      mutations[types.REQUEST_SEARCHED_ITEMS](stateCopy);
+
+      expect(stateCopy.isLoadingItems).toEqual(true);
+      expect(stateCopy.hasSearchQuery).toEqual(true);
+    });
+  });
+
+  describe('RECEIVE_SEARCHED_ITEMS_SUCCESS', () => {
+    it('should set items and view states when receiving searched items', () => {
+      mutations[types.RECEIVE_SEARCHED_ITEMS_SUCCESS](stateCopy, mockSearchedProjects);
+
+      expect(stateCopy.items).toEqual(mockProcessedSearchedProjects);
+      expect(stateCopy.isLoadingItems).toEqual(false);
+      expect(stateCopy.hasSearchQuery).toEqual(true);
+      expect(stateCopy.isFetchFailed).toEqual(false);
+    });
+
+    it('should also handle the different `full_name` key for namespace in groups payload', () => {
+      mutations[types.RECEIVE_SEARCHED_ITEMS_SUCCESS](stateCopy, mockSearchedGroups);
+
+      expect(stateCopy.items).toEqual(mockProcessedSearchedGroups);
+      expect(stateCopy.isLoadingItems).toEqual(false);
+      expect(stateCopy.hasSearchQuery).toEqual(true);
+      expect(stateCopy.isFetchFailed).toEqual(false);
+    });
+  });
+
+  describe('RECEIVE_SEARCHED_ITEMS_ERROR', () => {
+    it('should set view states when error occurs retrieving searched items', () => {
+      mutations[types.RECEIVE_SEARCHED_ITEMS_ERROR](stateCopy);
+
+      expect(stateCopy.items).toEqual([]);
+      expect(stateCopy.isLoadingItems).toEqual(false);
+      expect(stateCopy.hasSearchQuery).toEqual(true);
+      expect(stateCopy.isFetchFailed).toEqual(true);
+    });
+  });
+});
diff --git a/spec/javascripts/frequent_items/utils_spec.js b/spec/javascripts/frequent_items/utils_spec.js
new file mode 100644
index 00000000000..cd27d79b29a
--- /dev/null
+++ b/spec/javascripts/frequent_items/utils_spec.js
@@ -0,0 +1,89 @@
+import bp from '~/breakpoints';
+import { isMobile, getTopFrequentItems, updateExistingFrequentItem } from '~/frequent_items/utils';
+import { HOUR_IN_MS, FREQUENT_ITEMS } from '~/frequent_items/constants';
+import { mockProject, unsortedFrequentItems, sortedFrequentItems } from './mock_data';
+
+describe('Frequent Items utils spec', () => {
+  describe('isMobile', () => {
+    it('returns true when the screen is small ', () => {
+      spyOn(bp, 'getBreakpointSize').and.returnValue('sm');
+
+      expect(isMobile()).toBe(true);
+    });
+
+    it('returns true when the screen is extra-small ', () => {
+      spyOn(bp, 'getBreakpointSize').and.returnValue('xs');
+
+      expect(isMobile()).toBe(true);
+    });
+
+    it('returns false when the screen is larger than small ', () => {
+      spyOn(bp, 'getBreakpointSize').and.returnValue('md');
+
+      expect(isMobile()).toBe(false);
+    });
+  });
+
+  describe('getTopFrequentItems', () => {
+    it('returns empty array if no items provided', () => {
+      const result = getTopFrequentItems();
+
+      expect(result.length).toBe(0);
+    });
+
+    it('returns correct amount of items for mobile', () => {
+      spyOn(bp, 'getBreakpointSize').and.returnValue('sm');
+      const result = getTopFrequentItems(unsortedFrequentItems);
+
+      expect(result.length).toBe(FREQUENT_ITEMS.LIST_COUNT_MOBILE);
+    });
+
+    it('returns correct amount of items for desktop', () => {
+      spyOn(bp, 'getBreakpointSize').and.returnValue('lg');
+      const result = getTopFrequentItems(unsortedFrequentItems);
+
+      expect(result.length).toBe(FREQUENT_ITEMS.LIST_COUNT_DESKTOP);
+    });
+
+    it('sorts frequent items in order of frequency and lastAccessedOn', () => {
+      spyOn(bp, 'getBreakpointSize').and.returnValue('lg');
+      const result = getTopFrequentItems(unsortedFrequentItems);
+      const expectedResult = sortedFrequentItems.slice(0, FREQUENT_ITEMS.LIST_COUNT_DESKTOP);
+
+      expect(result).toEqual(expectedResult);
+    });
+  });
+
+  describe('updateExistingFrequentItem', () => {
+    let mockedProject;
+
+    beforeEach(() => {
+      mockedProject = {
+        ...mockProject,
+        frequency: 1,
+        lastAccessedOn: 1497979281815,
+      };
+    });
+
+    it('updates item if accessed over an hour ago', () => {
+      const newTimestamp = Date.now() + HOUR_IN_MS + 1;
+      const newItem = {
+        ...mockedProject,
+        lastAccessedOn: newTimestamp,
+      };
+      const result = updateExistingFrequentItem(mockedProject, newItem);
+
+      expect(result.frequency).toBe(mockedProject.frequency + 1);
+    });
+
+    it('does not update item if accessed within the hour', () => {
+      const newItem = {
+        ...mockedProject,
lastAccessedOn: mockedProject.lastAccessedOn + HOUR_IN_MS, + }; + const result = updateExistingFrequentItem(mockedProject, newItem); + + expect(result.frequency).toBe(mockedProject.frequency); + }); + }); +}); diff --git a/spec/javascripts/ide/components/merge_requests/info_spec.js b/spec/javascripts/ide/components/merge_requests/info_spec.js new file mode 100644 index 00000000000..98a29e5128b --- /dev/null +++ b/spec/javascripts/ide/components/merge_requests/info_spec.js @@ -0,0 +1,51 @@ +import Vue from 'vue'; +import '~/behaviors/markdown/render_gfm'; +import { createStore } from '~/ide/stores'; +import Info from '~/ide/components/merge_requests/info.vue'; +import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper'; + +describe('IDE merge request details', () => { + let Component; + let vm; + + beforeAll(() => { + Component = Vue.extend(Info); + }); + + beforeEach(() => { + const store = createStore(); + store.state.currentProjectId = 'gitlab-ce'; + store.state.currentMergeRequestId = 1; + store.state.projects['gitlab-ce'] = { + mergeRequests: { + 1: { + iid: 1, + title: 'Testing', + title_html: '<span class="title-html">Testing</span>', + description: 'Description', + description_html: '<p class="description-html">Description HTML</p>', + }, + }, + }; + + vm = createComponentWithStore(Component, store).$mount(); + }); + + afterEach(() => { + vm.$destroy(); + }); + + it('renders merge request IID', () => { + expect(vm.$el.querySelector('.detail-page-header').textContent).toContain('!1'); + }); + + it('renders title as HTML', () => { + expect(vm.$el.querySelector('.title-html')).not.toBe(null); + expect(vm.$el.querySelector('.title').textContent).toContain('Testing'); + }); + + it('renders description as HTML', () => { + expect(vm.$el.querySelector('.description-html')).not.toBe(null); + expect(vm.$el.querySelector('.description').textContent).toContain('Description HTML'); + }); +}); diff --git a/spec/javascripts/ide/components/panes/right_spec.js b/spec/javascripts/ide/components/panes/right_spec.js new file mode 100644 index 00000000000..99879fb0930 --- /dev/null +++ b/spec/javascripts/ide/components/panes/right_spec.js @@ -0,0 +1,72 @@ +import Vue from 'vue'; +import '~/behaviors/markdown/render_gfm'; +import { createStore } from '~/ide/stores'; +import RightPane from '~/ide/components/panes/right.vue'; +import { rightSidebarViews } from '~/ide/constants'; +import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper'; + +describe('IDE right pane', () => { + let Component; + let vm; + + beforeAll(() => { + Component = Vue.extend(RightPane); + }); + + beforeEach(() => { + const store = createStore(); + + vm = createComponentWithStore(Component, store).$mount(); + }); + + afterEach(() => { + vm.$destroy(); + }); + + describe('active', () => { + it('renders merge request button as active', done => { + vm.$store.state.rightPane = rightSidebarViews.mergeRequestInfo; + vm.$store.state.currentMergeRequestId = '123'; + vm.$store.state.currentProjectId = 'gitlab-ce'; + vm.$store.state.currentMergeRequestId = 1; + vm.$store.state.projects['gitlab-ce'] = { + mergeRequests: { + 1: { + iid: 1, + title: 'Testing', + title_html: '<span class="title-html">Testing</span>', + description: 'Description', + description_html: '<p class="description-html">Description HTML</p>', + }, + }, + }; + + vm.$nextTick(() => { + expect(vm.$el.querySelector('.ide-sidebar-link.active')).not.toBe(null); + expect( + 
vm.$el.querySelector('.ide-sidebar-link.active').getAttribute('data-original-title'), + ).toBe('Merge Request'); + + done(); + }); + }); + }); + + describe('click', () => { + beforeEach(() => { + spyOn(vm, 'setRightPane'); + }); + + it('sets view to merge request', done => { + vm.$store.state.currentMergeRequestId = '123'; + + vm.$nextTick(() => { + vm.$el.querySelector('.ide-sidebar-link').click(); + + expect(vm.setRightPane).toHaveBeenCalledWith(rightSidebarViews.mergeRequestInfo); + + done(); + }); + }); + }); +}); diff --git a/spec/javascripts/ide/stores/actions/merge_request_spec.js b/spec/javascripts/ide/stores/actions/merge_request_spec.js index c99ccc70c6a..90c28c769f7 100644 --- a/spec/javascripts/ide/stores/actions/merge_request_spec.js +++ b/spec/javascripts/ide/stores/actions/merge_request_spec.js @@ -39,7 +39,9 @@ describe('IDE store merge request actions', () => { store .dispatch('getMergeRequestData', { projectId: 'abcproject', mergeRequestId: 1 }) .then(() => { - expect(service.getProjectMergeRequestData).toHaveBeenCalledWith('abcproject', 1); + expect(service.getProjectMergeRequestData).toHaveBeenCalledWith('abcproject', 1, { + render_html: true, + }); done(); }) diff --git a/spec/javascripts/notes/components/discussion_counter_spec.js b/spec/javascripts/notes/components/discussion_counter_spec.js index 7b2302e6f47..a3869cc6498 100644 --- a/spec/javascripts/notes/components/discussion_counter_spec.js +++ b/spec/javascripts/notes/components/discussion_counter_spec.js @@ -32,12 +32,12 @@ describe('DiscussionCounter component', () => { { ...discussionMock, id: discussionMock.id, - notes: [{ ...discussionMock.notes[0], resolved: true }], + notes: [{ ...discussionMock.notes[0], resolvable: true, resolved: true }], }, { ...discussionMock, id: discussionMock.id + 1, - notes: [{ ...discussionMock.notes[0], resolved: false }], + notes: [{ ...discussionMock.notes[0], resolvable: true, resolved: false }], }, ]; const firstDiscussionId = discussionMock.id + 1; diff --git a/spec/javascripts/notes/components/noteable_discussion_spec.js b/spec/javascripts/notes/components/noteable_discussion_spec.js index 058ddb6202f..7da931fd9cb 100644 --- a/spec/javascripts/notes/components/noteable_discussion_spec.js +++ b/spec/javascripts/notes/components/noteable_discussion_spec.js @@ -4,22 +4,23 @@ import noteableDiscussion from '~/notes/components/noteable_discussion.vue'; import '~/behaviors/markdown/render_gfm'; import { noteableDataMock, discussionMock, notesDataMock } from '../mock_data'; +const discussionWithTwoUnresolvedNotes = 'merge_requests/resolved_diff_discussion.json'; + describe('noteable_discussion component', () => { + const Component = Vue.extend(noteableDiscussion); let store; let vm; - beforeEach(() => { - const Component = Vue.extend(noteableDiscussion); + preloadFixtures(discussionWithTwoUnresolvedNotes); + beforeEach(() => { store = createStore(); store.dispatch('setNoteableData', noteableDataMock); store.dispatch('setNotesData', notesDataMock); vm = new Component({ store, - propsData: { - discussion: discussionMock, - }, + propsData: { discussion: discussionMock }, }).$mount(); }); @@ -84,7 +85,9 @@ describe('noteable_discussion component', () => { }); it('is true if there are two unresolved discussions', done => { - spyOnProperty(vm, 'unresolvedDiscussions').and.returnValue([{}, {}]); + const discussion = getJSONFixture(discussionWithTwoUnresolvedNotes)[0]; + discussion.notes[0].resolved = false; + vm.$store.dispatch('setInitialNotes', [discussion, discussion]); 
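The `resolvable: true` flags added to the note mocks in the hunks above, together with the `resolvedDiscussionsById` expectations in the getters spec further below, all pin down one rule: a discussion counts as resolved only when every resolvable (i.e. non-system) note in it is resolved. A getter consistent with that behaviour might look roughly like the following sketch; it is reconstructed from the spec assertions rather than copied from the shipped `~/notes/stores/getters` module, so names and details may differ.

    // Hedged reconstruction from the spec behaviour, not the shipped getter.
    export const resolvedDiscussionsById = state => {
      const map = {};

      state.discussions.forEach(discussion => {
        // System notes are not resolvable, so they must not block a
        // discussion from counting as resolved ("ignores unresolved
        // system notes" in the getters spec below).
        const resolvableNotes = discussion.notes.filter(note => note.resolvable);

        if (resolvableNotes.length && resolvableNotes.every(note => note.resolved)) {
          map[discussion.id] = discussion;
        }
      });

      return map;
    };
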
Vue.nextTick() .then(() => { @@ -105,12 +108,12 @@ describe('noteable_discussion component', () => { { ...discussionMock, id: discussionMock.id + 1, - notes: [{ ...discussionMock.notes[0], resolved: true }], + notes: [{ ...discussionMock.notes[0], resolvable: true, resolved: true }], }, { ...discussionMock, id: discussionMock.id + 2, - notes: [{ ...discussionMock.notes[0], resolved: false }], + notes: [{ ...discussionMock.notes[0], resolvable: true, resolved: false }], }, ]; const nextDiscussionId = discussionMock.id + 2; diff --git a/spec/javascripts/notes/mock_data.js b/spec/javascripts/notes/mock_data.js index 547efa32694..be2a8ba67fe 100644 --- a/spec/javascripts/notes/mock_data.js +++ b/spec/javascripts/notes/mock_data.js @@ -165,6 +165,7 @@ export const note = { report_abuse_path: '/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F7%23note_546&user_id=1', path: '/gitlab-org/gitlab-ce/notes/546', + cached_markdown_version: 11, }; export const discussionMock = { @@ -303,6 +304,7 @@ export const discussionMock = { }, ], individual_note: false, + resolvable: true, }; export const loggedOutnoteableData = { diff --git a/spec/javascripts/notes/stores/getters_spec.js b/spec/javascripts/notes/stores/getters_spec.js index 815cc09621f..41599e00122 100644 --- a/spec/javascripts/notes/stores/getters_spec.js +++ b/spec/javascripts/notes/stores/getters_spec.js @@ -7,9 +7,13 @@ import { collapseNotesMock, } from '../mock_data'; +const discussionWithTwoUnresolvedNotes = 'merge_requests/resolved_diff_discussion.json'; + describe('Getters Notes Store', () => { let state; + preloadFixtures(discussionWithTwoUnresolvedNotes); + beforeEach(() => { state = { discussions: [individualNote], @@ -22,12 +26,26 @@ describe('Getters Notes Store', () => { noteableData: noteableDataMock, }; }); + describe('discussions', () => { it('should return all discussions in the store', () => { expect(getters.discussions(state)).toEqual([individualNote]); }); }); + describe('resolvedDiscussionsById', () => { + it('ignores unresolved system notes', () => { + const [discussion] = getJSONFixture(discussionWithTwoUnresolvedNotes); + discussion.notes[0].resolved = true; + discussion.notes[1].resolved = false; + state.discussions.push(discussion); + + expect(getters.resolvedDiscussionsById(state)).toEqual({ + [discussion.id]: discussion, + }); + }); + }); + describe('Collapsed notes', () => { const stateCollapsedNotes = { discussions: collapseNotesMock, diff --git a/spec/javascripts/profile/add_ssh_key_validation_spec.js b/spec/javascripts/profile/add_ssh_key_validation_spec.js new file mode 100644 index 00000000000..c71a2885acc --- /dev/null +++ b/spec/javascripts/profile/add_ssh_key_validation_spec.js @@ -0,0 +1,69 @@ +import AddSshKeyValidation from '../../../app/assets/javascripts/profile/add_ssh_key_validation'; + +describe('AddSshKeyValidation', () => { + describe('submit', () => { + it('returns true if isValid is true', () => { + const addSshKeyValidation = new AddSshKeyValidation({}); + spyOn(AddSshKeyValidation, 'isPublicKey').and.returnValue(true); + + expect(addSshKeyValidation.submit()).toBeTruthy(); + }); + + it('calls preventDefault and toggleWarning if isValid is false', () => { + const addSshKeyValidation = new AddSshKeyValidation({}); + const event = jasmine.createSpyObj('event', ['preventDefault']); + spyOn(AddSshKeyValidation, 'isPublicKey').and.returnValue(false); + spyOn(addSshKeyValidation, 'toggleWarning'); + + addSshKeyValidation.submit(event); + + 
expect(event.preventDefault).toHaveBeenCalled(); + expect(addSshKeyValidation.toggleWarning).toHaveBeenCalledWith(true); + }); + }); + + describe('toggleWarning', () => { + it('shows warningElement and hides originalSubmitElement if isVisible is true', () => { + const warningElement = document.createElement('div'); + const originalSubmitElement = document.createElement('div'); + warningElement.classList.add('hide'); + + const addSshKeyValidation = new AddSshKeyValidation( + {}, + warningElement, + originalSubmitElement, + ); + addSshKeyValidation.toggleWarning(true); + + expect(warningElement.classList.contains('hide')).toBeFalsy(); + expect(originalSubmitElement.classList.contains('hide')).toBeTruthy(); + }); + + it('hides warningElement and shows originalSubmitElement if isVisible is false', () => { + const warningElement = document.createElement('div'); + const originalSubmitElement = document.createElement('div'); + originalSubmitElement.classList.add('hide'); + + const addSshKeyValidation = new AddSshKeyValidation( + {}, + warningElement, + originalSubmitElement, + ); + addSshKeyValidation.toggleWarning(false); + + expect(warningElement.classList.contains('hide')).toBeTruthy(); + expect(originalSubmitElement.classList.contains('hide')).toBeFalsy(); + }); + }); + + describe('isPublicKey', () => { + it('returns false if probably invalid public ssh key', () => { + expect(AddSshKeyValidation.isPublicKey('nope')).toBeFalsy(); + }); + + it('returns true if probably valid public ssh key', () => { + expect(AddSshKeyValidation.isPublicKey('ssh-')).toBeTruthy(); + expect(AddSshKeyValidation.isPublicKey('ecdsa-sha2-')).toBeTruthy(); + }); + }); +}); diff --git a/spec/javascripts/projects_dropdown/components/app_spec.js b/spec/javascripts/projects_dropdown/components/app_spec.js deleted file mode 100644 index 38b31c3d727..00000000000 --- a/spec/javascripts/projects_dropdown/components/app_spec.js +++ /dev/null @@ -1,349 +0,0 @@ -import Vue from 'vue'; - -import bp from '~/breakpoints'; -import appComponent from '~/projects_dropdown/components/app.vue'; -import eventHub from '~/projects_dropdown/event_hub'; -import ProjectsStore from '~/projects_dropdown/store/projects_store'; -import ProjectsService from '~/projects_dropdown/service/projects_service'; - -import mountComponent from 'spec/helpers/vue_mount_component_helper'; -import { currentSession, mockProject, mockRawProject } from '../mock_data'; - -const createComponent = () => { - gon.api_version = currentSession.apiVersion; - const Component = Vue.extend(appComponent); - const store = new ProjectsStore(); - const service = new ProjectsService(currentSession.username); - - return mountComponent(Component, { - store, - service, - currentUserName: currentSession.username, - currentProject: currentSession.project, - }); -}; - -const returnServicePromise = (data, failed) => - new Promise((resolve, reject) => { - if (failed) { - reject(data); - } else { - resolve({ - json() { - return data; - }, - }); - } - }); - -describe('AppComponent', () => { - describe('computed', () => { - let vm; - - beforeEach(() => { - vm = createComponent(); - }); - - afterEach(() => { - vm.$destroy(); - }); - - describe('frequentProjects', () => { - it('should return list of frequently accessed projects from store', () => { - expect(vm.frequentProjects).toBeDefined(); - expect(vm.frequentProjects.length).toBe(0); - - vm.store.setFrequentProjects([mockProject]); - expect(vm.frequentProjects).toBeDefined(); - expect(vm.frequentProjects.length).toBe(1); - }); - }); - - 
describe('searchProjects', () => { - it('should return list of frequently accessed projects from store', () => { - expect(vm.searchProjects).toBeDefined(); - expect(vm.searchProjects.length).toBe(0); - - vm.store.setSearchedProjects([mockRawProject]); - expect(vm.searchProjects).toBeDefined(); - expect(vm.searchProjects.length).toBe(1); - }); - }); - }); - - describe('methods', () => { - let vm; - - beforeEach(() => { - vm = createComponent(); - }); - - afterEach(() => { - vm.$destroy(); - }); - - describe('toggleFrequentProjectsList', () => { - it('should toggle props which control visibility of Frequent Projects list from state passed', () => { - vm.toggleFrequentProjectsList(true); - expect(vm.isLoadingProjects).toBeFalsy(); - expect(vm.isSearchListVisible).toBeFalsy(); - expect(vm.isFrequentsListVisible).toBeTruthy(); - - vm.toggleFrequentProjectsList(false); - expect(vm.isLoadingProjects).toBeTruthy(); - expect(vm.isSearchListVisible).toBeTruthy(); - expect(vm.isFrequentsListVisible).toBeFalsy(); - }); - }); - - describe('toggleSearchProjectsList', () => { - it('should toggle props which control visibility of Searched Projects list from state passed', () => { - vm.toggleSearchProjectsList(true); - expect(vm.isLoadingProjects).toBeFalsy(); - expect(vm.isFrequentsListVisible).toBeFalsy(); - expect(vm.isSearchListVisible).toBeTruthy(); - - vm.toggleSearchProjectsList(false); - expect(vm.isLoadingProjects).toBeTruthy(); - expect(vm.isFrequentsListVisible).toBeTruthy(); - expect(vm.isSearchListVisible).toBeFalsy(); - }); - }); - - describe('toggleLoader', () => { - it('should toggle props which control visibility of list loading animation from state passed', () => { - vm.toggleLoader(true); - expect(vm.isFrequentsListVisible).toBeFalsy(); - expect(vm.isSearchListVisible).toBeFalsy(); - expect(vm.isLoadingProjects).toBeTruthy(); - - vm.toggleLoader(false); - expect(vm.isFrequentsListVisible).toBeTruthy(); - expect(vm.isSearchListVisible).toBeTruthy(); - expect(vm.isLoadingProjects).toBeFalsy(); - }); - }); - - describe('fetchFrequentProjects', () => { - it('should set props for loading animation to `true` while frequent projects list is being loaded', () => { - spyOn(vm, 'toggleLoader'); - - vm.fetchFrequentProjects(); - expect(vm.isLocalStorageFailed).toBeFalsy(); - expect(vm.toggleLoader).toHaveBeenCalledWith(true); - }); - - it('should set props for loading animation to `false` and props for frequent projects list to `true` once data is loaded', () => { - const mockData = [mockProject]; - - spyOn(vm.service, 'getFrequentProjects').and.returnValue(mockData); - spyOn(vm.store, 'setFrequentProjects'); - spyOn(vm, 'toggleFrequentProjectsList'); - - vm.fetchFrequentProjects(); - expect(vm.service.getFrequentProjects).toHaveBeenCalled(); - expect(vm.store.setFrequentProjects).toHaveBeenCalledWith(mockData); - expect(vm.toggleFrequentProjectsList).toHaveBeenCalledWith(true); - }); - - it('should set props for failure message to `true` when method fails to fetch frequent projects list', () => { - spyOn(vm.service, 'getFrequentProjects').and.returnValue(null); - spyOn(vm.store, 'setFrequentProjects'); - spyOn(vm, 'toggleFrequentProjectsList'); - - expect(vm.isLocalStorageFailed).toBeFalsy(); - - vm.fetchFrequentProjects(); - expect(vm.service.getFrequentProjects).toHaveBeenCalled(); - expect(vm.store.setFrequentProjects).toHaveBeenCalledWith([]); - expect(vm.toggleFrequentProjectsList).toHaveBeenCalledWith(true); - expect(vm.isLocalStorageFailed).toBeTruthy(); - }); - - it('should set props for 
search results list to `true` if search query was already made previously', () => { - spyOn(bp, 'getBreakpointSize').and.returnValue('md'); - spyOn(vm.service, 'getFrequentProjects'); - spyOn(vm, 'toggleSearchProjectsList'); - - vm.searchQuery = 'test'; - vm.fetchFrequentProjects(); - expect(vm.service.getFrequentProjects).not.toHaveBeenCalled(); - expect(vm.toggleSearchProjectsList).toHaveBeenCalledWith(true); - }); - - it('should set props for frequent projects list to `true` if search query was already made but screen size is less than 768px', () => { - spyOn(bp, 'getBreakpointSize').and.returnValue('sm'); - spyOn(vm, 'toggleSearchProjectsList'); - spyOn(vm.service, 'getFrequentProjects'); - - vm.searchQuery = 'test'; - vm.fetchFrequentProjects(); - expect(vm.service.getFrequentProjects).toHaveBeenCalled(); - expect(vm.toggleSearchProjectsList).not.toHaveBeenCalled(); - }); - }); - - describe('fetchSearchedProjects', () => { - const searchQuery = 'test'; - - it('should perform search with provided search query', done => { - const mockData = [mockRawProject]; - spyOn(vm, 'toggleLoader'); - spyOn(vm, 'toggleSearchProjectsList'); - spyOn(vm.service, 'getSearchedProjects').and.returnValue(returnServicePromise(mockData)); - spyOn(vm.store, 'setSearchedProjects'); - - vm.fetchSearchedProjects(searchQuery); - setTimeout(() => { - expect(vm.searchQuery).toBe(searchQuery); - expect(vm.toggleLoader).toHaveBeenCalledWith(true); - expect(vm.service.getSearchedProjects).toHaveBeenCalledWith(searchQuery); - expect(vm.toggleSearchProjectsList).toHaveBeenCalledWith(true); - expect(vm.store.setSearchedProjects).toHaveBeenCalledWith(mockData); - done(); - }, 0); - }); - - it('should update props for showing search failure', done => { - spyOn(vm, 'toggleSearchProjectsList'); - spyOn(vm.service, 'getSearchedProjects').and.returnValue(returnServicePromise({}, true)); - - vm.fetchSearchedProjects(searchQuery); - setTimeout(() => { - expect(vm.searchQuery).toBe(searchQuery); - expect(vm.service.getSearchedProjects).toHaveBeenCalledWith(searchQuery); - expect(vm.isSearchFailed).toBeTruthy(); - expect(vm.toggleSearchProjectsList).toHaveBeenCalledWith(true); - done(); - }, 0); - }); - }); - - describe('logCurrentProjectAccess', () => { - it('should log current project access via service', done => { - spyOn(vm.service, 'logProjectAccess'); - - vm.currentProject = mockProject; - vm.logCurrentProjectAccess(); - - setTimeout(() => { - expect(vm.service.logProjectAccess).toHaveBeenCalledWith(mockProject); - done(); - }, 1); - }); - }); - - describe('handleSearchClear', () => { - it('should show frequent projects list when search input is cleared', () => { - spyOn(vm.store, 'clearSearchedProjects'); - spyOn(vm, 'toggleFrequentProjectsList'); - - vm.handleSearchClear(); - - expect(vm.toggleFrequentProjectsList).toHaveBeenCalledWith(true); - expect(vm.store.clearSearchedProjects).toHaveBeenCalled(); - expect(vm.searchQuery).toBe(''); - }); - }); - - describe('handleSearchFailure', () => { - it('should show failure message within dropdown', () => { - spyOn(vm, 'toggleSearchProjectsList'); - - vm.handleSearchFailure(); - expect(vm.toggleSearchProjectsList).toHaveBeenCalledWith(true); - expect(vm.isSearchFailed).toBeTruthy(); - }); - }); - }); - - describe('created', () => { - it('should bind event listeners on eventHub', done => { - spyOn(eventHub, '$on'); - - createComponent().$mount(); - - Vue.nextTick(() => { - expect(eventHub.$on).toHaveBeenCalledWith('dropdownOpen', jasmine.any(Function)); - 
expect(eventHub.$on).toHaveBeenCalledWith('searchProjects', jasmine.any(Function)); - expect(eventHub.$on).toHaveBeenCalledWith('searchCleared', jasmine.any(Function)); - expect(eventHub.$on).toHaveBeenCalledWith('searchFailed', jasmine.any(Function)); - done(); - }); - }); - }); - - describe('beforeDestroy', () => { - it('should unbind event listeners on eventHub', done => { - const vm = createComponent(); - spyOn(eventHub, '$off'); - - vm.$mount(); - vm.$destroy(); - - Vue.nextTick(() => { - expect(eventHub.$off).toHaveBeenCalledWith('dropdownOpen', jasmine.any(Function)); - expect(eventHub.$off).toHaveBeenCalledWith('searchProjects', jasmine.any(Function)); - expect(eventHub.$off).toHaveBeenCalledWith('searchCleared', jasmine.any(Function)); - expect(eventHub.$off).toHaveBeenCalledWith('searchFailed', jasmine.any(Function)); - done(); - }); - }); - }); - - describe('template', () => { - let vm; - - beforeEach(() => { - vm = createComponent(); - }); - - afterEach(() => { - vm.$destroy(); - }); - - it('should render search input', () => { - expect(vm.$el.querySelector('.search-input-container')).toBeDefined(); - }); - - it('should render loading animation', done => { - vm.toggleLoader(true); - Vue.nextTick(() => { - const loadingEl = vm.$el.querySelector('.loading-animation'); - - expect(loadingEl).toBeDefined(); - expect(loadingEl.classList.contains('prepend-top-20')).toBeTruthy(); - expect(loadingEl.querySelector('i').getAttribute('aria-label')).toBe('Loading projects'); - done(); - }); - }); - - it('should render frequent projects list header', done => { - vm.toggleFrequentProjectsList(true); - Vue.nextTick(() => { - const sectionHeaderEl = vm.$el.querySelector('.section-header'); - - expect(sectionHeaderEl).toBeDefined(); - expect(sectionHeaderEl.innerText.trim()).toBe('Frequently visited'); - done(); - }); - }); - - it('should render frequent projects list', done => { - vm.toggleFrequentProjectsList(true); - Vue.nextTick(() => { - expect(vm.$el.querySelector('.projects-list-frequent-container')).toBeDefined(); - done(); - }); - }); - - it('should render searched projects list', done => { - vm.toggleSearchProjectsList(true); - Vue.nextTick(() => { - expect(vm.$el.querySelector('.section-header')).toBe(null); - expect(vm.$el.querySelector('.projects-list-search-container')).toBeDefined(); - done(); - }); - }); - }); -}); diff --git a/spec/javascripts/projects_dropdown/components/projects_list_frequent_spec.js b/spec/javascripts/projects_dropdown/components/projects_list_frequent_spec.js deleted file mode 100644 index 2bafb4e81ca..00000000000 --- a/spec/javascripts/projects_dropdown/components/projects_list_frequent_spec.js +++ /dev/null @@ -1,72 +0,0 @@ -import Vue from 'vue'; - -import projectsListFrequentComponent from '~/projects_dropdown/components/projects_list_frequent.vue'; - -import mountComponent from 'spec/helpers/vue_mount_component_helper'; -import { mockFrequents } from '../mock_data'; - -const createComponent = () => { - const Component = Vue.extend(projectsListFrequentComponent); - - return mountComponent(Component, { - projects: mockFrequents, - localStorageFailed: false, - }); -}; - -describe('ProjectsListFrequentComponent', () => { - let vm; - - beforeEach(() => { - vm = createComponent(); - }); - - afterEach(() => { - vm.$destroy(); - }); - - describe('computed', () => { - describe('isListEmpty', () => { - it('should return `true` or `false` representing whether if `projects` is empty of not', () => { - vm.projects = []; - expect(vm.isListEmpty).toBeTruthy(); - - 
vm.projects = mockFrequents; - expect(vm.isListEmpty).toBeFalsy(); - }); - }); - - describe('listEmptyMessage', () => { - it('should return appropriate empty list message based on value of `localStorageFailed` prop', () => { - vm.localStorageFailed = true; - expect(vm.listEmptyMessage).toBe('This feature requires browser localStorage support'); - - vm.localStorageFailed = false; - expect(vm.listEmptyMessage).toBe('Projects you visit often will appear here'); - }); - }); - }); - - describe('template', () => { - it('should render component element with list of projects', (done) => { - vm.projects = mockFrequents; - - Vue.nextTick(() => { - expect(vm.$el.classList.contains('projects-list-frequent-container')).toBeTruthy(); - expect(vm.$el.querySelectorAll('ul.list-unstyled').length).toBe(1); - expect(vm.$el.querySelectorAll('li.projects-list-item-container').length).toBe(5); - done(); - }); - }); - - it('should render component element with empty message', (done) => { - vm.projects = []; - - Vue.nextTick(() => { - expect(vm.$el.querySelectorAll('li.section-empty').length).toBe(1); - expect(vm.$el.querySelectorAll('li.projects-list-item-container').length).toBe(0); - done(); - }); - }); - }); -}); diff --git a/spec/javascripts/projects_dropdown/components/projects_list_search_spec.js b/spec/javascripts/projects_dropdown/components/projects_list_search_spec.js deleted file mode 100644 index c4b86d77034..00000000000 --- a/spec/javascripts/projects_dropdown/components/projects_list_search_spec.js +++ /dev/null @@ -1,84 +0,0 @@ -import Vue from 'vue'; - -import projectsListSearchComponent from '~/projects_dropdown/components/projects_list_search.vue'; - -import mountComponent from 'spec/helpers/vue_mount_component_helper'; -import { mockProject } from '../mock_data'; - -const createComponent = () => { - const Component = Vue.extend(projectsListSearchComponent); - - return mountComponent(Component, { - projects: [mockProject], - matcher: 'lab', - searchFailed: false, - }); -}; - -describe('ProjectsListSearchComponent', () => { - let vm; - - beforeEach(() => { - vm = createComponent(); - }); - - afterEach(() => { - vm.$destroy(); - }); - - describe('computed', () => { - describe('isListEmpty', () => { - it('should return `true` or `false` representing whether if `projects` is empty of not', () => { - vm.projects = []; - expect(vm.isListEmpty).toBeTruthy(); - - vm.projects = [mockProject]; - expect(vm.isListEmpty).toBeFalsy(); - }); - }); - - describe('listEmptyMessage', () => { - it('should return appropriate empty list message based on value of `searchFailed` prop', () => { - vm.searchFailed = true; - expect(vm.listEmptyMessage).toBe('Something went wrong on our end.'); - - vm.searchFailed = false; - expect(vm.listEmptyMessage).toBe('Sorry, no projects matched your search'); - }); - }); - }); - - describe('template', () => { - it('should render component element with list of projects', (done) => { - vm.projects = [mockProject]; - - Vue.nextTick(() => { - expect(vm.$el.classList.contains('projects-list-search-container')).toBeTruthy(); - expect(vm.$el.querySelectorAll('ul.list-unstyled').length).toBe(1); - expect(vm.$el.querySelectorAll('li.projects-list-item-container').length).toBe(1); - done(); - }); - }); - - it('should render component element with empty message', (done) => { - vm.projects = []; - - Vue.nextTick(() => { - expect(vm.$el.querySelectorAll('li.section-empty').length).toBe(1); - expect(vm.$el.querySelectorAll('li.projects-list-item-container').length).toBe(0); - done(); - }); - }); 
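The deleted list-component specs above and below exercise two small computed props, `isListEmpty` and `listEmptyMessage`. For readers without the removed `projects_list_search.vue` source to hand, a component shape consistent with these assertions would be roughly the sketch below; this is an editorial reconstruction (the dropdown was replaced by `~/frequent_items`), not the code that was actually deleted.

    // Hypothetical reconstruction of the deleted component's computed props;
    // the empty-list messages are taken verbatim from the spec expectations.
    export default {
      props: {
        projects: { type: Array, required: true },
        searchFailed: { type: Boolean, required: true },
      },
      computed: {
        isListEmpty() {
          return this.projects.length === 0;
        },
        listEmptyMessage() {
          return this.searchFailed
            ? 'Something went wrong on our end.'
            : 'Sorry, no projects matched your search';
        },
      },
    };
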
- - it('should render component element with failure message', (done) => { - vm.searchFailed = true; - vm.projects = []; - - Vue.nextTick(() => { - expect(vm.$el.querySelectorAll('li.section-empty.section-failure').length).toBe(1); - expect(vm.$el.querySelectorAll('li.projects-list-item-container').length).toBe(0); - done(); - }); - }); - }); -}); diff --git a/spec/javascripts/projects_dropdown/components/search_spec.js b/spec/javascripts/projects_dropdown/components/search_spec.js deleted file mode 100644 index 427f5024e3a..00000000000 --- a/spec/javascripts/projects_dropdown/components/search_spec.js +++ /dev/null @@ -1,100 +0,0 @@ -import Vue from 'vue'; - -import searchComponent from '~/projects_dropdown/components/search.vue'; -import eventHub from '~/projects_dropdown/event_hub'; - -import mountComponent from 'spec/helpers/vue_mount_component_helper'; - -const createComponent = () => { - const Component = Vue.extend(searchComponent); - - return mountComponent(Component); -}; - -describe('SearchComponent', () => { - describe('methods', () => { - let vm; - - beforeEach(() => { - vm = createComponent(); - }); - - afterEach(() => { - vm.$destroy(); - }); - - describe('setFocus', () => { - it('should set focus to search input', () => { - spyOn(vm.$refs.search, 'focus'); - - vm.setFocus(); - expect(vm.$refs.search.focus).toHaveBeenCalled(); - }); - }); - - describe('emitSearchEvents', () => { - it('should emit `searchProjects` event via eventHub when `searchQuery` present', () => { - const searchQuery = 'test'; - spyOn(eventHub, '$emit'); - vm.searchQuery = searchQuery; - vm.emitSearchEvents(); - expect(eventHub.$emit).toHaveBeenCalledWith('searchProjects', searchQuery); - }); - - it('should emit `searchCleared` event via eventHub when `searchQuery` is cleared', () => { - spyOn(eventHub, '$emit'); - vm.searchQuery = ''; - vm.emitSearchEvents(); - expect(eventHub.$emit).toHaveBeenCalledWith('searchCleared'); - }); - }); - }); - - describe('mounted', () => { - it('should listen `dropdownOpen` event', (done) => { - spyOn(eventHub, '$on'); - createComponent(); - - Vue.nextTick(() => { - expect(eventHub.$on).toHaveBeenCalledWith('dropdownOpen', jasmine.any(Function)); - done(); - }); - }); - }); - - describe('beforeDestroy', () => { - it('should unbind event listeners on eventHub', (done) => { - const vm = createComponent(); - spyOn(eventHub, '$off'); - - vm.$mount(); - vm.$destroy(); - - Vue.nextTick(() => { - expect(eventHub.$off).toHaveBeenCalledWith('dropdownOpen', jasmine.any(Function)); - done(); - }); - }); - }); - - describe('template', () => { - let vm; - - beforeEach(() => { - vm = createComponent(); - }); - - afterEach(() => { - vm.$destroy(); - }); - - it('should render component element', () => { - const inputEl = vm.$el.querySelector('input.form-control'); - - expect(vm.$el.classList.contains('search-input-container')).toBeTruthy(); - expect(inputEl).not.toBe(null); - expect(inputEl.getAttribute('placeholder')).toBe('Search your projects'); - expect(vm.$el.querySelector('.search-icon')).toBeDefined(); - }); - }); -}); diff --git a/spec/javascripts/projects_dropdown/mock_data.js b/spec/javascripts/projects_dropdown/mock_data.js deleted file mode 100644 index d6a79fb8ac1..00000000000 --- a/spec/javascripts/projects_dropdown/mock_data.js +++ /dev/null @@ -1,96 +0,0 @@ -export const currentSession = { - username: 'root', - storageKey: 'root/frequent-projects', - apiVersion: 'v4', - project: { - id: 1, - name: 'dummy-project', - namespace: 'SamepleGroup / Dummy-Project', - webUrl: 
'http://127.0.0.1/samplegroup/dummy-project', - avatarUrl: null, - lastAccessedOn: Date.now(), - }, -}; - -export const mockProject = { - id: 1, - name: 'GitLab Community Edition', - namespace: 'gitlab-org / gitlab-ce', - webUrl: 'http://127.0.0.1:3000/gitlab-org/gitlab-ce', - avatarUrl: null, -}; - -export const mockRawProject = { - id: 1, - name: 'GitLab Community Edition', - name_with_namespace: 'gitlab-org / gitlab-ce', - web_url: 'http://127.0.0.1:3000/gitlab-org/gitlab-ce', - avatar_url: null, -}; - -export const mockFrequents = [ - { - id: 1, - name: 'GitLab Community Edition', - namespace: 'gitlab-org / gitlab-ce', - webUrl: 'http://127.0.0.1:3000/gitlab-org/gitlab-ce', - avatarUrl: null, - }, - { - id: 2, - name: 'GitLab CI', - namespace: 'gitlab-org / gitlab-ci', - webUrl: 'http://127.0.0.1:3000/gitlab-org/gitlab-ci', - avatarUrl: null, - }, - { - id: 3, - name: 'Typeahead.Js', - namespace: 'twitter / typeahead-js', - webUrl: 'http://127.0.0.1:3000/twitter/typeahead-js', - avatarUrl: '/uploads/-/system/project/avatar/7/TWBS.png', - }, - { - id: 4, - name: 'Intel', - namespace: 'platform / hardware / bsp / intel', - webUrl: 'http://127.0.0.1:3000/platform/hardware/bsp/intel', - avatarUrl: null, - }, - { - id: 5, - name: 'v4.4', - namespace: 'platform / hardware / bsp / kernel / common / v4.4', - webUrl: 'http://localhost:3000/platform/hardware/bsp/kernel/common/v4.4', - avatarUrl: null, - }, -]; - -export const unsortedFrequents = [ - { id: 1, frequency: 12, lastAccessedOn: 1491400843391 }, - { id: 2, frequency: 14, lastAccessedOn: 1488240890738 }, - { id: 3, frequency: 44, lastAccessedOn: 1497675908472 }, - { id: 4, frequency: 8, lastAccessedOn: 1497979281815 }, - { id: 5, frequency: 34, lastAccessedOn: 1488089211943 }, - { id: 6, frequency: 14, lastAccessedOn: 1493517292488 }, - { id: 7, frequency: 42, lastAccessedOn: 1486815299875 }, - { id: 8, frequency: 33, lastAccessedOn: 1500762279114 }, - { id: 10, frequency: 46, lastAccessedOn: 1483251641543 }, -]; - -/** - * This const has a specific order which tests authenticity - * of `ProjectsService.getTopFrequentProjects` method so - * DO NOT change order of items in this const. 
- */ -export const sortedFrequents = [ - { id: 10, frequency: 46, lastAccessedOn: 1483251641543 }, - { id: 3, frequency: 44, lastAccessedOn: 1497675908472 }, - { id: 7, frequency: 42, lastAccessedOn: 1486815299875 }, - { id: 5, frequency: 34, lastAccessedOn: 1488089211943 }, - { id: 8, frequency: 33, lastAccessedOn: 1500762279114 }, - { id: 6, frequency: 14, lastAccessedOn: 1493517292488 }, - { id: 2, frequency: 14, lastAccessedOn: 1488240890738 }, - { id: 1, frequency: 12, lastAccessedOn: 1491400843391 }, - { id: 4, frequency: 8, lastAccessedOn: 1497979281815 }, -]; diff --git a/spec/javascripts/projects_dropdown/service/projects_service_spec.js b/spec/javascripts/projects_dropdown/service/projects_service_spec.js deleted file mode 100644 index cfd1bb7d24f..00000000000 --- a/spec/javascripts/projects_dropdown/service/projects_service_spec.js +++ /dev/null @@ -1,179 +0,0 @@ -import Vue from 'vue'; -import VueResource from 'vue-resource'; - -import bp from '~/breakpoints'; -import ProjectsService from '~/projects_dropdown/service/projects_service'; -import { FREQUENT_PROJECTS } from '~/projects_dropdown/constants'; -import { currentSession, unsortedFrequents, sortedFrequents } from '../mock_data'; - -Vue.use(VueResource); - -FREQUENT_PROJECTS.MAX_COUNT = 3; - -describe('ProjectsService', () => { - let service; - - beforeEach(() => { - gon.api_version = currentSession.apiVersion; - gon.current_user_id = 1; - service = new ProjectsService(currentSession.username); - }); - - describe('contructor', () => { - it('should initialize default properties of class', () => { - expect(service.isLocalStorageAvailable).toBeTruthy(); - expect(service.currentUserName).toBe(currentSession.username); - expect(service.storageKey).toBe(currentSession.storageKey); - expect(service.projectsPath).toBeDefined(); - }); - }); - - describe('getSearchedProjects', () => { - it('should return promise from VueResource HTTP GET', () => { - spyOn(service.projectsPath, 'get').and.stub(); - - const searchQuery = 'lab'; - const queryParams = { - simple: true, - per_page: 20, - membership: true, - order_by: 'last_activity_at', - search: searchQuery, - }; - - service.getSearchedProjects(searchQuery); - expect(service.projectsPath.get).toHaveBeenCalledWith(queryParams); - }); - }); - - describe('logProjectAccess', () => { - let storage; - - beforeEach(() => { - storage = {}; - - spyOn(window.localStorage, 'setItem').and.callFake((storageKey, value) => { - storage[storageKey] = value; - }); - - spyOn(window.localStorage, 'getItem').and.callFake((storageKey) => { - if (storage[storageKey]) { - return storage[storageKey]; - } - - return null; - }); - }); - - it('should create a project store if it does not exist and adds a project', () => { - service.logProjectAccess(currentSession.project); - - const projects = JSON.parse(storage[currentSession.storageKey]); - expect(projects.length).toBe(1); - expect(projects[0].frequency).toBe(1); - expect(projects[0].lastAccessedOn).toBeDefined(); - }); - - it('should prevent inserting same report multiple times into store', () => { - service.logProjectAccess(currentSession.project); - service.logProjectAccess(currentSession.project); - - const projects = JSON.parse(storage[currentSession.storageKey]); - expect(projects.length).toBe(1); - }); - - it('should increase frequency of report if it was logged multiple times over the course of an hour', () => { - let projects; - spyOn(Math, 'abs').and.returnValue(3600001); // this will lead to `diff` > 1; - 
service.logProjectAccess(currentSession.project); - - projects = JSON.parse(storage[currentSession.storageKey]); - expect(projects[0].frequency).toBe(1); - - service.logProjectAccess(currentSession.project); - projects = JSON.parse(storage[currentSession.storageKey]); - expect(projects[0].frequency).toBe(2); - expect(projects[0].lastAccessedOn).not.toBe(currentSession.project.lastAccessedOn); - }); - - it('should always update project metadata', () => { - let projects; - const oldProject = { - ...currentSession.project, - }; - - const newProject = { - ...currentSession.project, - name: 'New Name', - avatarUrl: 'new/avatar.png', - namespace: 'New / Namespace', - webUrl: 'http://localhost/new/web/url', - }; - - service.logProjectAccess(oldProject); - projects = JSON.parse(storage[currentSession.storageKey]); - expect(projects[0].name).toBe(oldProject.name); - expect(projects[0].avatarUrl).toBe(oldProject.avatarUrl); - expect(projects[0].namespace).toBe(oldProject.namespace); - expect(projects[0].webUrl).toBe(oldProject.webUrl); - - service.logProjectAccess(newProject); - projects = JSON.parse(storage[currentSession.storageKey]); - expect(projects[0].name).toBe(newProject.name); - expect(projects[0].avatarUrl).toBe(newProject.avatarUrl); - expect(projects[0].namespace).toBe(newProject.namespace); - expect(projects[0].webUrl).toBe(newProject.webUrl); - }); - - it('should not add more than 20 projects in store', () => { - for (let i = 1; i <= 5; i += 1) { - const project = Object.assign(currentSession.project, { id: i }); - service.logProjectAccess(project); - } - - const projects = JSON.parse(storage[currentSession.storageKey]); - expect(projects.length).toBe(3); - }); - }); - - describe('getTopFrequentProjects', () => { - let storage = {}; - - beforeEach(() => { - storage[currentSession.storageKey] = JSON.stringify(unsortedFrequents); - - spyOn(window.localStorage, 'getItem').and.callFake((storageKey) => { - if (storage[storageKey]) { - return storage[storageKey]; - } - - return null; - }); - }); - - it('should return top 5 frequently accessed projects for desktop screens', () => { - spyOn(bp, 'getBreakpointSize').and.returnValue('md'); - const frequentProjects = service.getTopFrequentProjects(); - - expect(frequentProjects.length).toBe(5); - frequentProjects.forEach((project, index) => { - expect(project.id).toBe(sortedFrequents[index].id); - }); - }); - - it('should return top 3 frequently accessed projects for mobile screens', () => { - spyOn(bp, 'getBreakpointSize').and.returnValue('sm'); - const frequentProjects = service.getTopFrequentProjects(); - - expect(frequentProjects.length).toBe(3); - frequentProjects.forEach((project, index) => { - expect(project.id).toBe(sortedFrequents[index].id); - }); - }); - - it('should return empty array if there are no projects available in store', () => { - storage = {}; - expect(service.getTopFrequentProjects().length).toBe(0); - }); - }); -}); diff --git a/spec/javascripts/projects_dropdown/store/projects_store_spec.js b/spec/javascripts/projects_dropdown/store/projects_store_spec.js deleted file mode 100644 index e57399d37cd..00000000000 --- a/spec/javascripts/projects_dropdown/store/projects_store_spec.js +++ /dev/null @@ -1,41 +0,0 @@ -import ProjectsStore from '~/projects_dropdown/store/projects_store'; -import { mockProject, mockRawProject } from '../mock_data'; - -describe('ProjectsStore', () => { - let store; - - beforeEach(() => { - store = new ProjectsStore(); - }); - - describe('setFrequentProjects', () => { - it('should set frequent 
projects list to state', () => { - store.setFrequentProjects([mockProject]); - - expect(store.getFrequentProjects().length).toBe(1); - expect(store.getFrequentProjects()[0].id).toBe(mockProject.id); - }); - }); - - describe('setSearchedProjects', () => { - it('should set searched projects list to state', () => { - store.setSearchedProjects([mockRawProject]); - - const processedProjects = store.getSearchedProjects(); - expect(processedProjects.length).toBe(1); - expect(processedProjects[0].id).toBe(mockRawProject.id); - expect(processedProjects[0].namespace).toBe(mockRawProject.name_with_namespace); - expect(processedProjects[0].webUrl).toBe(mockRawProject.web_url); - expect(processedProjects[0].avatarUrl).toBe(mockRawProject.avatar_url); - }); - }); - - describe('clearSearchedProjects', () => { - it('should clear searched projects list from state', () => { - store.setSearchedProjects([mockRawProject]); - expect(store.getSearchedProjects().length).toBe(1); - store.clearSearchedProjects(); - expect(store.getSearchedProjects().length).toBe(0); - }); - }); -}); diff --git a/spec/lib/banzai/filter/markdown_filter_spec.rb b/spec/lib/banzai/filter/markdown_filter_spec.rb index ab14d77d552..a515d07b072 100644 --- a/spec/lib/banzai/filter/markdown_filter_spec.rb +++ b/spec/lib/banzai/filter/markdown_filter_spec.rb @@ -3,17 +3,61 @@ require 'spec_helper' describe Banzai::Filter::MarkdownFilter do include FilterSpecHelper - context 'code block' do - it 'adds language to lang attribute when specified' do - result = filter("```html\nsome code\n```") + describe 'markdown engine from context' do + it 'defaults to CommonMark' do + expect_any_instance_of(Banzai::Filter::MarkdownEngines::CommonMark).to receive(:render).and_return('test') - expect(result).to start_with("<pre><code lang=\"html\">") + filter('test') end - it 'does not add language to lang attribute when not specified' do - result = filter("```\nsome code\n```") + it 'uses Redcarpet' do + expect_any_instance_of(Banzai::Filter::MarkdownEngines::Redcarpet).to receive(:render).and_return('test') - expect(result).to start_with("<pre><code>") + filter('test', { markdown_engine: :redcarpet }) + end + + it 'uses CommonMark' do + expect_any_instance_of(Banzai::Filter::MarkdownEngines::CommonMark).to receive(:render).and_return('test') + + filter('test', { markdown_engine: :common_mark }) + end + end + + describe 'code block' do + context 'using CommonMark' do + before do + stub_const('Banzai::Filter::MarkdownFilter::DEFAULT_ENGINE', :common_mark) + end + + it 'adds language to lang attribute when specified' do + result = filter("```html\nsome code\n```") + + expect(result).to start_with("<pre><code lang=\"html\">") + end + + it 'does not add language to lang attribute when not specified' do + result = filter("```\nsome code\n```") + + expect(result).to start_with("<pre><code>") + end + end + + context 'using Redcarpet' do + before do + stub_const('Banzai::Filter::MarkdownFilter::DEFAULT_ENGINE', :redcarpet) + end + + it 'adds language to lang attribute when specified' do + result = filter("```html\nsome code\n```") + + expect(result).to start_with("\n<pre><code lang=\"html\">") + end + + it 'does not add language to lang attribute when not specified' do + result = filter("```\nsome code\n```") + + expect(result).to start_with("\n<pre><code>") + end end end end diff --git a/spec/lib/gitlab/background_migration/fix_cross_project_label_links_spec.rb b/spec/lib/gitlab/background_migration/fix_cross_project_label_links_spec.rb new file mode 100644 index 
00000000000..20af63bc6c8 --- /dev/null +++ b/spec/lib/gitlab/background_migration/fix_cross_project_label_links_spec.rb @@ -0,0 +1,109 @@ +require 'spec_helper' + +describe Gitlab::BackgroundMigration::FixCrossProjectLabelLinks, :migration, schema: 20180702120647 do + let(:namespaces_table) { table(:namespaces) } + let(:projects_table) { table(:projects) } + let(:issues_table) { table(:issues) } + let(:merge_requests_table) { table(:merge_requests) } + let(:labels_table) { table(:labels) } + let(:label_links_table) { table(:label_links) } + + let!(:group1) { namespaces_table.create(id: 10, type: 'Group', name: 'group1', path: 'group1') } + let!(:group2) { namespaces_table.create(id: 20, type: 'Group', name: 'group2', path: 'group2') } + + let!(:project1) { projects_table.create(id: 1, name: 'project1', path: 'group1/project1', namespace_id: 10) } + let!(:project2) { projects_table.create(id: 3, name: 'project2', path: 'group1/project2', namespace_id: 20) } + + let!(:label1) { labels_table.create(id: 1, title: 'bug', color: 'red', group_id: 10, type: 'GroupLabel') } + let!(:label2) { labels_table.create(id: 2, title: 'bug', color: 'red', group_id: 20, type: 'GroupLabel') } + + def create_merge_request(id, project_id) + merge_requests_table.create(id: id, + target_project_id: project_id, + target_branch: 'master', + source_project_id: project_id, + source_branch: 'mr name', + title: "mr name#{id}") + end + + def create_issue(id, project_id) + issues_table.create(id: id, title: "issue#{id}", project_id: project_id) + end + + def create_resource(target_type, id, project_id) + target_type == 'Issue' ? create_issue(id, project_id) : create_merge_request(id, project_id) + end + + shared_examples_for 'resource with cross-project labels' do + it 'updates only cross-project label links which exist in the local project or group' do + create_resource(target_type, 1, 1) + create_resource(target_type, 2, 3) + labels_table.create(id: 3, title: 'bug', color: 'red', project_id: 3, type: 'ProjectLabel') + link = label_links_table.create(label_id: 2, target_type: target_type, target_id: 1) + link2 = label_links_table.create(label_id: 3, target_type: target_type, target_id: 2) + + subject.perform(1, 100) + + expect(link.reload.label_id).to eq(1) + expect(link2.reload.label_id).to eq(3) + end + + it 'ignores cross-project label links if label color is different' do + labels_table.create(id: 3, title: 'bug', color: 'green', group_id: 20, type: 'GroupLabel') + create_resource(target_type, 1, 1) + link = label_links_table.create(label_id: 3, target_type: target_type, target_id: 1) + + subject.perform(1, 100) + + expect(link.reload.label_id).to eq(3) + end + + it 'ignores cross-project label links if label name is different' do + labels_table.create(id: 3, title: 'bug1', color: 'red', group_id: 20, type: 'GroupLabel') + create_resource(target_type, 1, 1) + link = label_links_table.create(label_id: 3, target_type: target_type, target_id: 1) + + subject.perform(1, 100) + + expect(link.reload.label_id).to eq(3) + end + + context 'with nested group' do + before do + namespaces_table.create(id: 11, type: 'Group', name: 'subgroup1', path: 'group1/subgroup1', parent_id: 10) + projects_table.create(id: 2, name: 'subproject1', path: 'group1/subgroup1/subproject1', namespace_id: 11) + create_resource(target_type, 1, 2) + end + + it 'ignores label links referencing ancestor group labels', :nested_groups do + labels_table.create(id: 4, title: 'bug', color: 'red', project_id: 2, type: 'ProjectLabel') + 
label_links_table.create(label_id: 4, target_type: target_type, target_id: 1) + link = label_links_table.create(label_id: 1, target_type: target_type, target_id: 1) + + subject.perform(1, 100) + + expect(link.reload.label_id).to eq(1) + end + + it 'checks also issues and MRs in subgroups', :nested_groups do + link = label_links_table.create(label_id: 2, target_type: target_type, target_id: 1) + + subject.perform(1, 100) + + expect(link.reload.label_id).to eq(1) + end + end + end + + context 'resource is Issue' do + it_behaves_like 'resource with cross-project labels' do + let(:target_type) { 'Issue' } + end + end + + context 'resource is Merge Request' do + it_behaves_like 'resource with cross-project labels' do + let(:target_type) { 'MergeRequest' } + end + end +end diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb index 8bb246aa4bd..782e4e45a91 100644 --- a/spec/lib/gitlab/database_spec.rb +++ b/spec/lib/gitlab/database_spec.rb @@ -357,6 +357,35 @@ describe Gitlab::Database do end end + describe '.db_read_only?' do + context 'when using PostgreSQL' do + before do + allow(ActiveRecord::Base.connection).to receive(:execute).and_call_original + expect(described_class).to receive(:postgresql?).and_return(true) + end + + it 'detects a read only database' do + allow(ActiveRecord::Base.connection).to receive(:execute).with('SELECT pg_is_in_recovery()').and_return([{ "pg_is_in_recovery" => "t" }]) + + expect(described_class.db_read_only?).to be_truthy + end + + it 'detects a read write database' do + allow(ActiveRecord::Base.connection).to receive(:execute).with('SELECT pg_is_in_recovery()').and_return([{ "pg_is_in_recovery" => "f" }]) + + expect(described_class.db_read_only?).to be_falsey + end + end + + context 'when using MySQL' do + before do + expect(described_class).to receive(:postgresql?).and_return(false) + end + + it { expect(described_class.db_read_only?).to be_falsey } + end + end + describe '#sanitize_timestamp' do let(:max_timestamp) { Time.at((1 << 31) - 1) } diff --git a/spec/lib/gitlab/exclusive_lease_helpers_spec.rb b/spec/lib/gitlab/exclusive_lease_helpers_spec.rb new file mode 100644 index 00000000000..2e3656b52fb --- /dev/null +++ b/spec/lib/gitlab/exclusive_lease_helpers_spec.rb @@ -0,0 +1,76 @@ +require 'spec_helper' + +describe Gitlab::ExclusiveLeaseHelpers, :clean_gitlab_redis_shared_state do + include ::ExclusiveLeaseHelpers + + let(:class_instance) { (Class.new { include ::Gitlab::ExclusiveLeaseHelpers }).new } + let(:unique_key) { SecureRandom.hex(10) } + + describe '#in_lock' do + subject { class_instance.in_lock(unique_key, **options) { } } + + let(:options) { {} } + + context 'when the lease is not obtained yet' do + before do + stub_exclusive_lease(unique_key, 'uuid') + end + + it 'calls the given block' do + expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_control.once + end + + it 'calls the given block continuously' do + expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_control.once + expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_control.once + expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_control.once + end + + it 'cancels the exclusive lease after the block' do + expect_to_cancel_exclusive_lease(unique_key, 'uuid') + + subject + end + end + + context 'when the lease is obtained already' do + let!(:lease) { stub_exclusive_lease_taken(unique_key) } + + it 'retries to obtain a lease and raises an error' do + expect(lease).to receive(:try_obtain).exactly(11).times + + 
expect { subject }.to raise_error('Failed to obtain a lock') + end + + context 'when ttl is specified' do + let(:options) { { ttl: 10.minutes } } + + it 'receives the specified argument' do + expect(Gitlab::ExclusiveLease).to receive(:new).with(unique_key, { timeout: 10.minutes } ) + + expect { subject }.to raise_error('Failed to obtain a lock') + end + end + + context 'when retry count is specified' do + let(:options) { { retries: 3 } } + + it 'retries for the specified times' do + expect(lease).to receive(:try_obtain).exactly(4).times + + expect { subject }.to raise_error('Failed to obtain a lock') + end + end + + context 'when sleep second is specified' do + let(:options) { { retries: 0, sleep_sec: 0.05.seconds } } + + it 'receives the specified argument' do + expect(class_instance).to receive(:sleep).with(0.05.seconds).once + + expect { subject }.to raise_error('Failed to obtain a lock') + end + end + end + end +end diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb index 5dd7af3a552..e6268a05d44 100644 --- a/spec/lib/gitlab/git/repository_spec.rb +++ b/spec/lib/gitlab/git/repository_spec.rb @@ -1856,6 +1856,54 @@ describe Gitlab::Git::Repository, seed_helper: true do end end + describe '#set_config' do + let(:repository) { Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '') } + let(:rugged) { repository_rugged } + let(:entries) do + { + 'test.foo1' => 'bla bla', + 'test.foo2' => 1234, + 'test.foo3' => true + } + end + + it 'can set config settings' do + expect(repository.set_config(entries)).to be_nil + + expect(rugged.config['test.foo1']).to eq('bla bla') + expect(rugged.config['test.foo2']).to eq('1234') + expect(rugged.config['test.foo3']).to eq('true') + end + + after do + entries.keys.each { |k| rugged.config.delete(k) } + end + end + + describe '#delete_config' do + let(:repository) { Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '') } + let(:rugged) { repository_rugged } + let(:entries) do + { + 'test.foo1' => 'bla bla', + 'test.foo2' => 1234, + 'test.foo3' => true + } + end + + it 'can delete config settings' do + entries.each do |key, value| + rugged.config[key] = value + end + + expect(repository.delete_config(*%w[does.not.exist test.foo1 test.foo2])).to be_nil + + config_keys = rugged.config.each_key.to_a + expect(config_keys).not_to include('test.foo1') + expect(config_keys).not_to include('test.foo2') + end + end + describe '#merge' do let(:repository) do Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '') diff --git a/spec/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy_object_storage_spec.rb b/spec/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy_object_storage_spec.rb new file mode 100644 index 00000000000..5059d68e54b --- /dev/null +++ b/spec/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy_object_storage_spec.rb @@ -0,0 +1,105 @@ +require 'spec_helper' + +describe Gitlab::ImportExport::AfterExportStrategies::BaseAfterExportStrategy do + let!(:service) { described_class.new } + let!(:project) { create(:project, :with_object_export) } + let(:shared) { project.import_export_shared } + let!(:user) { create(:user) } + + describe '#execute' do + before do + allow(service).to receive(:strategy_execute) + stub_feature_flags(import_export_object_storage: true) + end + + it 'returns if project exported file is not found' do + allow(project).to receive(:export_project_object_exists?).and_return(false) + + 
expect(service).not_to receive(:strategy_execute) + + service.execute(user, project) + end + + it 'creates a lock file in the export dir' do + allow(service).to receive(:delete_after_export_lock) + + service.execute(user, project) + + expect(lock_path_exist?).to be_truthy + end + + context 'when the method succeeds' do + it 'removes the lock file' do + service.execute(user, project) + + expect(lock_path_exist?).to be_falsey + end + end + + context 'when the method fails' do + before do + allow(service).to receive(:strategy_execute).and_call_original + end + + context 'when validation fails' do + before do + allow(service).to receive(:invalid?).and_return(true) + end + + it 'does not create the lock file' do + expect(service).not_to receive(:create_or_update_after_export_lock) + + service.execute(user, project) + end + + it 'does not execute main logic' do + expect(service).not_to receive(:strategy_execute) + + service.execute(user, project) + end + + it 'logs validation errors in shared context' do + expect(service).to receive(:log_validation_errors) + + service.execute(user, project) + end + end + + context 'when an exception is raised' do + it 'removes the lock' do + expect { service.execute(user, project) }.to raise_error(NotImplementedError) + + expect(lock_path_exist?).to be_falsey + end + end + end + end + + describe '#log_validation_errors' do + it 'add the message to the shared context' do + errors = %w(test_message test_message2) + + allow(service).to receive(:invalid?).and_return(true) + allow(service.errors).to receive(:full_messages).and_return(errors) + + expect(shared).to receive(:add_error_message).twice.and_call_original + + service.execute(user, project) + + expect(shared.errors).to eq errors + end + end + + describe '#to_json' do + it 'adds the current strategy class to the serialized attributes' do + params = { param1: 1 } + result = params.merge(klass: described_class.to_s).to_json + + expect(described_class.new(params).to_json).to eq result + end + end + + def lock_path_exist? 
+ File.exist?(described_class.lock_file_path(project)) + end +end diff --git a/spec/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy_spec.rb b/spec/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy_spec.rb index ed54d87de4a..566b7f46c87 100644 --- a/spec/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy_spec.rb +++ b/spec/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy_spec.rb @@ -9,6 +9,7 @@ describe Gitlab::ImportExport::AfterExportStrategies::BaseAfterExportStrategy do describe '#execute' do before do allow(service).to receive(:strategy_execute) + stub_feature_flags(import_export_object_storage: false) end it 'returns if project exported file is not found' do diff --git a/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb b/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb index 5fe57d9987b..7f2e0a4ee2c 100644 --- a/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb +++ b/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb @@ -24,13 +24,34 @@ describe Gitlab::ImportExport::AfterExportStrategies::WebUploadStrategy do end describe '#execute' do - it 'removes the exported project file after the upload' do - allow(strategy).to receive(:send_file) - allow(strategy).to receive(:handle_response_error) + context 'without object storage' do + before do + stub_feature_flags(import_export_object_storage: false) + end + + it 'removes the exported project file after the upload' do + allow(strategy).to receive(:send_file) + allow(strategy).to receive(:handle_response_error) + + expect(project).to receive(:remove_exported_project_file) + + strategy.execute(user, project) + end + end + + context 'with object storage' do + before do + stub_feature_flags(import_export_object_storage: true) + end - expect(project).to receive(:remove_exported_project_file) + it 'removes the exported project file after the upload' do + allow(strategy).to receive(:send_file) + allow(strategy).to receive(:handle_response_error) - strategy.execute(user, project) + expect(project).to receive(:remove_exported_project_file) + + strategy.execute(user, project) + end end end end diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml index 2ea66479c1b..084ce3066d6 100644 --- a/spec/lib/gitlab/import_export/all_models.yml +++ b/spec/lib/gitlab/import_export/all_models.yml @@ -293,6 +293,7 @@ project: - deploy_tokens - settings - ci_cd_settings +- import_export_upload award_emoji: - awardable - user diff --git a/spec/lib/gitlab/import_export/saver_spec.rb b/spec/lib/gitlab/import_export/saver_spec.rb new file mode 100644 index 00000000000..02f1a4b81aa --- /dev/null +++ b/spec/lib/gitlab/import_export/saver_spec.rb @@ -0,0 +1,43 @@ +require 'spec_helper' +require 'fileutils' + +describe Gitlab::ImportExport::Saver do + let!(:project) { create(:project, :public, name: 'project') } + let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" } + let(:shared) { project.import_export_shared } + subject { described_class.new(project: project, shared: shared) } + + before do + allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path) + + FileUtils.mkdir_p(shared.export_path) + FileUtils.touch("#{shared.export_path}/tmp.bundle") + end + + after do + FileUtils.rm_rf(export_path) + end + + context 'local archive' do + it 'saves the 
repo to disk' do + stub_feature_flags(import_export_object_storage: false) + + subject.save + + expect(shared.errors).to be_empty + expect(Dir.empty?(shared.archive_path)).to be false + end + end + + context 'object storage' do + it 'saves the repo using object storage' do + stub_feature_flags(import_export_object_storage: true) + stub_uploads_object_storage(ImportExportUploader) + + subject.save + + expect(ImportExportUpload.find_by(project: project).export_file.url) + .to match(%r[\/uploads\/-\/system\/import_export_upload\/export_file.*]) + end + end +end diff --git a/spec/mailers/previews/devise_mailer_preview.rb b/spec/mailers/previews/devise_mailer_preview.rb deleted file mode 100644 index d6588efc486..00000000000 --- a/spec/mailers/previews/devise_mailer_preview.rb +++ /dev/null @@ -1,30 +0,0 @@ -class DeviseMailerPreview < ActionMailer::Preview - def confirmation_instructions_for_signup - DeviseMailer.confirmation_instructions(unsaved_user, 'faketoken', {}) - end - - def confirmation_instructions_for_new_email - user = User.last - user.unconfirmed_email = 'unconfirmed@example.com' - - DeviseMailer.confirmation_instructions(user, 'faketoken', {}) - end - - def reset_password_instructions - DeviseMailer.reset_password_instructions(unsaved_user, 'faketoken', {}) - end - - def unlock_instructions - DeviseMailer.unlock_instructions(unsaved_user, 'faketoken', {}) - end - - def password_change - DeviseMailer.password_change(unsaved_user, {}) - end - - private - - def unsaved_user - User.new(name: 'Jane Doe', email: 'jdoe@example.com') - end -end diff --git a/spec/mailers/previews/email_rejection_mailer_preview.rb b/spec/mailers/previews/email_rejection_mailer_preview.rb deleted file mode 100644 index 639e8471232..00000000000 --- a/spec/mailers/previews/email_rejection_mailer_preview.rb +++ /dev/null @@ -1,5 +0,0 @@ -class EmailRejectionMailerPreview < ActionMailer::Preview - def rejection - EmailRejectionMailer.rejection("some rejection reason", "From: someone@example.com\nraw email here").message - end -end diff --git a/spec/mailers/previews/notify_preview.rb b/spec/mailers/previews/notify_preview.rb deleted file mode 100644 index e32fd0bd120..00000000000 --- a/spec/mailers/previews/notify_preview.rb +++ /dev/null @@ -1,170 +0,0 @@ -class NotifyPreview < ActionMailer::Preview - def note_merge_request_email_for_individual_note - note_email(:note_merge_request_email) do - note = <<-MD.strip_heredoc - This is an individual note on a merge request :smiley: - - In this notification email, we expect to see: - - - The note contents (that's what you're looking at) - - A link to view this note on Gitlab - - An explanation for why the user is receiving this notification - MD - - create_note(noteable_type: 'merge_request', noteable_id: merge_request.id, note: note) - end - end - - def note_merge_request_email_for_discussion - note_email(:note_merge_request_email) do - note = <<-MD.strip_heredoc - This is a new discussion on a merge request :smiley: - - In this notification email, we expect to see: - - - A line saying who started this discussion - - The note contents (that's what you're looking at) - - A link to view this discussion on Gitlab - - An explanation for why the user is receiving this notification - MD - - create_note(noteable_type: 'merge_request', noteable_id: merge_request.id, type: 'DiscussionNote', note: note) - end - end - - def note_merge_request_email_for_diff_discussion - note_email(:note_merge_request_email) do - note = <<-MD.strip_heredoc - This is a new discussion on a merge 
request :smiley: - - In this notification email, we expect to see: - - - A line saying who started this discussion and on what file - - The diff - - The note contents (that's what you're looking at) - - A link to view this discussion on Gitlab - - An explanation for why the user is receiving this notification - MD - - position = Gitlab::Diff::Position.new( - old_path: "files/ruby/popen.rb", - new_path: "files/ruby/popen.rb", - old_line: nil, - new_line: 14, - diff_refs: merge_request.diff_refs - ) - - create_note(noteable_type: 'merge_request', noteable_id: merge_request.id, type: 'DiffNote', position: position, note: note) - end - end - - def closed_issue_email - Notify.closed_issue_email(user.id, issue.id, user.id).message - end - - def issue_status_changed_email - Notify.issue_status_changed_email(user.id, issue.id, 'closed', user.id).message - end - - def closed_merge_request_email - Notify.closed_merge_request_email(user.id, issue.id, user.id).message - end - - def merge_request_status_email - Notify.merge_request_status_email(user.id, merge_request.id, 'closed', user.id).message - end - - def merged_merge_request_email - Notify.merged_merge_request_email(user.id, merge_request.id, user.id).message - end - - def member_access_denied_email - Notify.member_access_denied_email('project', project.id, user.id).message - end - - def member_access_granted_email - Notify.member_access_granted_email('project', user.id).message - end - - def member_access_requested_email - Notify.member_access_requested_email('group', user.id, 'some@example.com').message - end - - def member_invite_accepted_email - Notify.member_invite_accepted_email('project', user.id).message - end - - def member_invite_declined_email - Notify.member_invite_declined_email( - 'project', - project.id, - 'invite@example.com', - user.id - ).message - end - - def member_invited_email - Notify.member_invited_email('project', user.id, '1234').message - end - - def pages_domain_enabled_email - cleanup do - pages_domain = PagesDomain.new(domain: 'my.example.com', project: project, verified_at: Time.now, enabled_until: 1.week.from_now) - - Notify.pages_domain_enabled_email(pages_domain, user).message - end - end - - def pipeline_success_email - Notify.pipeline_success_email(pipeline, pipeline.user.try(:email)) - end - - def pipeline_failed_email - Notify.pipeline_failed_email(pipeline, pipeline.user.try(:email)) - end - - private - - def project - @project ||= Project.find_by_full_path('gitlab-org/gitlab-test') - end - - def issue - @merge_request ||= project.issues.first - end - - def merge_request - @merge_request ||= project.merge_requests.first - end - - def pipeline - @pipeline = Ci::Pipeline.last - end - - def user - @user ||= User.last - end - - def create_note(params) - Notes::CreateService.new(project, user, params).execute - end - - def note_email(method) - cleanup do - note = yield - - Notify.public_send(method, user.id, note) - end - end - - def cleanup - email = nil - - ActiveRecord::Base.transaction do - email = yield - raise ActiveRecord::Rollback - end - - email - end -end diff --git a/spec/mailers/previews/repository_check_mailer_preview.rb b/spec/mailers/previews/repository_check_mailer_preview.rb deleted file mode 100644 index 19d4eab1805..00000000000 --- a/spec/mailers/previews/repository_check_mailer_preview.rb +++ /dev/null @@ -1,5 +0,0 @@ -class RepositoryCheckMailerPreview < ActionMailer::Preview - def notify - RepositoryCheckMailer.notify(3).message - end -end diff --git a/spec/models/ci/build_spec.rb 
b/spec/models/ci/build_spec.rb index 0da1234ee3b..090ca159e08 100644 --- a/spec/models/ci/build_spec.rb +++ b/spec/models/ci/build_spec.rb @@ -514,6 +514,22 @@ describe Ci::Build do end end + describe '#has_old_trace?' do + subject { build.has_old_trace? } + + context 'when old trace exists' do + before do + build.update_column(:trace, 'old trace') + end + + it { is_expected.to be_truthy } + end + + context 'when old trace does not exist' do + it { is_expected.to be_falsy } + end + end + describe '#trace=' do it "expect to fail trace=" do expect { build.trace = "new" }.to raise_error(NotImplementedError) @@ -533,16 +549,32 @@ describe Ci::Build do end describe '#erase_old_trace!' do - subject { build.send(:read_attribute, :trace) } + subject { build.erase_old_trace! } - before do - build.send(:write_attribute, :trace, 'old trace') + context 'when old trace exists' do + before do + build.update_column(:trace, 'old trace') + end + + it "erases old trace" do + subject + + expect(build.old_trace).to be_nil + end + + it "executes UPDATE query" do + recorded = ActiveRecord::QueryRecorder.new { subject } + + expect(recorded.log.select { |l| l.match?(/UPDATE.*ci_builds/) }.count).to eq(1) + end end - it "expect to receive data from database" do - build.erase_old_trace! + context 'when old trace does not exist' do + it 'does not execute UPDATE query' do + recorded = ActiveRecord::QueryRecorder.new { subject } - is_expected.to be_nil + expect(recorded.log.select { |l| l.match?(/UPDATE.*ci_builds/) }.count).to eq(0) + end end end diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb index 464897de306..774a638b430 100644 --- a/spec/models/ci/build_trace_chunk_spec.rb +++ b/spec/models/ci/build_trace_chunk_spec.rb @@ -14,6 +14,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do before do stub_feature_flags(ci_enable_live_trace: true) + stub_artifacts_object_storage end context 'FastDestroyAll' do @@ -37,6 +38,22 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do end end + describe '.all_stores' do + subject { described_class.all_stores } + + it 'returns a correctly ordered array' do + is_expected.to eq(%w[redis database fog]) + end + + it 'returns redis store as the lowest precedence' do + expect(subject.first).to eq('redis') + end + + it 'returns fog store as the highest precedence' do + expect(subject.last).to eq('fog') + end + end + describe '#data' do subject { build_trace_chunk.data } @@ -44,181 +61,269 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do let(:data_store) { :redis } before do - build_trace_chunk.send(:redis_set_data, 'Sample data in redis') + build_trace_chunk.send(:unsafe_set_data!, 'Sample data in redis') end it { is_expected.to eq('Sample data in redis') } end context 'when data_store is database' do - let(:data_store) { :db } - let(:raw_data) { 'Sample data in db' } + let(:data_store) { :database } + let(:raw_data) { 'Sample data in database' } - it { is_expected.to eq('Sample data in db') } + it { is_expected.to eq('Sample data in database') } end - end - - describe '#set_data' do - subject { build_trace_chunk.send(:set_data, value) } - let(:value) { 'Sample data' } + context 'when data_store is fog' do + let(:data_store) { :fog } - context 'when value bytesize is bigger than CHUNK_SIZE' do - let(:value) { 'a' * (described_class::CHUNK_SIZE + 1) } + before do + build_trace_chunk.send(:unsafe_set_data!, 'Sample data in fog') + end - it { expect { subject }.to 
raise_error('too much data') } + it { is_expected.to eq('Sample data in fog') } end + end - context 'when data_store is redis' do - let(:data_store) { :redis } + describe '#append' do + subject { build_trace_chunk.append(new_data, offset) } - it do - expect(build_trace_chunk.send(:redis_data)).to be_nil + let(:new_data) { 'Sample new data' } + let(:offset) { 0 } + let(:merged_data) { data + new_data.to_s } - subject + shared_examples_for 'Appending correctly' do + context 'when offset is negative' do + let(:offset) { -1 } + + it { expect { subject }.to raise_error('Offset is out of range') } + end - expect(build_trace_chunk.send(:redis_data)).to eq(value) + context 'when offset is bigger than data size' do + let(:offset) { data.bytesize + 1 } + + it { expect { subject }.to raise_error('Offset is out of range') } end - context 'when fullfilled chunk size' do - let(:value) { 'a' * described_class::CHUNK_SIZE } + context 'when new data overflows chunk size' do + let(:new_data) { 'a' * (described_class::CHUNK_SIZE + 1) } - it 'schedules stashing data' do - expect(Ci::BuildTraceChunkFlushWorker).to receive(:perform_async).once + it { expect { subject }.to raise_error('Chunk size overflow') } + end + + context 'when offset is EOF' do + let(:offset) { data.bytesize } + it 'appends' do subject + + expect(build_trace_chunk.data).to eq(merged_data) end - end - end + context 'when the other process is appending' do + let(:lease_key) { "trace_write:#{build_trace_chunk.build.id}:chunks:#{build_trace_chunk.chunk_index}" } - context 'when data_store is database' do - let(:data_store) { :db } + before do + stub_exclusive_lease_taken(lease_key) + end - it 'sets data' do - expect(build_trace_chunk.raw_data).to be_nil + it 'raises an error' do + expect { subject }.to raise_error('Failed to obtain a lock') + end + end - subject + context 'when new_data is nil' do + let(:new_data) { nil } - expect(build_trace_chunk.raw_data).to eq(value) - expect(build_trace_chunk.persisted?).to be_truthy - end - context 'when raw_data is not changed' do - it 'does not execute UPDATE' do - expect(build_trace_chunk.raw_data).to be_nil - build_trace_chunk.save!
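# Annotation: nil input is treated as a caller bug and raises ('New data is
# missing'), while an empty string (the next context) is a silent no-op.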
+ it 'raises an error' do + expect { subject }.to raise_error('New data is missing') + end + end - # First set - expect(ActiveRecord::QueryRecorder.new { subject }.count).to be > 0 - expect(build_trace_chunk.raw_data).to eq(value) - expect(build_trace_chunk.persisted?).to be_truthy + context 'when new_data is empty' do + let(:new_data) { '' } - # Second set - build_trace_chunk.reload - expect(ActiveRecord::QueryRecorder.new { subject }.count).to be(0) + it 'does not append' do + subject + + expect(build_trace_chunk.data).to eq(data) + end + + it 'does not execute UPDATE' do + ActiveRecord::QueryRecorder.new { subject }.log.map do |query| + expect(query).not_to include('UPDATE') + end + end end end - context 'when fullfilled chunk size' do - it 'does not schedule stashing data' do - expect(Ci::BuildTraceChunkFlushWorker).not_to receive(:perform_async) + context 'when offset is in the middle of the data' do + let(:offset) { data.bytesize / 2 } + it 'appends' do subject + + expect(build_trace_chunk.data).to eq(data.byteslice(0, offset) + new_data) end end end - end - describe '#truncate' do - subject { build_trace_chunk.truncate(offset) } + shared_examples_for 'Scheduling sidekiq worker to flush data to persist store' do + context 'when new data fulfilled chunk size' do + let(:new_data) { 'a' * described_class::CHUNK_SIZE } - shared_examples_for 'truncates' do - context 'when offset is negative' do - let(:offset) { -1 } + it 'schedules trace chunk flush worker' do + expect(Ci::BuildTraceChunkFlushWorker).to receive(:perform_async).once - it { expect { subject }.to raise_error('Offset is out of range') } - end + subject + end - context 'when offset is bigger than data size' do - let(:offset) { data.bytesize + 1 } + it 'migrates data to object storage' do + Sidekiq::Testing.inline! do + subject - it { expect { subject }.to raise_error('Offset is out of range') } + build_trace_chunk.reload + expect(build_trace_chunk.fog?).to be_truthy + expect(build_trace_chunk.data).to eq(new_data) + end + end end + end - context 'when offset is 10' do - let(:offset) { 10 } + shared_examples_for 'Scheduling no sidekiq worker' do + context 'when new data fulfilled chunk size' do + let(:new_data) { 'a' * described_class::CHUNK_SIZE } + + it 'does not schedule trace chunk flush worker' do + expect(Ci::BuildTraceChunkFlushWorker).not_to receive(:perform_async) - it 'truncates' do subject + end - expect(build_trace_chunk.data).to eq(data.byteslice(0, offset)) + it 'does not migrate data to object storage' do + Sidekiq::Testing.inline!
do + data_store = build_trace_chunk.data_store + + subject + + build_trace_chunk.reload + expect(build_trace_chunk.data_store).to eq(data_store) + end end end end context 'when data_store is redis' do let(:data_store) { :redis } - let(:data) { 'Sample data in redis' } - before do - build_trace_chunk.send(:redis_set_data, data) + context 'when there are no data' do + let(:data) { '' } + + it 'has no data' do + expect(build_trace_chunk.data).to be_empty + end + + it_behaves_like 'Appending correctly' + it_behaves_like 'Scheduling sidekiq worker to flush data to persist store' end - it_behaves_like 'truncates' - end + context 'when there are some data' do + let(:data) { 'Sample data in redis' } - context 'when data_store is database' do - let(:data_store) { :db } - let(:raw_data) { 'Sample data in db' } - let(:data) { raw_data } + before do + build_trace_chunk.send(:unsafe_set_data!, data) + end - it_behaves_like 'truncates' + it 'has data' do + expect(build_trace_chunk.data).to eq(data) + end + + it_behaves_like 'Appending correctly' + it_behaves_like 'Scheduling sidekiq worker to flush data to persist store' + end end - end - describe '#append' do - subject { build_trace_chunk.append(new_data, offset) } + context 'when data_store is database' do + let(:data_store) { :database } - let(:new_data) { 'Sample new data' } - let(:offset) { 0 } - let(:total_data) { data + new_data } + context 'when there are no data' do + let(:data) { '' } - shared_examples_for 'appends' do - context 'when offset is negative' do - let(:offset) { -1 } + it 'has no data' do + expect(build_trace_chunk.data).to be_empty + end - it { expect { subject }.to raise_error('Offset is out of range') } + it_behaves_like 'Appending correctly' + it_behaves_like 'Scheduling no sidekiq worker' end - context 'when offset is bigger than data size' do - let(:offset) { data.bytesize + 1 } + context 'when there are some data' do + let(:raw_data) { 'Sample data in database' } + let(:data) { raw_data } - it { expect { subject }.to raise_error('Offset is out of range') } + it 'has data' do + expect(build_trace_chunk.data).to eq(data) + end + + it_behaves_like 'Appending correctly' + it_behaves_like 'Scheduling no sidekiq worker' end + end - context 'when offset is bigger than data size' do - let(:new_data) { 'a' * (described_class::CHUNK_SIZE + 1) } + context 'when data_store is fog' do + let(:data_store) { :fog } - it { expect { subject }.to raise_error('Chunk size overflow') } + context 'when there are no data' do + let(:data) { '' } + + it 'has no data' do + expect(build_trace_chunk.data).to be_empty + end + + it_behaves_like 'Appending correctly' + it_behaves_like 'Scheduling no sidekiq worker' end - context 'when offset is EOF' do - let(:offset) { data.bytesize } + context 'when there are some data' do + let(:data) { 'Sample data in fog' } - it 'appends' do - subject + before do + build_trace_chunk.send(:unsafe_set_data!, data) + end - expect(build_trace_chunk.data).to eq(total_data) + it 'has data' do + expect(build_trace_chunk.data).to eq(data) end + + it_behaves_like 'Appending correctly' + it_behaves_like 'Scheduling no sidekiq worker' + end + end + end + + describe '#truncate' do + subject { build_trace_chunk.truncate(offset) } + + shared_examples_for 'truncates' do + context 'when offset is negative' do + let(:offset) { -1 } + + it { expect { subject }.to raise_error('Offset is out of range') } + end + + context 'when offset is bigger than data size' do + let(:offset) { data.bytesize + 1 } + + it { expect { subject }.to 
raise_error('Offset is out of range') } end context 'when offset is 10' do let(:offset) { 10 } - it 'appends' do + it 'truncates' do subject - expect(build_trace_chunk.data).to eq(data.byteslice(0, offset) + new_data) + expect(build_trace_chunk.data).to eq(data.byteslice(0, offset)) end end end @@ -228,18 +333,29 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do let(:data) { 'Sample data in redis' } before do - build_trace_chunk.send(:redis_set_data, data) + build_trace_chunk.send(:unsafe_set_data!, data) end - it_behaves_like 'appends' + it_behaves_like 'truncates' end context 'when data_store is database' do - let(:data_store) { :db } - let(:raw_data) { 'Sample data in db' } + let(:data_store) { :database } + let(:raw_data) { 'Sample data in database' } let(:data) { raw_data } - it_behaves_like 'appends' + it_behaves_like 'truncates' + end + + context 'when data_store is fog' do + let(:data_store) { :fog } + let(:data) { 'Sample data in fog' } + + before do + build_trace_chunk.send(:unsafe_set_data!, data) + end + + it_behaves_like 'truncates' end end @@ -253,7 +369,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do let(:data) { 'Sample data in redis' } before do - build_trace_chunk.send(:redis_set_data, data) + build_trace_chunk.send(:unsafe_set_data!, data) end it { is_expected.to eq(data.bytesize) } @@ -265,10 +381,10 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do end context 'when data_store is database' do - let(:data_store) { :db } + let(:data_store) { :database } context 'when data exists' do - let(:raw_data) { 'Sample data in db' } + let(:raw_data) { 'Sample data in database' } let(:data) { raw_data } it { is_expected.to eq(data.bytesize) } @@ -278,10 +394,43 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do it { is_expected.to eq(0) } end end + + context 'when data_store is fog' do + let(:data_store) { :fog } + + context 'when data exists' do + let(:data) { 'Sample data in fog' } + let(:key) { "tmp/builds/#{build.id}/chunks/#{chunk_index}.log" } + + before do + build_trace_chunk.send(:unsafe_set_data!, data) + end + + it { is_expected.to eq(data.bytesize) } + end + + context 'when data does not exist' do + it { is_expected.to eq(0) } + end + end end - describe '#use_database!' do - subject { build_trace_chunk.use_database! } + describe '#persist_data!' do + subject { build_trace_chunk.persist_data! 
} + + shared_examples_for 'Atomic operation' do + context 'when the other process is persisting' do + let(:lease_key) { "trace_write:#{build_trace_chunk.build.id}:chunks:#{build_trace_chunk.chunk_index}" } + + before do + stub_exclusive_lease_taken(lease_key) + end + + it 'raises an error' do + expect { subject }.to raise_error('Failed to obtain a lock') + end + end + end context 'when data_store is redis' do let(:data_store) { :redis } @@ -290,46 +439,93 @@ context 'when data exists' do let(:data) { 'Sample data in redis' } before do - build_trace_chunk.send(:redis_set_data, data) + build_trace_chunk.send(:unsafe_set_data!, data) end - it 'stashes the data' do - expect(build_trace_chunk.data_store).to eq('redis') - expect(build_trace_chunk.send(:redis_data)).to eq(data) - expect(build_trace_chunk.raw_data).to be_nil + it 'persists the data' do + expect(build_trace_chunk.redis?).to be_truthy + expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to eq(data) + expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil + expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound) subject - expect(build_trace_chunk.data_store).to eq('db') - expect(build_trace_chunk.send(:redis_data)).to be_nil - expect(build_trace_chunk.raw_data).to eq(data) + expect(build_trace_chunk.fog?).to be_truthy + expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil + expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil + expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data) end + + it_behaves_like 'Atomic operation' end context 'when data does not exist' do - it 'does not call UPDATE' do - expect(ActiveRecord::QueryRecorder.new { subject }.count).to eq(0) + it 'does not persist' do + expect { subject }.to raise_error('Can not persist empty data') end end end context 'when data_store is database' do - let(:data_store) { :db } + let(:data_store) { :database } - it 'does not call UPDATE' do - expect(ActiveRecord::QueryRecorder.new { subject }.count).to eq(0) + context 'when data exists' do + let(:data) { 'Sample data in database' } + + before do + build_trace_chunk.send(:unsafe_set_data!, data) + end + + it 'persists the data' do + expect(build_trace_chunk.database?).to be_truthy + expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil + expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to eq(data) + expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound) + + subject + + expect(build_trace_chunk.fog?).to be_truthy + expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil + expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil + expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data) + end + + it_behaves_like 'Atomic operation' end - end - describe 'ExclusiveLock' do - before do - stub_exclusive_lease_taken - stub_const('Ci::BuildTraceChunk::WRITE_LOCK_RETRY', 1) + context 'when data does not exist' do + it 'does not persist' do + expect { subject }.to raise_error('Can not persist empty data') + end + end end - it 'raise an error' do - expect { build_trace_chunk.append('ABC', 0) }.to raise_error('Failed to obtain write lock') + context 'when data_store is fog' do + let(:data_store) { :fog } + + context 'when data exists' do + let(:data) { 'Sample data in fog' } + + before do + 
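# Assumption from the naming convention: unsafe_set_data! writes directly to
# the chunk's current store without taking the exclusive lease, which is why
# these specs use it only to seed test data.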
build_trace_chunk.send(:unsafe_set_data!, data) + end + + it 'does not change data store' do + expect(build_trace_chunk.fog?).to be_truthy + expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil + expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil + expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data) + + subject + + expect(build_trace_chunk.fog?).to be_truthy + expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil + expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil + expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data) + end + + it_behaves_like 'Atomic operation' + end end end diff --git a/spec/models/ci/build_trace_chunks/database_spec.rb b/spec/models/ci/build_trace_chunks/database_spec.rb new file mode 100644 index 00000000000..d8fc9d57e95 --- /dev/null +++ b/spec/models/ci/build_trace_chunks/database_spec.rb @@ -0,0 +1,105 @@ +require 'spec_helper' + +describe Ci::BuildTraceChunks::Database do + let(:data_store) { described_class.new } + + describe '#available?' do + subject { data_store.available? } + + it { is_expected.to be_truthy } + end + + describe '#data' do + subject { data_store.data(model) } + + context 'when data exists' do + let(:model) { create(:ci_build_trace_chunk, :database_with_data, initial_data: 'sample data in database') } + + it 'returns the data' do + is_expected.to eq('sample data in database') + end + end + + context 'when data does not exist' do + let(:model) { create(:ci_build_trace_chunk, :database_without_data) } + + it 'returns nil' do + is_expected.to be_nil + end + end + end + + describe '#set_data' do + subject { data_store.set_data(model, data) } + + let(:data) { 'abc123' } + + context 'when data exists' do + let(:model) { create(:ci_build_trace_chunk, :database_with_data, initial_data: 'sample data in database') } + + it 'overwrites data' do + expect(data_store.data(model)).to eq('sample data in database') + + subject + + expect(data_store.data(model)).to eq('abc123') + end + end + + context 'when data does not exist' do + let(:model) { create(:ci_build_trace_chunk, :database_without_data) } + + it 'sets new data' do + expect(data_store.data(model)).to be_nil + + subject + + expect(data_store.data(model)).to eq('abc123') + end + end + end + + describe '#delete_data' do + subject { data_store.delete_data(model) } + + context 'when data exists' do + let(:model) { create(:ci_build_trace_chunk, :database_with_data, initial_data: 'sample data in database') } + + it 'deletes data' do + expect(data_store.data(model)).to eq('sample data in database') + + subject + + expect(data_store.data(model)).to be_nil + end + end + + context 'when data does not exist' do + let(:model) { create(:ci_build_trace_chunk, :database_without_data) } + + it 'does nothing' do + expect(data_store.data(model)).to be_nil + + subject + + expect(data_store.data(model)).to be_nil + end + end + end + + describe '#keys' do + subject { data_store.keys(relation) } + + let(:build) { create(:ci_build) } + let(:relation) { build.trace_chunks } + + before do + create(:ci_build_trace_chunk, :database_with_data, chunk_index: 0, build: build) + create(:ci_build_trace_chunk, :database_with_data, chunk_index: 1, build: build) + end + + it 'returns empty array' do + is_expected.to eq([]) + end + end +end diff --git a/spec/models/ci/build_trace_chunks/fog_spec.rb b/spec/models/ci/build_trace_chunks/fog_spec.rb new file mode 100644 index 00000000000..8f49190af13 --- 
/dev/null +++ b/spec/models/ci/build_trace_chunks/fog_spec.rb @@ -0,0 +1,146 @@ +require 'spec_helper' + +describe Ci::BuildTraceChunks::Fog do + let(:data_store) { described_class.new } + + before do + stub_artifacts_object_storage + end + + describe '#available?' do + subject { data_store.available? } + + context 'when object storage is enabled' do + it { is_expected.to be_truthy } + end + + context 'when object storage is disabled' do + before do + stub_artifacts_object_storage(enabled: false) + end + + it { is_expected.to be_falsy } + end + end + + describe '#data' do + subject { data_store.data(model) } + + context 'when data exists' do + let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'sample data in fog') } + + it 'returns the data' do + is_expected.to eq('sample data in fog') + end + end + + context 'when data does not exist' do + let(:model) { create(:ci_build_trace_chunk, :fog_without_data) } + + it 'raises an error' do + expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound) + end + end + end + + describe '#set_data' do + subject { data_store.set_data(model, data) } + + let(:data) { 'abc123' } + + context 'when data exists' do + let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'sample data in fog') } + + it 'overwrites data' do + expect(data_store.data(model)).to eq('sample data in fog') + + subject + + expect(data_store.data(model)).to eq('abc123') + end + end + + context 'when data does not exist' do + let(:model) { create(:ci_build_trace_chunk, :fog_without_data) } + + it 'sets new data' do + expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound) + + subject + + expect(data_store.data(model)).to eq('abc123') + end + end + end + + describe '#delete_data' do + subject { data_store.delete_data(model) } + + context 'when data exists' do + let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'sample data in fog') } + + it 'deletes data' do + expect(data_store.data(model)).to eq('sample data in fog') + + subject + + expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound) + end + end + + context 'when data does not exist' do + let(:model) { create(:ci_build_trace_chunk, :fog_without_data) } + + it 'does nothing' do + expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound) + + subject + + expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound) + end + end + end + + describe '#keys' do + subject { data_store.keys(relation) } + + let(:build) { create(:ci_build) } + let(:relation) { build.trace_chunks } + + before do + create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 0, build: build) + create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 1, build: build) + end + + it 'returns keys' do + is_expected.to eq([[build.id, 0], [build.id, 1]]) + end + end + + describe '#delete_keys' do + subject { data_store.delete_keys(keys) } + + let(:build) { create(:ci_build) } + let(:relation) { build.trace_chunks } + let(:keys) { data_store.keys(relation) } + + before do + create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 0, build: build) + create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 1, build: build) + end + + it 'deletes multiple data' do + ::Fog::Storage.new(JobArtifactUploader.object_store_credentials).tap do |connection| + expect(connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/0.log")[:body]).to be_present + expect(connection.get_object('artifacts', 
"tmp/builds/#{build.id}/chunks/1.log")[:body]).to be_present + end + + subject + + ::Fog::Storage.new(JobArtifactUploader.object_store_credentials).tap do |connection| + expect { connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/0.log")[:body] }.to raise_error(Excon::Error::NotFound) + expect { connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/1.log")[:body] }.to raise_error(Excon::Error::NotFound) + end + end + end +end diff --git a/spec/models/ci/build_trace_chunks/redis_spec.rb b/spec/models/ci/build_trace_chunks/redis_spec.rb new file mode 100644 index 00000000000..9da1e6a95ee --- /dev/null +++ b/spec/models/ci/build_trace_chunks/redis_spec.rb @@ -0,0 +1,132 @@ +require 'spec_helper' + +describe Ci::BuildTraceChunks::Redis, :clean_gitlab_redis_shared_state do + let(:data_store) { described_class.new } + + describe '#available?' do + subject { data_store.available? } + + it { is_expected.to be_truthy } + end + + describe '#data' do + subject { data_store.data(model) } + + context 'when data exists' do + let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'sample data in redis') } + + it 'returns the data' do + is_expected.to eq('sample data in redis') + end + end + + context 'when data does not exist' do + let(:model) { create(:ci_build_trace_chunk, :redis_without_data) } + + it 'returns nil' do + is_expected.to be_nil + end + end + end + + describe '#set_data' do + subject { data_store.set_data(model, data) } + + let(:data) { 'abc123' } + + context 'when data exists' do + let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'sample data in redis') } + + it 'overwrites data' do + expect(data_store.data(model)).to eq('sample data in redis') + + subject + + expect(data_store.data(model)).to eq('abc123') + end + end + + context 'when data does not exist' do + let(:model) { create(:ci_build_trace_chunk, :redis_without_data) } + + it 'sets new data' do + expect(data_store.data(model)).to be_nil + + subject + + expect(data_store.data(model)).to eq('abc123') + end + end + end + + describe '#delete_data' do + subject { data_store.delete_data(model) } + + context 'when data exists' do + let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'sample data in redis') } + + it 'deletes data' do + expect(data_store.data(model)).to eq('sample data in redis') + + subject + + expect(data_store.data(model)).to be_nil + end + end + + context 'when data does not exist' do + let(:model) { create(:ci_build_trace_chunk, :redis_without_data) } + + it 'does nothing' do + expect(data_store.data(model)).to be_nil + + subject + + expect(data_store.data(model)).to be_nil + end + end + end + + describe '#keys' do + subject { data_store.keys(relation) } + + let(:build) { create(:ci_build) } + let(:relation) { build.trace_chunks } + + before do + create(:ci_build_trace_chunk, :redis_with_data, chunk_index: 0, build: build) + create(:ci_build_trace_chunk, :redis_with_data, chunk_index: 1, build: build) + end + + it 'returns keys' do + is_expected.to eq([[build.id, 0], [build.id, 1]]) + end + end + + describe '#delete_keys' do + subject { data_store.delete_keys(keys) } + + let(:build) { create(:ci_build) } + let(:relation) { build.trace_chunks } + let(:keys) { data_store.keys(relation) } + + before do + create(:ci_build_trace_chunk, :redis_with_data, chunk_index: 0, build: build) + create(:ci_build_trace_chunk, :redis_with_data, chunk_index: 1, build: build) + end + + it 'deletes multiple data' do + 
Gitlab::Redis::SharedState.with do |redis| + expect(redis.exists("gitlab:ci:trace:#{build.id}:chunks:0")).to be_truthy + expect(redis.exists("gitlab:ci:trace:#{build.id}:chunks:1")).to be_truthy + end + + subject + + Gitlab::Redis::SharedState.with do |redis| + expect(redis.exists("gitlab:ci:trace:#{build.id}:chunks:0")).to be_falsy + expect(redis.exists("gitlab:ci:trace:#{build.id}:chunks:1")).to be_falsy + end + end + end +end diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb index efddd4e7662..2f87fb5f25d 100644 --- a/spec/models/ci/job_artifact_spec.rb +++ b/spec/models/ci/job_artifact_spec.rb @@ -25,7 +25,7 @@ describe Ci::JobArtifact do end it 'does not schedule the migration' do - expect(ObjectStorageUploadWorker).not_to receive(:perform_async) + expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async) subject end diff --git a/spec/models/concerns/cache_markdown_field_spec.rb b/spec/models/concerns/cache_markdown_field_spec.rb index 2d75422ee68..da26d802688 100644 --- a/spec/models/concerns/cache_markdown_field_spec.rb +++ b/spec/models/concerns/cache_markdown_field_spec.rb @@ -370,4 +370,20 @@ describe CacheMarkdownField do end end end + + describe CacheMarkdownField::MarkdownEngine do + subject { lambda { |version| CacheMarkdownField::MarkdownEngine.from_version(version) } } + + it 'returns :common_mark as a default' do + expect(subject.call(nil)).to eq :common_mark + end + + it 'returns :common_mark' do + expect(subject.call(CacheMarkdownField::CACHE_COMMONMARK_VERSION)).to eq :common_mark + end + + it 'returns :redcarpet' do + expect(subject.call(CacheMarkdownField::CACHE_REDCARPET_VERSION)).to eq :redcarpet + end + end end diff --git a/spec/models/import_export_upload_spec.rb b/spec/models/import_export_upload_spec.rb new file mode 100644 index 00000000000..58af84b8a08 --- /dev/null +++ b/spec/models/import_export_upload_spec.rb @@ -0,0 +1,25 @@ +require 'spec_helper' + +describe ImportExportUpload do + subject { described_class.new(project: create(:project)) } + + shared_examples 'stores the Import/Export file' do |method| + it 'stores the import file' do + subject.public_send("#{method}=", fixture_file_upload('spec/fixtures/project_export.tar.gz')) + + subject.save! 
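# The expected URL below mirrors CarrierWave's system-upload layout,
# /uploads/-/system/<model>/<mounted_as>/<id>/<filename>; the layout is
# inferred from the assertion that follows, not from uploader documentation.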
+ + url = "/uploads/-/system/import_export_upload/#{method}/#{subject.id}/project_export.tar.gz" + + expect(subject.public_send(method).url).to eq(url) + end + end + + context 'import' do + it_behaves_like 'stores the Import/Export file', :import_file + end + + context 'export' do + it_behaves_like 'stores the Import/Export file', :export_file + end +end diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb index c3aa6cd6fed..b9512b81678 100644 --- a/spec/models/project_spec.rb +++ b/spec/models/project_spec.rb @@ -2782,6 +2782,10 @@ describe Project do let(:legacy_project) { create(:project, :legacy_storage, :with_export) } let(:project) { create(:project, :with_export) } + before do + stub_feature_flags(import_export_object_storage: false) + end + it 'removes the exports directory for the project' do expect(File.exist?(project.export_path)).to be_truthy @@ -2830,12 +2834,14 @@ describe Project do let(:project) { create(:project, :with_export) } it 'removes the exported project file' do + stub_feature_flags(import_export_object_storage: false) + exported_file = project.export_project_path expect(File.exist?(exported_file)).to be_truthy - allow(FileUtils).to receive(:rm_f).and_call_original - expect(FileUtils).to receive(:rm_f).with(exported_file).and_call_original + allow(FileUtils).to receive(:rm_rf).and_call_original + expect(FileUtils).to receive(:rm_rf).with(exported_file).and_call_original project.remove_exported_project_file diff --git a/spec/models/service_spec.rb b/spec/models/service_spec.rb index 28c908ea425..a849af062c5 100644 --- a/spec/models/service_spec.rb +++ b/spec/models/service_spec.rb @@ -78,7 +78,7 @@ describe Service do context 'when template is invalid' do it 'sets service template to inactive when template is invalid' do project = create(:project) - template = JiraService.new(template: true, active: true) + template = KubernetesService.new(template: true, active: true) template.save(validate: false) service = described_class.build_from_template(project.id, template) diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb index 9b5c290b9f9..d6d340bd806 100644 --- a/spec/policies/group_policy_spec.rb +++ b/spec/policies/group_policy_spec.rb @@ -9,7 +9,11 @@ describe GroupPolicy do let(:admin) { create(:admin) } let(:group) { create(:group, :private) } - let(:guest_permissions) { [:read_label, :read_group, :upload_file, :read_namespace] } + let(:guest_permissions) do + [:read_label, :read_group, :upload_file, :read_namespace, :read_group_activity, + :read_group_issues, :read_group_boards, :read_group_labels, :read_group_milestones, + :read_group_merge_requests] + end let(:reporter_permissions) { [:admin_label] } diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb index eba39bb6ccc..1716d182782 100644 --- a/spec/requests/api/merge_requests_spec.rb +++ b/spec/requests/api/merge_requests_spec.rb @@ -306,6 +306,14 @@ describe API::MergeRequests do expect(json_response['changes_count']).to eq(merge_request.merge_request_diff.real_size) end + it 'exposes description and title html when render_html is true' do + get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), render_html: true + + expect(response).to have_gitlab_http_status(200) + + expect(json_response).to include('title_html', 'description_html') + end + context 'merge_request_metrics' do before do merge_request.metrics.update!(merged_by: user, diff --git a/spec/requests/api/project_export_spec.rb 
b/spec/requests/api/project_export_spec.rb index 3834d27d0a9..a4615bd081f 100644 --- a/spec/requests/api/project_export_spec.rb +++ b/spec/requests/api/project_export_spec.rb @@ -192,6 +192,13 @@ describe API::ProjectExport do context 'when upload complete' do before do FileUtils.rm_rf(project_after_export.export_path) + + if project_after_export.export_project_object_exists? + upload = project_after_export.import_export_upload + + upload.remove_export_file! + upload.save + end end it_behaves_like '404 response' do @@ -261,6 +268,22 @@ describe API::ProjectExport do it_behaves_like 'get project export download not found' end end + + context 'when an uploader is used' do + before do + stub_uploads_object_storage(ImportExportUploader) + + [project, project_finished, project_after_export].each do |p| + p.add_master(user) + + upload = ImportExportUpload.new(project: p) + upload.export_file = fixture_file_upload('spec/fixtures/project_export.tar.gz', "`/tar.gz") + upload.save! + end + end + + it_behaves_like 'get project download by strategy' + end end describe 'POST /projects/:project_id/export' do diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb index e7639599874..d57993ab454 100644 --- a/spec/requests/api/runner_spec.rb +++ b/spec/requests/api/runner_spec.rb @@ -851,6 +851,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do it 'does not update job status and job trace' do update_job(state: 'success', trace: 'BUILD TRACE UPDATED') + job.reload expect(response).to have_gitlab_http_status(403) expect(response.header['Job-Status']).to eq 'failed' expect(job.trace.raw).to eq 'Job failed' diff --git a/spec/services/import_export_clean_up_service_spec.rb b/spec/services/import_export_clean_up_service_spec.rb index 1875d0448cd..d5fcef1246f 100644 --- a/spec/services/import_export_clean_up_service_spec.rb +++ b/spec/services/import_export_clean_up_service_spec.rb @@ -11,7 +11,6 @@ describe ImportExportCleanUpService do path = '/invalid/path/' stub_repository_downloads_path(path) - expect(File).to receive(:directory?).with(path + tmp_import_export_folder).and_return(false).at_least(:once) expect(service).not_to receive(:clean_up_export_files) service.execute @@ -38,6 +37,24 @@ describe ImportExportCleanUpService do end end + context 'with uploader exports' do + it 'removes old files' do + upload = create(:import_export_upload, + updated_at: 2.days.ago, + export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz')) + + expect { service.execute }.to change { upload.reload.export_file.file.nil? }.to(true) + end + + it 'does not remove new files' do + upload = create(:import_export_upload, + updated_at: 1.hour.ago, + export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz')) + + expect { service.execute }.not_to change { upload.reload.export_file.file.nil? } + end + end + def in_directory_with_files(mtime:) Dir.mktmpdir do |tmpdir| stub_repository_downloads_path(tmpdir) diff --git a/spec/services/preview_markdown_service_spec.rb b/spec/services/preview_markdown_service_spec.rb index 64a9559791f..81dc7c57f4a 100644 --- a/spec/services/preview_markdown_service_spec.rb +++ b/spec/services/preview_markdown_service_spec.rb @@ -64,4 +64,16 @@ describe PreviewMarkdownService do expect(result[:commands]).to eq 'Sets time estimate to 2y.' 
end end + + it 'sets correct markdown engine' do + service = described_class.new(project, user, { markdown_version: CacheMarkdownField::CACHE_REDCARPET_VERSION }) + result = service.execute + + expect(result[:markdown_engine]).to eq :redcarpet + + service = described_class.new(project, user, { markdown_version: CacheMarkdownField::CACHE_COMMONMARK_VERSION }) + result = service.execute + + expect(result[:markdown_engine]).to eq :common_mark + end end diff --git a/spec/services/projects/autocomplete_service_spec.rb b/spec/services/projects/autocomplete_service_spec.rb index 6fd73a50511..e98df375d48 100644 --- a/spec/services/projects/autocomplete_service_spec.rb +++ b/spec/services/projects/autocomplete_service_spec.rb @@ -131,4 +131,58 @@ describe Projects::AutocompleteService do end end end + + describe '#labels_as_hash' do + def expect_labels_to_equal(labels, expected_labels) + expect(labels.size).to eq(expected_labels.size) + extract_title = lambda { |label| label['title'] } + expect(labels.map(&extract_title)).to eq(expected_labels.map(&extract_title)) + end + + let(:user) { create(:user) } + let(:group) { create(:group, :nested) } + let!(:sub_group) { create(:group, parent: group) } + let(:project) { create(:project, :public, group: group) } + let(:issue) { create(:issue, project: project) } + + let!(:label1) { create(:label, project: project) } + let!(:label2) { create(:label, project: project) } + let!(:sub_group_label) { create(:group_label, group: sub_group) } + let!(:parent_group_label) { create(:group_label, group: group.parent) } + + before do + create(:group_member, group: group, user: user) + end + + it 'returns labels from project and ancestor groups' do + service = described_class.new(project, user) + results = service.labels_as_hash + expected_labels = [label1, label2, parent_group_label] + + expect_labels_to_equal(results, expected_labels) + end + + context 'some labels are already assigned' do + before do + issue.labels << label1 + end + + it 'marks already assigned as set' do + service = described_class.new(project, user) + results = service.labels_as_hash(issue) + expected_labels = [label1, label2, parent_group_label] + + expect_labels_to_equal(results, expected_labels) + + assigned_label_titles = issue.labels.map(&:title) + results.each do |hash| + if assigned_label_titles.include?(hash['title']) + expect(hash[:set]).to eq(true) + else + expect(hash.key?(:set)).to eq(false) + end + end + end + end + end end diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index fdce8e84620..46ec1bcef24 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -170,6 +170,17 @@ RSpec.configure do |config| redis_queues_cleanup! end + config.around(:each, :use_clean_rails_memory_store_fragment_caching) do |example| + caching_store = ActionController::Base.cache_store + ActionController::Base.cache_store = ActiveSupport::Cache::MemoryStore.new + ActionController::Base.perform_caching = true + + example.run + + ActionController::Base.perform_caching = false + ActionController::Base.cache_store = caching_store + end + # The :each scope runs "inside" the example, so this hook ensures the DB is in the # correct state before any examples' before hooks are called. 
This prevents a # problem where `ScheduleIssuesClosedAtTypeChange` (or any migration that depends diff --git a/spec/support/helpers/stub_object_storage.rb b/spec/support/helpers/stub_object_storage.rb index 471b0a74a19..58b5c6a6435 100644 --- a/spec/support/helpers/stub_object_storage.rb +++ b/spec/support/helpers/stub_object_storage.rb @@ -25,6 +25,11 @@ module StubObjectStorage ::Fog::Storage.new(connection_params).tap do |connection| begin connection.directories.create(key: remote_directory) + + # Cleanup remaining files + connection.directories.each do |directory| + directory.files.map(&:destroy) + end rescue Excon::Error::Conflict end end diff --git a/spec/support/helpers/wait_for_requests.rb b/spec/support/helpers/wait_for_requests.rb index fda0e29f983..c7f878b7371 100644 --- a/spec/support/helpers/wait_for_requests.rb +++ b/spec/support/helpers/wait_for_requests.rb @@ -24,7 +24,9 @@ module WaitForRequests # Wait for client-side AJAX requests def wait_for_requests - wait_for('JS requests complete') { finished_all_js_requests? } + wait_for('JS requests complete', max_wait_time: 2 * Capybara.default_max_wait_time) do + finished_all_js_requests? + end end # Wait for active Rack requests and client-side AJAX requests diff --git a/spec/support/shared_examples/ci_trace_shared_examples.rb b/spec/support/shared_examples/ci_trace_shared_examples.rb index db723a323f8..94e82b8ce90 100644 --- a/spec/support/shared_examples/ci_trace_shared_examples.rb +++ b/spec/support/shared_examples/ci_trace_shared_examples.rb @@ -138,6 +138,28 @@ shared_examples_for 'common trace features' do end end + + describe '#write' do + subject { trace.send(:write, mode) { } } + + let(:mode) { 'wb' } + + context 'when archived trace does not exist yet' do + it 'does not raise an error' do + expect { subject }.not_to raise_error + end + end + + context 'when archived trace already exists' do + before do + create(:ci_job_artifact, :trace, job: build) + end + + it 'raises an error' do + expect { subject }.to raise_error(Gitlab::Ci::Trace::AlreadyArchivedError) + end + end + end + describe '#set' do before do trace.set("12") @@ -574,7 +596,7 @@ shared_examples_for 'trace with disabled live trace feature' do it 'does not archive' do expect_any_instance_of(described_class).not_to receive(:archive_stream!) - expect { subject }.to raise_error('Already archived') + expect { subject }.to raise_error(Gitlab::Ci::Trace::AlreadyArchivedError) expect(build.job_artifacts_trace.file.exists?).to be_truthy end end @@ -589,6 +611,55 @@ shared_examples_for 'trace with disabled live trace feature' do end end end + + describe '#erase!' do + subject { trace.erase! 
} + + context 'when it is a live trace' do + context 'when trace is stored in database' do + let(:build) { create(:ci_build) } + + before do + build.update_column(:trace, 'sample trace') + end + + it { expect(trace.raw).not_to be_nil } + + it "removes trace" do + subject + + expect(trace.raw).to be_nil + end + end + + context 'when trace is stored in file storage' do + let(:build) { create(:ci_build, :trace_live) } + + it { expect(trace.raw).not_to be_nil } + + it "removes trace" do + subject + + expect(trace.raw).to be_nil + end + end + end + + context 'when it is an archived trace' do + let(:build) { create(:ci_build, :trace_artifact) } + + it "has trace at first" do + expect(trace.raw).not_to be_nil + end + + it "removes trace" do + subject + + build.reload + expect(trace.raw).to be_nil + end + end + end end shared_examples_for 'trace with enabled live trace feature' do @@ -761,7 +832,7 @@ shared_examples_for 'trace with enabled live trace feature' do it 'does not archive' do expect_any_instance_of(described_class).not_to receive(:archive_stream!) - expect { subject }.to raise_error('Already archived') + expect { subject }.to raise_error(Gitlab::Ci::Trace::AlreadyArchivedError) expect(build.job_artifacts_trace.file.exists?).to be_truthy end end @@ -776,4 +847,35 @@ shared_examples_for 'trace with enabled live trace feature' do end end end + + describe '#erase!' do + subject { trace.erase! } + + context 'when it is a live trace' do + let(:build) { create(:ci_build, :trace_live) } + + it { expect(trace.raw).not_to be_nil } + + it "removes trace" do + subject + + expect(trace.raw).to be_nil + end + end + + context 'when it is an archived trace' do + let(:build) { create(:ci_build, :trace_artifact) } + + it "has trace at first" do + expect(trace.raw).not_to be_nil + end + + it "removes trace" do + subject + + build.reload + expect(trace.raw).to be_nil + end + end + end end diff --git a/spec/uploaders/import_export_uploader_spec.rb b/spec/uploaders/import_export_uploader_spec.rb new file mode 100644 index 00000000000..51b173b682d --- /dev/null +++ b/spec/uploaders/import_export_uploader_spec.rb @@ -0,0 +1,20 @@ +require 'spec_helper' + +describe ImportExportUploader do + let(:model) { build_stubbed(:import_export_upload) } + let(:upload) { create(:upload, model: model) } + + subject { described_class.new(model, :import_file) } + + context "object_store is REMOTE" do + before do + stub_uploads_object_storage + end + + include_context 'with storage', described_class::Store::REMOTE + + it_behaves_like 'builds correct paths', + store_dir: %r[import_export_upload/import_file/], + upload_path: %r[import_export_upload/import_file/] + end +end diff --git a/spec/workers/ci/archive_traces_cron_worker_spec.rb b/spec/workers/ci/archive_traces_cron_worker_spec.rb index 9af51b7d4d8..23f5dda298a 100644 --- a/spec/workers/ci/archive_traces_cron_worker_spec.rb +++ b/spec/workers/ci/archive_traces_cron_worker_spec.rb @@ -25,24 +25,36 @@ describe Ci::ArchiveTracesCronWorker do end end - context 'when a job was succeeded' do + context 'when a job succeeded' do let!(:build) { create(:ci_build, :success, :trace_live) } it_behaves_like 'archives trace' - context 'when archive raised an exception' do - let!(:build) { create(:ci_build, :success, :trace_artifact, :trace_live) } + context 'when a trace had already been archived' do + let!(:build) { create(:ci_build, :success, :trace_live, :trace_artifact) } let!(:build2) { create(:ci_build, :success, :trace_live) } - it 'archives valid targets' do - 
diff --git a/spec/uploaders/import_export_uploader_spec.rb b/spec/uploaders/import_export_uploader_spec.rb
new file mode 100644
index 00000000000..51b173b682d
--- /dev/null
+++ b/spec/uploaders/import_export_uploader_spec.rb
@@ -0,0 +1,20 @@
+require 'spec_helper'
+
+describe ImportExportUploader do
+  let(:model) { build_stubbed(:import_export_upload) }
+  let(:upload) { create(:upload, model: model) }
+
+  subject { described_class.new(model, :import_file) }
+
+  context "object_store is REMOTE" do
+    before do
+      stub_uploads_object_storage
+    end
+
+    include_context 'with storage', described_class::Store::REMOTE
+
+    it_behaves_like 'builds correct paths',
+                    store_dir: %r[import_export_upload/import_file/],
+                    upload_path: %r[import_export_upload/import_file/]
+  end
+end
diff --git a/spec/workers/ci/archive_traces_cron_worker_spec.rb b/spec/workers/ci/archive_traces_cron_worker_spec.rb
index 9af51b7d4d8..23f5dda298a 100644
--- a/spec/workers/ci/archive_traces_cron_worker_spec.rb
+++ b/spec/workers/ci/archive_traces_cron_worker_spec.rb
@@ -25,24 +25,36 @@ describe Ci::ArchiveTracesCronWorker do
     end
   end

-  context 'when a job was succeeded' do
+  context 'when a job succeeded' do
     let!(:build) { create(:ci_build, :success, :trace_live) }

     it_behaves_like 'archives trace'

-    context 'when archive raised an exception' do
-      let!(:build) { create(:ci_build, :success, :trace_artifact, :trace_live) }
+    context 'when a trace had already been archived' do
+      let!(:build) { create(:ci_build, :success, :trace_live, :trace_artifact) }
       let!(:build2) { create(:ci_build, :success, :trace_live) }

-      it 'archives valid targets' do
-        expect(Rails.logger).to receive(:error).with("Failed to archive stale live trace. id: #{build.id} message: Already archived")
-
+      it 'continues to archive live traces' do
         subject

         build2.reload
         expect(build2.job_artifacts_trace).to be_exist
       end
     end
+
+    context 'when an unexpected exception happened during archiving' do
+      let!(:build) { create(:ci_build, :success, :trace_live) }
+
+      before do
+        allow_any_instance_of(Gitlab::Ci::Trace).to receive(:archive!).and_raise('Unexpected error')
+      end
+
+      it 'logs an error' do
+        expect(Rails.logger).to receive(:error).with("Failed to archive stale live trace. id: #{build.id} message: Unexpected error")
+
+        subject
+      end
+    end
   end

   context 'when a job was cancelled' do
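The reworked worker examples separate two cases the old single spec conflated: a trace that is already archived is skipped quietly, while an unexpected failure is logged and the sweep continues. Below is a rough sketch of that iterate-rescue-log shape; `archive_stale_traces` and the stand-in build structs are hypothetical, and only the log message format appears in the spec:

    require 'logger'

    # Stand-in builds: one whose trace archiving raises, one that succeeds.
    FailingBuild = Struct.new(:id) do
      def archive_trace!
        raise 'Unexpected error'
      end
    end

    HealthyBuild = Struct.new(:id) do
      def archive_trace!
        true
      end
    end

    def archive_stale_traces(builds, logger)
      builds.each do |build|
        begin
          build.archive_trace!
        rescue => e
          # Log and keep going: one failing build must not block the rest.
          logger.error("Failed to archive stale live trace. id: #{build.id} message: #{e.message}")
        end
      end
    end

    # Build 1 logs an error; build 2 is still archived afterwards.
    archive_stale_traces([FailingBuild.new(1), HealthyBuild.new(2)], Logger.new($stdout))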
diff --git a/spec/workers/object_storage_upload_worker_spec.rb b/spec/workers/object_storage_upload_worker_spec.rb
deleted file mode 100644
index 32ddcbe9757..00000000000
--- a/spec/workers/object_storage_upload_worker_spec.rb
+++ /dev/null
@@ -1,108 +0,0 @@
-require 'spec_helper'
-
-describe ObjectStorageUploadWorker do
-  let(:local) { ObjectStorage::Store::LOCAL }
-  let(:remote) { ObjectStorage::Store::REMOTE }
-
-  def perform
-    described_class.perform_async(uploader_class.name, subject_class, file_field, subject_id)
-  end
-
-  context 'for LFS' do
-    let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) }
-    let(:uploader_class) { LfsObjectUploader }
-    let(:subject_class) { LfsObject }
-    let(:file_field) { :file }
-    let(:subject_id) { lfs_object.id }
-
-    context 'when object storage is enabled' do
-      before do
-        stub_lfs_object_storage(background_upload: true)
-      end
-
-      it 'uploads object to storage' do
-        expect { perform }.to change { lfs_object.reload.file_store }.from(local).to(remote)
-      end
-
-      context 'when background upload is disabled' do
-        before do
-          allow(Gitlab.config.lfs.object_store).to receive(:background_upload) { false }
-        end
-
-        it 'is skipped' do
-          expect { perform }.not_to change { lfs_object.reload.file_store }
-        end
-      end
-    end
-
-    context 'when object storage is disabled' do
-      before do
-        stub_lfs_object_storage(enabled: false)
-      end
-
-      it "doesn't migrate files" do
-        perform
-
-        expect(lfs_object.reload.file_store).to eq(local)
-      end
-    end
-  end
-
-  context 'for legacy artifacts' do
-    let(:build) { create(:ci_build, :legacy_artifacts) }
-    let(:uploader_class) { LegacyArtifactUploader }
-    let(:subject_class) { Ci::Build }
-    let(:file_field) { :artifacts_file }
-    let(:subject_id) { build.id }
-
-    context 'when local storage is used' do
-      let(:store) { local }
-
-      context 'and remote storage is defined' do
-        before do
-          stub_artifacts_object_storage(background_upload: true)
-        end
-
-        it "migrates file to remote storage" do
-          perform
-
-          expect(build.reload.artifacts_file_store).to eq(remote)
-        end
-
-        context 'for artifacts_metadata' do
-          let(:file_field) { :artifacts_metadata }
-
-          it 'migrates metadata to remote storage' do
-            perform
-
-            expect(build.reload.artifacts_metadata_store).to eq(remote)
-          end
-        end
-      end
-    end
-  end
-
-  context 'for job artifacts' do
-    let(:artifact) { create(:ci_job_artifact, :archive) }
-    let(:uploader_class) { JobArtifactUploader }
-    let(:subject_class) { Ci::JobArtifact }
-    let(:file_field) { :file }
-    let(:subject_id) { artifact.id }
-
-    context 'when local storage is used' do
-      let(:store) { local }
-
-      context 'and remote storage is defined' do
-        before do
-          stub_artifacts_object_storage(background_upload: true)
-        end
-
-        it "migrates file to remote storage" do
-          perform
-
-          expect(artifact.reload.file_store).to eq(remote)
-        end
-      end
-    end
-  end
-end
diff --git a/spec/workers/repository_check/batch_worker_spec.rb b/spec/workers/repository_check/batch_worker_spec.rb
index 6bc551be9ad..ede271b2cdd 100644
--- a/spec/workers/repository_check/batch_worker_spec.rb
+++ b/spec/workers/repository_check/batch_worker_spec.rb
@@ -62,4 +62,12 @@ describe RepositoryCheck::BatchWorker do

     expect(subject.perform(shard_name)).to eq([])
   end
+
+  it 'does not run if the exclusive lease is taken' do
+    allow(subject).to receive(:try_obtain_lease).and_return(false)
+
+    expect(subject).not_to receive(:perform_repository_checks)
+
+    subject.perform(shard_name)
+  end
 end
diff --git a/spec/workers/repository_check/dispatch_worker_spec.rb b/spec/workers/repository_check/dispatch_worker_spec.rb
index 20a4f1f5344..7877429aa8f 100644
--- a/spec/workers/repository_check/dispatch_worker_spec.rb
+++ b/spec/workers/repository_check/dispatch_worker_spec.rb
@@ -11,6 +11,14 @@ describe RepositoryCheck::DispatchWorker do
     subject.perform
   end

+  it 'does nothing if the exclusive lease is taken' do
+    allow(subject).to receive(:try_obtain_lease).and_return(false)
+
+    expect(RepositoryCheck::BatchWorker).not_to receive(:perform_async)
+
+    subject.perform
+  end
+
   it 'dispatches work to RepositoryCheck::BatchWorker' do
     expect(RepositoryCheck::BatchWorker).to receive(:perform_async).at_least(:once)
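Both repository-check workers now return early when `try_obtain_lease` fails, so only one instance of each runs at a time. GitLab's real lease is the Redis-backed Gitlab::ExclusiveLease; the in-memory registry below is a hypothetical stand-in that only mirrors the shape of the guard these specs exercise, and the lease key name is assumed:

    require 'set'

    # In-memory stand-in for a Redis-backed exclusive lease.
    class LeaseRegistry
      @held = Set.new

      def self.try_obtain(key)
        return false if @held.include?(key)

        @held.add(key)
        true
      end

      def self.release(key)
        @held.delete(key)
      end
    end

    class DispatchWorkerSketch
      LEASE_KEY = 'repository_check/dispatch_worker'.freeze # key name is assumed

      def perform
        # Do nothing if another instance already holds the lease.
        return unless LeaseRegistry.try_obtain(LEASE_KEY)

        begin
          puts 'dispatching RepositoryCheck::BatchWorker jobs'
        ensure
          LeaseRegistry.release(LEASE_KEY)
        end
      end
    end

    DispatchWorkerSketch.new.perform # runs, then releases the lease

If a second instance called perform while the first still held the lease, try_obtain would return false and it would exit without dispatching, which is exactly what the two new examples assert.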