summaryrefslogtreecommitdiff
path: root/spec
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2023-04-21 12:21:08 +0000
committerGitLab Bot <gitlab-bot@gitlab.com>2023-04-21 12:21:08 +0000
commite7198b914bf1d6594909e35d3d00d0a0b260f250 (patch)
tree6fe636fedeae9c679839d5bf27dc091af037e765 /spec
parent3b80f22aba42e3e424de5c3dd15cc11f96aaac65 (diff)
downloadgitlab-ce-e7198b914bf1d6594909e35d3d00d0a0b260f250.tar.gz
Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'spec')
-rw-r--r--spec/controllers/import/phabricator_controller_spec.rb83
-rw-r--r--spec/features/projects/new_project_spec.rb3
-rw-r--r--spec/frontend/behaviors/components/json_table_spec.js3
-rw-r--r--spec/frontend/notebook/cells/output/dataframe_spec.js10
-rw-r--r--spec/frontend/notebook/cells/output/dataframe_util_spec.js34
-rw-r--r--spec/frontend/notebook/mock_data.js52
-rw-r--r--spec/frontend/vue_shared/components/file_finder/index_spec.js250
-rw-r--r--spec/lib/gitlab/auth/auth_finders_spec.rb11
-rw-r--r--spec/lib/gitlab/import_sources_spec.rb13
-rw-r--r--spec/lib/gitlab/phabricator_import/cache/map_spec.rb85
-rw-r--r--spec/lib/gitlab/phabricator_import/conduit/client_spec.rb59
-rw-r--r--spec/lib/gitlab/phabricator_import/conduit/maniphest_spec.rb39
-rw-r--r--spec/lib/gitlab/phabricator_import/conduit/response_spec.rb79
-rw-r--r--spec/lib/gitlab/phabricator_import/conduit/tasks_response_spec.rb27
-rw-r--r--spec/lib/gitlab/phabricator_import/conduit/user_spec.rb49
-rw-r--r--spec/lib/gitlab/phabricator_import/conduit/users_response_spec.rb21
-rw-r--r--spec/lib/gitlab/phabricator_import/importer_spec.rb35
-rw-r--r--spec/lib/gitlab/phabricator_import/issues/importer_spec.rb61
-rw-r--r--spec/lib/gitlab/phabricator_import/issues/task_importer_spec.rb83
-rw-r--r--spec/lib/gitlab/phabricator_import/project_creator_spec.rb59
-rw-r--r--spec/lib/gitlab/phabricator_import/representation/task_spec.rb49
-rw-r--r--spec/lib/gitlab/phabricator_import/representation/user_spec.rb28
-rw-r--r--spec/lib/gitlab/phabricator_import/user_finder_spec.rb93
-rw-r--r--spec/lib/gitlab/phabricator_import/worker_state_spec.rb49
-rw-r--r--spec/lib/gitlab/sidekiq_config/worker_router_spec.rb1
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb4
-rw-r--r--spec/models/application_setting_spec.rb11
-rw-r--r--spec/models/namespace/root_storage_statistics_spec.rb162
-rw-r--r--spec/requests/api/internal/kubernetes_spec.rb20
-rw-r--r--spec/routing/import_routing_spec.rb12
-rw-r--r--spec/support/rspec_order_todo.yml18
-rw-r--r--spec/tooling/lib/tooling/gettext_extractor_spec.rb36
-rw-r--r--spec/workers/every_sidekiq_worker_spec.rb1
-rw-r--r--spec/workers/gitlab/phabricator_import/base_worker_spec.rb74
-rw-r--r--spec/workers/gitlab/phabricator_import/import_tasks_worker_spec.rb16
35 files changed, 430 insertions, 1200 deletions
diff --git a/spec/controllers/import/phabricator_controller_spec.rb b/spec/controllers/import/phabricator_controller_spec.rb
deleted file mode 100644
index 9be85a40d82..00000000000
--- a/spec/controllers/import/phabricator_controller_spec.rb
+++ /dev/null
@@ -1,83 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Import::PhabricatorController do
- let(:current_user) { create(:user) }
-
- before do
- sign_in current_user
- end
-
- describe 'GET #new' do
- subject { get :new }
-
- context 'when the import source is not available' do
- before do
- stub_application_setting(import_sources: [])
- end
-
- it { is_expected.to have_gitlab_http_status(:not_found) }
- end
-
- context 'when the import source is available' do
- before do
- stub_application_setting(import_sources: ['phabricator'])
- end
-
- it { is_expected.to have_gitlab_http_status(:ok) }
- end
- end
-
- describe 'POST #create' do
- subject(:post_create) { post :create, params: params }
-
- context 'with valid params' do
- let(:params) do
- { path: 'phab-import',
- name: 'Phab import',
- phabricator_server_url: 'https://phabricator.example.com',
- api_token: 'hazaah',
- namespace_id: current_user.namespace_id }
- end
-
- it 'creates a project to import', :sidekiq_might_not_need_inline do
- expect_next_instance_of(Gitlab::PhabricatorImport::Importer) do |importer|
- expect(importer).to receive(:execute)
- end
-
- expect { post_create }.to change { current_user.namespace.projects.reload.size }.from(0).to(1)
-
- expect(current_user.namespace.projects.last).to be_import
- end
- end
-
- context 'when an import param is missing' do
- let(:params) do
- { path: 'phab-import',
- name: 'Phab import',
- phabricator_server_url: nil,
- api_token: 'hazaah',
- namespace_id: current_user.namespace_id }
- end
-
- it 'does not create the project' do
- expect { post_create }.not_to change { current_user.namespace.projects.reload.size }
- end
- end
-
- context 'when a project param is missing' do
- let(:params) do
- { phabricator_server_url: 'https://phabricator.example.com',
- api_token: 'hazaah',
- namespace_id: current_user.namespace_id }
- end
-
- it 'does not create the project' do
- expect { post_create }.not_to change { current_user.namespace.projects.reload.size }
- end
- end
-
- it_behaves_like 'project import rate limiter'
- end
-end
diff --git a/spec/features/projects/new_project_spec.rb b/spec/features/projects/new_project_spec.rb
index e9e8c0e2386..d8ecfca6237 100644
--- a/spec/features/projects/new_project_spec.rb
+++ b/spec/features/projects/new_project_spec.rb
@@ -126,8 +126,7 @@ RSpec.describe 'New project', :js, feature_category: :projects do
'gitlab.com': :status_import_gitlab_path,
'fogbugz': :new_import_fogbugz_path,
'gitea': :new_import_gitea_path,
- 'manifest': :new_import_manifest_path,
- 'phabricator': :new_import_phabricator_path
+ 'manifest': :new_import_manifest_path
}
end
diff --git a/spec/frontend/behaviors/components/json_table_spec.js b/spec/frontend/behaviors/components/json_table_spec.js
index a82310873ed..ae62d28d6c0 100644
--- a/spec/frontend/behaviors/components/json_table_spec.js
+++ b/spec/frontend/behaviors/components/json_table_spec.js
@@ -12,6 +12,7 @@ const TEST_FIELDS = [
label: 'Second',
sortable: true,
other: 'foo',
+ class: 'someClass',
},
{
key: 'C',
@@ -127,11 +128,13 @@ describe('behaviors/components/json_table', () => {
key: 'B',
label: 'Second',
sortable: true,
+ class: 'someClass',
},
{
key: 'C',
label: 'Third',
sortable: false,
+ class: [],
},
'D',
],
diff --git a/spec/frontend/notebook/cells/output/dataframe_spec.js b/spec/frontend/notebook/cells/output/dataframe_spec.js
index abf6631353c..bf90497a36b 100644
--- a/spec/frontend/notebook/cells/output/dataframe_spec.js
+++ b/spec/frontend/notebook/cells/output/dataframe_spec.js
@@ -35,16 +35,16 @@ describe('~/notebook/cells/output/DataframeOutput', () => {
it('sets the correct fields', () => {
expect(findTable().props().fields).toEqual([
- { key: 'index', label: '', sortable: true },
- { key: 'column_1', label: 'column_1', sortable: true },
- { key: 'column_2', label: 'column_2', sortable: true },
+ { key: 'index0', label: '', sortable: true, class: 'gl-font-weight-bold' },
+ { key: 'column0', label: 'column_1', sortable: true, class: '' },
+ { key: 'column1', label: 'column_2', sortable: true, class: '' },
]);
});
it('sets the correct items', () => {
expect(findTable().props().items).toEqual([
- { index: 0, column_1: 'abc de f', column_2: 'a' },
- { index: 1, column_1: 'True', column_2: '0.1' },
+ { index0: '0', column0: 'abc de f', column1: 'a' },
+ { index0: '1', column0: 'True', column1: '0.1' },
]);
});
});
diff --git a/spec/frontend/notebook/cells/output/dataframe_util_spec.js b/spec/frontend/notebook/cells/output/dataframe_util_spec.js
index ddc1b3cfe26..37dee5429e4 100644
--- a/spec/frontend/notebook/cells/output/dataframe_util_spec.js
+++ b/spec/frontend/notebook/cells/output/dataframe_util_spec.js
@@ -1,5 +1,5 @@
import { isDataframe, convertHtmlTableToJson } from '~/notebook/cells/output/dataframe_util';
-import { outputWithDataframeContent } from '../../mock_data';
+import { outputWithDataframeContent, outputWithMultiIndexDataFrame } from '../../mock_data';
import sanitizeTests from './html_sanitize_fixtures';
describe('notebook/cells/output/dataframe_utils', () => {
@@ -43,13 +43,33 @@ describe('notebook/cells/output/dataframe_utils', () => {
const output = {
fields: [
- { key: 'index', label: '', sortable: true },
- { key: 'column_1', label: 'column_1', sortable: true },
- { key: 'column_2', label: 'column_2', sortable: true },
+ { key: 'index0', label: '', sortable: true, class: 'gl-font-weight-bold' },
+ { key: 'column0', label: 'column_1', sortable: true, class: '' },
+ { key: 'column1', label: 'column_2', sortable: true, class: '' },
],
items: [
- { index: 0, column_1: 'abc de f', column_2: 'a' },
- { index: 1, column_1: 'True', column_2: '0.1' },
+ { index0: '0', column0: 'abc de f', column1: 'a' },
+ { index0: '1', column0: 'True', column1: '0.1' },
+ ],
+ };
+
+ expect(convertHtmlTableToJson(input)).toEqual(output);
+ });
+
+ it('converts multi-index table correctly', () => {
+ const input = outputWithMultiIndexDataFrame;
+
+ const output = {
+ fields: [
+ { key: 'index0', label: 'first', sortable: true, class: 'gl-font-weight-bold' },
+ { key: 'index1', label: 'second', sortable: true, class: 'gl-font-weight-bold' },
+ { key: 'column0', label: '0', sortable: true, class: '' },
+ ],
+ items: [
+ { index0: 'bar', index1: 'one', column0: '1' },
+ { index0: 'bar', index1: 'two', column0: '2' },
+ { index0: 'baz', index1: 'one', column0: '3' },
+ { index0: 'baz', index1: 'two', column0: '4' },
],
};
@@ -96,7 +116,7 @@ describe('notebook/cells/output/dataframe_utils', () => {
['svg', 3],
])('sanitizes output for: %p', (tag, index) => {
const inputHtml = makeDataframeWithHtml(sanitizeTests[index][1].input);
- const convertedHtml = convertHtmlTableToJson(inputHtml).items[0].column_1;
+ const convertedHtml = convertHtmlTableToJson(inputHtml).items[0].column0;
expect(convertedHtml).not.toContain(tag);
});
diff --git a/spec/frontend/notebook/mock_data.js b/spec/frontend/notebook/mock_data.js
index 15db2931b3c..9c63ad773b5 100644
--- a/spec/frontend/notebook/mock_data.js
+++ b/spec/frontend/notebook/mock_data.js
@@ -45,6 +45,58 @@ export const outputWithDataframeContent = [
'</div>',
];
+export const outputWithMultiIndexDataFrame = [
+ '<div>\n',
+ '<style scoped>\n',
+ ' .dataframe tbody tr th:only-of-type {\n',
+ ' vertical-align: middle;\n',
+ ' }\n',
+ '\n',
+ ' .dataframe tbody tr th {\n',
+ ' vertical-align: top;\n',
+ ' }\n',
+ '\n',
+ ' .dataframe thead th {\n',
+ ' text-align: right;\n',
+ ' }\n',
+ '</style>\n',
+ '<table border="1" class="dataframe">\n',
+ ' <thead>\n',
+ ' <tr style="text-align: right;">\n',
+ ' <th></th>\n',
+ ' <th></th>\n',
+ ' <th>0</th>\n',
+ ' </tr>\n',
+ ' <tr>\n',
+ ' <th>first</th>\n',
+ ' <th>second</th>\n',
+ ' <th></th>\n',
+ ' </tr>\n',
+ ' </thead>\n',
+ ' <tbody>\n',
+ ' <tr>\n',
+ ' <th rowspan="2" valign="top">bar</th>\n',
+ ' <th>one</th>\n',
+ ' <td>1</td>\n',
+ ' </tr>\n',
+ ' <tr>\n',
+ ' <th>two</th>\n',
+ ' <td>2</td>\n',
+ ' </tr>\n',
+ ' <tr>\n',
+ ' <th rowspan="2" valign="top">baz</th>\n',
+ ' <th>one</th>\n',
+ ' <td>3</td>\n',
+ ' </tr>\n',
+ ' <tr>\n',
+ ' <th>two</th>\n',
+ ' <td>4</td>\n',
+ ' </tr>\n',
+ ' </tbody>\n',
+ '</table>\n',
+ '</div>',
+];
+
export const outputWithDataframe = {
data: {
'text/html': outputWithDataframeContent,
diff --git a/spec/frontend/vue_shared/components/file_finder/index_spec.js b/spec/frontend/vue_shared/components/file_finder/index_spec.js
index d7569ed7b76..9708d689245 100644
--- a/spec/frontend/vue_shared/components/file_finder/index_spec.js
+++ b/spec/frontend/vue_shared/components/file_finder/index_spec.js
@@ -1,244 +1,215 @@
+import { GlLoadingIcon } from '@gitlab/ui';
import Mousetrap from 'mousetrap';
-import Vue, { nextTick } from 'vue';
-import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
+import { nextTick } from 'vue';
+import VirtualList from 'vue-virtual-scroll-list';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import { file } from 'jest/ide/helpers';
-import { UP_KEY_CODE, DOWN_KEY_CODE, ENTER_KEY_CODE, ESC_KEY_CODE } from '~/lib/utils/keycodes';
import FindFileComponent from '~/vue_shared/components/file_finder/index.vue';
+import FileFinderItem from '~/vue_shared/components/file_finder/item.vue';
+import { setHTMLFixture } from 'helpers/fixtures';
describe('File finder item spec', () => {
- const Component = Vue.extend(FindFileComponent);
- let vm;
+ let wrapper;
+
+ const TEST_FILES = [
+ {
+ ...file('index.js'),
+ path: 'index.js',
+ type: 'blob',
+ url: '/index.jsurl',
+ },
+ {
+ ...file('component.js'),
+ path: 'component.js',
+ type: 'blob',
+ },
+ ];
function createComponent(props) {
- vm = new Component({
+ wrapper = mountExtended(FindFileComponent, {
+ attachTo: document.body,
propsData: {
- files: [],
+ files: TEST_FILES,
visible: true,
loading: false,
...props,
},
});
-
- vm.$mount('#app');
}
- beforeEach(() => {
- setHTMLFixture('<div id="app"></div>');
- });
-
- afterEach(() => {
- resetHTMLFixture();
- });
-
- afterEach(() => {
- vm.$destroy();
- });
+ const findAllFileFinderItems = () => wrapper.findAllComponents(FileFinderItem);
+ const findSearchInput = () => wrapper.findByTestId('search-input');
+ const enterSearchText = (text) => findSearchInput().setValue(text);
+ const clearSearch = () => wrapper.findByTestId('clear-search-input').vm.$emit('click');
describe('with entries', () => {
beforeEach(() => {
createComponent({
- files: [
- {
- ...file('index.js'),
- path: 'index.js',
- type: 'blob',
- url: '/index.jsurl',
- },
- {
- ...file('component.js'),
- path: 'component.js',
- type: 'blob',
- },
- ],
+ files: TEST_FILES,
});
return nextTick();
});
it('renders list of blobs', () => {
- expect(vm.$el.textContent).toContain('index.js');
- expect(vm.$el.textContent).toContain('component.js');
- expect(vm.$el.textContent).not.toContain('folder');
+ expect(wrapper.text()).toContain('index.js');
+ expect(wrapper.text()).toContain('component.js');
+ expect(wrapper.text()).not.toContain('folder');
});
it('filters entries', async () => {
- vm.searchText = 'index';
-
- await nextTick();
+ await enterSearchText('index');
- expect(vm.$el.textContent).toContain('index.js');
- expect(vm.$el.textContent).not.toContain('component.js');
+ expect(wrapper.text()).toContain('index.js');
+ expect(wrapper.text()).not.toContain('component.js');
});
it('shows clear button when searchText is not empty', async () => {
- vm.searchText = 'index';
-
- await nextTick();
+ await enterSearchText('index');
- expect(vm.$el.querySelector('.dropdown-input').classList).toContain('has-value');
- expect(vm.$el.querySelector('.dropdown-input-search').classList).toContain('hidden');
+ expect(wrapper.find('.dropdown-input').classes()).toContain('has-value');
+ expect(wrapper.find('.dropdown-input-search').classes()).toContain('hidden');
});
- it('clear button resets searchText', () => {
- vm.searchText = 'index';
+ it('clear button resets searchText', async () => {
+ await enterSearchText('index');
+ expect(findSearchInput().element.value).toBe('index');
- vm.clearSearchInput();
+ await clearSearch();
- expect(vm.searchText).toBe('');
+ expect(findSearchInput().element.value).toBe('');
});
it('clear button focuses search input', async () => {
- jest.spyOn(vm.$refs.searchInput, 'focus').mockImplementation(() => {});
- vm.searchText = 'index';
+ expect(findSearchInput().element).not.toBe(document.activeElement);
- vm.clearSearchInput();
+ await enterSearchText('index');
+ await clearSearch();
- await nextTick();
-
- expect(vm.$refs.searchInput.focus).toHaveBeenCalled();
+ expect(findSearchInput().element).toBe(document.activeElement);
});
describe('listShowCount', () => {
- it('returns 1 when no filtered entries exist', () => {
- vm.searchText = 'testing 123';
+ it('returns 1 when no filtered entries exist', async () => {
+ await enterSearchText('testing 123');
- expect(vm.listShowCount).toBe(1);
+ expect(wrapper.findComponent(VirtualList).props('remain')).toBe(1);
});
it('returns entries length when not filtered', () => {
- expect(vm.listShowCount).toBe(2);
+ expect(wrapper.findComponent(VirtualList).props('remain')).toBe(2);
});
});
- describe('filteredBlobsLength', () => {
- it('returns length of filtered blobs', () => {
- vm.searchText = 'index';
+ describe('filtering', () => {
+ it('renders only items that match the filter', async () => {
+ await enterSearchText('index');
- expect(vm.filteredBlobsLength).toBe(1);
+ expect(findAllFileFinderItems()).toHaveLength(1);
});
});
describe('DOM Performance', () => {
it('renders less DOM nodes if not visible by utilizing v-if', async () => {
- vm.visible = false;
+ createComponent({ visible: false });
await nextTick();
- expect(vm.$el).toBeInstanceOf(Comment);
+ expect(wrapper.findByTestId('overlay').exists()).toBe(false);
});
});
describe('watches', () => {
describe('searchText', () => {
it('resets focusedIndex when updated', async () => {
- vm.focusedIndex = 1;
- vm.searchText = 'test';
-
+ await enterSearchText('index');
await nextTick();
- expect(vm.focusedIndex).toBe(0);
+ expect(findAllFileFinderItems().at(0).props('focused')).toBe(true);
});
});
describe('visible', () => {
it('resets searchText when changed to false', async () => {
- vm.searchText = 'test';
- vm.visible = false;
-
- await nextTick();
+ await enterSearchText('test');
+ await wrapper.setProps({ visible: false });
+ // need to set it back to true, so the component's content renders
+ await wrapper.setProps({ visible: true });
- expect(vm.searchText).toBe('');
+ expect(findSearchInput().element.value).toBe('');
});
});
});
describe('openFile', () => {
- beforeEach(() => {
- jest.spyOn(vm, '$emit').mockImplementation(() => {});
- });
-
it('closes file finder', () => {
- vm.openFile(vm.files[0]);
+ expect(wrapper.emitted('toggle')).toBeUndefined();
- expect(vm.$emit).toHaveBeenCalledWith('toggle', false);
+ findSearchInput().trigger('keyup.enter');
+
+ expect(wrapper.emitted('toggle')).toHaveLength(1);
});
it('pushes to router', () => {
- vm.openFile(vm.files[0]);
+ expect(wrapper.emitted('click')).toBeUndefined();
+
+ findSearchInput().trigger('keyup.enter');
- expect(vm.$emit).toHaveBeenCalledWith('click', vm.files[0]);
+ expect(wrapper.emitted('click')).toHaveLength(1);
});
});
describe('onKeyup', () => {
it('opens file on enter key', async () => {
- const event = new CustomEvent('keyup');
- event.keyCode = ENTER_KEY_CODE;
+ expect(wrapper.emitted('click')).toBeUndefined();
- jest.spyOn(vm, 'openFile').mockImplementation(() => {});
+ await findSearchInput().trigger('keyup.enter');
- vm.$refs.searchInput.dispatchEvent(event);
-
- await nextTick();
-
- expect(vm.openFile).toHaveBeenCalledWith(vm.files[0]);
+ expect(wrapper.emitted('click')[0][0]).toBe(TEST_FILES[0]);
});
it('closes file finder on esc key', async () => {
- const event = new CustomEvent('keyup');
- event.keyCode = ESC_KEY_CODE;
-
- jest.spyOn(vm, '$emit').mockImplementation(() => {});
-
- vm.$refs.searchInput.dispatchEvent(event);
+ expect(wrapper.emitted('toggle')).toBeUndefined();
- await nextTick();
+ await findSearchInput().trigger('keyup.esc');
- expect(vm.$emit).toHaveBeenCalledWith('toggle', false);
+ expect(wrapper.emitted('toggle')[0][0]).toBe(false);
});
});
describe('onKeyDown', () => {
- let el;
-
- beforeEach(() => {
- el = vm.$refs.searchInput;
- });
-
describe('up key', () => {
- const event = new CustomEvent('keydown');
- event.keyCode = UP_KEY_CODE;
+ it('resets to last index when at top', async () => {
+ expect(findAllFileFinderItems().at(0).props('focused')).toBe(true);
- it('resets to last index when at top', () => {
- el.dispatchEvent(event);
+ await findSearchInput().trigger('keydown.up');
- expect(vm.focusedIndex).toBe(1);
+ expect(findAllFileFinderItems().at(-1).props('focused')).toBe(true);
});
- it('minus 1 from focusedIndex', () => {
- vm.focusedIndex = 1;
-
- el.dispatchEvent(event);
+ it('minus 1 from focusedIndex', async () => {
+ await findSearchInput().trigger('keydown.up');
+ await findSearchInput().trigger('keydown.up');
- expect(vm.focusedIndex).toBe(0);
+ expect(findAllFileFinderItems().at(0).props('focused')).toBe(true);
});
});
describe('down key', () => {
- const event = new CustomEvent('keydown');
- event.keyCode = DOWN_KEY_CODE;
+ it('resets to first index when at bottom', async () => {
+ await findSearchInput().trigger('keydown.down');
+ expect(findAllFileFinderItems().at(-1).props('focused')).toBe(true);
- it('resets to first index when at bottom', () => {
- vm.focusedIndex = 1;
- el.dispatchEvent(event);
-
- expect(vm.focusedIndex).toBe(0);
+ await findSearchInput().trigger('keydown.down');
+ expect(findAllFileFinderItems().at(0).props('focused')).toBe(true);
});
- it('adds 1 to focusedIndex', () => {
- el.dispatchEvent(event);
+ it('adds 1 to focusedIndex', async () => {
+ expect(findAllFileFinderItems().at(0).props('focused')).toBe(true);
+
+ await findSearchInput().trigger('keydown.down');
- expect(vm.focusedIndex).toBe(1);
+ expect(findAllFileFinderItems().at(1).props('focused')).toBe(true);
});
});
});
@@ -246,46 +217,45 @@ describe('File finder item spec', () => {
describe('without entries', () => {
it('renders loading text when loading', () => {
- createComponent({ loading: true });
+ createComponent({ loading: true, files: [] });
- expect(vm.$el.querySelector('.gl-spinner')).not.toBe(null);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
it('renders no files text', () => {
- createComponent();
+ createComponent({ files: [] });
- expect(vm.$el.textContent).toContain('No files found.');
+ expect(wrapper.text()).toContain('No files found.');
});
});
describe('keyboard shortcuts', () => {
beforeEach(async () => {
createComponent();
-
- jest.spyOn(vm, 'toggle').mockImplementation(() => {});
-
await nextTick();
});
- it('calls toggle on `t` key press', async () => {
+ it('calls toggle on `t` key press', () => {
+ expect(wrapper.emitted('toggle')).toBeUndefined();
+
Mousetrap.trigger('t');
- await nextTick();
- expect(vm.toggle).toHaveBeenCalled();
+ expect(wrapper.emitted('toggle')).not.toBeUndefined();
});
- it('calls toggle on `mod+p` key press', async () => {
+ it('calls toggle on `mod+p` key press', () => {
+ expect(wrapper.emitted('toggle')).toBeUndefined();
+
Mousetrap.trigger('mod+p');
- await nextTick();
- expect(vm.toggle).toHaveBeenCalled();
+ expect(wrapper.emitted('toggle')).not.toBeUndefined();
});
it('always allows `mod+p` to trigger toggle', () => {
expect(
Mousetrap.prototype.stopCallback(
null,
- vm.$el.querySelector('.dropdown-input-field'),
+ wrapper.find('.dropdown-input-field').element,
'mod+p',
),
).toBe(false);
@@ -293,7 +263,7 @@ describe('File finder item spec', () => {
it('onlys handles `t` when focused in input-field', () => {
expect(
- Mousetrap.prototype.stopCallback(null, vm.$el.querySelector('.dropdown-input-field'), 't'),
+ Mousetrap.prototype.stopCallback(null, wrapper.find('.dropdown-input-field').element, 't'),
).toBe(true);
});
diff --git a/spec/lib/gitlab/auth/auth_finders_spec.rb b/spec/lib/gitlab/auth/auth_finders_spec.rb
index ecc5c688228..4498e369695 100644
--- a/spec/lib/gitlab/auth/auth_finders_spec.rb
+++ b/spec/lib/gitlab/auth/auth_finders_spec.rb
@@ -409,6 +409,17 @@ RSpec.describe Gitlab::Auth::AuthFinders, feature_category: :system_access do
expect(find_user_from_access_token).to be_nil
end
+ context 'when run for kubernetes internal API endpoint' do
+ before do
+ set_bearer_token('AgentToken')
+ set_header('SCRIPT_NAME', '/api/v4/internal/kubernetes/modules/starboard_vulnerability/policies_configuration')
+ end
+
+ it 'returns nil' do
+ expect(find_user_from_access_token).to be_nil
+ end
+ end
+
context 'when validate_access_token! returns valid' do
it 'returns user' do
set_header(described_class::PRIVATE_TOKEN_HEADER, personal_access_token.token)
diff --git a/spec/lib/gitlab/import_sources_spec.rb b/spec/lib/gitlab/import_sources_spec.rb
index 393e0a9be10..16d22dab5bb 100644
--- a/spec/lib/gitlab/import_sources_spec.rb
+++ b/spec/lib/gitlab/import_sources_spec.rb
@@ -15,8 +15,7 @@ RSpec.describe Gitlab::ImportSources do
'Repository by URL' => 'git',
'GitLab export' => 'gitlab_project',
'Gitea' => 'gitea',
- 'Manifest file' => 'manifest',
- 'Phabricator' => 'phabricator'
+ 'Manifest file' => 'manifest'
}
expect(described_class.options).to eq(expected)
@@ -36,7 +35,6 @@ RSpec.describe Gitlab::ImportSources do
gitlab_project
gitea
manifest
- phabricator
)
expect(described_class.values).to eq(expected)
@@ -54,7 +52,6 @@ RSpec.describe Gitlab::ImportSources do
fogbugz
gitlab_project
gitea
- phabricator
)
expect(described_class.importer_names).to eq(expected)
@@ -71,8 +68,7 @@ RSpec.describe Gitlab::ImportSources do
'git' => nil,
'gitlab_project' => Gitlab::ImportExport::Importer,
'gitea' => Gitlab::LegacyGithubImport::Importer,
- 'manifest' => nil,
- 'phabricator' => Gitlab::PhabricatorImport::Importer
+ 'manifest' => nil
}
import_sources.each do |name, klass|
@@ -92,8 +88,7 @@ RSpec.describe Gitlab::ImportSources do
'git' => 'Repository by URL',
'gitlab_project' => 'GitLab export',
'gitea' => 'Gitea',
- 'manifest' => 'Manifest file',
- 'phabricator' => 'Phabricator'
+ 'manifest' => 'Manifest file'
}
import_sources.each do |name, title|
@@ -104,7 +99,7 @@ RSpec.describe Gitlab::ImportSources do
end
describe 'imports_repository? checker' do
- let(:allowed_importers) { %w[github gitlab_project bitbucket_server phabricator] }
+ let(:allowed_importers) { %w[github gitlab_project bitbucket_server] }
it 'fails if any importer other than the allowed ones implements this method' do
current_importers = described_class.values.select { |kind| described_class.importer(kind).try(:imports_repository?) }
diff --git a/spec/lib/gitlab/phabricator_import/cache/map_spec.rb b/spec/lib/gitlab/phabricator_import/cache/map_spec.rb
deleted file mode 100644
index 157b3ca56c9..00000000000
--- a/spec/lib/gitlab/phabricator_import/cache/map_spec.rb
+++ /dev/null
@@ -1,85 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::PhabricatorImport::Cache::Map, :clean_gitlab_redis_cache do
- let_it_be(:project) { create(:project) }
-
- let(:redis) { Gitlab::Redis::Cache }
-
- subject(:map) { described_class.new(project) }
-
- describe '#get_gitlab_model' do
- it 'returns nil if there was nothing cached for the phabricator id' do
- expect(map.get_gitlab_model('does not exist')).to be_nil
- end
-
- it 'returns the object if it was set in redis' do
- issue = create(:issue, project: project)
- set_in_redis('exists', issue)
-
- expect(map.get_gitlab_model('exists')).to eq(issue)
- end
-
- it 'extends the TTL for the cache key' do
- set_in_redis('extend', create(:issue, project: project)) do |redis|
- redis.expire(cache_key('extend'), 10.seconds.to_i)
- end
-
- map.get_gitlab_model('extend')
-
- ttl = redis.with { |redis| redis.ttl(cache_key('extend')) }
-
- expect(ttl).to be > 10.seconds
- end
-
- it 'sets the object in redis once if a block was given and nothing was cached' do
- issue = create(:issue, project: project)
-
- expect(map.get_gitlab_model('does not exist') { issue }).to eq(issue)
-
- expect { |b| map.get_gitlab_model('does not exist', &b) }
- .not_to yield_control
- end
-
- it 'does not cache `nil` objects' do
- expect(map).not_to receive(:set_gitlab_model)
-
- map.get_gitlab_model('does not exist') { nil }
- end
- end
-
- describe '#set_gitlab_model' do
- around do |example|
- freeze_time { example.run }
- end
-
- it 'sets the class and id in redis with a ttl' do
- issue = create(:issue, project: project)
-
- map.set_gitlab_model(issue, 'it is set')
-
- set_data, ttl = redis.with do |redis|
- redis.pipelined do |p|
- p.mapped_hmget(cache_key('it is set'), :classname, :database_id)
- p.ttl(cache_key('it is set'))
- end
- end
-
- expect(set_data).to eq({ classname: 'Issue', database_id: issue.id.to_s })
- expect(ttl).to be_within(1.second).of(Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)
- end
- end
-
- def set_in_redis(key, object)
- redis.with do |redis|
- redis.mapped_hmset(cache_key(key),
- { classname: object.class, database_id: object.id })
- yield(redis) if block_given?
- end
- end
-
- def cache_key(phabricator_id)
- subject.__send__(:cache_key_for_phabricator_id, phabricator_id)
- end
-end
diff --git a/spec/lib/gitlab/phabricator_import/conduit/client_spec.rb b/spec/lib/gitlab/phabricator_import/conduit/client_spec.rb
deleted file mode 100644
index dad349f3255..00000000000
--- a/spec/lib/gitlab/phabricator_import/conduit/client_spec.rb
+++ /dev/null
@@ -1,59 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe Gitlab::PhabricatorImport::Conduit::Client do
- let(:client) do
- described_class.new('https://see-ya-later.phabricator', 'api-token')
- end
-
- describe '#get' do
- it 'performs and parses a request' do
- params = { some: 'extra', values: %w[are passed] }
- stub_valid_request(params)
-
- response = client.get('test', params: params)
-
- expect(response).to be_a(Gitlab::PhabricatorImport::Conduit::Response)
- expect(response).to be_success
- end
-
- it 'wraps request errors in an `ApiError`' do
- stub_timeout
-
- expect { client.get('test') }.to raise_error(Gitlab::PhabricatorImport::Conduit::ApiError)
- end
-
- it 'raises response error' do
- stub_error_response
-
- expect { client.get('test') }
- .to raise_error(Gitlab::PhabricatorImport::Conduit::ResponseError, /has the wrong length/)
- end
- end
-
- def stub_valid_request(params = {})
- WebMock.stub_request(
- :get, 'https://see-ya-later.phabricator/api/test'
- ).with(
- body: CGI.unescape(params.reverse_merge('api.token' => 'api-token').to_query)
- ).and_return(
- status: 200,
- body: fixture_file('phabricator_responses/maniphest.search.json')
- )
- end
-
- def stub_timeout
- WebMock.stub_request(
- :get, 'https://see-ya-later.phabricator/api/test'
- ).to_timeout
- end
-
- def stub_error_response
- WebMock.stub_request(
- :get, 'https://see-ya-later.phabricator/api/test'
- ).and_return(
- status: 200,
- body: fixture_file('phabricator_responses/auth_failed.json')
- )
- end
-end
diff --git a/spec/lib/gitlab/phabricator_import/conduit/maniphest_spec.rb b/spec/lib/gitlab/phabricator_import/conduit/maniphest_spec.rb
deleted file mode 100644
index e655a39a28d..00000000000
--- a/spec/lib/gitlab/phabricator_import/conduit/maniphest_spec.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe Gitlab::PhabricatorImport::Conduit::Maniphest do
- let(:maniphest) do
- described_class.new(phabricator_url: 'https://see-ya-later.phabricator', api_token: 'api-token')
- end
-
- describe '#tasks' do
- let(:fake_client) { double('Phabricator client') }
-
- before do
- allow(maniphest).to receive(:client).and_return(fake_client)
- end
-
- it 'calls the api with the correct params' do
- expected_params = {
- after: '123',
- attachments: {
- projects: 1, subscribers: 1, columns: 1
- }
- }
-
- expect(fake_client).to receive(:get).with('maniphest.search',
- params: expected_params)
-
- maniphest.tasks(after: '123')
- end
-
- it 'returns a parsed response' do
- response = Gitlab::PhabricatorImport::Conduit::Response
- .new(fixture_file('phabricator_responses/maniphest.search.json'))
-
- allow(fake_client).to receive(:get).and_return(response)
-
- expect(maniphest.tasks).to be_a(Gitlab::PhabricatorImport::Conduit::TasksResponse)
- end
- end
-end
diff --git a/spec/lib/gitlab/phabricator_import/conduit/response_spec.rb b/spec/lib/gitlab/phabricator_import/conduit/response_spec.rb
deleted file mode 100644
index a444e7fdf47..00000000000
--- a/spec/lib/gitlab/phabricator_import/conduit/response_spec.rb
+++ /dev/null
@@ -1,79 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe Gitlab::PhabricatorImport::Conduit::Response do
- let(:response) { described_class.new(Gitlab::Json.parse(fixture_file('phabricator_responses/maniphest.search.json'))) }
- let(:error_response) { described_class.new(Gitlab::Json.parse(fixture_file('phabricator_responses/auth_failed.json'))) }
-
- describe '.parse!' do
- it 'raises a ResponseError if the http response was not successfull' do
- fake_response = double(:http_response, success?: false, status: 401)
-
- expect { described_class.parse!(fake_response) }
- .to raise_error(Gitlab::PhabricatorImport::Conduit::ResponseError, /responded with 401/)
- end
-
- it 'raises a ResponseError if the response contained a Phabricator error' do
- fake_response = double(:http_response,
- success?: true,
- status: 200,
- body: fixture_file('phabricator_responses/auth_failed.json'))
-
- expect { described_class.parse!(fake_response) }
- .to raise_error(Gitlab::PhabricatorImport::Conduit::ResponseError, /ERR-INVALID-AUTH: API token/)
- end
-
- it 'raises a ResponseError if JSON parsing failed' do
- fake_response = double(:http_response,
- success?: true,
- status: 200,
- body: 'This is no JSON')
-
- expect { described_class.parse!(fake_response) }
- .to raise_error(Gitlab::PhabricatorImport::Conduit::ResponseError, /unexpected character/)
- end
-
- it 'returns a parsed response for valid input' do
- fake_response = double(:http_response,
- success?: true,
- status: 200,
- body: fixture_file('phabricator_responses/maniphest.search.json'))
-
- expect(described_class.parse!(fake_response)).to be_a(described_class)
- end
- end
-
- describe '#success?' do
- it { expect(response).to be_success }
- it { expect(error_response).not_to be_success }
- end
-
- describe '#error_code' do
- it { expect(error_response.error_code).to eq('ERR-INVALID-AUTH') }
- it { expect(response.error_code).to be_nil }
- end
-
- describe '#error_info' do
- it 'returns the correct error info' do
- expected_message = 'API token "api-token" has the wrong length. API tokens should be 32 characters long.'
-
- expect(error_response.error_info).to eq(expected_message)
- end
-
- it { expect(response.error_info).to be_nil }
- end
-
- describe '#data' do
- it { expect(error_response.data).to be_nil }
- it { expect(response.data).to be_an(Array) }
- end
-
- describe '#pagination' do
- it { expect(error_response.pagination).to be_nil }
-
- it 'builds the pagination correctly' do
- expect(response.pagination).to be_a(Gitlab::PhabricatorImport::Conduit::Pagination)
- expect(response.pagination.next_page).to eq('284')
- end
- end
-end
diff --git a/spec/lib/gitlab/phabricator_import/conduit/tasks_response_spec.rb b/spec/lib/gitlab/phabricator_import/conduit/tasks_response_spec.rb
deleted file mode 100644
index 4e56dead5c0..00000000000
--- a/spec/lib/gitlab/phabricator_import/conduit/tasks_response_spec.rb
+++ /dev/null
@@ -1,27 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe Gitlab::PhabricatorImport::Conduit::TasksResponse do
- let(:conduit_response) do
- Gitlab::PhabricatorImport::Conduit::Response
- .new(Gitlab::Json.parse(fixture_file('phabricator_responses/maniphest.search.json')))
- end
-
- subject(:response) { described_class.new(conduit_response) }
-
- describe '#pagination' do
- it 'delegates to the conduit reponse' do
- expect(response.pagination).to eq(conduit_response.pagination)
- end
- end
-
- describe '#tasks' do
- it 'builds the correct tasks representation' do
- tasks = response.tasks
-
- titles = tasks.map(&:issue_attributes).map { |attrs| attrs[:title] }
-
- expect(titles).to contain_exactly('Things are slow', 'Things are broken')
- end
- end
-end
diff --git a/spec/lib/gitlab/phabricator_import/conduit/user_spec.rb b/spec/lib/gitlab/phabricator_import/conduit/user_spec.rb
deleted file mode 100644
index d38421c9405..00000000000
--- a/spec/lib/gitlab/phabricator_import/conduit/user_spec.rb
+++ /dev/null
@@ -1,49 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe Gitlab::PhabricatorImport::Conduit::User do
- let(:user_client) do
- described_class.new(phabricator_url: 'https://see-ya-later.phabricator', api_token: 'api-token')
- end
-
- describe '#users' do
- let(:fake_client) { double('Phabricator client') }
-
- before do
- allow(user_client).to receive(:client).and_return(fake_client)
- end
-
- it 'calls the api with the correct params' do
- expected_params = {
- constraints: { phids: %w[phid-1 phid-2] }
- }
-
- expect(fake_client).to receive(:get).with('user.search',
- params: expected_params)
-
- user_client.users(%w[phid-1 phid-2])
- end
-
- it 'returns an array of parsed responses' do
- response = Gitlab::PhabricatorImport::Conduit::Response
- .new(fixture_file('phabricator_responses/user.search.json'))
-
- allow(fake_client).to receive(:get).and_return(response)
-
- expect(user_client.users(%w[some phids])).to match_array([an_instance_of(Gitlab::PhabricatorImport::Conduit::UsersResponse)])
- end
-
- it 'performs multiple requests if more phids than the maximum page size are passed' do
- stub_const('Gitlab::PhabricatorImport::Conduit::User::MAX_PAGE_SIZE', 1)
- first_params = { constraints: { phids: ['phid-1'] } }
- second_params = { constraints: { phids: ['phid-2'] } }
-
- expect(fake_client).to receive(:get).with('user.search',
- params: first_params).once
- expect(fake_client).to receive(:get).with('user.search',
- params: second_params).once
-
- user_client.users(%w[phid-1 phid-2])
- end
- end
-end
diff --git a/spec/lib/gitlab/phabricator_import/conduit/users_response_spec.rb b/spec/lib/gitlab/phabricator_import/conduit/users_response_spec.rb
deleted file mode 100644
index ebbb2c0598c..00000000000
--- a/spec/lib/gitlab/phabricator_import/conduit/users_response_spec.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe Gitlab::PhabricatorImport::Conduit::UsersResponse do
- let(:conduit_response) do
- Gitlab::PhabricatorImport::Conduit::Response
- .new(Gitlab::Json.parse(fixture_file('phabricator_responses/user.search.json')))
- end
-
- subject(:response) { described_class.new(conduit_response) }
-
- describe '#users' do
- it 'builds the correct users representation' do
- tasks = response.users
-
- usernames = tasks.map(&:username)
-
- expect(usernames).to contain_exactly('jane', 'john')
- end
- end
-end
diff --git a/spec/lib/gitlab/phabricator_import/importer_spec.rb b/spec/lib/gitlab/phabricator_import/importer_spec.rb
deleted file mode 100644
index e78024c35c1..00000000000
--- a/spec/lib/gitlab/phabricator_import/importer_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::PhabricatorImport::Importer do
- it { expect(described_class).to be_async }
-
- it "acts like it's importing repositories" do
- expect(described_class).to be_imports_repository
- end
-
- describe '#execute' do
- let(:project) { create(:project, :import_scheduled) }
-
- subject(:importer) { described_class.new(project) }
-
- it 'sets a custom jid that will be kept up to date' do
- expect { importer.execute }.to change { project.import_state.reload.jid }
- end
-
- it 'starts importing tasks' do
- expect(Gitlab::PhabricatorImport::ImportTasksWorker).to receive(:schedule).with(project.id)
-
- importer.execute
- end
-
- it 'marks the import as failed when something goes wrong' do
- allow(importer).to receive(:schedule_first_tasks_page).and_raise('Stuff is broken')
-
- importer.execute
-
- expect(project.import_state).to be_failed
- end
- end
-end
diff --git a/spec/lib/gitlab/phabricator_import/issues/importer_spec.rb b/spec/lib/gitlab/phabricator_import/issues/importer_spec.rb
deleted file mode 100644
index 63ba575aea3..00000000000
--- a/spec/lib/gitlab/phabricator_import/issues/importer_spec.rb
+++ /dev/null
@@ -1,61 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe Gitlab::PhabricatorImport::Issues::Importer do
- let(:project) { create(:project) }
-
- let(:response) do
- Gitlab::PhabricatorImport::Conduit::TasksResponse.new(
- Gitlab::PhabricatorImport::Conduit::Response
- .new(Gitlab::Json.parse(fixture_file('phabricator_responses/maniphest.search.json')))
- )
- end
-
- subject(:importer) { described_class.new(project, nil) }
-
- before do
- client = instance_double(Gitlab::PhabricatorImport::Conduit::Maniphest)
- allow(client).to receive(:tasks).and_return(response)
- allow(importer).to receive(:client).and_return(client)
- end
-
- describe '#execute' do
- it 'imports each task in the response' do
- response.tasks.each do |task|
- task_importer = instance_double(Gitlab::PhabricatorImport::Issues::TaskImporter)
-
- expect(task_importer).to receive(:execute)
- expect(Gitlab::PhabricatorImport::Issues::TaskImporter)
- .to receive(:new).with(project, task)
- .and_return(task_importer)
- end
-
- importer.execute
- end
-
- context 'stubbed task import' do
- before do
- # Stub out the actual importing so we don't perform aditional requests
- expect_next_instance_of(Gitlab::PhabricatorImport::Issues::TaskImporter) do |task_importer|
- allow(task_importer).to receive(:execute)
- end.at_least(1)
- end
-
- it 'schedules the next batch if there is one' do
- expect(Gitlab::PhabricatorImport::ImportTasksWorker)
- .to receive(:schedule).with(project.id, response.pagination.next_page)
-
- importer.execute
- end
-
- it 'does not reschedule when there is no next page' do
- allow(response.pagination).to receive(:has_next_page?).and_return(false)
-
- expect(Gitlab::PhabricatorImport::ImportTasksWorker)
- .not_to receive(:schedule)
-
- importer.execute
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/phabricator_import/issues/task_importer_spec.rb b/spec/lib/gitlab/phabricator_import/issues/task_importer_spec.rb
deleted file mode 100644
index 0539bacba44..00000000000
--- a/spec/lib/gitlab/phabricator_import/issues/task_importer_spec.rb
+++ /dev/null
@@ -1,83 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe Gitlab::PhabricatorImport::Issues::TaskImporter do
- let_it_be(:project) { create(:project) }
-
- let(:task) do
- Gitlab::PhabricatorImport::Representation::Task.new(
- {
- 'phid' => 'the-phid',
- 'fields' => {
- 'name' => 'Title',
- 'description' => {
- 'raw' => '# This is markdown\n it can contain more text.'
- },
- 'authorPHID' => 'PHID-USER-456',
- 'ownerPHID' => 'PHID-USER-123',
- 'dateCreated' => '1518688921',
- 'dateClosed' => '1518789995'
- }
- }
- )
- end
-
- subject(:importer) { described_class.new(project, task) }
-
- describe '#execute' do
- let(:fake_user_finder) { instance_double(Gitlab::PhabricatorImport::UserFinder) }
-
- before do
- allow(fake_user_finder).to receive(:find)
- allow(importer).to receive(:user_finder).and_return(fake_user_finder)
- end
-
- it 'creates the issue with the expected attributes' do
- issue = importer.execute
-
- expect(issue.project).to eq(project)
- expect(issue).to be_persisted
- expect(issue.author).to eq(User.ghost)
- expect(issue.title).to eq('Title')
- expect(issue.description).to eq('# This is markdown\n it can contain more text.')
- expect(issue).to be_closed
- expect(issue.created_at).to eq(Time.at(1518688921))
- expect(issue.closed_at).to eq(Time.at(1518789995))
- end
-
- it 'does not recreate the issue when called multiple times' do
- expect { importer.execute }
- .to change { project.issues.reload.size }.from(0).to(1)
- expect { importer.execute }
- .not_to change { project.issues.reload.size }
- end
-
- it 'does not trigger a save when the object did not change' do
- existing_issue = create(:issue,
- task.issue_attributes.merge(author: User.ghost))
- allow(importer).to receive(:issue).and_return(existing_issue)
-
- expect(existing_issue).not_to receive(:save!)
-
- importer.execute
- end
-
- it 'links the author if the author can be found' do
- author = create(:user)
- expect(fake_user_finder).to receive(:find).with('PHID-USER-456').and_return(author)
-
- issue = importer.execute
-
- expect(issue.author).to eq(author)
- end
-
- it 'links an assignee if the user can be found' do
- assignee = create(:user)
- expect(fake_user_finder).to receive(:find).with('PHID-USER-123').and_return(assignee)
-
- issue = importer.execute
-
- expect(issue.assignees).to include(assignee)
- end
- end
-end
diff --git a/spec/lib/gitlab/phabricator_import/project_creator_spec.rb b/spec/lib/gitlab/phabricator_import/project_creator_spec.rb
deleted file mode 100644
index 016aa0abe4d..00000000000
--- a/spec/lib/gitlab/phabricator_import/project_creator_spec.rb
+++ /dev/null
@@ -1,59 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe Gitlab::PhabricatorImport::ProjectCreator do
- let(:user) { create(:user) }
- let(:params) do
- { path: 'new-phab-import',
- phabricator_server_url: 'http://phab.example.com',
- api_token: 'the-token' }
- end
-
- subject(:creator) { described_class.new(user, params) }
-
- describe '#execute' do
- it 'creates a project correctly and schedule an import', :sidekiq_might_not_need_inline do
- expect_next_instance_of(Gitlab::PhabricatorImport::Importer) do |importer|
- expect(importer).to receive(:execute)
- end
-
- project = creator.execute
-
- expect(project).to be_persisted
- expect(project).to be_import
- expect(project.import_type).to eq('phabricator')
- expect(project.import_data.credentials).to match(a_hash_including(api_token: 'the-token'))
- expect(project.import_data.data).to match(a_hash_including('phabricator_url' => 'http://phab.example.com'))
- expect(project.import_url).to eq(Project::UNKNOWN_IMPORT_URL)
- expect(project.namespace).to eq(user.namespace)
- end
-
- context 'when import params are missing' do
- let(:params) do
- { path: 'new-phab-import',
- phabricator_server_url: 'http://phab.example.com',
- api_token: '' }
- end
-
- it 'returns nil' do
- expect(creator.execute).to be_nil
- end
- end
-
- context 'when import params are invalid' do
- let(:params) do
- { path: 'new-phab-import',
- namespace_id: '-1',
- phabricator_server_url: 'http://phab.example.com',
- api_token: 'the-token' }
- end
-
- it 'returns an unpersisted project' do
- project = creator.execute
-
- expect(project).not_to be_persisted
- expect(project).not_to be_valid
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/phabricator_import/representation/task_spec.rb b/spec/lib/gitlab/phabricator_import/representation/task_spec.rb
deleted file mode 100644
index 2b8570e4aff..00000000000
--- a/spec/lib/gitlab/phabricator_import/representation/task_spec.rb
+++ /dev/null
@@ -1,49 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-
-RSpec.describe Gitlab::PhabricatorImport::Representation::Task do
- subject(:task) do
- described_class.new(
- {
- 'phid' => 'the-phid',
- 'fields' => {
- 'name' => 'Title'.ljust(257, '.'), # A string padded to 257 chars
- 'authorPHID' => 'a phid',
- 'ownerPHID' => 'another user phid',
- 'description' => {
- 'raw' => '# This is markdown\n it can contain more text.'
- },
- 'dateCreated' => '1518688921',
- 'dateClosed' => '1518789995'
- }
- }
- )
- end
-
- describe '#issue_attributes' do
- it 'contains the expected values' do
- expected_attributes = {
- title: 'Title'.ljust(255, '.'),
- description: '# This is markdown\n it can contain more text.',
- state: :closed,
- created_at: Time.at(1518688921),
- closed_at: Time.at(1518789995)
- }
-
- expect(task.issue_attributes).to eq(expected_attributes)
- end
- end
-
- describe '#author_phid' do
- it 'returns the correct field' do
- expect(task.author_phid).to eq('a phid')
- end
- end
-
- describe '#owner_phid' do
- it 'returns the correct field' do
- expect(task.owner_phid).to eq('another user phid')
- end
- end
-end
diff --git a/spec/lib/gitlab/phabricator_import/representation/user_spec.rb b/spec/lib/gitlab/phabricator_import/representation/user_spec.rb
deleted file mode 100644
index 6df26b905cc..00000000000
--- a/spec/lib/gitlab/phabricator_import/representation/user_spec.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-
-RSpec.describe Gitlab::PhabricatorImport::Representation::User do
- subject(:user) do
- described_class.new(
- {
- 'phid' => 'the-phid',
- 'fields' => {
- 'username' => 'the-username'
- }
- }
- )
- end
-
- describe '#phabricator_id' do
- it 'returns the phabricator id' do
- expect(user.phabricator_id).to eq('the-phid')
- end
- end
-
- describe '#username' do
- it 'returns the username' do
- expect(user.username).to eq('the-username')
- end
- end
-end
diff --git a/spec/lib/gitlab/phabricator_import/user_finder_spec.rb b/spec/lib/gitlab/phabricator_import/user_finder_spec.rb
deleted file mode 100644
index 2ec2571b7fe..00000000000
--- a/spec/lib/gitlab/phabricator_import/user_finder_spec.rb
+++ /dev/null
@@ -1,93 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::PhabricatorImport::UserFinder, :clean_gitlab_redis_cache do
- let(:project) { create(:project, namespace: create(:group)) }
-
- subject(:finder) { described_class.new(project, %w[first-phid second-phid]) }
-
- before do
- project.namespace.add_developer(existing_user)
- end
-
- describe '#find' do
- let!(:existing_user) { create(:user, username: 'existing-user') }
- let(:cache) { Gitlab::PhabricatorImport::Cache::Map.new(project) }
-
- before do
- allow(finder).to receive(:object_map).and_return(cache)
- end
-
- context 'for a cached phid' do
- before do
- cache.set_gitlab_model(existing_user, 'first-phid')
- end
-
- it 'returns the existing user' do
- expect(finder.find('first-phid')).to eq(existing_user)
- end
-
- it 'does not perform a find using the API' do
- expect(finder).not_to receive(:find_user_for_phid)
-
- finder.find('first-phid')
- end
-
- it 'excludes the phid from the request if one needs to be made' do
- client = instance_double(Gitlab::PhabricatorImport::Conduit::User)
- allow(finder).to receive(:client).and_return(client)
-
- expect(client).to receive(:users).with(['second-phid']).and_return([])
-
- finder.find('first-phid')
- finder.find('second-phid')
- end
- end
-
- context 'when the phid is not cached' do
- let(:response) do
- [
- instance_double(
- Gitlab::PhabricatorImport::Conduit::UsersResponse,
- users: [instance_double(Gitlab::PhabricatorImport::Representation::User, phabricator_id: 'second-phid', username: 'existing-user')]
- ),
- instance_double(
- Gitlab::PhabricatorImport::Conduit::UsersResponse,
- users: [instance_double(Gitlab::PhabricatorImport::Representation::User, phabricator_id: 'first-phid', username: 'other-user')]
- )
- ]
- end
-
- let(:client) do
- client = instance_double(Gitlab::PhabricatorImport::Conduit::User)
- allow(client).to receive(:users).and_return(response)
-
- client
- end
-
- before do
- allow(finder).to receive(:client).and_return(client)
- end
-
- it 'loads the users from the API once' do
- expect(client).to receive(:users).and_return(response).once
-
- expect(finder.find('second-phid')).to eq(existing_user)
- expect(finder.find('first-phid')).to be_nil
- end
-
- it 'adds found users to the cache' do
- expect { finder.find('second-phid') }
- .to change { cache.get_gitlab_model('second-phid') }
- .from(nil).to(existing_user)
- end
-
- it 'only returns users that are members of the project' do
- create(:user, username: 'other-user')
-
- expect(finder.find('first-phid')).to eq(nil)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/phabricator_import/worker_state_spec.rb b/spec/lib/gitlab/phabricator_import/worker_state_spec.rb
deleted file mode 100644
index 4a07e28440f..00000000000
--- a/spec/lib/gitlab/phabricator_import/worker_state_spec.rb
+++ /dev/null
@@ -1,49 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::PhabricatorImport::WorkerState, :clean_gitlab_redis_shared_state do
- subject(:state) { described_class.new('weird-project-id') }
-
- let(:key) { 'phabricator-import/jobs/project-weird-project-id/job-count' }
-
- describe '#add_job' do
- it 'increments the counter for jobs' do
- set_value(3)
-
- expect { state.add_job }.to change { get_value }.from('3').to('4')
- end
- end
-
- describe '#remove_job' do
- it 'decrements the counter for jobs' do
- set_value(3)
-
- expect { state.remove_job }.to change { get_value }.from('3').to('2')
- end
- end
-
- describe '#running_count' do
- it 'reads the value' do
- set_value(9)
-
- expect(state.running_count).to eq(9)
- end
-
- it 'returns 0 when nothing was set' do
- expect(state.running_count).to eq(0)
- end
- end
-
- def set_value(value)
- redis.with { |r| r.set(key, value) }
- end
-
- def get_value
- redis.with { |r| r.get(key) }
- end
-
- def redis
- Gitlab::Redis::SharedState
- end
-end
diff --git a/spec/lib/gitlab/sidekiq_config/worker_router_spec.rb b/spec/lib/gitlab/sidekiq_config/worker_router_spec.rb
index 4a8dbe69d36..ef54cab5275 100644
--- a/spec/lib/gitlab/sidekiq_config/worker_router_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config/worker_router_spec.rb
@@ -21,7 +21,6 @@ RSpec.describe Gitlab::SidekiqConfig::WorkerRouter do
create_worker('PostReceive', :git) | 'git:post_receive'
create_worker('PipelineHooksWorker', :pipeline_hooks) | 'pipeline_hooks:pipeline_hooks'
create_worker('Gitlab::JiraImport::AdvanceStageWorker') | 'jira_import_advance_stage'
- create_worker('Gitlab::PhabricatorImport::ImportTasksWorker', :importer) | 'importer:phabricator_import_import_tasks'
end
with_them do
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index f2b332501be..7bf25af598c 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -263,7 +263,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
for_defined_days_back do
user = create(:user)
- %w(gitlab_project gitlab github bitbucket bitbucket_server gitea git manifest fogbugz phabricator).each do |type|
+ %w(gitlab_project gitlab github bitbucket bitbucket_server gitea git manifest fogbugz).each do |type|
create(:project, import_type: type, creator_id: user.id)
end
@@ -301,7 +301,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
issue_imports: {
jira: 2,
fogbugz: 2,
- phabricator: 2,
csv: 2
},
group_imports: {
@@ -330,7 +329,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
issue_imports: {
jira: 1,
fogbugz: 1,
- phabricator: 1,
csv: 1
},
group_imports: {
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 16a1ce9ccaa..d364e46d833 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -317,6 +317,17 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
end
end
+ context 'import_sources validation' do
+ before do
+ subject.import_sources = %w[github bitbucket gitlab git gitlab_project gitea manifest phabricator]
+ end
+
+ it 'removes phabricator as an import source' do
+ subject.validate
+ expect(subject.import_sources).to eq(%w[github bitbucket gitlab git gitlab_project gitea manifest])
+ end
+ end
+
context 'grafana_url validations' do
before do
subject.instance_variable_set(:@parsed_grafana_url, nil)
diff --git a/spec/models/namespace/root_storage_statistics_spec.rb b/spec/models/namespace/root_storage_statistics_spec.rb
index 14ac08b545a..c2a0c8c8a7c 100644
--- a/spec/models/namespace/root_storage_statistics_spec.rb
+++ b/spec/models/namespace/root_storage_statistics_spec.rb
@@ -22,15 +22,13 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model do
end
describe '#recalculate!' do
- let(:namespace) { create(:group) }
+ let_it_be(:namespace) { create(:group) }
+
let(:root_storage_statistics) { create(:namespace_root_storage_statistics, namespace: namespace) }
let(:project1) { create(:project, namespace: namespace) }
let(:project2) { create(:project, namespace: namespace) }
- let!(:project_stat1) { create(:project_statistics, project: project1, with_data: true, size_multiplier: 100) }
- let!(:project_stat2) { create(:project_statistics, project: project2, with_data: true, size_multiplier: 200) }
-
shared_examples 'project data refresh' do
it 'aggregates project statistics' do
root_storage_statistics.recalculate!
@@ -96,8 +94,13 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model do
end
end
- it_behaves_like 'project data refresh'
- it_behaves_like 'does not include personal snippets'
+ context 'with project statistics' do
+ let!(:project_stat1) { create(:project_statistics, project: project1, with_data: true, size_multiplier: 100) }
+ let!(:project_stat2) { create(:project_statistics, project: project2, with_data: true, size_multiplier: 200) }
+
+ it_behaves_like 'project data refresh'
+ it_behaves_like 'does not include personal snippets'
+ end
context 'with subgroups' do
let(:subgroup1) { create(:group, parent: namespace) }
@@ -106,6 +109,9 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model do
let(:project1) { create(:project, namespace: subgroup1) }
let(:project2) { create(:project, namespace: subgroup2) }
+ let!(:project_stat1) { create(:project_statistics, project: project1, with_data: true, size_multiplier: 100) }
+ let!(:project_stat2) { create(:project_statistics, project: project2, with_data: true, size_multiplier: 200) }
+
it_behaves_like 'project data refresh'
it_behaves_like 'does not include personal snippets'
end
@@ -122,6 +128,9 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model do
let(:namespace) { root_group }
+ let!(:project_stat1) { create(:project_statistics, project: project1, with_data: true, size_multiplier: 100) }
+ let!(:project_stat2) { create(:project_statistics, project: project2, with_data: true, size_multiplier: 200) }
+
it 'aggregates namespace statistics' do
# This group is not a descendant of the root_group so it shouldn't be included in the final stats.
other_group = create(:group)
@@ -168,6 +177,9 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model do
let(:namespace) { user.namespace }
+ let!(:project_stat1) { create(:project_statistics, project: project1, with_data: true, size_multiplier: 100) }
+ let!(:project_stat2) { create(:project_statistics, project: project2, with_data: true, size_multiplier: 200) }
+
it_behaves_like 'project data refresh'
it 'does not aggregate namespace statistics' do
@@ -210,5 +222,143 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model do
end
end
end
+
+ context 'with forks of projects' do
+ it 'aggregates total private forks size' do
+ project = create_project(visibility_level: :private, size_multiplier: 150)
+ project_fork = create_fork(project, size_multiplier: 100)
+
+ root_storage_statistics.recalculate!
+
+ expect(root_storage_statistics.reload.private_forks_storage_size).to eq(project_fork.statistics.storage_size)
+ end
+
+ it 'aggregates total public forks size' do
+ project = create_project(visibility_level: :public, size_multiplier: 250)
+ project_fork = create_fork(project, size_multiplier: 200)
+
+ root_storage_statistics.recalculate!
+
+ expect(root_storage_statistics.reload.public_forks_storage_size).to eq(project_fork.statistics.storage_size)
+ end
+
+ it 'aggregates total internal forks size' do
+ project = create_project(visibility_level: :internal, size_multiplier: 70)
+ project_fork = create_fork(project, size_multiplier: 50)
+
+ root_storage_statistics.recalculate!
+
+ expect(root_storage_statistics.reload.internal_forks_storage_size).to eq(project_fork.statistics.storage_size)
+ end
+
+ it 'aggregates multiple forks' do
+ project = create_project(size_multiplier: 175)
+ fork_a = create_fork(project, size_multiplier: 50)
+ fork_b = create_fork(project, size_multiplier: 60)
+
+ root_storage_statistics.recalculate!
+
+ total_size = fork_a.statistics.storage_size + fork_b.statistics.storage_size
+ expect(root_storage_statistics.reload.private_forks_storage_size).to eq(total_size)
+ end
+
+ it 'aggregates only forks in the namespace' do
+ other_namespace = create(:group)
+ project = create_project(size_multiplier: 175)
+ fork_a = create_fork(project, size_multiplier: 50)
+ create_fork(project, size_multiplier: 50, namespace: other_namespace)
+
+ root_storage_statistics.recalculate!
+
+ expect(root_storage_statistics.reload.private_forks_storage_size).to eq(fork_a.statistics.storage_size)
+ end
+
+ it 'aggregates forks in subgroups' do
+ subgroup = create(:group, parent: namespace)
+ project = create_project(size_multiplier: 100)
+ project_fork = create_fork(project, namespace: subgroup, size_multiplier: 300)
+
+ root_storage_statistics.recalculate!
+
+ expect(root_storage_statistics.reload.private_forks_storage_size).to eq(project_fork.statistics.storage_size)
+ end
+
+ it 'aggregates forks along with total storage size' do
+ project = create_project(size_multiplier: 240)
+ project_fork = create_fork(project, size_multiplier: 100)
+
+ root_storage_statistics.recalculate!
+
+ root_storage_statistics.reload
+ expect(root_storage_statistics.private_forks_storage_size).to eq(project_fork.statistics.storage_size)
+ expect(root_storage_statistics.storage_size).to eq(project.statistics.storage_size + project_fork.statistics.storage_size)
+ end
+
+ it 'sets the public forks storage size back to zero' do
+ root_storage_statistics.update!(public_forks_storage_size: 200)
+
+ root_storage_statistics.recalculate!
+
+ expect(root_storage_statistics.reload.public_forks_storage_size).to eq(0)
+ end
+
+ it 'sets the private forks storage size back to zero' do
+ root_storage_statistics.update!(private_forks_storage_size: 100)
+
+ root_storage_statistics.recalculate!
+
+ expect(root_storage_statistics.reload.private_forks_storage_size).to eq(0)
+ end
+
+ it 'sets the internal forks storage size back to zero' do
+ root_storage_statistics.update!(internal_forks_storage_size: 50)
+
+ root_storage_statistics.recalculate!
+
+ expect(root_storage_statistics.reload.internal_forks_storage_size).to eq(0)
+ end
+
+ context 'when the feature flag is off' do
+ before do
+ stub_feature_flags(root_storage_statistics_calculate_forks: false)
+ end
+
+ it 'does not aggregate fork storage sizes' do
+ project = create_project(size_multiplier: 150)
+ create_fork(project, size_multiplier: 100)
+
+ root_storage_statistics.recalculate!
+
+ expect(root_storage_statistics.reload.private_forks_storage_size).to eq(0)
+ end
+
+ it 'aggregates fork sizes for enabled namespaces' do
+ stub_feature_flags(root_storage_statistics_calculate_forks: namespace)
+ project = create_project(size_multiplier: 150)
+ project_fork = create_fork(project, size_multiplier: 100)
+
+ root_storage_statistics.recalculate!
+
+ expect(root_storage_statistics.reload.private_forks_storage_size).to eq(project_fork.statistics.storage_size)
+ end
+ end
+ end
+ end
+
+ def create_project(size_multiplier:, visibility_level: :private)
+ project = create(:project, visibility_level, namespace: namespace)
+ create(:project_statistics, project: project, with_data: true, size_multiplier: size_multiplier)
+
+ project
+ end
+
+ def create_fork(project, size_multiplier:, namespace: nil)
+ fork_namespace = namespace || project.namespace
+ project_fork = create(:project, namespace: fork_namespace, visibility_level: project.visibility_level)
+ create(:project_statistics, project: project_fork, with_data: true, size_multiplier: size_multiplier)
+ fork_network = project.fork_network || (create(:fork_network, root_project: project) && project.reload.fork_network)
+ create(:fork_network_member, project: project_fork, fork_network: fork_network)
+
+ project_fork
end
end
diff --git a/spec/requests/api/internal/kubernetes_spec.rb b/spec/requests/api/internal/kubernetes_spec.rb
index 6d7f2657e1e..4651d8c6f10 100644
--- a/spec/requests/api/internal/kubernetes_spec.rb
+++ b/spec/requests/api/internal/kubernetes_spec.rb
@@ -59,12 +59,29 @@ RSpec.describe API::Internal::Kubernetes, feature_category: :deployment_manageme
end
end
+ shared_examples 'error handling' do
+ let!(:agent_token) { create(:cluster_agent_token) }
+
+ # This test verifies the fix for an issue where an AgentToken passed in the
+ # Authorization header broke error handling in api_helpers.rb. It can be
+ # removed after https://gitlab.com/gitlab-org/gitlab/-/issues/406582 is done.
+ it 'returns correct error for the endpoint' do
+ allow(Gitlab::Kas).to receive(:verify_api_request).and_raise(StandardError.new('Unexpected Error'))
+
+ send_request(headers: { 'Authorization' => "Bearer #{agent_token.token}" })
+
+ expect(response).to have_gitlab_http_status(:internal_server_error)
+ expect(response.body).to include("Unexpected Error")
+ end
+ end
+
describe 'POST /internal/kubernetes/usage_metrics', :clean_gitlab_redis_shared_state do
def send_request(headers: {}, params: {})
post api('/internal/kubernetes/usage_metrics'), params: params, headers: headers.reverse_merge(jwt_auth_headers)
end
include_examples 'authorization'
+ include_examples 'error handling'
context 'is authenticated for an agent' do
let!(:agent_token) { create(:cluster_agent_token) }
@@ -160,6 +177,7 @@ RSpec.describe API::Internal::Kubernetes, feature_category: :deployment_manageme
end
include_examples 'authorization'
+ include_examples 'error handling'
context 'agent exists' do
it 'configures the agent and returns a 204' do
@@ -189,6 +207,7 @@ RSpec.describe API::Internal::Kubernetes, feature_category: :deployment_manageme
include_examples 'authorization'
include_examples 'agent authentication'
+ include_examples 'error handling'
context 'an agent is found' do
let!(:agent_token) { create(:cluster_agent_token) }
@@ -233,6 +252,7 @@ RSpec.describe API::Internal::Kubernetes, feature_category: :deployment_manageme
include_examples 'authorization'
include_examples 'agent authentication'
+ include_examples 'error handling'
context 'an agent is found' do
let_it_be(:agent_token) { create(:cluster_agent_token) }
diff --git a/spec/routing/import_routing_spec.rb b/spec/routing/import_routing_spec.rb
index 4d6090d6027..cdc266437e8 100644
--- a/spec/routing/import_routing_spec.rb
+++ b/spec/routing/import_routing_spec.rb
@@ -174,18 +174,6 @@ RSpec.describe Import::GitlabProjectsController, 'routing' do
end
end
-# new_import_phabricator GET /import/phabricator/new(.:format) import/phabricator#new
-# import_phabricator POST /import/phabricator(.:format) import/phabricator#create
-RSpec.describe Import::PhabricatorController, 'routing' do
- it 'to #create' do
- expect(post("/import/phabricator")).to route_to("import/phabricator#create")
- end
-
- it 'to #new' do
- expect(get("/import/phabricator/new")).to route_to("import/phabricator#new")
- end
-end
-
# status_import_github_group GET /import/github_group/status(.:format) import/github_groups#status
RSpec.describe Import::GithubGroupsController, 'routing' do
it 'to #status' do
diff --git a/spec/support/rspec_order_todo.yml b/spec/support/rspec_order_todo.yml
index a6b069bc93b..970a2588d0f 100644
--- a/spec/support/rspec_order_todo.yml
+++ b/spec/support/rspec_order_todo.yml
@@ -3399,7 +3399,6 @@
- './spec/controllers/import/github_controller_spec.rb'
- './spec/controllers/import/gitlab_controller_spec.rb'
- './spec/controllers/import/manifest_controller_spec.rb'
-- './spec/controllers/import/phabricator_controller_spec.rb'
- './spec/controllers/invites_controller_spec.rb'
- './spec/controllers/jira_connect/app_descriptor_controller_spec.rb'
- './spec/controllers/jira_connect/branches_controller_spec.rb'
@@ -6895,21 +6894,6 @@
- './spec/lib/gitlab/performance_bar_spec.rb'
- './spec/lib/gitlab/performance_bar/stats_spec.rb'
- './spec/lib/gitlab/performance_bar/with_top_level_warnings_spec.rb'
-- './spec/lib/gitlab/phabricator_import/cache/map_spec.rb'
-- './spec/lib/gitlab/phabricator_import/conduit/client_spec.rb'
-- './spec/lib/gitlab/phabricator_import/conduit/maniphest_spec.rb'
-- './spec/lib/gitlab/phabricator_import/conduit/response_spec.rb'
-- './spec/lib/gitlab/phabricator_import/conduit/tasks_response_spec.rb'
-- './spec/lib/gitlab/phabricator_import/conduit/user_spec.rb'
-- './spec/lib/gitlab/phabricator_import/conduit/users_response_spec.rb'
-- './spec/lib/gitlab/phabricator_import/importer_spec.rb'
-- './spec/lib/gitlab/phabricator_import/issues/importer_spec.rb'
-- './spec/lib/gitlab/phabricator_import/issues/task_importer_spec.rb'
-- './spec/lib/gitlab/phabricator_import/project_creator_spec.rb'
-- './spec/lib/gitlab/phabricator_import/representation/task_spec.rb'
-- './spec/lib/gitlab/phabricator_import/representation/user_spec.rb'
-- './spec/lib/gitlab/phabricator_import/user_finder_spec.rb'
-- './spec/lib/gitlab/phabricator_import/worker_state_spec.rb'
- './spec/lib/gitlab/pipeline_scope_counts_spec.rb'
- './spec/lib/gitlab/polling_interval_spec.rb'
- './spec/lib/gitlab/popen/runner_spec.rb'
@@ -10165,8 +10149,6 @@
- './spec/workers/gitlab/jira_import/stage/start_import_worker_spec.rb'
- './spec/workers/gitlab/jira_import/stuck_jira_import_jobs_worker_spec.rb'
- './spec/workers/gitlab_performance_bar_stats_worker_spec.rb'
-- './spec/workers/gitlab/phabricator_import/base_worker_spec.rb'
-- './spec/workers/gitlab/phabricator_import/import_tasks_worker_spec.rb'
- './spec/workers/gitlab_service_ping_worker_spec.rb'
- './spec/workers/gitlab_shell_worker_spec.rb'
- './spec/workers/google_cloud/create_cloudsql_instance_worker_spec.rb'
diff --git a/spec/tooling/lib/tooling/gettext_extractor_spec.rb b/spec/tooling/lib/tooling/gettext_extractor_spec.rb
index 47a808f12df..3c0f91342c2 100644
--- a/spec/tooling/lib/tooling/gettext_extractor_spec.rb
+++ b/spec/tooling/lib/tooling/gettext_extractor_spec.rb
@@ -49,8 +49,10 @@ RSpec.describe Tooling::GettextExtractor, feature_category: :tooling do
end
describe '::HamlParser' do
- it 'overwrites libraries in order to prefer hamlit' do
- expect(described_class::HamlParser.libraries).to match_array(['hamlit'])
+ # Testing with a non-externalized string, as the functionality
+ # is properly tested later on
+ it '#parse_source' do
+ expect(described_class::HamlParser.new(files[:haml_file]).parse_source('%h1= "Test"')).to match_array([])
end
end
@@ -188,7 +190,8 @@ RSpec.describe Tooling::GettextExtractor, feature_category: :tooling do
'All2' => nil,
'Context|A' => nil,
'RB' => nil, 'Apple' => 'Apples'
- }
+ },
+ parser: GetText::RubyParser
},
'with haml file' => {
invalid_syntax: " %a\n- content = _('HAML')",
@@ -199,7 +202,8 @@ RSpec.describe Tooling::GettextExtractor, feature_category: :tooling do
'Context|C' => nil,
'HAML' => nil,
'Cabbage' => 'Cabbages'
- }
+ },
+ parser: described_class::HamlParser
},
'with erb file' => {
invalid_syntax: "<% x = {id: _('ERB') %>",
@@ -210,7 +214,8 @@ RSpec.describe Tooling::GettextExtractor, feature_category: :tooling do
'Context|B' => nil,
'ERB' => nil,
'Pear' => 'Pears'
- }
+ },
+ parser: GetText::ErbParser
}
}
end
@@ -219,9 +224,14 @@ RSpec.describe Tooling::GettextExtractor, feature_category: :tooling do
let(:curr_file) { files[file] }
context 'when file has valid syntax' do
+ before do
+ allow(parser).to receive(:new).and_call_original
+ end
+
it 'parses file and returns extracted strings as POEntries' do
expect(subject.map(&:class).uniq).to match_array([GetText::POEntry])
expect(subject.to_h { |entry| [entry.msgid, entry.msgid_plural] }).to eq(result)
+ expect(parser).to have_received(:new)
end
end
@@ -237,13 +247,25 @@ RSpec.describe Tooling::GettextExtractor, feature_category: :tooling do
expect { subject }.not_to raise_error
end
end
+
+ context 'when file does not contain "_("' do
+ before do
+ allow(parser).to receive(:new).and_call_original
+ File.write(curr_file, '"abcdef"')
+ end
+
+ it 'never parses the file and returns empty array' do
+ expect(subject).to match_array([])
+ expect(parser).not_to have_received(:new)
+ end
+ end
end
- context 'with unsupported file' do
+ context 'with unsupported file containing "_("' do
let(:curr_file) { File.join(base_dir, 'foo.unsupported') }
before do
- File.write(curr_file, '')
+ File.write(curr_file, '_("Test")')
end
it 'raises error' do
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 0f204aeeb4b..0a4dd38de13 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -304,7 +304,6 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'Gitlab::JiraImport::Stage::ImportLabelsWorker' => 5,
'Gitlab::JiraImport::Stage::ImportNotesWorker' => 5,
'Gitlab::JiraImport::Stage::StartImportWorker' => 5,
- 'Gitlab::PhabricatorImport::ImportTasksWorker' => 5,
'GitlabPerformanceBarStatsWorker' => 3,
'GitlabSubscriptions::RefreshSeatsWorker' => 0,
'GitlabShellWorker' => 3,
diff --git a/spec/workers/gitlab/phabricator_import/base_worker_spec.rb b/spec/workers/gitlab/phabricator_import/base_worker_spec.rb
deleted file mode 100644
index 9adba391d0f..00000000000
--- a/spec/workers/gitlab/phabricator_import/base_worker_spec.rb
+++ /dev/null
@@ -1,74 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe Gitlab::PhabricatorImport::BaseWorker, feature_category: :importers do
- let(:subclass) do
- # Creating an anonymous class for a worker is complicated, as we generate the
- # queue name from the class name.
- Gitlab::PhabricatorImport::ImportTasksWorker
- end
-
- describe '.schedule' do
- let(:arguments) { %w[project_id the_next_page] }
-
- it 'schedules the job' do
- expect(subclass).to receive(:perform_async).with(*arguments)
-
- subclass.schedule(*arguments)
- end
-
- it 'counts the scheduled job', :clean_gitlab_redis_shared_state do
- state = Gitlab::PhabricatorImport::WorkerState.new('project_id')
-
- allow(subclass).to receive(:remove_job) # otherwise the job is removed before we saw it
-
- expect { subclass.schedule(*arguments) }.to change { state.running_count }.by(1)
- end
- end
-
- describe '#perform' do
- let(:project) { create(:project, :import_started, import_url: "https://a.phab.instance") }
- let(:worker) { subclass.new }
- let(:state) { Gitlab::PhabricatorImport::WorkerState.new(project.id) }
-
- before do
- allow(worker).to receive(:import)
- end
-
- it 'does not break for a non-existing project' do
- expect { worker.perform('not a thing') }.not_to raise_error
- end
-
- it 'does not do anything when the import is not in progress' do
- project = create(:project, :import_failed)
-
- expect(worker).not_to receive(:import)
-
- worker.perform(project.id)
- end
-
- it 'calls import for the project' do
- expect(worker).to receive(:import).with(project, 'other_arg')
-
- worker.perform(project.id, 'other_arg')
- end
-
- it 'marks the project as imported if there was only one job running' do
- worker.perform(project.id)
-
- expect(project.import_state.reload).to be_finished
- end
-
- it 'does not mark the job as finished when there are more scheduled jobs' do
- 2.times { state.add_job }
-
- worker.perform(project.id)
-
- expect(project.import_state.reload).to be_in_progress
- end
-
- it 'decrements the job counter' do
- expect { worker.perform(project.id) }.to change { state.running_count }.by(-1)
- end
- end
-end
diff --git a/spec/workers/gitlab/phabricator_import/import_tasks_worker_spec.rb b/spec/workers/gitlab/phabricator_import/import_tasks_worker_spec.rb
deleted file mode 100644
index 6c1da983640..00000000000
--- a/spec/workers/gitlab/phabricator_import/import_tasks_worker_spec.rb
+++ /dev/null
@@ -1,16 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe Gitlab::PhabricatorImport::ImportTasksWorker, feature_category: :importers do
- describe '#perform' do
- it 'calls the correct importer' do
- project = create(:project, :import_started, import_url: "https://the.phab.ulr")
- fake_importer = instance_double(Gitlab::PhabricatorImport::Issues::Importer)
-
- expect(Gitlab::PhabricatorImport::Issues::Importer).to receive(:new).with(project).and_return(fake_importer)
- expect(fake_importer).to receive(:execute)
-
- described_class.new.perform(project.id)
- end
- end
-end