Diffstat (limited to 'spec')
-rw-r--r--  spec/config/object_store_settings_spec.rb | 29
-rw-r--r--  spec/controllers/projects/wikis_controller_spec.rb | 37
-rw-r--r--  spec/controllers/users_controller_spec.rb | 64
-rw-r--r--  spec/features/issues/update_issues_spec.rb | 4
-rw-r--r--  spec/features/merge_requests/user_mass_updates_spec.rb | 4
-rw-r--r--  spec/features/projects/commits/user_browses_commits_spec.rb | 1
-rw-r--r--  spec/features/signed_commits_spec.rb | 10
-rw-r--r--  spec/features/users/show_spec.rb | 56
-rw-r--r--  spec/finders/user_recent_events_finder_spec.rb | 15
-rw-r--r--  spec/fixtures/api/schemas/pipeline.json | 4
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/pipeline/basic.json | 6
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/projects.json | 15
-rw-r--r--  spec/helpers/button_helper_spec.rb | 6
-rw-r--r--  spec/helpers/users_helper_spec.rb | 16
-rw-r--r--  spec/helpers/visibility_level_helper_spec.rb | 23
-rw-r--r--  spec/javascripts/diffs/components/diff_line_gutter_content_spec.js | 6
-rw-r--r--  spec/javascripts/gpg_badges_spec.js | 76
-rw-r--r--  spec/javascripts/issue_show/components/app_spec.js | 4
-rw-r--r--  spec/javascripts/issue_show/components/edited_spec.js | 6
-rw-r--r--  spec/javascripts/lib/utils/common_utils_spec.js | 19
-rw-r--r--  spec/javascripts/reports/store/actions_spec.js | 130
-rw-r--r--  spec/javascripts/reports/store/mutations_spec.js | 101
-rw-r--r--  spec/javascripts/sidebar/assignees_spec.js | 8
-rw-r--r--  spec/javascripts/vue_shared/components/clipboard_button_spec.js | 2
-rw-r--r--  spec/javascripts/vue_shared/components/stacked_progress_bar_spec.js | 14
-rw-r--r--  spec/lib/banzai/filter/image_lazy_load_filter_spec.rb | 14
-rw-r--r--  spec/lib/banzai/filter/sanitization_filter_spec.rb | 12
-rw-r--r--  spec/lib/feature_spec.rb | 32
-rw-r--r--  spec/lib/gitlab/background_migration/delete_diff_files_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/background_migration/schedule_diff_files_deletion_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database_spec.rb | 86
-rw-r--r--  spec/lib/gitlab/git/wiki_spec.rb | 25
-rw-r--r--  spec/lib/gitlab/gitaly_client/wiki_service_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/gitlab_import/client_spec.rb | 84
-rw-r--r--  spec/lib/gitlab/hashed_storage/migrator_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/import_export/all_models.yml | 1
-rw-r--r--  spec/lib/gitlab/usage_data_spec.rb | 2
-rw-r--r--  spec/migrations/active_record/schedule_set_confidential_note_events_on_services_spec.rb | 2
-rw-r--r--  spec/migrations/migrate_stage_id_reference_in_background_spec.rb | 2
-rw-r--r--  spec/migrations/migrate_stages_statuses_spec.rb | 2
-rw-r--r--  spec/migrations/normalize_ldap_extern_uids_spec.rb | 4
-rw-r--r--  spec/migrations/schedule_create_gpg_key_subkeys_from_gpg_keys_spec.rb | 2
-rw-r--r--  spec/migrations/schedule_merge_request_diff_migrations_spec.rb | 2
-rw-r--r--  spec/migrations/schedule_merge_request_diff_migrations_take_two_spec.rb | 2
-rw-r--r--  spec/migrations/schedule_merge_request_latest_merge_request_diff_id_migrations_spec.rb | 2
-rw-r--r--  spec/migrations/schedule_set_confidential_note_events_on_webhooks_spec.rb | 2
-rw-r--r--  spec/models/ci/build_trace_chunk_spec.rb | 4
-rw-r--r--  spec/models/deploy_token_spec.rb | 9
-rw-r--r--  spec/models/project_services/hangouts_chat_service_spec.rb | 246
-rw-r--r--  spec/models/project_spec.rb | 1
-rw-r--r--  spec/models/spam_log_spec.rb | 2
-rw-r--r--  spec/requests/api/environments_spec.rb | 2
-rw-r--r--  spec/requests/api/groups_spec.rb | 19
-rw-r--r--  spec/requests/api/jobs_spec.rb | 1
-rw-r--r--  spec/requests/api/pipelines_spec.rb | 3
-rw-r--r--  spec/requests/api/project_import_spec.rb | 2
-rw-r--r--  spec/requests/api/projects_spec.rb | 32
-rw-r--r--  spec/requests/api/users_spec.rb | 55
-rw-r--r--  spec/requests/git_http_spec.rb | 8
-rw-r--r--  spec/requests/lfs_http_spec.rb | 38
-rw-r--r--  spec/services/groups/destroy_service_spec.rb | 2
-rw-r--r--  spec/services/projects/create_from_template_service_spec.rb | 2
-rw-r--r--  spec/services/projects/destroy_service_spec.rb | 14
-rw-r--r--  spec/services/projects/housekeeping_service_spec.rb | 2
-rw-r--r--  spec/services/projects/import_service_spec.rb | 4
-rw-r--r--  spec/services/users/destroy_service_spec.rb | 2
-rw-r--r--  spec/workers/storage_migrator_worker_spec.rb | 2
67 files changed, 1286 insertions, 124 deletions
diff --git a/spec/config/object_store_settings_spec.rb b/spec/config/object_store_settings_spec.rb
new file mode 100644
index 00000000000..b1ada3c99db
--- /dev/null
+++ b/spec/config/object_store_settings_spec.rb
@@ -0,0 +1,29 @@
+require 'spec_helper'
+require Rails.root.join('config', 'object_store_settings.rb')
+
+describe ObjectStoreSettings do
+ describe '.parse' do
+ it 'should set correct default values' do
+ settings = described_class.parse(nil)
+
+ expect(settings['enabled']).to be false
+ expect(settings['direct_upload']).to be false
+ expect(settings['background_upload']).to be true
+ expect(settings['remote_directory']).to be nil
+ end
+
+ it 'respects original values' do
+ original_settings = Settingslogic.new({
+ 'enabled' => true,
+ 'remote_directory' => 'artifacts'
+ })
+
+ settings = described_class.parse(original_settings)
+
+ expect(settings['enabled']).to be true
+ expect(settings['direct_upload']).to be false
+ expect(settings['background_upload']).to be true
+ expect(settings['remote_directory']).to eq 'artifacts'
+ end
+ end
+end
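
The expectations above pin down the defaults ObjectStoreSettings.parse is meant to apply. A minimal sketch of a parse method consistent with those expectations follows; it is illustrative only, and the actual config/object_store_settings.rb may differ.

require 'settingslogic'

class ObjectStoreSettings
  DEFAULTS = {
    'enabled' => false,
    'direct_upload' => false,
    'background_upload' => true,
    'remote_directory' => nil
  }.freeze

  # Merge caller-provided settings over the defaults; nil yields the defaults.
  def self.parse(object_store)
    Settingslogic.new(DEFAULTS.merge(object_store.to_h))
  end
end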
diff --git a/spec/controllers/projects/wikis_controller_spec.rb b/spec/controllers/projects/wikis_controller_spec.rb
index 92addf30307..fed6677935e 100644
--- a/spec/controllers/projects/wikis_controller_spec.rb
+++ b/spec/controllers/projects/wikis_controller_spec.rb
@@ -1,8 +1,35 @@
require 'spec_helper'
describe Projects::WikisController do
- let(:project) { create(:project_empty_repo, :public) }
+ let(:project) { create(:project, :public, :repository) }
let(:user) { create(:user) }
+ let(:wiki) { ProjectWiki.new(project, user) }
+
+ describe 'GET #show' do
+ let(:wiki_title) { 'page-title-test' }
+
+ render_views
+
+ before do
+ create_page(wiki_title, 'hello world')
+ end
+
+ it 'limits the retrieved pages for the sidebar' do
+ sign_in(user)
+
+ expect(controller).to receive(:load_wiki).and_return(wiki)
+
+ # empty? call
+ expect(wiki).to receive(:pages).with(limit: 1).and_call_original
+ # Sidebar entries
+ expect(wiki).to receive(:pages).with(limit: 15).and_call_original
+
+ get :show, namespace_id: project.namespace, project_id: project, id: wiki_title
+
+ expect(response).to have_http_status(:ok)
+ expect(response.body).to include(wiki_title)
+ end
+ end
describe 'POST #preview_markdown' do
it 'renders json in a correct format' do
@@ -13,4 +40,12 @@ describe Projects::WikisController do
expect(JSON.parse(response.body).keys).to match_array(%w(body references))
end
end
+
+ def create_page(name, content)
+ project.wiki.wiki.write_page(name, :markdown, content, commit_details(name))
+ end
+
+ def commit_details(name)
+ Gitlab::Git::Wiki::CommitDetails.new(user.id, user.username, user.name, user.email, "created page #{name}")
+ end
end
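
The two pages expectations describe a controller that probes for emptiness with limit: 1 and fetches at most 15 entries for the sidebar. A hedged sketch of that access pattern, with illustrative helper names rather than the actual controller code:

def wiki_empty?
  # Cheap emptiness probe: fetch at most one page.
  @project_wiki.pages(limit: 1).empty?
end

def sidebar_pages
  # Cap the sidebar at 15 entries so large wikis stay cheap to render.
  @project_wiki.pages(limit: 15)
end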
diff --git a/spec/controllers/users_controller_spec.rb b/spec/controllers/users_controller_spec.rb
index b0acf4a49ac..071f96a729e 100644
--- a/spec/controllers/users_controller_spec.rb
+++ b/spec/controllers/users_controller_spec.rb
@@ -2,6 +2,8 @@ require 'spec_helper'
describe UsersController do
let(:user) { create(:user) }
+ let(:private_user) { create(:user, private_profile: true) }
+ let(:public_user) { create(:user) }
describe 'GET #show' do
context 'with rendered views' do
@@ -98,16 +100,47 @@ describe UsersController do
expect(assigns(:events)).to be_empty
end
+
+ it 'hides events if the user has a private profile' do
+ Gitlab::DataBuilder::Push.build_sample(project, private_user)
+
+ get :show, username: private_user.username, format: :json
+
+ expect(assigns(:events)).to be_empty
+ end
end
end
describe 'GET #calendar' do
- it 'renders calendar' do
- sign_in(user)
+ context 'for user' do
+ let(:project) { create(:project) }
+
+ before do
+ sign_in(user)
+ project.add_developer(user)
+ end
+
+ context 'with public profile' do
+ it 'renders calendar' do
+ push_data = Gitlab::DataBuilder::Push.build_sample(project, public_user)
+ EventCreateService.new.push(project, public_user, push_data)
+
+ get :calendar, username: public_user.username, format: :json
- get :calendar, username: user.username, format: :json
+ expect(response).to have_gitlab_http_status(200)
+ end
+ end
+
+ context 'with private profile' do
+ it 'does not render calendar' do
+ push_data = Gitlab::DataBuilder::Push.build_sample(project, private_user)
+ EventCreateService.new.push(project, private_user, push_data)
- expect(response).to have_gitlab_http_status(200)
+ get :calendar, username: private_user.username, format: :json
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
end
context 'forked project' do
@@ -150,9 +183,26 @@ describe UsersController do
expect(assigns(:calendar_date)).to eq(Date.parse('2014-07-31'))
end
- it 'renders calendar_activities' do
- get :calendar_activities, username: user.username
- expect(response).to render_template('calendar_activities')
+ context 'for user' do
+ context 'with public profile' do
+ it 'renders calendar_activities' do
+ push_data = Gitlab::DataBuilder::Push.build_sample(project, public_user)
+ EventCreateService.new.push(project, public_user, push_data)
+
+ get :calendar_activities, username: public_user.username
+ expect(assigns[:events]).not_to be_empty
+ end
+ end
+
+ context 'with private profile' do
+ it 'does not render calendar_activities' do
+ push_data = Gitlab::DataBuilder::Push.build_sample(project, private_user)
+ EventCreateService.new.push(project, private_user, push_data)
+
+ get :calendar_activities, username: private_user.username
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
end
end
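
These examples expect the calendar endpoints to return 404 when the profile is private. A hypothetical guard of the kind they exercise; the real UsersController may implement this differently:

class UsersController < ApplicationController
  before_action :authorize_read_user_profile!,
                only: [:calendar, :calendar_activities]

  private

  def user
    @user ||= User.find_by_username!(params[:username])
  end

  # Render 404 unless the current user may see this profile
  # (a private profile is only visible to its owner).
  def authorize_read_user_profile!
    render_404 unless can?(current_user, :read_user_profile, user)
  end
end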
diff --git a/spec/features/issues/update_issues_spec.rb b/spec/features/issues/update_issues_spec.rb
index 845a7c5fc42..fd8629ae504 100644
--- a/spec/features/issues/update_issues_spec.rb
+++ b/spec/features/issues/update_issues_spec.rb
@@ -49,7 +49,7 @@ describe 'Multiple issue updating from issues#index', :js do
click_update_issues_button
page.within('.issue .controls') do
- expect(find('.author_link')["title"]).to have_content(user.name)
+ expect(find('.author-link')["title"]).to have_content(user.name)
end
end
@@ -63,7 +63,7 @@ describe 'Multiple issue updating from issues#index', :js do
click_link 'Unassigned'
click_update_issues_button
- expect(find('.issue:first-child .controls')).not_to have_css('.author_link')
+ expect(find('.issue:first-child .controls')).not_to have_css('.author-link')
end
end
diff --git a/spec/features/merge_requests/user_mass_updates_spec.rb b/spec/features/merge_requests/user_mass_updates_spec.rb
index bb327159cb0..cb6603d3f50 100644
--- a/spec/features/merge_requests/user_mass_updates_spec.rb
+++ b/spec/features/merge_requests/user_mass_updates_spec.rb
@@ -47,7 +47,7 @@ describe 'Merge requests > User mass updates', :js do
change_assignee(user.name)
page.within('.merge-request .controls') do
- expect(find('.author_link')["title"]).to have_content(user.name)
+ expect(find('.author-link')["title"]).to have_content(user.name)
end
end
end
@@ -62,7 +62,7 @@ describe 'Merge requests > User mass updates', :js do
it 'removes assignee from the merge request' do
change_assignee('Unassigned')
- expect(find('.merge-request .controls')).not_to have_css('.author_link')
+ expect(find('.merge-request .controls')).not_to have_css('.author-link')
end
end
end
diff --git a/spec/features/projects/commits/user_browses_commits_spec.rb b/spec/features/projects/commits/user_browses_commits_spec.rb
index 23d8d606790..534cfe1eb12 100644
--- a/spec/features/projects/commits/user_browses_commits_spec.rb
+++ b/spec/features/projects/commits/user_browses_commits_spec.rb
@@ -238,6 +238,5 @@ def check_author_link(email, author)
author_link = find('.commit-author-link')
expect(author_link['href']).to eq(user_path(author))
- expect(author_link['title']).to eq(email)
expect(find('.commit-author-name').text).to eq(author.name)
end
diff --git a/spec/features/signed_commits_spec.rb b/spec/features/signed_commits_spec.rb
index 3d05474dca2..5003eb508c2 100644
--- a/spec/features/signed_commits_spec.rb
+++ b/spec/features/signed_commits_spec.rb
@@ -7,7 +7,7 @@ describe 'GPG signed commits', :js do
user = create :user, email: 'unrelated.user@example.org'
project.add_maintainer(user)
- Sidekiq::Testing.inline! do
+ perform_enqueued_jobs do
create :gpg_key, key: GpgHelpers::User1.public_key, user: user
end
@@ -21,7 +21,7 @@ describe 'GPG signed commits', :js do
end
# user changes his email which makes the gpg key verified
- Sidekiq::Testing.inline! do
+ perform_enqueued_jobs do
user.skip_reconfirmation!
user.update!(email: GpgHelpers::User1.emails.first)
end
@@ -48,7 +48,7 @@ describe 'GPG signed commits', :js do
end
# user adds the gpg key which makes the signature valid
- Sidekiq::Testing.inline! do
+ perform_enqueued_jobs do
create :gpg_key, key: GpgHelpers::User1.public_key, user: user
end
@@ -66,7 +66,7 @@ describe 'GPG signed commits', :js do
end
let(:user_1_key) do
- Sidekiq::Testing.inline! do
+ perform_enqueued_jobs do
create :gpg_key, key: GpgHelpers::User1.public_key, user: user_1
end
end
@@ -79,7 +79,7 @@ describe 'GPG signed commits', :js do
end
let(:user_2_key) do
- Sidekiq::Testing.inline! do
+ perform_enqueued_jobs do
create :gpg_key, key: GpgHelpers::User2.public_key, user: user_2
end
end
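
The Sidekiq::Testing.inline! blocks in this file (and in several migration specs further down) are swapped for perform_enqueued_jobs. A minimal usage sketch, assuming the examples have access to ActiveJob::TestHelper or an equivalent spec support helper:

include ActiveJob::TestHelper

it 'creates the key and processes the follow-up jobs inline' do
  # Any jobs enqueued inside the block are executed before it returns.
  perform_enqueued_jobs do
    create(:gpg_key, key: GpgHelpers::User1.public_key, user: user)
  end
end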
diff --git a/spec/features/users/show_spec.rb b/spec/features/users/show_spec.rb
index 3e2fb704bc6..207c333c636 100644
--- a/spec/features/users/show_spec.rb
+++ b/spec/features/users/show_spec.rb
@@ -3,15 +3,53 @@ require 'spec_helper'
describe 'User page' do
let(:user) { create(:user) }
- it 'shows all the tabs' do
- visit(user_path(user))
-
- page.within '.nav-links' do
- expect(page).to have_link('Activity')
- expect(page).to have_link('Groups')
- expect(page).to have_link('Contributed projects')
- expect(page).to have_link('Personal projects')
- expect(page).to have_link('Snippets')
+ context 'with public profile' do
+ it 'shows all the tabs' do
+ visit(user_path(user))
+
+ page.within '.nav-links' do
+ expect(page).to have_link('Activity')
+ expect(page).to have_link('Groups')
+ expect(page).to have_link('Contributed projects')
+ expect(page).to have_link('Personal projects')
+ expect(page).to have_link('Snippets')
+ end
+ end
+
+ it 'does not show private profile message' do
+ visit(user_path(user))
+
+ expect(page).not_to have_content("This user has a private profile")
+ end
+ end
+
+ context 'with private profile' do
+ let(:user) { create(:user, private_profile: true) }
+
+ it 'shows no tab' do
+ visit(user_path(user))
+
+ expect(page).to have_css("div.profile-header")
+ expect(page).not_to have_css("ul.nav-links")
+ end
+
+ it 'shows private profile message' do
+ visit(user_path(user))
+
+ expect(page).to have_content("This user has a private profile")
+ end
+
+ it 'shows own tabs' do
+ sign_in(user)
+ visit(user_path(user))
+
+ page.within '.nav-links' do
+ expect(page).to have_link('Activity')
+ expect(page).to have_link('Groups')
+ expect(page).to have_link('Contributed projects')
+ expect(page).to have_link('Personal projects')
+ expect(page).to have_link('Snippets')
+ end
end
end
diff --git a/spec/finders/user_recent_events_finder_spec.rb b/spec/finders/user_recent_events_finder_spec.rb
index da043f94021..58470f4c84d 100644
--- a/spec/finders/user_recent_events_finder_spec.rb
+++ b/spec/finders/user_recent_events_finder_spec.rb
@@ -29,11 +29,22 @@ describe UserRecentEventsFinder do
public_project.add_developer(current_user)
end
- it 'returns all the events' do
- expect(finder.execute).to include(private_event, internal_event, public_event)
+ context 'when profile is public' do
+ it 'returns all the events' do
+ expect(finder.execute).to include(private_event, internal_event, public_event)
+ end
+ end
+
+ context 'when profile is private' do
+ it 'returns no event' do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(current_user, :read_user_profile, project_owner).and_return(false)
+ expect(finder.execute).to be_empty
+ end
end
it 'does not include the events if the user cannot read cross project' do
+ expect(Ability).to receive(:allowed?).and_call_original
expect(Ability).to receive(:allowed?).with(current_user, :read_cross_project) { false }
expect(finder.execute).to be_empty
end
diff --git a/spec/fixtures/api/schemas/pipeline.json b/spec/fixtures/api/schemas/pipeline.json
index 55511d17b5e..b6e30c40f13 100644
--- a/spec/fixtures/api/schemas/pipeline.json
+++ b/spec/fixtures/api/schemas/pipeline.json
@@ -319,6 +319,10 @@
"id": "/properties/updated_at",
"type": "string"
},
+ "web_url": {
+ "id": "/properties/web_url",
+ "type": "string"
+ },
"user": {
"id": "/properties/user",
"properties": {
diff --git a/spec/fixtures/api/schemas/public_api/v4/pipeline/basic.json b/spec/fixtures/api/schemas/public_api/v4/pipeline/basic.json
index 0d127dc5297..56f86856dd4 100644
--- a/spec/fixtures/api/schemas/public_api/v4/pipeline/basic.json
+++ b/spec/fixtures/api/schemas/public_api/v4/pipeline/basic.json
@@ -4,13 +4,15 @@
"id",
"sha",
"ref",
- "status"
+ "status",
+ "web_url"
],
"properties" : {
"id": { "type": "integer" },
"sha": { "type": "string" },
"ref": { "type": "string" },
- "status": { "type": "string" }
+ "status": { "type": "string" },
+ "web_url": { "type": "string" }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/projects.json b/spec/fixtures/api/schemas/public_api/v4/projects.json
index 17ad8d8c48d..af5670ebd33 100644
--- a/spec/fixtures/api/schemas/public_api/v4/projects.json
+++ b/spec/fixtures/api/schemas/public_api/v4/projects.json
@@ -24,13 +24,24 @@
"avatar_url": { "type": ["string", "null"] },
"star_count": { "type": "integer" },
"forks_count": { "type": "integer" },
- "last_activity_at": { "type": "date" }
+ "last_activity_at": { "type": "date" },
+ "namespace": {
+ "type": "object",
+ "properties" : {
+ "id": { "type": "integer" },
+ "name": { "type": "string" },
+ "path": { "type": "string" },
+ "kind": { "type": "string" },
+ "full_path": { "type": "string" },
+ "parent_id": { "type": ["integer", "null"] }
+ }
+ }
},
"required": [
"id", "name", "name_with_namespace", "description", "path",
"path_with_namespace", "created_at", "default_branch", "tag_list",
"ssh_url_to_repo", "http_url_to_repo", "web_url", "avatar_url",
- "star_count", "last_activity_at"
+ "star_count", "last_activity_at", "namespace"
],
"additionalProperties": false
}
diff --git a/spec/helpers/button_helper_spec.rb b/spec/helpers/button_helper_spec.rb
index fee8df10129..630f3eff258 100644
--- a/spec/helpers/button_helper_spec.rb
+++ b/spec/helpers/button_helper_spec.rb
@@ -121,6 +121,8 @@ describe ButtonHelper do
end
describe 'clipboard_button' do
+ include IconsHelper
+
let(:user) { create(:user) }
let(:project) { build_stubbed(:project) }
@@ -145,7 +147,7 @@ describe ButtonHelper do
expect(element.attr('data-clipboard-text')).to eq(nil)
expect(element.inner_text).to eq("")
- expect(element).to have_selector('.fa.fa-clipboard')
+ expect(element.to_html).to include sprite_icon('duplicate')
end
end
@@ -178,7 +180,7 @@ describe ButtonHelper do
context 'with `hide_button_icon` attribute provided' do
it 'shows copy to clipboard button without tooltip support' do
- expect(element(hide_button_icon: true)).not_to have_selector('.fa.fa-clipboard')
+ expect(element(hide_button_icon: true).to_html).not_to include sprite_icon('duplicate')
end
end
end
diff --git a/spec/helpers/users_helper_spec.rb b/spec/helpers/users_helper_spec.rb
index b18c045848f..b079802cb81 100644
--- a/spec/helpers/users_helper_spec.rb
+++ b/spec/helpers/users_helper_spec.rb
@@ -25,8 +25,20 @@ describe UsersHelper do
allow(helper).to receive(:can?).and_return(true)
end
- it 'includes all the expected tabs' do
- expect(tabs).to include(:activity, :groups, :contributed, :projects, :snippets)
+ context 'with public profile' do
+ it 'includes all the expected tabs' do
+ expect(tabs).to include(:activity, :groups, :contributed, :projects, :snippets)
+ end
+ end
+
+ context 'with private profile' do
+ before do
+ allow(helper).to receive(:can?).with(user, :read_user_profile, nil).and_return(false)
+ end
+
+ it 'is empty' do
+ expect(tabs).to be_empty
+ end
end
end
diff --git a/spec/helpers/visibility_level_helper_spec.rb b/spec/helpers/visibility_level_helper_spec.rb
index 5077c89d7b4..a3be222b7bd 100644
--- a/spec/helpers/visibility_level_helper_spec.rb
+++ b/spec/helpers/visibility_level_helper_spec.rb
@@ -6,6 +6,29 @@ describe VisibilityLevelHelper do
let(:personal_snippet) { build(:personal_snippet) }
let(:project_snippet) { build(:project_snippet) }
+ describe 'visibility_icon_description' do
+ context 'used with a Project' do
+ it 'delegates projects to #project_visibility_icon_description' do
+ expect(visibility_icon_description(project))
+ .to match /project/i
+ end
+
+ context 'used with a ProjectPresenter' do
+ it 'delegates projects to #project_visibility_icon_description' do
+ expect(visibility_icon_description(project.present))
+ .to match /project/i
+ end
+ end
+
+ context 'used with a Group' do
+ it 'delegates groups to #group_visibility_icon_description' do
+ expect(visibility_icon_description(group))
+ .to match /group/i
+ end
+ end
+ end
+ end
+
describe 'visibility_level_description' do
context 'used with a Project' do
it 'delegates projects to #project_visibility_level_description' do
diff --git a/spec/javascripts/diffs/components/diff_line_gutter_content_spec.js b/spec/javascripts/diffs/components/diff_line_gutter_content_spec.js
index cb85d12daf2..bdc94131fc2 100644
--- a/spec/javascripts/diffs/components/diff_line_gutter_content_spec.js
+++ b/spec/javascripts/diffs/components/diff_line_gutter_content_spec.js
@@ -50,7 +50,11 @@ describe('DiffLineGutterContent', () => {
it('should return discussions for the given lineCode', () => {
const { lineCode } = getDiffFileMock().highlightedDiffLines[1];
- const component = createComponent({ lineCode, showCommentButton: true });
+ const component = createComponent({
+ lineCode,
+ showCommentButton: true,
+ discussions: getDiscussionsMockData(),
+ });
setDiscussions(component);
diff --git a/spec/javascripts/gpg_badges_spec.js b/spec/javascripts/gpg_badges_spec.js
index 97c771dcfd3..78330dd9633 100644
--- a/spec/javascripts/gpg_badges_spec.js
+++ b/spec/javascripts/gpg_badges_spec.js
@@ -1,23 +1,27 @@
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import GpgBadges from '~/gpg_badges';
+import { TEST_HOST } from 'spec/test_constants';
describe('GpgBadges', () => {
let mock;
const dummyCommitSha = 'n0m0rec0ffee';
const dummyBadgeHtml = 'dummy html';
const dummyResponse = {
- signatures: [{
- commit_sha: dummyCommitSha,
- html: dummyBadgeHtml,
- }],
+ signatures: [
+ {
+ commit_sha: dummyCommitSha,
+ html: dummyBadgeHtml,
+ },
+ ],
};
+ const dummyUrl = `${TEST_HOST}/dummy/signatures`;
beforeEach(() => {
mock = new MockAdapter(axios);
setFixtures(`
<form
- class="commits-search-form js-signature-container" data-signatures-path="/hello" action="/hello"
+ class="commits-search-form js-signature-container" data-signatures-path="${dummyUrl}" action="${dummyUrl}"
method="get">
<input name="utf8" type="hidden" value="✓">
<input type="search" name="search" id="commits-search"class="form-control search-text-input input-short">
@@ -32,25 +36,55 @@ describe('GpgBadges', () => {
mock.restore();
});
- it('displays a loading spinner', (done) => {
- mock.onGet('/hello').reply(200);
+ it('does not make a request if there is no container element', done => {
+ setFixtures('');
+ spyOn(axios, 'get');
- GpgBadges.fetch().then(() => {
- expect(document.querySelector('.js-loading-gpg-badge:empty')).toBe(null);
- const spinners = document.querySelectorAll('.js-loading-gpg-badge i.fa.fa-spinner.fa-spin');
- expect(spinners.length).toBe(1);
- done();
- }).catch(done.fail);
+ GpgBadges.fetch()
+ .then(() => {
+ expect(axios.get).not.toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
});
- it('replaces the loading spinner', (done) => {
- mock.onGet('/hello').reply(200, dummyResponse);
+ it('throws an error if the endpoint is missing', done => {
+ setFixtures('<div class="js-signature-container"></div>');
+ spyOn(axios, 'get');
- GpgBadges.fetch().then(() => {
- expect(document.querySelector('.js-loading-gpg-badge')).toBe(null);
- const parentContainer = document.querySelector('.parent-container');
- expect(parentContainer.innerHTML.trim()).toEqual(dummyBadgeHtml);
- done();
- }).catch(done.fail);
+ GpgBadges.fetch()
+ .then(() => done.fail('Expected error to be thrown'))
+ .catch(error => {
+ expect(error.message).toBe('Missing commit signatures endpoint!');
+ expect(axios.get).not.toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('displays a loading spinner', done => {
+ mock.onGet(dummyUrl).replyOnce(200);
+
+ GpgBadges.fetch()
+ .then(() => {
+ expect(document.querySelector('.js-loading-gpg-badge:empty')).toBe(null);
+ const spinners = document.querySelectorAll('.js-loading-gpg-badge i.fa.fa-spinner.fa-spin');
+ expect(spinners.length).toBe(1);
+ done();
+ })
+ .catch(done.fail);
+ });
+
+ it('replaces the loading spinner', done => {
+ mock.onGet(dummyUrl).replyOnce(200, dummyResponse);
+
+ GpgBadges.fetch()
+ .then(() => {
+ expect(document.querySelector('.js-loading-gpg-badge')).toBe(null);
+ const parentContainer = document.querySelector('.parent-container');
+ expect(parentContainer.innerHTML.trim()).toEqual(dummyBadgeHtml);
+ done();
+ })
+ .catch(done.fail);
});
});
diff --git a/spec/javascripts/issue_show/components/app_spec.js b/spec/javascripts/issue_show/components/app_spec.js
index eb5e0bddb74..36328382448 100644
--- a/spec/javascripts/issue_show/components/app_spec.js
+++ b/spec/javascripts/issue_show/components/app_spec.js
@@ -76,7 +76,7 @@ describe('Issuable output', () => {
expect(vm.$el.querySelector('.wiki').innerHTML).toContain('<p>this is a description!</p>');
expect(vm.$el.querySelector('.js-task-list-field').value).toContain('this is a description');
expect(formatText(editedText.innerText)).toMatch(/Edited[\s\S]+?by Some User/);
- expect(editedText.querySelector('.author_link').href).toMatch(/\/some_user$/);
+ expect(editedText.querySelector('.author-link').href).toMatch(/\/some_user$/);
expect(editedText.querySelector('time')).toBeTruthy();
})
.then(() => {
@@ -90,7 +90,7 @@ describe('Issuable output', () => {
expect(vm.$el.querySelector('.js-task-list-field').value).toContain('42');
expect(vm.$el.querySelector('.edited-text')).toBeTruthy();
expect(formatText(vm.$el.querySelector('.edited-text').innerText)).toMatch(/Edited[\s\S]+?by Other User/);
- expect(editedText.querySelector('.author_link').href).toMatch(/\/other_user$/);
+ expect(editedText.querySelector('.author-link').href).toMatch(/\/other_user$/);
expect(editedText.querySelector('time')).toBeTruthy();
})
.then(done)
diff --git a/spec/javascripts/issue_show/components/edited_spec.js b/spec/javascripts/issue_show/components/edited_spec.js
index 2061def699b..7f09db837bb 100644
--- a/spec/javascripts/issue_show/components/edited_spec.js
+++ b/spec/javascripts/issue_show/components/edited_spec.js
@@ -18,7 +18,7 @@ describe('edited', () => {
}).$mount();
expect(formatText(editedComponent.$el.innerText)).toMatch(/Edited[\s\S]+?by Some User/);
- expect(editedComponent.$el.querySelector('.author_link').href).toMatch(/\/some_user$/);
+ expect(editedComponent.$el.querySelector('.author-link').href).toMatch(/\/some_user$/);
expect(editedComponent.$el.querySelector('time')).toBeTruthy();
});
@@ -31,7 +31,7 @@ describe('edited', () => {
}).$mount();
expect(formatText(editedComponent.$el.innerText)).toMatch(/Edited by Some User/);
- expect(editedComponent.$el.querySelector('.author_link').href).toMatch(/\/some_user$/);
+ expect(editedComponent.$el.querySelector('.author-link').href).toMatch(/\/some_user$/);
expect(editedComponent.$el.querySelector('time')).toBeFalsy();
});
@@ -43,7 +43,7 @@ describe('edited', () => {
}).$mount();
expect(formatText(editedComponent.$el.innerText)).not.toMatch(/by Some User/);
- expect(editedComponent.$el.querySelector('.author_link')).toBeFalsy();
+ expect(editedComponent.$el.querySelector('.author-link')).toBeFalsy();
expect(editedComponent.$el.querySelector('time')).toBeTruthy();
});
diff --git a/spec/javascripts/lib/utils/common_utils_spec.js b/spec/javascripts/lib/utils/common_utils_spec.js
index 41ff59949e5..71b26a315af 100644
--- a/spec/javascripts/lib/utils/common_utils_spec.js
+++ b/spec/javascripts/lib/utils/common_utils_spec.js
@@ -627,4 +627,23 @@ describe('common_utils', () => {
});
});
});
+
+ describe('roundOffFloat', () => {
+ it('Rounds off decimal places of a float number with provided precision', () => {
+ expect(commonUtils.roundOffFloat(3.141592, 3)).toBe(3.142);
+ });
+
+ it('Rounds off a float number to a whole number when provided precision is zero', () => {
+ expect(commonUtils.roundOffFloat(3.141592, 0)).toBe(3);
+ expect(commonUtils.roundOffFloat(3.5, 0)).toBe(4);
+ });
+
+ it('Rounds off float number to nearest 0, 10, 100, 1000 and so on when provided precision is below 0', () => {
+ expect(commonUtils.roundOffFloat(34567.14159, -1)).toBe(34570);
+ expect(commonUtils.roundOffFloat(34567.14159, -2)).toBe(34600);
+ expect(commonUtils.roundOffFloat(34567.14159, -3)).toBe(35000);
+ expect(commonUtils.roundOffFloat(34567.14159, -4)).toBe(30000);
+ expect(commonUtils.roundOffFloat(34567.14159, -5)).toBe(0);
+ });
+ });
});
diff --git a/spec/javascripts/reports/store/actions_spec.js b/spec/javascripts/reports/store/actions_spec.js
new file mode 100644
index 00000000000..c714c5af156
--- /dev/null
+++ b/spec/javascripts/reports/store/actions_spec.js
@@ -0,0 +1,130 @@
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import {
+ setEndpoint,
+ requestReports,
+ fetchReports,
+ stopPolling,
+ clearEtagPoll,
+ receiveReportsSuccess,
+ receiveReportsError,
+} from '~/reports/store/actions';
+import state from '~/reports/store/state';
+import * as types from '~/reports/store/mutation_types';
+import testAction from 'spec/helpers/vuex_action_helper';
+import { TEST_HOST } from 'spec/test_constants';
+
+describe('Reports Store Actions', () => {
+ let mockedState;
+
+ beforeEach(() => {
+ mockedState = state();
+ });
+
+ describe('setEndpoint', () => {
+ it('should commit SET_ENDPOINT mutation', done => {
+ testAction(
+ setEndpoint,
+ 'endpoint.json',
+ mockedState,
+ [{ type: types.SET_ENDPOINT, payload: 'endpoint.json' }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('requestReports', () => {
+ it('should commit REQUEST_REPORTS mutation', done => {
+ testAction(requestReports, null, mockedState, [{ type: types.REQUEST_REPORTS }], [], done);
+ });
+ });
+
+ describe('fetchReports', () => {
+ let mock;
+
+ beforeEach(() => {
+ mockedState.endpoint = `${TEST_HOST}/endpoint.json`;
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ stopPolling();
+ clearEtagPoll();
+ });
+
+ describe('success', () => {
+ it('dispatches requestReports and receiveReportsSuccess ', done => {
+ mock.onGet(`${TEST_HOST}/endpoint.json`).replyOnce(200, { summary: {}, suites: [{ name: 'rspec' }] });
+
+ testAction(
+ fetchReports,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestReports',
+ },
+ {
+ payload: { summary: {}, suites: [{ name: 'rspec' }] },
+ type: 'receiveReportsSuccess',
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('error', () => {
+ beforeEach(() => {
+ mock.onGet(`${TEST_HOST}/endpoint.json`).reply(500);
+ });
+
+ it('dispatches requestReports and receiveReportsError ', done => {
+ testAction(
+ fetchReports,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestReports',
+ },
+ {
+ type: 'receiveReportsError',
+ },
+ ],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('receiveReportsSuccess', () => {
+ it('should commit RECEIVE_REPORTS_SUCCESS mutation', done => {
+ testAction(
+ receiveReportsSuccess,
+ { summary: {} },
+ mockedState,
+ [{ type: types.RECEIVE_REPORTS_SUCCESS, payload: { summary: {} } }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveReportsError', () => {
+ it('should commit RECEIVE_REPORTS_ERROR mutation', done => {
+ testAction(
+ receiveReportsError,
+ null,
+ mockedState,
+ [{ type: types.RECEIVE_REPORTS_ERROR }],
+ [],
+ done,
+ );
+ });
+ });
+});
diff --git a/spec/javascripts/reports/store/mutations_spec.js b/spec/javascripts/reports/store/mutations_spec.js
new file mode 100644
index 00000000000..3e0b15438c3
--- /dev/null
+++ b/spec/javascripts/reports/store/mutations_spec.js
@@ -0,0 +1,101 @@
+import state from '~/reports/store/state';
+import mutations from '~/reports/store/mutations';
+import * as types from '~/reports/store/mutation_types';
+
+describe('Reports Store Mutations', () => {
+ let stateCopy;
+
+ beforeEach(() => {
+ stateCopy = state();
+ });
+
+ describe('SET_ENDPOINT', () => {
+ it('should set endpoint', () => {
+ mutations[types.SET_ENDPOINT](stateCopy, 'endpoint.json');
+ expect(stateCopy.endpoint).toEqual('endpoint.json');
+ });
+ });
+
+ describe('REQUEST_REPORTS', () => {
+ it('should set isLoading to true', () => {
+ mutations[types.REQUEST_REPORTS](stateCopy);
+ expect(stateCopy.isLoading).toEqual(true);
+ });
+ });
+
+ describe('RECEIVE_REPORTS_SUCCESS', () => {
+ const mockedResponse = {
+ summary: {
+ total: 14,
+ resolved: 0,
+ failed: 7,
+ },
+ suites: [
+ {
+ name: 'build:linux',
+ summary: {
+ total: 2,
+ resolved: 0,
+ failed: 1,
+ },
+ new_failures: [
+ {
+ name: 'StringHelper#concatenate when a is git and b is lab returns summary',
+ execution_time: 0.0092435,
+ system_output:
+ 'Failure/Error: is_expected.to eq(\'gitlab\')',
+ },
+ ],
+ resolved_failures: [
+ {
+ name: 'StringHelper#concatenate when a is git and b is lab returns summary',
+ execution_time: 0.009235,
+ system_output:
+ 'Failure/Error: is_expected.to eq(\'gitlab\')',
+ },
+ ],
+ existing_failures: [
+ {
+ name: 'StringHelper#concatenate when a is git and b is lab returns summary',
+ execution_time: 1232.08,
+ system_output:
+ 'Failure/Error: is_expected.to eq(\'gitlab\')',
+ },
+ ],
+ },
+ ],
+ };
+
+ beforeEach(() => {
+ mutations[types.RECEIVE_REPORTS_SUCCESS](stateCopy, mockedResponse);
+ });
+
+ it('should reset isLoading', () => {
+ expect(stateCopy.isLoading).toEqual(false);
+ });
+
+ it('should set summary counts', () => {
+ expect(stateCopy.summary.total).toEqual(mockedResponse.summary.total);
+ expect(stateCopy.summary.resolved).toEqual(mockedResponse.summary.resolved);
+ expect(stateCopy.summary.failed).toEqual(mockedResponse.summary.failed);
+ });
+
+ it('should set reports', () => {
+ expect(stateCopy.reports).toEqual(mockedResponse.suites);
+ });
+ });
+
+ describe('RECEIVE_REPORTS_ERROR', () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_REPORTS_ERROR](stateCopy);
+ });
+ it('should reset isLoading', () => {
+ expect(stateCopy.isLoading).toEqual(false);
+ });
+
+ it('should set hasError to true', () => {
+ expect(stateCopy.hasError).toEqual(true);
+ });
+
+ });
+});
diff --git a/spec/javascripts/sidebar/assignees_spec.js b/spec/javascripts/sidebar/assignees_spec.js
index 4e4343812bd..843e7002180 100644
--- a/spec/javascripts/sidebar/assignees_spec.js
+++ b/spec/javascripts/sidebar/assignees_spec.js
@@ -102,13 +102,13 @@ describe('Assignee component', () => {
},
}).$mount();
- expect(component.$el.querySelector('.author_link')).not.toBeNull();
+ expect(component.$el.querySelector('.author-link')).not.toBeNull();
// The image
- expect(component.$el.querySelector('.author_link img').getAttribute('src')).toEqual(UsersMock.user.avatar);
+ expect(component.$el.querySelector('.author-link img').getAttribute('src')).toEqual(UsersMock.user.avatar);
// Author name
- expect(component.$el.querySelector('.author_link .author').innerText.trim()).toEqual(UsersMock.user.name);
+ expect(component.$el.querySelector('.author-link .author').innerText.trim()).toEqual(UsersMock.user.name);
// Username
- expect(component.$el.querySelector('.author_link .username').innerText.trim()).toEqual(`@${UsersMock.user.username}`);
+ expect(component.$el.querySelector('.author-link .username').innerText.trim()).toEqual(`@${UsersMock.user.username}`);
});
it('has the root url present in the assigneeUrl method', () => {
diff --git a/spec/javascripts/vue_shared/components/clipboard_button_spec.js b/spec/javascripts/vue_shared/components/clipboard_button_spec.js
index 97f0fbb04db..e135690349e 100644
--- a/spec/javascripts/vue_shared/components/clipboard_button_spec.js
+++ b/spec/javascripts/vue_shared/components/clipboard_button_spec.js
@@ -21,7 +21,7 @@ describe('clipboard button', () => {
it('renders a button for clipboard', () => {
expect(vm.$el.tagName).toEqual('BUTTON');
expect(vm.$el.getAttribute('data-clipboard-text')).toEqual('copy me');
- expect(vm.$el.querySelector('i').className).toEqual('fa fa-clipboard');
+ expect(vm.$el).toHaveSpriteIcon('duplicate');
});
it('should have a tooltip with default values', () => {
diff --git a/spec/javascripts/vue_shared/components/stacked_progress_bar_spec.js b/spec/javascripts/vue_shared/components/stacked_progress_bar_spec.js
index de3bf667fb3..076d940961d 100644
--- a/spec/javascripts/vue_shared/components/stacked_progress_bar_spec.js
+++ b/spec/javascripts/vue_shared/components/stacked_progress_bar_spec.js
@@ -10,9 +10,9 @@ const createComponent = (config) => {
successLabel: 'Synced',
failureLabel: 'Failed',
neutralLabel: 'Out of sync',
- successCount: 10,
- failureCount: 5,
- totalCount: 20,
+ successCount: 25,
+ failureCount: 10,
+ totalCount: 5000,
}, config);
return mountComponent(Component, defaultConfig);
@@ -32,7 +32,7 @@ describe('StackedProgressBarComponent', () => {
describe('computed', () => {
describe('neutralCount', () => {
it('returns neutralCount based on totalCount, successCount and failureCount', () => {
- expect(vm.neutralCount).toBe(5); // 20 - 10 - 5
+ expect(vm.neutralCount).toBe(4965); // 5000 - 25 - 10
});
});
});
@@ -40,7 +40,11 @@ describe('StackedProgressBarComponent', () => {
describe('methods', () => {
describe('getPercent', () => {
it('returns percentage from provided count based on `totalCount`', () => {
- expect(vm.getPercent(10)).toBe(50);
+ expect(vm.getPercent(500)).toBe(10);
+ });
+
+ it('returns percentage with decimal place from provided count based on `totalCount`', () => {
+ expect(vm.getPercent(10)).toBe(0.2);
});
});
diff --git a/spec/lib/banzai/filter/image_lazy_load_filter_spec.rb b/spec/lib/banzai/filter/image_lazy_load_filter_spec.rb
index 41f957c4e00..d06c5535309 100644
--- a/spec/lib/banzai/filter/image_lazy_load_filter_spec.rb
+++ b/spec/lib/banzai/filter/image_lazy_load_filter_spec.rb
@@ -7,6 +7,20 @@ describe Banzai::Filter::ImageLazyLoadFilter do
%(<img src="#{path}" />)
end
+ def image_with_class(path, class_attr = nil)
+ %(<img src="#{path}" class="#{class_attr}"/>)
+ end
+
+ it 'adds a class attribute' do
+ doc = filter(image('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg'))
+ expect(doc.at_css('img')['class']).to eq 'lazy'
+ end
+
+ it 'appends to the current class attribute' do
+ doc = filter(image_with_class('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg', 'test'))
+ expect(doc.at_css('img')['class']).to eq 'test lazy'
+ end
+
it 'transforms the image src to a data-src' do
doc = filter(image('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg'))
expect(doc.at_css('img')['data-src']).to eq '/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg'
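
The two new examples pin down the class handling: 'lazy' is added when no class exists and appended otherwise, while the src moves to data-src. A hedged sketch of that transformation, not the actual filter source:

doc.css('img').each do |img|
  # Append 'lazy' to whatever classes are already present.
  img['class'] = [img['class'], 'lazy'].reject(&:blank?).join(' ')

  # Defer loading by moving the real URL into data-src.
  img['data-src'] = img['src']
end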
diff --git a/spec/lib/banzai/filter/sanitization_filter_spec.rb b/spec/lib/banzai/filter/sanitization_filter_spec.rb
index d930c608b18..0b3c2390304 100644
--- a/spec/lib/banzai/filter/sanitization_filter_spec.rb
+++ b/spec/lib/banzai/filter/sanitization_filter_spec.rb
@@ -54,6 +54,18 @@ describe Banzai::Filter::SanitizationFilter do
expect(instance.whitelist[:transformers].size).to eq control_count
end
+ it 'customizes the whitelist only once for different instances' do
+ instance1 = described_class.new('Foo1')
+ instance2 = described_class.new('Foo2')
+ control_count = instance1.whitelist[:transformers].size
+
+ instance1.whitelist
+ instance2.whitelist
+
+ expect(instance1.whitelist[:transformers].size).to eq control_count
+ expect(instance2.whitelist[:transformers].size).to eq control_count
+ end
+
it 'sanitizes `class` attribute from all elements' do
act = %q{<pre class="code highlight white c"><code>&lt;span class="k"&gt;def&lt;/span&gt;</code></pre>}
exp = %q{<pre><code>&lt;span class="k"&gt;def&lt;/span&gt;</code></pre>}
diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb
index 6eb10497428..f313e675654 100644
--- a/spec/lib/feature_spec.rb
+++ b/spec/lib/feature_spec.rb
@@ -39,18 +39,36 @@ describe Feature do
end
describe '.persisted?' do
- it 'returns true for a persisted feature' do
- Feature::FlipperFeature.create!(key: 'foo')
+ context 'when the feature is persisted' do
+ it 'returns true when feature name is a string' do
+ Feature::FlipperFeature.create!(key: 'foo')
+
+ feature = double(:feature, name: 'foo')
+
+ expect(described_class.persisted?(feature)).to eq(true)
+ end
+
+ it 'returns true when feature name is a symbol' do
+ Feature::FlipperFeature.create!(key: 'foo')
- feature = double(:feature, name: 'foo')
+ feature = double(:feature, name: :foo)
- expect(described_class.persisted?(feature)).to eq(true)
+ expect(described_class.persisted?(feature)).to eq(true)
+ end
end
- it 'returns false for a feature that is not persisted' do
- feature = double(:feature, name: 'foo')
+ context 'when the feature is not persisted' do
+ it 'returns false when feature name is a string' do
+ feature = double(:feature, name: 'foo')
+
+ expect(described_class.persisted?(feature)).to eq(false)
+ end
- expect(described_class.persisted?(feature)).to eq(false)
+ it 'returns false when feature name is a symbol' do
+ feature = double(:feature, name: :bar)
+
+ expect(described_class.persisted?(feature)).to eq(false)
+ end
end
end
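
The new contexts check that .persisted? treats string and symbol feature names alike. A minimal sketch of a name-normalising check consistent with that; the real Feature.persisted? may differ:

def self.persisted?(feature)
  # Normalise to a string so :foo and 'foo' hit the same row.
  Feature::FlipperFeature.where(key: feature.name.to_s).exists?
end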
diff --git a/spec/lib/gitlab/background_migration/delete_diff_files_spec.rb b/spec/lib/gitlab/background_migration/delete_diff_files_spec.rb
index 64c994a268f..1969aed51da 100644
--- a/spec/lib/gitlab/background_migration/delete_diff_files_spec.rb
+++ b/spec/lib/gitlab/background_migration/delete_diff_files_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::DeleteDiffFiles, :migration, schema: 20180619121030 do
+describe Gitlab::BackgroundMigration::DeleteDiffFiles, :migration, :sidekiq, schema: 20180619121030 do
describe '#perform' do
context 'when diff files can be deleted' do
let(:merge_request) { create(:merge_request, :merged) }
diff --git a/spec/lib/gitlab/background_migration/schedule_diff_files_deletion_spec.rb b/spec/lib/gitlab/background_migration/schedule_diff_files_deletion_spec.rb
index fb5093b0bd1..ec8ba0ce127 100644
--- a/spec/lib/gitlab/background_migration/schedule_diff_files_deletion_spec.rb
+++ b/spec/lib/gitlab/background_migration/schedule_diff_files_deletion_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::ScheduleDiffFilesDeletion, :migration, schema: 20180619121030 do
+describe Gitlab::BackgroundMigration::ScheduleDiffFilesDeletion, :migration, :sidekiq, schema: 20180619121030 do
describe '#perform' do
let(:merge_request_diffs) { table(:merge_request_diffs) }
let(:merge_requests) { table(:merge_requests) }
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index 782e4e45a91..7d76519dddd 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -65,6 +65,28 @@ describe Gitlab::Database do
end
end
+ describe '.postgresql_9_or_less?' do
+ it 'returns false when using MySQL' do
+ allow(described_class).to receive(:postgresql?).and_return(false)
+
+ expect(described_class.postgresql_9_or_less?).to eq(false)
+ end
+
+ it 'returns true when using PostgreSQL 9.6' do
+ allow(described_class).to receive(:postgresql?).and_return(true)
+ allow(described_class).to receive(:version).and_return('9.6')
+
+ expect(described_class.postgresql_9_or_less?).to eq(true)
+ end
+
+ it 'returns false when using PostgreSQL 10 or newer' do
+ allow(described_class).to receive(:postgresql?).and_return(true)
+ allow(described_class).to receive(:version).and_return('10')
+
+ expect(described_class.postgresql_9_or_less?).to eq(false)
+ end
+ end
+
describe '.join_lateral_supported?' do
it 'returns false when using MySQL' do
allow(described_class).to receive(:postgresql?).and_return(false)
@@ -109,6 +131,70 @@ describe Gitlab::Database do
end
end
+ describe '.pg_wal_lsn_diff' do
+ it 'returns old name when using PostgreSQL 9.6' do
+ allow(described_class).to receive(:postgresql?).and_return(true)
+ allow(described_class).to receive(:version).and_return('9.6')
+
+ expect(described_class.pg_wal_lsn_diff).to eq('pg_xlog_location_diff')
+ end
+
+ it 'returns new name when using PostgreSQL 10 or newer' do
+ allow(described_class).to receive(:postgresql?).and_return(true)
+ allow(described_class).to receive(:version).and_return('10')
+
+ expect(described_class.pg_wal_lsn_diff).to eq('pg_wal_lsn_diff')
+ end
+ end
+
+ describe '.pg_current_wal_insert_lsn' do
+ it 'returns old name when using PostgreSQL 9.6' do
+ allow(described_class).to receive(:postgresql?).and_return(true)
+ allow(described_class).to receive(:version).and_return('9.6')
+
+ expect(described_class.pg_current_wal_insert_lsn).to eq('pg_current_xlog_insert_location')
+ end
+
+ it 'returns new name when using PostgreSQL 10 or newer' do
+ allow(described_class).to receive(:postgresql?).and_return(true)
+ allow(described_class).to receive(:version).and_return('10')
+
+ expect(described_class.pg_current_wal_insert_lsn).to eq('pg_current_wal_insert_lsn')
+ end
+ end
+
+ describe '.pg_last_wal_receive_lsn' do
+ it 'returns old name when using PostgreSQL 9.6' do
+ allow(described_class).to receive(:postgresql?).and_return(true)
+ allow(described_class).to receive(:version).and_return('9.6')
+
+ expect(described_class.pg_last_wal_receive_lsn).to eq('pg_last_xlog_receive_location')
+ end
+
+ it 'returns new name when using PostgreSQL 10 or newer' do
+ allow(described_class).to receive(:postgresql?).and_return(true)
+ allow(described_class).to receive(:version).and_return('10')
+
+ expect(described_class.pg_last_wal_receive_lsn).to eq('pg_last_wal_receive_lsn')
+ end
+ end
+
+ describe '.pg_last_wal_replay_lsn' do
+ it 'returns old name when using PostgreSQL 9.6' do
+ allow(described_class).to receive(:postgresql?).and_return(true)
+ allow(described_class).to receive(:version).and_return('9.6')
+
+ expect(described_class.pg_last_wal_replay_lsn).to eq('pg_last_xlog_replay_location')
+ end
+
+ it 'returns new name when using PostgreSQL 10 or newer' do
+ allow(described_class).to receive(:postgresql?).and_return(true)
+ allow(described_class).to receive(:version).and_return('10')
+
+ expect(described_class.pg_last_wal_replay_lsn).to eq('pg_last_wal_replay_lsn')
+ end
+ end
+
describe '.nulls_last_order' do
context 'when using PostgreSQL' do
before do
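
The version-gated examples above all follow one pattern: return the pre-10 function name on PostgreSQL 9.x and the renamed one on 10 or newer. A hedged sketch of helpers consistent with those expectations; the real Gitlab::Database module may differ:

def self.postgresql_9_or_less?
  postgresql? && version.to_f < 10
end

def self.pg_wal_lsn_diff
  postgresql_9_or_less? ? 'pg_xlog_location_diff' : 'pg_wal_lsn_diff'
end

def self.pg_current_wal_insert_lsn
  postgresql_9_or_less? ? 'pg_current_xlog_insert_location' : 'pg_current_wal_insert_lsn'
end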
diff --git a/spec/lib/gitlab/git/wiki_spec.rb b/spec/lib/gitlab/git/wiki_spec.rb
index b63658e1b3b..c5666e4ec61 100644
--- a/spec/lib/gitlab/git/wiki_spec.rb
+++ b/spec/lib/gitlab/git/wiki_spec.rb
@@ -6,6 +6,31 @@ describe Gitlab::Git::Wiki do
let(:project_wiki) { ProjectWiki.new(project, user) }
subject { project_wiki.wiki }
+ describe '#pages' do
+ before do
+ create_page('page1', 'content')
+ create_page('page2', 'content2')
+ end
+
+ after do
+ destroy_page('page1')
+ destroy_page('page2')
+ end
+
+ it 'returns all the pages' do
+ expect(subject.pages.count).to eq(2)
+ expect(subject.pages.first.title).to eq 'page1'
+ expect(subject.pages.last.title).to eq 'page2'
+ end
+
+ it 'returns only one page' do
+ pages = subject.pages(limit: 1)
+
+ expect(pages.count).to eq(1)
+ expect(pages.first.title).to eq 'page1'
+ end
+ end
+
describe '#page' do
before do
create_page('page1', 'content')
diff --git a/spec/lib/gitlab/gitaly_client/wiki_service_spec.rb b/spec/lib/gitlab/gitaly_client/wiki_service_spec.rb
index 6ad9f5ef766..5f67fe6b952 100644
--- a/spec/lib/gitlab/gitaly_client/wiki_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/wiki_service_spec.rb
@@ -70,6 +70,15 @@ describe Gitlab::GitalyClient::WikiService do
subject
end
+ it 'sends a limit of 0 to wiki_get_all_pages' do
+ expect_any_instance_of(Gitaly::WikiService::Stub)
+ .to receive(:wiki_get_all_pages)
+ .with(gitaly_request_with_params(limit: 0), kind_of(Hash))
+ .and_return([].each)
+
+ subject
+ end
+
it 'concatenates the raw data and returns a pair of WikiPage and WikiPageVersion for each page' do
expect_any_instance_of(Gitaly::WikiService::Stub)
.to receive(:wiki_get_all_pages)
@@ -84,5 +93,18 @@ describe Gitlab::GitalyClient::WikiService do
expect(wiki_page_2.raw_data).to eq('cd')
expect(wiki_page_2_version.format).to eq('markdown')
end
+
+ context 'with limits' do
+ subject { client.get_all_pages(limit: 1) }
+
+ it 'sends a request with the limit' do
+ expect_any_instance_of(Gitaly::WikiService::Stub)
+ .to receive(:wiki_get_all_pages)
+ .with(gitaly_request_with_params(limit: 1), kind_of(Hash))
+ .and_return([].each)
+
+ subject
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/gitlab_import/client_spec.rb b/spec/lib/gitlab/gitlab_import/client_spec.rb
index 50e8d7183ce..22ad88e28cb 100644
--- a/spec/lib/gitlab/gitlab_import/client_spec.rb
+++ b/spec/lib/gitlab/gitlab_import/client_spec.rb
@@ -15,4 +15,88 @@ describe Gitlab::GitlabImport::Client do
expect(key).to be_kind_of(Symbol)
end
end
+
+ it 'uses membership and simple flags' do
+ stub_request('/api/v4/projects?membership=true&page=1&per_page=100&simple=true')
+
+ expect_any_instance_of(OAuth2::Response).to receive(:parsed).and_return([])
+
+ expect(client.projects.to_a).to eq []
+ end
+
+ shared_examples 'pagination params' do
+ before do
+ allow_any_instance_of(OAuth2::Response).to receive(:parsed).and_return([])
+ end
+
+ it 'allows page_limit param' do
+ allow_any_instance_of(OAuth2::Response).to receive(:parsed).and_return(element_list)
+
+ expect(client).to receive(:lazy_page_iterator).with(hash_including(page_limit: 2)).and_call_original
+
+ client.send(method, *args, page_limit: 2, per_page: 1).to_a
+ end
+
+ it 'allows per_page param' do
+ expect(client).to receive(:lazy_page_iterator).with(hash_including(per_page: 2)).and_call_original
+
+ client.send(method, *args, per_page: 2).to_a
+ end
+
+ it 'allows starting_page param' do
+ expect(client).to receive(:lazy_page_iterator).with(hash_including(starting_page: 3)).and_call_original
+
+ client.send(method, *args, starting_page: 3).to_a
+ end
+ end
+
+ describe '#projects' do
+ subject(:method) { :projects }
+ let(:args) { [] }
+ let(:element_list) { build_list(:project, 2) }
+
+ before do
+ stub_request('/api/v4/projects?membership=true&page=1&per_page=1&simple=true')
+ stub_request('/api/v4/projects?membership=true&page=2&per_page=1&simple=true')
+ stub_request('/api/v4/projects?membership=true&page=1&per_page=2&simple=true')
+ stub_request('/api/v4/projects?membership=true&page=3&per_page=100&simple=true')
+ end
+
+ it_behaves_like 'pagination params'
+ end
+
+ describe '#issues' do
+ subject(:method) { :issues }
+ let(:args) { [1] }
+ let(:element_list) { build_list(:issue, 2) }
+
+ before do
+ stub_request('/api/v4/projects/1/issues?page=1&per_page=1')
+ stub_request('/api/v4/projects/1/issues?page=2&per_page=1')
+ stub_request('/api/v4/projects/1/issues?page=1&per_page=2')
+ stub_request('/api/v4/projects/1/issues?page=3&per_page=100')
+ end
+
+ it_behaves_like 'pagination params'
+ end
+
+ describe '#issue_comments' do
+ subject(:method) { :issue_comments }
+ let(:args) { [1, 1] }
+ let(:element_list) { build_list(:note_on_issue, 2) }
+
+ before do
+ stub_request('/api/v4/projects/1/issues/1/notes?page=1&per_page=1')
+ stub_request('/api/v4/projects/1/issues/1/notes?page=2&per_page=1')
+ stub_request('/api/v4/projects/1/issues/1/notes?page=1&per_page=2')
+ stub_request('/api/v4/projects/1/issues/1/notes?page=3&per_page=100')
+ end
+
+ it_behaves_like 'pagination params'
+ end
+
+ def stub_request(path)
+ WebMock.stub_request(:get, "https://gitlab.com#{path}")
+ .to_return(status: 200)
+ end
end
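
For reference, a hedged usage sketch of the pagination options the shared examples exercise, assuming the client is constructed from an OAuth access token (the variable name here is illustrative):

client = Gitlab::GitlabImport::Client.new(access_token)

# Start on page 3, fetch 2 items per page, and stop after 2 pages
# (starting_page / per_page / page_limit as exercised above).
client.projects(starting_page: 3, per_page: 2, page_limit: 2).each do |project|
  puts project['path_with_namespace']
end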
diff --git a/spec/lib/gitlab/hashed_storage/migrator_spec.rb b/spec/lib/gitlab/hashed_storage/migrator_spec.rb
index 813ae43b4d3..7eac2cacb90 100644
--- a/spec/lib/gitlab/hashed_storage/migrator_spec.rb
+++ b/spec/lib/gitlab/hashed_storage/migrator_spec.rb
@@ -65,7 +65,7 @@ describe Gitlab::HashedStorage::Migrator do
end
it 'migrate project' do
- Sidekiq::Testing.inline! do
+ perform_enqueued_jobs do
subject.migrate(project)
end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 084ce3066d6..db5aab0cd76 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -211,6 +211,7 @@ project:
- slack_service
- microsoft_teams_service
- mattermost_service
+- hangouts_chat_service
- buildkite_service
- bamboo_service
- teamcity_service
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 20def4fefe2..a19b3c0ba66 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -133,7 +133,7 @@ describe Gitlab::UsageData do
expect(subject[:signup_enabled]).to eq(Gitlab::CurrentSettings.allow_signup?)
expect(subject[:ldap_enabled]).to eq(Gitlab.config.ldap.enabled)
expect(subject[:gravatar_enabled]).to eq(Gitlab::CurrentSettings.gravatar_enabled?)
- expect(subject[:omniauth_enabled]).to eq(Gitlab.config.omniauth.enabled)
+ expect(subject[:omniauth_enabled]).to eq(Gitlab::Auth.omniauth_enabled?)
expect(subject[:reply_by_email_enabled]).to eq(Gitlab::IncomingEmail.enabled?)
expect(subject[:container_registry_enabled]).to eq(Gitlab.config.registry.enabled)
expect(subject[:gitlab_shared_runners_enabled]).to eq(Gitlab.config.gitlab_ci.shared_runners_enabled)
diff --git a/spec/migrations/active_record/schedule_set_confidential_note_events_on_services_spec.rb b/spec/migrations/active_record/schedule_set_confidential_note_events_on_services_spec.rb
index 4395e2f8264..5c6f213e15b 100644
--- a/spec/migrations/active_record/schedule_set_confidential_note_events_on_services_spec.rb
+++ b/spec/migrations/active_record/schedule_set_confidential_note_events_on_services_spec.rb
@@ -31,7 +31,7 @@ describe ScheduleSetConfidentialNoteEventsOnServices, :migration, :sidekiq do
end
it 'correctly processes services' do
- Sidekiq::Testing.inline! do
+ perform_enqueued_jobs do
expect(services_table.where(confidential_note_events: nil).count).to eq 4
expect(services_table.where(confidential_note_events: true).count).to eq 1
diff --git a/spec/migrations/migrate_stage_id_reference_in_background_spec.rb b/spec/migrations/migrate_stage_id_reference_in_background_spec.rb
index a837498e1b1..dd6f5325750 100644
--- a/spec/migrations/migrate_stage_id_reference_in_background_spec.rb
+++ b/spec/migrations/migrate_stage_id_reference_in_background_spec.rb
@@ -44,7 +44,7 @@ describe MigrateStageIdReferenceInBackground, :migration, :sidekiq do
end
it 'schedules background migrations' do
- Sidekiq::Testing.inline! do
+ perform_enqueued_jobs do
expect(jobs.where(stage_id: nil).count).to eq 5
migrate!
diff --git a/spec/migrations/migrate_stages_statuses_spec.rb b/spec/migrations/migrate_stages_statuses_spec.rb
index ce35276cbf5..5483e24fce7 100644
--- a/spec/migrations/migrate_stages_statuses_spec.rb
+++ b/spec/migrations/migrate_stages_statuses_spec.rb
@@ -34,7 +34,7 @@ describe MigrateStagesStatuses, :sidekiq, :migration do
end
it 'correctly migrates stages statuses' do
- Sidekiq::Testing.inline! do
+ perform_enqueued_jobs do
expect(stages.where(status: nil).count).to eq 3
migrate!
diff --git a/spec/migrations/normalize_ldap_extern_uids_spec.rb b/spec/migrations/normalize_ldap_extern_uids_spec.rb
index 56a78f52802..c6ea1e3e49e 100644
--- a/spec/migrations/normalize_ldap_extern_uids_spec.rb
+++ b/spec/migrations/normalize_ldap_extern_uids_spec.rb
@@ -38,7 +38,7 @@ describe NormalizeLdapExternUids, :migration, :sidekiq do
end
it 'migrates the LDAP identities' do
- Sidekiq::Testing.inline! do
+ perform_enqueued_jobs do
migrate!
identities.where(id: 1..4).each do |identity|
expect(identity.extern_uid).to eq("uid=foo #{identity.id},ou=people,dc=example,dc=com")
@@ -47,7 +47,7 @@ describe NormalizeLdapExternUids, :migration, :sidekiq do
end
it 'does not modify non-LDAP identities' do
- Sidekiq::Testing.inline! do
+ perform_enqueued_jobs do
migrate!
identity = identities.last
expect(identity.extern_uid).to eq(" uid = foo 5, ou = People, dc = example, dc = com ")
diff --git a/spec/migrations/schedule_create_gpg_key_subkeys_from_gpg_keys_spec.rb b/spec/migrations/schedule_create_gpg_key_subkeys_from_gpg_keys_spec.rb
index ed306fb3d62..96bef107599 100644
--- a/spec/migrations/schedule_create_gpg_key_subkeys_from_gpg_keys_spec.rb
+++ b/spec/migrations/schedule_create_gpg_key_subkeys_from_gpg_keys_spec.rb
@@ -20,7 +20,7 @@ describe ScheduleCreateGpgKeySubkeysFromGpgKeys, :migration, :sidekiq do
end
it 'schedules background migrations' do
- Sidekiq::Testing.inline! do
+ perform_enqueued_jobs do
expect(GpgKeySubkey.count).to eq(0)
migrate!
diff --git a/spec/migrations/schedule_merge_request_diff_migrations_spec.rb b/spec/migrations/schedule_merge_request_diff_migrations_spec.rb
index d230f064444..9f7e47bae0d 100644
--- a/spec/migrations/schedule_merge_request_diff_migrations_spec.rb
+++ b/spec/migrations/schedule_merge_request_diff_migrations_spec.rb
@@ -33,7 +33,7 @@ describe ScheduleMergeRequestDiffMigrations, :migration, :sidekiq do
end
it 'schedules background migrations' do
- Sidekiq::Testing.inline! do
+ perform_enqueued_jobs do
non_empty = 'st_commits IS NOT NULL OR st_diffs IS NOT NULL'
expect(merge_request_diffs.where(non_empty).count).to eq 3
diff --git a/spec/migrations/schedule_merge_request_diff_migrations_take_two_spec.rb b/spec/migrations/schedule_merge_request_diff_migrations_take_two_spec.rb
index 1aab4ae1650..5bcb923af7b 100644
--- a/spec/migrations/schedule_merge_request_diff_migrations_take_two_spec.rb
+++ b/spec/migrations/schedule_merge_request_diff_migrations_take_two_spec.rb
@@ -33,7 +33,7 @@ describe ScheduleMergeRequestDiffMigrationsTakeTwo, :migration, :sidekiq do
end
it 'migrates the data' do
- Sidekiq::Testing.inline! do
+ perform_enqueued_jobs do
non_empty = 'st_commits IS NOT NULL OR st_diffs IS NOT NULL'
expect(merge_request_diffs.where(non_empty).count).to eq 3
diff --git a/spec/migrations/schedule_merge_request_latest_merge_request_diff_id_migrations_spec.rb b/spec/migrations/schedule_merge_request_latest_merge_request_diff_id_migrations_spec.rb
index c9fdbe95d13..76fe16581ac 100644
--- a/spec/migrations/schedule_merge_request_latest_merge_request_diff_id_migrations_spec.rb
+++ b/spec/migrations/schedule_merge_request_latest_merge_request_diff_id_migrations_spec.rb
@@ -53,7 +53,7 @@ describe ScheduleMergeRequestLatestMergeRequestDiffIdMigrations, :migration, :si
end
it 'schedules background migrations' do
- Sidekiq::Testing.inline! do
+ perform_enqueued_jobs do
expect(merge_requests_table.where(latest_merge_request_diff_id: nil).count).to eq 3
migrate!
diff --git a/spec/migrations/schedule_set_confidential_note_events_on_webhooks_spec.rb b/spec/migrations/schedule_set_confidential_note_events_on_webhooks_spec.rb
index 027f4a91c90..fa4ddd5fbc7 100644
--- a/spec/migrations/schedule_set_confidential_note_events_on_webhooks_spec.rb
+++ b/spec/migrations/schedule_set_confidential_note_events_on_webhooks_spec.rb
@@ -31,7 +31,7 @@ describe ScheduleSetConfidentialNoteEventsOnWebhooks, :migration, :sidekiq do
end
it 'correctly processes web hooks' do
- Sidekiq::Testing.inline! do
+ perform_enqueued_jobs do
expect(web_hooks_table.where(confidential_note_events: nil).count).to eq 4
expect(web_hooks_table.where(confidential_note_events: true).count).to eq 1
diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb
index 774a638b430..915bf134d57 100644
--- a/spec/models/ci/build_trace_chunk_spec.rb
+++ b/spec/models/ci/build_trace_chunk_spec.rb
@@ -179,7 +179,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
end
it 'migrates data to object storage' do
- Sidekiq::Testing.inline! do
+ perform_enqueued_jobs do
subject
build_trace_chunk.reload
@@ -201,7 +201,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
end
it 'does not migrate data to object storage' do
- Sidekiq::Testing.inline! do
+ perform_enqueued_jobs do
data_store = build_trace_chunk.data_store
subject
diff --git a/spec/models/deploy_token_spec.rb b/spec/models/deploy_token_spec.rb
index f8d51a95833..cd84a684fec 100644
--- a/spec/models/deploy_token_spec.rb
+++ b/spec/models/deploy_token_spec.rb
@@ -62,11 +62,18 @@ describe DeployToken do
end
end
- context "when it hasn't been revoked" do
+ context "when it hasn't been revoked and is not expired" do
it 'should return true' do
expect(deploy_token.active?).to be_truthy
end
end
+
+ context "when it hasn't been revoked and is expired" do
+ it 'should return false' do
+ deploy_token.update_attribute(:expires_at, Date.today - 5.days)
+ expect(deploy_token.active?).to be_falsy
+ end
+ end
end
describe '#username' do
diff --git a/spec/models/project_services/hangouts_chat_service_spec.rb b/spec/models/project_services/hangouts_chat_service_spec.rb
new file mode 100644
index 00000000000..cfa55188a64
--- /dev/null
+++ b/spec/models/project_services/hangouts_chat_service_spec.rb
@@ -0,0 +1,246 @@
+require 'spec_helper'
+
+describe HangoutsChatService do
+ describe 'Associations' do
+ it { is_expected.to belong_to :project }
+ it { is_expected.to have_one :service_hook }
+ end
+
+ describe 'Validations' do
+ context 'when service is active' do
+ before do
+ subject.active = true
+ end
+
+ it { is_expected.to validate_presence_of(:webhook) }
+ it_behaves_like 'issue tracker service URL attribute', :webhook
+ end
+
+ context 'when service is inactive' do
+ before do
+ subject.active = false
+ end
+
+ it { is_expected.not_to validate_presence_of(:webhook) }
+ end
+ end
+
+ describe '#execute' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :repository) }
+ let(:webhook_url) { 'https://example.gitlab.com/' }
+
+ before do
+ allow(subject).to receive_messages(
+ project: project,
+ project_id: project.id,
+ service_hook: true,
+ webhook: webhook_url
+ )
+
+ WebMock.stub_request(:post, webhook_url)
+ end
+
+ shared_examples 'Hangouts Chat service' do
+ it 'calls Hangouts Chat API' do
+ subject.execute(sample_data)
+
+ expect(WebMock)
+ .to have_requested(:post, webhook_url)
+ .with { |req| req.body =~ /\A{"text":.+}\Z/ }
+ .once
+ end
+ end
+
+ context 'with push events' do
+ let(:sample_data) do
+ Gitlab::DataBuilder::Push.build_sample(project, user)
+ end
+
+ it_behaves_like 'Hangouts Chat service'
+
+ it 'specifies the webhook when it is configured' do
+ expect(HangoutsChat::Sender).to receive(:new).with(webhook_url).and_return(double(:hangouts_chat_service).as_null_object)
+
+ subject.execute(sample_data)
+ end
+
+ context 'with a non-default branch' do
+ let(:sample_data) do
+ Gitlab::DataBuilder::Push.build(project, user, nil, nil, 'not-the-default-branch')
+ end
+
+ context 'when notify_only_default_branch enabled' do
+ before do
+ subject.notify_only_default_branch = true
+ end
+
+ it 'does not call the Hangouts Chat API' do
+ result = subject.execute(sample_data)
+
+ expect(result).to be_falsy
+ end
+ end
+
+ context 'when notify_only_default_branch disabled' do
+ before do
+ subject.notify_only_default_branch = false
+ end
+
+ it_behaves_like 'Hangouts Chat service'
+ end
+ end
+ end
+
+ context 'with issue events' do
+ let(:opts) { { title: 'Awesome issue', description: 'please fix' } }
+ let(:sample_data) do
+ service = Issues::CreateService.new(project, user, opts)
+ issue = service.execute
+ service.hook_data(issue, 'open')
+ end
+
+ it_behaves_like 'Hangouts Chat service'
+ end
+
+ context 'with merge events' do
+ let(:opts) do
+ {
+ title: 'Awesome merge_request',
+ description: 'please fix',
+ source_branch: 'feature',
+ target_branch: 'master'
+ }
+ end
+
+ let(:sample_data) do
+ service = MergeRequests::CreateService.new(project, user, opts)
+ merge_request = service.execute
+ service.hook_data(merge_request, 'open')
+ end
+
+ before do
+ project.add_developer(user)
+ end
+
+ it_behaves_like 'Hangouts Chat service'
+ end
+
+ context 'with wiki page events' do
+ let(:opts) do
+ {
+ title: 'Awesome wiki_page',
+ content: 'Some text describing some thing or another',
+ format: 'md',
+ message: 'user created page: Awesome wiki_page'
+ }
+ end
+ let(:wiki_page) { create(:wiki_page, wiki: project.wiki, attrs: opts) }
+ let(:sample_data) { Gitlab::DataBuilder::WikiPage.build(wiki_page, user, 'create') }
+
+ it_behaves_like 'Hangouts Chat service'
+ end
+
+ context 'with note events' do
+ let(:sample_data) { Gitlab::DataBuilder::Note.build(note, user) }
+
+ context 'with commit comment' do
+ let(:note) do
+ create(:note_on_commit, author: user,
+ project: project,
+ commit_id: project.repository.commit.id,
+ note: 'a comment on a commit')
+ end
+
+ it_behaves_like 'Hangouts Chat service'
+ end
+
+ context 'with merge request comment' do
+ let(:note) do
+ create(:note_on_merge_request, project: project,
+ note: 'merge request note')
+ end
+
+ it_behaves_like 'Hangouts Chat service'
+ end
+
+ context 'with issue comment' do
+ let(:note) do
+ create(:note_on_issue, project: project, note: 'issue note')
+ end
+
+ it_behaves_like 'Hangouts Chat service'
+ end
+
+ context 'with snippet comment' do
+ let(:note) do
+ create(:note_on_project_snippet, project: project,
+ note: 'snippet note')
+ end
+
+ it_behaves_like 'Hangouts Chat service'
+ end
+ end
+
+ context 'with pipeline events' do
+ let(:pipeline) do
+ create(:ci_pipeline,
+ project: project, status: status,
+ sha: project.commit.sha, ref: project.default_branch)
+ end
+ let(:sample_data) { Gitlab::DataBuilder::Pipeline.build(pipeline) }
+
+ context 'with failed pipeline' do
+ let(:status) { 'failed' }
+
+ it_behaves_like 'Hangouts Chat service'
+ end
+
+ context 'with succeeded pipeline' do
+ let(:status) { 'success' }
+
+ context 'with default notify_only_broken_pipelines' do
+ it 'does not call Hangouts Chat API' do
+ result = subject.execute(sample_data)
+
+ expect(result).to be_falsy
+ end
+ end
+
+ context 'when notify_only_broken_pipelines is false' do
+ before do
+ subject.notify_only_broken_pipelines = false
+ end
+
+ it_behaves_like 'Hangouts Chat service'
+ end
+ end
+
+ context 'with a non-default branch' do
+ let(:pipeline) do
+ create(:ci_pipeline, project: project, status: 'failed', ref: 'not-the-default-branch')
+ end
+
+ context 'when notify_only_default_branch enabled' do
+ before do
+ subject.notify_only_default_branch = true
+ end
+
+ it 'does not call the Hangouts Chat API' do
+ result = subject.execute(sample_data)
+
+ expect(result).to be_falsy
+ end
+ end
+
+ context 'when notify_only_default_branch disabled' do
+ before do
+ subject.notify_only_default_branch = false
+ end
+
+ it_behaves_like 'Hangouts Chat service'
+ end
+ end
+ end
+ end
+end
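(Editorial aside, not part of the diff: the new Hangouts Chat spec relies on WebMock's stub-then-assert pattern. A hedged standalone sketch of that pattern; the URL and payload are placeholders, not GitLab code.)

require 'rspec/autorun'
require 'webmock/rspec'
require 'net/http'
require 'json'

RSpec.describe 'posting to a chat webhook' do
  let(:webhook_url) { 'https://example.gitlab.com/' }

  before do
    # Intercept outgoing HTTP so no real request leaves the test process.
    WebMock.stub_request(:post, webhook_url)
  end

  it 'records the outgoing request' do
    Net::HTTP.post(URI(webhook_url), { text: 'hello' }.to_json,
                   'Content-Type' => 'application/json')

    expect(WebMock)
      .to have_requested(:post, webhook_url)
      .with { |req| req.body =~ /\A{"text":.+}\Z/ }
      .once
  end
end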
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index d200e5f2e42..b0ec725bf70 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -26,6 +26,7 @@ describe Project do
it { is_expected.to have_one(:slack_service) }
it { is_expected.to have_one(:microsoft_teams_service) }
it { is_expected.to have_one(:mattermost_service) }
+ it { is_expected.to have_one(:hangouts_chat_service) }
it { is_expected.to have_one(:packagist_service) }
it { is_expected.to have_one(:pushover_service) }
it { is_expected.to have_one(:asana_service) }
diff --git a/spec/models/spam_log_spec.rb b/spec/models/spam_log_spec.rb
index 0d6b4384ada..90a2caaeb88 100644
--- a/spec/models/spam_log_spec.rb
+++ b/spec/models/spam_log_spec.rb
@@ -22,7 +22,7 @@ describe SpamLog do
spam_log = build(:spam_log)
user = spam_log.user
- Sidekiq::Testing.inline! do
+ perform_enqueued_jobs do
spam_log.remove_user(deleted_by: admin)
end
diff --git a/spec/requests/api/environments_spec.rb b/spec/requests/api/environments_spec.rb
index bf93555b0f0..f3db0c122a0 100644
--- a/spec/requests/api/environments_spec.rb
+++ b/spec/requests/api/environments_spec.rb
@@ -20,7 +20,7 @@ describe API::Environments do
path path_with_namespace
star_count forks_count
created_at last_activity_at
- avatar_url
+ avatar_url namespace
)
get api("/projects/#{project.id}/environments", user)
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index 65b387a2170..3a8948f8477 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -226,6 +226,25 @@ describe API::Groups do
expect(json_response.first['name']).to eq(group2.name)
end
end
+
+ context 'when using min_access_level in the request' do
+ let!(:group3) { create(:group, :private) }
+ let(:response_groups) { json_response.map { |group| group['id'] } }
+
+ before do
+ group1.add_developer(user2)
+ group3.add_master(user2)
+ end
+
+ it 'returns an array of groups where the user has at least master access' do
+ get api('/groups', user2), min_access_level: 40
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(response_groups).to eq([group2.id, group3.id])
+ end
+ end
end
describe "GET /groups/:id" do
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index 7d1a5c12805..8412d0383f7 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -220,6 +220,7 @@ describe API::Jobs do
expect(Time.parse(json_response['finished_at'])).to be_like_time(job.finished_at)
expect(Time.parse(json_response['artifacts_expire_at'])).to be_like_time(job.artifacts_expire_at)
expect(json_response['duration']).to eq(job.duration)
+ expect(json_response['web_url']).to be_present
end
it 'returns pipeline data' do
diff --git a/spec/requests/api/pipelines_spec.rb b/spec/requests/api/pipelines_spec.rb
index e2ca27f5d41..342a97b6a69 100644
--- a/spec/requests/api/pipelines_spec.rb
+++ b/spec/requests/api/pipelines_spec.rb
@@ -24,7 +24,8 @@ describe API::Pipelines do
expect(json_response).to be_an Array
expect(json_response.first['sha']).to match /\A\h{40}\z/
expect(json_response.first['id']).to eq pipeline.id
- expect(json_response.first.keys).to contain_exactly(*%w[id sha ref status])
+ expect(json_response.first['web_url']).to be_present
+ expect(json_response.first.keys).to contain_exactly(*%w[id sha ref status web_url])
end
context 'when parameter is passed' do
diff --git a/spec/requests/api/project_import_spec.rb b/spec/requests/api/project_import_spec.rb
index 41243854ebc..55332f56508 100644
--- a/spec/requests/api/project_import_spec.rb
+++ b/spec/requests/api/project_import_spec.rb
@@ -102,7 +102,7 @@ describe API::ProjectImport do
it 'correctly overrides params during the import' do
override_params = { 'description' => 'Hello world' }
- Sidekiq::Testing.inline! do
+ perform_enqueued_jobs do
post api('/projects/import', user),
path: 'test-import',
file: fixture_file_upload(file),
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index f72c01561d8..71e3436fa76 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -225,7 +225,7 @@ describe API::Projects do
path path_with_namespace
star_count forks_count
created_at last_activity_at
- avatar_url
+ avatar_url namespace
)
get api('/projects?simple=true', user)
@@ -400,6 +400,22 @@ describe API::Projects do
end
end
end
+
+ context 'and with min_access_level' do
+ before do
+ project2.add_master(user2)
+ project3.add_developer(user2)
+ project4.add_reporter(user2)
+ end
+
+ it 'returns an array of projects where the user has at least developer access' do
+ get api('/projects', user2), { min_access_level: 30 }
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.map { |project| project['id'] }).to contain_exactly(project2.id, project3.id)
+ end
+ end
end
context 'when authenticated as a different user' do
@@ -681,6 +697,20 @@ describe API::Projects do
expect(json_response).to be_an Array
expect(json_response.map { |project| project['id'] }).to contain_exactly(public_project.id)
end
+
+ it 'returns projects filtered by minimal access level' do
+ private_project1 = create(:project, :private, name: 'private_project1', creator_id: user4.id, namespace: user4.namespace)
+ private_project2 = create(:project, :private, name: 'private_project2', creator_id: user4.id, namespace: user4.namespace)
+ private_project1.add_developer(user2)
+ private_project2.add_reporter(user2)
+
+ get api("/users/#{user4.id}/projects/", user2), { min_access_level: 30 }
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.map { |project| project['id'] }).to contain_exactly(private_project1.id)
+ end
end
describe 'POST /projects/user/:id' do
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index a97c3f3461a..6a051f865aa 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -11,6 +11,7 @@ describe API::Users do
let(:ldap_blocked_user) { create(:omniauth_user, provider: 'ldapmain', state: 'ldap_blocked') }
let(:not_existing_user_id) { (User.maximum('id') || 0 ) + 10 }
let(:not_existing_pat_id) { (PersonalAccessToken.maximum('id') || 0 ) + 10 }
+ let(:private_user) { create(:user, private_profile: true) }
describe 'GET /users' do
context "when unauthenticated" do
@@ -254,6 +255,13 @@ describe API::Users do
expect(response).to match_response_schema('public_api/v4/user/admin')
expect(json_response['is_admin']).to be(false)
end
+
+ it "includes the `created_at` field for private users" do
+ get api("/users/#{private_user.id}", admin)
+
+ expect(response).to match_response_schema('public_api/v4/user/admin')
+ expect(json_response.keys).to include 'created_at'
+ end
end
context 'for an anonymous user' do
@@ -272,6 +280,20 @@ describe API::Users do
expect(response).to have_gitlab_http_status(404)
end
+
+ it "returns the `created_at` field for public users" do
+ get api("/users/#{user.id}")
+
+ expect(response).to match_response_schema('public_api/v4/user/basic')
+ expect(json_response.keys).to include 'created_at'
+ end
+
+ it "does not return the `created_at` field for private users" do
+ get api("/users/#{private_user.id}")
+
+ expect(response).to match_response_schema('public_api/v4/user/basic')
+ expect(json_response.keys).not_to include 'created_at'
+ end
end
it "returns a 404 error if user id not found" do
@@ -374,6 +396,18 @@ describe API::Users do
expect(new_user.recently_sent_password_reset?).to eq(true)
end
+ it "creates user with private profile" do
+ post api('/users', admin), attributes_for(:user, private_profile: true)
+
+ expect(response).to have_gitlab_http_status(201)
+
+ user_id = json_response['id']
+ new_user = User.find(user_id)
+
+ expect(new_user).not_to eq(nil)
+ expect(new_user.private_profile?).to eq(true)
+ end
+
it "does not create user with invalid email" do
post api('/users', admin),
email: 'invalid email',
@@ -583,6 +617,13 @@ describe API::Users do
expect(user.reload.external?).to be_truthy
end
+ it "updates private profile" do
+ put api("/users/#{user.id}", admin), { private_profile: true }
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(user.reload.private_profile).to eq(true)
+ end
+
it "does not update admin status" do
put api("/users/#{admin_user.id}", admin), { can_create_group: false }
@@ -1067,7 +1108,7 @@ describe API::Users do
end
it "deletes user" do
- Sidekiq::Testing.inline! { delete api("/users/#{user.id}", admin) }
+ perform_enqueued_jobs { delete api("/users/#{user.id}", admin) }
expect(response).to have_gitlab_http_status(204)
expect { User.find(user.id) }.to raise_error ActiveRecord::RecordNotFound
@@ -1079,30 +1120,30 @@ describe API::Users do
end
it "does not delete for unauthenticated user" do
- Sidekiq::Testing.inline! { delete api("/users/#{user.id}") }
+ perform_enqueued_jobs { delete api("/users/#{user.id}") }
expect(response).to have_gitlab_http_status(401)
end
it "is not available for non admin users" do
- Sidekiq::Testing.inline! { delete api("/users/#{user.id}", user) }
+ perform_enqueued_jobs { delete api("/users/#{user.id}", user) }
expect(response).to have_gitlab_http_status(403)
end
it "returns 404 for non-existing user" do
- Sidekiq::Testing.inline! { delete api("/users/999999", admin) }
+ perform_enqueued_jobs { delete api("/users/999999", admin) }
expect(response).to have_gitlab_http_status(404)
expect(json_response['message']).to eq('404 User Not Found')
end
it "returns a 404 for invalid ID" do
- Sidekiq::Testing.inline! { delete api("/users/ASDF", admin) }
+ perform_enqueued_jobs { delete api("/users/ASDF", admin) }
expect(response).to have_gitlab_http_status(404)
end
context "hard delete disabled" do
it "moves contributions to the ghost user" do
- Sidekiq::Testing.inline! { delete api("/users/#{user.id}", admin) }
+ perform_enqueued_jobs { delete api("/users/#{user.id}", admin) }
expect(response).to have_gitlab_http_status(204)
expect(issue.reload).to be_persisted
@@ -1112,7 +1153,7 @@ describe API::Users do
context "hard delete enabled" do
it "removes contributions" do
- Sidekiq::Testing.inline! { delete api("/users/#{user.id}?hard_delete=true", admin) }
+ perform_enqueued_jobs { delete api("/users/#{user.id}?hard_delete=true", admin) }
expect(response).to have_gitlab_http_status(204)
expect(Issue.exists?(issue.id)).to be_falsy
diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb
index 0f3e7157e14..c71eae9164a 100644
--- a/spec/requests/git_http_spec.rb
+++ b/spec/requests/git_http_spec.rb
@@ -381,6 +381,10 @@ describe 'Git HTTP requests' do
context "when authentication fails" do
context "when the user is IP banned" do
+ before do
+ Gitlab.config.rack_attack.git_basic_auth['enabled'] = true
+ end
+
it "responds with status 401" do
expect(Rack::Attack::Allow2Ban).to receive(:filter).and_return(true)
allow_any_instance_of(Rack::Request).to receive(:ip).and_return('1.2.3.4')
@@ -420,6 +424,10 @@ describe 'Git HTTP requests' do
end
context "when the user isn't blocked" do
+ before do
+ Gitlab.config.rack_attack.git_basic_auth['enabled'] = true
+ end
+
it "resets the IP in Rack Attack on download" do
expect(Rack::Attack::Allow2Ban).to receive(:reset).twice
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index de39abdb746..c2378646f89 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -575,6 +575,40 @@ describe 'Git LFS API and storage' do
end
end
+ context 'when using Deploy Tokens' do
+ let(:project) { create(:project, :repository) }
+ let(:authorization) { authorize_deploy_token }
+ let(:update_user_permissions) { nil }
+ let(:role) { nil }
+ let(:update_lfs_permissions) do
+ project.lfs_objects << lfs_object
+ end
+
+ context 'when Deploy Token is valid' do
+ let(:deploy_token) { create(:deploy_token, projects: [project]) }
+
+ it_behaves_like 'an authorized requests'
+ end
+
+ context 'when Deploy Token is not valid' do
+ let(:deploy_token) { create(:deploy_token, projects: [project], read_repository: false) }
+
+ it 'responds with access denied' do
+ expect(response).to have_gitlab_http_status(401)
+ end
+ end
+
+ context 'when Deploy Token is not related to the project' do
+ let(:another_project) { create(:project, :repository) }
+ let(:deploy_token) { create(:deploy_token, projects: [another_project]) }
+
+ it 'responds with access forbidden' do
+ # We render 404 to prevent leaking the existence of the project
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+
context 'when build is authorized as' do
let(:authorization) { authorize_ci_project }
@@ -1381,6 +1415,10 @@ describe 'Git LFS API and storage' do
ActionController::HttpAuthentication::Basic.encode_credentials(user.username, Gitlab::LfsToken.new(user).token)
end
+ def authorize_deploy_token
+ ActionController::HttpAuthentication::Basic.encode_credentials(deploy_token.username, deploy_token.token)
+ end
+
def post_lfs_json(url, body = nil, headers = nil)
post(url, body.try(:to_json), (headers || {}).merge('Content-Type' => LfsRequest::CONTENT_TYPE))
end
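(Editorial aside, not part of the diff: the deploy-token cases above authenticate with an HTTP Basic credential built from the token's username and secret. A minimal sketch of building such a header by hand in plain Ruby; the username and token values are placeholders.)

require 'base64'

def basic_auth_header(username, token)
  # Same encoding ActionController::HttpAuthentication::Basic.encode_credentials produces.
  "Basic #{Base64.strict_encode64("#{username}:#{token}")}"
end

puts basic_auth_header('gitlab+deploy-token-1', 'example-token')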
diff --git a/spec/services/groups/destroy_service_spec.rb b/spec/services/groups/destroy_service_spec.rb
index a9baccd061a..b54491cf5f9 100644
--- a/spec/services/groups/destroy_service_spec.rb
+++ b/spec/services/groups/destroy_service_spec.rb
@@ -49,7 +49,7 @@ describe Groups::DestroyService do
context 'Sidekiq inline' do
before do
# Run sidekiq immediately to check that renamed dir will be removed
- Sidekiq::Testing.inline! { destroy_group(group, user, async) }
+ perform_enqueued_jobs { destroy_group(group, user, async) }
end
it 'verifies that paths have been deleted' do
diff --git a/spec/services/projects/create_from_template_service_spec.rb b/spec/services/projects/create_from_template_service_spec.rb
index 9aa9237d875..a43da01f37e 100644
--- a/spec/services/projects/create_from_template_service_spec.rb
+++ b/spec/services/projects/create_from_template_service_spec.rb
@@ -28,7 +28,7 @@ describe Projects::CreateFromTemplateService do
context 'the result project' do
before do
- Sidekiq::Testing.inline! do
+ perform_enqueued_jobs do
@project = subject.execute
end
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index 38660ad7a01..e428808ab68 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -45,18 +45,18 @@ describe Projects::DestroyService do
shared_examples 'handles errors thrown during async destroy' do |error_message|
it 'does not allow the error to bubble up' do
expect do
- Sidekiq::Testing.inline! { destroy_project(project, user, {}) }
+ perform_enqueued_jobs { destroy_project(project, user, {}) }
end.not_to raise_error
end
it 'unmarks the project as "pending deletion"' do
- Sidekiq::Testing.inline! { destroy_project(project, user, {}) }
+ perform_enqueued_jobs { destroy_project(project, user, {}) }
expect(project.reload.pending_delete).to be(false)
end
it 'stores an error message in `projects.delete_error`' do
- Sidekiq::Testing.inline! { destroy_project(project, user, {}) }
+ perform_enqueued_jobs { destroy_project(project, user, {}) }
expect(project.reload.delete_error).to be_present
expect(project.delete_error).to include(error_message)
@@ -66,7 +66,7 @@ describe Projects::DestroyService do
context 'Sidekiq inline' do
before do
# Run sidekiq immediately to check that renamed repository will be removed
- Sidekiq::Testing.inline! { destroy_project(project, user, {}) }
+ perform_enqueued_jobs { destroy_project(project, user, {}) }
end
context 'when has remote mirrors' do
@@ -110,7 +110,7 @@ describe Projects::DestroyService do
end
it 'keeps project team intact upon an error' do
- Sidekiq::Testing.inline! do
+ perform_enqueued_jobs do
begin
destroy_project(project, user, {})
rescue ::Redis::CannotConnectError
@@ -128,7 +128,7 @@ describe Projects::DestroyService do
before do
project.project_feature.update_attribute("issues_access_level", ProjectFeature::PRIVATE)
# Run sidekiq immediately to check that renamed repository will be removed
- Sidekiq::Testing.inline! { destroy_project(project, user, {}) }
+ perform_enqueued_jobs { destroy_project(project, user, {}) }
end
it_behaves_like 'deleting the project'
@@ -172,7 +172,7 @@ describe Projects::DestroyService do
it 'allows error to bubble up and rolls back project deletion' do
expect do
- Sidekiq::Testing.inline! { destroy_project(project, user, {}) }
+ perform_enqueued_jobs { destroy_project(project, user, {}) }
end.to raise_error(Exception, 'Other error message')
expect(project.reload.pending_delete).to be(false)
diff --git a/spec/services/projects/housekeeping_service_spec.rb b/spec/services/projects/housekeeping_service_spec.rb
index 1cf373d1d72..18ecef1c0a1 100644
--- a/spec/services/projects/housekeeping_service_spec.rb
+++ b/spec/services/projects/housekeeping_service_spec.rb
@@ -35,7 +35,7 @@ describe Projects::HousekeepingService do
allow(subject).to receive(:gc_period).and_return(1)
project.increment_pushes_since_gc
- Sidekiq::Testing.inline! do
+ perform_enqueued_jobs do
expect { subject.execute }.to change { project.pushes_since_gc }.to(0)
end
end
diff --git a/spec/services/projects/import_service_spec.rb b/spec/services/projects/import_service_spec.rb
index b3815045792..e2a600d12d1 100644
--- a/spec/services/projects/import_service_spec.rb
+++ b/spec/services/projects/import_service_spec.rb
@@ -69,7 +69,7 @@ describe Projects::ImportService do
result = subject.execute
expect(result[:status]).to eq :error
- expect(result[:message]).to eq "Error importing repository #{project.import_url} into #{project.full_path} - The repository could not be created."
+ expect(result[:message]).to eq "Error importing repository #{project.safe_import_url} into #{project.full_path} - The repository could not be created."
end
context 'when repository creation succeeds' do
@@ -141,7 +141,7 @@ describe Projects::ImportService do
result = subject.execute
expect(result[:status]).to eq :error
- expect(result[:message]).to eq "Error importing repository #{project.import_url} into #{project.full_path} - Failed to import the repository"
+ expect(result[:message]).to eq "Error importing repository #{project.safe_import_url} into #{project.full_path} - Failed to import the repository"
end
context 'when repository import scheduled' do
diff --git a/spec/services/users/destroy_service_spec.rb b/spec/services/users/destroy_service_spec.rb
index f82d4b483e7..3bae8bfbd42 100644
--- a/spec/services/users/destroy_service_spec.rb
+++ b/spec/services/users/destroy_service_spec.rb
@@ -173,7 +173,7 @@ describe Users::DestroyService do
describe "user personal's repository removal" do
before do
- Sidekiq::Testing.inline! { service.execute(user) }
+ perform_enqueued_jobs { service.execute(user) }
end
context 'legacy storage' do
diff --git a/spec/workers/storage_migrator_worker_spec.rb b/spec/workers/storage_migrator_worker_spec.rb
index 815432aacce..808084c8f7c 100644
--- a/spec/workers/storage_migrator_worker_spec.rb
+++ b/spec/workers/storage_migrator_worker_spec.rb
@@ -13,7 +13,7 @@ describe StorageMigratorWorker do
end
it 'migrates projects in the specified range' do
- Sidekiq::Testing.inline! do
+ perform_enqueued_jobs do
worker.perform(ids.min, ids.max)
end