author    Filipa Lacerda <filipa@gitlab.com>  2017-09-22 09:37:19 +0100
committer Filipa Lacerda <filipa@gitlab.com>  2017-09-22 09:37:19 +0100
commit    07b0d933b523b22464c72e0dd85bc413f455b72f (patch)
tree      e70464a937516169b1fdff89a0c1518236ce5bf8 /spec
parent    8ba9c2bd6d10c74529a0e2e6bb894c34614966f1 (diff)
parent    d103e95513704314a38ab8ae441851524031c4a1 (diff)
download  gitlab-ce-07b0d933b523b22464c72e0dd85bc413f455b72f.tar.gz
Merge branch 'master' into 31050-registry-image-lists
* master: (112 commits)
  Replace the 'project/service.feature' spinach test with an rspec analog
  Removed two legacy config options
  Fix rendering double note issue.
  IssueNotes: Switch back to Write pane when note cancel or submit.
  Upgrade Nokogiri because of CVE-2017-9050
  Bump VERSION to 10.1.0-pre
  Standardize access to CSRF token in JavaScript
  Do not clone the repo when running the review-docs jobs
  Don't memoize storage configuration on `FsShardsCheck`
  Document that group Owners can always create subgroups
  Auto DevOps docs cleanup
  Display full pre-receive and post-receive hook output in GitLab UI
  Adds EE tag detection to remove_old in gitlab backup.
  Correctly detect multiple issue URLs after 'Closes...' in MR descriptions
  new sharing permissions
  IssueNotes: Resize comment form after note submit and discard.
  [skip ci] Add changelog
  Make resolve discussion icon gray
  update spacing
  Deleted another Fixture due to User Callout
  ...
Diffstat (limited to 'spec')
-rw-r--r--  spec/controllers/health_controller_spec.rb | 1
-rw-r--r--  spec/features/projects/diffs/diff_show_spec.rb | 40
-rw-r--r--  spec/features/projects/services/slack_service_spec.rb | 26
-rw-r--r--  spec/features/projects/services/user_activates_asana_spec.rb | 24
-rw-r--r--  spec/features/projects/services/user_activates_assembla_spec.rb | 23
-rw-r--r--  spec/features/projects/services/user_activates_atlassian_bamboo_ci_spec.rb | 31
-rw-r--r--  spec/features/projects/services/user_activates_emails_on_push_spec.rb | 23
-rw-r--r--  spec/features/projects/services/user_activates_flowdock_spec.rb | 23
-rw-r--r--  spec/features/projects/services/user_activates_hipchat_spec.rb | 38
-rw-r--r--  spec/features/projects/services/user_activates_irker_spec.rb | 24
-rw-r--r--  spec/features/projects/services/user_activates_jetbrains_teamcity_ci_spec.rb | 26
-rw-r--r--  spec/features/projects/services/user_activates_jira_spec.rb (renamed from spec/features/projects/services/jira_service_spec.rb) | 2
-rw-r--r--  spec/features/projects/services/user_activates_mattermost_slash_command_spec.rb (renamed from spec/features/projects/services/mattermost_slash_command_spec.rb) | 0
-rw-r--r--  spec/features/projects/services/user_activates_pivotaltracker_spec.rb | 23
-rw-r--r--  spec/features/projects/services/user_activates_pushover_spec.rb | 27
-rw-r--r--  spec/features/projects/services/user_activates_slack_notifications_spec.rb | 54
-rw-r--r--  spec/features/projects/services/user_activates_slack_slash_command_spec.rb (renamed from spec/features/projects/services/slack_slash_command_spec.rb) | 0
-rw-r--r--  spec/features/projects/services/user_views_services_spec.rb | 25
-rw-r--r--  spec/features/projects/user_edits_files_spec.rb | 16
-rw-r--r--  spec/features/user_callout_spec.rb | 55
-rw-r--r--  spec/finders/fork_projects_finder_spec.rb | 43
-rw-r--r--  spec/javascripts/environments/folder/environments_folder_view_spec.js | 4
-rw-r--r--  spec/javascripts/feature_highlight/feature_highlight_helper_spec.js | 219
-rw-r--r--  spec/javascripts/feature_highlight/feature_highlight_options_spec.js | 45
-rw-r--r--  spec/javascripts/feature_highlight/feature_highlight_spec.js | 122
-rw-r--r--  spec/javascripts/fixtures/dashboard.rb | 35
-rw-r--r--  spec/javascripts/lib/utils/common_utils_spec.js | 631
-rw-r--r--  spec/javascripts/lib/utils/csrf_token_spec.js | 49
-rw-r--r--  spec/javascripts/merge_request_tabs_spec.js | 7
-rw-r--r--  spec/javascripts/notes/components/issue_comment_form_spec.js | 14
-rw-r--r--  spec/javascripts/notes/stores/actions_spec.js | 1
-rw-r--r--  spec/javascripts/notes/stores/mutation_spec.js | 30
-rw-r--r--  spec/javascripts/pretty_time_spec.js | 282
-rw-r--r--  spec/javascripts/todos_spec.js | 29
-rw-r--r--  spec/javascripts/user_callout_spec.js | 49
-rw-r--r--  spec/javascripts/vue_mr_widget/mr_widget_options_spec.js | 28
-rw-r--r--  spec/lib/banzai/filter/milestone_reference_filter_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/backup/manager_spec.rb | 52
-rw-r--r--  spec/lib/gitlab/checks/force_push_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb | 30
-rw-r--r--  spec/lib/gitlab/ci/build/policy/refs_spec.rb | 87
-rw-r--r--  spec/lib/gitlab/ci/build/policy_spec.rb | 37
-rw-r--r--  spec/lib/gitlab/ci/yaml_processor_spec.rb | 251
-rw-r--r--  spec/lib/gitlab/closing_issue_extractor_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/gfm/reference_rewriter_spec.rb | 61
-rw-r--r--  spec/lib/gitlab/git/hook_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/git/repository_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/git/rev_list_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/gitaly_client/commit_service_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/gitaly_client_spec.rb | 124
-rw-r--r--  spec/migrations/clean_stages_statuses_migration_spec.rb | 51
-rw-r--r--  spec/models/ci/pipeline_spec.rb | 1
-rw-r--r--  spec/models/key_spec.rb | 31
-rw-r--r--  spec/models/milestone_spec.rb | 15
-rw-r--r--  spec/models/project_auto_devops_spec.rb | 16
-rw-r--r--  spec/models/repository_spec.rb | 24
-rw-r--r--  spec/requests/api/access_requests_spec.rb | 12
-rw-r--r--  spec/requests/api/award_emoji_spec.rb | 14
-rw-r--r--  spec/requests/api/boards_spec.rb | 33
-rw-r--r--  spec/requests/api/branches_spec.rb | 12
-rw-r--r--  spec/requests/api/broadcast_messages_spec.rb | 11
-rw-r--r--  spec/requests/api/groups_spec.rb | 3
-rw-r--r--  spec/requests/api/projects_spec.rb | 53
-rw-r--r--  spec/requests/api/v3/award_emoji_spec.rb | 14
-rw-r--r--  spec/requests/api/v3/boards_spec.rb | 25
-rw-r--r--  spec/requests/api/v3/branches_spec.rb | 10
-rw-r--r--  spec/requests/api/v3/broadcast_messages_spec.rb | 6
-rw-r--r--  spec/requests/api/v3/builds_spec.rb | 12
-rw-r--r--  spec/requests/api/v3/issues_spec.rb | 19
-rw-r--r--  spec/serializers/pipeline_serializer_spec.rb | 10
-rw-r--r--  spec/services/issues/close_service_spec.rb | 2
-rw-r--r--  spec/services/issues/create_service_spec.rb | 2
-rw-r--r--  spec/services/issues/update_service_spec.rb | 7
-rw-r--r--  spec/services/keys/last_used_service_spec.rb | 68
-rw-r--r--  spec/services/merge_requests/close_service_spec.rb | 2
-rw-r--r--  spec/services/merge_requests/create_service_spec.rb | 2
-rw-r--r--  spec/services/projects/count_service_spec.rb | 4
-rw-r--r--  spec/services/system_note_service_spec.rb | 4
-rw-r--r--  spec/views/projects/pipelines_settings/_show.html.haml_spec.rb | 62
-rw-r--r--  spec/workers/use_key_worker_spec.rb | 23
80 files changed, 1886 insertions, 1396 deletions
diff --git a/spec/controllers/health_controller_spec.rb b/spec/controllers/health_controller_spec.rb
index cc389e554ad..9e9cf4f2c1f 100644
--- a/spec/controllers/health_controller_spec.rb
+++ b/spec/controllers/health_controller_spec.rb
@@ -10,6 +10,7 @@ describe HealthController do
before do
allow(Settings.monitoring).to receive(:ip_whitelist).and_return([whitelisted_ip])
+ stub_storage_settings({}) # Hide the broken storage
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
end
diff --git a/spec/features/projects/diffs/diff_show_spec.rb b/spec/features/projects/diffs/diff_show_spec.rb
index a6f52c9ef58..c1307ab640f 100644
--- a/spec/features/projects/diffs/diff_show_spec.rb
+++ b/spec/features/projects/diffs/diff_show_spec.rb
@@ -62,13 +62,43 @@ feature 'Diff file viewer', :js do
end
context 'Image file' do
- before do
- visit_commit('2f63565e7aac07bcdadb654e253078b727143ec4')
+ context 'Replaced' do
+ before do
+ visit_commit('2f63565e7aac07bcdadb654e253078b727143ec4')
+ end
+
+ it 'shows a rendered image' do
+ within('.diff-file[id="e986451b8f7397b617dbb6fffcb5539328c56921"]') do
+ expect(page).to have_css('img[alt="files/images/6049019_460s.jpg"]')
+ end
+ end
+
+ it 'shows view replaced and view file links' do
+ expect(page.all('.file-actions a').length).to eq 2
+ expect(page.all('.file-actions a')[0]).to have_content 'View replaced file @'
+ expect(page.all('.file-actions a')[1]).to have_content 'View file @'
+ end
+ end
+
+ context 'Added' do
+ before do
+ visit_commit('33f3729a45c02fc67d00adb1b8bca394b0e761d9')
+ end
+
+ it 'shows view file link' do
+ expect(page.all('.file-actions a').length).to eq 1
+ expect(page.all('.file-actions a')[0]).to have_content 'View file @'
+ end
end
- it 'shows a rendered image' do
- within('.diff-file[id="e986451b8f7397b617dbb6fffcb5539328c56921"]') do
- expect(page).to have_css('img[alt="files/images/6049019_460s.jpg"]')
+ context 'Deleted' do
+ before do
+ visit_commit('7fd7a459706ee87be6f855fd98ce8c552b15529a')
+ end
+
+ it 'shows view file link' do
+ expect(page.all('.file-actions a').length).to eq 1
+ expect(page.all('.file-actions a')[0]).to have_content 'View file @'
end
end
end
diff --git a/spec/features/projects/services/slack_service_spec.rb b/spec/features/projects/services/slack_service_spec.rb
deleted file mode 100644
index c10ec5e2987..00000000000
--- a/spec/features/projects/services/slack_service_spec.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-require 'spec_helper'
-
-feature 'Projects > Slack service > Setup events' do
- let(:user) { create(:user) }
- let(:service) { SlackService.new }
- let(:project) { create(:project, slack_service: service) }
-
- background do
- service.fields
- service.update_attributes(push_channel: 1, issue_channel: 2, merge_request_channel: 3, note_channel: 4, tag_push_channel: 5, pipeline_channel: 6, wiki_page_channel: 7)
- project.team << [user, :master]
- sign_in(user)
- end
-
- scenario 'user can filter events by channel' do
- visit edit_project_service_path(project, service)
-
- expect(page.find_field("service_push_channel").value).to have_content '1'
- expect(page.find_field("service_issue_channel").value).to have_content '2'
- expect(page.find_field("service_merge_request_channel").value).to have_content '3'
- expect(page.find_field("service_note_channel").value).to have_content '4'
- expect(page.find_field("service_tag_push_channel").value).to have_content '5'
- expect(page.find_field("service_pipeline_channel").value).to have_content '6'
- expect(page.find_field("service_wiki_page_channel").value).to have_content '7'
- end
-end
diff --git a/spec/features/projects/services/user_activates_asana_spec.rb b/spec/features/projects/services/user_activates_asana_spec.rb
new file mode 100644
index 00000000000..db836d2985c
--- /dev/null
+++ b/spec/features/projects/services/user_activates_asana_spec.rb
@@ -0,0 +1,24 @@
+require 'spec_helper'
+
+describe 'User activates Asana' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_settings_integrations_path(project))
+
+ click_link('Asana')
+ end
+
+ it 'activates service' do
+ check('Active')
+ fill_in('Api key', with: 'verySecret')
+ fill_in('Restrict to branch', with: 'verySecret')
+ click_button('Save')
+
+ expect(page).to have_content('Asana activated.')
+ end
+end
diff --git a/spec/features/projects/services/user_activates_assembla_spec.rb b/spec/features/projects/services/user_activates_assembla_spec.rb
new file mode 100644
index 00000000000..f099b332785
--- /dev/null
+++ b/spec/features/projects/services/user_activates_assembla_spec.rb
@@ -0,0 +1,23 @@
+require 'spec_helper'
+
+describe 'User activates Assembla' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_settings_integrations_path(project))
+
+ click_link('Assembla')
+ end
+
+ it 'activates service' do
+ check('Active')
+ fill_in('Token', with: 'verySecret')
+ click_button('Save')
+
+ expect(page).to have_content('Assembla activated.')
+ end
+end
diff --git a/spec/features/projects/services/user_activates_atlassian_bamboo_ci_spec.rb b/spec/features/projects/services/user_activates_atlassian_bamboo_ci_spec.rb
new file mode 100644
index 00000000000..a00c2e0ad99
--- /dev/null
+++ b/spec/features/projects/services/user_activates_atlassian_bamboo_ci_spec.rb
@@ -0,0 +1,31 @@
+require 'spec_helper'
+
+describe 'User activates Atlassian Bamboo CI' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_settings_integrations_path(project))
+
+ click_link('Atlassian Bamboo CI')
+ end
+
+ it 'activates service' do
+ check('Active')
+ fill_in('Bamboo url', with: 'http://bamboo.example.com')
+ fill_in('Build key', with: 'KEY')
+ fill_in('Username', with: 'user')
+ fill_in('Password', with: 'verySecret')
+ click_button('Save')
+
+ expect(page).to have_content('Atlassian Bamboo CI activated.')
+
+ # Password field should not be filled in.
+ click_link('Atlassian Bamboo CI')
+
+ expect(find_field('Enter new password').value).to be_nil
+ end
+end
diff --git a/spec/features/projects/services/user_activates_emails_on_push_spec.rb b/spec/features/projects/services/user_activates_emails_on_push_spec.rb
new file mode 100644
index 00000000000..3769875b29c
--- /dev/null
+++ b/spec/features/projects/services/user_activates_emails_on_push_spec.rb
@@ -0,0 +1,23 @@
+require 'spec_helper'
+
+describe 'User activates Emails on push' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_settings_integrations_path(project))
+
+ click_link('Emails on push')
+ end
+
+ it 'activates service' do
+ check('Active')
+ fill_in('Recipients', with: 'qa@company.name')
+ click_button('Save')
+
+ expect(page).to have_content('Emails on push activated.')
+ end
+end
diff --git a/spec/features/projects/services/user_activates_flowdock_spec.rb b/spec/features/projects/services/user_activates_flowdock_spec.rb
new file mode 100644
index 00000000000..5298d8acaf5
--- /dev/null
+++ b/spec/features/projects/services/user_activates_flowdock_spec.rb
@@ -0,0 +1,23 @@
+require 'spec_helper'
+
+describe 'User activates Flowdock' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_settings_integrations_path(project))
+
+ click_link('Flowdock')
+ end
+
+ it 'activates service' do
+ check('Active')
+ fill_in('Token', with: 'verySecret')
+ click_button('Save')
+
+ expect(page).to have_content('Flowdock activated.')
+ end
+end
diff --git a/spec/features/projects/services/user_activates_hipchat_spec.rb b/spec/features/projects/services/user_activates_hipchat_spec.rb
new file mode 100644
index 00000000000..a9bf16642c7
--- /dev/null
+++ b/spec/features/projects/services/user_activates_hipchat_spec.rb
@@ -0,0 +1,38 @@
+require 'spec_helper'
+
+describe 'User activates HipChat' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_settings_integrations_path(project))
+
+ click_link('HipChat')
+ end
+
+ context 'with standard settings' do
+ it 'activates service' do
+ check('Active')
+ fill_in('Room', with: 'gitlab')
+ fill_in('Token', with: 'verySecret')
+ click_button('Save')
+
+ expect(page).to have_content('HipChat activated.')
+ end
+ end
+
+ context 'with custom settings' do
+ it 'activates service' do
+ check('Active')
+ fill_in('Room', with: 'gitlab_custom')
+ fill_in('Token', with: 'secretCustom')
+ fill_in('Server', with: 'https://chat.example.com')
+ click_button('Save')
+
+ expect(page).to have_content('HipChat activated.')
+ end
+ end
+end
diff --git a/spec/features/projects/services/user_activates_irker_spec.rb b/spec/features/projects/services/user_activates_irker_spec.rb
new file mode 100644
index 00000000000..435663c818f
--- /dev/null
+++ b/spec/features/projects/services/user_activates_irker_spec.rb
@@ -0,0 +1,24 @@
+require 'spec_helper'
+
+describe 'User activates Irker (IRC gateway)' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_settings_integrations_path(project))
+
+ click_link('Irker (IRC gateway)')
+ end
+
+ it 'activates service' do
+ check('Active')
+ check('Colorize messages')
+ fill_in('Recipients', with: 'irc://chat.freenode.net/#commits')
+ click_button('Save')
+
+ expect(page).to have_content('Irker (IRC gateway) activated.')
+ end
+end
diff --git a/spec/features/projects/services/user_activates_jetbrains_teamcity_ci_spec.rb b/spec/features/projects/services/user_activates_jetbrains_teamcity_ci_spec.rb
new file mode 100644
index 00000000000..1048803fde8
--- /dev/null
+++ b/spec/features/projects/services/user_activates_jetbrains_teamcity_ci_spec.rb
@@ -0,0 +1,26 @@
+require 'spec_helper'
+
+describe 'User activates JetBrains TeamCity CI' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_settings_integrations_path(project))
+
+ click_link('JetBrains TeamCity CI')
+ end
+
+ it 'activates service' do
+ check('Active')
+ fill_in('Teamcity url', with: 'http://teamcity.example.com')
+ fill_in('Build type', with: 'GitlabTest_Build')
+ fill_in('Username', with: 'user')
+ fill_in('Password', with: 'verySecret')
+ click_button('Save')
+
+ expect(page).to have_content('JetBrains TeamCity CI activated.')
+ end
+end
diff --git a/spec/features/projects/services/jira_service_spec.rb b/spec/features/projects/services/user_activates_jira_spec.rb
index 65e3a487d4b..0a86292ae6c 100644
--- a/spec/features/projects/services/jira_service_spec.rb
+++ b/spec/features/projects/services/user_activates_jira_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Setup Jira service', :js do
+describe 'User activates Jira', :js do
let(:user) { create(:user) }
let(:project) { create(:project) }
let(:service) { project.create_jira_service }
diff --git a/spec/features/projects/services/mattermost_slash_command_spec.rb b/spec/features/projects/services/user_activates_mattermost_slash_command_spec.rb
index 95d5e8b14b9..95d5e8b14b9 100644
--- a/spec/features/projects/services/mattermost_slash_command_spec.rb
+++ b/spec/features/projects/services/user_activates_mattermost_slash_command_spec.rb
diff --git a/spec/features/projects/services/user_activates_pivotaltracker_spec.rb b/spec/features/projects/services/user_activates_pivotaltracker_spec.rb
new file mode 100644
index 00000000000..d5d109ba48b
--- /dev/null
+++ b/spec/features/projects/services/user_activates_pivotaltracker_spec.rb
@@ -0,0 +1,23 @@
+require 'spec_helper'
+
+describe 'User activates PivotalTracker' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_settings_integrations_path(project))
+
+ click_link('PivotalTracker')
+ end
+
+ it 'activates service' do
+ check('Active')
+ fill_in('Token', with: 'verySecret')
+ click_button('Save')
+
+ expect(page).to have_content('PivotalTracker activated.')
+ end
+end
diff --git a/spec/features/projects/services/user_activates_pushover_spec.rb b/spec/features/projects/services/user_activates_pushover_spec.rb
new file mode 100644
index 00000000000..9b7e8d62792
--- /dev/null
+++ b/spec/features/projects/services/user_activates_pushover_spec.rb
@@ -0,0 +1,27 @@
+require 'spec_helper'
+
+describe 'User activates Pushover' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_settings_integrations_path(project))
+
+ click_link('Pushover')
+ end
+
+ it 'activates service' do
+ check('Active')
+ fill_in('Api key', with: 'verySecret')
+ fill_in('User key', with: 'verySecret')
+ fill_in('Device', with: 'myDevice')
+ select('High Priority', from: 'Priority')
+ select('Bike', from: 'Sound')
+ click_button('Save')
+
+ expect(page).to have_content('Pushover activated.')
+ end
+end
diff --git a/spec/features/projects/services/user_activates_slack_notifications_spec.rb b/spec/features/projects/services/user_activates_slack_notifications_spec.rb
new file mode 100644
index 00000000000..fae9ebd1bd6
--- /dev/null
+++ b/spec/features/projects/services/user_activates_slack_notifications_spec.rb
@@ -0,0 +1,54 @@
+require 'spec_helper'
+
+describe 'User activates Slack notifications' do
+ let(:user) { create(:user) }
+ let(:service) { SlackService.new }
+ let(:project) { create(:project, slack_service: service) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+ end
+
+ context 'when service is not configured yet' do
+ before do
+ visit(project_settings_integrations_path(project))
+
+ click_link('Slack notifications')
+ end
+
+ it 'activates service' do
+ check('Active')
+ fill_in('Webhook', with: 'https://hooks.slack.com/services/SVRWFV0VVAR97N/B02R25XN3/ZBqu7xMupaEEICInN685')
+ click_button('Save')
+
+ expect(page).to have_content('Slack notifications activated.')
+ end
+ end
+
+ context 'when service is already configured' do
+ before do
+ service.fields
+ service.update_attributes(
+ push_channel: 1,
+ issue_channel: 2,
+ merge_request_channel: 3,
+ note_channel: 4,
+ tag_push_channel: 5,
+ pipeline_channel: 6,
+ wiki_page_channel: 7)
+
+ visit(edit_project_service_path(project, service))
+ end
+
+ it 'filters events by channel' do
+ expect(page.find_field('service_push_channel').value).to have_content('1')
+ expect(page.find_field('service_issue_channel').value).to have_content('2')
+ expect(page.find_field('service_merge_request_channel').value).to have_content('3')
+ expect(page.find_field('service_note_channel').value).to have_content('4')
+ expect(page.find_field('service_tag_push_channel').value).to have_content('5')
+ expect(page.find_field('service_pipeline_channel').value).to have_content('6')
+ expect(page.find_field('service_wiki_page_channel').value).to have_content('7')
+ end
+ end
+end
diff --git a/spec/features/projects/services/slack_slash_command_spec.rb b/spec/features/projects/services/user_activates_slack_slash_command_spec.rb
index a8baf126269..a8baf126269 100644
--- a/spec/features/projects/services/slack_slash_command_spec.rb
+++ b/spec/features/projects/services/user_activates_slack_slash_command_spec.rb
diff --git a/spec/features/projects/services/user_views_services_spec.rb b/spec/features/projects/services/user_views_services_spec.rb
new file mode 100644
index 00000000000..f86591c2633
--- /dev/null
+++ b/spec/features/projects/services/user_views_services_spec.rb
@@ -0,0 +1,25 @@
+require 'spec_helper'
+
+describe 'User views services' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_settings_integrations_path(project))
+ end
+
+ it 'shows the list of available services' do
+ expect(page).to have_content('Project services')
+ expect(page).to have_content('Campfire')
+ expect(page).to have_content('HipChat')
+ expect(page).to have_content('Assembla')
+ expect(page).to have_content('Pushover')
+ expect(page).to have_content('Atlassian Bamboo')
+ expect(page).to have_content('JetBrains TeamCity')
+ expect(page).to have_content('Asana')
+ expect(page).to have_content('Irker (IRC gateway)')
+ end
+end
diff --git a/spec/features/projects/user_edits_files_spec.rb b/spec/features/projects/user_edits_files_spec.rb
index 3129aad8473..19954313c23 100644
--- a/spec/features/projects/user_edits_files_spec.rb
+++ b/spec/features/projects/user_edits_files_spec.rb
@@ -20,8 +20,7 @@ describe 'User edits files' do
it 'inserts a content of a file', js: true do
click_link('.gitignore')
find('.js-edit-blob').click
-
- wait_for_requests
+ find('.file-editor', match: :first)
execute_script("ace.edit('editor').setValue('*.rbca')")
@@ -38,8 +37,7 @@ describe 'User edits files' do
it 'commits an edited file', js: true do
click_link('.gitignore')
find('.js-edit-blob').click
-
- wait_for_requests
+ find('.file-editor', match: :first)
execute_script("ace.edit('editor').setValue('*.rbca')")
fill_in(:commit_message, with: 'New commit message', visible: true)
@@ -56,7 +54,7 @@ describe 'User edits files' do
click_link('.gitignore')
find('.js-edit-blob').click
- wait_for_requests
+ find('.file-editor', match: :first)
execute_script("ace.edit('editor').setValue('*.rbca')")
fill_in(:commit_message, with: 'New commit message', visible: true)
@@ -67,15 +65,13 @@ describe 'User edits files' do
click_link('Changes')
- wait_for_requests
expect(page).to have_content('*.rbca')
end
it 'shows the diff of an edited file', js: true do
click_link('.gitignore')
find('.js-edit-blob').click
-
- wait_for_requests
+ find('.file-editor', match: :first)
execute_script("ace.edit('editor').setValue('*.rbca')")
click_link('Preview changes')
@@ -104,7 +100,7 @@ describe 'User edits files' do
"A fork of this project has been created that you can make changes in, so you can submit a merge request."
)
- wait_for_requests
+ find('.file-editor', match: :first)
execute_script("ace.edit('editor').setValue('*.rbca')")
@@ -120,7 +116,7 @@ describe 'User edits files' do
click_link('Fork')
- wait_for_requests
+ find('.file-editor', match: :first)
execute_script("ace.edit('editor').setValue('*.rbca')")
fill_in(:commit_message, with: 'New commit message', visible: true)
diff --git a/spec/features/user_callout_spec.rb b/spec/features/user_callout_spec.rb
deleted file mode 100644
index 37d66b618af..00000000000
--- a/spec/features/user_callout_spec.rb
+++ /dev/null
@@ -1,55 +0,0 @@
-require 'spec_helper'
-
-describe 'User Callouts', js: true do
- let(:user) { create(:user) }
- let(:another_user) { create(:user) }
- let(:project) { create(:project, path: 'gitlab', name: 'sample') }
-
- before do
- sign_in(user)
- project.team << [user, :master]
- end
-
- it 'takes you to the profile preferences when the link is clicked' do
- visit dashboard_projects_path
- click_link 'Check it out'
- expect(current_path).to eq profile_preferences_path
- end
-
- it 'does not show when cookie is set' do
- visit dashboard_projects_path
-
- within('.user-callout') do
- find('.close').trigger('click')
- end
-
- visit dashboard_projects_path
-
- expect(page).not_to have_selector('.user-callout')
- end
-
- describe 'user callout should appear in two routes' do
- it 'shows up on the user profile' do
- visit user_path(user)
- expect(find('.user-callout')).to have_content 'Customize your experience'
- end
-
- it 'shows up on the dashboard projects' do
- visit dashboard_projects_path
- expect(find('.user-callout')).to have_content 'Customize your experience'
- end
- end
-
- it 'hides the user callout when click on the dismiss icon' do
- visit user_path(user)
- within('.user-callout') do
- find('.close').click
- end
- expect(page).not_to have_selector('.user-callout')
- end
-
- it 'does not show callout on another users profile' do
- visit user_path(another_user)
- expect(page).not_to have_selector('.user-callout')
- end
-end
diff --git a/spec/finders/fork_projects_finder_spec.rb b/spec/finders/fork_projects_finder_spec.rb
new file mode 100644
index 00000000000..f0cef7ea406
--- /dev/null
+++ b/spec/finders/fork_projects_finder_spec.rb
@@ -0,0 +1,43 @@
+require 'spec_helper'
+
+describe ForkProjectsFinder do
+ let(:source_project) { create(:project, :empty_repo) }
+ let(:private_fork) { create(:project, :private, :empty_repo, name: 'A') }
+ let(:internal_fork) { create(:project, :internal, :empty_repo, name: 'B') }
+ let(:public_fork) { create(:project, :public, :empty_repo, name: 'C') }
+
+ let(:non_member) { create(:user) }
+ let(:private_fork_member) { create(:user) }
+
+ before do
+ private_fork.add_developer(private_fork_member)
+
+ source_project.forks << private_fork
+ source_project.forks << internal_fork
+ source_project.forks << public_fork
+ end
+
+ describe '#execute' do
+ let(:finder) { described_class.new(source_project, params: {}, current_user: current_user) }
+
+ subject { finder.execute }
+
+ describe 'without a user' do
+ let(:current_user) { nil }
+
+ it { is_expected.to eq([public_fork]) }
+ end
+
+ describe 'with a user' do
+ let(:current_user) { non_member }
+
+ it { is_expected.to eq([public_fork, internal_fork]) }
+ end
+
+ describe 'with a member' do
+ let(:current_user) { private_fork_member }
+
+ it { is_expected.to eq([public_fork, internal_fork, private_fork]) }
+ end
+ end
+end
diff --git a/spec/javascripts/environments/folder/environments_folder_view_spec.js b/spec/javascripts/environments/folder/environments_folder_view_spec.js
index fdaea5c0b0c..7e62d356bd2 100644
--- a/spec/javascripts/environments/folder/environments_folder_view_spec.js
+++ b/spec/javascripts/environments/folder/environments_folder_view_spec.js
@@ -14,6 +14,10 @@ describe('Environments Folder View', () => {
window.history.pushState({}, null, 'environments/folders/build');
});
+ afterEach(() => {
+ window.history.pushState({}, null, '/');
+ });
+
let component;
describe('successful request', () => {
diff --git a/spec/javascripts/feature_highlight/feature_highlight_helper_spec.js b/spec/javascripts/feature_highlight/feature_highlight_helper_spec.js
deleted file mode 100644
index 114d282e48a..00000000000
--- a/spec/javascripts/feature_highlight/feature_highlight_helper_spec.js
+++ /dev/null
@@ -1,219 +0,0 @@
-import Cookies from 'js-cookie';
-import {
- getCookieName,
- getSelector,
- showPopover,
- hidePopover,
- dismiss,
- mouseleave,
- mouseenter,
- setupDismissButton,
-} from '~/feature_highlight/feature_highlight_helper';
-
-describe('feature highlight helper', () => {
- describe('getCookieName', () => {
- it('returns `feature-highlighted-` prefix', () => {
- const cookieId = 'cookieId';
- expect(getCookieName(cookieId)).toEqual(`feature-highlighted-${cookieId}`);
- });
- });
-
- describe('getSelector', () => {
- it('returns js-feature-highlight selector', () => {
- const highlightId = 'highlightId';
- expect(getSelector(highlightId)).toEqual(`.js-feature-highlight[data-highlight=${highlightId}]`);
- });
- });
-
- describe('showPopover', () => {
- it('returns true when popover is shown', () => {
- const context = {
- hasClass: () => false,
- popover: () => {},
- addClass: () => {},
- };
-
- expect(showPopover.call(context)).toEqual(true);
- });
-
- it('returns false when popover is already shown', () => {
- const context = {
- hasClass: () => true,
- };
-
- expect(showPopover.call(context)).toEqual(false);
- });
-
- it('shows popover', (done) => {
- const context = {
- hasClass: () => false,
- popover: () => {},
- addClass: () => {},
- };
-
- spyOn(context, 'popover').and.callFake((method) => {
- expect(method).toEqual('show');
- done();
- });
-
- showPopover.call(context);
- });
-
- it('adds disable-animation and js-popover-show class', (done) => {
- const context = {
- hasClass: () => false,
- popover: () => {},
- addClass: () => {},
- };
-
- spyOn(context, 'addClass').and.callFake((classNames) => {
- expect(classNames).toEqual('disable-animation js-popover-show');
- done();
- });
-
- showPopover.call(context);
- });
- });
-
- describe('hidePopover', () => {
- it('returns true when popover is hidden', () => {
- const context = {
- hasClass: () => true,
- popover: () => {},
- removeClass: () => {},
- };
-
- expect(hidePopover.call(context)).toEqual(true);
- });
-
- it('returns false when popover is already hidden', () => {
- const context = {
- hasClass: () => false,
- };
-
- expect(hidePopover.call(context)).toEqual(false);
- });
-
- it('hides popover', (done) => {
- const context = {
- hasClass: () => true,
- popover: () => {},
- removeClass: () => {},
- };
-
- spyOn(context, 'popover').and.callFake((method) => {
- expect(method).toEqual('hide');
- done();
- });
-
- hidePopover.call(context);
- });
-
- it('removes disable-animation and js-popover-show class', (done) => {
- const context = {
- hasClass: () => true,
- popover: () => {},
- removeClass: () => {},
- };
-
- spyOn(context, 'removeClass').and.callFake((classNames) => {
- expect(classNames).toEqual('disable-animation js-popover-show');
- done();
- });
-
- hidePopover.call(context);
- });
- });
-
- describe('dismiss', () => {
- const context = {
- hide: () => {},
- };
-
- beforeEach(() => {
- spyOn(Cookies, 'set').and.callFake(() => {});
- spyOn(hidePopover, 'call').and.callFake(() => {});
- spyOn(context, 'hide').and.callFake(() => {});
- dismiss.call(context);
- });
-
- it('sets cookie to true', () => {
- expect(Cookies.set).toHaveBeenCalled();
- });
-
- it('calls hide popover', () => {
- expect(hidePopover.call).toHaveBeenCalled();
- });
-
- it('calls hide', () => {
- expect(context.hide).toHaveBeenCalled();
- });
- });
-
- describe('mouseleave', () => {
- it('calls hide popover if .popover:hover is false', () => {
- const fakeJquery = {
- length: 0,
- };
-
- spyOn($.fn, 'init').and.callFake(selector => (selector === '.popover:hover' ? fakeJquery : $.fn));
- spyOn(hidePopover, 'call');
- mouseleave();
- expect(hidePopover.call).toHaveBeenCalled();
- });
-
- it('does not call hide popover if .popover:hover is true', () => {
- const fakeJquery = {
- length: 1,
- };
-
- spyOn($.fn, 'init').and.callFake(selector => (selector === '.popover:hover' ? fakeJquery : $.fn));
- spyOn(hidePopover, 'call');
- mouseleave();
- expect(hidePopover.call).not.toHaveBeenCalled();
- });
- });
-
- describe('mouseenter', () => {
- const context = {};
-
- it('shows popover', () => {
- spyOn(showPopover, 'call').and.returnValue(false);
- mouseenter.call(context);
- expect(showPopover.call).toHaveBeenCalled();
- });
-
- it('registers mouseleave event if popover is showed', (done) => {
- spyOn(showPopover, 'call').and.returnValue(true);
- spyOn($.fn, 'on').and.callFake((eventName) => {
- expect(eventName).toEqual('mouseleave');
- done();
- });
- mouseenter.call(context);
- });
-
- it('does not register mouseleave event if popover is not showed', () => {
- spyOn(showPopover, 'call').and.returnValue(false);
- const spy = spyOn($.fn, 'on').and.callFake(() => {});
- mouseenter.call(context);
- expect(spy).not.toHaveBeenCalled();
- });
- });
-
- describe('setupDismissButton', () => {
- it('registers click event callback', (done) => {
- const context = {
- getAttribute: () => 'popoverId',
- dataset: {
- highlight: 'cookieId',
- },
- };
-
- spyOn($.fn, 'on').and.callFake((event) => {
- expect(event).toEqual('click');
- done();
- });
- setupDismissButton.call(context);
- });
- });
-});
diff --git a/spec/javascripts/feature_highlight/feature_highlight_options_spec.js b/spec/javascripts/feature_highlight/feature_highlight_options_spec.js
deleted file mode 100644
index 7feb361edec..00000000000
--- a/spec/javascripts/feature_highlight/feature_highlight_options_spec.js
+++ /dev/null
@@ -1,45 +0,0 @@
-import domContentLoaded from '~/feature_highlight/feature_highlight_options';
-import bp from '~/breakpoints';
-
-describe('feature highlight options', () => {
- describe('domContentLoaded', () => {
- const highlightOrder = [];
-
- beforeEach(() => {
- // Check for when highlightFeatures is called
- spyOn(highlightOrder, 'find').and.callFake(() => {});
- });
-
- it('should not call highlightFeatures when breakpoint is xs', () => {
- spyOn(bp, 'getBreakpointSize').and.returnValue('xs');
-
- domContentLoaded(highlightOrder);
- expect(bp.getBreakpointSize).toHaveBeenCalled();
- expect(highlightOrder.find).not.toHaveBeenCalled();
- });
-
- it('should not call highlightFeatures when breakpoint is sm', () => {
- spyOn(bp, 'getBreakpointSize').and.returnValue('sm');
-
- domContentLoaded(highlightOrder);
- expect(bp.getBreakpointSize).toHaveBeenCalled();
- expect(highlightOrder.find).not.toHaveBeenCalled();
- });
-
- it('should not call highlightFeatures when breakpoint is md', () => {
- spyOn(bp, 'getBreakpointSize').and.returnValue('md');
-
- domContentLoaded(highlightOrder);
- expect(bp.getBreakpointSize).toHaveBeenCalled();
- expect(highlightOrder.find).not.toHaveBeenCalled();
- });
-
- it('should call highlightFeatures when breakpoint is lg', () => {
- spyOn(bp, 'getBreakpointSize').and.returnValue('lg');
-
- domContentLoaded(highlightOrder);
- expect(bp.getBreakpointSize).toHaveBeenCalled();
- expect(highlightOrder.find).toHaveBeenCalled();
- });
- });
-});
diff --git a/spec/javascripts/feature_highlight/feature_highlight_spec.js b/spec/javascripts/feature_highlight/feature_highlight_spec.js
deleted file mode 100644
index 6abe8425ee7..00000000000
--- a/spec/javascripts/feature_highlight/feature_highlight_spec.js
+++ /dev/null
@@ -1,122 +0,0 @@
-import Cookies from 'js-cookie';
-import * as featureHighlightHelper from '~/feature_highlight/feature_highlight_helper';
-import * as featureHighlight from '~/feature_highlight/feature_highlight';
-
-describe('feature highlight', () => {
- describe('setupFeatureHighlightPopover', () => {
- const selector = '.js-feature-highlight[data-highlight=test]';
- beforeEach(() => {
- setFixtures(`
- <div>
- <div class="js-feature-highlight" data-highlight="test" disabled>
- Trigger
- </div>
- </div>
- <div class="feature-highlight-popover-content">
- Content
- <div class="dismiss-feature-highlight">
- Dismiss
- </div>
- </div>
- `);
- spyOn(window, 'addEventListener');
- spyOn(window, 'removeEventListener');
- featureHighlight.setupFeatureHighlightPopover('test', 0);
- });
-
- it('setups popover content', () => {
- const $popoverContent = $('.feature-highlight-popover-content');
- const outerHTML = $popoverContent.prop('outerHTML');
-
- expect($(selector).data('content')).toEqual(outerHTML);
- });
-
- it('setups mouseenter', () => {
- const showSpy = spyOn(featureHighlightHelper.showPopover, 'call');
- $(selector).trigger('mouseenter');
-
- expect(showSpy).toHaveBeenCalled();
- });
-
- it('setups debounced mouseleave', (done) => {
- const hideSpy = spyOn(featureHighlightHelper.hidePopover, 'call');
- $(selector).trigger('mouseleave');
-
- // Even though we've set the debounce to 0ms, setTimeout is needed for the debounce
- setTimeout(() => {
- expect(hideSpy).toHaveBeenCalled();
- done();
- }, 0);
- });
-
- it('setups inserted.bs.popover', () => {
- $(selector).trigger('mouseenter');
- const popoverId = $(selector).attr('aria-describedby');
- const spyEvent = spyOnEvent(`#${popoverId} .dismiss-feature-highlight`, 'click');
-
- $(`#${popoverId} .dismiss-feature-highlight`).click();
- expect(spyEvent).toHaveBeenTriggered();
- });
-
- it('setups show.bs.popover', () => {
- $(selector).trigger('show.bs.popover');
- expect(window.addEventListener).toHaveBeenCalledWith('scroll', jasmine.any(Function));
- });
-
- it('setups hide.bs.popover', () => {
- $(selector).trigger('hide.bs.popover');
- expect(window.removeEventListener).toHaveBeenCalledWith('scroll', jasmine.any(Function));
- });
-
- it('removes disabled attribute', () => {
- expect($('.js-feature-highlight').is(':disabled')).toEqual(false);
- });
-
- it('displays popover', () => {
- expect($(selector).attr('aria-describedby')).toBeFalsy();
- $(selector).trigger('mouseenter');
- expect($(selector).attr('aria-describedby')).toBeTruthy();
- });
- });
-
- describe('shouldHighlightFeature', () => {
- it('should return false if element is not found', () => {
- spyOn(document, 'querySelector').and.returnValue(null);
- spyOn(Cookies, 'get').and.returnValue(null);
-
- expect(featureHighlight.shouldHighlightFeature()).toBeFalsy();
- });
-
- it('should return false if previouslyDismissed', () => {
- spyOn(document, 'querySelector').and.returnValue(document.createElement('div'));
- spyOn(Cookies, 'get').and.returnValue('true');
-
- expect(featureHighlight.shouldHighlightFeature()).toBeFalsy();
- });
-
- it('should return true if element is found and not previouslyDismissed', () => {
- spyOn(document, 'querySelector').and.returnValue(document.createElement('div'));
- spyOn(Cookies, 'get').and.returnValue(null);
-
- expect(featureHighlight.shouldHighlightFeature()).toBeTruthy();
- });
- });
-
- describe('highlightFeatures', () => {
- it('calls setupFeatureHighlightPopover if shouldHighlightFeature returns true', () => {
- // Mimic shouldHighlightFeature set to true
- const highlightOrder = ['issue-boards'];
- spyOn(highlightOrder, 'find').and.returnValue(highlightOrder[0]);
-
- expect(featureHighlight.highlightFeatures(highlightOrder)).toEqual(true);
- });
-
- it('does not call setupFeatureHighlightPopover if shouldHighlightFeature returns false', () => {
- // Mimic shouldHighlightFeature set to false
- const highlightOrder = ['issue-boards'];
- spyOn(highlightOrder, 'find').and.returnValue(null);
-
- expect(featureHighlight.highlightFeatures(highlightOrder)).toEqual(false);
- });
- });
-});
diff --git a/spec/javascripts/fixtures/dashboard.rb b/spec/javascripts/fixtures/dashboard.rb
deleted file mode 100644
index 7fa351680c9..00000000000
--- a/spec/javascripts/fixtures/dashboard.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-require 'spec_helper'
-
-describe Dashboard::ProjectsController, '(JavaScript fixtures)', type: :controller do
- include JavaScriptFixturesHelpers
-
- let(:admin) { create(:admin) }
- let(:namespace) { create(:namespace, name: 'frontend-fixtures' )}
- let(:project) { create(:project, namespace: namespace, path: 'builds-project') }
-
- render_views
-
- before(:all) do
- clean_frontend_fixtures('dashboard/')
- end
-
- before do
- sign_in(admin)
- end
-
- after do
- remove_repository(project)
- end
-
- it 'dashboard/user-callout.html.raw' do |example|
- rendered = render_template('shared/_user_callout')
- store_frontend_fixture(rendered, example.description)
- end
-
- private
-
- def render_template(template_file_name)
- controller.prepend_view_path(JavaScriptFixturesHelpers::FIXTURE_PATH)
- controller.render_to_string(template_file_name, layout: false)
- end
-end
diff --git a/spec/javascripts/lib/utils/common_utils_spec.js b/spec/javascripts/lib/utils/common_utils_spec.js
index a6ad250bd86..fd5b7d4e5da 100644
--- a/spec/javascripts/lib/utils/common_utils_spec.js
+++ b/spec/javascripts/lib/utils/common_utils_spec.js
@@ -1,398 +1,415 @@
/* eslint-disable promise/catch-or-return */
-import '~/lib/utils/common_utils';
+import * as commonUtils from '~/lib/utils/common_utils';
-(() => {
- describe('common_utils', () => {
- describe('gl.utils.parseUrl', () => {
- it('returns an anchor tag with url', () => {
- expect(gl.utils.parseUrl('/some/absolute/url').pathname).toContain('some/absolute/url');
- });
- it('url is escaped', () => {
- // IE11 will return a relative pathname while other browsers will return a full pathname.
- // parseUrl uses an anchor element for parsing an url. With relative urls, the anchor
- // element will create an absolute url relative to the current execution context.
- // The JavaScript test suite is executed at '/' which will lead to an absolute url
- // starting with '/'.
- expect(gl.utils.parseUrl('" test="asf"').pathname).toContain('/%22%20test=%22asf%22');
- });
+describe('common_utils', () => {
+ describe('parseUrl', () => {
+ it('returns an anchor tag with url', () => {
+ expect(commonUtils.parseUrl('/some/absolute/url').pathname).toContain('some/absolute/url');
});
+ it('url is escaped', () => {
+ // IE11 will return a relative pathname while other browsers will return a full pathname.
+ // parseUrl uses an anchor element for parsing an url. With relative urls, the anchor
+ // element will create an absolute url relative to the current execution context.
+ // The JavaScript test suite is executed at '/' which will lead to an absolute url
+ // starting with '/'.
+ expect(commonUtils.parseUrl('" test="asf"').pathname).toContain('/%22%20test=%22asf%22');
+ });
+ });
- describe('gl.utils.parseUrlPathname', () => {
- beforeEach(() => {
- spyOn(gl.utils, 'parseUrl').and.callFake(url => ({
- pathname: url,
- }));
- });
- it('returns an absolute url when given an absolute url', () => {
- expect(gl.utils.parseUrlPathname('/some/absolute/url')).toEqual('/some/absolute/url');
- });
- it('returns an absolute url when given a relative url', () => {
- expect(gl.utils.parseUrlPathname('some/relative/url')).toEqual('/some/relative/url');
- });
+ describe('parseUrlPathname', () => {
+ it('returns an absolute url when given an absolute url', () => {
+ expect(commonUtils.parseUrlPathname('/some/absolute/url')).toEqual('/some/absolute/url');
});
- describe('gl.utils.getUrlParamsArray', () => {
- it('should return params array', () => {
- expect(gl.utils.getUrlParamsArray() instanceof Array).toBe(true);
- });
+ it('returns an absolute url when given a relative url', () => {
+ expect(commonUtils.parseUrlPathname('some/relative/url')).toEqual('/some/relative/url');
+ });
+ });
- it('should remove the question mark from the search params', () => {
- const paramsArray = gl.utils.getUrlParamsArray();
- expect(paramsArray[0][0] !== '?').toBe(true);
- });
+ describe('getUrlParamsArray', () => {
+ it('should return params array', () => {
+ expect(commonUtils.getUrlParamsArray() instanceof Array).toBe(true);
+ });
- it('should decode params', () => {
- history.pushState('', '', '?label_name%5B%5D=test');
+ it('should remove the question mark from the search params', () => {
+ const paramsArray = commonUtils.getUrlParamsArray();
+ expect(paramsArray[0][0] !== '?').toBe(true);
+ });
- expect(
- gl.utils.getUrlParamsArray()[0],
- ).toBe('label_name[]=test');
+ it('should decode params', () => {
+ history.pushState('', '', '?label_name%5B%5D=test');
- history.pushState('', '', '?');
- });
+ expect(
+ commonUtils.getUrlParamsArray()[0],
+ ).toBe('label_name[]=test');
+
+ history.pushState('', '', '?');
});
+ });
- describe('gl.utils.handleLocationHash', () => {
- beforeEach(() => {
- spyOn(window.document, 'getElementById').and.callThrough();
- });
+ describe('handleLocationHash', () => {
+ beforeEach(() => {
+ spyOn(window.document, 'getElementById').and.callThrough();
+ });
- afterEach(() => {
- window.history.pushState({}, null, '');
- });
+ afterEach(() => {
+ window.history.pushState({}, null, '');
+ });
- function expectGetElementIdToHaveBeenCalledWith(elementId) {
- expect(window.document.getElementById).toHaveBeenCalledWith(elementId);
- }
+ function expectGetElementIdToHaveBeenCalledWith(elementId) {
+ expect(window.document.getElementById).toHaveBeenCalledWith(elementId);
+ }
- it('decodes hash parameter', () => {
- window.history.pushState({}, null, '#random-hash');
- gl.utils.handleLocationHash();
+ it('decodes hash parameter', () => {
+ window.history.pushState({}, null, '#random-hash');
+ commonUtils.handleLocationHash();
- expectGetElementIdToHaveBeenCalledWith('random-hash');
- expectGetElementIdToHaveBeenCalledWith('user-content-random-hash');
- });
+ expectGetElementIdToHaveBeenCalledWith('random-hash');
+ expectGetElementIdToHaveBeenCalledWith('user-content-random-hash');
+ });
- it('decodes cyrillic hash parameter', () => {
- window.history.pushState({}, null, '#definição');
- gl.utils.handleLocationHash();
+ it('decodes cyrillic hash parameter', () => {
+ window.history.pushState({}, null, '#definição');
+ commonUtils.handleLocationHash();
- expectGetElementIdToHaveBeenCalledWith('definição');
- expectGetElementIdToHaveBeenCalledWith('user-content-definição');
- });
+ expectGetElementIdToHaveBeenCalledWith('definição');
+ expectGetElementIdToHaveBeenCalledWith('user-content-definição');
+ });
- it('decodes encoded cyrillic hash parameter', () => {
- window.history.pushState({}, null, '#defini%C3%A7%C3%A3o');
- gl.utils.handleLocationHash();
+ it('decodes encoded cyrillic hash parameter', () => {
+ window.history.pushState({}, null, '#defini%C3%A7%C3%A3o');
+ commonUtils.handleLocationHash();
- expectGetElementIdToHaveBeenCalledWith('definição');
- expectGetElementIdToHaveBeenCalledWith('user-content-definição');
- });
+ expectGetElementIdToHaveBeenCalledWith('definição');
+ expectGetElementIdToHaveBeenCalledWith('user-content-definição');
});
+ });
- describe('gl.utils.setParamInURL', () => {
- afterEach(() => {
- window.history.pushState({}, null, '');
- });
+ describe('setParamInURL', () => {
+ afterEach(() => {
+ window.history.pushState({}, null, '');
+ });
- it('should return the parameter', () => {
- window.history.replaceState({}, null, '');
+ it('should return the parameter', () => {
+ window.history.replaceState({}, null, '');
- expect(gl.utils.setParamInURL('page', 156)).toBe('?page=156');
- expect(gl.utils.setParamInURL('page', '156')).toBe('?page=156');
- });
+ expect(commonUtils.setParamInURL('page', 156)).toBe('?page=156');
+ expect(commonUtils.setParamInURL('page', '156')).toBe('?page=156');
+ });
- it('should update the existing parameter when its a number', () => {
- window.history.pushState({}, null, '?page=15');
+ it('should update the existing parameter when its a number', () => {
+ window.history.pushState({}, null, '?page=15');
- expect(gl.utils.setParamInURL('page', 16)).toBe('?page=16');
- expect(gl.utils.setParamInURL('page', '16')).toBe('?page=16');
- expect(gl.utils.setParamInURL('page', true)).toBe('?page=true');
- });
+ expect(commonUtils.setParamInURL('page', 16)).toBe('?page=16');
+ expect(commonUtils.setParamInURL('page', '16')).toBe('?page=16');
+ expect(commonUtils.setParamInURL('page', true)).toBe('?page=true');
+ });
- it('should update the existing parameter when its a string', () => {
- window.history.pushState({}, null, '?scope=all');
+ it('should update the existing parameter when its a string', () => {
+ window.history.pushState({}, null, '?scope=all');
- expect(gl.utils.setParamInURL('scope', 'finished')).toBe('?scope=finished');
- });
+ expect(commonUtils.setParamInURL('scope', 'finished')).toBe('?scope=finished');
+ });
- it('should update the existing parameter when more than one parameter exists', () => {
- window.history.pushState({}, null, '?scope=all&page=15');
+ it('should update the existing parameter when more than one parameter exists', () => {
+ window.history.pushState({}, null, '?scope=all&page=15');
- expect(gl.utils.setParamInURL('scope', 'finished')).toBe('?scope=finished&page=15');
- });
+ expect(commonUtils.setParamInURL('scope', 'finished')).toBe('?scope=finished&page=15');
+ });
- it('should add a new parameter to the end of the existing ones', () => {
- window.history.pushState({}, null, '?scope=all');
+ it('should add a new parameter to the end of the existing ones', () => {
+ window.history.pushState({}, null, '?scope=all');
- expect(gl.utils.setParamInURL('page', 16)).toBe('?scope=all&page=16');
- expect(gl.utils.setParamInURL('page', '16')).toBe('?scope=all&page=16');
- expect(gl.utils.setParamInURL('page', true)).toBe('?scope=all&page=true');
- });
+ expect(commonUtils.setParamInURL('page', 16)).toBe('?scope=all&page=16');
+ expect(commonUtils.setParamInURL('page', '16')).toBe('?scope=all&page=16');
+ expect(commonUtils.setParamInURL('page', true)).toBe('?scope=all&page=true');
});
+ });
- describe('gl.utils.getParameterByName', () => {
- beforeEach(() => {
- window.history.pushState({}, null, '?scope=all&p=2');
- });
+ describe('getParameterByName', () => {
+ beforeEach(() => {
+ window.history.pushState({}, null, '?scope=all&p=2');
+ });
- afterEach(() => {
- window.history.replaceState({}, null, null);
- });
+ afterEach(() => {
+ window.history.replaceState({}, null, null);
+ });
- it('should return valid parameter', () => {
- const value = gl.utils.getParameterByName('scope');
- expect(gl.utils.getParameterByName('p')).toEqual('2');
- expect(value).toBe('all');
- });
+ it('should return valid parameter', () => {
+ const value = commonUtils.getParameterByName('scope');
+ expect(commonUtils.getParameterByName('p')).toEqual('2');
+ expect(value).toBe('all');
+ });
- it('should return invalid parameter', () => {
- const value = gl.utils.getParameterByName('fakeParameter');
- expect(value).toBe(null);
- });
+ it('should return invalid parameter', () => {
+ const value = commonUtils.getParameterByName('fakeParameter');
+ expect(value).toBe(null);
+ });
- it('should return valid parameters if URL is provided', () => {
- let value = gl.utils.getParameterByName('foo', 'http://cocteau.twins/?foo=bar');
- expect(value).toBe('bar');
+ it('should return valid parameters if URL is provided', () => {
+ let value = commonUtils.getParameterByName('foo', 'http://cocteau.twins/?foo=bar');
+ expect(value).toBe('bar');
- value = gl.utils.getParameterByName('manan', 'http://cocteau.twins/?foo=bar&manan=canchu');
- expect(value).toBe('canchu');
- });
+ value = commonUtils.getParameterByName('manan', 'http://cocteau.twins/?foo=bar&manan=canchu');
+ expect(value).toBe('canchu');
});
+ });
- describe('gl.utils.normalizedHeaders', () => {
- it('should upperCase all the header keys to keep them consistent', () => {
- const apiHeaders = {
- 'X-Something-Workhorse': { workhorse: 'ok' },
- 'x-something-nginx': { nginx: 'ok' },
- };
+ describe('normalizedHeaders', () => {
+ it('should upperCase all the header keys to keep them consistent', () => {
+ const apiHeaders = {
+ 'X-Something-Workhorse': { workhorse: 'ok' },
+ 'x-something-nginx': { nginx: 'ok' },
+ };
- const normalized = gl.utils.normalizeHeaders(apiHeaders);
+ const normalized = commonUtils.normalizeHeaders(apiHeaders);
- const WORKHORSE = 'X-SOMETHING-WORKHORSE';
- const NGINX = 'X-SOMETHING-NGINX';
+ const WORKHORSE = 'X-SOMETHING-WORKHORSE';
+ const NGINX = 'X-SOMETHING-NGINX';
- expect(normalized[WORKHORSE].workhorse).toBe('ok');
- expect(normalized[NGINX].nginx).toBe('ok');
- });
+ expect(normalized[WORKHORSE].workhorse).toBe('ok');
+ expect(normalized[NGINX].nginx).toBe('ok');
});
+ });
- describe('gl.utils.normalizeCRLFHeaders', () => {
- beforeEach(function () {
- this.CLRFHeaders = 'a-header: a-value\nAnother-Header: ANOTHER-VALUE\nLaSt-HeAdEr: last-VALUE';
+ describe('normalizeCRLFHeaders', () => {
+ beforeEach(function () {
+ this.CLRFHeaders = 'a-header: a-value\nAnother-Header: ANOTHER-VALUE\nLaSt-HeAdEr: last-VALUE';
+ spyOn(String.prototype, 'split').and.callThrough();
+ this.normalizeCRLFHeaders = commonUtils.normalizeCRLFHeaders(this.CLRFHeaders);
+ });
- spyOn(String.prototype, 'split').and.callThrough();
- spyOn(gl.utils, 'normalizeHeaders').and.callThrough();
+ it('should split by newline', function () {
+ expect(String.prototype.split).toHaveBeenCalledWith('\n');
+ });
- this.normalizeCRLFHeaders = gl.utils.normalizeCRLFHeaders(this.CLRFHeaders);
- });
+ it('should split by colon+space for each header', function () {
+ expect(String.prototype.split.calls.allArgs().filter(args => args[0] === ': ').length).toBe(3);
+ });
- it('should split by newline', function () {
- expect(String.prototype.split).toHaveBeenCalledWith('\n');
+ it('should return a normalized headers object', function () {
+ expect(this.normalizeCRLFHeaders).toEqual({
+ 'A-HEADER': 'a-value',
+ 'ANOTHER-HEADER': 'ANOTHER-VALUE',
+ 'LAST-HEADER': 'last-VALUE',
});
+ });
+ });
- it('should split by colon+space for each header', function () {
- expect(String.prototype.split.calls.allArgs().filter(args => args[0] === ': ').length).toBe(3);
- });
+ describe('parseIntPagination', () => {
+ it('should parse to integers all string values and return pagination object', () => {
+ const pagination = {
+ 'X-PER-PAGE': 10,
+ 'X-PAGE': 2,
+ 'X-TOTAL': 30,
+ 'X-TOTAL-PAGES': 3,
+ 'X-NEXT-PAGE': 3,
+ 'X-PREV-PAGE': 1,
+ };
+
+ const expectedPagination = {
+ perPage: 10,
+ page: 2,
+ total: 30,
+ totalPages: 3,
+ nextPage: 3,
+ previousPage: 1,
+ };
+
+ expect(commonUtils.parseIntPagination(pagination)).toEqual(expectedPagination);
+ });
+ });
- it('should call gl.utils.normalizeHeaders with a parsed headers object', function () {
- expect(gl.utils.normalizeHeaders).toHaveBeenCalledWith(jasmine.any(Object));
- });
+ describe('isMetaClick', () => {
+ it('should identify meta click on Windows/Linux', () => {
+ const e = {
+ metaKey: false,
+ ctrlKey: true,
+ which: 1,
+ };
- it('should return a normalized headers object', function () {
- expect(this.normalizeCRLFHeaders).toEqual({
- 'A-HEADER': 'a-value',
- 'ANOTHER-HEADER': 'ANOTHER-VALUE',
- 'LAST-HEADER': 'last-VALUE',
- });
- });
+ expect(commonUtils.isMetaClick(e)).toBe(true);
});
- describe('gl.utils.parseIntPagination', () => {
- it('should parse to integers all string values and return pagination object', () => {
- const pagination = {
- 'X-PER-PAGE': 10,
- 'X-PAGE': 2,
- 'X-TOTAL': 30,
- 'X-TOTAL-PAGES': 3,
- 'X-NEXT-PAGE': 3,
- 'X-PREV-PAGE': 1,
- };
-
- const expectedPagination = {
- perPage: 10,
- page: 2,
- total: 30,
- totalPages: 3,
- nextPage: 3,
- previousPage: 1,
- };
-
- expect(gl.utils.parseIntPagination(pagination)).toEqual(expectedPagination);
- });
- });
+ it('should identify meta click on macOS', () => {
+ const e = {
+ metaKey: true,
+ ctrlKey: false,
+ which: 1,
+ };
- describe('gl.utils.isMetaClick', () => {
- it('should identify meta click on Windows/Linux', () => {
- const e = {
- metaKey: false,
- ctrlKey: true,
- which: 1,
- };
+ expect(commonUtils.isMetaClick(e)).toBe(true);
+ });
- expect(gl.utils.isMetaClick(e)).toBe(true);
- });
+ it('should identify as meta click on middle-click or Mouse-wheel click', () => {
+ const e = {
+ metaKey: false,
+ ctrlKey: false,
+ which: 2,
+ };
- it('should identify meta click on macOS', () => {
- const e = {
- metaKey: true,
- ctrlKey: false,
- which: 1,
- };
+ expect(commonUtils.isMetaClick(e)).toBe(true);
+ });
+ });
- expect(gl.utils.isMetaClick(e)).toBe(true);
- });
+ describe('convertPermissionToBoolean', () => {
+ it('should convert a boolean in a string to a boolean', () => {
+ expect(commonUtils.convertPermissionToBoolean('true')).toEqual(true);
+ expect(commonUtils.convertPermissionToBoolean('false')).toEqual(false);
+ });
+ });
- it('should identify as meta click on middle-click or Mouse-wheel click', () => {
- const e = {
- metaKey: false,
- ctrlKey: false,
- which: 2,
- };
+ describe('backOff', () => {
+ beforeEach(() => {
+ // shortcut our timeouts otherwise these tests will take a long time to finish
+ const origSetTimeout = window.setTimeout;
+ spyOn(window, 'setTimeout').and.callFake(cb => origSetTimeout(cb, 0));
+ });
- expect(gl.utils.isMetaClick(e)).toBe(true);
+ it('solves the promise from the callback', (done) => {
+ const expectedResponseValue = 'Success!';
+ commonUtils.backOff((next, stop) => (
+ new Promise((resolve) => {
+ resolve(expectedResponseValue);
+ }).then((resp) => {
+ stop(resp);
+ })
+ )).then((respBackoff) => {
+ expect(respBackoff).toBe(expectedResponseValue);
+ done();
});
});
- describe('gl.utils.backOff', () => {
- beforeEach(() => {
- // shortcut our timeouts otherwise these tests will take a long time to finish
- const origSetTimeout = window.setTimeout;
- spyOn(window, 'setTimeout').and.callFake(cb => origSetTimeout(cb, 0));
+ it('catches the rejected promise from the callback', (done) => {
+ const errorMessage = 'Mistakes were made!';
+ commonUtils.backOff((next, stop) => {
+ new Promise((resolve, reject) => {
+ reject(new Error(errorMessage));
+ }).then((resp) => {
+ stop(resp);
+ }).catch(err => stop(err));
+ }).catch((errBackoffResp) => {
+ expect(errBackoffResp instanceof Error).toBe(true);
+ expect(errBackoffResp.message).toBe(errorMessage);
+ done();
});
+ });
- it('solves the promise from the callback', (done) => {
- const expectedResponseValue = 'Success!';
- gl.utils.backOff((next, stop) => (
- new Promise((resolve) => {
- resolve(expectedResponseValue);
- }).then((resp) => {
- stop(resp);
+ it('solves the promise correctly after retrying a third time', (done) => {
+ let numberOfCalls = 1;
+ const expectedResponseValue = 'Success!';
+ commonUtils.backOff((next, stop) => (
+ Promise.resolve(expectedResponseValue)
+ .then((resp) => {
+ if (numberOfCalls < 3) {
+ numberOfCalls += 1;
+ next();
+ } else {
+ stop(resp);
+ }
})
- )).then((respBackoff) => {
- expect(respBackoff).toBe(expectedResponseValue);
- done();
- });
+ )).then((respBackoff) => {
+ const timeouts = window.setTimeout.calls.allArgs().map(([, timeout]) => timeout);
+ expect(timeouts).toEqual([2000, 4000]);
+ expect(respBackoff).toBe(expectedResponseValue);
+ done();
});
+ });
- it('catches the rejected promise from the callback ', (done) => {
- const errorMessage = 'Mistakes were made!';
- gl.utils.backOff((next, stop) => {
- new Promise((resolve, reject) => {
- reject(new Error(errorMessage));
- }).then((resp) => {
- stop(resp);
- }).catch(err => stop(err));
- }).catch((errBackoffResp) => {
+ it('rejects the backOff promise after timing out', (done) => {
+ commonUtils.backOff(next => next(), 64000)
+ .catch((errBackoffResp) => {
+ const timeouts = window.setTimeout.calls.allArgs().map(([, timeout]) => timeout);
+ expect(timeouts).toEqual([2000, 4000, 8000, 16000, 32000, 32000]);
expect(errBackoffResp instanceof Error).toBe(true);
- expect(errBackoffResp.message).toBe(errorMessage);
+ expect(errBackoffResp.message).toBe('BACKOFF_TIMEOUT');
done();
});
- });
+ });
+ });
- it('solves the promise correctly after retrying a third time', (done) => {
- let numberOfCalls = 1;
- const expectedResponseValue = 'Success!';
- gl.utils.backOff((next, stop) => (
- Promise.resolve(expectedResponseValue)
- .then((resp) => {
- if (numberOfCalls < 3) {
- numberOfCalls += 1;
- next();
- } else {
- stop(resp);
- }
- })
- )).then((respBackoff) => {
- const timeouts = window.setTimeout.calls.allArgs().map(([, timeout]) => timeout);
- expect(timeouts).toEqual([2000, 4000]);
- expect(respBackoff).toBe(expectedResponseValue);
- done();
- });
- });
+ describe('setFavicon', () => {
+ beforeEach(() => {
+ const favicon = document.createElement('link');
+ favicon.setAttribute('id', 'favicon');
+ favicon.setAttribute('href', 'default/favicon');
+ document.body.appendChild(favicon);
+ });
- it('rejects the backOff promise after timing out', (done) => {
- gl.utils.backOff(next => next(), 64000)
- .catch((errBackoffResp) => {
- const timeouts = window.setTimeout.calls.allArgs().map(([, timeout]) => timeout);
- expect(timeouts).toEqual([2000, 4000, 8000, 16000, 32000, 32000]);
- expect(errBackoffResp instanceof Error).toBe(true);
- expect(errBackoffResp.message).toBe('BACKOFF_TIMEOUT');
- done();
- });
- });
+ afterEach(() => {
+ document.body.removeChild(document.getElementById('favicon'));
});
+ it('should set page favicon to provided favicon', () => {
+ const faviconPath = '//custom_favicon';
+ commonUtils.setFavicon(faviconPath);
- describe('gl.utils.setFavicon', () => {
- it('should set page favicon to provided favicon', () => {
- const faviconPath = '//custom_favicon';
- const fakeLink = {
- setAttribute() {},
- };
+ expect(document.getElementById('favicon').getAttribute('href')).toEqual(faviconPath);
+ });
+ });
- spyOn(window.document, 'getElementById').and.callFake(() => fakeLink);
- spyOn(fakeLink, 'setAttribute').and.callFake((attr, val) => {
- expect(attr).toEqual('href');
- expect(val.indexOf(faviconPath) > -1).toBe(true);
- });
- gl.utils.setFavicon(faviconPath);
- });
+ describe('resetFavicon', () => {
+ beforeEach(() => {
+ const favicon = document.createElement('link');
+ favicon.setAttribute('id', 'favicon');
+ favicon.setAttribute('href', 'default/favicon');
+ document.body.appendChild(favicon);
});
- describe('gl.utils.resetFavicon', () => {
- it('should reset page favicon to tanuki', () => {
- const fakeLink = {
- setAttribute() {},
- };
+ afterEach(() => {
+ document.body.removeChild(document.getElementById('favicon'));
+ });
- spyOn(window.document, 'getElementById').and.callFake(() => fakeLink);
- spyOn(fakeLink, 'setAttribute').and.callFake((attr, val) => {
- expect(attr).toEqual('href');
- expect(val).toMatch(/favicon/);
- });
- gl.utils.resetFavicon();
- });
+ it('should reset page favicon to tanuki', () => {
+ commonUtils.resetFavicon();
+ expect(document.getElementById('favicon').getAttribute('href')).toEqual('default/favicon');
});
+ });
- describe('gl.utils.setCiStatusFavicon', () => {
- it('should set page favicon to CI status favicon based on provided status', () => {
- const BUILD_URL = `${gl.TEST_HOST}/frontend-fixtures/builds-project/-/jobs/1/status.json`;
- const FAVICON_PATH = '//icon_status_success';
- const spySetFavicon = spyOn(gl.utils, 'setFavicon').and.stub();
- const spyResetFavicon = spyOn(gl.utils, 'resetFavicon').and.stub();
- spyOn($, 'ajax').and.callFake(function (options) {
- options.success({ favicon: FAVICON_PATH });
- expect(spySetFavicon).toHaveBeenCalledWith(FAVICON_PATH);
- options.success();
- expect(spyResetFavicon).toHaveBeenCalled();
- options.error();
- expect(spyResetFavicon).toHaveBeenCalled();
- });
+ describe('setCiStatusFavicon', () => {
+ const BUILD_URL = `${gl.TEST_HOST}/frontend-fixtures/builds-project/-/jobs/1/status.json`;
+
+ beforeEach(() => {
+ const favicon = document.createElement('link');
+ favicon.setAttribute('id', 'favicon');
+ document.body.appendChild(favicon);
+ });
+
+ afterEach(() => {
+ document.body.removeChild(document.getElementById('favicon'));
+ });
- gl.utils.setCiStatusFavicon(BUILD_URL);
+ it('should reset favicon in case of error', () => {
+ const favicon = document.getElementById('favicon');
+ spyOn($, 'ajax').and.callFake(function (options) {
+ options.error();
+ expect(favicon.getAttribute('href')).toEqual('null');
});
+
+ commonUtils.setCiStatusFavicon(BUILD_URL);
});
- describe('gl.utils.ajaxPost', () => {
- it('should perform `$.ajax` call and do `POST` request', () => {
- const requestURL = '/some/random/api';
- const data = { keyname: 'value' };
- const ajaxSpy = spyOn($, 'ajax').and.callFake(() => {});
+ it('should set page favicon to CI status favicon based on provided status', () => {
+ const FAVICON_PATH = '//icon_status_success';
+ const favicon = document.getElementById('favicon');
- gl.utils.ajaxPost(requestURL, data);
- expect(ajaxSpy.calls.allArgs()[0][0].type).toEqual('POST');
+ spyOn($, 'ajax').and.callFake(function (options) {
+ options.success({ favicon: FAVICON_PATH });
+ expect(favicon.getAttribute('href')).toEqual(FAVICON_PATH);
});
+
+ commonUtils.setCiStatusFavicon(BUILD_URL);
+ });
+ });
+
+ describe('ajaxPost', () => {
+ it('should perform `$.ajax` call and do `POST` request', () => {
+ const requestURL = '/some/random/api';
+ const data = { keyname: 'value' };
+ const ajaxSpy = spyOn($, 'ajax').and.callFake(() => {});
+
+ commonUtils.ajaxPost(requestURL, data);
+ expect(ajaxSpy.calls.allArgs()[0][0].type).toEqual('POST');
});
});
-})();
+});
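For orientation, a minimal sketch (not part of the diff) of how the module-scoped backOff helper exercised above is typically consumed. The import path and the fetchStatus helper are assumptions; the retry cadence (2s, 4s, 8s, ... capped at 32s) and the Error('BACKOFF_TIMEOUT') rejection mirror the expectations asserted in the spec.

import * as commonUtils from '~/lib/utils/common_utils';

// Sketch only: poll a hypothetical fetchStatus() (returns a Promise) until it
// reports readiness, following the backOff contract asserted in the spec above.
function waitUntilReady(fetchStatus) {
  return commonUtils.backOff((next, stop) => {
    fetchStatus()
      .then((status) => {
        if (status === 'ready') {
          stop(status); // resolves the returned promise with `status`
        } else {
          next(); // schedules another attempt: 2s, 4s, 8s, ... capped at 32s
        }
      })
      .catch(err => stop(err)); // stopping with an Error rejects the returned promise
  }, 64000); // overall budget; on expiry backOff rejects with Error('BACKOFF_TIMEOUT')
}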
diff --git a/spec/javascripts/lib/utils/csrf_token_spec.js b/spec/javascripts/lib/utils/csrf_token_spec.js
new file mode 100644
index 00000000000..c484213df8e
--- /dev/null
+++ b/spec/javascripts/lib/utils/csrf_token_spec.js
@@ -0,0 +1,49 @@
+import csrf from '~/lib/utils/csrf';
+
+describe('csrf', () => {
+ beforeEach(() => {
+ this.tokenKey = 'X-CSRF-Token';
+ this.token = 'pH1cvjnP9grx2oKlhWEDvUZnJ8x2eXsIs1qzyHkF3DugSG5yTxR76CWeEZRhML2D1IeVB7NEW0t5l/axE4iJpQ==';
+ });
+
+ it('returns the correct headerKey', () => {
+ expect(csrf.headerKey).toBe(this.tokenKey);
+ });
+
+ describe('when csrf token is in the DOM', () => {
+ beforeEach(() => {
+ setFixtures(`
+ <meta name="csrf-token" content="${this.token}">
+ `);
+
+ csrf.init();
+ });
+
+ it('returns the csrf token', () => {
+ expect(csrf.token).toBe(this.token);
+ });
+
+ it('returns the csrf headers object', () => {
+ expect(csrf.headers[this.tokenKey]).toBe(this.token);
+ });
+ });
+
+ describe('when csrf token is not in the DOM', () => {
+ beforeEach(() => {
+ setFixtures(`
+ <meta name="some-other-token">
+ `);
+
+ csrf.init();
+ });
+
+ it('returns null for token', () => {
+ expect(csrf.token).toBeNull();
+ });
+
+ it('returns empty object for headers', () => {
+ expect(typeof csrf.headers).toBe('object');
+ expect(Object.keys(csrf.headers).length).toBe(0);
+ });
+ });
+});
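A brief usage sketch (added for context, not part of the diff) of the csrf module the new spec covers: once csrf.init() has read the <meta name="csrf-token"> tag, csrf.headers carries the token under the X-CSRF-Token key and is an empty object when no tag is present, so it can be merged into request options unconditionally.

import $ from 'jquery';
import csrf from '~/lib/utils/csrf';

// Sketch only: attach the CSRF token (when present) to a POST request.
function postWithCsrf(url, data) {
  return $.ajax({
    url,
    type: 'POST',
    data,
    headers: Object.assign({}, csrf.headers), // { 'X-CSRF-Token': <token> } or {}
  });
}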
diff --git a/spec/javascripts/merge_request_tabs_spec.js b/spec/javascripts/merge_request_tabs_spec.js
index 8830a2d29e5..eadab738376 100644
--- a/spec/javascripts/merge_request_tabs_spec.js
+++ b/spec/javascripts/merge_request_tabs_spec.js
@@ -78,8 +78,9 @@ import 'vendor/jquery.scrollTo';
});
describe('meta click', () => {
+ let metakeyEvent;
beforeEach(function () {
- spyOn(gl.utils, 'isMetaClick').and.returnValue(true);
+ metakeyEvent = $.Event('click', { keyCode: 91, ctrlKey: true });
});
it('opens page when commits link is clicked', function () {
@@ -89,7 +90,7 @@ import 'vendor/jquery.scrollTo';
});
this.class.bindEvents();
- document.querySelector('.merge-request-tabs .commits-tab a').click();
+ $('.merge-request-tabs .commits-tab a').trigger(metakeyEvent);
});
it('opens page when commits badge is clicked', function () {
@@ -99,7 +100,7 @@ import 'vendor/jquery.scrollTo';
});
this.class.bindEvents();
- document.querySelector('.merge-request-tabs .commits-tab a .badge').click();
+ $('.merge-request-tabs .commits-tab a .badge').trigger(metakeyEvent);
});
});
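For context, a small sketch (an assumption, not part of the diff) of why the synthesized event above can stand in for the removed isMetaClick spy: per the common_utils spec, an event with ctrlKey set counts as a meta click, so triggering it on the tab link exercises the real code path.

import $ from 'jquery';
import * as commonUtils from '~/lib/utils/common_utils';

// Sketch only: per the common_utils spec, metaKey, ctrlKey, or a middle-click
// (which === 2) all count as a meta click, so this event qualifies.
const metakeyEvent = $.Event('click', { keyCode: 91, ctrlKey: true });
commonUtils.isMetaClick(metakeyEvent); // true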
diff --git a/spec/javascripts/notes/components/issue_comment_form_spec.js b/spec/javascripts/notes/components/issue_comment_form_spec.js
index cca5ec887a3..1c8b1b98242 100644
--- a/spec/javascripts/notes/components/issue_comment_form_spec.js
+++ b/spec/javascripts/notes/components/issue_comment_form_spec.js
@@ -1,4 +1,5 @@
import Vue from 'vue';
+import autosize from 'vendor/autosize';
import store from '~/notes/stores';
import issueCommentForm from '~/notes/components/issue_comment_form.vue';
import { loggedOutIssueData, notesDataMock, userDataMock, issueDataMock } from '../mock_data';
@@ -55,6 +56,19 @@ describe('issue_comment_form component', () => {
expect(vm.$el.querySelector(`a[href="${quickActionsDocsPath}"]`).textContent.trim()).toEqual('quick actions');
});
+ it('should resize textarea after the note is discarded', (done) => {
+ spyOn(autosize, 'update');
+ spyOn(vm, 'discard').and.callThrough();
+
+ vm.note = 'foo';
+ vm.discard();
+
+ Vue.nextTick(() => {
+ expect(autosize.update).toHaveBeenCalled();
+ done();
+ });
+ });
+
describe('edit mode', () => {
it('should enter edit mode when arrow up is pressed', () => {
spyOn(vm, 'editCurrentUserLastNote').and.callThrough();
diff --git a/spec/javascripts/notes/stores/actions_spec.js b/spec/javascripts/notes/stores/actions_spec.js
index 4359899299f..3d1ca870ca4 100644
--- a/spec/javascripts/notes/stores/actions_spec.js
+++ b/spec/javascripts/notes/stores/actions_spec.js
@@ -1,4 +1,3 @@
-
import * as actions from '~/notes/stores/actions';
import testAction from '../../helpers/vuex_action_helper';
import { discussionMock, notesDataMock, userDataMock, issueDataMock, individualNote } from '../mock_data';
diff --git a/spec/javascripts/notes/stores/mutation_spec.js b/spec/javascripts/notes/stores/mutation_spec.js
index a38f29c1e39..1e22e03e178 100644
--- a/spec/javascripts/notes/stores/mutation_spec.js
+++ b/spec/javascripts/notes/stores/mutation_spec.js
@@ -3,19 +3,31 @@ import { note, discussionMock, notesDataMock, userDataMock, issueDataMock, indiv
describe('Mutation Notes Store', () => {
describe('ADD_NEW_NOTE', () => {
- it('should add a new note to an array of notes', () => {
- const state = { notes: [] };
+ let state;
+ let noteData;
+
+ beforeEach(() => {
+ state = { notes: [] };
+ noteData = {
+ expanded: true,
+ id: note.discussion_id,
+ individual_note: true,
+ notes: [note],
+ reply_id: note.discussion_id,
+ };
mutations.ADD_NEW_NOTE(state, note);
+ });
+ it('should add a new note to an array of notes', () => {
expect(state).toEqual({
- notes: [{
- expanded: true,
- id: note.discussion_id,
- individual_note: true,
- notes: [note],
- reply_id: note.discussion_id,
- }],
+ notes: [noteData],
});
+ expect(state.notes.length).toBe(1);
+ });
+
+ it('should not add the same note to the notes array', () => {
+ mutations.ADD_NEW_NOTE(state, note);
+ expect(state.notes.length).toBe(1);
});
});
diff --git a/spec/javascripts/pretty_time_spec.js b/spec/javascripts/pretty_time_spec.js
index 0a6c479a95b..084ffe08917 100644
--- a/spec/javascripts/pretty_time_spec.js
+++ b/spec/javascripts/pretty_time_spec.js
@@ -1,215 +1,133 @@
-import '~/lib/utils/pretty_time';
+import { parseSeconds, abbreviateTime, stringifyTime } from '~/lib/utils/pretty_time';
-(() => {
- const prettyTime = gl.utils.prettyTime;
+function assertTimeUnits(obj, minutes, hours, days, weeks) {
+ expect(obj.minutes).toBe(minutes);
+ expect(obj.hours).toBe(hours);
+ expect(obj.days).toBe(days);
+ expect(obj.weeks).toBe(weeks);
+}
- describe('prettyTime methods', function () {
- describe('parseSeconds', function () {
- it('should correctly parse a negative value', function () {
- const parser = prettyTime.parseSeconds;
+describe('prettyTime methods', () => {
+ describe('parseSeconds', () => {
+ it('should correctly parse a negative value', () => {
+ const zeroSeconds = parseSeconds(-1000);
- const zeroSeconds = parser(-1000);
-
- expect(zeroSeconds.minutes).toBe(16);
- expect(zeroSeconds.hours).toBe(0);
- expect(zeroSeconds.days).toBe(0);
- expect(zeroSeconds.weeks).toBe(0);
- });
-
- it('should correctly parse a zero value', function () {
- const parser = prettyTime.parseSeconds;
-
- const zeroSeconds = parser(0);
-
- expect(zeroSeconds.minutes).toBe(0);
- expect(zeroSeconds.hours).toBe(0);
- expect(zeroSeconds.days).toBe(0);
- expect(zeroSeconds.weeks).toBe(0);
- });
-
- it('should correctly parse a small non-zero second values', function () {
- const parser = prettyTime.parseSeconds;
-
- const subOneMinute = parser(10);
-
- expect(subOneMinute.minutes).toBe(0);
- expect(subOneMinute.hours).toBe(0);
- expect(subOneMinute.days).toBe(0);
- expect(subOneMinute.weeks).toBe(0);
-
- const aboveOneMinute = parser(100);
-
- expect(aboveOneMinute.minutes).toBe(1);
- expect(aboveOneMinute.hours).toBe(0);
- expect(aboveOneMinute.days).toBe(0);
- expect(aboveOneMinute.weeks).toBe(0);
-
- const manyMinutes = parser(1000);
-
- expect(manyMinutes.minutes).toBe(16);
- expect(manyMinutes.hours).toBe(0);
- expect(manyMinutes.days).toBe(0);
- expect(manyMinutes.weeks).toBe(0);
- });
-
- it('should correctly parse large second values', function () {
- const parser = prettyTime.parseSeconds;
-
- const aboveOneHour = parser(4800);
-
- expect(aboveOneHour.minutes).toBe(20);
- expect(aboveOneHour.hours).toBe(1);
- expect(aboveOneHour.days).toBe(0);
- expect(aboveOneHour.weeks).toBe(0);
-
- const aboveOneDay = parser(110000);
-
- expect(aboveOneDay.minutes).toBe(33);
- expect(aboveOneDay.hours).toBe(6);
- expect(aboveOneDay.days).toBe(3);
- expect(aboveOneDay.weeks).toBe(0);
-
- const aboveOneWeek = parser(25000000);
-
- expect(aboveOneWeek.minutes).toBe(26);
- expect(aboveOneWeek.hours).toBe(0);
- expect(aboveOneWeek.days).toBe(3);
- expect(aboveOneWeek.weeks).toBe(173);
- });
+ assertTimeUnits(zeroSeconds, 16, 0, 0, 0);
+ });
- it('should correctly accept a custom param for hoursPerDay', function () {
- const parser = prettyTime.parseSeconds;
- const config = { hoursPerDay: 24 };
+ it('should correctly parse a zero value', () => {
+ const zeroSeconds = parseSeconds(0);
- const aboveOneHour = parser(4800, config);
+ assertTimeUnits(zeroSeconds, 0, 0, 0, 0);
+ });
- expect(aboveOneHour.minutes).toBe(20);
- expect(aboveOneHour.hours).toBe(1);
- expect(aboveOneHour.days).toBe(0);
- expect(aboveOneHour.weeks).toBe(0);
+ it('should correctly parse small non-zero second values', () => {
+ const subOneMinute = parseSeconds(10);
+ const aboveOneMinute = parseSeconds(100);
+ const manyMinutes = parseSeconds(1000);
- const aboveOneDay = parser(110000, config);
+ assertTimeUnits(subOneMinute, 0, 0, 0, 0);
+ assertTimeUnits(aboveOneMinute, 1, 0, 0, 0);
+ assertTimeUnits(manyMinutes, 16, 0, 0, 0);
+ });
- expect(aboveOneDay.minutes).toBe(33);
- expect(aboveOneDay.hours).toBe(6);
- expect(aboveOneDay.days).toBe(1);
- expect(aboveOneDay.weeks).toBe(0);
+ it('should correctly parse large second values', () => {
+ const aboveOneHour = parseSeconds(4800);
+ const aboveOneDay = parseSeconds(110000);
+ const aboveOneWeek = parseSeconds(25000000);
- const aboveOneWeek = parser(25000000, config);
+ assertTimeUnits(aboveOneHour, 20, 1, 0, 0);
+ assertTimeUnits(aboveOneDay, 33, 6, 3, 0);
+ assertTimeUnits(aboveOneWeek, 26, 0, 3, 173);
+ });
- expect(aboveOneWeek.minutes).toBe(26);
- expect(aboveOneWeek.hours).toBe(8);
- expect(aboveOneWeek.days).toBe(4);
+ it('should correctly accept a custom param for hoursPerDay', () => {
+ const config = { hoursPerDay: 24 };
- expect(aboveOneWeek.weeks).toBe(57);
- });
+ const aboveOneHour = parseSeconds(4800, config);
+ const aboveOneDay = parseSeconds(110000, config);
+ const aboveOneWeek = parseSeconds(25000000, config);
- it('should correctly accept a custom param for daysPerWeek', function () {
- const parser = prettyTime.parseSeconds;
- const config = { daysPerWeek: 7 };
+ assertTimeUnits(aboveOneHour, 20, 1, 0, 0);
+ assertTimeUnits(aboveOneDay, 33, 6, 1, 0);
+ assertTimeUnits(aboveOneWeek, 26, 8, 4, 57);
+ });
- const aboveOneHour = parser(4800, config);
+ it('should correctly accept a custom param for daysPerWeek', () => {
+ const config = { daysPerWeek: 7 };
- expect(aboveOneHour.minutes).toBe(20);
- expect(aboveOneHour.hours).toBe(1);
- expect(aboveOneHour.days).toBe(0);
- expect(aboveOneHour.weeks).toBe(0);
+ const aboveOneHour = parseSeconds(4800, config);
+ const aboveOneDay = parseSeconds(110000, config);
+ const aboveOneWeek = parseSeconds(25000000, config);
- const aboveOneDay = parser(110000, config);
+ assertTimeUnits(aboveOneHour, 20, 1, 0, 0);
+ assertTimeUnits(aboveOneDay, 33, 6, 3, 0);
+ assertTimeUnits(aboveOneWeek, 26, 0, 0, 124);
+ });
- expect(aboveOneDay.minutes).toBe(33);
- expect(aboveOneDay.hours).toBe(6);
- expect(aboveOneDay.days).toBe(3);
- expect(aboveOneDay.weeks).toBe(0);
+ it('should correctly accept custom params for daysPerWeek and hoursPerDay', () => {
+ const config = { daysPerWeek: 55, hoursPerDay: 14 };
- const aboveOneWeek = parser(25000000, config);
+ const aboveOneHour = parseSeconds(4800, config);
+ const aboveOneDay = parseSeconds(110000, config);
+ const aboveOneWeek = parseSeconds(25000000, config);
- expect(aboveOneWeek.minutes).toBe(26);
- expect(aboveOneWeek.hours).toBe(0);
- expect(aboveOneWeek.days).toBe(0);
+ assertTimeUnits(aboveOneHour, 20, 1, 0, 0);
+ assertTimeUnits(aboveOneDay, 33, 2, 2, 0);
+ assertTimeUnits(aboveOneWeek, 26, 0, 1, 9);
+ });
+ });
- expect(aboveOneWeek.weeks).toBe(124);
- });
+ describe('stringifyTime', () => {
+ it('should stringify values with all non-zero units', () => {
+ const timeObject = {
+ weeks: 1,
+ days: 4,
+ hours: 7,
+ minutes: 20,
+ };
- it('should correctly accept custom params for daysPerWeek and hoursPerDay', function () {
- const parser = prettyTime.parseSeconds;
- const config = { daysPerWeek: 55, hoursPerDay: 14 };
+ const timeString = stringifyTime(timeObject);
- const aboveOneHour = parser(4800, config);
+ expect(timeString).toBe('1w 4d 7h 20m');
+ });
- expect(aboveOneHour.minutes).toBe(20);
- expect(aboveOneHour.hours).toBe(1);
- expect(aboveOneHour.days).toBe(0);
- expect(aboveOneHour.weeks).toBe(0);
+ it('should stringify values with some non-zero units', () => {
+ const timeObject = {
+ weeks: 0,
+ days: 4,
+ hours: 0,
+ minutes: 20,
+ };
- const aboveOneDay = parser(110000, config);
+ const timeString = stringifyTime(timeObject);
- expect(aboveOneDay.minutes).toBe(33);
- expect(aboveOneDay.hours).toBe(2);
- expect(aboveOneDay.days).toBe(2);
- expect(aboveOneDay.weeks).toBe(0);
+ expect(timeString).toBe('4d 20m');
+ });
- const aboveOneWeek = parser(25000000, config);
+ it('should stringify values with no non-zero units', () => {
+ const timeObject = {
+ weeks: 0,
+ days: 0,
+ hours: 0,
+ minutes: 0,
+ };
- expect(aboveOneWeek.minutes).toBe(26);
- expect(aboveOneWeek.hours).toBe(0);
- expect(aboveOneWeek.days).toBe(1);
+ const timeString = stringifyTime(timeObject);
- expect(aboveOneWeek.weeks).toBe(9);
- });
+ expect(timeString).toBe('0m');
});
+ });
- describe('stringifyTime', function () {
- it('should stringify values with all non-zero units', function () {
- const timeObject = {
- weeks: 1,
- days: 4,
- hours: 7,
- minutes: 20,
- };
-
- const timeString = prettyTime.stringifyTime(timeObject);
-
- expect(timeString).toBe('1w 4d 7h 20m');
- });
-
- it('should stringify values with some non-zero units', function () {
- const timeObject = {
- weeks: 0,
- days: 4,
- hours: 0,
- minutes: 20,
- };
-
- const timeString = prettyTime.stringifyTime(timeObject);
-
- expect(timeString).toBe('4d 20m');
- });
-
- it('should stringify values with no non-zero units', function () {
- const timeObject = {
- weeks: 0,
- days: 0,
- hours: 0,
- minutes: 0,
- };
-
- const timeString = prettyTime.stringifyTime(timeObject);
-
- expect(timeString).toBe('0m');
- });
+ describe('abbreviateTime', () => {
+ it('should abbreviate stringified times for weeks', () => {
+ const fullTimeString = '1w 3d 4h 5m';
+ expect(abbreviateTime(fullTimeString)).toBe('1w');
});
- describe('abbreviateTime', function () {
- it('should abbreviate stringified times for weeks', function () {
- const fullTimeString = '1w 3d 4h 5m';
- expect(prettyTime.abbreviateTime(fullTimeString)).toBe('1w');
- });
-
- it('should abbreviate stringified times for non-weeks', function () {
- const fullTimeString = '0w 3d 4h 5m';
- expect(prettyTime.abbreviateTime(fullTimeString)).toBe('3d');
- });
+ it('should abbreviate stringified times for non-weeks', () => {
+ const fullTimeString = '0w 3d 4h 5m';
+ expect(abbreviateTime(fullTimeString)).toBe('3d');
});
});
-})(window.gl || (window.gl = {}));
+});
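A short sketch (not part of the diff) of the newly named exports; the values in the comments are taken from, or derived from, the assertions above, which are consistent with an 8-hour day and 5-day week unless hoursPerDay/daysPerWeek are passed.

import { parseSeconds, stringifyTime, abbreviateTime } from '~/lib/utils/pretty_time';

// Sketch only: turn a raw seconds value into a short human-readable label.
// The example values mirror the assertions in the spec above.
function humanizeEstimate(seconds) {
  const parsed = parseSeconds(seconds); // 4800 -> { weeks: 0, days: 0, hours: 1, minutes: 20 }
  const full = stringifyTime(parsed);   // -> '1h 20m' (zero units omitted, '0m' when all are zero)
  return abbreviateTime(full);          // keeps the leading non-zero unit, e.g. '1w 3d 4h 5m' -> '1w'
}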
diff --git a/spec/javascripts/todos_spec.js b/spec/javascripts/todos_spec.js
index fd492159081..7d3c9319a11 100644
--- a/spec/javascripts/todos_spec.js
+++ b/spec/javascripts/todos_spec.js
@@ -26,37 +26,30 @@ describe('Todos', () => {
describe('meta click', () => {
let visitUrlSpy;
+ let windowOpenSpy;
+ let metakeyEvent;
beforeEach(() => {
- spyOn(gl.utils, 'isMetaClick').and.returnValue(true);
+ metakeyEvent = $.Event('click', { keyCode: 91, ctrlKey: true });
visitUrlSpy = spyOn(gl.utils, 'visitUrl').and.callFake(() => {});
+ windowOpenSpy = spyOn(window, 'open').and.callFake(() => {});
});
- it('opens the todo url in another tab', (done) => {
+ it('opens the todo url in another tab', () => {
const todoLink = todoItem.dataset.url;
- spyOn(window, 'open').and.callFake((url, target) => {
- expect(todoLink).toEqual(url);
- expect(target).toEqual('_blank');
- done();
- });
+ $('.todos-list .todo').trigger(metakeyEvent);
- todoItem.click();
expect(visitUrlSpy).not.toHaveBeenCalled();
+ expect(windowOpenSpy).toHaveBeenCalledWith(todoLink, '_blank');
});
- it('opens the avatar\'s url in another tab when the avatar is clicked', (done) => {
- const avatarImage = todoItem.querySelector('img');
- const avatarUrl = avatarImage.parentElement.getAttribute('href');
+ it('runs native functionality when avatar is clicked', () => {
+ $('.todos-list a').on('click', e => e.preventDefault());
+ $('.todos-list img').trigger(metakeyEvent);
- spyOn(window, 'open').and.callFake((url, target) => {
- expect(avatarUrl).toEqual(url);
- expect(target).toEqual('_blank');
- done();
- });
-
- avatarImage.click();
expect(visitUrlSpy).not.toHaveBeenCalled();
+ expect(windowOpenSpy).not.toHaveBeenCalled();
});
});
});
diff --git a/spec/javascripts/user_callout_spec.js b/spec/javascripts/user_callout_spec.js
deleted file mode 100644
index 69cb93bd850..00000000000
--- a/spec/javascripts/user_callout_spec.js
+++ /dev/null
@@ -1,49 +0,0 @@
-import Cookies from 'js-cookie';
-import UserCallout from '~/user_callout';
-
-const USER_CALLOUT_COOKIE = 'user_callout_dismissed';
-
-describe('UserCallout', function () {
- const fixtureName = 'dashboard/user-callout.html.raw';
- preloadFixtures(fixtureName);
-
- beforeEach(() => {
- loadFixtures(fixtureName);
- Cookies.remove(USER_CALLOUT_COOKIE);
-
- this.userCallout = new UserCallout();
- this.closeButton = $('.js-close-callout.close');
- this.userCalloutBtn = $('.js-close-callout:not(.close)');
- });
-
- it('hides when user clicks on the dismiss-icon', (done) => {
- this.closeButton.click();
- expect(Cookies.get(USER_CALLOUT_COOKIE)).toBe('true');
-
- setTimeout(() => {
- expect(
- document.querySelector('.user-callout'),
- ).toBeNull();
-
- done();
- });
- });
-
- it('hides when user clicks on the "check it out" button', () => {
- this.userCalloutBtn.click();
- expect(Cookies.get(USER_CALLOUT_COOKIE)).toBe('true');
- });
-
- describe('Sets cookie with setCalloutPerProject', () => {
- beforeEach(() => {
- spyOn(Cookies, 'set').and.callFake(() => {});
- document.querySelector('.user-callout').setAttribute('data-project-path', 'foo/bar');
- this.userCallout = new UserCallout({ setCalloutPerProject: true });
- });
-
- it('sets a cookie when the user clicks the close button', () => {
- this.userCalloutBtn.click();
- expect(Cookies.set).toHaveBeenCalledWith('user_callout_dismissed', 'true', Object({ expires: 365, path: 'foo/bar' }));
- });
- });
-});
diff --git a/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js b/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
index da66c7504cb..e4324e91502 100644
--- a/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
+++ b/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
@@ -224,29 +224,41 @@ describe('mrWidgetOptions', () => {
describe('handleMounted', () => {
it('should call required methods to do the initial kick-off', () => {
spyOn(vm, 'initDeploymentsPolling');
- spyOn(vm, 'setFavicon');
+ spyOn(vm, 'setFaviconHelper');
vm.handleMounted();
- expect(vm.setFavicon).toHaveBeenCalled();
+ expect(vm.setFaviconHelper).toHaveBeenCalled();
expect(vm.initDeploymentsPolling).toHaveBeenCalled();
});
});
describe('setFavicon', () => {
+ let faviconElement;
+
+ beforeEach(() => {
+ const favicon = document.createElement('link');
+ favicon.setAttribute('id', 'favicon');
+ document.body.appendChild(favicon);
+
+ faviconElement = document.getElementById('favicon');
+ });
+
+ afterEach(() => {
+ document.body.removeChild(document.getElementById('favicon'));
+ });
+
it('should call setFavicon method', () => {
- spyOn(gl.utils, 'setFavicon');
- vm.setFavicon();
+ vm.setFaviconHelper();
- expect(gl.utils.setFavicon).toHaveBeenCalledWith(vm.mr.ciStatusFaviconPath);
+ expect(faviconElement.getAttribute('href')).toEqual(vm.mr.ciStatusFaviconPath);
});
it('should not call setFavicon when there is no ciStatusFaviconPath', () => {
- spyOn(gl.utils, 'setFavicon');
vm.mr.ciStatusFaviconPath = null;
- vm.setFavicon();
+ vm.setFaviconHelper();
- expect(gl.utils.setFavicon).not.toHaveBeenCalled();
+ expect(faviconElement.getAttribute('href')).toEqual(null);
});
});
diff --git a/spec/lib/banzai/filter/milestone_reference_filter_spec.rb b/spec/lib/banzai/filter/milestone_reference_filter_spec.rb
index ebd6c79077e..fe7a8c84c9e 100644
--- a/spec/lib/banzai/filter/milestone_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/milestone_reference_filter_spec.rb
@@ -296,7 +296,7 @@ describe Banzai::Filter::MilestoneReferenceFilter do
context 'project milestones' do
let(:milestone) { create(:milestone, project: project) }
- let(:reference) { milestone.to_reference }
+ let(:reference) { milestone.to_reference(format: :iid) }
include_examples 'reference parsing'
diff --git a/spec/lib/gitlab/backup/manager_spec.rb b/spec/lib/gitlab/backup/manager_spec.rb
index 8772d3d5ada..422f2af7266 100644
--- a/spec/lib/gitlab/backup/manager_spec.rb
+++ b/spec/lib/gitlab/backup/manager_spec.rb
@@ -26,6 +26,9 @@ describe Backup::Manager do
[
'1451606400_2016_01_01_1.2.3_gitlab_backup.tar',
'1451520000_2015_12_31_4.5.6_gitlab_backup.tar',
+ '1451520000_2015_12_31_4.5.6-pre_gitlab_backup.tar',
+ '1451520000_2015_12_31_4.5.6-rc1_gitlab_backup.tar',
+ '1451520000_2015_12_31_4.5.6-pre-ee_gitlab_backup.tar',
'1451510000_2015_12_30_gitlab_backup.tar',
'1450742400_2015_12_22_gitlab_backup.tar',
'1449878400_gitlab_backup.tar',
@@ -57,6 +60,30 @@ describe Backup::Manager do
end
end
+ context 'when no valid file is found' do
+ let(:files) do
+ [
+ '14516064000_2016_01_01_1.2.3_gitlab_backup.tar',
+ 'foo_1451520000_2015_12_31_4.5.6_gitlab_backup.tar',
+ '1451520000_2015_12_31_4.5.6-foo_gitlab_backup.tar'
+ ]
+ end
+
+ before do
+ allow(Gitlab.config.backup).to receive(:keep_time).and_return(1)
+
+ subject.remove_old
+ end
+
+ it 'removes no files' do
+ expect(FileUtils).not_to have_received(:rm)
+ end
+
+ it 'prints a done message' do
+ expect(progress).to have_received(:puts).with('done. (0 removed)')
+ end
+ end
+
context 'when there are no files older than keep_time' do
before do
# Set to 30 days
@@ -84,16 +111,22 @@ describe Backup::Manager do
it 'removes matching files with a human-readable versioned timestamp' do
expect(FileUtils).to have_received(:rm).with(files[1])
- end
-
- it 'removes matching files with a human-readable non-versioned timestamp' do
expect(FileUtils).to have_received(:rm).with(files[2])
expect(FileUtils).to have_received(:rm).with(files[3])
end
- it 'removes matching files without a human-readable timestamp' do
+ it 'removes matching files with a human-readable versioned timestamp with tagged EE' do
expect(FileUtils).to have_received(:rm).with(files[4])
+ end
+
+ it 'removes matching files with a human-readable non-versioned timestamp' do
expect(FileUtils).to have_received(:rm).with(files[5])
+ expect(FileUtils).to have_received(:rm).with(files[6])
+ end
+
+ it 'removes matching files without a human-readable timestamp' do
+ expect(FileUtils).to have_received(:rm).with(files[7])
+ expect(FileUtils).to have_received(:rm).with(files[8])
end
it 'does not remove files that are not old enough' do
@@ -101,11 +134,11 @@ describe Backup::Manager do
end
it 'does not remove non-matching files' do
- expect(FileUtils).not_to have_received(:rm).with(files[6])
+ expect(FileUtils).not_to have_received(:rm).with(files[9])
end
it 'prints a done message' do
- expect(progress).to have_received(:puts).with('done. (5 removed)')
+ expect(progress).to have_received(:puts).with('done. (8 removed)')
end
end
@@ -121,14 +154,15 @@ describe Backup::Manager do
end
it 'removes the remaining expected files' do
- expect(FileUtils).to have_received(:rm).with(files[2])
- expect(FileUtils).to have_received(:rm).with(files[3])
expect(FileUtils).to have_received(:rm).with(files[4])
expect(FileUtils).to have_received(:rm).with(files[5])
+ expect(FileUtils).to have_received(:rm).with(files[6])
+ expect(FileUtils).to have_received(:rm).with(files[7])
+ expect(FileUtils).to have_received(:rm).with(files[8])
end
it 'sets the correct removed count' do
- expect(progress).to have_received(:puts).with('done. (4 removed)')
+ expect(progress).to have_received(:puts).with('done. (7 removed)')
end
it 'prints the error from file that could not be removed' do
diff --git a/spec/lib/gitlab/checks/force_push_spec.rb b/spec/lib/gitlab/checks/force_push_spec.rb
index f8c8b83a3ac..2c7ef622c51 100644
--- a/spec/lib/gitlab/checks/force_push_spec.rb
+++ b/spec/lib/gitlab/checks/force_push_spec.rb
@@ -5,13 +5,13 @@ describe Gitlab::Checks::ForcePush do
context "exit code checking", skip_gitaly_mock: true do
it "does not raise a runtime error if the `popen` call to git returns a zero exit code" do
- allow(Gitlab::Popen).to receive(:popen).and_return(['normal output', 0])
+ allow_any_instance_of(Gitlab::Git::RevList).to receive(:popen).and_return(['normal output', 0])
expect { described_class.force_push?(project, 'oldrev', 'newrev') }.not_to raise_error
end
it "raises a runtime error if the `popen` call to git returns a non-zero exit code" do
- allow(Gitlab::Popen).to receive(:popen).and_return(['error', 1])
+ allow_any_instance_of(Gitlab::Git::RevList).to receive(:popen).and_return(['error', 1])
expect { described_class.force_push?(project, 'oldrev', 'newrev') }.to raise_error(RuntimeError)
end
diff --git a/spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb b/spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb
new file mode 100644
index 00000000000..15eb01eb472
--- /dev/null
+++ b/spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb
@@ -0,0 +1,30 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Build::Policy::Kubernetes do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ context 'when kubernetes service is active' do
+ set(:project) { create(:kubernetes_project) }
+
+ it 'is satisfied by a kubernetes pipeline' do
+ expect(described_class.new('active'))
+ .to be_satisfied_by(pipeline)
+ end
+ end
+
+ context 'when kubernetes service is inactive' do
+ set(:project) { create(:project) }
+
+ it 'is not satisfied by a pipeline without kubernetes available' do
+ expect(described_class.new('active'))
+ .not_to be_satisfied_by(pipeline)
+ end
+ end
+
+ context 'when kubernetes policy is invalid' do
+ it 'raises an error' do
+ expect { described_class.new('unknown') }
+ .to raise_error(described_class::UnknownPolicyError)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/build/policy/refs_spec.rb b/spec/lib/gitlab/ci/build/policy/refs_spec.rb
new file mode 100644
index 00000000000..7211187e511
--- /dev/null
+++ b/spec/lib/gitlab/ci/build/policy/refs_spec.rb
@@ -0,0 +1,87 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Build::Policy::Refs do
+ describe '#satisfied_by?' do
+ context 'when matching ref' do
+ let(:pipeline) { build_stubbed(:ci_pipeline, ref: 'master') }
+
+ it 'is satisfied when pipeline branch matches' do
+ expect(described_class.new(%w[master deploy]))
+ .to be_satisfied_by(pipeline)
+ end
+
+ it 'is not satisfied when pipeline branch does not match' do
+ expect(described_class.new(%w[feature fix]))
+ .not_to be_satisfied_by(pipeline)
+ end
+ end
+
+ context 'when matching tags' do
+ context 'when pipeline runs for a tag' do
+ let(:pipeline) do
+ build_stubbed(:ci_pipeline, ref: 'feature', tag: true)
+ end
+
+ it 'is satisfied when tags matcher is specified' do
+ expect(described_class.new(%w[master tags]))
+ .to be_satisfied_by(pipeline)
+ end
+ end
+
+ context 'when pipeline is not created for a tag' do
+ let(:pipeline) do
+ build_stubbed(:ci_pipeline, ref: 'feature', tag: false)
+ end
+
+ it 'is not satisfied when tag match is specified' do
+ expect(described_class.new(%w[master tags]))
+ .not_to be_satisfied_by(pipeline)
+ end
+ end
+ end
+
+ context 'when also matching a path' do
+ let(:pipeline) do
+ build_stubbed(:ci_pipeline, ref: 'master')
+ end
+
+ it 'is satisfied when provided path matches the specified one' do
+ expect(described_class.new(%W[master@#{pipeline.project_full_path}]))
+ .to be_satisfied_by(pipeline)
+ end
+
+ it 'is not satisfied when path differs' do
+ expect(described_class.new(%w[master@some/fork/repository]))
+ .not_to be_satisfied_by(pipeline)
+ end
+ end
+
+ context 'when matching a source' do
+ let(:pipeline) { build_stubbed(:ci_pipeline, source: :push) }
+
+ it 'is satisfied when provided source keyword matches' do
+ expect(described_class.new(%w[pushes]))
+ .to be_satisfied_by(pipeline)
+ end
+
+ it 'is not satisfied when provided source keyword does not match' do
+ expect(described_class.new(%w[triggers]))
+ .not_to be_satisfied_by(pipeline)
+ end
+ end
+
+ context 'when matching a ref by a regular expression' do
+ let(:pipeline) { build_stubbed(:ci_pipeline, ref: 'docs-something') }
+
+ it 'is satisfied when regexp matches pipeline ref' do
+ expect(described_class.new(['/docs-.*/']))
+ .to be_satisfied_by(pipeline)
+ end
+
+ it 'is not satisfied when regexp does not match pipeline ref' do
+ expect(described_class.new(['/fix-.*/']))
+ .not_to be_satisfied_by(pipeline)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/build/policy_spec.rb b/spec/lib/gitlab/ci/build/policy_spec.rb
new file mode 100644
index 00000000000..20ee3dd3e89
--- /dev/null
+++ b/spec/lib/gitlab/ci/build/policy_spec.rb
@@ -0,0 +1,37 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Build::Policy do
+ let(:policy) { spy('policy specification') }
+
+ before do
+ stub_const("#{described_class}::Something", policy)
+ end
+
+ describe '.fabricate' do
+ context 'when policy exists' do
+ it 'fabricates and initializes relevant policy' do
+ specs = described_class.fabricate(something: 'some value')
+
+ expect(specs).to be_an Array
+ expect(specs).to be_one
+ expect(policy).to have_received(:new).with('some value')
+ end
+ end
+
+ context 'when some policies are not defined' do
+ it 'raises an error for unknown policies' do
+ expect { described_class.fabricate(unknown: 'first') }
+ .to raise_error(NameError)
+ end
+ end
+
+ context 'when passing a nil value as specs' do
+ it 'returns an empty array' do
+ specs = described_class.fabricate(nil)
+
+ expect(specs).to be_an Array
+ expect(specs).to be_empty
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index 2278230f338..d72f8553f55 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -3,8 +3,7 @@ require 'spec_helper'
module Gitlab
module Ci
describe YamlProcessor, :lib do
- subject { described_class.new(config, path) }
- let(:path) { 'path' }
+ subject { described_class.new(config) }
describe 'our current .gitlab-ci.yml' do
let(:config) { File.read("#{Rails.root}/.gitlab-ci.yml") }
@@ -17,7 +16,7 @@ module Gitlab
end
describe '#build_attributes' do
- subject { described_class.new(config, path).build_attributes(:rspec) }
+ subject { described_class.new(config).build_attributes(:rspec) }
describe 'coverage entry' do
describe 'code coverage regexp' do
@@ -167,8 +166,6 @@ module Gitlab
end
context 'when kubernetes policy is specified' do
- let(:pipeline) { create(:ci_empty_pipeline) }
-
let(:config) do
YAML.dump(
spinach: { stage: 'test', script: 'spinach' },
@@ -204,7 +201,7 @@ module Gitlab
end
end
- describe "#builds_for_stage_and_ref" do
+ describe "#pipeline_stage_builds" do
let(:type) { 'test' }
it "returns builds if no branch specified" do
@@ -213,10 +210,10 @@ module Gitlab
rspec: { script: "rspec" }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref(type, "master").first).to eq({
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).first).to eq({
stage: "test",
stage_idx: 1,
name: "rspec",
@@ -241,9 +238,9 @@ module Gitlab
rspec: { script: "rspec", only: ["deploy"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(0)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(0)
end
it "does not return builds if only has regexp with another branch" do
@@ -252,9 +249,9 @@ module Gitlab
rspec: { script: "rspec", only: ["/^deploy$/"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(0)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(0)
end
it "returns builds if only has specified this branch" do
@@ -263,9 +260,9 @@ module Gitlab
rspec: { script: "rspec", only: ["master"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(1)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(1)
end
it "returns builds if only has a list of branches including specified" do
@@ -274,9 +271,9 @@ module Gitlab
rspec: { script: "rspec", type: type, only: %w(master deploy) }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(1)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(1)
end
it "returns builds if only has a branches keyword specified" do
@@ -285,9 +282,9 @@ module Gitlab
rspec: { script: "rspec", type: type, only: ["branches"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(1)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(1)
end
it "does not return builds if only has a tags keyword" do
@@ -296,9 +293,9 @@ module Gitlab
rspec: { script: "rspec", type: type, only: ["tags"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(0)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(0)
end
it "returns builds if only has special keywords specified and source matches" do
@@ -315,9 +312,9 @@ module Gitlab
rspec: { script: "rspec", type: type, only: [possibility[:keyword]] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy", false, possibility[:source]).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: 'deploy', tag: false, source: possibility[:source])).size).to eq(1)
end
end
@@ -335,21 +332,27 @@ module Gitlab
rspec: { script: "rspec", type: type, only: [possibility[:keyword]] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy", false, possibility[:source]).size).to eq(0)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: 'deploy', tag: false, source: possibility[:source])).size).to eq(0)
end
end
it "returns builds if only has current repository path" do
+ seed_pipeline = pipeline(ref: 'deploy')
+
config = YAML.dump({
before_script: ["pwd"],
- rspec: { script: "rspec", type: type, only: ["branches@path"] }
+ rspec: {
+ script: "rspec",
+ type: type,
+ only: ["branches@#{seed_pipeline.project_full_path}"]
+ }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(1)
+ expect(config_processor.pipeline_stage_builds(type, seed_pipeline).size).to eq(1)
end
it "does not return builds if only has different repository path" do
@@ -358,9 +361,9 @@ module Gitlab
rspec: { script: "rspec", type: type, only: ["branches@fork"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(0)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(0)
end
it "returns build only for specified type" do
@@ -371,11 +374,11 @@ module Gitlab
production: { script: "deploy", type: "deploy", only: ["master@path", "deploy"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, 'fork')
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref("deploy", "deploy").size).to eq(2)
- expect(config_processor.builds_for_stage_and_ref("test", "deploy").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("deploy", "master").size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("deploy", pipeline(ref: "deploy")).size).to eq(2)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "deploy")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("deploy", pipeline(ref: "master")).size).to eq(1)
end
context 'for invalid value' do
@@ -418,9 +421,9 @@ module Gitlab
rspec: { script: "rspec", except: ["deploy"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(1)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(1)
end
it "returns builds if except has regexp with another branch" do
@@ -429,9 +432,9 @@ module Gitlab
rspec: { script: "rspec", except: ["/^deploy$/"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(1)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(1)
end
it "does not return builds if except has specified this branch" do
@@ -440,9 +443,9 @@ module Gitlab
rspec: { script: "rspec", except: ["master"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(0)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(0)
end
it "does not return builds if except has a list of branches including specified" do
@@ -451,9 +454,9 @@ module Gitlab
rspec: { script: "rspec", type: type, except: %w(master deploy) }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(0)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(0)
end
it "does not return builds if except has a branches keyword specified" do
@@ -462,9 +465,9 @@ module Gitlab
rspec: { script: "rspec", type: type, except: ["branches"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(0)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(0)
end
it "returns builds if except has a tags keyword" do
@@ -473,9 +476,9 @@ module Gitlab
rspec: { script: "rspec", type: type, except: ["tags"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(1)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(1)
end
it "does not return builds if except has special keywords specified and source matches" do
@@ -492,9 +495,9 @@ module Gitlab
rspec: { script: "rspec", type: type, except: [possibility[:keyword]] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy", false, possibility[:source]).size).to eq(0)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: 'deploy', tag: false, source: possibility[:source])).size).to eq(0)
end
end
@@ -512,21 +515,27 @@ module Gitlab
rspec: { script: "rspec", type: type, except: [possibility[:keyword]] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy", false, possibility[:source]).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: 'deploy', tag: false, source: possibility[:source])).size).to eq(1)
end
end
it "does not return builds if except has current repository path" do
+ seed_pipeline = pipeline(ref: 'deploy')
+
config = YAML.dump({
before_script: ["pwd"],
- rspec: { script: "rspec", type: type, except: ["branches@path"] }
+ rspec: {
+ script: "rspec",
+ type: type,
+ except: ["branches@#{seed_pipeline.project_full_path}"]
+ }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(0)
+ expect(config_processor.pipeline_stage_builds(type, seed_pipeline).size).to eq(0)
end
it "returns builds if except has different repository path" do
@@ -535,24 +544,28 @@ module Gitlab
rspec: { script: "rspec", type: type, except: ["branches@fork"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(1)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(1)
end
it "returns build except specified type" do
+ master_pipeline = pipeline(ref: 'master')
+ test_pipeline = pipeline(ref: 'test')
+ deploy_pipeline = pipeline(ref: 'deploy')
+
config = YAML.dump({
before_script: ["pwd"],
- rspec: { script: "rspec", type: "test", except: ["master", "deploy", "test@fork"] },
+ rspec: { script: "rspec", type: "test", except: ["master", "deploy", "test@#{test_pipeline.project_full_path}"] },
staging: { script: "deploy", type: "deploy", except: ["master"] },
- production: { script: "deploy", type: "deploy", except: ["master@fork"] }
+ production: { script: "deploy", type: "deploy", except: ["master@#{master_pipeline.project_full_path}"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, 'fork')
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref("deploy", "deploy").size).to eq(2)
- expect(config_processor.builds_for_stage_and_ref("test", "test").size).to eq(0)
- expect(config_processor.builds_for_stage_and_ref("deploy", "master").size).to eq(0)
+ expect(config_processor.pipeline_stage_builds("deploy", deploy_pipeline).size).to eq(2)
+ expect(config_processor.pipeline_stage_builds("test", test_pipeline).size).to eq(0)
+ expect(config_processor.pipeline_stage_builds("deploy", master_pipeline).size).to eq(0)
end
context 'for invalid value' do
@@ -591,9 +604,9 @@ module Gitlab
describe "Scripts handling" do
let(:config_data) { YAML.dump(config) }
- let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config_data, path) }
+ let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config_data) }
- subject { config_processor.builds_for_stage_and_ref("test", "master").first }
+ subject { config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first }
describe "before_script" do
context "in global context" do
@@ -674,10 +687,10 @@ module Gitlab
before_script: ["pwd"],
rspec: { script: "rspec" } })
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("test", "master").first).to eq({
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({
stage: "test",
stage_idx: 1,
name: "rspec",
@@ -709,10 +722,10 @@ module Gitlab
command: ["/usr/local/bin/init", "run"] }, "docker:dind"],
script: "rspec" } })
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("test", "master").first).to eq({
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({
stage: "test",
stage_idx: 1,
name: "rspec",
@@ -742,10 +755,10 @@ module Gitlab
before_script: ["pwd"],
rspec: { script: "rspec" } })
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("test", "master").first).to eq({
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({
stage: "test",
stage_idx: 1,
name: "rspec",
@@ -771,10 +784,10 @@ module Gitlab
before_script: ["pwd"],
rspec: { image: "ruby:2.5", services: ["postgresql", "docker:dind"], script: "rspec" } })
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("test", "master").first).to eq({
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({
stage: "test",
stage_idx: 1,
name: "rspec",
@@ -797,7 +810,7 @@ module Gitlab
end
describe 'Variables' do
- let(:config_processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config), path) }
+ let(:config_processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
subject { config_processor.builds.first[:yaml_variables] }
@@ -918,9 +931,9 @@ module Gitlab
rspec: { script: "rspec", when: when_state }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- builds = config_processor.builds_for_stage_and_ref("test", "master")
+ builds = config_processor.pipeline_stage_builds("test", pipeline(ref: "master"))
expect(builds.size).to eq(1)
expect(builds.first[:when]).to eq(when_state)
end
@@ -951,8 +964,8 @@ module Gitlab
config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("test", "master").first[:options][:cache]).to eq(
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first[:options][:cache]).to eq(
paths: ["logs/", "binaries/"],
untracked: true,
key: 'key',
@@ -970,8 +983,8 @@ module Gitlab
config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("test", "master").first[:options][:cache]).to eq(
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first[:options][:cache]).to eq(
paths: ["logs/", "binaries/"],
untracked: true,
key: 'key',
@@ -990,8 +1003,8 @@ module Gitlab
config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("test", "master").first[:options][:cache]).to eq(
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first[:options][:cache]).to eq(
paths: ["test/"],
untracked: false,
key: 'local',
@@ -1019,8 +1032,8 @@ module Gitlab
config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("test", "master").first).to eq({
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({
stage: "test",
stage_idx: 1,
name: "rspec",
@@ -1055,9 +1068,9 @@ module Gitlab
}
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- builds = config_processor.builds_for_stage_and_ref("test", "master")
+ builds = config_processor.pipeline_stage_builds("test", pipeline(ref: "master"))
expect(builds.size).to eq(1)
expect(builds.first[:options][:artifacts][:when]).to eq(when_state)
end
@@ -1072,7 +1085,7 @@ module Gitlab
end
let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
- let(:builds) { processor.builds_for_stage_and_ref('deploy', 'master') }
+ let(:builds) { processor.pipeline_stage_builds('deploy', pipeline(ref: 'master')) }
context 'when a production environment is specified' do
let(:environment) { 'production' }
@@ -1229,7 +1242,7 @@ module Gitlab
describe "Hidden jobs" do
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) }
- subject { config_processor.builds_for_stage_and_ref("test", "master") }
+ subject { config_processor.pipeline_stage_builds("test", pipeline(ref: "master")) }
shared_examples 'hidden_job_handling' do
it "doesn't create jobs that start with dot" do
@@ -1277,7 +1290,7 @@ module Gitlab
describe "YAML Alias/Anchor" do
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) }
- subject { config_processor.builds_for_stage_and_ref("build", "master") }
+ subject { config_processor.pipeline_stage_builds("build", pipeline(ref: "master")) }
shared_examples 'job_templates_handling' do
it "is correctly supported for jobs" do
@@ -1377,182 +1390,182 @@ EOT
it "returns errors if tags parameter is invalid" do
config = YAML.dump({ rspec: { script: "test", tags: "mysql" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec tags should be an array of strings")
end
it "returns errors if before_script parameter is invalid" do
config = YAML.dump({ before_script: "bundle update", rspec: { script: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "before_script config should be an array of strings")
end
it "returns errors if job before_script parameter is not an array of strings" do
config = YAML.dump({ rspec: { script: "test", before_script: [10, "test"] } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:before_script config should be an array of strings")
end
it "returns errors if after_script parameter is invalid" do
config = YAML.dump({ after_script: "bundle update", rspec: { script: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "after_script config should be an array of strings")
end
it "returns errors if job after_script parameter is not an array of strings" do
config = YAML.dump({ rspec: { script: "test", after_script: [10, "test"] } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:after_script config should be an array of strings")
end
it "returns errors if image parameter is invalid" do
config = YAML.dump({ image: ["test"], rspec: { script: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "image config should be a hash or a string")
end
it "returns errors if job name is blank" do
config = YAML.dump({ '' => { script: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:job name can't be blank")
end
it "returns errors if job name is non-string" do
config = YAML.dump({ 10 => { script: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:10 name should be a symbol")
end
it "returns errors if job image parameter is invalid" do
config = YAML.dump({ rspec: { script: "test", image: ["test"] } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:image config should be a hash or a string")
end
it "returns errors if services parameter is not an array" do
config = YAML.dump({ services: "test", rspec: { script: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "services config should be a array")
end
it "returns errors if services parameter is not an array of strings" do
config = YAML.dump({ services: [10, "test"], rspec: { script: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "service config should be a hash or a string")
end
it "returns errors if job services parameter is not an array" do
config = YAML.dump({ rspec: { script: "test", services: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:services config should be a array")
end
it "returns errors if job services parameter is not an array of strings" do
config = YAML.dump({ rspec: { script: "test", services: [10, "test"] } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "service config should be a hash or a string")
end
it "returns error if job configuration is invalid" do
config = YAML.dump({ extra: "bundle update" })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:extra config should be a hash")
end
it "returns errors if services configuration is not correct" do
config = YAML.dump({ extra: { script: 'rspec', services: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:extra:services config should be a array")
end
it "returns errors if there are no jobs defined" do
config = YAML.dump({ before_script: ["bundle update"] })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs config should contain at least one visible job")
end
it "returns errors if there are no visible jobs defined" do
config = YAML.dump({ before_script: ["bundle update"], '.hidden'.to_sym => { script: 'ls' } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs config should contain at least one visible job")
end
it "returns errors if job allow_failure parameter is not an boolean" do
config = YAML.dump({ rspec: { script: "test", allow_failure: "string" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec allow failure should be a boolean value")
end
it "returns errors if job stage is not a string" do
config = YAML.dump({ rspec: { script: "test", type: 1 } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:type config should be a string")
end
it "returns errors if job stage is not a pre-defined stage" do
config = YAML.dump({ rspec: { script: "test", type: "acceptance" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "rspec job: stage parameter should be build, test, deploy")
end
it "returns errors if job stage is not a defined stage" do
config = YAML.dump({ types: %w(build test), rspec: { script: "test", type: "acceptance" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "rspec job: stage parameter should be build, test")
end
it "returns errors if stages is not an array" do
config = YAML.dump({ stages: "test", rspec: { script: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "stages config should be an array of strings")
end
it "returns errors if stages is not an array of strings" do
config = YAML.dump({ stages: [true, "test"], rspec: { script: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "stages config should be an array of strings")
end
it "returns errors if variables is not a map" do
config = YAML.dump({ variables: "test", rspec: { script: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "variables config should be a hash of key value pairs")
end
it "returns errors if variables is not a map of key-value strings" do
config = YAML.dump({ variables: { test: false }, rspec: { script: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "variables config should be a hash of key value pairs")
end
it "returns errors if job when is not on_success, on_failure or always" do
config = YAML.dump({ rspec: { script: "test", when: 1 } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec when should be on_success, on_failure, always or manual")
end
@@ -1694,6 +1707,10 @@ EOT
end
end
end
+
+ def pipeline(**attributes)
+ build_stubbed(:ci_empty_pipeline, **attributes)
+ end
end
end
end
diff --git a/spec/lib/gitlab/closing_issue_extractor_spec.rb b/spec/lib/gitlab/closing_issue_extractor_spec.rb
index 15012495247..9e528392756 100644
--- a/spec/lib/gitlab/closing_issue_extractor_spec.rb
+++ b/spec/lib/gitlab/closing_issue_extractor_spec.rb
@@ -347,10 +347,10 @@ describe Gitlab::ClosingIssueExtractor do
end
it "fetches cross-project URL references" do
- message = "Closes #{urls.project_issue_url(issue2.project, issue2)} and #{reference}"
+ message = "Closes #{urls.project_issue_url(issue2.project, issue2)}, #{reference} and #{urls.project_issue_url(other_issue.project, other_issue)}"
expect(subject.closed_by_message(message))
- .to match_array([issue, issue2])
+ .to match_array([issue, issue2, other_issue])
end
it "ignores invalid cross-project URL references" do
diff --git a/spec/lib/gitlab/gfm/reference_rewriter_spec.rb b/spec/lib/gitlab/gfm/reference_rewriter_spec.rb
index a3d323fe28a..7dc06c90078 100644
--- a/spec/lib/gitlab/gfm/reference_rewriter_spec.rb
+++ b/spec/lib/gitlab/gfm/reference_rewriter_spec.rb
@@ -1,11 +1,14 @@
require 'spec_helper'
describe Gitlab::Gfm::ReferenceRewriter do
- let(:text) { 'some text' }
- let(:old_project) { create(:project, name: 'old-project') }
- let(:new_project) { create(:project, name: 'new-project') }
+ let(:group) { create(:group) }
+ let(:old_project) { create(:project, name: 'old-project', group: group) }
+ let(:new_project) { create(:project, name: 'new-project', group: group) }
let(:user) { create(:user) }
+ let(:old_project_ref) { old_project.to_reference(new_project) }
+ let(:text) { 'some text' }
+
before do
old_project.team << [user, :reporter]
end
@@ -39,7 +42,7 @@ describe Gitlab::Gfm::ReferenceRewriter do
it { is_expected.not_to include merge_request.to_reference(new_project) }
end
- context 'description ambigous elements' do
+ context 'rewrite ambiguous references' do
context 'url' do
let(:url) { 'http://gitlab.com/#1' }
let(:text) { "This references #1, but not #{url}" }
@@ -66,23 +69,21 @@ describe Gitlab::Gfm::ReferenceRewriter do
context 'description with project labels' do
let!(:label) { create(:label, id: 123, name: 'test', project: old_project) }
- let(:project_ref) { old_project.to_reference(new_project) }
context 'label referenced by id' do
let(:text) { '#1 and ~123' }
- it { is_expected.to eq %Q{#{project_ref}#1 and #{project_ref}~123} }
+ it { is_expected.to eq %Q{#{old_project_ref}#1 and #{old_project_ref}~123} }
end
context 'label referenced by text' do
let(:text) { '#1 and ~"test"' }
- it { is_expected.to eq %Q{#{project_ref}#1 and #{project_ref}~123} }
+ it { is_expected.to eq %Q{#{old_project_ref}#1 and #{old_project_ref}~123} }
end
end
context 'description with group labels' do
let(:old_group) { create(:group) }
let!(:group_label) { create(:group_label, id: 321, name: 'group label', group: old_group) }
- let(:project_ref) { old_project.to_reference(new_project) }
before do
old_project.update(namespace: old_group)
@@ -90,21 +91,53 @@ describe Gitlab::Gfm::ReferenceRewriter do
context 'label referenced by id' do
let(:text) { '#1 and ~321' }
- it { is_expected.to eq %Q{#{project_ref}#1 and #{project_ref}~321} }
+ it { is_expected.to eq %Q{#{old_project_ref}#1 and #{old_project_ref}~321} }
end
context 'label referenced by text' do
let(:text) { '#1 and ~"group label"' }
- it { is_expected.to eq %Q{#{project_ref}#1 and #{project_ref}~321} }
+ it { is_expected.to eq %Q{#{old_project_ref}#1 and #{old_project_ref}~321} }
end
end
end
+ end
+
+ context 'reference contains project milestone' do
+ let!(:milestone) do
+ create(:milestone, title: '9.0', project: old_project)
+ end
+
+ let(:text) { 'milestone: %"9.0"' }
+
+ it { is_expected.to eq %Q[milestone: #{old_project_ref}%"9.0"] }
+ end
+
+ context 'when referring to group milestone' do
+ let!(:milestone) do
+ create(:milestone, title: '10.0', group: group)
+ end
+
+ let(:text) { 'milestone %"10.0"' }
+
+ it { is_expected.to eq text }
+ end
+
+ context 'when referable has a nil reference' do
+ before do
+ create(:milestone, title: '9.0', project: old_project)
+
+ allow_any_instance_of(Milestone)
+ .to receive(:to_reference)
+ .and_return(nil)
+ end
- context 'reference contains milestone' do
- let(:milestone) { create(:milestone) }
- let(:text) { "milestone ref: #{milestone.to_reference}" }
+ let(:text) { 'milestone: %"9.0"' }
- it { is_expected.to eq text }
+ it 'raises an error that should be fixed' do
+ expect { subject }.to raise_error(
+ described_class::RewriteError,
+ 'Unspecified reference detected for Milestone'
+ )
end
end
end
diff --git a/spec/lib/gitlab/git/hook_spec.rb b/spec/lib/gitlab/git/hook_spec.rb
index ea3e4680b1d..0ff4f3bd105 100644
--- a/spec/lib/gitlab/git/hook_spec.rb
+++ b/spec/lib/gitlab/git/hook_spec.rb
@@ -28,6 +28,7 @@ describe Gitlab::Git::Hook do
f.write(<<-HOOK)
echo 'regular message from the hook'
echo 'error message from the hook' 1>&2
+ echo 'error message from the hook line 2' 1>&2
exit 1
HOOK
end
@@ -73,7 +74,7 @@ describe Gitlab::Git::Hook do
status, errors = hook.trigger(gl_id, blank, blank, ref)
expect(status).to be false
- expect(errors).to eq("error message from the hook\n")
+ expect(errors).to eq("error message from the hook<br>error message from the hook line 2<br>")
end
end
end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 556a148c3bc..4fc26c625a5 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -481,7 +481,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
it 'raises an error if it failed' do
- expect(Gitlab::Popen).to receive(:popen).and_return(['Error', 1])
+ expect(@repo).to receive(:popen).and_return(['Error', 1])
expect do
@repo.delete_refs('refs/heads/fix')
diff --git a/spec/lib/gitlab/git/rev_list_spec.rb b/spec/lib/gitlab/git/rev_list_spec.rb
index b051a088171..c0eac98d718 100644
--- a/spec/lib/gitlab/git/rev_list_spec.rb
+++ b/spec/lib/gitlab/git/rev_list_spec.rb
@@ -14,7 +14,7 @@ describe Gitlab::Git::RevList do
let(:rev_list) { described_class.new(newrev: 'newrev', path_to_repo: project.repository.path_to_repo) }
it 'calls out to `popen`' do
- expect(Gitlab::Popen).to receive(:popen).with([
+ expect(rev_list).to receive(:popen).with([
Gitlab.config.git.bin_path,
"--git-dir=#{project.repository.path_to_repo}",
'rev-list',
@@ -36,7 +36,7 @@ describe Gitlab::Git::RevList do
let(:rev_list) { described_class.new(oldrev: 'oldrev', newrev: 'newrev', path_to_repo: project.repository.path_to_repo) }
it 'calls out to `popen`' do
- expect(Gitlab::Popen).to receive(:popen).with([
+ expect(rev_list).to receive(:popen).with([
Gitlab.config.git.bin_path,
"--git-dir=#{project.repository.path_to_repo}",
'rev-list',
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index ec3abcb0953..1ef3e2e3a5d 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -51,6 +51,10 @@ describe Gitlab::GitalyClient::CommitService do
expect(ret).to be_kind_of(Gitlab::GitalyClient::DiffStitcher)
end
+
+ it 'encodes paths correctly' do
+ expect { client.diff_from_parent(commit, paths: ['encoding/test.txt', 'encoding/テスト.txt']) }.not_to raise_error
+ end
end
describe '#commit_deltas' do
diff --git a/spec/lib/gitlab/gitaly_client_spec.rb b/spec/lib/gitlab/gitaly_client_spec.rb
index a9b861fcff2..9a84d6e6a67 100644
--- a/spec/lib/gitlab/gitaly_client_spec.rb
+++ b/spec/lib/gitlab/gitaly_client_spec.rb
@@ -38,6 +38,130 @@ describe Gitlab::GitalyClient, skip_gitaly_mock: true do
end
end
+ describe 'allow_n_plus_1_calls' do
+ context 'when RequestStore is enabled', :request_store do
+ it 'returns the result of the allow_n_plus_1_calls block' do
+ expect(described_class.allow_n_plus_1_calls { "result" }).to eq("result")
+ end
+ end
+
+ context 'when RequestStore is not active' do
+ it 'returns the result of the allow_n_plus_1_calls block' do
+ expect(described_class.allow_n_plus_1_calls { "something" }).to eq("something")
+ end
+ end
+ end
+
+ describe 'enforce_gitaly_request_limits?' do
+ def call_gitaly(count = 1)
+ (1..count).each do
+ described_class.enforce_gitaly_request_limits(:test)
+ end
+ end
+
+ context 'when RequestStore is enabled', :request_store do
+ it 'allows up to the maximum number of allowed calls' do
+ expect { call_gitaly(Gitlab::GitalyClient::MAXIMUM_GITALY_CALLS) }.not_to raise_error
+ end
+
+ context 'when the maximum number of calls has been reached' do
+ before do
+ call_gitaly(Gitlab::GitalyClient::MAXIMUM_GITALY_CALLS)
+ end
+
+ it 'fails on the next call' do
+ expect { call_gitaly(1) }.to raise_error(Gitlab::GitalyClient::TooManyInvocationsError)
+ end
+ end
+
+ it 'allows the maximum number of calls to be exceeded within an allow_n_plus_1_calls block' do
+ expect do
+ described_class.allow_n_plus_1_calls do
+ call_gitaly(Gitlab::GitalyClient::MAXIMUM_GITALY_CALLS + 1)
+ end
+ end.not_to raise_error
+ end
+
+ context 'when the maximum number of calls has been reached within an allow_n_plus_1_calls block' do
+ before do
+ described_class.allow_n_plus_1_calls do
+ call_gitaly(Gitlab::GitalyClient::MAXIMUM_GITALY_CALLS)
+ end
+ end
+
+ it 'allows up to the maximum number of calls outside of an allow_n_plus_1_calls block' do
+ expect { call_gitaly(Gitlab::GitalyClient::MAXIMUM_GITALY_CALLS) }.not_to raise_error
+ end
+
+ it 'does not allow the maximum number of calls to be exceeded outside of an allow_n_plus_1_calls block' do
+ expect { call_gitaly(Gitlab::GitalyClient::MAXIMUM_GITALY_CALLS + 1) }.to raise_error(Gitlab::GitalyClient::TooManyInvocationsError)
+ end
+ end
+ end
+
+ context 'when RequestStore is not active' do
+ it 'does not raise errors when the maximum number of allowed calls is exceeded' do
+ expect { call_gitaly(Gitlab::GitalyClient::MAXIMUM_GITALY_CALLS + 2) }.not_to raise_error
+ end
+
+ it 'does not fail when the maximum number of calls is exceeded within an allow_n_plus_1_calls block' do
+ expect do
+ described_class.allow_n_plus_1_calls do
+ call_gitaly(Gitlab::GitalyClient::MAXIMUM_GITALY_CALLS + 1)
+ end
+ end.not_to raise_error
+ end
+ end
+ end
+
+ describe 'get_request_count' do
+ context 'when RequestStore is enabled', :request_store do
+ context 'when enforce_gitaly_request_limits is called outside of allow_n_plus_1_calls blocks' do
+ before do
+ described_class.enforce_gitaly_request_limits(:call)
+ end
+
+ it 'counts gitaly calls' do
+ expect(described_class.get_request_count).to eq(1)
+ end
+ end
+
+ context 'when enforce_gitaly_request_limits is called inside and outside of allow_n_plus_1_calls blocks' do
+ before do
+ described_class.enforce_gitaly_request_limits(:call)
+ described_class.allow_n_plus_1_calls do
+ described_class.enforce_gitaly_request_limits(:call)
+ end
+ end
+
+ it 'counts gitaly calls' do
+ expect(described_class.get_request_count).to eq(2)
+ end
+ end
+
+ context 'when reset_counts is called' do
+ before do
+ described_class.enforce_gitaly_request_limits(:call)
+ described_class.reset_counts
+ end
+
+ it 'resets counts' do
+ expect(described_class.get_request_count).to eq(0)
+ end
+ end
+ end
+
+ context 'when RequestStore is not active' do
+ before do
+ described_class.enforce_gitaly_request_limits(:call)
+ end
+
+ it 'returns zero' do
+ expect(described_class.get_request_count).to eq(0)
+ end
+ end
+ end
+
describe 'feature_enabled?' do
let(:feature_name) { 'my_feature' }
let(:real_feature_name) { "gitaly_#{feature_name}" }
diff --git a/spec/migrations/clean_stages_statuses_migration_spec.rb b/spec/migrations/clean_stages_statuses_migration_spec.rb
new file mode 100644
index 00000000000..38705f8eaae
--- /dev/null
+++ b/spec/migrations/clean_stages_statuses_migration_spec.rb
@@ -0,0 +1,51 @@
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20170912113435_clean_stages_statuses_migration.rb')
+
+describe CleanStagesStatusesMigration, :migration, :sidekiq, :redis do
+ let(:migration) { spy('migration') }
+
+ before do
+ allow(Gitlab::BackgroundMigration::MigrateStageStatus)
+ .to receive(:new).and_return(migration)
+ end
+
+ context 'when there are pending background migrations' do
+ it 'processes pending jobs synchronously' do
+ Sidekiq::Testing.disable! do
+ BackgroundMigrationWorker
+ .perform_in(2.minutes, 'MigrateStageStatus', [1, 1])
+ BackgroundMigrationWorker
+ .perform_async('MigrateStageStatus', [1, 1])
+
+ migrate!
+
+ expect(migration).to have_received(:perform).with(1, 1).twice
+ end
+ end
+ end
+
+ context 'when there are no background migrations pending' do
+ it 'does nothing' do
+ Sidekiq::Testing.disable! do
+ migrate!
+
+ expect(migration).not_to have_received(:perform)
+ end
+ end
+ end
+
+ context 'when there are still unmigrated stages afterwards' do
+ let(:stages) { table('ci_stages') }
+
+ before do
+ stages.create!(status: nil, name: 'build')
+ stages.create!(status: nil, name: 'test')
+ end
+
+ it 'migrates statuses sequentially in batches' do
+ migrate!
+
+ expect(migration).to have_received(:perform).once
+ end
+ end
+end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 77f0be6b120..9c1e460ab20 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -26,6 +26,7 @@ describe Ci::Pipeline, :mailer do
it { is_expected.to respond_to :git_author_name }
it { is_expected.to respond_to :git_author_email }
it { is_expected.to respond_to :short_sha }
+ it { is_expected.to delegate_method(:full_path).to(:project).with_prefix }
describe '#source' do
context 'when creating new pipeline' do
diff --git a/spec/models/key_spec.rb b/spec/models/key_spec.rb
index dbc4aba8547..8eabc4ca72f 100644
--- a/spec/models/key_spec.rb
+++ b/spec/models/key_spec.rb
@@ -37,30 +37,17 @@ describe Key, :mailer do
end
describe "#update_last_used_at" do
- let(:key) { create(:key) }
-
- context 'when key was not updated during the last day' do
- before do
- allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain)
- .and_return('000000')
- end
-
- it 'enqueues a UseKeyWorker job' do
- expect(UseKeyWorker).to receive(:perform_async).with(key.id)
- key.update_last_used_at
- end
- end
+ it 'updates the last used timestamp' do
+ key = build(:key)
+ service = double(:service)
+
+ expect(Keys::LastUsedService).to receive(:new)
+ .with(key)
+ .and_return(service)
- context 'when key was updated during the last day' do
- before do
- allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain)
- .and_return(false)
- end
+ expect(service).to receive(:execute)
- it 'does not enqueue a UseKeyWorker job' do
- expect(UseKeyWorker).not_to receive(:perform_async)
- key.update_last_used_at
- end
+ key.update_last_used_at
end
end
end
diff --git a/spec/models/milestone_spec.rb b/spec/models/milestone_spec.rb
index d3da0107d5c..13e37fffa4e 100644
--- a/spec/models/milestone_spec.rb
+++ b/spec/models/milestone_spec.rb
@@ -238,7 +238,7 @@ describe Milestone do
let(:milestone) { build_stubbed(:milestone, iid: 1, project: project, name: 'milestone') }
it 'returns a String reference to the object' do
- expect(milestone.to_reference).to eq '%1'
+ expect(milestone.to_reference).to eq '%"milestone"'
end
it 'returns a reference by name when the format is set to :name' do
@@ -246,24 +246,29 @@ describe Milestone do
end
it 'supports a cross-project reference' do
- expect(milestone.to_reference(another_project)).to eq 'sample-project%1'
+ expect(milestone.to_reference(another_project)).to eq 'sample-project%"milestone"'
end
end
context 'for a group milestone' do
let(:milestone) { build_stubbed(:milestone, iid: 1, group: group, name: 'milestone') }
- it 'returns nil with the default format' do
- expect(milestone.to_reference).to be_nil
+ it 'returns a group milestone reference with a default format' do
+ expect(milestone.to_reference).to eq '%"milestone"'
end
it 'returns a reference by name when the format is set to :name' do
expect(milestone.to_reference(format: :name)).to eq '%"milestone"'
end
- it 'does not supports cross-project references' do
+ it 'supports cross-project references within a group' do
expect(milestone.to_reference(another_project, format: :name)).to eq '%"milestone"'
end
+
+ it 'raises an error when using iid format' do
+ expect { milestone.to_reference(format: :iid) }
+ .to raise_error(ArgumentError, 'Cannot refer to a group milestone by an internal id!')
+ end
end
end
diff --git a/spec/models/project_auto_devops_spec.rb b/spec/models/project_auto_devops_spec.rb
index ca13af4d73e..12069575866 100644
--- a/spec/models/project_auto_devops_spec.rb
+++ b/spec/models/project_auto_devops_spec.rb
@@ -8,7 +8,21 @@ describe ProjectAutoDevops do
it { is_expected.to respond_to(:created_at) }
it { is_expected.to respond_to(:updated_at) }
- describe 'variables' do
+ describe '#has_domain?' do
+ context 'when domain is defined' do
+ let(:auto_devops) { build_stubbed(:project_auto_devops, project: project, domain: 'domain.com') }
+
+ it { expect(auto_devops).to have_domain }
+ end
+
+ context 'when domain is empty' do
+ let(:auto_devops) { build_stubbed(:project_auto_devops, project: project, domain: '') }
+
+ it { expect(auto_devops).not_to have_domain }
+ end
+ end
+
+ describe '#variables' do
let(:auto_devops) { build_stubbed(:project_auto_devops, project: project, domain: domain) }
context 'when domain is defined' do
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 60cd7e70055..76bb658b10d 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -1311,24 +1311,25 @@ describe Repository, models: true do
describe '#revert' do
let(:new_image_commit) { repository.commit('33f3729a45c02fc67d00adb1b8bca394b0e761d9') }
let(:update_image_commit) { repository.commit('2f63565e7aac07bcdadb654e253078b727143ec4') }
+ let(:message) { 'revert message' }
context 'when there is a conflict' do
it 'raises an error' do
- expect { repository.revert(user, new_image_commit, 'master') }.to raise_error(/Failed to/)
+ expect { repository.revert(user, new_image_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
end
end
context 'when commit was already reverted' do
it 'raises an error' do
- repository.revert(user, update_image_commit, 'master')
+ repository.revert(user, update_image_commit, 'master', message)
- expect { repository.revert(user, update_image_commit, 'master') }.to raise_error(/Failed to/)
+ expect { repository.revert(user, update_image_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
end
end
context 'when commit can be reverted' do
it 'reverts the changes' do
- expect(repository.revert(user, update_image_commit, 'master')).to be_truthy
+ expect(repository.revert(user, update_image_commit, 'master', message)).to be_truthy
end
end
@@ -1337,7 +1338,7 @@ describe Repository, models: true do
merge_commit
expect(repository.blob_at_branch('master', 'files/ruby/feature.rb')).to be_present
- repository.revert(user, merge_commit, 'master')
+ repository.revert(user, merge_commit, 'master', message)
expect(repository.blob_at_branch('master', 'files/ruby/feature.rb')).not_to be_present
end
end
@@ -1347,24 +1348,25 @@ describe Repository, models: true do
let(:conflict_commit) { repository.commit('c642fe9b8b9f28f9225d7ea953fe14e74748d53b') }
let(:pickable_commit) { repository.commit('7d3b0f7cff5f37573aea97cebfd5692ea1689924') }
let(:pickable_merge) { repository.commit('e56497bb5f03a90a51293fc6d516788730953899') }
+ let(:message) { 'cherry-pick message' }
context 'when there is a conflict' do
it 'raises an error' do
- expect { repository.cherry_pick(user, conflict_commit, 'master') }.to raise_error(/Failed to/)
+ expect { repository.cherry_pick(user, conflict_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
end
end
context 'when commit was already cherry-picked' do
it 'raises an error' do
- repository.cherry_pick(user, pickable_commit, 'master')
+ repository.cherry_pick(user, pickable_commit, 'master', message)
- expect { repository.cherry_pick(user, pickable_commit, 'master') }.to raise_error(/Failed to/)
+ expect { repository.cherry_pick(user, pickable_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
end
end
context 'when commit can be cherry-picked' do
it 'cherry-picks the changes' do
- expect(repository.cherry_pick(user, pickable_commit, 'master')).to be_truthy
+ expect(repository.cherry_pick(user, pickable_commit, 'master', message)).to be_truthy
end
end
@@ -1372,11 +1374,11 @@ describe Repository, models: true do
it 'cherry-picks the changes' do
expect(repository.blob_at_branch('improve/awesome', 'foo/bar/.gitkeep')).to be_nil
- cherry_pick_commit_sha = repository.cherry_pick(user, pickable_merge, 'improve/awesome')
+ cherry_pick_commit_sha = repository.cherry_pick(user, pickable_merge, 'improve/awesome', message)
cherry_pick_commit_message = project.commit(cherry_pick_commit_sha).message
expect(repository.blob_at_branch('improve/awesome', 'foo/bar/.gitkeep')).not_to be_nil
- expect(cherry_pick_commit_message).to include('cherry picked from')
+ expect(cherry_pick_commit_message).to eq(message)
end
end
end
diff --git a/spec/requests/api/access_requests_spec.rb b/spec/requests/api/access_requests_spec.rb
index 6bd17697c33..50d0f72f6bc 100644
--- a/spec/requests/api/access_requests_spec.rb
+++ b/spec/requests/api/access_requests_spec.rb
@@ -1,12 +1,12 @@
require 'spec_helper'
describe API::AccessRequests do
- let(:master) { create(:user) }
- let(:developer) { create(:user) }
- let(:access_requester) { create(:user) }
- let(:stranger) { create(:user) }
+ set(:master) { create(:user) }
+ set(:developer) { create(:user) }
+ set(:access_requester) { create(:user) }
+ set(:stranger) { create(:user) }
- let(:project) do
+ set(:project) do
create(:project, :public, :access_requestable, creator_id: master.id, namespace: master.namespace) do |project|
project.team << [developer, :developer]
project.team << [master, :master]
@@ -14,7 +14,7 @@ describe API::AccessRequests do
end
end
- let(:group) do
+ set(:group) do
create(:group, :public, :access_requestable) do |group|
group.add_developer(developer)
group.add_owner(master)
diff --git a/spec/requests/api/award_emoji_spec.rb b/spec/requests/api/award_emoji_spec.rb
index 593068b8cd7..7a0765c1fae 100644
--- a/spec/requests/api/award_emoji_spec.rb
+++ b/spec/requests/api/award_emoji_spec.rb
@@ -1,13 +1,13 @@
require 'spec_helper'
describe API::AwardEmoji do
- let(:user) { create(:user) }
- let!(:project) { create(:project) }
- let(:issue) { create(:issue, project: project) }
- let!(:award_emoji) { create(:award_emoji, awardable: issue, user: user) }
- let!(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
- let!(:downvote) { create(:award_emoji, :downvote, awardable: merge_request, user: user) }
- let!(:note) { create(:note, project: project, noteable: issue) }
+ set(:user) { create(:user) }
+ set(:project) { create(:project) }
+ set(:issue) { create(:issue, project: project) }
+ set(:award_emoji) { create(:award_emoji, awardable: issue, user: user) }
+ let!(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let!(:downvote) { create(:award_emoji, :downvote, awardable: merge_request, user: user) }
+ set(:note) { create(:note, project: project, noteable: issue) }
before do
project.team << [user, :master]
diff --git a/spec/requests/api/boards_spec.rb b/spec/requests/api/boards_spec.rb
index f698d5dddb3..fcfa4ddfbfe 100644
--- a/spec/requests/api/boards_spec.rb
+++ b/spec/requests/api/boards_spec.rb
@@ -1,34 +1,34 @@
require 'spec_helper'
describe API::Boards do
- let(:user) { create(:user) }
- let(:user2) { create(:user) }
- let(:non_member) { create(:user) }
- let(:guest) { create(:user) }
- let(:admin) { create(:user, :admin) }
- let!(:project) { create(:project, :public, creator_id: user.id, namespace: user.namespace ) }
-
- let!(:dev_label) do
+ set(:user) { create(:user) }
+ set(:user2) { create(:user) }
+ set(:non_member) { create(:user) }
+ set(:guest) { create(:user) }
+ set(:admin) { create(:user, :admin) }
+ set(:project) { create(:project, :public, creator_id: user.id, namespace: user.namespace ) }
+
+ set(:dev_label) do
create(:label, title: 'Development', color: '#FFAABB', project: project)
end
- let!(:test_label) do
+ set(:test_label) do
create(:label, title: 'Testing', color: '#FFAACC', project: project)
end
- let!(:ux_label) do
+ set(:ux_label) do
create(:label, title: 'UX', color: '#FF0000', project: project)
end
- let!(:dev_list) do
+ set(:dev_list) do
create(:list, label: dev_label, position: 1)
end
- let!(:test_list) do
+ set(:test_list) do
create(:list, label: test_label, position: 2)
end
- let!(:board) do
+ set(:board) do
create(:board, project: project, lists: [dev_list, test_list])
end
@@ -187,8 +187,11 @@ describe API::Boards do
end
context "when the user is project owner" do
- let(:owner) { create(:user) }
- let(:project) { create(:project, namespace: owner.namespace) }
+ set(:owner) { create(:user) }
+
+ before do
+ project.update(namespace: owner.namespace)
+ end
it "deletes the list if an admin requests it" do
delete api("#{base_url}/#{dev_list.id}", owner)
diff --git a/spec/requests/api/branches_spec.rb b/spec/requests/api/branches_spec.rb
index cc794fad3a7..16b12446ed4 100644
--- a/spec/requests/api/branches_spec.rb
+++ b/spec/requests/api/branches_spec.rb
@@ -1,9 +1,9 @@
require 'spec_helper'
describe API::Branches do
- let(:user) { create(:user) }
- let(:guest) { create(:user).tap { |u| project.add_guest(u) } }
+ set(:user) { create(:user) }
let(:project) { create(:project, :repository, creator: user, path: 'my.project') }
+ let(:guest) { create(:user).tap { |u| project.add_guest(u) } }
let(:branch_name) { 'feature' }
let(:branch_sha) { '0b4bc9a49b562e85de7cc9e834518ea6828729b9' }
let(:branch_with_dot) { project.repository.find_branch('ends-with.json') }
@@ -40,7 +40,9 @@ describe API::Branches do
end
context 'when unauthenticated', 'and project is public' do
- let(:project) { create(:project, :public, :repository) }
+ before do
+ project.update(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ end
it_behaves_like 'repository branches'
end
@@ -118,7 +120,9 @@ describe API::Branches do
end
context 'when unauthenticated', 'and project is public' do
- let(:project) { create(:project, :public, :repository) }
+ before do
+ project.update(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ end
it_behaves_like 'repository branch'
end
diff --git a/spec/requests/api/broadcast_messages_spec.rb b/spec/requests/api/broadcast_messages_spec.rb
index b043a333d33..eacc575d97f 100644
--- a/spec/requests/api/broadcast_messages_spec.rb
+++ b/spec/requests/api/broadcast_messages_spec.rb
@@ -1,8 +1,9 @@
require 'spec_helper'
describe API::BroadcastMessages do
- let(:user) { create(:user) }
- let(:admin) { create(:admin) }
+ set(:user) { create(:user) }
+ set(:admin) { create(:admin) }
+ set(:message) { create(:broadcast_message) }
describe 'GET /broadcast_messages' do
it 'returns a 401 for anonymous users' do
@@ -31,8 +32,6 @@ describe API::BroadcastMessages do
end
describe 'GET /broadcast_messages/:id' do
- let!(:message) { create(:broadcast_message) }
-
it 'returns a 401 for anonymous users' do
get api("/broadcast_messages/#{message.id}")
@@ -103,8 +102,6 @@ describe API::BroadcastMessages do
end
describe 'PUT /broadcast_messages/:id' do
- let!(:message) { create(:broadcast_message) }
-
it 'returns a 401 for anonymous users' do
put api("/broadcast_messages/#{message.id}"),
attributes_for(:broadcast_message)
@@ -155,8 +152,6 @@ describe API::BroadcastMessages do
end
describe 'DELETE /broadcast_messages/:id' do
- let!(:message) { create(:broadcast_message) }
-
it 'returns a 401 for anonymous users' do
delete api("/broadcast_messages/#{message.id}"),
attributes_for(:broadcast_message)
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index 42f0079e173..1671a046fdf 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -159,11 +159,14 @@ describe API::Groups do
context 'when using owned in the request' do
it 'returns an array of groups the user owns' do
+ group1.add_master(user2)
+
get api('/groups', user2), owned: true
expect(response).to have_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
+ expect(json_response.length).to eq(1)
expect(json_response.first['name']).to eq(group2.name)
end
end
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 92e7d797cbd..508df990952 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -1181,6 +1181,59 @@ describe API::Projects do
end
end
end
+
+ describe 'GET /projects/:id/forks' do
+ let(:private_fork) { create(:project, :private, :empty_repo) }
+ let(:member) { create(:user) }
+ let(:non_member) { create(:user) }
+
+ before do
+ private_fork.add_developer(member)
+ end
+
+ context 'for a forked project' do
+ before do
+ post api("/projects/#{private_fork.id}/fork/#{project_fork_source.id}", admin)
+ private_fork.reload
+ expect(private_fork.forked_from_project).not_to be_nil
+ expect(private_fork.forked?).to be_truthy
+ project_fork_source.reload
+ expect(project_fork_source.forks.length).to eq(1)
+ expect(project_fork_source.forks).to include(private_fork)
+ end
+
+ context 'for a user that can access the forks' do
+ it 'returns the forks' do
+ get api("/projects/#{project_fork_source.id}/forks", member)
+
+ expect(response).to have_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response.length).to eq(1)
+ expect(json_response[0]['name']).to eq(private_fork.name)
+ end
+ end
+
+ context 'for a user that cannot access the forks' do
+ it 'returns an empty array' do
+ get api("/projects/#{project_fork_source.id}/forks", non_member)
+
+ expect(response).to have_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response.length).to eq(0)
+ end
+ end
+ end
+
+ context 'for a non-forked project' do
+ it 'returns an empty array' do
+ get api("/projects/#{project_fork_source.id}/forks")
+
+ expect(response).to have_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response.length).to eq(0)
+ end
+ end
+ end
end
describe "POST /projects/:id/share" do
diff --git a/spec/requests/api/v3/award_emoji_spec.rb b/spec/requests/api/v3/award_emoji_spec.rb
index 681e8e04295..36d793f505d 100644
--- a/spec/requests/api/v3/award_emoji_spec.rb
+++ b/spec/requests/api/v3/award_emoji_spec.rb
@@ -1,13 +1,13 @@
require 'spec_helper'
describe API::V3::AwardEmoji do
- let(:user) { create(:user) }
- let!(:project) { create(:project) }
- let(:issue) { create(:issue, project: project) }
- let!(:award_emoji) { create(:award_emoji, awardable: issue, user: user) }
- let!(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
- let!(:downvote) { create(:award_emoji, :downvote, awardable: merge_request, user: user) }
- let!(:note) { create(:note, project: project, noteable: issue) }
+ set(:user) { create(:user) }
+ set(:project) { create(:project) }
+ set(:issue) { create(:issue, project: project) }
+ set(:award_emoji) { create(:award_emoji, awardable: issue, user: user) }
+ let!(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let!(:downvote) { create(:award_emoji, :downvote, awardable: merge_request, user: user) }
+ set(:note) { create(:note, project: project, noteable: issue) }
before { project.team << [user, :master] }
diff --git a/spec/requests/api/v3/boards_spec.rb b/spec/requests/api/v3/boards_spec.rb
index b86aab2ec70..ea2627142bf 100644
--- a/spec/requests/api/v3/boards_spec.rb
+++ b/spec/requests/api/v3/boards_spec.rb
@@ -1,28 +1,28 @@
require 'spec_helper'
describe API::V3::Boards do
- let(:user) { create(:user) }
- let(:guest) { create(:user) }
- let(:non_member) { create(:user) }
- let!(:project) { create(:project, :public, creator_id: user.id, namespace: user.namespace ) }
+ set(:user) { create(:user) }
+ set(:guest) { create(:user) }
+ set(:non_member) { create(:user) }
+ set(:project) { create(:project, :public, creator_id: user.id, namespace: user.namespace ) }
- let!(:dev_label) do
+ set(:dev_label) do
create(:label, title: 'Development', color: '#FFAABB', project: project)
end
- let!(:test_label) do
+ set(:test_label) do
create(:label, title: 'Testing', color: '#FFAACC', project: project)
end
- let!(:dev_list) do
+ set(:dev_list) do
create(:list, label: dev_label, position: 1)
end
- let!(:test_list) do
+ set(:test_list) do
create(:list, label: test_label, position: 2)
end
- let!(:board) do
+ set(:board) do
create(:board, project: project, lists: [dev_list, test_list])
end
@@ -98,8 +98,11 @@ describe API::V3::Boards do
end
context "when the user is project owner" do
- let(:owner) { create(:user) }
- let(:project) { create(:project, namespace: owner.namespace) }
+ set(:owner) { create(:user) }
+
+ before do
+ project.update(namespace: owner.namespace)
+ end
it "deletes the list if an admin requests it" do
delete v3_api("#{base_url}/#{dev_list.id}", owner)
diff --git a/spec/requests/api/v3/branches_spec.rb b/spec/requests/api/v3/branches_spec.rb
index c88f7788697..9cd11a67712 100644
--- a/spec/requests/api/v3/branches_spec.rb
+++ b/spec/requests/api/v3/branches_spec.rb
@@ -2,11 +2,11 @@ require 'spec_helper'
require 'mime/types'
describe API::V3::Branches do
- let(:user) { create(:user) }
- let(:user2) { create(:user) }
- let!(:project) { create(:project, :repository, creator: user) }
- let!(:master) { create(:project_member, :master, user: user, project: project) }
- let!(:guest) { create(:project_member, :guest, user: user2, project: project) }
+ set(:user) { create(:user) }
+ set(:user2) { create(:user) }
+ set(:project) { create(:project, :repository, creator: user) }
+ set(:master) { create(:project_member, :master, user: user, project: project) }
+ set(:guest) { create(:project_member, :guest, user: user2, project: project) }
let!(:branch_name) { 'feature' }
let!(:branch_sha) { '0b4bc9a49b562e85de7cc9e834518ea6828729b9' }
let!(:branch_with_dot) { CreateBranchService.new(project, user).execute("with.1.2.3", "master") }
diff --git a/spec/requests/api/v3/broadcast_messages_spec.rb b/spec/requests/api/v3/broadcast_messages_spec.rb
index 948cd78c177..d04b1c72004 100644
--- a/spec/requests/api/v3/broadcast_messages_spec.rb
+++ b/spec/requests/api/v3/broadcast_messages_spec.rb
@@ -1,11 +1,11 @@
require 'spec_helper'
describe API::V3::BroadcastMessages do
- let(:user) { create(:user) }
- let(:admin) { create(:admin) }
+ set(:user) { create(:user) }
+ set(:admin) { create(:admin) }
describe 'DELETE /broadcast_messages/:id' do
- let!(:message) { create(:broadcast_message) }
+ set(:message) { create(:broadcast_message) }
it 'returns a 401 for anonymous users' do
delete v3_api("/broadcast_messages/#{message.id}"),
diff --git a/spec/requests/api/v3/builds_spec.rb b/spec/requests/api/v3/builds_spec.rb
index dc95599546c..0a2ff1058e3 100644
--- a/spec/requests/api/v3/builds_spec.rb
+++ b/spec/requests/api/v3/builds_spec.rb
@@ -1,13 +1,13 @@
require 'spec_helper'
describe API::V3::Builds do
- let(:user) { create(:user) }
+ set(:user) { create(:user) }
let(:api_user) { user }
- let!(:project) { create(:project, :repository, creator: user, public_builds: false) }
- let!(:developer) { create(:project_member, :developer, user: user, project: project) }
- let(:reporter) { create(:project_member, :reporter, project: project) }
- let(:guest) { create(:project_member, :guest, project: project) }
- let!(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: project.default_branch) }
+ set(:project) { create(:project, :repository, creator: user, public_builds: false) }
+ set(:developer) { create(:project_member, :developer, user: user, project: project) }
+ set(:reporter) { create(:project_member, :reporter, project: project) }
+ set(:guest) { create(:project_member, :guest, project: project) }
+ set(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: project.default_branch) }
let!(:build) { create(:ci_build, pipeline: pipeline) }
describe 'GET /projects/:id/builds ' do
diff --git a/spec/requests/api/v3/issues_spec.rb b/spec/requests/api/v3/issues_spec.rb
index 9a0e6647ebf..86768d7397a 100644
--- a/spec/requests/api/v3/issues_spec.rb
+++ b/spec/requests/api/v3/issues_spec.rb
@@ -1,13 +1,13 @@
require 'spec_helper'
describe API::V3::Issues, :mailer do
- let(:user) { create(:user) }
- let(:user2) { create(:user) }
- let(:non_member) { create(:user) }
- let(:guest) { create(:user) }
- let(:author) { create(:author) }
- let(:assignee) { create(:assignee) }
- let(:admin) { create(:user, :admin) }
+ set(:user) { create(:user) }
+ set(:user2) { create(:user) }
+ set(:non_member) { create(:user) }
+ set(:guest) { create(:user) }
+ set(:author) { create(:author) }
+ set(:assignee) { create(:assignee) }
+ set(:admin) { create(:user, :admin) }
let!(:project) { create(:project, :public, creator_id: user.id, namespace: user.namespace ) }
let!(:closed_issue) do
create :closed_issue,
@@ -822,7 +822,8 @@ describe API::V3::Issues, :mailer do
end
context 'resolving issues in a merge request' do
- let(:discussion) { create(:diff_note_on_merge_request).to_discussion }
+ set(:diff_note_on_merge_request) { create(:diff_note_on_merge_request) }
+ let(:discussion) { diff_note_on_merge_request.to_discussion }
let(:merge_request) { discussion.noteable }
let(:project) { merge_request.source_project }
before do
@@ -1169,7 +1170,7 @@ describe API::V3::Issues, :mailer do
end
context "when the user is project owner" do
- let(:owner) { create(:user) }
+ set(:owner) { create(:user) }
let(:project) { create(:project, namespace: owner.namespace) }
it "deletes the issue if an admin requests it" do
diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb
index 2de8daba6b5..3baf9b1edab 100644
--- a/spec/serializers/pipeline_serializer_spec.rb
+++ b/spec/serializers/pipeline_serializer_spec.rb
@@ -103,9 +103,15 @@ describe PipelineSerializer do
let(:project) { create(:project) }
before do
- Ci::Pipeline::AVAILABLE_STATUSES.each do |status|
- create_pipeline(status)
+ # Since RequestStore.active? is true we have to allow the
+ # gitaly calls in this block
+ # Issue: https://gitlab.com/gitlab-org/gitlab-ce/issues/37772
+ Gitlab::GitalyClient.allow_n_plus_1_calls do
+ Ci::Pipeline::AVAILABLE_STATUSES.each do |status|
+ create_pipeline(status)
+ end
end
+ Gitlab::GitalyClient.reset_counts
end
shared_examples 'no N+1 queries' do
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index 171f70c32a8..5c27e8fd561 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -42,7 +42,7 @@ describe Issues::CloseService do
service.execute(issue)
end
- it 'refreshes the number of open issues' do
+ it 'refreshes the number of open issues', :use_clean_rails_memory_store_caching do
expect { service.execute(issue) }
.to change { project.open_issues_count }.from(1).to(0)
end
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index cc3d648c340..d86da244520 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -35,7 +35,7 @@ describe Issues::CreateService do
expect(issue.due_date).to eq Date.tomorrow
end
- it 'refreshes the number of open issues' do
+ it 'refreshes the number of open issues', :use_clean_rails_memory_store_caching do
expect { issue }.to change { project.open_issues_count }.from(0).to(1)
end
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index 15a50b85f19..a8a8aeed1bd 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -64,6 +64,13 @@ describe Issues::UpdateService, :mailer do
expect(issue.due_date).to eq Date.tomorrow
end
+ it 'refreshes the number of open issues when the issue is made confidential', :use_clean_rails_memory_store_caching do
+ issue # make sure the issue is created first so our counts are correct.
+
+ expect { update_issue(confidential: true) }
+ .to change { project.open_issues_count }.from(1).to(0)
+ end
+
it 'updates open issue counter for assignees when issue is reassigned' do
update_issue(assignee_ids: [user2.id])
diff --git a/spec/services/keys/last_used_service_spec.rb b/spec/services/keys/last_used_service_spec.rb
new file mode 100644
index 00000000000..edaace293ae
--- /dev/null
+++ b/spec/services/keys/last_used_service_spec.rb
@@ -0,0 +1,68 @@
+require 'spec_helper'
+
+describe Keys::LastUsedService do
+ describe '#execute', :clean_gitlab_redis_shared_state do
+ it 'updates the key when it has not been used recently' do
+ key = create(:key, last_used_at: 1.year.ago)
+ time = Time.zone.now
+
+ Timecop.freeze(time) { described_class.new(key).execute }
+
+ expect(key.last_used_at).to eq(time)
+ end
+
+ it 'does not update the key when it has been used recently' do
+ time = 1.minute.ago
+ key = create(:key, last_used_at: time)
+
+ described_class.new(key).execute
+
+ expect(key.last_used_at).to eq(time)
+ end
+
+ it 'does not update the updated_at field' do
+ # Since a lot of these updates could happen in parallel for different keys
+ # we want these updates to be as lightweight as possible, hence we want to
+ # make sure we _only_ update last_used_at and not always updated_at.
+ key = create(:key, last_used_at: 1.year.ago)
+
+ recorder = ActiveRecord::QueryRecorder.new do
+ described_class.new(key).execute
+ end
+
+ expect(recorder.count).to eq(1)
+ expect(recorder.log[0]).not_to include('updated_at')
+ end
+ end
+
+ describe '#update?', :clean_gitlab_redis_shared_state do
+ it 'returns true when no last used timestamp is present' do
+ key = build(:key, last_used_at: nil)
+ service = described_class.new(key)
+
+ expect(service.update?).to eq(true)
+ end
+
+ it 'returns true when the key needs to be updated' do
+ key = build(:key, last_used_at: 1.year.ago)
+ service = described_class.new(key)
+
+ expect(service.update?).to eq(true)
+ end
+
+ it 'returns false when a lease has already been obtained' do
+ key = build(:key, last_used_at: 1.year.ago)
+ service = described_class.new(key)
+
+ expect(service.update?).to eq(true)
+ expect(service.update?).to eq(false)
+ end
+
+ it 'returns false when the key does not yet need to be updated' do
+ key = build(:key, last_used_at: 1.minute.ago)
+ service = described_class.new(key)
+
+ expect(service.update?).to eq(false)
+ end
+ end
+end
diff --git a/spec/services/merge_requests/close_service_spec.rb b/spec/services/merge_requests/close_service_spec.rb
index 7e65369762c..b3886987316 100644
--- a/spec/services/merge_requests/close_service_spec.rb
+++ b/spec/services/merge_requests/close_service_spec.rb
@@ -52,7 +52,7 @@ describe MergeRequests::CloseService do
end
end
- it 'refreshes the number of open merge requests for a valid MR' do
+ it 'refreshes the number of open merge requests for a valid MR', :use_clean_rails_memory_store_caching do
service = described_class.new(project, user, {})
expect { service.execute(merge_request) }
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index d6409c0d625..a047f891ab2 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -37,7 +37,7 @@ describe MergeRequests::CreateService do
expect(service).to have_received(:execute_hooks).with(merge_request)
end
- it 'refreshes the number of open merge requests' do
+ it 'refreshes the number of open merge requests', :use_clean_rails_memory_store_caching do
expect { service.execute }
.to change { project.open_merge_requests_count }.from(0).to(1)
end
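This hunk and the close service one above add the :use_clean_rails_memory_store_caching tag because project.open_merge_requests_count is backed by Rails.cache; with the null cache store normally used in tests, the change { ... } expectations could never observe the refreshed counter. A hedged sketch of how such a tag is commonly wired up in spec support code (the exact helper in this repository may differ):

# Assumed support snippet, for illustration only.
RSpec.configure do |config|
  config.around(:each, :use_clean_rails_memory_store_caching) do |example|
    original_store = Rails.cache
    Rails.cache = ActiveSupport::Cache::MemoryStore.new

    example.run

    Rails.cache = original_store
  end
end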
diff --git a/spec/services/projects/count_service_spec.rb b/spec/services/projects/count_service_spec.rb
index 79b01e7620e..cc496501bad 100644
--- a/spec/services/projects/count_service_spec.rb
+++ b/spec/services/projects/count_service_spec.rb
@@ -66,8 +66,8 @@ describe Projects::CountService do
describe '#cache_key' do
it 'returns the cache key as an Array' do
- allow(service).to receive(:cache_key_name).and_return('count_service')
- expect(service.cache_key).to eq(['projects', 1, 'count_service'])
+ allow(service).to receive(:cache_key_name).and_return('foo')
+ expect(service.cache_key).to eq(['projects', 'count_service', described_class::VERSION, 1, 'foo'])
end
end
end
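The updated expectation describes a namespaced, versioned cache key instead of a bare name. A rough sketch of a cache_key method consistent with it, inferred only from the expectation above (Projects::CountService itself is not shown in this diff):

# Assumed shape: namespace, base key, class version, project id, per-count name.
def cache_key
  ['projects', 'count_service', self.class::VERSION, @project.id, cache_key_name]
end

Folding the class VERSION into the key means bumping the constant invalidates previously cached counts, so a change in the cached format never serves stale values.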
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index c2d6d7781b9..b1241cd8d0b 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -232,7 +232,9 @@ describe SystemNoteService do
context 'when milestone added' do
it 'sets the note text' do
- expect(subject.note).to eq "changed milestone to #{milestone.to_reference}"
+ reference = milestone.to_reference(format: :iid)
+
+ expect(subject.note).to eq "changed milestone to #{reference}"
end
end
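The system note now uses the milestone's iid-format reference rather than the default format. For illustration, assuming the usual '%' milestone reference prefix (the service change itself is not shown in this diff):

# Illustration only: a milestone with iid 1
milestone.to_reference(format: :iid) # => "%1"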
diff --git a/spec/views/projects/pipelines_settings/_show.html.haml_spec.rb b/spec/views/projects/pipelines_settings/_show.html.haml_spec.rb
new file mode 100644
index 00000000000..c757ccf02d3
--- /dev/null
+++ b/spec/views/projects/pipelines_settings/_show.html.haml_spec.rb
@@ -0,0 +1,62 @@
+require 'spec_helper'
+
+describe 'projects/pipelines_settings/_show' do
+ let(:project) { create(:project, :repository) }
+
+ before do
+ assign :project, project
+ end
+
+ context 'when kubernetes is not active' do
+ context 'when auto devops domain is not defined' do
+ it 'shows warning message' do
+ render
+
+ expect(rendered).to have_css('.settings-message')
+ expect(rendered).to have_text('Auto Review Apps and Auto Deploy need a domain name and the')
+ expect(rendered).to have_link('Kubernetes service')
+ end
+ end
+
+ context 'when auto devops domain is defined' do
+ before do
+ project.build_auto_devops(domain: 'example.com')
+ end
+
+ it 'shows warning message' do
+ render
+
+ expect(rendered).to have_css('.settings-message')
+ expect(rendered).to have_text('Auto Review Apps and Auto Deploy need the')
+ expect(rendered).to have_link('Kubernetes service')
+ end
+ end
+ end
+
+ context 'when kubernetes is active' do
+ before do
+ project.build_kubernetes_service(active: true)
+ end
+
+ context 'when auto devops domain is not defined' do
+ it 'shows warning message' do
+ render
+
+ expect(rendered).to have_css('.settings-message')
+ expect(rendered).to have_text('Auto Review Apps and Auto Deploy need a domain name to work correctly.')
+ end
+ end
+
+ context 'when auto devops domain is defined' do
+ before do
+ project.build_auto_devops(domain: 'example.com')
+ end
+
+ it 'does not show warning message' do
+ render
+
+ expect(rendered).not_to have_css('.settings-message')
+ end
+ end
+ end
+end
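The four contexts above pin down a two-by-two matrix: Kubernetes service active or not, Auto DevOps domain configured or not, with a warning for every combination except the fully configured one. A sketch of the conditional the partial presumably renders (helper names are assumptions, and message text is reproduced only as far as the spec asserts it; the HAML template is not part of this diff):

# Pseudologic in plain Ruby rather than HAML, for illustration only.
if kubernetes_active? && auto_devops_domain.present?
  # fully configured: no .settings-message is rendered
elsif kubernetes_active?
  settings_message 'Auto Review Apps and Auto Deploy need a domain name to work correctly.'
elsif auto_devops_domain.present?
  settings_message 'Auto Review Apps and Auto Deploy need the ... Kubernetes service ...'
else
  settings_message 'Auto Review Apps and Auto Deploy need a domain name and the ... Kubernetes service ...'
end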
diff --git a/spec/workers/use_key_worker_spec.rb b/spec/workers/use_key_worker_spec.rb
deleted file mode 100644
index e50c788b82a..00000000000
--- a/spec/workers/use_key_worker_spec.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-require 'spec_helper'
-
-describe UseKeyWorker do
- describe "#perform" do
- it "updates the key's last_used_at attribute to the current time when it exists" do
- worker = described_class.new
- key = create(:key)
- current_time = Time.zone.now
-
- Timecop.freeze(current_time) do
- expect { worker.perform(key.id) }
- .to change { key.reload.last_used_at }.from(nil).to be_like_time(current_time)
- end
- end
-
- it "returns false and skips the job when the key doesn't exist" do
- worker = described_class.new
- key = create(:key)
-
- expect(worker.perform(key.id + 1)).to eq false
- end
- end
-end