Diffstat (limited to 'spec')
-rw-r--r--  spec/controllers/application_controller_spec.rb | 201
-rw-r--r--  spec/controllers/dashboard/todos_controller_spec.rb | 7
-rw-r--r--  spec/controllers/import/bitbucket_controller_spec.rb | 67
-rw-r--r--  spec/controllers/import/gitlab_controller_spec.rb | 66
-rw-r--r--  spec/controllers/projects/blob_controller_spec.rb | 83
-rw-r--r--  spec/controllers/projects/imports_controller_spec.rb | 9
-rw-r--r--  spec/controllers/projects/issues_controller_spec.rb | 11
-rw-r--r--  spec/controllers/projects/merge_requests_controller_spec.rb | 12
-rw-r--r--  spec/controllers/projects/registry/repositories_controller_spec.rb | 84
-rw-r--r--  spec/factories/ci/builds.rb | 7
-rw-r--r--  spec/factories/ci/trigger_schedules.rb | 26
-rw-r--r--  spec/factories/ci/triggers.rb | 7
-rw-r--r--  spec/factories/container_repositories.rb | 33
-rw-r--r--  spec/factories/environments.rb | 5
-rw-r--r--  spec/factories/keys.rb | 4
-rw-r--r--  spec/features/admin/admin_health_check_spec.rb | 6
-rw-r--r--  spec/features/boards/boards_spec.rb | 2
-rw-r--r--  spec/features/boards/modal_filter_spec.rb | 2
-rw-r--r--  spec/features/container_registry_spec.rb | 62
-rw-r--r--  spec/features/dashboard/merge_requests_spec.rb | 32
-rw-r--r--  spec/features/dashboard/project_member_activity_index_spec.rb | 6
-rw-r--r--  spec/features/gitlab_flavored_markdown_spec.rb | 12
-rw-r--r--  spec/features/groups/empty_states_spec.rb | 70
-rw-r--r--  spec/features/groups_spec.rb | 39
-rw-r--r--  spec/features/issues/award_emoji_spec.rb | 5
-rw-r--r--  spec/features/issues/filtered_search/dropdown_assignee_spec.rb | 2
-rw-r--r--  spec/features/issues/filtered_search/dropdown_author_spec.rb | 2
-rw-r--r--  spec/features/issues/filtered_search/dropdown_label_spec.rb | 2
-rw-r--r--  spec/features/issues/filtered_search/dropdown_milestone_spec.rb | 2
-rw-r--r--  spec/features/issues/filtered_search/filter_issues_spec.rb | 24
-rw-r--r--  spec/features/issues/filtered_search/recent_searches_spec.rb | 92
-rw-r--r--  spec/features/issues/filtered_search/search_bar_spec.rb | 8
-rw-r--r--  spec/features/issues/move_spec.rb | 4
-rw-r--r--  spec/features/issues/spam_issues_spec.rb | 2
-rw-r--r--  spec/features/issues_spec.rb | 17
-rw-r--r--  spec/features/login_spec.rb | 131
-rw-r--r--  spec/features/merge_requests/create_new_mr_spec.rb | 20
-rw-r--r--  spec/features/merge_requests/reset_filters_spec.rb | 2
-rw-r--r--  spec/features/projects/blobs/blob_show_spec.rb | 23
-rw-r--r--  spec/features/projects/blobs/edit_spec.rb | 145
-rw-r--r--  spec/features/projects/builds_spec.rb | 71
-rw-r--r--  spec/features/projects/environments/environments_spec.rb | 40
-rw-r--r--  spec/features/projects/import_export/import_file_spec.rb | 8
-rw-r--r--  spec/features/projects/merge_requests/list_spec.rb | 24
-rw-r--r--  spec/features/projects/new_project_spec.rb | 16
-rw-r--r--  spec/features/projects/wiki/user_creates_wiki_page_spec.rb | 20
-rw-r--r--  spec/features/projects/wiki/user_updates_wiki_page_spec.rb | 6
-rw-r--r--  spec/features/search_spec.rb | 2
-rw-r--r--  spec/features/security/project/internal_access_spec.rb | 5
-rw-r--r--  spec/features/security/project/private_access_spec.rb | 5
-rw-r--r--  spec/features/security/project/public_access_spec.rb | 5
-rw-r--r--  spec/finders/group_projects_finder_spec.rb | 70
-rw-r--r--  spec/finders/projects_finder_spec.rb | 128
-rw-r--r--  spec/fixtures/metrics.json | 1
-rw-r--r--  spec/helpers/blob_helper_spec.rb | 2
-rw-r--r--  spec/helpers/events_helper_spec.rb | 27
-rw-r--r--  spec/helpers/projects_helper_spec.rb | 8
-rw-r--r--  spec/javascripts/blob/3d_viewer/mesh_object_spec.js | 42
-rw-r--r--  spec/javascripts/blob/pdf/index_spec.js | 80
-rw-r--r--  spec/javascripts/blob/pdf/test.pdf | bin 0 -> 11956 bytes
-rw-r--r--  spec/javascripts/blob/sketch/index_spec.js | 118
-rw-r--r--  spec/javascripts/boards/board_list_spec.js | 201
-rw-r--r--  spec/javascripts/build_spec.js | 8
-rw-r--r--  spec/javascripts/environments/environment_actions_spec.js | 15
-rw-r--r--  spec/javascripts/environments/environment_spec.js | 99
-rw-r--r--  spec/javascripts/environments/environments_store_spec.js | 110
-rw-r--r--  spec/javascripts/environments/mock_data.js | 16
-rw-r--r--  spec/javascripts/filtered_search/components/recent_searches_dropdown_content_spec.js | 166
-rw-r--r--  spec/javascripts/filtered_search/filtered_search_manager_spec.js | 6
-rw-r--r--  spec/javascripts/filtered_search/services/recent_searches_service_spec.js | 56
-rw-r--r--  spec/javascripts/filtered_search/stores/recent_searches_store_spec.js | 59
-rw-r--r--  spec/javascripts/fixtures/environments/metrics.html.haml | 64
-rw-r--r--  spec/javascripts/fixtures/pdf_viewer.html.haml | 1
-rw-r--r--  spec/javascripts/fixtures/sketch_viewer.html.haml | 2
-rw-r--r--  spec/javascripts/issue_show/issue_title_spec.js | 22
-rw-r--r--  spec/javascripts/lib/utils/common_utils_spec.js | 51
-rw-r--r--  spec/javascripts/merge_request_widget_spec.js | 7
-rw-r--r--  spec/javascripts/monitoring/prometheus_graph_spec.js | 23
-rw-r--r--  spec/javascripts/test_bundle.js | 1
-rw-r--r--  spec/javascripts/vue_pipelines_index/pipelines_actions_spec.js | 15
-rw-r--r--  spec/lib/banzai/filter/markdown_filter_spec.rb | 19
-rw-r--r--  spec/lib/banzai/filter/sanitization_filter_spec.rb | 7
-rw-r--r--  spec/lib/banzai/filter/syntax_highlight_filter_spec.rb | 6
-rw-r--r--  spec/lib/ci/ansi2html_spec.rb | 110
-rw-r--r--  spec/lib/container_registry/blob_spec.rb | 115
-rw-r--r--  spec/lib/container_registry/path_spec.rb | 212
-rw-r--r--  spec/lib/container_registry/registry_spec.rb | 2
-rw-r--r--  spec/lib/container_registry/repository_spec.rb | 65
-rw-r--r--  spec/lib/container_registry/tag_spec.rb | 86
-rw-r--r--  spec/lib/gitlab/backend/shell_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/ci/cron_parser_spec.rb | 116
-rw-r--r--  spec/lib/gitlab/ci/trace/stream_spec.rb | 201
-rw-r--r--  spec/lib/gitlab/ci/trace_reader_spec.rb | 52
-rw-r--r--  spec/lib/gitlab/ci/trace_spec.rb | 216
-rw-r--r--  spec/lib/gitlab/database/migration_helpers_spec.rb | 42
-rw-r--r--  spec/lib/gitlab/etag_caching/middleware_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/git/attributes_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/git/blame_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/git/blob_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/git/branch_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/git/commit_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/git/compare_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/git/diff_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/git/encoding_helper_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/git/index_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/git/repository_spec.rb | 111
-rw-r--r--  spec/lib/gitlab/git/tag_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/git/tree_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/gitaly_client/notifications_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/gitaly_client/ref_spec.rb | 41
-rw-r--r--  spec/lib/gitlab/import_export/all_models.yml | 11
-rw-r--r--  spec/lib/gitlab/import_export/safe_model_attributes.yml | 11
-rw-r--r--  spec/lib/gitlab/o_auth/user_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/polling_interval_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/sidekiq_status/client_middleware_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/sidekiq_status_spec.rb | 13
-rw-r--r--  spec/lib/gitlab/workhorse_spec.rb | 4
-rw-r--r--  spec/mailers/previews/notify_preview.rb | 11
-rw-r--r--  spec/models/award_emoji_spec.rb | 14
-rw-r--r--  spec/models/blob_spec.rb | 62
-rw-r--r--  spec/models/ci/build_spec.rb | 302
-rw-r--r--  spec/models/ci/trigger_schedule_spec.rb | 76
-rw-r--r--  spec/models/ci/trigger_spec.rb | 1
-rw-r--r--  spec/models/concerns/routable_spec.rb | 109
-rw-r--r--  spec/models/container_repository_spec.rb | 209
-rw-r--r--  spec/models/environment_spec.rb | 22
-rw-r--r--  spec/models/group_spec.rb | 42
-rw-r--r--  spec/models/issue_spec.rb | 8
-rw-r--r--  spec/models/members/group_member_spec.rb | 17
-rw-r--r--  spec/models/namespace_spec.rb | 63
-rw-r--r--  spec/models/project_spec.rb | 97
-rw-r--r--  spec/models/user_spec.rb | 112
-rw-r--r--  spec/requests/api/deploy_keys_spec.rb | 9
-rw-r--r--  spec/requests/api/issues_spec.rb | 376
-rw-r--r--  spec/requests/api/jobs_spec.rb | 4
-rw-r--r--  spec/requests/api/runner_spec.rb | 47
-rw-r--r--  spec/requests/api/v3/builds_spec.rb | 4
-rw-r--r--  spec/requests/ci/api/builds_spec.rb | 22
-rw-r--r--  spec/rubocop/cop/migration/remove_concurrent_index_spec.rb | 41
-rw-r--r--  spec/rubocop/cop/migration/remove_index_spec.rb | 35
-rw-r--r--  spec/serializers/build_action_entity_spec.rb | 4
-rw-r--r--  spec/serializers/build_entity_spec.rb | 4
-rw-r--r--  spec/services/auth/container_registry_authentication_service_spec.rb | 94
-rw-r--r--  spec/services/merge_requests/build_service_spec.rb | 42
-rw-r--r--  spec/services/merge_requests/refresh_service_spec.rb | 27
-rw-r--r--  spec/services/projects/destroy_service_spec.rb | 67
-rw-r--r--  spec/services/projects/transfer_service_spec.rb | 5
-rw-r--r--  spec/services/users/create_service_spec.rb | 26
-rw-r--r--  spec/services/users/destroy_service_spec.rb (renamed from spec/services/users/destroy_spec.rb) | 42
-rw-r--r--  spec/services/users/migrate_to_ghost_user_service_spec.rb | 64
-rw-r--r--  spec/support/controllers/githubish_import_controller_shared_examples.rb | 59
-rw-r--r--  spec/support/filtered_search_helpers.rb | 18
-rw-r--r--  spec/support/matchers/gitaly_matchers.rb | 2
-rw-r--r--  spec/support/seed_helper.rb | 36
-rw-r--r--  spec/support/services/migrate_to_ghost_user_service_shared_examples.rb | 39
-rw-r--r--  spec/support/stub_gitlab_calls.rb | 39
-rw-r--r--  spec/views/notify/pipeline_failed_email.html.haml_spec.rb | 54
-rw-r--r--  spec/views/notify/pipeline_success_email.html.haml_spec.rb | 54
-rw-r--r--  spec/workers/repository_import_worker_spec.rb | 2
-rw-r--r--  spec/workers/stuck_import_jobs_worker_spec.rb | 36
-rw-r--r--  spec/workers/trigger_schedule_worker_spec.rb | 58
161 files changed, 5697 insertions, 1211 deletions
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb
index 81cbccd5436..760f33b09c1 100644
--- a/spec/controllers/application_controller_spec.rb
+++ b/spec/controllers/application_controller_spec.rb
@@ -100,8 +100,6 @@ describe ApplicationController do
end
describe '#route_not_found' do
- let(:controller) { ApplicationController.new }
-
it 'renders 404 if authenticated' do
allow(controller).to receive(:current_user).and_return(user)
expect(controller).to receive(:not_found)
@@ -115,4 +113,203 @@ describe ApplicationController do
controller.send(:route_not_found)
end
end
+
+ context 'two-factor authentication' do
+ let(:controller) { ApplicationController.new }
+
+ describe '#check_two_factor_requirement' do
+ subject { controller.send :check_two_factor_requirement }
+
+ it 'does not redirect if 2FA is not required' do
+ allow(controller).to receive(:two_factor_authentication_required?).and_return(false)
+ expect(controller).not_to receive(:redirect_to)
+
+ subject
+ end
+
+ it 'does not redirect if user is not logged in' do
+ allow(controller).to receive(:two_factor_authentication_required?).and_return(true)
+ allow(controller).to receive(:current_user).and_return(nil)
+ expect(controller).not_to receive(:redirect_to)
+
+ subject
+ end
+
+ it 'does not redirect if user has 2FA enabled' do
+ allow(controller).to receive(:two_factor_authentication_required?).and_return(true)
+ allow(controller).to receive(:current_user).twice.and_return(user)
+ allow(user).to receive(:two_factor_enabled?).and_return(true)
+ expect(controller).not_to receive(:redirect_to)
+
+ subject
+ end
+
+ it 'does not redirect if 2FA setup can be skipped' do
+ allow(controller).to receive(:two_factor_authentication_required?).and_return(true)
+ allow(controller).to receive(:current_user).twice.and_return(user)
+ allow(user).to receive(:two_factor_enabled?).and_return(false)
+ allow(controller).to receive(:skip_two_factor?).and_return(true)
+ expect(controller).not_to receive(:redirect_to)
+
+ subject
+ end
+
+ it 'redirects to 2FA setup otherwise' do
+ allow(controller).to receive(:two_factor_authentication_required?).and_return(true)
+ allow(controller).to receive(:current_user).twice.and_return(user)
+ allow(user).to receive(:two_factor_enabled?).and_return(false)
+ allow(controller).to receive(:skip_two_factor?).and_return(false)
+ allow(controller).to receive(:profile_two_factor_auth_path)
+ expect(controller).to receive(:redirect_to)
+
+ subject
+ end
+ end
+
+ describe '#two_factor_authentication_required?' do
+ subject { controller.send :two_factor_authentication_required? }
+
+ it 'returns false if no 2FA requirement is present' do
+ allow(controller).to receive(:current_user).and_return(nil)
+
+ expect(subject).to be_falsey
+ end
+
+ it 'returns true if a 2FA requirement is set in the application settings' do
+ stub_application_setting require_two_factor_authentication: true
+ allow(controller).to receive(:current_user).and_return(nil)
+
+ expect(subject).to be_truthy
+ end
+
+ it 'returns true if a 2FA requirement is set on the user' do
+ user.require_two_factor_authentication_from_group = true
+ allow(controller).to receive(:current_user).and_return(user)
+
+ expect(subject).to be_truthy
+ end
+ end
+
+ describe '#two_factor_grace_period' do
+ subject { controller.send :two_factor_grace_period }
+
+ it 'returns the grace period from the application settings' do
+ stub_application_setting two_factor_grace_period: 23
+ allow(controller).to receive(:current_user).and_return(nil)
+
+ expect(subject).to eq 23
+ end
+
+ context 'with a 2FA requirement set on the user' do
+ let(:user) { create :user, require_two_factor_authentication_from_group: true, two_factor_grace_period: 23 }
+
+ it 'returns the user grace period if lower than the application grace period' do
+ stub_application_setting two_factor_grace_period: 24
+ allow(controller).to receive(:current_user).and_return(user)
+
+ expect(subject).to eq 23
+ end
+
+ it 'returns the application grace period if lower than the user grace period' do
+ stub_application_setting two_factor_grace_period: 22
+ allow(controller).to receive(:current_user).and_return(user)
+
+ expect(subject).to eq 22
+ end
+ end
+ end
+
+ describe '#two_factor_grace_period_expired?' do
+ subject { controller.send :two_factor_grace_period_expired? }
+
+ before do
+ allow(controller).to receive(:current_user).and_return(user)
+ end
+
+ it 'returns false if the user has not started their grace period yet' do
+ expect(subject).to be_falsey
+ end
+
+ context 'with grace period started' do
+ let(:user) { create :user, otp_grace_period_started_at: 2.hours.ago }
+
+ it 'returns true if the grace period has expired' do
+ allow(controller).to receive(:two_factor_grace_period).and_return(1)
+
+ expect(subject).to be_truthy
+ end
+
+ it 'returns false if the grace period is still active' do
+ allow(controller).to receive(:two_factor_grace_period).and_return(3)
+
+ expect(subject).to be_falsey
+ end
+ end
+ end
+
+ describe '#two_factor_skippable' do
+ subject { controller.send :two_factor_skippable? }
+
+ before do
+ allow(controller).to receive(:current_user).and_return(user)
+ end
+
+ it 'returns false if 2FA is not required' do
+ allow(controller).to receive(:two_factor_authentication_required?).and_return(false)
+
+ expect(subject).to be_falsey
+ end
+
+ it 'returns false if the user has already enabled 2FA' do
+ allow(controller).to receive(:two_factor_authentication_required?).and_return(true)
+ allow(user).to receive(:two_factor_enabled?).and_return(true)
+
+ expect(subject).to be_falsey
+ end
+
+ it 'returns false if the 2FA grace period has expired' do
+ allow(controller).to receive(:two_factor_authentication_required?).and_return(true)
+ allow(user).to receive(:two_factor_enabled?).and_return(false)
+ allow(controller).to receive(:two_factor_grace_period_expired?).and_return(true)
+
+ expect(subject).to be_falsey
+ end
+
+ it 'returns true otherwise' do
+ allow(controller).to receive(:two_factor_authentication_required?).and_return(true)
+ allow(user).to receive(:two_factor_enabled?).and_return(false)
+ allow(controller).to receive(:two_factor_grace_period_expired?).and_return(false)
+
+ expect(subject).to be_truthy
+ end
+ end
+
+ describe '#skip_two_factor?' do
+ subject { controller.send :skip_two_factor? }
+
+ it 'returns false if 2FA setup was not skipped' do
+ allow(controller).to receive(:session).and_return({})
+
+ expect(subject).to be_falsey
+ end
+
+ context 'with 2FA setup skipped' do
+ before do
+ allow(controller).to receive(:session).and_return({ skip_two_factor: 2.hours.from_now })
+ end
+
+ it 'returns false if the grace period has expired' do
+ Timecop.freeze(3.hours.from_now) do
+ expect(subject).to be_falsey
+ end
+ end
+
+ it 'returns true if the grace period is still active' do
+ Timecop.freeze(1.hour.from_now) do
+ expect(subject).to be_truthy
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/controllers/dashboard/todos_controller_spec.rb b/spec/controllers/dashboard/todos_controller_spec.rb
index 71a4a2c43c7..6075259ea99 100644
--- a/spec/controllers/dashboard/todos_controller_spec.rb
+++ b/spec/controllers/dashboard/todos_controller_spec.rb
@@ -35,6 +35,13 @@ describe Dashboard::TodosController do
expect(assigns(:todos).current_page).to eq(last_page)
expect(response).to have_http_status(200)
end
+
+ it 'does not redirect to external sites when provided a host field' do
+ external_host = "www.example.com"
+ get :index, page: (last_page + 1).to_param, host: external_host
+
+ expect(response).to redirect_to(dashboard_todos_path(page: last_page))
+ end
end
end
diff --git a/spec/controllers/import/bitbucket_controller_spec.rb b/spec/controllers/import/bitbucket_controller_spec.rb
index 51f23e4eeb9..010e3180ea4 100644
--- a/spec/controllers/import/bitbucket_controller_spec.rb
+++ b/spec/controllers/import/bitbucket_controller_spec.rb
@@ -200,5 +200,72 @@ describe Import::BitbucketController do
end
end
end
+
+ context 'user has chosen an existing nested namespace and name for the project' do
+ let(:parent_namespace) { create(:namespace, name: 'foo', owner: user) }
+ let(:nested_namespace) { create(:namespace, name: 'bar', parent: parent_namespace, owner: user) }
+ let(:test_name) { 'test_name' }
+
+ it 'takes the selected namespace and name' do
+ expect(Gitlab::BitbucketImport::ProjectCreator).
+ to receive(:new).with(bitbucket_repo, test_name, nested_namespace, user, access_params).
+ and_return(double(execute: true))
+
+ post :create, { target_namespace: nested_namespace.full_path, new_name: test_name, format: :js }
+ end
+ end
+
+ context 'user has chosen a non-existent nested namespaces and name for the project' do
+ let(:test_name) { 'test_name' }
+
+ it 'takes the selected namespace and name' do
+ expect(Gitlab::BitbucketImport::ProjectCreator).
+ to receive(:new).with(bitbucket_repo, test_name, kind_of(Namespace), user, access_params).
+ and_return(double(execute: true))
+
+ post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :js }
+ end
+
+ it 'creates the namespaces' do
+ allow(Gitlab::BitbucketImport::ProjectCreator).
+ to receive(:new).with(bitbucket_repo, test_name, kind_of(Namespace), user, access_params).
+ and_return(double(execute: true))
+
+ expect { post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :js } }
+ .to change { Namespace.count }.by(2)
+ end
+
+ it 'new namespace has the right parent' do
+ allow(Gitlab::BitbucketImport::ProjectCreator).
+ to receive(:new).with(bitbucket_repo, test_name, kind_of(Namespace), user, access_params).
+ and_return(double(execute: true))
+
+ post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :js }
+
+ expect(Namespace.find_by_path_or_name('bar').parent.path).to eq('foo')
+ end
+ end
+
+ context 'user has chosen existent and non-existent nested namespaces and name for the project' do
+ let(:test_name) { 'test_name' }
+ let!(:parent_namespace) { create(:namespace, name: 'foo', owner: user) }
+
+ it 'takes the selected namespace and name' do
+ expect(Gitlab::BitbucketImport::ProjectCreator).
+ to receive(:new).with(bitbucket_repo, test_name, kind_of(Namespace), user, access_params).
+ and_return(double(execute: true))
+
+ post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :js }
+ end
+
+ it 'creates the namespaces' do
+ allow(Gitlab::BitbucketImport::ProjectCreator).
+ to receive(:new).with(bitbucket_repo, test_name, kind_of(Namespace), user, access_params).
+ and_return(double(execute: true))
+
+ expect { post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :js } }
+ .to change { Namespace.count }.by(2)
+ end
+ end
end
end
diff --git a/spec/controllers/import/gitlab_controller_spec.rb b/spec/controllers/import/gitlab_controller_spec.rb
index 3f73ea000ae..2dbb89219d0 100644
--- a/spec/controllers/import/gitlab_controller_spec.rb
+++ b/spec/controllers/import/gitlab_controller_spec.rb
@@ -174,6 +174,72 @@ describe Import::GitlabController do
end
end
end
+
+ context 'user has chosen an existing nested namespace for the project' do
+ let(:parent_namespace) { create(:namespace, name: 'foo', owner: user) }
+ let(:nested_namespace) { create(:namespace, name: 'bar', parent: parent_namespace, owner: user) }
+
+ it 'takes the selected namespace and name' do
+ expect(Gitlab::GitlabImport::ProjectCreator).
+ to receive(:new).with(gitlab_repo, nested_namespace, user, access_params).
+ and_return(double(execute: true))
+
+ post :create, { target_namespace: nested_namespace.full_path, format: :js }
+ end
+ end
+
+ context 'user has chosen a non-existent nested namespaces for the project' do
+ let(:test_name) { 'test_name' }
+
+ it 'takes the selected namespace and name' do
+ expect(Gitlab::GitlabImport::ProjectCreator).
+ to receive(:new).with(gitlab_repo, kind_of(Namespace), user, access_params).
+ and_return(double(execute: true))
+
+ post :create, { target_namespace: 'foo/bar', format: :js }
+ end
+
+ it 'creates the namespaces' do
+ allow(Gitlab::GitlabImport::ProjectCreator).
+ to receive(:new).with(gitlab_repo, kind_of(Namespace), user, access_params).
+ and_return(double(execute: true))
+
+ expect { post :create, { target_namespace: 'foo/bar', format: :js } }
+ .to change { Namespace.count }.by(2)
+ end
+
+ it 'new namespace has the right parent' do
+ allow(Gitlab::GitlabImport::ProjectCreator).
+ to receive(:new).with(gitlab_repo, kind_of(Namespace), user, access_params).
+ and_return(double(execute: true))
+
+ post :create, { target_namespace: 'foo/bar', format: :js }
+
+ expect(Namespace.find_by_path_or_name('bar').parent.path).to eq('foo')
+ end
+ end
+
+ context 'user has chosen existent and non-existent nested namespaces and name for the project' do
+ let(:test_name) { 'test_name' }
+ let!(:parent_namespace) { create(:namespace, name: 'foo', owner: user) }
+
+ it 'takes the selected namespace and name' do
+ expect(Gitlab::GitlabImport::ProjectCreator).
+ to receive(:new).with(gitlab_repo, kind_of(Namespace), user, access_params).
+ and_return(double(execute: true))
+
+ post :create, { target_namespace: 'foo/foobar/bar', format: :js }
+ end
+
+ it 'creates the namespaces' do
+ allow(Gitlab::GitlabImport::ProjectCreator).
+ to receive(:new).with(gitlab_repo, kind_of(Namespace), user, access_params).
+ and_return(double(execute: true))
+
+ expect { post :create, { target_namespace: 'foo/foobar/bar', format: :js } }
+ .to change { Namespace.count }.by(2)
+ end
+ end
end
end
end
diff --git a/spec/controllers/projects/blob_controller_spec.rb b/spec/controllers/projects/blob_controller_spec.rb
index ec36a64b415..3e9f272a0d8 100644
--- a/spec/controllers/projects/blob_controller_spec.rb
+++ b/spec/controllers/projects/blob_controller_spec.rb
@@ -2,15 +2,10 @@ require 'rails_helper'
describe Projects::BlobController do
let(:project) { create(:project, :public, :repository) }
- let(:user) { create(:user) }
-
- before do
- project.team << [user, :master]
-
- sign_in(user)
- end
describe 'GET diff' do
+ let(:user) { create(:user) }
+
render_views
def do_get(opts = {})
@@ -20,6 +15,12 @@ describe Projects::BlobController do
get :diff, params.merge(opts)
end
+ before do
+ project.team << [user, :master]
+
+ sign_in(user)
+ end
+
context 'when essential params are missing' do
it 'renders nothing' do
do_get
@@ -37,7 +38,69 @@ describe Projects::BlobController do
end
end
+ describe 'GET edit' do
+ let(:default_params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ id: 'master/CHANGELOG'
+ }
+ end
+
+ context 'anonymous' do
+ before do
+ get :edit, default_params
+ end
+
+ it 'redirects to sign in and returns' do
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+
+ context 'as guest' do
+ let(:guest) { create(:user) }
+
+ before do
+ sign_in(guest)
+ get :edit, default_params
+ end
+
+ it 'redirects to blob show' do
+ expect(response).to redirect_to(namespace_project_blob_path(project.namespace, project, 'master/CHANGELOG'))
+ end
+ end
+
+ context 'as developer' do
+ let(:developer) { create(:user) }
+
+ before do
+ project.team << [developer, :developer]
+ sign_in(developer)
+ get :edit, default_params
+ end
+
+ it 'redirects to blob show' do
+ expect(response).to have_http_status(200)
+ end
+ end
+
+ context 'as master' do
+ let(:master) { create(:user) }
+
+ before do
+ project.team << [master, :master]
+ sign_in(master)
+ get :edit, default_params
+ end
+
+ it 'redirects to blob show' do
+ expect(response).to have_http_status(200)
+ end
+ end
+ end
+
describe 'PUT update' do
+ let(:user) { create(:user) }
let(:default_params) do
{
namespace_id: project.namespace,
@@ -53,6 +116,12 @@ describe Projects::BlobController do
namespace_project_blob_path(project.namespace, project, 'master/CHANGELOG')
end
+ before do
+ project.team << [user, :master]
+
+ sign_in(user)
+ end
+
it 'redirects to blob' do
put :update, default_params
diff --git a/spec/controllers/projects/imports_controller_spec.rb b/spec/controllers/projects/imports_controller_spec.rb
index 7c75815f3c4..6724b474179 100644
--- a/spec/controllers/projects/imports_controller_spec.rb
+++ b/spec/controllers/projects/imports_controller_spec.rb
@@ -96,12 +96,19 @@ describe Projects::ImportsController do
}
end
- it 'redirects to params[:to]' do
+ it 'redirects to internal params[:to]' do
get :show, namespace_id: project.namespace.to_param, project_id: project, continue: params
expect(flash[:notice]).to eq params[:notice]
expect(response).to redirect_to params[:to]
end
+
+ it 'does not redirect to external params[:to]' do
+ params[:to] = "//google.com"
+
+ get :show, namespace_id: project.namespace.to_param, project_id: project, continue: params
+ expect(response).not_to redirect_to params[:to]
+ end
end
end
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index 734966d50b2..d5f1d46bf7f 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -83,6 +83,17 @@ describe Projects::IssuesController do
expect(assigns(:issues).current_page).to eq(last_page)
expect(response).to have_http_status(200)
end
+
+ it 'does not redirect to external sites when provided a host field' do
+ external_host = "www.example.com"
+ get :index,
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ page: (last_page + 1).to_param,
+ host: external_host
+
+ expect(response).to redirect_to(namespace_project_issues_path(page: last_page, state: controller.params[:state], scope: controller.params[:scope]))
+ end
end
end
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index 72f41f7209a..99d5583e683 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -176,6 +176,18 @@ describe Projects::MergeRequestsController do
expect(assigns(:merge_requests).current_page).to eq(last_page)
expect(response).to have_http_status(200)
end
+
+ it 'does not redirect to external sites when provided a host field' do
+ external_host = "www.example.com"
+ get :index,
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ state: 'opened',
+ page: (last_page + 1).to_param,
+ host: external_host
+
+ expect(response).to redirect_to(namespace_project_merge_requests_path(page: last_page, state: controller.params[:state], scope: controller.params[:scope]))
+ end
end
context 'when filtering by opened state' do
diff --git a/spec/controllers/projects/registry/repositories_controller_spec.rb b/spec/controllers/projects/registry/repositories_controller_spec.rb
new file mode 100644
index 00000000000..464302824a8
--- /dev/null
+++ b/spec/controllers/projects/registry/repositories_controller_spec.rb
@@ -0,0 +1,84 @@
+require 'spec_helper'
+
+describe Projects::Registry::RepositoriesController do
+ let(:user) { create(:user) }
+ let(:project) { create(:empty_project, :private) }
+
+ before do
+ sign_in(user)
+ stub_container_registry_config(enabled: true)
+ end
+
+ context 'when user has access to registry' do
+ before do
+ project.add_developer(user)
+ end
+
+ describe 'GET index' do
+ context 'when root container repository exists' do
+ before do
+ create(:container_repository, :root, project: project)
+ end
+
+ it 'does not create root container repository' do
+ expect { go_to_index }.not_to change { ContainerRepository.all.count }
+ end
+ end
+
+ context 'when root container repository is not created' do
+ context 'when there are tags for this repository' do
+ before do
+ stub_container_registry_tags(repository: project.full_path,
+ tags: %w[rc1 latest])
+ end
+
+ it 'successfully renders container repositories' do
+ go_to_index
+
+ expect(response).to have_http_status(:ok)
+ end
+
+ it 'creates a root container repository' do
+ expect { go_to_index }.to change { ContainerRepository.all.count }.by(1)
+ expect(ContainerRepository.first).to be_root_repository
+ end
+ end
+
+ context 'when there are no tags for this repository' do
+ before do
+ stub_container_registry_tags(repository: :any, tags: [])
+ end
+
+ it 'successfully renders container repositories' do
+ go_to_index
+
+ expect(response).to have_http_status(:ok)
+ end
+
+ it 'does not ensure root container repository' do
+ expect { go_to_index }.not_to change { ContainerRepository.all.count }
+ end
+ end
+ end
+ end
+ end
+
+ context 'when user does not have access to registry' do
+ describe 'GET index' do
+ it 'responds with 404' do
+ go_to_index
+
+ expect(response).to have_http_status(:not_found)
+ end
+
+ it 'does not ensure root container repository' do
+ expect { go_to_index }.not_to change { ContainerRepository.all.count }
+ end
+ end
+ end
+
+ def go_to_index
+ get :index, namespace_id: project.namespace,
+ project_id: project
+ end
+end
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index f78086211f7..b62def83ee4 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -111,7 +111,7 @@ FactoryGirl.define do
trait :trace do
after(:create) do |build, evaluator|
- build.trace = 'BUILD TRACE'
+ build.trace.set('BUILD TRACE')
end
end
@@ -192,5 +192,10 @@ FactoryGirl.define do
trait :no_options do
options { {} }
end
+
+ trait :non_playable do
+ status 'created'
+ self.when 'manual'
+ end
end
end
diff --git a/spec/factories/ci/trigger_schedules.rb b/spec/factories/ci/trigger_schedules.rb
new file mode 100644
index 00000000000..315bce16995
--- /dev/null
+++ b/spec/factories/ci/trigger_schedules.rb
@@ -0,0 +1,26 @@
+FactoryGirl.define do
+ factory :ci_trigger_schedule, class: Ci::TriggerSchedule do
+ trigger factory: :ci_trigger_for_trigger_schedule
+ cron '0 1 * * *'
+ cron_timezone Gitlab::Ci::CronParser::VALID_SYNTAX_SAMPLE_TIME_ZONE
+
+ after(:build) do |trigger_schedule, evaluator|
+ trigger_schedule.update!(project: trigger_schedule.trigger.project)
+ end
+
+ trait :nightly do
+ cron '0 1 * * *'
+ cron_timezone Gitlab::Ci::CronParser::VALID_SYNTAX_SAMPLE_TIME_ZONE
+ end
+
+ trait :weekly do
+ cron '0 1 * * 6'
+ cron_timezone Gitlab::Ci::CronParser::VALID_SYNTAX_SAMPLE_TIME_ZONE
+ end
+
+ trait :monthly do
+ cron '0 1 22 * *'
+ cron_timezone Gitlab::Ci::CronParser::VALID_SYNTAX_SAMPLE_TIME_ZONE
+ end
+ end
+end
diff --git a/spec/factories/ci/triggers.rb b/spec/factories/ci/triggers.rb
index 40c4663c6d8..c3a29d8bf04 100644
--- a/spec/factories/ci/triggers.rb
+++ b/spec/factories/ci/triggers.rb
@@ -2,6 +2,13 @@ FactoryGirl.define do
factory :ci_trigger_without_token, class: Ci::Trigger do
factory :ci_trigger do
sequence(:token) { |n| "token#{n}" }
+
+ factory :ci_trigger_for_trigger_schedule do
+ token { SecureRandom.hex(15) }
+ owner factory: :user
+ project factory: :project
+ ref 'master'
+ end
end
end
end
diff --git a/spec/factories/container_repositories.rb b/spec/factories/container_repositories.rb
new file mode 100644
index 00000000000..3fcad9fd4b3
--- /dev/null
+++ b/spec/factories/container_repositories.rb
@@ -0,0 +1,33 @@
+FactoryGirl.define do
+ factory :container_repository do
+ name 'test_container_image'
+ project
+
+ transient do
+ tags []
+ end
+
+ trait :root do
+ name ''
+ end
+
+ after(:build) do |repository, evaluator|
+ next if evaluator.tags.to_a.none?
+
+ allow(repository.client)
+ .to receive(:repository_tags)
+ .and_return({
+ 'name' => repository.path,
+ 'tags' => evaluator.tags
+ })
+
+ evaluator.tags.each do |tag|
+ allow(repository.client)
+ .to receive(:repository_tag_digest)
+ .with(repository.path, tag)
+ .and_return('sha256:4c8e63ca4cb663ce6c688cb06f1c3' \
+ '72b088dac5b6d7ad7d49cd620d85cf72a15')
+ end
+ end
+ end
+end
diff --git a/spec/factories/environments.rb b/spec/factories/environments.rb
index 0852dda6b29..3fbf24b5c7d 100644
--- a/spec/factories/environments.rb
+++ b/spec/factories/environments.rb
@@ -32,5 +32,10 @@ FactoryGirl.define do
environment.update_attribute(:deployments, [deployment])
end
end
+
+ trait :non_playable do
+ status 'created'
+ self.when 'manual'
+ end
end
end
diff --git a/spec/factories/keys.rb b/spec/factories/keys.rb
index dd93b439b2b..4e140102492 100644
--- a/spec/factories/keys.rb
+++ b/spec/factories/keys.rb
@@ -23,5 +23,9 @@ FactoryGirl.define do
factory :another_deploy_key, class: 'DeployKey' do
end
end
+
+ factory :write_access_key, class: 'DeployKey' do
+ can_push true
+ end
end
end
diff --git a/spec/features/admin/admin_health_check_spec.rb b/spec/features/admin/admin_health_check_spec.rb
index f7e49a56deb..523afa2318f 100644
--- a/spec/features/admin/admin_health_check_spec.rb
+++ b/spec/features/admin/admin_health_check_spec.rb
@@ -2,7 +2,6 @@ require 'spec_helper'
feature "Admin Health Check", feature: true do
include StubENV
- include WaitForAjax
before do
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
@@ -24,11 +23,12 @@ feature "Admin Health Check", feature: true do
expect(page).to have_selector('#health-check-token', text: token)
end
- describe 'reload access token', js: true do
+ describe 'reload access token' do
it 'changes the access token' do
orig_token = current_application_settings.health_check_access_token
click_button 'Reset health check access token'
- wait_for_ajax
+
+ expect(page).to have_content('New health check access token has been generated!')
expect(find('#health-check-token').text).not_to eq orig_token
end
end
diff --git a/spec/features/boards/boards_spec.rb b/spec/features/boards/boards_spec.rb
index e168585534d..30ad169e30e 100644
--- a/spec/features/boards/boards_spec.rb
+++ b/spec/features/boards/boards_spec.rb
@@ -590,7 +590,7 @@ describe 'Issue Boards', feature: true, js: true do
end
def click_filter_link(link_text)
- page.within('.filtered-search-input-container') do
+ page.within('.filtered-search-box') do
expect(page).to have_button(link_text)
click_button(link_text)
diff --git a/spec/features/boards/modal_filter_spec.rb b/spec/features/boards/modal_filter_spec.rb
index e2281a7da55..4a4c13e79c8 100644
--- a/spec/features/boards/modal_filter_spec.rb
+++ b/spec/features/boards/modal_filter_spec.rb
@@ -219,7 +219,7 @@ describe 'Issue Boards add issue modal filtering', :feature, :js do
end
def click_filter_link(link_text)
- page.within('.add-issues-modal .filtered-search-input-container') do
+ page.within('.add-issues-modal .filtered-search-box') do
expect(page).to have_button(link_text)
click_button(link_text)
diff --git a/spec/features/container_registry_spec.rb b/spec/features/container_registry_spec.rb
index 203e55a36f2..b86609e07c5 100644
--- a/spec/features/container_registry_spec.rb
+++ b/spec/features/container_registry_spec.rb
@@ -1,45 +1,61 @@
require 'spec_helper'
describe "Container Registry" do
+ let(:user) { create(:user) }
let(:project) { create(:empty_project) }
- let(:repository) { project.container_registry_repository }
- let(:tag_name) { 'latest' }
- let(:tags) { [tag_name] }
+
+ let(:container_repository) do
+ create(:container_repository, name: 'my/image')
+ end
before do
- login_as(:user)
- project.team << [@user, :developer]
- stub_container_registry_tags(*tags)
+ login_as(user)
+ project.add_developer(user)
stub_container_registry_config(enabled: true)
- allow(Auth::ContainerRegistryAuthenticationService).to receive(:full_access_token).and_return('token')
+ stub_container_registry_tags(repository: :any, tags: [])
end
- describe 'GET /:project/container_registry' do
+ context 'when there are no image repositories' do
+ scenario 'user visits container registry main page' do
+ visit_container_registry
+
+ expect(page).to have_content 'No container image repositories'
+ end
+ end
+
+ context 'when there are image repositories' do
before do
- visit namespace_project_container_registry_index_path(project.namespace, project)
+ stub_container_registry_tags(repository: %r{my/image}, tags: %w[latest])
+ project.container_repositories << container_repository
end
- context 'when no tags' do
- let(:tags) { [] }
+ scenario 'user wants to see multi-level container repository' do
+ visit_container_registry
- it { expect(page).to have_content('No images in Container Registry for this project') }
+ expect(page).to have_content('my/image')
end
- context 'when there are tags' do
- it { expect(page).to have_content(tag_name) }
- it { expect(page).to have_content('d7a513a66') }
- end
- end
+ scenario 'user removes entire container repository' do
+ visit_container_registry
- describe 'DELETE /:project/container_registry/tag' do
- before do
- visit namespace_project_container_registry_index_path(project.namespace, project)
+ expect_any_instance_of(ContainerRepository)
+ .to receive(:delete_tags!).and_return(true)
+
+ click_on 'Remove repository'
end
- it do
- expect_any_instance_of(::ContainerRegistry::Tag).to receive(:delete).and_return(true)
+ scenario 'user removes a specific tag from container repository' do
+ visit_container_registry
- click_on 'Remove'
+ expect_any_instance_of(ContainerRegistry::Tag)
+ .to receive(:delete).and_return(true)
+
+ click_on 'Remove tag'
end
end
+
+ def visit_container_registry
+ visit namespace_project_container_registry_index_path(
+ project.namespace, project)
+ end
end
diff --git a/spec/features/dashboard/merge_requests_spec.rb b/spec/features/dashboard/merge_requests_spec.rb
new file mode 100644
index 00000000000..508ca38d7e5
--- /dev/null
+++ b/spec/features/dashboard/merge_requests_spec.rb
@@ -0,0 +1,32 @@
+require 'spec_helper'
+
+describe 'Dashboard Merge Requests' do
+ let(:current_user) { create :user }
+ let(:project) do
+ create(:empty_project) do |project|
+ project.add_master(current_user)
+ end
+ end
+
+ before do
+ login_as(current_user)
+ end
+
+ it 'should show an empty state' do
+ visit merge_requests_dashboard_path(assignee_id: current_user.id)
+
+ expect(page).to have_selector('.empty-state')
+ end
+
+ context 'if there are merge requests' do
+ before do
+ create(:merge_request, assignee: current_user, source_project: project)
+
+ visit merge_requests_dashboard_path(assignee_id: current_user.id)
+ end
+
+ it 'should not show an empty state' do
+ expect(page).not_to have_selector('.empty-state')
+ end
+ end
+end
diff --git a/spec/features/dashboard/project_member_activity_index_spec.rb b/spec/features/dashboard/project_member_activity_index_spec.rb
index 49d93db58a9..d62839a09ef 100644
--- a/spec/features/dashboard/project_member_activity_index_spec.rb
+++ b/spec/features/dashboard/project_member_activity_index_spec.rb
@@ -21,20 +21,20 @@ feature 'Project member activity', feature: true, js: true do
context 'when a user joins the project' do
before { visit_activities_and_wait_with_event(Event::JOINED) }
- it { is_expected.to eq("#{user.name} joined project") }
+ it { is_expected.to eq("joined project") }
end
context 'when a user leaves the project' do
before { visit_activities_and_wait_with_event(Event::LEFT) }
- it { is_expected.to eq("#{user.name} left project") }
+ it { is_expected.to eq("left project") }
end
context 'when a users membership expires for the project' do
before { visit_activities_and_wait_with_event(Event::EXPIRED) }
it "presents the correct message" do
- message = "#{user.name} removed due to membership expiration from project"
+ message = "removed due to membership expiration from project"
is_expected.to eq(message)
end
end
diff --git a/spec/features/gitlab_flavored_markdown_spec.rb b/spec/features/gitlab_flavored_markdown_spec.rb
index 84d73d693bc..876f33dd03e 100644
--- a/spec/features/gitlab_flavored_markdown_spec.rb
+++ b/spec/features/gitlab_flavored_markdown_spec.rb
@@ -48,7 +48,9 @@ describe "GitLab Flavored Markdown", feature: true do
end
end
- describe "for issues" do
+ describe "for issues", feature: true, js: true do
+ include WaitForVueResource
+
before do
@other_issue = create(:issue,
author: @user,
@@ -79,6 +81,14 @@ describe "GitLab Flavored Markdown", feature: true do
expect(page).to have_link(fred.to_reference)
end
+
+ it "renders updated subject once edited somewhere else in issues#show" do
+ visit namespace_project_issue_path(project.namespace, project, @issue)
+ @issue.update(title: "fix #{@other_issue.to_reference} and update")
+
+ wait_for_vue_resource
+ expect(page).to have_text("fix #{@other_issue.to_reference} and update")
+ end
end
describe "for merge requests" do
diff --git a/spec/features/groups/empty_states_spec.rb b/spec/features/groups/empty_states_spec.rb
new file mode 100644
index 00000000000..fef8e41bffe
--- /dev/null
+++ b/spec/features/groups/empty_states_spec.rb
@@ -0,0 +1,70 @@
+require 'spec_helper'
+
+feature 'Groups Merge Requests Empty States' do
+ let(:group) { create(:group) }
+ let(:user) { create(:group_member, :developer, user: create(:user), group: group ).user }
+
+ before do
+ login_as(user)
+ end
+
+ context 'group has a project' do
+ let(:project) { create(:empty_project, namespace: group) }
+
+ before do
+ project.add_master(user)
+ end
+
+ context 'the project has a merge request' do
+ before do
+ create(:merge_request, source_project: project)
+
+ visit merge_requests_group_path(group)
+ end
+
+ it 'should not display an empty state' do
+ expect(page).not_to have_selector('.empty-state')
+ end
+ end
+
+ context 'the project has no merge requests', :js do
+ before do
+ visit merge_requests_group_path(group)
+ end
+
+ it 'should display an empty state' do
+ expect(page).to have_selector('.empty-state')
+ end
+
+ it 'should show a new merge request button' do
+ within '.empty-state' do
+ expect(page).to have_content('New merge request')
+ end
+ end
+
+ it 'the new merge request button opens a project dropdown' do
+ within '.empty-state' do
+ find('.new-project-item-select-button').click
+ end
+
+ expect(page).to have_selector('.ajax-project-dropdown')
+ end
+ end
+ end
+
+ context 'group without a project' do
+ before do
+ visit merge_requests_group_path(group)
+ end
+
+ it 'should display an empty state' do
+ expect(page).to have_selector('.empty-state')
+ end
+
+ it 'should not show a new merge request button' do
+ within '.empty-state' do
+ expect(page).not_to have_link('New merge request')
+ end
+ end
+ end
+end
diff --git a/spec/features/groups_spec.rb b/spec/features/groups_spec.rb
index c90cc06a8f5..8bfe6f4d54b 100644
--- a/spec/features/groups_spec.rb
+++ b/spec/features/groups_spec.rb
@@ -86,17 +86,40 @@ feature 'Group', feature: true do
describe 'create a nested group' do
let(:group) { create(:group, path: 'foo') }
- before do
- visit subgroups_group_path(group)
- click_link 'New Subgroup'
+ context 'as admin' do
+ before do
+ visit subgroups_group_path(group)
+ click_link 'New Subgroup'
+ end
+
+ it 'creates a nested group' do
+ fill_in 'Group path', with: 'bar'
+ click_button 'Create group'
+
+ expect(current_path).to eq(group_path('foo/bar'))
+ expect(page).to have_content("Group 'bar' was successfully created.")
+ end
end
- it 'creates a nested group' do
- fill_in 'Group path', with: 'bar'
- click_button 'Create group'
+ context 'as group owner' do
+ let(:user) { create(:user) }
- expect(current_path).to eq(group_path('foo/bar'))
- expect(page).to have_content("Group 'bar' was successfully created.")
+ before do
+ group.add_owner(user)
+ logout
+ login_as(user)
+
+ visit subgroups_group_path(group)
+ click_link 'New Subgroup'
+ end
+
+ it 'creates a nested group' do
+ fill_in 'Group path', with: 'bar'
+ click_button 'Create group'
+
+ expect(current_path).to eq(group_path('foo/bar'))
+ expect(page).to have_content("Group 'bar' was successfully created.")
+ end
end
end
diff --git a/spec/features/issues/award_emoji_spec.rb b/spec/features/issues/award_emoji_spec.rb
index 16e453bc328..8e67ab028d7 100644
--- a/spec/features/issues/award_emoji_spec.rb
+++ b/spec/features/issues/award_emoji_spec.rb
@@ -2,6 +2,7 @@ require 'rails_helper'
describe 'Awards Emoji', feature: true do
include WaitForAjax
+ include WaitForVueResource
let!(:project) { create(:project, :public) }
let!(:user) { create(:user) }
@@ -22,10 +23,11 @@ describe 'Awards Emoji', feature: true do
# The `heart_tip` emoji is not valid anymore so we need to skip validation
issue.award_emoji.build(user: user, name: 'heart_tip').save!(validate: false)
visit namespace_project_issue_path(project.namespace, project, issue)
+ wait_for_vue_resource
end
# Regression test: https://gitlab.com/gitlab-org/gitlab-ce/issues/29529
- it 'does not shows a 500 page' do
+ it 'does not shows a 500 page', js: true do
expect(page).to have_text(issue.title)
end
end
@@ -35,6 +37,7 @@ describe 'Awards Emoji', feature: true do
before do
visit namespace_project_issue_path(project.namespace, project, issue)
+ wait_for_vue_resource
end
it 'increments the thumbsdown emoji', js: true do
diff --git a/spec/features/issues/filtered_search/dropdown_assignee_spec.rb b/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
index 4dcc56a97d1..3d1a9ed1722 100644
--- a/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
@@ -194,7 +194,7 @@ describe 'Dropdown assignee', :feature, :js do
new_user = create(:user)
project.team << [new_user, :master]
- find('.filtered-search-input-container .clear-search').click
+ find('.filtered-search-box .clear-search').click
filtered_search.set('assignee')
filtered_search.send_keys(':')
diff --git a/spec/features/issues/filtered_search/dropdown_author_spec.rb b/spec/features/issues/filtered_search/dropdown_author_spec.rb
index 1772a120045..990e3b3e60c 100644
--- a/spec/features/issues/filtered_search/dropdown_author_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_author_spec.rb
@@ -172,7 +172,7 @@ describe 'Dropdown author', js: true, feature: true do
new_user = create(:user)
project.team << [new_user, :master]
- find('.filtered-search-input-container .clear-search').click
+ find('.filtered-search-box .clear-search').click
filtered_search.set('author')
send_keys_to_filtered_search(':')
diff --git a/spec/features/issues/filtered_search/dropdown_label_spec.rb b/spec/features/issues/filtered_search/dropdown_label_spec.rb
index b192064b693..c8645e08c4b 100644
--- a/spec/features/issues/filtered_search/dropdown_label_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_label_spec.rb
@@ -33,7 +33,7 @@ describe 'Dropdown label', js: true, feature: true do
end
def clear_search_field
- find('.filtered-search-input-container .clear-search').click
+ find('.filtered-search-box .clear-search').click
end
before do
diff --git a/spec/features/issues/filtered_search/dropdown_milestone_spec.rb b/spec/features/issues/filtered_search/dropdown_milestone_spec.rb
index ce96a420699..0a525bc68c9 100644
--- a/spec/features/issues/filtered_search/dropdown_milestone_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_milestone_spec.rb
@@ -252,7 +252,7 @@ describe 'Dropdown milestone', :feature, :js do
expect(initial_size).to be > 0
create(:milestone, project: project)
- find('.filtered-search-input-container .clear-search').click
+ find('.filtered-search-box .clear-search').click
filtered_search.set('milestone:')
expect(dropdown_milestone_size).to eq(initial_size)
diff --git a/spec/features/issues/filtered_search/filter_issues_spec.rb b/spec/features/issues/filtered_search/filter_issues_spec.rb
index 004e335dd38..6f00066de4d 100644
--- a/spec/features/issues/filtered_search/filter_issues_spec.rb
+++ b/spec/features/issues/filtered_search/filter_issues_spec.rb
@@ -1,6 +1,7 @@
require 'spec_helper'
describe 'Filter issues', js: true, feature: true do
+ include Devise::Test::IntegrationHelpers
include FilteredSearchHelpers
include WaitForAjax
@@ -42,16 +43,17 @@ describe 'Filter issues', js: true, feature: true do
project.team << [user2, :master]
group.add_developer(user)
group.add_developer(user2)
- login_as(user)
- create(:issue, project: project)
- create(:issue, title: "Bug report 1", project: project)
- create(:issue, title: "Bug report 2", project: project)
- create(:issue, title: "issue with 'single quotes'", project: project)
- create(:issue, title: "issue with \"double quotes\"", project: project)
- create(:issue, title: "issue with !@\#{$%^&*()-+", project: project)
- create(:issue, title: "issue by assignee", project: project, milestone: milestone, author: user, assignee: user)
- create(:issue, title: "issue by assignee with searchTerm", project: project, milestone: milestone, author: user, assignee: user)
+ sign_in(user)
+
+ create(:issue, project: project)
+ create(:issue, project: project, title: "Bug report 1")
+ create(:issue, project: project, title: "Bug report 2")
+ create(:issue, project: project, title: "issue with 'single quotes'")
+ create(:issue, project: project, title: "issue with \"double quotes\"")
+ create(:issue, project: project, title: "issue with !@\#{$%^&*()-+")
+ create(:issue, project: project, title: "issue by assignee", milestone: milestone, author: user, assignee: user)
+ create(:issue, project: project, title: "issue by assignee with searchTerm", milestone: milestone, author: user, assignee: user)
issue = create(:issue,
title: "Bug 2",
@@ -756,10 +758,10 @@ describe 'Filter issues', js: true, feature: true do
expect_issues_list_count(2)
- sort_toggle = find('.filtered-search-container .dropdown-toggle')
+ sort_toggle = find('.filtered-search-wrapper .dropdown-toggle')
sort_toggle.click
- find('.filtered-search-container .dropdown-menu li a', text: 'Oldest updated').click
+ find('.filtered-search-wrapper .dropdown-menu li a', text: 'Oldest updated').click
wait_for_ajax
expect(find('.issues-list .issue:first-of-type .issue-title-text a')).to have_content(old_issue.title)
diff --git a/spec/features/issues/filtered_search/recent_searches_spec.rb b/spec/features/issues/filtered_search/recent_searches_spec.rb
new file mode 100644
index 00000000000..f506065a242
--- /dev/null
+++ b/spec/features/issues/filtered_search/recent_searches_spec.rb
@@ -0,0 +1,92 @@
+require 'spec_helper'
+
+describe 'Recent searches', js: true, feature: true do
+ include FilteredSearchHelpers
+ include WaitForAjax
+
+ let!(:group) { create(:group) }
+ let!(:project) { create(:project, group: group) }
+ let!(:user) { create(:user) }
+
+ before do
+ Capybara.ignore_hidden_elements = false
+ project.add_master(user)
+ group.add_developer(user)
+ create(:issue, project: project)
+ login_as(user)
+
+ remove_recent_searches
+ end
+
+ after do
+ Capybara.ignore_hidden_elements = true
+ end
+
+ it 'searching adds to recent searches' do
+ visit namespace_project_issues_path(project.namespace, project)
+
+ input_filtered_search('foo', submit: true)
+ input_filtered_search('bar', submit: true)
+
+ items = all('.filtered-search-history-dropdown-item', visible: false)
+
+ expect(items.count).to eq(2)
+ expect(items[0].text).to eq('bar')
+ expect(items[1].text).to eq('foo')
+ end
+
+ it 'visiting URL with search params adds to recent searches' do
+ visit namespace_project_issues_path(project.namespace, project, label_name: 'foo', search: 'bar')
+ visit namespace_project_issues_path(project.namespace, project, label_name: 'qux', search: 'garply')
+
+ items = all('.filtered-search-history-dropdown-item', visible: false)
+
+ expect(items.count).to eq(2)
+ expect(items[0].text).to eq('label:~qux garply')
+ expect(items[1].text).to eq('label:~foo bar')
+ end
+
+ it 'saved recent searches are restored last on the list' do
+ set_recent_searches('["saved1", "saved2"]')
+
+ visit namespace_project_issues_path(project.namespace, project, search: 'foo')
+
+ items = all('.filtered-search-history-dropdown-item', visible: false)
+
+ expect(items.count).to eq(3)
+ expect(items[0].text).to eq('foo')
+ expect(items[1].text).to eq('saved1')
+ expect(items[2].text).to eq('saved2')
+ end
+
+ it 'clicking item fills search input' do
+ set_recent_searches('["foo", "bar"]')
+ visit namespace_project_issues_path(project.namespace, project)
+
+ all('.filtered-search-history-dropdown-item', visible: false)[0].trigger('click')
+ wait_for_filtered_search('foo')
+
+ expect(find('.filtered-search').value.strip).to eq('foo')
+ end
+
+ it 'clear recent searches button, clears recent searches' do
+ set_recent_searches('["foo"]')
+ visit namespace_project_issues_path(project.namespace, project)
+
+ items_before = all('.filtered-search-history-dropdown-item', visible: false)
+
+ expect(items_before.count).to eq(1)
+
+ find('.filtered-search-history-clear-button', visible: false).trigger('click')
+ items_after = all('.filtered-search-history-dropdown-item', visible: false)
+
+ expect(items_after.count).to eq(0)
+ end
+
+ it 'shows flash error when failed to parse saved history' do
+ set_recent_searches('fail')
+ visit namespace_project_issues_path(project.namespace, project)
+
+ expect(find('.flash-alert')).to have_text('An error occured while parsing recent searches')
+ end
+end
diff --git a/spec/features/issues/filtered_search/search_bar_spec.rb b/spec/features/issues/filtered_search/search_bar_spec.rb
index 59244d65eec..48e7af67616 100644
--- a/spec/features/issues/filtered_search/search_bar_spec.rb
+++ b/spec/features/issues/filtered_search/search_bar_spec.rb
@@ -44,7 +44,7 @@ describe 'Search bar', js: true, feature: true do
filtered_search.set(search_text)
expect(filtered_search.value).to eq(search_text)
- find('.filtered-search-input-container .clear-search').click
+ find('.filtered-search-box .clear-search').click
expect(filtered_search.value).to eq('')
end
@@ -55,7 +55,7 @@ describe 'Search bar', js: true, feature: true do
it 'hides after clicked' do
filtered_search.set('a')
- find('.filtered-search-input-container .clear-search').click
+ find('.filtered-search-box .clear-search').click
expect(page).to have_css('.clear-search', visible: false)
end
@@ -81,7 +81,7 @@ describe 'Search bar', js: true, feature: true do
expect(page.all('#js-dropdown-hint .filter-dropdown .filter-dropdown-item').size).to eq(1)
- find('.filtered-search-input-container .clear-search').click
+ find('.filtered-search-box .clear-search').click
filtered_search.click
expect(page.all('#js-dropdown-hint .filter-dropdown .filter-dropdown-item').size).to eq(original_size)
@@ -96,7 +96,7 @@ describe 'Search bar', js: true, feature: true do
expect(page.all('#js-dropdown-hint .filter-dropdown .filter-dropdown-item').size).to eq(0)
- find('.filtered-search-input-container .clear-search').click
+ find('.filtered-search-box .clear-search').click
filtered_search.click
expect(page.all('#js-dropdown-hint .filter-dropdown .filter-dropdown-item').size).to be > 0
diff --git a/spec/features/issues/move_spec.rb b/spec/features/issues/move_spec.rb
index f89b4db9e62..6c09903a2f6 100644
--- a/spec/features/issues/move_spec.rb
+++ b/spec/features/issues/move_spec.rb
@@ -37,8 +37,8 @@ feature 'issue move to another project' do
edit_issue(issue)
end
- scenario 'moving issue to another project' do
- first('#move_to_project_id', visible: false).set(new_project.id)
+ scenario 'moving issue to another project', js: true do
+ find('#move_to_project_id', visible: false).set(new_project.id)
click_button('Save changes')
expect(current_url).to include project_path(new_project)
diff --git a/spec/features/issues/spam_issues_spec.rb b/spec/features/issues/spam_issues_spec.rb
index 4bc9b49f889..6001476d0ca 100644
--- a/spec/features/issues/spam_issues_spec.rb
+++ b/spec/features/issues/spam_issues_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-describe 'New issue', feature: true do
+describe 'New issue', feature: true, js: true do
include StubENV
let(:project) { create(:project, :public) }
diff --git a/spec/features/issues_spec.rb b/spec/features/issues_spec.rb
index 7afceb88cf9..e3213d24f6a 100644
--- a/spec/features/issues_spec.rb
+++ b/spec/features/issues_spec.rb
@@ -695,4 +695,21 @@ describe 'Issues', feature: true do
end
end
end
+
+  describe 'title on issue#show', js: true do
+ include WaitForVueResource
+
+ it 'updates the title', js: true do
+ issue = create(:issue, author: @user, assignee: @user, project: project, title: 'new title')
+
+ visit namespace_project_issue_path(project.namespace, project, issue)
+
+ expect(page).to have_text("new title")
+
+ issue.update(title: "updated title")
+
+ wait_for_vue_resource
+ expect(page).to have_text("updated title")
+ end
+ end
end
diff --git a/spec/features/login_spec.rb b/spec/features/login_spec.rb
index f32d1f78b40..11d417c253d 100644
--- a/spec/features/login_spec.rb
+++ b/spec/features/login_spec.rb
@@ -199,52 +199,125 @@ feature 'Login', feature: true do
describe 'with required two-factor authentication enabled' do
let(:user) { create(:user) }
- before(:each) { stub_application_setting(require_two_factor_authentication: true) }
+ # TODO: otp_grace_period_started_at
- context 'with grace period defined' do
- before(:each) do
- stub_application_setting(two_factor_grace_period: 48)
- login_with(user)
- end
+ context 'global setting' do
+ before(:each) { stub_application_setting(require_two_factor_authentication: true) }
- context 'within the grace period' do
- it 'redirects to two-factor configuration page' do
- expect(current_path).to eq profile_two_factor_auth_path
- expect(page).to have_content('You must enable Two-Factor Authentication for your account before')
+ context 'with grace period defined' do
+ before(:each) do
+ stub_application_setting(two_factor_grace_period: 48)
+ login_with(user)
end
- it 'allows skipping two-factor configuration', js: true do
- expect(current_path).to eq profile_two_factor_auth_path
+ context 'within the grace period' do
+ it 'redirects to two-factor configuration page' do
+ expect(current_path).to eq profile_two_factor_auth_path
+ expect(page).to have_content('The global settings require you to enable Two-Factor Authentication for your account. You need to do this before ')
+ end
- click_link 'Configure it later'
- expect(current_path).to eq root_path
+ it 'allows skipping two-factor configuration', js: true do
+ expect(current_path).to eq profile_two_factor_auth_path
+
+ click_link 'Configure it later'
+ expect(current_path).to eq root_path
+ end
end
- end
- context 'after the grace period' do
- let(:user) { create(:user, otp_grace_period_started_at: 9999.hours.ago) }
+ context 'after the grace period' do
+ let(:user) { create(:user, otp_grace_period_started_at: 9999.hours.ago) }
- it 'redirects to two-factor configuration page' do
- expect(current_path).to eq profile_two_factor_auth_path
- expect(page).to have_content('You must enable Two-Factor Authentication for your account.')
+ it 'redirects to two-factor configuration page' do
+ expect(current_path).to eq profile_two_factor_auth_path
+ expect(page).to have_content(
+ 'The global settings require you to enable Two-Factor Authentication for your account.'
+ )
+ end
+
+ it 'disallows skipping two-factor configuration', js: true do
+ expect(current_path).to eq profile_two_factor_auth_path
+ expect(page).not_to have_link('Configure it later')
+ end
+ end
+ end
+
+ context 'without grace period defined' do
+ before(:each) do
+ stub_application_setting(two_factor_grace_period: 0)
+ login_with(user)
end
- it 'disallows skipping two-factor configuration', js: true do
+ it 'redirects to two-factor configuration page' do
expect(current_path).to eq profile_two_factor_auth_path
- expect(page).not_to have_link('Configure it later')
+ expect(page).to have_content(
+ 'The global settings require you to enable Two-Factor Authentication for your account.'
+ )
end
end
end
- context 'without grace period defined' do
- before(:each) do
- stub_application_setting(two_factor_grace_period: 0)
- login_with(user)
+ context 'group setting' do
+ before do
+ group1 = create :group, name: 'Group 1', require_two_factor_authentication: true
+ group1.add_user(user, GroupMember::DEVELOPER)
+ group2 = create :group, name: 'Group 2', require_two_factor_authentication: true
+ group2.add_user(user, GroupMember::DEVELOPER)
end
- it 'redirects to two-factor configuration page' do
- expect(current_path).to eq profile_two_factor_auth_path
- expect(page).to have_content('You must enable Two-Factor Authentication for your account.')
+ context 'with grace period defined' do
+ before(:each) do
+ stub_application_setting(two_factor_grace_period: 48)
+ login_with(user)
+ end
+
+ context 'within the grace period' do
+ it 'redirects to two-factor configuration page' do
+ expect(current_path).to eq profile_two_factor_auth_path
+ expect(page).to have_content(
+ 'The group settings for Group 1 and Group 2 require you to enable ' \
+ 'Two-Factor Authentication for your account. You need to do this ' \
+ 'before ')
+ end
+
+ it 'allows skipping two-factor configuration', js: true do
+ expect(current_path).to eq profile_two_factor_auth_path
+
+ click_link 'Configure it later'
+ expect(current_path).to eq root_path
+ end
+ end
+
+ context 'after the grace period' do
+ let(:user) { create(:user, otp_grace_period_started_at: 9999.hours.ago) }
+
+ it 'redirects to two-factor configuration page' do
+ expect(current_path).to eq profile_two_factor_auth_path
+ expect(page).to have_content(
+ 'The group settings for Group 1 and Group 2 require you to enable ' \
+ 'Two-Factor Authentication for your account.'
+ )
+ end
+
+ it 'disallows skipping two-factor configuration', js: true do
+ expect(current_path).to eq profile_two_factor_auth_path
+ expect(page).not_to have_link('Configure it later')
+ end
+ end
+ end
+
+ context 'without grace period defined' do
+ before(:each) do
+ stub_application_setting(two_factor_grace_period: 0)
+ login_with(user)
+ end
+
+ it 'redirects to two-factor configuration page' do
+ expect(current_path).to eq profile_two_factor_auth_path
+ expect(page).to have_content(
+ 'The group settings for Group 1 and Group 2 require you to enable ' \
+ 'Two-Factor Authentication for your account.'
+ )
+ end
end
end
end
diff --git a/spec/features/merge_requests/create_new_mr_spec.rb b/spec/features/merge_requests/create_new_mr_spec.rb
index f1ad4a55246..d4fe67c224f 100644
--- a/spec/features/merge_requests/create_new_mr_spec.rb
+++ b/spec/features/merge_requests/create_new_mr_spec.rb
@@ -15,7 +15,7 @@ feature 'Create New Merge Request', feature: true, js: true do
it 'selects the source branch sha when a tag with the same name exists' do
visit namespace_project_merge_requests_path(project.namespace, project)
- click_link 'New Merge Request'
+ click_link 'New merge request'
expect(page).to have_content('Source branch')
expect(page).to have_content('Target branch')
@@ -27,8 +27,8 @@ feature 'Create New Merge Request', feature: true, js: true do
it 'selects the target branch sha when a tag with the same name exists' do
visit namespace_project_merge_requests_path(project.namespace, project)
-
- click_link 'New Merge Request'
+
+ click_link 'New merge request'
expect(page).to have_content('Source branch')
expect(page).to have_content('Target branch')
@@ -42,7 +42,7 @@ feature 'Create New Merge Request', feature: true, js: true do
it 'generates a diff for an orphaned branch' do
visit namespace_project_merge_requests_path(project.namespace, project)
- click_link 'New Merge Request'
+ page.has_link?('New Merge Request') ? click_link("New Merge Request") : click_link('New merge request')
expect(page).to have_content('Source branch')
expect(page).to have_content('Target branch')
@@ -70,6 +70,18 @@ feature 'Create New Merge Request', feature: true, js: true do
visit new_namespace_project_merge_request_path(project.namespace, project, merge_request: { target_project_id: private_project.id })
expect(page).not_to have_content private_project.path_with_namespace
+ expect(page).to have_content project.path_with_namespace
+ end
+ end
+
+ context 'when source project cannot be viewed by the current user' do
+ it 'does not leak the private project name & namespace' do
+ private_project = create(:project, :private)
+
+ visit new_namespace_project_merge_request_path(project.namespace, project, merge_request: { source_project_id: private_project.id })
+
+ expect(page).not_to have_content private_project.path_with_namespace
+ expect(page).to have_content project.path_with_namespace
end
end
diff --git a/spec/features/merge_requests/reset_filters_spec.rb b/spec/features/merge_requests/reset_filters_spec.rb
index 14511707af4..df5943f9136 100644
--- a/spec/features/merge_requests/reset_filters_spec.rb
+++ b/spec/features/merge_requests/reset_filters_spec.rb
@@ -14,7 +14,7 @@ feature 'Merge requests filter clear button', feature: true, js: true do
let!(:mr2) { create(:merge_request, title: "Bugfix1", source_project: project, target_project: project, source_branch: "Bugfix1") }
let(:merge_request_css) { '.merge-request' }
- let(:clear_search_css) { '.filtered-search-input-container .clear-search' }
+ let(:clear_search_css) { '.filtered-search-box .clear-search' }
before do
mr2.labels << bug
diff --git a/spec/features/projects/blobs/blob_show_spec.rb b/spec/features/projects/blobs/blob_show_spec.rb
new file mode 100644
index 00000000000..01cd268ffe8
--- /dev/null
+++ b/spec/features/projects/blobs/blob_show_spec.rb
@@ -0,0 +1,23 @@
+require 'spec_helper'
+
+feature 'File blob', feature: true do
+ include WaitForAjax
+ include TreeHelper
+
+ let(:project) { create(:project, :public, :test_repo) }
+ let(:merge_request) { create(:merge_request, source_project: project, source_branch: 'feature', target_branch: 'master') }
+ let(:branch) { 'master' }
+ let(:file_path) { project.repository.ls_files(project.repository.root_ref)[1] }
+
+ context 'anonymous' do
+ context 'from blob file path' do
+ before do
+ visit namespace_project_blob_path(project.namespace, project, tree_join(branch, file_path))
+ end
+
+      it 'shows an edit link' do
+ expect(page).to have_link 'Edit'
+ end
+ end
+ end
+end
diff --git a/spec/features/projects/blobs/edit_spec.rb b/spec/features/projects/blobs/edit_spec.rb
index a820d07ab3b..46f2f487e0c 100644
--- a/spec/features/projects/blobs/edit_spec.rb
+++ b/spec/features/projects/blobs/edit_spec.rb
@@ -2,44 +2,135 @@ require 'spec_helper'
feature 'Editing file blob', feature: true, js: true do
include WaitForAjax
+ include TreeHelper
- given(:user) { create(:user) }
- given(:role) { :developer }
- given(:merge_request) { create(:merge_request, source_branch: 'feature', target_branch: 'master') }
- given(:project) { merge_request.target_project }
+ let(:project) { create(:project, :public, :test_repo) }
+ let(:merge_request) { create(:merge_request, source_project: project, source_branch: 'feature', target_branch: 'master') }
+ let(:branch) { 'master' }
+ let(:file_path) { project.repository.ls_files(project.repository.root_ref)[1] }
- background do
- login_as(user)
- project.team << [user, role]
- end
-
- def edit_and_commit
- wait_for_ajax
- first('.file-actions').click_link 'Edit'
- execute_script('ace.edit("editor").setValue("class NextFeature\nend\n")')
- click_button 'Commit Changes'
- end
+ context 'as a developer' do
+ let(:user) { create(:user) }
+ let(:role) { :developer }
- context 'from MR diff' do
before do
- visit diffs_namespace_project_merge_request_path(project.namespace, project, merge_request)
- edit_and_commit
+ project.team << [user, role]
+ login_as(user)
+ end
+
+ def edit_and_commit
+ wait_for_ajax
+ find('.js-edit-blob').click
+ execute_script('ace.edit("editor").setValue("class NextFeature\nend\n")')
+ click_button 'Commit Changes'
+ end
+
+ context 'from MR diff' do
+ before do
+ visit diffs_namespace_project_merge_request_path(project.namespace, project, merge_request)
+ edit_and_commit
+ end
+
+ it 'returns me to the mr' do
+ expect(page).to have_content(merge_request.title)
+ end
end
- scenario 'returns me to the mr' do
- expect(page).to have_content(merge_request.title)
+ context 'from blob file path' do
+ before do
+ visit namespace_project_blob_path(project.namespace, project, tree_join(branch, file_path))
+ edit_and_commit
+ end
+
+ it 'updates content' do
+ expect(page).to have_content 'successfully committed'
+ expect(page).to have_content 'NextFeature'
+ end
end
end
- context 'from blob file path' do
- before do
- visit namespace_project_blob_path(project.namespace, project, '/feature/files/ruby/feature.rb')
- edit_and_commit
+ context 'visit blob edit' do
+ context 'redirects to sign in and returns' do
+ context 'as developer' do
+ let(:user) { create(:user) }
+
+ before do
+ project.team << [user, :developer]
+ visit namespace_project_edit_blob_path(project.namespace, project, tree_join(branch, file_path))
+ end
+
+ it 'redirects to sign in and returns' do
+ expect(page).to have_current_path(new_user_session_path)
+
+ login_as(user)
+
+ expect(page).to have_current_path(namespace_project_edit_blob_path(project.namespace, project, tree_join(branch, file_path)))
+ end
+ end
+
+ context 'as guest' do
+ let(:user) { create(:user) }
+
+ before do
+ visit namespace_project_edit_blob_path(project.namespace, project, tree_join(branch, file_path))
+ end
+
+ it 'redirects to sign in and returns' do
+ expect(page).to have_current_path(new_user_session_path)
+
+ login_as(user)
+
+ expect(page).to have_current_path(namespace_project_blob_path(project.namespace, project, tree_join(branch, file_path)))
+ end
+ end
+ end
+
+ context 'as developer' do
+ let(:user) { create(:user) }
+ let(:protected_branch) { 'protected-branch' }
+
+ before do
+ project.team << [user, :developer]
+ project.repository.add_branch(user, protected_branch, 'master')
+ create(:protected_branch, project: project, name: protected_branch)
+ login_as(user)
+ end
+
+ context 'on some branch' do
+ before do
+ visit namespace_project_edit_blob_path(project.namespace, project, tree_join(branch, file_path))
+ end
+
+ it 'shows blob editor with same branch' do
+ expect(page).to have_current_path(namespace_project_edit_blob_path(project.namespace, project, tree_join(branch, file_path)))
+ expect(find('.js-target-branch .dropdown-toggle-text').text).to eq(branch)
+ end
+ end
+
+ context 'with protected branch' do
+ before do
+ visit namespace_project_edit_blob_path(project.namespace, project, tree_join(protected_branch, file_path))
+ end
+
+ it 'shows blob editor with patch branch' do
+ expect(find('.js-target-branch .dropdown-toggle-text').text).to eq('patch-1')
+ end
+ end
end
- scenario 'updates content' do
- expect(page).to have_content 'successfully committed'
- expect(page).to have_content 'NextFeature'
+ context 'as master' do
+ let(:user) { create(:user) }
+
+ before do
+ project.team << [user, :master]
+ login_as(user)
+ visit namespace_project_edit_blob_path(project.namespace, project, tree_join(branch, file_path))
+ end
+
+ it 'shows blob editor with same branch' do
+ expect(page).to have_current_path(namespace_project_edit_blob_path(project.namespace, project, tree_join(branch, file_path)))
+ expect(find('.js-target-branch .dropdown-toggle-text').text).to eq(branch)
+ end
end
end
end
diff --git a/spec/features/projects/builds_spec.rb b/spec/features/projects/builds_spec.rb
index 2116721b224..ab10434e10c 100644
--- a/spec/features/projects/builds_spec.rb
+++ b/spec/features/projects/builds_spec.rb
@@ -205,21 +205,13 @@ feature 'Builds', :feature do
it 'loads job trace' do
expect(page).to have_content 'BUILD TRACE'
- build.append_trace(' and more trace', 11)
+ build.trace.write do |stream|
+ stream.append(' and more trace', 11)
+ end
expect(page).to have_content 'BUILD TRACE and more trace'
end
end
-
- context 'when build does not have an initial trace' do
- let(:build) { create(:ci_build, pipeline: pipeline) }
-
- it 'loads new trace' do
- build.append_trace('build trace', 0)
-
- expect(page).to have_content 'build trace'
- end
- end
end
feature 'Variables' do
@@ -390,7 +382,7 @@ feature 'Builds', :feature do
it 'sends the right headers' do
expect(page.status_code).to eq(200)
expect(page.response_headers['Content-Type']).to eq('text/plain; charset=utf-8')
- expect(page.response_headers['X-Sendfile']).to eq(build.path_to_trace)
+ expect(page.response_headers['X-Sendfile']).to eq(build.trace.send(:current_path))
end
end
@@ -409,43 +401,24 @@ feature 'Builds', :feature do
context 'storage form' do
let(:existing_file) { Tempfile.new('existing-trace-file').path }
- let(:non_existing_file) do
- file = Tempfile.new('non-existing-trace-file')
- path = file.path
- file.unlink
- path
- end
- context 'when build has trace in file' do
- before do
- Capybara.current_session.driver.header('X-Sendfile-Type', 'X-Sendfile')
- build.run!
- visit namespace_project_build_path(project.namespace, project, build)
+ before do
+ Capybara.current_session.driver.header('X-Sendfile-Type', 'X-Sendfile')
- allow_any_instance_of(Project).to receive(:ci_id).and_return(nil)
- allow_any_instance_of(Ci::Build).to receive(:path_to_trace).and_return(existing_file)
- allow_any_instance_of(Ci::Build).to receive(:old_path_to_trace).and_return(non_existing_file)
+ build.run!
- page.within('.js-build-sidebar') { click_link 'Raw' }
- end
+ allow_any_instance_of(Gitlab::Ci::Trace).to receive(:paths)
+ .and_return(paths)
- it 'sends the right headers' do
- expect(page.status_code).to eq(200)
- expect(page.response_headers['Content-Type']).to eq('text/plain; charset=utf-8')
- expect(page.response_headers['X-Sendfile']).to eq(existing_file)
- end
+ visit namespace_project_build_path(project.namespace, project, build)
end
- context 'when build has trace in old file' do
- before do
- Capybara.current_session.driver.header('X-Sendfile-Type', 'X-Sendfile')
- build.run!
- visit namespace_project_build_path(project.namespace, project, build)
-
- allow_any_instance_of(Project).to receive(:ci_id).and_return(999)
- allow_any_instance_of(Ci::Build).to receive(:path_to_trace).and_return(non_existing_file)
- allow_any_instance_of(Ci::Build).to receive(:old_path_to_trace).and_return(existing_file)
+ context 'when build has trace in file' do
+ let(:paths) do
+ [existing_file]
+ end
+ before do
page.within('.js-build-sidebar') { click_link 'Raw' }
end
@@ -457,20 +430,10 @@ feature 'Builds', :feature do
end
context 'when build has trace in DB' do
- before do
- Capybara.current_session.driver.header('X-Sendfile-Type', 'X-Sendfile')
- build.run!
- visit namespace_project_build_path(project.namespace, project, build)
-
- allow_any_instance_of(Project).to receive(:ci_id).and_return(nil)
- allow_any_instance_of(Ci::Build).to receive(:path_to_trace).and_return(non_existing_file)
- allow_any_instance_of(Ci::Build).to receive(:old_path_to_trace).and_return(non_existing_file)
-
- page.within('.js-build-sidebar') { click_link 'Raw' }
- end
+ let(:paths) { [] }
it 'sends the right headers' do
- expect(page.status_code).to eq(404)
+        expect(page).not_to have_link('Raw')
end
end
end
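The trace hunks above replace the old append_trace / path_to_trace helpers with the Gitlab::Ci::Trace abstraction. A minimal sketch of that API as exercised here, limited to the calls visible in the hunks (trace.write with stream.append, and stubbing Gitlab::Ci::Trace#paths for the storage contexts); the build and pipeline setup is assumed from the surrounding spec:

    # Sketch only: pipeline and the :ci_build factory come from the spec above.
    build = create(:ci_build, pipeline: pipeline)

    # Append to the live trace through the Gitlab::Ci::Trace wrapper;
    # 11 is the offset to append at (the length of the existing 'BUILD TRACE' content).
    build.trace.write do |stream|
      stream.append(' and more trace', 11)
    end

    # The 'storage form' contexts stub the candidate trace paths directly;
    # an empty array stands in for a trace stored in the database.
    allow_any_instance_of(Gitlab::Ci::Trace).to receive(:paths)
      .and_return([existing_file])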
diff --git a/spec/features/projects/environments/environments_spec.rb b/spec/features/projects/environments/environments_spec.rb
index 641e2cf7402..cf393afccbb 100644
--- a/spec/features/projects/environments/environments_spec.rb
+++ b/spec/features/projects/environments/environments_spec.rb
@@ -23,6 +23,46 @@ feature 'Environments page', :feature, :js do
expect(page).to have_link('Available')
expect(page).to have_link('Stopped')
end
+
+ describe 'with one available environment' do
+ given(:environment) { create(:environment, project: project, state: :available) }
+
+ describe 'in available tab page' do
+ it 'should show one environment' do
+ visit namespace_project_environments_path(project.namespace, project, scope: 'available')
+ expect(page).to have_css('.environments-container')
+ expect(page.all('tbody > tr').length).to eq(1)
+ end
+ end
+
+ describe 'in stopped tab page' do
+ it 'should show no environments' do
+ visit namespace_project_environments_path(project.namespace, project, scope: 'stopped')
+ expect(page).to have_css('.environments-container')
+ expect(page).to have_content('You don\'t have any environments right now')
+ end
+ end
+ end
+
+ describe 'with one stopped environment' do
+ given(:environment) { create(:environment, project: project, state: :stopped) }
+
+ describe 'in available tab page' do
+ it 'should show no environments' do
+ visit namespace_project_environments_path(project.namespace, project, scope: 'available')
+ expect(page).to have_css('.environments-container')
+ expect(page).to have_content('You don\'t have any environments right now')
+ end
+ end
+
+ describe 'in stopped tab page' do
+ it 'should show one environment' do
+ visit namespace_project_environments_path(project.namespace, project, scope: 'stopped')
+ expect(page).to have_css('.environments-container')
+ expect(page.all('tbody > tr').length).to eq(1)
+ end
+ end
+ end
end
context 'without environments' do
diff --git a/spec/features/projects/import_export/import_file_spec.rb b/spec/features/projects/import_export/import_file_spec.rb
index 2d1106ea3e8..583f479ec18 100644
--- a/spec/features/projects/import_export/import_file_spec.rb
+++ b/spec/features/projects/import_export/import_file_spec.rb
@@ -69,12 +69,8 @@ feature 'Import/Export - project import integration test', feature: true, js: tr
select2(namespace.id, from: '#project_namespace_id')
- # click on disabled element
- find(:link, 'GitLab export').trigger('click')
-
- page.within('.flash-container') do
- expect(page).to have_content('Please enter path and name')
- end
+      # The disabled import button should show a tooltip asking for a valid project name
+ expect(find('.import_gitlab_project')['title']).to eq('Please enter a valid project name.')
end
end
diff --git a/spec/features/projects/merge_requests/list_spec.rb b/spec/features/projects/merge_requests/list_spec.rb
index 5dd58ad66a7..7e8a796c55d 100644
--- a/spec/features/projects/merge_requests/list_spec.rb
+++ b/spec/features/projects/merge_requests/list_spec.rb
@@ -17,4 +17,28 @@ feature 'Merge Requests List' do
expect(page).not_to have_selector('.js-new-board-list')
end
+
+ it 'should show an empty state' do
+ visit namespace_project_merge_requests_path(project.namespace, project)
+
+ expect(page).to have_selector('.empty-state')
+ end
+
+  it 'shows a new merge request button in the empty state' do
+ visit namespace_project_merge_requests_path(project.namespace, project)
+
+ expect(page).to have_link 'New merge request', href: new_namespace_project_merge_request_path(project.namespace, project)
+ end
+
+ context 'if there are merge requests' do
+ before do
+ create(:merge_request, assignee: user, source_project: project)
+
+ visit namespace_project_merge_requests_path(project.namespace, project)
+ end
+
+ it 'should not show an empty state' do
+ expect(page).not_to have_selector('.empty-state')
+ end
+ end
end
diff --git a/spec/features/projects/new_project_spec.rb b/spec/features/projects/new_project_spec.rb
index 52196ce49bd..c66b9a34b86 100644
--- a/spec/features/projects/new_project_spec.rb
+++ b/spec/features/projects/new_project_spec.rb
@@ -71,6 +71,22 @@ feature "New project", feature: true do
end
end
end
+
+ context "with subgroup namespace" do
+ let(:group) { create(:group, :private, owner: user) }
+ let(:subgroup) { create(:group, parent: group) }
+
+ before do
+ group.add_master(user)
+ visit new_project_path(namespace_id: subgroup.id)
+ end
+
+      it "selects the subgroup namespace" do
+ namespace = find("#project_namespace_id option[selected]")
+
+ expect(namespace.text).to eq subgroup.full_path
+ end
+ end
end
context 'Import project options' do
diff --git a/spec/features/projects/wiki/user_creates_wiki_page_spec.rb b/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
index fff8b9f3447..7bdaafd1beb 100644
--- a/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
+++ b/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
@@ -15,6 +15,10 @@ feature 'Projects > Wiki > User creates wiki page', feature: true do
let(:project) { create(:project, namespace: user.namespace) }
context 'when wiki is empty' do
+ scenario 'commit message field has value "Create home"' do
+ expect(page).to have_field('wiki[message]', with: 'Create home')
+ end
+
scenario 'directly from the wiki home page' do
fill_in :wiki_content, with: 'My awesome wiki!'
click_button 'Create page'
@@ -37,6 +41,9 @@ feature 'Projects > Wiki > User creates wiki page', feature: true do
fill_in :new_wiki_path, with: 'foo'
click_button 'Create Page'
+ # Commit message field should have correct value.
+ expect(page).to have_field('wiki[message]', with: 'Create foo')
+
fill_in :wiki_content, with: 'My awesome wiki!'
click_button 'Create page'
@@ -51,6 +58,9 @@ feature 'Projects > Wiki > User creates wiki page', feature: true do
fill_in :new_wiki_path, with: 'Spaces in the name'
click_button 'Create Page'
+ # Commit message field should have correct value.
+ expect(page).to have_field('wiki[message]', with: 'Create spaces in the name')
+
fill_in :wiki_content, with: 'My awesome wiki!'
click_button 'Create page'
@@ -65,6 +75,9 @@ feature 'Projects > Wiki > User creates wiki page', feature: true do
fill_in :new_wiki_path, with: 'hyphens-in-the-name'
click_button 'Create Page'
+ # Commit message field should have correct value.
+ expect(page).to have_field('wiki[message]', with: 'Create hyphens in the name')
+
fill_in :wiki_content, with: 'My awesome wiki!'
click_button 'Create page'
@@ -80,6 +93,10 @@ feature 'Projects > Wiki > User creates wiki page', feature: true do
let(:project) { create(:project, namespace: create(:group, :public)) }
context 'when wiki is empty' do
+ scenario 'commit message field has value "Create home"' do
+ expect(page).to have_field('wiki[message]', with: 'Create home')
+ end
+
scenario 'directly from the wiki home page' do
fill_in :wiki_content, with: 'My awesome wiki!'
click_button 'Create page'
@@ -101,6 +118,9 @@ feature 'Projects > Wiki > User creates wiki page', feature: true do
fill_in :new_wiki_path, with: 'foo'
click_button 'Create Page'
+ # Commit message field should have correct value.
+ expect(page).to have_field('wiki[message]', with: 'Create foo')
+
fill_in :wiki_content, with: 'My awesome wiki!'
click_button 'Create page'
diff --git a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb b/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
index aedc0333cb9..86cf520ea80 100644
--- a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
+++ b/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
@@ -19,6 +19,9 @@ feature 'Projects > Wiki > User updates wiki page', feature: true do
scenario 'success when the wiki content is not empty' do
click_link 'Edit'
+ # Commit message field should have correct value.
+ expect(page).to have_field('wiki[message]', with: 'Update home')
+
fill_in :wiki_content, with: 'My awesome wiki!'
click_button 'Save changes'
@@ -48,6 +51,9 @@ feature 'Projects > Wiki > User updates wiki page', feature: true do
scenario 'the home page' do
click_link 'Edit'
+ # Commit message field should have correct value.
+ expect(page).to have_field('wiki[message]', with: 'Update home')
+
fill_in :wiki_content, with: 'My awesome wiki!'
click_button 'Save changes'
diff --git a/spec/features/search_spec.rb b/spec/features/search_spec.rb
index 40ef4c098b9..e8ad28a00f0 100644
--- a/spec/features/search_spec.rb
+++ b/spec/features/search_spec.rb
@@ -164,6 +164,8 @@ describe "Search", feature: true do
end
context 'click the links in the category search dropdown', js: true do
+ let!(:merge_request) { create(:merge_request, source_project: project, author: user, assignee: user) }
+
before do
page.find('#search').click
end
diff --git a/spec/features/security/project/internal_access_spec.rb b/spec/features/security/project/internal_access_spec.rb
index 1a66d1a6a1e..6ecdc8cbb71 100644
--- a/spec/features/security/project/internal_access_spec.rb
+++ b/spec/features/security/project/internal_access_spec.rb
@@ -443,9 +443,12 @@ describe "Internal Project Access", feature: true do
end
describe "GET /:project_path/container_registry" do
+ let(:container_repository) { create(:container_repository) }
+
before do
- stub_container_registry_tags('latest')
+ stub_container_registry_tags(repository: :any, tags: ['latest'])
stub_container_registry_config(enabled: true)
+ project.container_repositories << container_repository
end
subject { namespace_project_container_registry_index_path(project.namespace, project) }
diff --git a/spec/features/security/project/private_access_spec.rb b/spec/features/security/project/private_access_spec.rb
index ad3bd60a313..a8fc0624588 100644
--- a/spec/features/security/project/private_access_spec.rb
+++ b/spec/features/security/project/private_access_spec.rb
@@ -432,9 +432,12 @@ describe "Private Project Access", feature: true do
end
describe "GET /:project_path/container_registry" do
+ let(:container_repository) { create(:container_repository) }
+
before do
- stub_container_registry_tags('latest')
+ stub_container_registry_tags(repository: :any, tags: ['latest'])
stub_container_registry_config(enabled: true)
+ project.container_repositories << container_repository
end
subject { namespace_project_container_registry_index_path(project.namespace, project) }
diff --git a/spec/features/security/project/public_access_spec.rb b/spec/features/security/project/public_access_spec.rb
index e06aab4e0b2..c4d2f50ca14 100644
--- a/spec/features/security/project/public_access_spec.rb
+++ b/spec/features/security/project/public_access_spec.rb
@@ -443,9 +443,12 @@ describe "Public Project Access", feature: true do
end
describe "GET /:project_path/container_registry" do
+ let(:container_repository) { create(:container_repository) }
+
before do
- stub_container_registry_tags('latest')
+ stub_container_registry_tags(repository: :any, tags: ['latest'])
stub_container_registry_config(enabled: true)
+ project.container_repositories << container_repository
end
subject { namespace_project_container_registry_index_path(project.namespace, project) }
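The same registry setup appears in the internal, private and public access specs above: tags are now stubbed per repository rather than with a bare tag list, and a container_repository record is attached to the project so the index page has something to render. A condensed sketch of that shared setup, assuming the factories and stub helpers already used in these specs:

    let(:container_repository) { create(:container_repository) }

    before do
      # Stub tag listing for any repository and enable the registry in the config stub.
      stub_container_registry_tags(repository: :any, tags: ['latest'])
      stub_container_registry_config(enabled: true)

      # The registry index now reads repositories from the database, so attach one.
      project.container_repositories << container_repository
    end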
diff --git a/spec/finders/group_projects_finder_spec.rb b/spec/finders/group_projects_finder_spec.rb
index ef97b061ca7..3c7c9bdcd08 100644
--- a/spec/finders/group_projects_finder_spec.rb
+++ b/spec/finders/group_projects_finder_spec.rb
@@ -3,8 +3,9 @@ require 'spec_helper'
describe GroupProjectsFinder do
let(:group) { create(:group) }
let(:current_user) { create(:user) }
+ let(:options) { {} }
- let(:finder) { described_class.new(source_user) }
+ let(:finder) { described_class.new(group: group, current_user: current_user, options: options) }
let!(:public_project) { create(:empty_project, :public, group: group, path: '1') }
let!(:private_project) { create(:empty_project, :private, group: group, path: '2') }
@@ -18,22 +19,27 @@ describe GroupProjectsFinder do
shared_project_3.project_group_links.create(group_access: Gitlab::Access::MASTER, group: group)
end
+ subject { finder.execute }
+
describe 'with a group member current user' do
- before { group.add_user(current_user, Gitlab::Access::MASTER) }
+ before do
+ group.add_master(current_user)
+ end
context "only shared" do
- subject { described_class.new(group, only_shared: true).execute(current_user) }
- it { is_expected.to eq([shared_project_3, shared_project_2, shared_project_1]) }
+ let(:options) { { only_shared: true } }
+
+ it { is_expected.to match_array([shared_project_3, shared_project_2, shared_project_1]) }
end
context "only owned" do
- subject { described_class.new(group, only_owned: true).execute(current_user) }
- it { is_expected.to eq([private_project, public_project]) }
+ let(:options) { { only_owned: true } }
+
+ it { is_expected.to match_array([private_project, public_project]) }
end
context "all" do
- subject { described_class.new(group).execute(current_user) }
- it { is_expected.to eq([shared_project_3, shared_project_2, shared_project_1, private_project, public_project]) }
+ it { is_expected.to match_array([shared_project_3, shared_project_2, shared_project_1, private_project, public_project]) }
end
end
@@ -44,47 +50,57 @@ describe GroupProjectsFinder do
end
context "only shared" do
+ let(:options) { { only_shared: true } }
+
context "without external user" do
- subject { described_class.new(group, only_shared: true).execute(current_user) }
- it { is_expected.to eq([shared_project_3, shared_project_2, shared_project_1]) }
+ it { is_expected.to match_array([shared_project_3, shared_project_2, shared_project_1]) }
end
context "with external user" do
- before { current_user.update_attributes(external: true) }
- subject { described_class.new(group, only_shared: true).execute(current_user) }
- it { is_expected.to eq([shared_project_2, shared_project_1]) }
+ before do
+ current_user.update_attributes(external: true)
+ end
+
+ it { is_expected.to match_array([shared_project_2, shared_project_1]) }
end
end
context "only owned" do
+ let(:options) { { only_owned: true } }
+
context "without external user" do
- before { private_project.team << [current_user, Gitlab::Access::MASTER] }
- subject { described_class.new(group, only_owned: true).execute(current_user) }
- it { is_expected.to eq([private_project, public_project]) }
+ before do
+ private_project.team << [current_user, Gitlab::Access::MASTER]
+ end
+
+ it { is_expected.to match_array([private_project, public_project]) }
end
context "with external user" do
- before { current_user.update_attributes(external: true) }
- subject { described_class.new(group, only_owned: true).execute(current_user) }
- it { is_expected.to eq([public_project]) }
- end
+ before do
+ current_user.update_attributes(external: true)
+ end
- context "all" do
- subject { described_class.new(group).execute(current_user) }
- it { is_expected.to eq([shared_project_3, shared_project_2, shared_project_1, public_project]) }
+ it { is_expected.to eq([public_project]) }
end
end
+
+ context "all" do
+ it { is_expected.to match_array([shared_project_3, shared_project_2, shared_project_1, public_project]) }
+ end
end
describe "no user" do
context "only shared" do
- subject { described_class.new(group, only_shared: true).execute(current_user) }
- it { is_expected.to eq([shared_project_3, shared_project_1]) }
+ let(:options) { { only_shared: true } }
+
+ it { is_expected.to match_array([shared_project_3, shared_project_1]) }
end
context "only owned" do
- subject { described_class.new(group, only_owned: true).execute(current_user) }
- it { is_expected.to eq([public_project]) }
+ let(:options) { { only_owned: true } }
+
+ it { is_expected.to eq([public_project]) }
end
end
end
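As the hunks above show, GroupProjectsFinder is now built with keyword arguments and execute takes no arguments; the only_shared / only_owned switches move into the options hash. A minimal usage sketch based only on the calls in this spec:

    # Construct the finder up front, then call execute without arguments.
    finder = GroupProjectsFinder.new(group: group,
                                     current_user: current_user,
                                     options: { only_owned: true })
    finder.execute # group-owned projects visible to current_user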
diff --git a/spec/finders/projects_finder_spec.rb b/spec/finders/projects_finder_spec.rb
index e44e7434c80..148adcffe3b 100644
--- a/spec/finders/projects_finder_spec.rb
+++ b/spec/finders/projects_finder_spec.rb
@@ -21,38 +21,144 @@ describe ProjectsFinder do
create(:empty_project, :private, name: 'D', path: 'D')
end
- let(:finder) { described_class.new }
+ let(:params) { {} }
+ let(:current_user) { user }
+ let(:project_ids_relation) { nil }
+ let(:finder) { described_class.new(params: params, current_user: current_user, project_ids_relation: project_ids_relation) }
+
+ subject { finder.execute }
describe 'without a user' do
- subject { finder.execute }
+ let(:current_user) { nil }
it { is_expected.to eq([public_project]) }
end
describe 'with a user' do
- subject { finder.execute(user) }
-
describe 'without private projects' do
- it { is_expected.to eq([public_project, internal_project]) }
+ it { is_expected.to match_array([public_project, internal_project]) }
end
describe 'with private projects' do
before do
- private_project.add_user(user, Gitlab::Access::MASTER)
+ private_project.add_master(user)
end
- it do
- is_expected.to eq([public_project, internal_project, private_project])
- end
+ it { is_expected.to match_array([public_project, internal_project, private_project]) }
end
end
describe 'with project_ids_relation' do
let(:project_ids_relation) { Project.where(id: internal_project.id) }
- subject { finder.execute(user, project_ids_relation) }
-
it { is_expected.to eq([internal_project]) }
end
+
+ describe 'filter by visibility_level' do
+ before do
+ private_project.add_master(user)
+ end
+
+ context 'private' do
+ let(:params) { { visibility_level: Gitlab::VisibilityLevel::PRIVATE } }
+
+ it { is_expected.to eq([private_project]) }
+ end
+
+ context 'internal' do
+ let(:params) { { visibility_level: Gitlab::VisibilityLevel::INTERNAL } }
+
+ it { is_expected.to eq([internal_project]) }
+ end
+
+ context 'public' do
+ let(:params) { { visibility_level: Gitlab::VisibilityLevel::PUBLIC } }
+
+ it { is_expected.to eq([public_project]) }
+ end
+ end
+
+ describe 'filter by tags' do
+ before do
+ public_project.tag_list.add('foo')
+ public_project.save!
+ end
+
+ let(:params) { { tag: 'foo' } }
+
+ it { is_expected.to eq([public_project]) }
+ end
+
+ describe 'filter by personal' do
+ let!(:personal_project) { create(:empty_project, namespace: user.namespace) }
+ let(:params) { { personal: true } }
+
+ it { is_expected.to eq([personal_project]) }
+ end
+
+ describe 'filter by search' do
+ let(:params) { { search: 'C' } }
+
+ it { is_expected.to eq([public_project]) }
+ end
+
+ describe 'filter by name for backward compatibility' do
+ let(:params) { { name: 'C' } }
+
+ it { is_expected.to eq([public_project]) }
+ end
+
+ describe 'filter by archived' do
+ let!(:archived_project) { create(:empty_project, :public, :archived, name: 'E', path: 'E') }
+
+ context 'non_archived=true' do
+ let(:params) { { non_archived: true } }
+
+ it { is_expected.to match_array([public_project, internal_project]) }
+ end
+
+ context 'non_archived=false' do
+ let(:params) { { non_archived: false } }
+
+ it { is_expected.to match_array([public_project, internal_project, archived_project]) }
+ end
+
+ describe 'filter by archived for backward compatibility' do
+ let(:params) { { archived: false } }
+
+ it { is_expected.to match_array([public_project, internal_project]) }
+ end
+ end
+
+ describe 'filter by trending' do
+ let!(:trending_project) { create(:trending_project, project: public_project) }
+ let(:params) { { trending: true } }
+
+ it { is_expected.to eq([public_project]) }
+ end
+
+ describe 'filter by non_public' do
+ let(:params) { { non_public: true } }
+ before do
+ private_project.add_developer(current_user)
+ end
+
+ it { is_expected.to eq([private_project]) }
+ end
+
+ describe 'filter by viewable_starred_projects' do
+ let(:params) { { starred: true } }
+ before do
+ current_user.toggle_star(public_project)
+ end
+
+ it { is_expected.to eq([public_project]) }
+ end
+
+ describe 'sorting' do
+ let(:params) { { sort: 'name_asc' } }
+
+ it { is_expected.to eq([internal_project, public_project]) }
+ end
end
end
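ProjectsFinder follows the same pattern: params, the current user and an optional project_ids_relation go to the constructor, and execute takes no arguments. A short sketch using the visibility_level filter exercised above:

    # Keyword-argument construction; execute no longer receives the user.
    finder = ProjectsFinder.new(params: { visibility_level: Gitlab::VisibilityLevel::PUBLIC },
                                current_user: user,
                                project_ids_relation: nil)
    finder.execute # public projects visible to user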
diff --git a/spec/fixtures/metrics.json b/spec/fixtures/metrics.json
new file mode 100644
index 00000000000..06427adce57
--- /dev/null
+++ b/spec/fixtures/metrics.json
@@ -0,0 +1 @@
+{"success":true,"metrics":{"memory_values":[{"metric":{},"values":[[1490935421.33,"9.832775297619047"],[1490935481.33,"9.8359375"],[1490935541.33,"9.837983630952381"],[1490935601.33,"9.840401785714286"],[1490935661.33,"9.84375"],[1490935721.33,"9.846168154761905"],[1490935781.33,"9.849516369047619"],[1490935841.33,"9.85249255952381"],[1490935901.33,"9.855096726190476"],[1490935961.33,"9.845796130952381"],[1490936021.33,"9.847284226190476"],[1490936081.33,"9.84468005952381"],[1490936141.33,"9.847470238095237"],[1490936201.33,"9.850818452380953"],[1490936261.33,"9.852864583333334"],[1490936321.33,"9.854910714285714"],[1490936381.33,"9.857700892857142"],[1490936441.33,"9.865513392857142"],[1490936501.33,"9.874813988095237"],[1490936561.33,"9.866071428571429"],[1490936621.33,"9.849330357142858"],[1490936681.33,"9.841331845238095"],[1490936741.33,"9.853236607142858"],[1490936801.33,"9.839657738095237"],[1490936861.33,"9.841517857142858"],[1490936921.33,"9.852864583333334"],[1490936981.33,"9.851376488095237"],[1490937041.33,"9.837611607142858"],[1490937101.33,"9.840401785714286"],[1490937161.33,"9.843377976190476"],[1490937221.33,"9.845796130952381"],[1490937281.33,"9.84858630952381"],[1490937341.33,"9.866071428571429"],[1490937401.33,"9.852864583333334"],[1490937461.33,"9.855840773809524"],[1490937521.33,"9.837797619047619"],[1490937581.33,"9.840959821428571"],[1490937641.33,"9.848958333333334"],[1490937701.33,"9.844308035714286"],[1490937761.33,"9.845982142857142"],[1490937821.33,"9.83984375"],[1490937881.33,"9.830171130952381"],[1490937941.33,"9.83686755952381"],[1490938001.33,"9.834263392857142"],[1490938061.33,"9.836309523809524"],[1490938121.33,"9.83984375"],[1490938181.33,"9.832775297619047"],[1490938241.33,"9.818266369047619"],[1490938301.33,"9.820126488095237"],[1490938361.33,"9.824032738095237"],[1490938421.33,"9.826078869047619"],[1490938481.33,"9.817708333333334"],[1490938541.33,"9.811755952380953"],[1490938601.33,"9.811197916666666"],[1490938661.33,"9.81156994047619"],[1490938721.33,"9.812313988095237"],[1490938781.33,"9.813058035714286"],[1490938841.33,"9.81343005952381"],[1490938901.33,"9.81547619047619"],[1490938961.33,"9.818824404761905"],[1490939021.33,"9.819754464285714"],[1490939081.33,"9.820684523809524"],[1490939141.33,"9.824776785714286"],[1490939201.33,"9.826078869047619"],[1490939261.33,"9.828311011904763"],[1490939321.33,"9.820870535714286"],[1490939381.33,"9.823846726190476"],[1490939441.33,"9.824404761904763"],[1490939501.33,"9.82905505952381"],[1490939561.33,"9.832775297619047"],[1490939621.33,"9.835565476190476"],[1490939681.33,"9.833333333333334"],[1490939741.33,"9.835379464285714"],[1490939801.33,"9.837239583333334"],[1490939861.33,"9.839285714285714"],[1490939921.33,"9.829613095238095"],[1490939981.33,"9.832403273809524"],[1490940041.33,"9.835751488095237"],[1490940101.33,"9.837797619047619"],[1490940161.33,"9.840959821428571"],[1490940221.33,"9.84375"],[1490940281.33,"9.846354166666666"],[1490940341.33,"9.853980654761905"],[1490940401.33,"9.852678571428571"],[1490940461.33,"9.861979166666666"],[1490940521.33,"9.857700892857142"],[1490940581.33,"9.861793154761905"],[1490940641.33,"9.86421130952381"],[1490940701.33,"9.867001488095237"],[1490940761.33,"9.867931547619047"],[1490940821.33,"9.859933035714286"],[1490940881.33,"9.86235119047619"],[1490940941.33,"9.865141369047619"],[1490941001.33,"9.866443452380953"],[1490941061.33,"9.868861607142858"],[1490941121.33,"9.871465773809524"],[1490941181.33,"9.873511904761905"],[1490941241.33,"9.875558035714286"],[1490941301
.33,"9.87797619047619"],[1490941361.33,"9.881324404761905"],[1490941421.33,"9.888392857142858"],[1490941481.33,"9.888392857142858"],[1490941541.33,"9.89546130952381"],[1490941601.33,"9.898065476190476"],[1490941661.33,"9.885044642857142"],[1490941721.33,"9.872395833333334"],[1490941781.33,"9.870349702380953"],[1490941841.33,"9.873325892857142"],[1490941901.33,"9.875558035714286"],[1490941961.33,"9.878534226190476"],[1490942021.33,"9.87983630952381"],[1490942081.33,"9.884300595238095"],[1490942141.33,"9.891927083333334"],[1490942201.33,"9.890252976190476"],[1490942261.33,"9.891927083333334"],[1490942321.33,"9.893787202380953"],[1490942381.33,"9.892113095238095"],[1490942441.33,"9.900111607142858"],[1490942501.33,"9.893415178571429"],[1490942561.33,"9.895647321428571"],[1490942621.33,"9.889322916666666"],[1490942681.33,"9.883556547619047"],[1490942741.33,"9.885602678571429"],[1490942801.33,"9.88764880952381"],[1490942861.33,"9.898623511904763"],[1490942921.33,"9.89453125"],[1490942981.33,"9.885044642857142"],[1490943041.33,"9.874813988095237"],[1490943101.33,"9.880766369047619"],[1490943161.33,"9.868675595238095"],[1490943221.33,"9.864769345238095"],[1490943281.33,"9.852864583333334"],[1490943341.33,"9.855096726190476"],[1490943401.33,"9.857514880952381"],[1490943461.33,"9.859747023809524"],[1490943521.33,"9.861793154761905"],[1490943581.33,"9.864025297619047"],[1490943641.33,"9.857514880952381"],[1490943701.33,"9.859002976190476"],[1490943761.33,"9.860677083333334"],[1490943821.33,"9.864025297619047"],[1490943881.33,"9.86625744047619"],[1490943941.33,"9.873325892857142"],[1490944001.33,"9.876674107142858"],[1490944061.33,"9.888950892857142"],[1490944121.33,"9.878534226190476"],[1490944181.33,"9.880766369047619"],[1490944241.33,"9.884858630952381"],[1490944301.33,"9.870535714285714"],[1490944361.33,"9.864769345238095"],[1490944421.33,"9.851190476190476"],[1490944481.33,"9.85249255952381"],[1490944541.33,"9.85844494047619"],[1490944601.33,"9.855840773809524"],[1490944661.33,"9.868303571428571"],[1490944721.33,"9.859188988095237"],[1490944781.33,"9.860491071428571"],[1490944841.33,"9.863467261904763"],[1490944901.33,"9.864025297619047"],[1490944961.33,"9.857514880952381"],[1490945021.33,"9.843377976190476"],[1490945081.33,"9.836123511904763"],[1490945141.33,"9.837983630952381"],[1490945201.33,"9.84077380952381"],[1490945261.33,"9.847284226190476"],[1490945321.33,"9.849702380952381"],[1490945381.33,"9.827380952380953"],[1490945441.33,"9.82124255952381"],[1490945501.33,"9.822916666666666"],[1490945561.33,"9.824962797619047"],[1490945621.33,"9.814546130952381"],[1490945681.33,"9.805989583333334"],[1490945741.33,"9.791294642857142"],[1490945801.33,"9.786458333333334"],[1490945861.33,"9.77641369047619"],[1490945921.33,"9.76655505952381"],[1490945981.33,"9.76953125"],[1490946041.33,"9.742745535714286"],[1490946101.33,"9.753162202380953"],[1490946161.33,"9.739583333333334"],[1490946221.33,"9.742931547619047"],[1490946281.33,"9.743489583333334"],[1490946341.33,"9.746837797619047"],[1490946401.33,"9.749255952380953"],[1490946461.33,"9.737165178571429"],[1490946521.33,"9.739583333333334"],[1490946581.33,"9.74311755952381"],[1490946641.33,"9.751302083333334"],[1490946701.33,"9.761346726190476"],[1490946761.33,"9.747953869047619"],[1490946821.33,"9.75093005952381"],[1490946881.33,"9.755580357142858"],[1490946941.33,"9.759858630952381"],[1490947001.33,"9.761904761904763"],[1490947061.33,"9.77641369047619"],[1490947121.33,"9.768787202380953"],[1490947181.33,"9.772879464285714"],[1490947241.33,"9.777715773809
524"],[1490947301.33,"9.779947916666666"],[1490947361.33,"9.772135416666666"],[1490947421.33,"9.77641369047619"],[1490947481.33,"9.783668154761905"],[1490947541.33,"9.780505952380953"],[1490947601.33,"9.777157738095237"],[1490947661.33,"9.759114583333334"],[1490947721.33,"9.761532738095237"],[1490947781.33,"9.763392857142858"],[1490947841.33,"9.765252976190476"],[1490947901.33,"9.760602678571429"],[1490947961.33,"9.751488095238095"],[1490948021.33,"9.757998511904763"],[1490948081.33,"9.759486607142858"],[1490948141.33,"9.754650297619047"],[1490948201.33,"9.728050595238095"],[1490948261.33,"9.73530505952381"],[1490948321.33,"9.718005952380953"],[1490948381.33,"9.732142857142858"],[1490948441.33,"9.725260416666666"],[1490948501.33,"9.728422619047619"],[1490948561.33,"9.72953869047619"],[1490948621.33,"9.733072916666666"],[1490948681.33,"9.736421130952381"],[1490948741.33,"9.749627976190476"],[1490948801.33,"9.740141369047619"],[1490948861.33,"9.74311755952381"],[1490948921.33,"9.736607142857142"],[1490948981.33,"9.744233630952381"],[1490949041.33,"9.723772321428571"],[1490949101.33,"9.731956845238095"],[1490949161.33,"9.732514880952381"],[1490949221.33,"9.734747023809524"],[1490949281.33,"9.737723214285714"],[1490949341.33,"9.737909226190476"],[1490949401.33,"9.742373511904763"],[1490949461.33,"9.744977678571429"],[1490949521.33,"9.748139880952381"],[1490949581.33,"9.751302083333334"],[1490949641.33,"9.757440476190476"],[1490949701.33,"9.756324404761905"],[1490949761.33,"9.749813988095237"],[1490949821.33,"9.739025297619047"],[1490949881.33,"9.726004464285714"],[1490949941.33,"9.728236607142858"],[1490950001.33,"9.732514880952381"],[1490950061.33,"9.735119047619047"],[1490950121.33,"9.737165178571429"],[1490950181.33,"9.739025297619047"],[1490950241.33,"9.740513392857142"],[1490950301.33,"9.749441964285714"],[1490950361.33,"9.736979166666666"],[1490950421.33,"9.741629464285714"],[1490950481.33,"9.743303571428571"],[1490950541.33,"9.74609375"],[1490950601.33,"9.75093005952381"],[1490950661.33,"9.724330357142858"],[1490950721.33,"9.726748511904763"],[1490950781.33,"9.733258928571429"],[1490950841.33,"9.744233630952381"],[1490950901.33,"9.734375"],[1490950961.33,"9.737537202380953"],[1490951021.33,"9.741071428571429"],[1490951081.33,"9.757254464285714"],[1490951141.33,"9.760044642857142"],[1490951201.33,"9.755952380952381"],[1490951261.33,"9.745349702380953"],[1490951321.33,"9.746651785714286"],[1490951381.33,"9.749441964285714"],[1490951441.33,"9.751674107142858"],[1490951501.33,"9.757998511904763"],[1490951561.33,"9.756510416666666"],[1490951621.33,"9.76264880952381"],[1490951681.33,"9.765625"],[1490951741.33,"9.757254464285714"],[1490951801.33,"9.751674107142858"],[1490951861.33,"9.754278273809524"],[1490951921.33,"9.744233630952381"],[1490951981.33,"9.745349702380953"],[1490952041.33,"9.748883928571429"],[1490952101.33,"9.753162202380953"],[1490952161.33,"9.747953869047619"],[1490952221.33,"9.750186011904763"],[1490952281.33,"9.751116071428571"],[1490952341.33,"9.753162202380953"],[1490952401.33,"9.758928571428571"],[1490952461.33,"9.758928571428571"],[1490952521.33,"9.755394345238095"],[1490952581.33,"9.758928571428571"],[1490952641.33,"9.761160714285714"],[1490952701.33,"9.763206845238095"],[1490952761.33,"9.767857142857142"],[1490952821.33,"9.765438988095237"],[1490952881.33,"9.768229166666666"],[1490952941.33,"9.780877976190476"],[1490953001.33,"9.77250744047619"],[1490953061.33,"9.784412202380953"],[1490953121.33,"9.77827380952381"],[1490953181.33,"9.781063988095237"],[1490953241.33,"9.
783668154761905"],[1490953301.33,"9.787016369047619"],[1490953361.33,"9.784970238095237"],[1490953421.33,"9.787946428571429"],[1490953481.33,"9.788690476190476"],[1490953541.33,"9.790922619047619"],[1490953601.33,"9.792596726190476"],[1490953661.33,"9.79594494047619"],[1490953721.33,"9.79780505952381"],[1490953781.33,"9.800223214285714"],[1490953841.33,"9.794828869047619"],[1490953901.33,"9.799293154761905"],[1490953961.33,"9.801525297619047"],[1490954021.33,"9.786458333333334"],[1490954081.33,"9.773809523809524"],[1490954141.33,"9.767485119047619"],[1490954201.33,"9.760044642857142"],[1490954261.33,"9.751116071428571"],[1490954321.33,"9.752790178571429"],[1490954381.33,"9.753162202380953"],[1490954441.33,"9.744419642857142"],[1490954501.33,"9.73921130952381"],[1490954561.33,"9.74125744047619"],[1490954621.33,"9.743303571428571"],[1490954681.33,"9.745535714285714"],[1490954741.33,"9.746837797619047"],[1490954801.33,"9.749255952380953"],[1490954861.33,"9.744419642857142"],[1490954921.33,"9.745349702380953"],[1490954981.33,"9.74702380952381"],[1490955041.33,"9.738467261904763"],[1490955101.33,"9.740141369047619"],[1490955161.33,"9.747767857142858"],[1490955221.33,"9.750372023809524"],[1490955281.33,"9.747767857142858"],[1490955341.33,"9.739025297619047"],[1490955401.33,"9.745349702380953"],[1490955461.33,"9.730282738095237"],[1490955521.33,"9.73139880952381"],[1490955581.33,"9.722842261904763"],[1490955641.33,"9.725818452380953"],[1490955701.33,"9.72749255952381"],[1490955761.33,"9.72953869047619"],[1490955821.33,"9.731956845238095"],[1490955881.33,"9.735677083333334"],[1490955941.33,"9.738467261904763"],[1490956001.33,"9.735863095238095"],[1490956061.33,"9.743675595238095"],[1490956121.33,"9.730840773809524"],[1490956181.33,"9.734747023809524"],[1490956241.33,"9.736235119047619"],[1490956301.33,"9.736607142857142"],[1490956361.33,"9.73921130952381"],[1490956421.33,"9.742001488095237"],[1490956481.33,"9.743675595238095"],[1490956541.33,"9.744977678571429"],[1490956601.33,"9.748697916666666"],[1490956661.33,"9.760602678571429"],[1490956721.33,"9.751302083333334"],[1490956781.33,"9.754278273809524"],[1490956841.33,"9.756324404761905"],[1490956901.33,"9.758370535714286"],[1490956961.33,"9.760416666666666"],[1490957021.33,"9.763020833333334"],[1490957081.33,"9.766183035714286"],[1490957141.33,"9.764508928571429"],[1490957201.33,"9.767299107142858"],[1490957261.33,"9.768787202380953"],[1490957321.33,"9.771019345238095"],[1490957381.33,"9.773623511904763"],[1490957441.33,"9.775111607142858"],[1490957501.33,"9.779389880952381"],[1490957561.33,"9.780691964285714"],[1490957621.33,"9.788690476190476"],[1490957681.33,"9.794828869047619"],[1490957741.33,"9.779203869047619"],[1490957801.33,"9.787016369047619"],[1490957861.33,"9.783854166666666"],[1490957921.33,"9.78515625"],[1490957981.33,"9.786644345238095"],[1490958041.33,"9.787946428571429"],[1490958101.33,"9.800409226190476"],[1490958161.33,"9.787202380952381"],[1490958221.33,"9.789806547619047"],[1490958281.33,"9.791852678571429"],[1490958341.33,"9.788876488095237"],[1490958401.33,"9.78515625"],[1490958461.33,"9.7890625"],[1490958521.33,"9.791108630952381"],[1490958581.33,"9.792596726190476"],[1490958641.33,"9.794828869047619"],[1490958701.33,"9.793154761904763"],[1490958761.33,"9.799293154761905"],[1490958821.33,"9.797247023809524"],[1490958881.33,"9.794084821428571"],[1490958941.33,"9.796875"],[1490959001.33,"9.763950892857142"],[1490959061.33,"9.765997023809524"],[1490959121.33,"9.767671130952381"],[1490959181.33,"9.77046130952381"],[1490959241.33
,"9.773809523809524"],[1490959301.33,"9.765252976190476"],[1490959361.33,"9.767485119047619"],[1490959421.33,"9.76953125"],[1490959481.33,"9.774553571428571"],[1490959541.33,"9.77734375"],[1490959601.33,"9.778459821428571"],[1490959661.33,"9.780877976190476"],[1490959721.33,"9.783296130952381"],[1490959781.33,"9.794828869047619"],[1490959841.33,"9.787016369047619"],[1490959901.33,"9.798735119047619"],[1490959961.33,"9.803013392857142"],[1490960021.33,"9.801525297619047"],[1490960081.33,"9.804873511904763"],[1490960141.33,"9.80078125"],[1490960201.33,"9.80375744047619"],[1490960261.33,"9.805059523809524"],[1490960321.33,"9.807849702380953"],[1490960381.33,"9.810825892857142"],[1490960441.33,"9.813058035714286"],[1490960501.33,"9.813616071428571"],[1490960561.33,"9.815104166666666"],[1490960621.33,"9.81733630952381"],[1490960681.33,"9.812872023809524"],[1490960741.33,"9.814546130952381"],[1490960801.33,"9.808035714285714"],[1490960861.33,"9.810081845238095"],[1490960921.33,"9.813058035714286"],[1490960981.33,"9.825892857142858"],[1490961041.33,"9.816964285714286"],[1490961101.33,"9.82421875"],[1490961161.33,"9.80952380952381"],[1490961221.33,"9.804315476190476"],[1490961281.33,"9.797619047619047"],[1490961341.33,"9.80078125"],[1490961401.33,"9.802827380952381"],[1490961461.33,"9.803199404761905"],[1490961521.33,"9.80952380952381"],[1490961581.33,"9.806919642857142"],[1490961641.33,"9.808779761904763"],[1490961701.33,"9.811197916666666"],[1490961761.33,"9.813244047619047"],[1490961821.33,"9.815662202380953"],[1490961881.33,"9.819940476190476"],[1490961941.33,"9.822172619047619"],[1490962001.33,"9.82328869047619"],[1490962061.33,"9.826822916666666"],[1490962121.33,"9.829241071428571"],[1490962181.33,"9.832589285714286"],[1490962241.33,"9.835565476190476"],[1490962301.33,"9.839471726190476"],[1490962361.33,"9.825520833333334"],[1490962421.33,"9.829427083333334"],[1490962481.33,"9.832217261904763"],[1490962541.33,"9.839285714285714"],[1490962601.33,"9.837611607142858"],[1490962661.33,"9.841145833333334"],[1490962721.33,"9.834077380952381"],[1490962781.33,"9.837239583333334"],[1490962841.33,"9.841703869047619"],[1490962901.33,"9.844308035714286"],[1490962961.33,"9.838727678571429"],[1490963021.33,"9.840587797619047"],[1490963081.33,"9.849516369047619"],[1490963141.33,"9.845238095238095"],[1490963201.33,"9.84375"],[1490963261.33,"9.838541666666666"],[1490963321.33,"9.841889880952381"],[1490963381.33,"9.846354166666666"],[1490963441.33,"9.832403273809524"],[1490963501.33,"9.833891369047619"],[1490963561.33,"9.808221726190476"],[1490963621.33,"9.812686011904763"],[1490963681.33,"9.814918154761905"],[1490963741.33,"9.817708333333334"],[1490963801.33,"9.80561755952381"],[1490963861.33,"9.80859375"],[1490963921.33,"9.811197916666666"],[1490963981.33,"9.802269345238095"],[1490964041.33,"9.798177083333334"],[1490964101.33,"9.80078125"],[1490964161.33,"9.815104166666666"],[1490964221.33,"9.806361607142858"]]}],"memory_current":[{"metric":{},"value":[1490964221.593,"9.806361607142858"]}],"cpu_values":[{"metric":{},"values":[[1490935421.446,"0.011520035833333402"],[1490935481.446,"0.010738020634921052"],[1490935541.446,"0.011830812658730162"],[1490935601.446,"0.011666519206349292"],[1490935661.446,"0.012397734365079505"],[1490935721.446,"0.012264678253967905"],[1490935781.446,"0.011701125396825458"],[1490935841.446,"0.011413869087301435"],[1490935901.446,"0.011355704404762157"],[1490935961.446,"0.01295611777777756"],[1490936021.446,"0.012283088253968812"],[1490936081.446,"0.011711742103174674"],[1490936141.4
46,"0.011066851150792879"],[1490936201.446,"0.011525933611111726"],[1490936261.446,"0.012260294246031015"],[1490936321.446,"0.011917795238095285"],[1490936381.446,"0.011402582301587626"],[1490936441.446,"0.012311798253968057"],[1490936501.446,"0.011604295476191046"],[1490936561.446,"0.012329014206349137"],[1490936621.446,"0.011401263769840977"],[1490936681.446,"0.012310593492063392"],[1490936741.446,"0.01244334305555575"],[1490936801.446,"0.01176146669320973"],[1490936861.446,"0.011186474629011792"],[1490936921.446,"0.013234800079365536"],[1490936981.446,"0.01217435722222217"],[1490937041.446,"0.011211570753967583"],[1490937101.446,"0.012066252420634934"],[1490937161.446,"0.012175381944444839"],[1490937221.446,"0.011215347936507976"],[1490937281.446,"0.012909065515873003"],[1490937341.446,"0.011718783452381023"],[1490937401.446,"0.011740557499999828"],[1490937461.446,"0.012024899960317205"],[1490937521.446,"0.011518551626984471"],[1490937581.446,"0.013295429607829826"],[1490937641.446,"0.013578758822130006"],[1490937701.446,"0.01170811908668783"],[1490937761.446,"0.011867610238095478"],[1490937821.446,"0.012601599007937034"],[1490937881.446,"0.011028959285714405"],[1490937941.446,"0.011972864523808899"],[1490938001.446,"0.012236090515873134"],[1490938061.446,"0.012468855793650629"],[1490938121.446,"0.012324049999999686"],[1490938181.446,"0.012271810317460288"],[1490938241.446,"0.013109732103174912"],[1490938301.446,"0.01201708535714284"],[1490938361.446,"0.01198280035714318"],[1490938421.446,"0.011631491547618469"],[1490938481.446,"0.012698120317460778"],[1490938541.446,"0.011908042499999686"],[1490938601.446,"0.012941332460317123"],[1490938661.446,"0.012009558055555753"],[1490938721.446,"0.011749238293651211"],[1490938781.446,"0.012597720873015857"],[1490938841.446,"0.012128174365079517"],[1490938901.446,"0.013411003452380428"],[1490938961.446,"0.012712377896825132"],[1490939021.446,"0.0126730261111118"],[1490939081.446,"0.012196438134920173"],[1490939141.446,"0.011617917341270696"],[1490939201.446,"0.012271590992062863"],[1490939261.446,"0.01196238253968261"],[1490939321.446,"0.012446522619048245"],[1490939381.446,"0.013146698134919643"],[1490939441.446,"0.013160663611111774"],[1490939501.446,"0.012921960039682278"],[1490939561.446,"0.012100972380952405"],[1490939621.446,"0.01235039095238153"],[1490939681.446,"0.013303590992062684"],[1490939741.446,"0.012064513055556225"],[1490939801.446,"0.011846763531745252"],[1490939861.446,"0.012280224007936782"],[1490939921.446,"0.012305159166666833"],[1490939981.446,"0.012107076111110887"],[1490940041.446,"0.013109447341269884"],[1490940101.446,"0.011668830198412932"],[1490940161.446,"0.011757771468254286"],[1490940221.446,"0.013607426447330252"],[1490940281.446,"0.012069082212503184"],[1490940341.446,"0.012702448174603309"],[1490940401.446,"0.012915864642857006"],[1490940461.446,"0.012882558941478554"],[1490940521.446,"0.01180430288917485"],[1490940581.446,"0.012561457142856586"],[1490940641.446,"0.013117287261905215"],[1490940701.446,"0.0119707260317455"],[1490940761.446,"0.012110876587301957"],[1490940821.446,"0.012900523174603096"],[1490940881.446,"0.012405300317460836"],[1490940941.446,"0.013397718690476127"],[1490941001.446,"0.011853019404761512"],[1490941061.446,"0.011410178968254279"],[1490941121.446,"0.01385021210317412"],[1490941181.446,"0.012158262658730703"],[1490941241.446,"0.012590782142857021"],[1490941301.446,"0.011902994444444289"],[1490941361.446,"0.012597971468253468"],[1490941421.446,"0.013460530436508394"],[1490941481.446,"0.012
871132936507318"],[1490941541.446,"0.012321937023810644"],[1490941601.446,"0.012861435992063004"],[1490941661.446,"0.011904687658730493"],[1490941721.446,"0.013068603849206292"],[1490941781.446,"0.011558027420635053"],[1490941841.446,"0.011785108134920095"],[1490941901.446,"0.013018491984126938"],[1490941961.446,"0.012803318611111494"],[1490942021.446,"0.011276595873015969"],[1490942081.446,"0.012407365753968128"],[1490942141.446,"0.01261537746031769"],[1490942201.446,"0.011981626507936492"],[1490942261.446,"0.011779192579364465"],[1490942321.446,"0.012944439365080001"],[1490942381.446,"0.012563845515873258"],[1490942441.446,"0.012490993809523204"],[1490942501.446,"0.011721826547619399"],[1490942561.446,"0.012376904523809195"],[1490942621.446,"0.012627997539682608"],[1490942681.446,"0.012353236984126971"],[1490942741.446,"0.012143749162511788"],[1490942801.446,"0.01210106380777602"],[1490942861.446,"0.01323092650793727"],[1490942921.446,"0.01217811805555557"],[1490942981.446,"0.011703709655399819"],[1490943041.446,"0.01140056596399108"],[1490943101.446,"0.011589462460317477"],[1490943161.446,"0.011424534784915178"],[1490943221.446,"0.011720420858480131"],[1490943281.446,"0.011956359603174035"],[1490943341.446,"0.011627974444444375"],[1490943401.446,"0.012056417142857899"],[1490943461.446,"0.012875421865079256"],[1490943521.446,"0.011447757222222438"],[1490943581.446,"0.011686728412698438"],[1490943641.446,"0.012264428214285543"],[1490943701.446,"0.011396086150793258"],[1490943761.446,"0.012637377857143453"],[1490943821.446,"0.012229487817460189"],[1490943881.446,"0.012519327516820155"],[1490943941.446,"0.011632154440677021"],[1490944001.446,"0.0127011905614214"],[1490944061.446,"0.012041664776432408"],[1490944121.446,"0.011550796183789442"],[1490944181.446,"0.012340807579364546"],[1490944241.446,"0.012514561706348858"],[1490944301.446,"0.011591095515873378"],[1490944361.446,"0.011562522896825472"],[1490944421.446,"0.012653687499999684"],[1490944481.446,"0.012597878095237767"],[1490944541.446,"0.011373836746032411"],[1490944601.446,"0.011489111309523512"],[1490944661.446,"0.012365606547618906"],[1490944721.446,"0.011246835793650788"],[1490944781.446,"0.011556645833333596"],[1490944841.446,"0.0114839880952384"],[1490944901.446,"0.011559932103174322"],[1490944961.446,"0.011456621547618827"],[1490945021.446,"0.011137903531746323"],[1490945081.446,"0.011371503134920238"],[1490945141.446,"0.01262392527777806"],[1490945201.446,"0.011231213571428417"],[1490945261.446,"0.011834045595238011"],[1490945321.446,"0.011222574087301793"],[1490945381.446,"0.01139294579365124"],[1490945441.446,"0.011876671865079205"],[1490945501.446,"0.012003088888888104"],[1490945561.446,"0.011232171746032069"],[1490945621.446,"0.01189458067460394"],[1490945681.446,"0.011593709801586787"],[1490945741.446,"0.01179023611111146"],[1490945801.446,"0.012056340952381187"],[1490945861.446,"0.011755026706348978"],[1490945921.446,"0.011906753412698057"],[1490945981.446,"0.011362850850868408"],[1490946041.446,"0.011567284784873766"],[1490946101.446,"0.01159940924603172"],[1490946161.446,"0.01169248444646143"],[1490946221.446,"0.011294826570231075"],[1490946281.446,"0.011797972936507535"],[1490946341.446,"0.011732454126984091"],[1490946401.446,"0.011992103412699077"],[1490946461.446,"0.011787900634920185"],[1490946521.446,"0.01170581265873045"],[1490946581.446,"0.011391009603175007"],[1490946641.446,"0.01205839841269773"],[1490946701.446,"0.01188169805555573"],[1490946761.446,"0.011459351746031153"],[1490946821.446,"0.0120892510714292
55"],[1490946881.446,"0.011159798611111122"],[1490946941.446,"0.012261993650793439"],[1490947001.446,"0.011150941865079526"],[1490947061.446,"0.011784560238095428"],[1490947121.446,"0.01146369333333352"],[1490947181.446,"0.011946112341269969"],[1490947241.446,"0.012244168452380742"],[1490947301.446,"0.01108276087301507"],[1490947361.446,"0.011391418571428976"],[1490947421.446,"0.012042411525379642"],[1490947481.446,"0.012082919141039653"],[1490947541.446,"0.011615924682540189"],[1490947601.446,"0.01218819496031727"],[1490947661.446,"0.011292488293650517"],[1490947721.446,"0.011232974365079479"],[1490947781.446,"0.011638264880952223"],[1490947841.446,"0.0115353722619047"],[1490947901.446,"0.011426710952381045"],[1490947961.446,"0.0121381246428574"],[1490948021.446,"0.011812514087301832"],[1490948081.446,"0.012050580317459442"],[1490948141.446,"0.011855329166666742"],[1490948201.446,"0.011649919960317898"],[1490948261.446,"0.01163187396825391"],[1490948321.446,"0.011266725634920935"],[1490948381.446,"0.011934722460317146"],[1490948441.446,"0.011368148333333088"],[1490948501.446,"0.011662377698413048"],[1490948561.446,"0.011039417341269188"],[1490948621.446,"0.012176113174603589"],[1490948681.446,"0.011265313531746158"],[1490948741.446,"0.01158711781746033"],[1490948801.446,"0.011557390912698215"],[1490948861.446,"0.012131684804188454"],[1490948921.446,"0.011474324082027133"],[1490948981.446,"0.011376334484127639"],[1490949041.446,"0.011627233571428175"],[1490949101.446,"0.012499916785714077"],[1490949161.446,"0.011920621706348947"],[1490949221.446,"0.011574053410790661"],[1490949281.446,"0.011837460242165967"],[1490949341.446,"0.011227153174603937"],[1490949401.446,"0.011635896944444115"],[1490949461.446,"0.011701339047618983"],[1490949521.446,"0.011847283650793895"],[1490949581.446,"0.0116057894841271"],[1490949641.446,"0.011789695753968094"],[1490949701.446,"0.011279284841269992"],[1490949761.446,"0.011470807460317041"],[1490949821.446,"0.012172255515873568"],[1490949881.446,"0.011721892103174175"],[1490949941.446,"0.010727560317460336"],[1490950001.446,"0.011509186269841303"],[1490950061.446,"0.01188623087301566"],[1490950121.446,"0.011476948452380968"],[1490950181.446,"0.01211593166666722"],[1490950241.446,"0.011757469444444444"],[1490950301.446,"0.011519936865079109"],[1490950361.446,"0.01165834781746044"],[1490950421.446,"0.010831068928571068"],[1490950481.446,"0.011977692023809912"],[1490950541.446,"0.011828264880952136"],[1490950601.446,"0.01191921916666625"],[1490950661.446,"0.011901336547619379"],[1490950721.446,"0.011776620238095158"],[1490950781.446,"0.011911536031746153"],[1490950841.446,"0.011467936309523809"],[1490950901.446,"0.012163667023809579"],[1490950961.446,"0.0116551746825399"],[1490951021.446,"0.011799408095237739"],[1490951081.446,"0.011845631309524084"],[1490951141.446,"0.011289116626983809"],[1490951201.446,"0.012258327777777984"],[1490951261.446,"0.012265819682539036"],[1490951321.446,"0.011346034166667811"],[1490951381.446,"0.011996446111110597"],[1490951441.446,"0.011511485714285046"],[1490951501.446,"0.011980616349206635"],[1490951561.446,"0.011565376031746316"],[1490951621.446,"0.010918043373016443"],[1490951681.446,"0.011479107380951632"],[1490951741.446,"0.012467024051997748"],[1490951801.446,"0.01235313125400671"],[1490951861.446,"0.012167793061507889"],[1490951921.446,"0.01249734373015914"],[1490951981.446,"0.011414617499999877"],[1490952041.446,"0.012559693849205949"],[1490952101.446,"0.012135384801587835"],[1490952161.446,"0.01195310698412663"],[149095222
1.446,"0.011996730515873409"],[1490952281.446,"0.012245181626984071"],[1490952341.446,"0.01172794166666644"],[1490952401.446,"0.012153839325397124"],[1490952461.446,"0.01287662682539674"],[1490952521.446,"0.011412833611110576"],[1490952581.446,"0.0115385753968256"],[1490952641.446,"0.011953797142857927"],[1490952701.446,"0.012210606230158325"],[1490952761.446,"0.012193429836568915"],[1490952821.446,"0.01175164000191546"],[1490952881.446,"0.011686968928571266"],[1490952941.446,"0.01204885615079335"],[1490953001.446,"0.010858237182540066"],[1490953061.446,"0.012570554523809901"],[1490953121.446,"0.011606933412697877"],[1490953181.446,"0.011895175039682713"],[1490953241.446,"0.011877423888888992"],[1490953301.446,"0.01134354857142876"],[1490953361.446,"0.011999752857142089"],[1490953421.446,"0.011927079960317739"],[1490953481.446,"0.01172722273809559"],[1490953541.446,"0.0114388174999997"],[1490953601.446,"0.012584772738095138"],[1490953661.446,"0.011858990837323214"],[1490953721.446,"0.011489406427467985"],[1490953781.446,"0.011673106071428765"],[1490953841.446,"0.012389803452380168"],[1490953901.446,"0.010877735714285755"],[1490953961.446,"0.012098601984127518"],[1490954021.446,"0.011876002539682478"],[1490954081.446,"0.0119792138492057"],[1490954141.446,"0.01116768142857198"],[1490954201.446,"0.011819058452381173"],[1490954261.446,"0.011543723055555002"],[1490954321.446,"0.011877097777778114"],[1490954381.446,"0.011255818690476465"],[1490954441.446,"0.011544411269840424"],[1490954501.446,"0.011844739246031948"],[1490954561.446,"0.012498686626984624"],[1490954621.446,"0.011012790753967753"],[1490954681.446,"0.011763483769841236"],[1490954741.446,"0.011742064880952764"],[1490954801.446,"0.011329697023809454"],[1490954861.446,"0.011616721150793869"],[1490954921.446,"0.011935843650793056"],[1490954981.446,"0.012041806150794254"],[1490955041.446,"0.011776362817460298"],[1490955101.446,"0.011507964920634838"],[1490955161.446,"0.012249892380951723"],[1490955221.446,"0.011680689451964254"],[1490955281.446,"0.011966289381797203"],[1490955341.446,"0.011113054447726804"],[1490955401.446,"0.012155607703748966"],[1490955461.446,"0.011851554722222412"],[1490955521.446,"0.011899298531746077"],[1490955581.446,"0.01202313674603201"],[1490955641.446,"0.011739823253968055"],[1490955701.446,"0.011866135595237215"],[1490955761.446,"0.012171682563083994"],[1490955821.446,"0.01125473955952014"],[1490955881.446,"0.011791852817460289"],[1490955941.446,"0.011389896547619342"],[1490956001.446,"0.011801524404761971"],[1490956061.446,"0.011788201388888577"],[1490956121.446,"0.011472721388889214"],[1490956181.446,"0.012352298174603236"],[1490956241.446,"0.011831984404761721"],[1490956301.446,"0.0114478640476188"],[1490956361.446,"0.012315896944444986"],[1490956421.446,"0.01184387992063444"],[1490956481.446,"0.0108170579365078"],[1490956541.446,"0.012441825119047971"],[1490956601.446,"0.011650502579365023"],[1490956661.446,"0.011244622936507553"],[1490956721.446,"0.01138462460317496"],[1490956781.446,"0.012361013348424437"],[1490956841.446,"0.011687763677888905"],[1490956901.446,"0.011387440952381297"],[1490956961.446,"0.012246620039682158"],[1490957021.446,"0.010769535198412467"],[1490957081.446,"0.012311013690477024"],[1490957141.446,"0.011455958968253554"],[1490957201.446,"0.012126715198413286"],[1490957261.446,"0.011078292499999627"],[1490957321.446,"0.012041933253967746"],[1490957381.446,"0.01147051317460329"],[1490957441.446,"0.01173451460317538"],[1490957501.446,"0.011660740317459825"],[1490957561.446,"0.011851131
269840753"],[1490957621.446,"0.012117949444444812"],[1490957681.446,"0.011214277301587397"],[1490957741.446,"0.011935565277777841"],[1490957801.446,"0.011180848809523986"],[1490957861.446,"0.011540955039682404"],[1490957921.446,"0.011678924523809829"],[1490957981.446,"0.01175049698412655"],[1490958041.446,"0.01179233821428546"],[1490958101.446,"0.011217207341269743"],[1490958161.446,"0.011623496111110998"],[1490958221.446,"0.011751017182540137"],[1490958281.446,"0.011548055515872839"],[1490958341.446,"0.01157145297619062"],[1490958401.446,"0.011809365079364814"],[1490958461.446,"0.011367088134920926"],[1490958521.446,"0.011220626785714515"],[1490958581.446,"0.012502413531745657"],[1490958641.446,"0.011674712222222085"],[1490958701.446,"0.010840117777778147"],[1490958761.446,"0.01169669242063464"],[1490958821.446,"0.01206404448412709"],[1490958881.446,"0.011476003253967956"],[1490958941.446,"0.011927363650794281"],[1490959001.446,"0.011834540039682623"],[1490959061.446,"0.011952310396106811"],[1490959121.446,"0.011641002569963536"],[1490959181.446,"0.011215335912698408"],[1490959241.446,"0.011801235515873079"],[1490959301.446,"0.012109150079365269"],[1490959361.446,"0.011696530238095701"],[1490959421.446,"0.01188721699431308"],[1490959481.446,"0.011013023946272025"],[1490959541.446,"0.011927455988174854"],[1490959601.446,"0.011773952156046168"],[1490959661.446,"0.011311449525742057"],[1490959721.446,"0.011926485873016056"],[1490959781.446,"0.012208613174603443"],[1490959841.446,"0.011077256706349554"],[1490959901.446,"0.012141572896825473"],[1490959961.446,"0.011884196547619123"],[1490960021.446,"0.01182910611111061"],[1490960081.446,"0.011089906190476237"],[1490960141.446,"0.011485851349206303"],[1490960201.446,"0.011621675079365073"],[1490960261.446,"0.011420984246031282"],[1490960321.446,"0.011702707664224543"],[1490960381.446,"0.011122996101531552"],[1490960441.446,"0.011923133293650747"],[1490960501.446,"0.012209551587301823"],[1490960561.446,"0.011541768293650705"],[1490960621.446,"0.01133343007936486"],[1490960681.446,"0.011718844880952742"],[1490960741.446,"0.01170618126984048"],[1490960801.446,"0.01158023575396868"],[1490960861.446,"0.012154581865079351"],[1490960921.446,"0.011287024246031918"],[1490960981.446,"0.012035483412697787"],[1490961041.446,"0.01206407186508005"],[1490961101.446,"0.011742228333332922"],[1490961161.446,"0.011460450952381294"],[1490961221.446,"0.011752177539682223"],[1490961281.446,"0.012416623373015778"],[1490961341.446,"0.01134374146825419"],[1490961401.446,"0.011742214642857577"],[1490961461.446,"0.01157076337301528"],[1490961521.446,"0.011251291190475883"],[1490961581.446,"0.010835279404761772"],[1490961641.446,"0.012082314722223412"],[1490961701.446,"0.011244282817460054"],[1490961761.446,"0.012600352738094536"],[1490961821.446,"0.011595374841270692"],[1490961881.446,"0.012047435158729298"],[1490961941.446,"0.012117879285714984"],[1490962001.446,"0.011105805912698236"],[1490962061.446,"0.011228379365079935"],[1490962121.446,"0.012051188888888457"],[1490962181.446,"0.011811605198411965"],[1490962241.446,"0.011438638690477312"],[1490962301.446,"0.011535638928571016"],[1490962361.446,"0.011846252277212543"],[1490962421.446,"0.011137096779830425"],[1490962481.446,"0.011301488807399701"],[1490962541.446,"0.011706436349206364"],[1490962601.446,"0.011607870952381014"],[1490962661.446,"0.01165941666666676"],[1490962721.446,"0.011457761706349363"],[1490962781.446,"0.012004376428571304"],[1490962841.446,"0.012380191230158676"],[1490962901.446,"0.011650816111111262
"],[1490962961.446,"0.011339834484126858"],[1490963021.446,"0.011815001031746352"],[1490963081.446,"0.01215702742063424"],[1490963141.446,"0.011112767612387399"],[1490963201.446,"0.011991515394890143"],[1490963261.446,"0.011573327579365182"],[1490963321.446,"0.011559778809523533"],[1490963381.446,"0.012400119444444207"],[1490963441.446,"0.011127036507936056"],[1490963501.446,"0.012095518055556944"],[1490963561.446,"0.011203742460316668"],[1490963621.446,"0.012493672738095584"],[1490963681.446,"0.012086427023809085"],[1490963741.446,"0.01073350408730215"],[1490963801.446,"0.011784052619047683"],[1490963861.446,"0.011817165277777068"],[1490963921.446,"0.01162805619047661"],[1490963981.446,"0.01141054027777739"],[1490964041.446,"0.012398790952381392"],[1490964101.446,"0.011081906428571691"],[1490964161.446,"0.012049610714285322"],[1490964221.446,"0.011764468492063805"]]}],"cpu_current":[{"metric":{},"value":[1490964221.765,"0.011764468492063801"]}]},"last_update":"2017-03-31T12:43:41.618Z"}
diff --git a/spec/helpers/blob_helper_spec.rb b/spec/helpers/blob_helper_spec.rb
index bead7948486..508aeb7cf67 100644
--- a/spec/helpers/blob_helper_spec.rb
+++ b/spec/helpers/blob_helper_spec.rb
@@ -73,7 +73,7 @@ describe BlobHelper do
let(:project) { create(:project, :repository, namespace: namespace) }
before do
- allow(self).to receive(:current_user).and_return(double)
+ allow(self).to receive(:current_user).and_return(nil)
allow(self).to receive(:can_collaborate_with_project?).and_return(true)
end
diff --git a/spec/helpers/events_helper_spec.rb b/spec/helpers/events_helper_spec.rb
index 70443d27f33..a7c3c281083 100644
--- a/spec/helpers/events_helper_spec.rb
+++ b/spec/helpers/events_helper_spec.rb
@@ -2,8 +2,10 @@ require 'spec_helper'
describe EventsHelper do
describe '#event_note' do
+ let(:user) { build(:user) }
+
before do
- allow(helper).to receive(:current_user).and_return(double)
+ allow(helper).to receive(:current_user).and_return(user)
end
it 'displays one line of plain text without alteration' do
@@ -60,11 +62,26 @@ describe EventsHelper do
expect(helper.event_note(input)).to eq(expected)
end
- it 'preserves style attribute within a tag' do
- input = '<span class="" style="background-color: #44ad8e; color: #FFFFFF;"></span>'
- expected = '<p><span style="background-color: #44ad8e; color: #FFFFFF;"></span></p>'
+ context 'labels formatting' do
+ let(:input) { 'this should be ~label_1' }
- expect(helper.event_note(input)).to eq(expected)
+ def format_event_note(project)
+ create(:label, title: 'label_1', project: project)
+
+ helper.event_note(input, { project: project })
+ end
+
+ it 'preserves style attribute for a label that can be accessed by current_user' do
+ project = create(:empty_project, :public)
+
+ expect(format_event_note(project)).to match(/span class=.*style=.*/)
+ end
+
+  it 'does not style a label that cannot be accessed by current_user' do
+ project = create(:empty_project, :private)
+
+ expect(format_event_note(project)).to eq("<p>#{input}</p>")
+ end
end
end
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index fc6ad6419ac..44312ada438 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -167,6 +167,7 @@ describe ProjectsHelper do
before do
allow(project).to receive(:repository_storage_path).and_return('/base/repo/path')
+ allow(Settings.shared).to receive(:[]).with('path').and_return('/base/repo/export/path')
end
it 'removes the repo path' do
@@ -175,6 +176,13 @@ describe ProjectsHelper do
expect(sanitize_repo_path(project, import_error)).to eq('Could not clone [REPOS PATH]/namespace/test.git')
end
+
+ it 'removes the temporary repo path used for uploads/exports' do
+ repo = '/base/repo/export/path/tmp/project_exports/uploads/test.tar.gz'
+ import_error = "Unable to decompress #{repo}\n"
+
+ expect(sanitize_repo_path(project, import_error)).to eq('Unable to decompress [REPO EXPORT PATH]/uploads/test.tar.gz')
+ end
end
describe '#last_push_event' do
diff --git a/spec/javascripts/blob/3d_viewer/mesh_object_spec.js b/spec/javascripts/blob/3d_viewer/mesh_object_spec.js
new file mode 100644
index 00000000000..d1ebae33dab
--- /dev/null
+++ b/spec/javascripts/blob/3d_viewer/mesh_object_spec.js
@@ -0,0 +1,42 @@
+import {
+ BoxGeometry,
+} from 'three/build/three.module';
+import MeshObject from '~/blob/3d_viewer/mesh_object';
+
+describe('Mesh object', () => {
+ it('defaults to non-wireframe material', () => {
+ const object = new MeshObject(
+ new BoxGeometry(10, 10, 10),
+ );
+
+ expect(object.material.wireframe).toBeFalsy();
+ });
+
+  it('changes to wireframe material', () => {
+ const object = new MeshObject(
+ new BoxGeometry(10, 10, 10),
+ );
+
+ object.changeMaterial('wireframe');
+
+ expect(object.material.wireframe).toBeTruthy();
+ });
+
+ it('scales object down', () => {
+ const object = new MeshObject(
+ new BoxGeometry(10, 10, 10),
+ );
+ const radius = object.geometry.boundingSphere.radius;
+
+ expect(radius).not.toBeGreaterThan(4);
+ });
+
+ it('does not scale object down', () => {
+ const object = new MeshObject(
+ new BoxGeometry(1, 1, 1),
+ );
+ const radius = object.geometry.boundingSphere.radius;
+
+ expect(radius).toBeLessThan(1);
+ });
+});
diff --git a/spec/javascripts/blob/pdf/index_spec.js b/spec/javascripts/blob/pdf/index_spec.js
new file mode 100644
index 00000000000..d3a4d04345b
--- /dev/null
+++ b/spec/javascripts/blob/pdf/index_spec.js
@@ -0,0 +1,80 @@
+import renderPDF from '~/blob/pdf';
+import testPDF from './test.pdf';
+
+describe('PDF renderer', () => {
+ let viewer;
+ let app;
+
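+  // Polls every 100ms until the PDF app reports it has finished loading, then signals Jasmine via done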
+ const checkLoaded = (done) => {
+ if (app.loading) {
+ setTimeout(() => {
+ checkLoaded(done);
+ }, 100);
+ } else {
+ done();
+ }
+ };
+
+ preloadFixtures('static/pdf_viewer.html.raw');
+
+ beforeEach(() => {
+ loadFixtures('static/pdf_viewer.html.raw');
+ viewer = document.getElementById('js-pdf-viewer');
+ viewer.dataset.endpoint = testPDF;
+ });
+
+ it('shows loading icon', () => {
+ renderPDF();
+
+ expect(
+ document.querySelector('.loading'),
+ ).not.toBeNull();
+ });
+
+ describe('successful response', () => {
+ beforeEach((done) => {
+ app = renderPDF();
+
+ checkLoaded(done);
+ });
+
+ it('does not show loading icon', () => {
+ expect(
+ document.querySelector('.loading'),
+ ).toBeNull();
+ });
+
+ it('renders the PDF', () => {
+ expect(
+ document.querySelector('.pdf-viewer'),
+ ).not.toBeNull();
+ });
+
+ it('renders the PDF page', () => {
+ expect(
+ document.querySelector('.pdf-page'),
+ ).not.toBeNull();
+ });
+ });
+
+ describe('error getting file', () => {
+ beforeEach((done) => {
+ viewer.dataset.endpoint = 'invalid/endpoint';
+ app = renderPDF();
+
+ checkLoaded(done);
+ });
+
+ it('does not show loading icon', () => {
+ expect(
+ document.querySelector('.loading'),
+ ).toBeNull();
+ });
+
+ it('shows error message', () => {
+ expect(
+ document.querySelector('.md').textContent.trim(),
+ ).toBe('An error occured whilst loading the file. Please try again later.');
+ });
+ });
+});
diff --git a/spec/javascripts/blob/pdf/test.pdf b/spec/javascripts/blob/pdf/test.pdf
new file mode 100644
index 00000000000..eb3d147fde3
--- /dev/null
+++ b/spec/javascripts/blob/pdf/test.pdf
Binary files differ
diff --git a/spec/javascripts/blob/sketch/index_spec.js b/spec/javascripts/blob/sketch/index_spec.js
new file mode 100644
index 00000000000..0e4431548c4
--- /dev/null
+++ b/spec/javascripts/blob/sketch/index_spec.js
@@ -0,0 +1,118 @@
+/* eslint-disable no-new */
+import JSZip from 'jszip';
+import SketchLoader from '~/blob/sketch';
+
+describe('Sketch viewer', () => {
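+  // Helper: generates an ArrayBuffer from the given JSZip instance, resolves the stubbed getZipFile promise with it, then signals Jasmine shortly afterwards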
+ const generateZipFileArrayBuffer = (zipFile, resolve, done) => {
+ zipFile
+ .generateAsync({ type: 'arrayBuffer' })
+ .then((content) => {
+ resolve(content);
+
+ setTimeout(() => {
+ done();
+ }, 100);
+ });
+ };
+
+ preloadFixtures('static/sketch_viewer.html.raw');
+
+ beforeEach(() => {
+ loadFixtures('static/sketch_viewer.html.raw');
+ });
+
+ describe('with error message', () => {
+ beforeEach((done) => {
+ spyOn(SketchLoader.prototype, 'getZipFile').and.callFake(() => new Promise((resolve, reject) => {
+ reject();
+
+ setTimeout(() => {
+ done();
+ });
+ }));
+
+ new SketchLoader(document.getElementById('js-sketch-viewer'));
+ });
+
+ it('renders error message', () => {
+ expect(
+ document.querySelector('#js-sketch-viewer p'),
+ ).not.toBeNull();
+
+ expect(
+ document.querySelector('#js-sketch-viewer p').textContent.trim(),
+ ).toContain('Cannot show preview.');
+ });
+
+    it('removes the loading icon', () => {
+ expect(
+ document.querySelector('.js-loading-icon'),
+ ).toBeNull();
+ });
+ });
+
+ describe('success', () => {
+ beforeEach((done) => {
+ spyOn(SketchLoader.prototype, 'getZipFile').and.callFake(() => new Promise((resolve) => {
+ const zipFile = new JSZip();
+ zipFile.folder('previews')
+ .file('preview.png', 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAMAAAAoyzS7AAAAA1BMVEUAAACnej3aAAAAAXRSTlMAQObYZgAAAA1JREFUeNoBAgD9/wAAAAIAAVMrnDAAAAAASUVORK5CYII=', {
+ base64: true,
+ });
+
+ generateZipFileArrayBuffer(zipFile, resolve, done);
+ }));
+
+ new SketchLoader(document.getElementById('js-sketch-viewer'));
+ });
+
+ it('does not render error message', () => {
+ expect(
+ document.querySelector('#js-sketch-viewer p'),
+ ).toBeNull();
+ });
+
+    it('removes the loading icon', () => {
+ expect(
+ document.querySelector('.js-loading-icon'),
+ ).toBeNull();
+ });
+
+ it('renders preview img', () => {
+ const img = document.querySelector('#js-sketch-viewer img');
+
+ expect(img).not.toBeNull();
+ expect(img.classList.contains('img-responsive')).toBeTruthy();
+ });
+
+ it('renders link to image', () => {
+ const img = document.querySelector('#js-sketch-viewer img');
+ const link = document.querySelector('#js-sketch-viewer a');
+
+ expect(link.href).toBe(img.src);
+ expect(link.target).toBe('_blank');
+ });
+ });
+
+ describe('incorrect file', () => {
+ beforeEach((done) => {
+ spyOn(SketchLoader.prototype, 'getZipFile').and.callFake(() => new Promise((resolve) => {
+ const zipFile = new JSZip();
+
+ generateZipFileArrayBuffer(zipFile, resolve, done);
+ }));
+
+ new SketchLoader(document.getElementById('js-sketch-viewer'));
+ });
+
+ it('renders error message', () => {
+ expect(
+ document.querySelector('#js-sketch-viewer p'),
+ ).not.toBeNull();
+
+ expect(
+ document.querySelector('#js-sketch-viewer p').textContent.trim(),
+ ).toContain('Cannot show preview.');
+ });
+ });
+});
diff --git a/spec/javascripts/boards/board_list_spec.js b/spec/javascripts/boards/board_list_spec.js
new file mode 100644
index 00000000000..3f598887603
--- /dev/null
+++ b/spec/javascripts/boards/board_list_spec.js
@@ -0,0 +1,201 @@
+/* global BoardService */
+/* global boardsMockInterceptor */
+/* global List */
+/* global listObj */
+/* global ListIssue */
+import Vue from 'vue';
+import _ from 'underscore';
+import Sortable from 'vendor/Sortable';
+import BoardList from '~/boards/components/board_list';
+import eventHub from '~/boards/eventhub';
+import '~/boards/mixins/sortable_default_options';
+import '~/boards/models/issue';
+import '~/boards/models/list';
+import '~/boards/stores/boards_store';
+import './mock_data';
+
+window.Sortable = Sortable;
+
+describe('Board list component', () => {
+ let component;
+
+ beforeEach((done) => {
+ const el = document.createElement('div');
+
+ document.body.appendChild(el);
+ Vue.http.interceptors.push(boardsMockInterceptor);
+ gl.boardService = new BoardService('/test/issue-boards/board', '', '1');
+ gl.issueBoards.BoardsStore.create();
+ gl.IssueBoardsApp = new Vue();
+
+ const BoardListComp = Vue.extend(BoardList);
+ const list = new List(listObj);
+ const issue = new ListIssue({
+ title: 'Testing',
+ iid: 1,
+ confidential: false,
+ labels: [],
+ });
+ list.issuesSize = 1;
+ list.issues.push(issue);
+
+ component = new BoardListComp({
+ el,
+ propsData: {
+ disabled: false,
+ list,
+ issues: list.issues,
+ loading: false,
+ issueLinkBase: '/issues',
+ rootPath: '/',
+ },
+ }).$mount();
+
+ Vue.nextTick(() => {
+ done();
+ });
+ });
+
+ afterEach(() => {
+ Vue.http.interceptors = _.without(Vue.http.interceptors, boardsMockInterceptor);
+ });
+
+ it('renders component', () => {
+ expect(
+ component.$el.classList.contains('board-list-component'),
+ ).toBe(true);
+ });
+
+ it('renders loading icon', (done) => {
+ component.loading = true;
+
+ Vue.nextTick(() => {
+ expect(
+ component.$el.querySelector('.board-list-loading'),
+ ).not.toBeNull();
+
+ done();
+ });
+ });
+
+ it('renders issues', () => {
+ expect(
+ component.$el.querySelectorAll('.card').length,
+ ).toBe(1);
+ });
+
+ it('sets data attribute with issue id', () => {
+ expect(
+ component.$el.querySelector('.card').getAttribute('data-issue-id'),
+ ).toBe('1');
+ });
+
+ it('shows new issue form', (done) => {
+ component.toggleForm();
+
+ Vue.nextTick(() => {
+ expect(
+ component.$el.querySelector('.board-new-issue-form'),
+ ).not.toBeNull();
+
+ expect(
+ component.$el.querySelector('.is-smaller'),
+ ).not.toBeNull();
+
+ done();
+ });
+ });
+
+ it('shows new issue form after eventhub event', (done) => {
+ eventHub.$emit(`hide-issue-form-${component.list.id}`);
+
+ Vue.nextTick(() => {
+ expect(
+ component.$el.querySelector('.board-new-issue-form'),
+ ).not.toBeNull();
+
+ expect(
+ component.$el.querySelector('.is-smaller'),
+ ).not.toBeNull();
+
+ done();
+ });
+ });
+
+ it('does not show new issue form for closed list', (done) => {
+ component.list.type = 'closed';
+ component.toggleForm();
+
+ Vue.nextTick(() => {
+ expect(
+ component.$el.querySelector('.board-new-issue-form'),
+ ).toBeNull();
+
+ done();
+ });
+ });
+
+ it('shows count list item', (done) => {
+ component.showCount = true;
+
+ Vue.nextTick(() => {
+ expect(
+ component.$el.querySelector('.board-list-count'),
+ ).not.toBeNull();
+
+ expect(
+ component.$el.querySelector('.board-list-count').textContent.trim(),
+ ).toBe('Showing all issues');
+
+ done();
+ });
+ });
+
+ it('shows how many more issues to load', (done) => {
+ component.showCount = true;
+ component.list.issuesSize = 20;
+
+ Vue.nextTick(() => {
+ expect(
+ component.$el.querySelector('.board-list-count').textContent.trim(),
+ ).toBe('Showing 1 of 20 issues');
+
+ done();
+ });
+ });
+
+ it('loads more issues after scrolling', (done) => {
+ spyOn(component.list, 'nextPage');
+ component.$refs.list.style.height = '100px';
+ component.$refs.list.style.overflow = 'scroll';
+
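+    // Push 19 extra issue entries so the 100px-high list overflows and scrolling can trigger nextPage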
+ for (let i = 0; i < 19; i += 1) {
+ const issue = component.list.issues[0];
+ issue.id += 1;
+ component.list.issues.push(issue);
+ }
+
+ Vue.nextTick(() => {
+ component.$refs.list.scrollTop = 20000;
+
+ setTimeout(() => {
+ expect(component.list.nextPage).toHaveBeenCalled();
+
+ done();
+ });
+ });
+ });
+
+ it('shows loading more spinner', (done) => {
+ component.showCount = true;
+ component.list.loadingMore = true;
+
+ Vue.nextTick(() => {
+ expect(
+ component.$el.querySelector('.board-list-count .fa-spinner'),
+ ).not.toBeNull();
+
+ done();
+ });
+ });
+});
diff --git a/spec/javascripts/build_spec.js b/spec/javascripts/build_spec.js
index 549c7af8ea8..cb48f14fd9a 100644
--- a/spec/javascripts/build_spec.js
+++ b/spec/javascripts/build_spec.js
@@ -72,17 +72,21 @@ describe('Build', () => {
it('displays the initial build trace', () => {
expect($.ajax.calls.count()).toBe(1);
const [{ url, dataType, success, context }] = $.ajax.calls.argsFor(0);
- expect(url).toBe(`${BUILD_URL}.json`);
+ expect(url).toBe(
+ `${BUILD_URL}/trace.json`,
+ );
expect(dataType).toBe('json');
expect(success).toEqual(jasmine.any(Function));
+ spyOn(gl.utils, 'setCiStatusFavicon').and.callFake(() => {});
- success.call(context, { trace_html: '<span>Example</span>', status: 'running' });
+ success.call(context, { html: '<span>Example</span>', status: 'running' });
expect($('#build-trace .js-build-output').text()).toMatch(/Example/);
});
it('removes the spinner', () => {
const [{ success, context }] = $.ajax.calls.argsFor(0);
+ spyOn(gl.utils, 'setCiStatusFavicon').and.callFake(() => {});
success.call(context, { trace_html: '<span>Example</span>', status: 'success' });
expect($('.js-build-refresh').length).toBe(0);
diff --git a/spec/javascripts/environments/environment_actions_spec.js b/spec/javascripts/environments/environment_actions_spec.js
index 13840b42bd6..6348d97b0a5 100644
--- a/spec/javascripts/environments/environment_actions_spec.js
+++ b/spec/javascripts/environments/environment_actions_spec.js
@@ -19,6 +19,11 @@ describe('Actions Component', () => {
name: 'foo',
play_path: '#',
},
+ {
+ name: 'foo bar',
+ play_path: 'url',
+ playable: false,
+ },
];
spy = jasmine.createSpy('spy').and.returnValue(Promise.resolve());
@@ -49,4 +54,14 @@ describe('Actions Component', () => {
expect(spy).toHaveBeenCalledWith(actionsMock[0].play_path);
});
+
+ it('should render a disabled action when it\'s not playable', () => {
+ expect(
+ component.$el.querySelector('.dropdown-menu li:last-child button').getAttribute('disabled'),
+ ).toEqual('disabled');
+
+ expect(
+ component.$el.querySelector('.dropdown-menu li:last-child button').classList.contains('disabled'),
+ ).toEqual(true);
+ });
});
diff --git a/spec/javascripts/environments/environment_spec.js b/spec/javascripts/environments/environment_spec.js
index 9bcf617fcd8..4431baa4b96 100644
--- a/spec/javascripts/environments/environment_spec.js
+++ b/spec/javascripts/environments/environment_spec.js
@@ -1,7 +1,7 @@
import Vue from 'vue';
import '~/flash';
import EnvironmentsComponent from '~/environments/components/environment';
-import { environment } from './mock_data';
+import { environment, folder } from './mock_data';
describe('Environment', () => {
preloadFixtures('static/environments/environments.html.raw');
@@ -179,4 +179,101 @@ describe('Environment', () => {
}, 0);
});
});
+
+ describe('expandable folders', () => {
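+    // Stubs the environments endpoint so the response contains a single folder plus pagination headers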
+ const environmentsResponseInterceptor = (request, next) => {
+ next(request.respondWith(JSON.stringify({
+ environments: [folder],
+ stopped_count: 0,
+ available_count: 1,
+ }), {
+ status: 200,
+ headers: {
+ 'X-nExt-pAge': '2',
+ 'x-page': '1',
+ 'X-Per-Page': '1',
+ 'X-Prev-Page': '',
+ 'X-TOTAL': '37',
+ 'X-Total-Pages': '2',
+ },
+ }));
+ };
+
+ beforeEach(() => {
+ Vue.http.interceptors.push(environmentsResponseInterceptor);
+ component = new EnvironmentsComponent({
+ el: document.querySelector('#environments-list-view'),
+ });
+ });
+
+ afterEach(() => {
+ Vue.http.interceptors = _.without(
+ Vue.http.interceptors, environmentsResponseInterceptor,
+ );
+ });
+
+ it('should open a closed folder', (done) => {
+ setTimeout(() => {
+ component.$el.querySelector('.folder-name').click();
+
+ Vue.nextTick(() => {
+ expect(
+ component.$el.querySelector('.folder-icon i.fa-caret-right').getAttribute('style'),
+ ).toContain('display: none');
+ expect(
+ component.$el.querySelector('.folder-icon i.fa-caret-down').getAttribute('style'),
+ ).not.toContain('display: none');
+ done();
+ });
+ });
+ });
+
+ it('should close an opened folder', (done) => {
+ setTimeout(() => {
+ // open folder
+ component.$el.querySelector('.folder-name').click();
+
+ Vue.nextTick(() => {
+ // close folder
+ component.$el.querySelector('.folder-name').click();
+
+ Vue.nextTick(() => {
+ expect(
+ component.$el.querySelector('.folder-icon i.fa-caret-down').getAttribute('style'),
+ ).toContain('display: none');
+ expect(
+ component.$el.querySelector('.folder-icon i.fa-caret-right').getAttribute('style'),
+ ).not.toContain('display: none');
+ done();
+ });
+ });
+ });
+ });
+
+ it('should show children environments and a button to show all environments', (done) => {
+ setTimeout(() => {
+ // open folder
+ component.$el.querySelector('.folder-name').click();
+
+ Vue.nextTick(() => {
+ const folderInterceptor = (request, next) => {
+ next(request.respondWith(JSON.stringify({
+ environments: [environment],
+ }), { status: 200 }));
+ };
+
+ Vue.http.interceptors.push(folderInterceptor);
+
+ // wait for next async request
+ setTimeout(() => {
+ expect(component.$el.querySelectorAll('.js-child-row').length).toEqual(1);
+ expect(component.$el.querySelector('td.text-center > a.btn').textContent).toContain('Show all');
+
+ Vue.http.interceptors = _.without(Vue.http.interceptors, folderInterceptor);
+ done();
+ });
+ });
+ });
+ });
+ });
});
diff --git a/spec/javascripts/environments/environments_store_spec.js b/spec/javascripts/environments/environments_store_spec.js
index 115d84b50f5..f617c4bdffe 100644
--- a/spec/javascripts/environments/environments_store_spec.js
+++ b/spec/javascripts/environments/environments_store_spec.js
@@ -1,38 +1,106 @@
import Store from '~/environments/stores/environments_store';
import { environmentsList, serverData } from './mock_data';
-(() => {
- describe('Store', () => {
- let store;
+describe('Store', () => {
+ let store;
- beforeEach(() => {
- store = new Store();
- });
+ beforeEach(() => {
+ store = new Store();
+ });
- it('should start with a blank state', () => {
- expect(store.state.environments.length).toEqual(0);
- expect(store.state.stoppedCounter).toEqual(0);
- expect(store.state.availableCounter).toEqual(0);
- expect(store.state.paginationInformation).toEqual({});
- });
+ it('should start with a blank state', () => {
+ expect(store.state.environments.length).toEqual(0);
+ expect(store.state.stoppedCounter).toEqual(0);
+ expect(store.state.availableCounter).toEqual(0);
+ expect(store.state.paginationInformation).toEqual({});
+ });
+ it('should store environments', () => {
+ store.storeEnvironments(serverData);
+ expect(store.state.environments.length).toEqual(serverData.length);
+ expect(store.state.environments[0]).toEqual(environmentsList[0]);
+ });
+
+ it('should store available count', () => {
+ store.storeAvailableCount(2);
+ expect(store.state.availableCounter).toEqual(2);
+ });
+
+ it('should store stopped count', () => {
+ store.storeStoppedCount(2);
+ expect(store.state.stoppedCounter).toEqual(2);
+ });
+
+ describe('store environments', () => {
it('should store environments', () => {
store.storeEnvironments(serverData);
expect(store.state.environments.length).toEqual(serverData.length);
- expect(store.state.environments[0]).toEqual(environmentsList[0]);
});
- it('should store available count', () => {
- store.storeAvailableCount(2);
- expect(store.state.availableCounter).toEqual(2);
+ it('should add folder keys when environment is a folder', () => {
+ const environment = {
+ name: 'bar',
+ size: 3,
+ id: 2,
+ };
+
+ store.storeEnvironments([environment]);
+ expect(store.state.environments[0].isFolder).toEqual(true);
+ expect(store.state.environments[0].folderName).toEqual('bar');
+ });
+
+ it('should extract content of `latest` key when provided', () => {
+ const environment = {
+ name: 'bar',
+ size: 3,
+ id: 2,
+ latest: {
+ last_deployment: {},
+ isStoppable: true,
+ },
+ };
+
+ store.storeEnvironments([environment]);
+ expect(store.state.environments[0].last_deployment).toEqual({});
+ expect(store.state.environments[0].isStoppable).toEqual(true);
});
- it('should store stopped count', () => {
- store.storeStoppedCount(2);
- expect(store.state.stoppedCounter).toEqual(2);
+ it('should store latest.name when the environment is not a folder', () => {
+ store.storeEnvironments(serverData);
+ expect(store.state.environments[0].name).toEqual(serverData[0].latest.name);
});
- it('should store pagination information', () => {
+ it('should store root level name when environment is a folder', () => {
+ store.storeEnvironments(serverData);
+ expect(store.state.environments[1].folderName).toEqual(serverData[1].name);
+ });
+ });
+
+ describe('toggleFolder', () => {
+ it('should toggle folder', () => {
+ store.storeEnvironments(serverData);
+
+ store.toggleFolder(store.state.environments[1]);
+ expect(store.state.environments[1].isOpen).toEqual(true);
+
+ store.toggleFolder(store.state.environments[1]);
+ expect(store.state.environments[1].isOpen).toEqual(false);
+ });
+ });
+
+ describe('setfolderContent', () => {
+ it('should store folder content', () => {
+ store.storeEnvironments(serverData);
+
+ store.setfolderContent(store.state.environments[1], serverData);
+
+ expect(store.state.environments[1].children.length).toEqual(serverData.length);
+ expect(store.state.environments[1].children[0].isChildren).toEqual(true);
+ });
+ });
+
+ describe('store pagination', () => {
+    it('should store pagination information normalized to integer values', () => {
const pagination = {
'X-nExt-pAge': '2',
'X-page': '1',
@@ -55,4 +123,4 @@ import { environmentsList, serverData } from './mock_data';
expect(store.state.paginationInformation).toEqual(expectedResult);
});
});
-})();
+});
diff --git a/spec/javascripts/environments/mock_data.js b/spec/javascripts/environments/mock_data.js
index 30861481cc5..15e11aa686b 100644
--- a/spec/javascripts/environments/mock_data.js
+++ b/spec/javascripts/environments/mock_data.js
@@ -84,3 +84,19 @@ export const environment = {
updated_at: '2017-01-31T10:53:46.894Z',
},
};
+
+export const folder = {
+ folderName: 'build',
+ size: 5,
+ id: 12,
+ name: 'build/update-README',
+ state: 'available',
+ external_url: null,
+ environment_type: 'build',
+ last_deployment: null,
+ 'stop_action?': false,
+ environment_path: '/root/review-app/environments/12',
+ stop_path: '/root/review-app/environments/12/stop',
+ created_at: '2017-02-01T19:42:18.400Z',
+ updated_at: '2017-02-01T19:42:18.400Z',
+};
diff --git a/spec/javascripts/filtered_search/components/recent_searches_dropdown_content_spec.js b/spec/javascripts/filtered_search/components/recent_searches_dropdown_content_spec.js
new file mode 100644
index 00000000000..2722882375f
--- /dev/null
+++ b/spec/javascripts/filtered_search/components/recent_searches_dropdown_content_spec.js
@@ -0,0 +1,166 @@
+import Vue from 'vue';
+import eventHub from '~/filtered_search/event_hub';
+import RecentSearchesDropdownContent from '~/filtered_search/components/recent_searches_dropdown_content';
+
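+// Mounts the dropdown content component on a detached element with the given props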
+const createComponent = (propsData) => {
+ const Component = Vue.extend(RecentSearchesDropdownContent);
+
+ return new Component({
+ el: document.createElement('div'),
+ propsData,
+ });
+};
+
+// Remove all the newlines and whitespace from the formatted markup
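+// e.g. trimMarkupWhitespace('  foo\n bar ') === 'foo bar'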
+const trimMarkupWhitespace = text => text.replace(/(\n|\s)+/gm, ' ').trim();
+
+describe('RecentSearchesDropdownContent', () => {
+ const propsDataWithoutItems = {
+ items: [],
+ };
+ const propsDataWithItems = {
+ items: [
+ 'foo',
+ 'author:@root label:~foo bar',
+ ],
+ };
+
+ let vm;
+ afterEach(() => {
+ if (vm) {
+ vm.$destroy();
+ }
+ });
+
+ describe('with no items', () => {
+ let el;
+
+ beforeEach(() => {
+ vm = createComponent(propsDataWithoutItems);
+ el = vm.$el;
+ });
+
+ it('should render empty state', () => {
+ expect(el.querySelector('.dropdown-info-note')).toBeDefined();
+
+ const items = el.querySelectorAll('.filtered-search-history-dropdown-item');
+ expect(items.length).toEqual(propsDataWithoutItems.items.length);
+ });
+ });
+
+ describe('with items', () => {
+ let el;
+
+ beforeEach(() => {
+ vm = createComponent(propsDataWithItems);
+ el = vm.$el;
+ });
+
+ it('should render clear recent searches button', () => {
+ expect(el.querySelector('.filtered-search-history-clear-button')).toBeDefined();
+ });
+
+ it('should render recent search items', () => {
+ const items = el.querySelectorAll('.filtered-search-history-dropdown-item');
+ expect(items.length).toEqual(propsDataWithItems.items.length);
+
+ expect(trimMarkupWhitespace(items[0].querySelector('.filtered-search-history-dropdown-search-token').textContent)).toEqual('foo');
+
+ const item1Tokens = items[1].querySelectorAll('.filtered-search-history-dropdown-token');
+ expect(item1Tokens.length).toEqual(2);
+ expect(item1Tokens[0].querySelector('.name').textContent).toEqual('author:');
+ expect(item1Tokens[0].querySelector('.value').textContent).toEqual('@root');
+ expect(item1Tokens[1].querySelector('.name').textContent).toEqual('label:');
+ expect(item1Tokens[1].querySelector('.value').textContent).toEqual('~foo');
+ expect(trimMarkupWhitespace(items[1].querySelector('.filtered-search-history-dropdown-search-token').textContent)).toEqual('bar');
+ });
+ });
+
+ describe('computed', () => {
+ describe('processedItems', () => {
+ it('with items', () => {
+ vm = createComponent(propsDataWithItems);
+ const processedItems = vm.processedItems;
+
+ expect(processedItems.length).toEqual(2);
+
+ expect(processedItems[0].text).toEqual(propsDataWithItems.items[0]);
+ expect(processedItems[0].tokens).toEqual([]);
+ expect(processedItems[0].searchToken).toEqual('foo');
+
+ expect(processedItems[1].text).toEqual(propsDataWithItems.items[1]);
+ expect(processedItems[1].tokens.length).toEqual(2);
+ expect(processedItems[1].tokens[0].prefix).toEqual('author:');
+ expect(processedItems[1].tokens[0].suffix).toEqual('@root');
+ expect(processedItems[1].tokens[1].prefix).toEqual('label:');
+ expect(processedItems[1].tokens[1].suffix).toEqual('~foo');
+ expect(processedItems[1].searchToken).toEqual('bar');
+ });
+
+ it('with no items', () => {
+ vm = createComponent(propsDataWithoutItems);
+ const processedItems = vm.processedItems;
+
+ expect(processedItems.length).toEqual(0);
+ });
+ });
+
+ describe('hasItems', () => {
+ it('with items', () => {
+ vm = createComponent(propsDataWithItems);
+ const hasItems = vm.hasItems;
+ expect(hasItems).toEqual(true);
+ });
+
+ it('with no items', () => {
+ vm = createComponent(propsDataWithoutItems);
+ const hasItems = vm.hasItems;
+ expect(hasItems).toEqual(false);
+ });
+ });
+ });
+
+ describe('methods', () => {
+ describe('onItemActivated', () => {
+ let onRecentSearchesItemSelectedSpy;
+
+ beforeEach(() => {
+ onRecentSearchesItemSelectedSpy = jasmine.createSpy('spy');
+ eventHub.$on('recentSearchesItemSelected', onRecentSearchesItemSelectedSpy);
+
+ vm = createComponent(propsDataWithItems);
+ });
+
+ afterEach(() => {
+ eventHub.$off('recentSearchesItemSelected', onRecentSearchesItemSelectedSpy);
+ });
+
+ it('emits event', () => {
+ expect(onRecentSearchesItemSelectedSpy).not.toHaveBeenCalled();
+ vm.onItemActivated('something');
+ expect(onRecentSearchesItemSelectedSpy).toHaveBeenCalledWith('something');
+ });
+ });
+
+ describe('onRequestClearRecentSearches', () => {
+ let onRequestClearRecentSearchesSpy;
+
+ beforeEach(() => {
+ onRequestClearRecentSearchesSpy = jasmine.createSpy('spy');
+ eventHub.$on('requestClearRecentSearches', onRequestClearRecentSearchesSpy);
+
+ vm = createComponent(propsDataWithItems);
+ });
+
+ afterEach(() => {
+ eventHub.$off('requestClearRecentSearches', onRequestClearRecentSearchesSpy);
+ });
+
+ it('emits event', () => {
+ expect(onRequestClearRecentSearchesSpy).not.toHaveBeenCalled();
+ vm.onRequestClearRecentSearches({ stopPropagation: () => {} });
+ expect(onRequestClearRecentSearchesSpy).toHaveBeenCalled();
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/filtered_search/filtered_search_manager_spec.js b/spec/javascripts/filtered_search/filtered_search_manager_spec.js
index 5f7c05e9014..97af681429b 100644
--- a/spec/javascripts/filtered_search/filtered_search_manager_spec.js
+++ b/spec/javascripts/filtered_search/filtered_search_manager_spec.js
@@ -29,7 +29,7 @@ const FilteredSearchSpecHelper = require('../helpers/filtered_search_spec_helper
beforeEach(() => {
setFixtures(`
- <div class="filtered-search-input-container">
+ <div class="filtered-search-box">
<form>
<ul class="tokens-container list-unstyled">
${FilteredSearchSpecHelper.createInputHTML(placeholder)}
@@ -264,12 +264,12 @@ const FilteredSearchSpecHelper = require('../helpers/filtered_search_spec_helper
describe('toggleInputContainerFocus', () => {
it('toggles on focus', () => {
input.focus();
- expect(document.querySelector('.filtered-search-input-container').classList.contains('focus')).toEqual(true);
+ expect(document.querySelector('.filtered-search-box').classList.contains('focus')).toEqual(true);
});
it('toggles on blur', () => {
input.blur();
- expect(document.querySelector('.filtered-search-input-container').classList.contains('focus')).toEqual(false);
+ expect(document.querySelector('.filtered-search-box').classList.contains('focus')).toEqual(false);
});
});
});
diff --git a/spec/javascripts/filtered_search/services/recent_searches_service_spec.js b/spec/javascripts/filtered_search/services/recent_searches_service_spec.js
new file mode 100644
index 00000000000..2a58fb3a7df
--- /dev/null
+++ b/spec/javascripts/filtered_search/services/recent_searches_service_spec.js
@@ -0,0 +1,56 @@
+import RecentSearchesService from '~/filtered_search/services/recent_searches_service';
+
+describe('RecentSearchesService', () => {
+ let service;
+
+ beforeEach(() => {
+ service = new RecentSearchesService();
+ window.localStorage.removeItem(service.localStorageKey);
+ });
+
+ describe('fetch', () => {
+ it('should default to empty array', (done) => {
+ const fetchItemsPromise = service.fetch();
+
+ fetchItemsPromise
+ .then((items) => {
+ expect(items).toEqual([]);
+ done();
+ })
+ .catch((err) => {
+ done.fail('Shouldn\'t reject with empty localStorage key', err);
+ });
+ });
+
+ it('should reject when unable to parse', (done) => {
+ window.localStorage.setItem(service.localStorageKey, 'fail');
+ const fetchItemsPromise = service.fetch();
+
+ fetchItemsPromise
+ .catch(() => {
+ done();
+ });
+ });
+
+ it('should return items from localStorage', (done) => {
+ window.localStorage.setItem(service.localStorageKey, '["foo", "bar"]');
+ const fetchItemsPromise = service.fetch();
+
+ fetchItemsPromise
+ .then((items) => {
+ expect(items).toEqual(['foo', 'bar']);
+ done();
+ });
+ });
+ });
+
+ describe('setRecentSearches', () => {
+ it('should save things in localStorage', () => {
+ const items = ['foo', 'bar'];
+ service.save(items);
+ const newLocalStorageValue =
+ window.localStorage.getItem(service.localStorageKey);
+ expect(JSON.parse(newLocalStorageValue)).toEqual(items);
+ });
+ });
+});
diff --git a/spec/javascripts/filtered_search/stores/recent_searches_store_spec.js b/spec/javascripts/filtered_search/stores/recent_searches_store_spec.js
new file mode 100644
index 00000000000..1eebc6f2367
--- /dev/null
+++ b/spec/javascripts/filtered_search/stores/recent_searches_store_spec.js
@@ -0,0 +1,59 @@
+import RecentSearchesStore from '~/filtered_search/stores/recent_searches_store';
+
+describe('RecentSearchesStore', () => {
+ let store;
+
+ beforeEach(() => {
+ store = new RecentSearchesStore();
+ });
+
+ describe('addRecentSearch', () => {
+ it('should add to the front of the list', () => {
+ store.addRecentSearch('foo');
+ store.addRecentSearch('bar');
+
+ expect(store.state.recentSearches).toEqual(['bar', 'foo']);
+ });
+
+ it('should deduplicate', () => {
+ store.addRecentSearch('foo');
+ store.addRecentSearch('bar');
+ store.addRecentSearch('foo');
+
+ expect(store.state.recentSearches).toEqual(['foo', 'bar']);
+ });
+
+ it('only keeps track of 5 items', () => {
+ store.addRecentSearch('1');
+ store.addRecentSearch('2');
+ store.addRecentSearch('3');
+ store.addRecentSearch('4');
+ store.addRecentSearch('5');
+ store.addRecentSearch('6');
+ store.addRecentSearch('7');
+
+ expect(store.state.recentSearches).toEqual(['7', '6', '5', '4', '3']);
+ });
+ });
+
+ describe('setRecentSearches', () => {
+ it('should override list', () => {
+ store.setRecentSearches([
+ 'foo',
+ 'bar',
+ ]);
+ store.setRecentSearches([
+ 'baz',
+ 'qux',
+ ]);
+
+ expect(store.state.recentSearches).toEqual(['baz', 'qux']);
+ });
+
+ it('only keeps track of 5 items', () => {
+ store.setRecentSearches(['1', '2', '3', '4', '5', '6', '7']);
+
+ expect(store.state.recentSearches).toEqual(['1', '2', '3', '4', '5']);
+ });
+ });
+});
diff --git a/spec/javascripts/fixtures/environments/metrics.html.haml b/spec/javascripts/fixtures/environments/metrics.html.haml
index 483063fb889..e2dd9519898 100644
--- a/spec/javascripts/fixtures/environments/metrics.html.haml
+++ b/spec/javascripts/fixtures/environments/metrics.html.haml
@@ -1,12 +1,62 @@
-%div
+.prometheus-container{ 'data-has-metrics': "false", 'data-doc-link': '/help/administration/monitoring/prometheus/index.md', 'data-prometheus-integration': '/root/hello-prometheus/services/prometheus/edit' }
.top-area
.row
.col-sm-6
%h3.page-title
Metrics for environment
- .row
- .col-sm-12
- %svg.prometheus-graph{ 'graph-type' => 'cpu_values' }
- .row
- .col-sm-12
- %svg.prometheus-graph{ 'graph-type' => 'memory_values' }
\ No newline at end of file
+ .prometheus-state
+ .js-getting-started.hidden
+ .row
+ .col-md-4.col-md-offset-4.state-svg
+ %svg
+ .row
+ .col-md-6.col-md-offset-3
+ %h4.text-center.state-title
+ Get started with performance monitoring
+ .row
+ .col-md-6.col-md-offset-3
+ .description-text.text-center.state-description
+ Stay updated about the performance and health of your environment by configuring Prometheus to monitor your deployments. Learn more about performance monitoring
+ .row.state-button-section
+ .col-md-4.col-md-offset-4.text-center.state-button
+ %a.btn.btn-success
+ Configure Prometheus
+ .js-loading.hidden
+ .row
+ .col-md-4.col-md-offset-4.state-svg
+ %svg
+ .row
+ .col-md-6.col-md-offset-3
+ %h4.text-center.state-title
+ Waiting for performance data
+ .row
+ .col-md-6.col-md-offset-3
+ .description-text.text-center.state-description
+ Creating graphs uses the data from the Prometheus server. If this takes a long time, ensure that data is available.
+ .row.state-button-section
+ .col-md-4.col-md-offset-4.text-center.state-button
+ %a.btn.btn-success
+ View documentation
+ .js-unable-to-connect.hidden
+ .row
+ .col-md-4.col-md-offset-4.state-svg
+ %svg
+ .row
+ .col-md-6.col-md-offset-3
+ %h4.text-center.state-title
+ Unable to connect to Prometheus server
+ .row
+ .col-md-6.col-md-offset-3
+ .description-text.text-center.state-description
+ Ensure connectivity is available from the GitLab server to the Prometheus server
+ .row.state-button-section
+ .col-md-4.col-md-offset-4.text-center.state-button
+ %a.btn.btn-success
+ View documentation
+ .prometheus-graphs
+ .row
+ .col-sm-12
+ %svg.prometheus-graph{ 'graph-type' => 'cpu_values' }
+ .row
+ .col-sm-12
+ %svg.prometheus-graph{ 'graph-type' => 'memory_values' }
diff --git a/spec/javascripts/fixtures/pdf_viewer.html.haml b/spec/javascripts/fixtures/pdf_viewer.html.haml
new file mode 100644
index 00000000000..2e57beae54b
--- /dev/null
+++ b/spec/javascripts/fixtures/pdf_viewer.html.haml
@@ -0,0 +1 @@
+.file-content#js-pdf-viewer{ data: { endpoint: '/test' } }
diff --git a/spec/javascripts/fixtures/sketch_viewer.html.haml b/spec/javascripts/fixtures/sketch_viewer.html.haml
new file mode 100644
index 00000000000..f01bd00925a
--- /dev/null
+++ b/spec/javascripts/fixtures/sketch_viewer.html.haml
@@ -0,0 +1,2 @@
+.file-content#js-sketch-viewer{ data: { endpoint: '/test_sketch_file.sketch' } }
+ .js-loading-icon
diff --git a/spec/javascripts/issue_show/issue_title_spec.js b/spec/javascripts/issue_show/issue_title_spec.js
new file mode 100644
index 00000000000..806d728a874
--- /dev/null
+++ b/spec/javascripts/issue_show/issue_title_spec.js
@@ -0,0 +1,22 @@
+import Vue from 'vue';
+import issueTitle from '~/issue_show/issue_title';
+
+describe('Issue Title', () => {
+ let IssueTitleComponent;
+
+ beforeEach(() => {
+ IssueTitleComponent = Vue.extend(issueTitle);
+ });
+
+ it('should render a title', () => {
+ const component = new IssueTitleComponent({
+ propsData: {
+ initialTitle: 'wow',
+ endpoint: '/gitlab-org/gitlab-shell/issues/9/rendered_title',
+ },
+ }).$mount();
+
+ expect(component.$el.classList).toContain('title');
+ expect(component.$el.innerHTML).toContain('wow');
+ });
+});
diff --git a/spec/javascripts/lib/utils/common_utils_spec.js b/spec/javascripts/lib/utils/common_utils_spec.js
index 5a93d479c1f..03f3c206f44 100644
--- a/spec/javascripts/lib/utils/common_utils_spec.js
+++ b/spec/javascripts/lib/utils/common_utils_spec.js
@@ -310,5 +310,56 @@ require('~/lib/utils/common_utils');
});
}, 10000);
});
+
+ describe('gl.utils.setFavicon', () => {
+ it('should set page favicon to provided favicon', () => {
+ const faviconName = 'custom_favicon';
+ const fakeLink = {
+ setAttribute() {},
+ };
+
+ spyOn(window.document, 'getElementById').and.callFake(() => fakeLink);
+ spyOn(fakeLink, 'setAttribute').and.callFake((attr, val) => {
+ expect(attr).toEqual('href');
+ expect(val.indexOf('/assets/custom_favicon.ico') > -1).toBe(true);
+ });
+ gl.utils.setFavicon(faviconName);
+ });
+ });
+
+ describe('gl.utils.resetFavicon', () => {
+ it('should reset page favicon to tanuki', () => {
+ const fakeLink = {
+ setAttribute() {},
+ };
+
+ spyOn(window.document, 'getElementById').and.callFake(() => fakeLink);
+ spyOn(fakeLink, 'setAttribute').and.callFake((attr, val) => {
+ expect(attr).toEqual('href');
+ expect(val).toMatch(/favicon/);
+ });
+ gl.utils.resetFavicon();
+ });
+ });
+
+ describe('gl.utils.setCiStatusFavicon', () => {
+ it('should set page favicon to CI status favicon based on provided status', () => {
+ const BUILD_URL = `${gl.TEST_HOST}/frontend-fixtures/builds-project/builds/1/status.json`;
+ const FAVICON_PATH = 'ci_favicons/';
+ const FAVICON = 'icon_status_success';
+ const spySetFavicon = spyOn(gl.utils, 'setFavicon').and.stub();
+ const spyResetFavicon = spyOn(gl.utils, 'resetFavicon').and.stub();
+ spyOn($, 'ajax').and.callFake(function (options) {
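+        // Drive the success and error callbacks directly: a payload with an icon sets the CI favicon, an empty response or an error resets it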
+ options.success({ icon: FAVICON });
+ expect(spySetFavicon).toHaveBeenCalledWith(FAVICON_PATH + FAVICON);
+ options.success();
+ expect(spyResetFavicon).toHaveBeenCalled();
+ options.error();
+ expect(spyResetFavicon).toHaveBeenCalled();
+ });
+
+ gl.utils.setCiStatusFavicon(BUILD_URL);
+ });
+ });
});
})();
diff --git a/spec/javascripts/merge_request_widget_spec.js b/spec/javascripts/merge_request_widget_spec.js
index d5193b41c33..88dae8c3e06 100644
--- a/spec/javascripts/merge_request_widget_spec.js
+++ b/spec/javascripts/merge_request_widget_spec.js
@@ -142,18 +142,21 @@ require('~/lib/utils/datetime_utility');
it('should call showCIStatus even if a notification should not be displayed', function() {
var spy;
spy = spyOn(this["class"], 'showCIStatus').and.stub();
+ spyOn(gl.utils, 'setCiStatusFavicon').and.callFake(() => {});
this["class"].getCIStatus(false);
return expect(spy).toHaveBeenCalledWith(this.ciStatusData.status);
});
it('should call showCIStatus when a notification should be displayed', function() {
var spy;
spy = spyOn(this["class"], 'showCIStatus').and.stub();
+ spyOn(gl.utils, 'setCiStatusFavicon').and.callFake(() => {});
this["class"].getCIStatus(true);
return expect(spy).toHaveBeenCalledWith(this.ciStatusData.status);
});
it('should call showCICoverage when the coverage rate is set', function() {
var spy;
spy = spyOn(this["class"], 'showCICoverage').and.stub();
+ spyOn(gl.utils, 'setCiStatusFavicon').and.callFake(() => {});
this["class"].getCIStatus(false);
return expect(spy).toHaveBeenCalledWith(this.ciStatusData.coverage);
});
@@ -161,12 +164,14 @@ require('~/lib/utils/datetime_utility');
var spy;
this.ciStatusData.coverage = null;
spy = spyOn(this["class"], 'showCICoverage').and.stub();
+ spyOn(gl.utils, 'setCiStatusFavicon').and.callFake(() => {});
this["class"].getCIStatus(false);
return expect(spy).not.toHaveBeenCalled();
});
it('should not display a notification on the first check after the widget has been created', function() {
var spy;
spy = spyOn(window, 'notify');
+ spyOn(gl.utils, 'setCiStatusFavicon').and.callFake(() => {});
this["class"] = new window.gl.MergeRequestWidget(this.opts);
this["class"].getCIStatus(true);
return expect(spy).not.toHaveBeenCalled();
@@ -174,6 +179,7 @@ require('~/lib/utils/datetime_utility');
it('should update the pipeline URL when the pipeline changes', function() {
var spy;
spy = spyOn(this["class"], 'updatePipelineUrls').and.stub();
+ spyOn(gl.utils, 'setCiStatusFavicon').and.callFake(() => {});
this["class"].getCIStatus(false);
this.ciStatusData.pipeline += 1;
this["class"].getCIStatus(false);
@@ -182,6 +188,7 @@ require('~/lib/utils/datetime_utility');
it('should update the commit URL when the sha changes', function() {
var spy;
spy = spyOn(this["class"], 'updateCommitUrls').and.stub();
+ spyOn(gl.utils, 'setCiStatusFavicon').and.callFake(() => {});
this["class"].getCIStatus(false);
this.ciStatusData.sha = "9b50b99a";
this["class"].getCIStatus(false);
diff --git a/spec/javascripts/monitoring/prometheus_graph_spec.js b/spec/javascripts/monitoring/prometheus_graph_spec.js
index c2bcd9c0f7c..4b904fc2960 100644
--- a/spec/javascripts/monitoring/prometheus_graph_spec.js
+++ b/spec/javascripts/monitoring/prometheus_graph_spec.js
@@ -1,5 +1,4 @@
import 'jquery';
-import '~/lib/utils/common_utils';
import PrometheusGraph from '~/monitoring/prometheus_graph';
import { prometheusMockData } from './prometheus_mock_data';
@@ -12,6 +11,7 @@ describe('PrometheusGraph', () => {
beforeEach(() => {
loadFixtures(fixtureName);
+ $('.prometheus-container').data('has-metrics', 'true');
this.prometheusGraph = new PrometheusGraph();
const self = this;
const fakeInit = (metricsResponse) => {
@@ -75,3 +75,24 @@ describe('PrometheusGraph', () => {
});
});
});
+
+describe('PrometheusGraphs UX states', () => {
+ const fixtureName = 'static/environments/metrics.html.raw';
+ preloadFixtures(fixtureName);
+
+ beforeEach(() => {
+ loadFixtures(fixtureName);
+ this.prometheusGraph = new PrometheusGraph();
+ });
+
+ it('shows a specified state', () => {
+ this.prometheusGraph.state = '.js-getting-started';
+ this.prometheusGraph.updateState();
+ const $state = $('.js-getting-started');
+ expect($state).toBeDefined();
+ expect($('.state-title', $state)).toBeDefined();
+ expect($('.state-svg', $state)).toBeDefined();
+ expect($('.state-description', $state)).toBeDefined();
+ expect($('.state-button', $state)).toBeDefined();
+ });
+});
diff --git a/spec/javascripts/test_bundle.js b/spec/javascripts/test_bundle.js
index b30c5da8822..07dc51a7815 100644
--- a/spec/javascripts/test_bundle.js
+++ b/spec/javascripts/test_bundle.js
@@ -64,6 +64,7 @@ if (process.env.BABEL_ENV === 'coverage') {
'./snippet/snippet_bundle.js',
'./terminal/terminal_bundle.js',
'./users/users_bundle.js',
+ './issue_show/index.js',
];
describe('Uncovered files', function () {
diff --git a/spec/javascripts/vue_pipelines_index/pipelines_actions_spec.js b/spec/javascripts/vue_pipelines_index/pipelines_actions_spec.js
index dba998c7688..0910df61915 100644
--- a/spec/javascripts/vue_pipelines_index/pipelines_actions_spec.js
+++ b/spec/javascripts/vue_pipelines_index/pipelines_actions_spec.js
@@ -15,6 +15,11 @@ describe('Pipelines Actions dropdown', () => {
name: 'stop_review',
path: '/root/review-app/builds/1893/play',
},
+ {
+ name: 'foo',
+ path: '#',
+ playable: false,
+ },
];
spy = jasmine.createSpy('spy').and.returnValue(Promise.resolve());
@@ -59,4 +64,14 @@ describe('Pipelines Actions dropdown', () => {
expect(component.$el.querySelector('.fa-spinner')).toEqual(null);
});
+
+ it('should render a disabled action when it\'s not playable', () => {
+ expect(
+ component.$el.querySelector('.dropdown-menu li:last-child button').getAttribute('disabled'),
+ ).toEqual('disabled');
+
+ expect(
+ component.$el.querySelector('.dropdown-menu li:last-child button').classList.contains('disabled'),
+ ).toEqual(true);
+ });
});
diff --git a/spec/lib/banzai/filter/markdown_filter_spec.rb b/spec/lib/banzai/filter/markdown_filter_spec.rb
new file mode 100644
index 00000000000..897288b8ad5
--- /dev/null
+++ b/spec/lib/banzai/filter/markdown_filter_spec.rb
@@ -0,0 +1,19 @@
+require 'spec_helper'
+
+describe Banzai::Filter::MarkdownFilter, lib: true do
+ include FilterSpecHelper
+
+ context 'code block' do
+ it 'adds language to lang attribute when specified' do
+ result = filter("```html\nsome code\n```")
+
+ expect(result).to start_with("\n<pre><code lang=\"html\">")
+ end
+
+ it 'does not add language to lang attribute when not specified' do
+ result = filter("```\nsome code\n```")
+
+ expect(result).to start_with("\n<pre><code>")
+ end
+ end
+end
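
The new MarkdownFilter spec above asserts that a fenced block's language ends up in a lang attribute on the code element rather than in a class. A minimal sketch of that behaviour, assuming the HTML::Pipeline-style .call entry point that FilterSpecHelper wraps (the empty context hash is an assumption; the expected prefix is taken verbatim from the spec):

    # Hypothetical direct call; the spec itself goes through FilterSpecHelper#filter.
    html = Banzai::Filter::MarkdownFilter.call("```html\nsome code\n```", {})
    html.start_with?("\n<pre><code lang=\"html\">")  # => true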
diff --git a/spec/lib/banzai/filter/sanitization_filter_spec.rb b/spec/lib/banzai/filter/sanitization_filter_spec.rb
index b4cd5f63a15..fdbc65b5e00 100644
--- a/spec/lib/banzai/filter/sanitization_filter_spec.rb
+++ b/spec/lib/banzai/filter/sanitization_filter_spec.rb
@@ -49,11 +49,12 @@ describe Banzai::Filter::SanitizationFilter, lib: true do
instance = described_class.new('Foo')
3.times { instance.whitelist }
- expect(instance.whitelist[:transformers].size).to eq 5
+ expect(instance.whitelist[:transformers].size).to eq 4
end
- it 'allows syntax highlighting' do
- exp = act = %q{<pre class="code highlight white c"><code><span class="k">def</span></code></pre>}
+ it 'sanitizes `class` attribute from all elements' do
+ act = %q{<pre class="code highlight white c"><code>&lt;span class="k"&gt;def&lt;/span&gt;</code></pre>}
+ exp = %q{<pre><code>&lt;span class="k"&gt;def&lt;/span&gt;</code></pre>}
expect(filter(act).to_html).to eq exp
end
diff --git a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
index 63fb1bb25c4..f61fc8ceb9e 100644
--- a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
+++ b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
@@ -12,14 +12,14 @@ describe Banzai::Filter::SyntaxHighlightFilter, lib: true do
context "when a valid language is specified" do
it "highlights as that language" do
- result = filter('<pre><code class="ruby">def fun end</code></pre>')
+ result = filter('<pre><code lang="ruby">def fun end</code></pre>')
expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight ruby" lang="ruby" v-pre="true"><code><span id="LC1" class="line" lang="ruby"><span class="k">def</span> <span class="nf">fun</span> <span class="k">end</span></span></code></pre>')
end
end
context "when an invalid language is specified" do
it "highlights as plaintext" do
- result = filter('<pre><code class="gnuplot">This is a test</code></pre>')
+ result = filter('<pre><code lang="gnuplot">This is a test</code></pre>')
expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre>')
end
end
@@ -30,7 +30,7 @@ describe Banzai::Filter::SyntaxHighlightFilter, lib: true do
end
it "highlights as plaintext" do
- result = filter('<pre><code class="ruby">This is a test</code></pre>')
+ result = filter('<pre><code lang="ruby">This is a test</code></pre>')
expect(result.to_html).to eq('<pre class="code highlight" lang="" v-pre="true"><code>This is a test</code></pre>')
end
end
diff --git a/spec/lib/ci/ansi2html_spec.rb b/spec/lib/ci/ansi2html_spec.rb
index 0762fd7e56a..a5dfb49478a 100644
--- a/spec/lib/ci/ansi2html_spec.rb
+++ b/spec/lib/ci/ansi2html_spec.rb
@@ -1,159 +1,160 @@
require 'spec_helper'
describe Ci::Ansi2html, lib: true do
- subject { Ci::Ansi2html }
+ subject { described_class }
it "prints non-ansi as-is" do
- expect(subject.convert("Hello")[:html]).to eq('Hello')
+ expect(convert_html("Hello")).to eq('Hello')
end
it "strips non-color-changing controll sequences" do
- expect(subject.convert("Hello \e[2Kworld")[:html]).to eq('Hello world')
+ expect(convert_html("Hello \e[2Kworld")).to eq('Hello world')
end
it "prints simply red" do
- expect(subject.convert("\e[31mHello\e[0m")[:html]).to eq('<span class="term-fg-red">Hello</span>')
+ expect(convert_html("\e[31mHello\e[0m")).to eq('<span class="term-fg-red">Hello</span>')
end
it "prints simply red without trailing reset" do
- expect(subject.convert("\e[31mHello")[:html]).to eq('<span class="term-fg-red">Hello</span>')
+ expect(convert_html("\e[31mHello")).to eq('<span class="term-fg-red">Hello</span>')
end
it "prints simply yellow" do
- expect(subject.convert("\e[33mHello\e[0m")[:html]).to eq('<span class="term-fg-yellow">Hello</span>')
+ expect(convert_html("\e[33mHello\e[0m")).to eq('<span class="term-fg-yellow">Hello</span>')
end
it "prints default on blue" do
- expect(subject.convert("\e[39;44mHello")[:html]).to eq('<span class="term-bg-blue">Hello</span>')
+ expect(convert_html("\e[39;44mHello")).to eq('<span class="term-bg-blue">Hello</span>')
end
it "prints red on blue" do
- expect(subject.convert("\e[31;44mHello")[:html]).to eq('<span class="term-fg-red term-bg-blue">Hello</span>')
+ expect(convert_html("\e[31;44mHello")).to eq('<span class="term-fg-red term-bg-blue">Hello</span>')
end
it "resets colors after red on blue" do
- expect(subject.convert("\e[31;44mHello\e[0m world")[:html]).to eq('<span class="term-fg-red term-bg-blue">Hello</span> world')
+ expect(convert_html("\e[31;44mHello\e[0m world")).to eq('<span class="term-fg-red term-bg-blue">Hello</span> world')
end
it "performs color change from red/blue to yellow/blue" do
- expect(subject.convert("\e[31;44mHello \e[33mworld")[:html]).to eq('<span class="term-fg-red term-bg-blue">Hello </span><span class="term-fg-yellow term-bg-blue">world</span>')
+ expect(convert_html("\e[31;44mHello \e[33mworld")).to eq('<span class="term-fg-red term-bg-blue">Hello </span><span class="term-fg-yellow term-bg-blue">world</span>')
end
it "performs color change from red/blue to yellow/green" do
- expect(subject.convert("\e[31;44mHello \e[33;42mworld")[:html]).to eq('<span class="term-fg-red term-bg-blue">Hello </span><span class="term-fg-yellow term-bg-green">world</span>')
+ expect(convert_html("\e[31;44mHello \e[33;42mworld")).to eq('<span class="term-fg-red term-bg-blue">Hello </span><span class="term-fg-yellow term-bg-green">world</span>')
end
it "performs color change from red/blue to reset to yellow/green" do
- expect(subject.convert("\e[31;44mHello\e[0m \e[33;42mworld")[:html]).to eq('<span class="term-fg-red term-bg-blue">Hello</span> <span class="term-fg-yellow term-bg-green">world</span>')
+ expect(convert_html("\e[31;44mHello\e[0m \e[33;42mworld")).to eq('<span class="term-fg-red term-bg-blue">Hello</span> <span class="term-fg-yellow term-bg-green">world</span>')
end
it "ignores unsupported codes" do
- expect(subject.convert("\e[51mHello\e[0m")[:html]).to eq('Hello')
+ expect(convert_html("\e[51mHello\e[0m")).to eq('Hello')
end
it "prints light red" do
- expect(subject.convert("\e[91mHello\e[0m")[:html]).to eq('<span class="term-fg-l-red">Hello</span>')
+ expect(convert_html("\e[91mHello\e[0m")).to eq('<span class="term-fg-l-red">Hello</span>')
end
it "prints default on light red" do
- expect(subject.convert("\e[101mHello\e[0m")[:html]).to eq('<span class="term-bg-l-red">Hello</span>')
+ expect(convert_html("\e[101mHello\e[0m")).to eq('<span class="term-bg-l-red">Hello</span>')
end
it "performs color change from red/blue to default/blue" do
- expect(subject.convert("\e[31;44mHello \e[39mworld")[:html]).to eq('<span class="term-fg-red term-bg-blue">Hello </span><span class="term-bg-blue">world</span>')
+ expect(convert_html("\e[31;44mHello \e[39mworld")).to eq('<span class="term-fg-red term-bg-blue">Hello </span><span class="term-bg-blue">world</span>')
end
it "performs color change from light red/blue to default/blue" do
- expect(subject.convert("\e[91;44mHello \e[39mworld")[:html]).to eq('<span class="term-fg-l-red term-bg-blue">Hello </span><span class="term-bg-blue">world</span>')
+ expect(convert_html("\e[91;44mHello \e[39mworld")).to eq('<span class="term-fg-l-red term-bg-blue">Hello </span><span class="term-bg-blue">world</span>')
end
it "prints bold text" do
- expect(subject.convert("\e[1mHello")[:html]).to eq('<span class="term-bold">Hello</span>')
+ expect(convert_html("\e[1mHello")).to eq('<span class="term-bold">Hello</span>')
end
it "resets bold text" do
- expect(subject.convert("\e[1mHello\e[21m world")[:html]).to eq('<span class="term-bold">Hello</span> world')
- expect(subject.convert("\e[1mHello\e[22m world")[:html]).to eq('<span class="term-bold">Hello</span> world')
+ expect(convert_html("\e[1mHello\e[21m world")).to eq('<span class="term-bold">Hello</span> world')
+ expect(convert_html("\e[1mHello\e[22m world")).to eq('<span class="term-bold">Hello</span> world')
end
it "prints italic text" do
- expect(subject.convert("\e[3mHello")[:html]).to eq('<span class="term-italic">Hello</span>')
+ expect(convert_html("\e[3mHello")).to eq('<span class="term-italic">Hello</span>')
end
it "resets italic text" do
- expect(subject.convert("\e[3mHello\e[23m world")[:html]).to eq('<span class="term-italic">Hello</span> world')
+ expect(convert_html("\e[3mHello\e[23m world")).to eq('<span class="term-italic">Hello</span> world')
end
it "prints underlined text" do
- expect(subject.convert("\e[4mHello")[:html]).to eq('<span class="term-underline">Hello</span>')
+ expect(convert_html("\e[4mHello")).to eq('<span class="term-underline">Hello</span>')
end
it "resets underlined text" do
- expect(subject.convert("\e[4mHello\e[24m world")[:html]).to eq('<span class="term-underline">Hello</span> world')
+ expect(convert_html("\e[4mHello\e[24m world")).to eq('<span class="term-underline">Hello</span> world')
end
it "prints concealed text" do
- expect(subject.convert("\e[8mHello")[:html]).to eq('<span class="term-conceal">Hello</span>')
+ expect(convert_html("\e[8mHello")).to eq('<span class="term-conceal">Hello</span>')
end
it "resets concealed text" do
- expect(subject.convert("\e[8mHello\e[28m world")[:html]).to eq('<span class="term-conceal">Hello</span> world')
+ expect(convert_html("\e[8mHello\e[28m world")).to eq('<span class="term-conceal">Hello</span> world')
end
it "prints crossed-out text" do
- expect(subject.convert("\e[9mHello")[:html]).to eq('<span class="term-cross">Hello</span>')
+ expect(convert_html("\e[9mHello")).to eq('<span class="term-cross">Hello</span>')
end
it "resets crossed-out text" do
- expect(subject.convert("\e[9mHello\e[29m world")[:html]).to eq('<span class="term-cross">Hello</span> world')
+ expect(convert_html("\e[9mHello\e[29m world")).to eq('<span class="term-cross">Hello</span> world')
end
it "can print 256 xterm fg colors" do
- expect(subject.convert("\e[38;5;16mHello")[:html]).to eq('<span class="xterm-fg-16">Hello</span>')
+ expect(convert_html("\e[38;5;16mHello")).to eq('<span class="xterm-fg-16">Hello</span>')
end
it "can print 256 xterm fg colors on normal magenta background" do
- expect(subject.convert("\e[38;5;16;45mHello")[:html]).to eq('<span class="xterm-fg-16 term-bg-magenta">Hello</span>')
+ expect(convert_html("\e[38;5;16;45mHello")).to eq('<span class="xterm-fg-16 term-bg-magenta">Hello</span>')
end
it "can print 256 xterm bg colors" do
- expect(subject.convert("\e[48;5;240mHello")[:html]).to eq('<span class="xterm-bg-240">Hello</span>')
+ expect(convert_html("\e[48;5;240mHello")).to eq('<span class="xterm-bg-240">Hello</span>')
end
it "can print 256 xterm bg colors on normal magenta foreground" do
- expect(subject.convert("\e[48;5;16;35mHello")[:html]).to eq('<span class="term-fg-magenta xterm-bg-16">Hello</span>')
+ expect(convert_html("\e[48;5;16;35mHello")).to eq('<span class="term-fg-magenta xterm-bg-16">Hello</span>')
end
it "prints bold colored text vividly" do
- expect(subject.convert("\e[1;31mHello\e[0m")[:html]).to eq('<span class="term-fg-l-red term-bold">Hello</span>')
+ expect(convert_html("\e[1;31mHello\e[0m")).to eq('<span class="term-fg-l-red term-bold">Hello</span>')
end
it "prints bold light colored text correctly" do
- expect(subject.convert("\e[1;91mHello\e[0m")[:html]).to eq('<span class="term-fg-l-red term-bold">Hello</span>')
+ expect(convert_html("\e[1;91mHello\e[0m")).to eq('<span class="term-fg-l-red term-bold">Hello</span>')
end
it "prints &lt;" do
- expect(subject.convert("<")[:html]).to eq('&lt;')
+ expect(convert_html("<")).to eq('&lt;')
end
it "replaces newlines with line break tags" do
- expect(subject.convert("\n")[:html]).to eq('<br>')
+ expect(convert_html("\n")).to eq('<br>')
end
it "groups carriage returns with newlines" do
- expect(subject.convert("\r\n")[:html]).to eq('<br>')
+ expect(convert_html("\r\n")).to eq('<br>')
end
describe "incremental update" do
shared_examples 'stateable converter' do
- let(:pass1) { subject.convert(pre_text) }
- let(:pass2) { subject.convert(pre_text + text, pass1[:state]) }
+ let(:pass1_stream) { StringIO.new(pre_text) }
+ let(:pass2_stream) { StringIO.new(pre_text + text) }
+ let(:pass1) { subject.convert(pass1_stream) }
+ let(:pass2) { subject.convert(pass2_stream, pass1.state) }
it "to returns html to append" do
- expect(pass2[:append]).to be_truthy
- expect(pass2[:html]).to eq(html)
- expect(pass1[:text] + pass2[:text]).to eq(pre_text + text)
- expect(pass1[:html] + pass2[:html]).to eq(pre_html + html)
+ expect(pass2.append).to be_truthy
+ expect(pass2.html).to eq(html)
+ expect(pass1.html + pass2.html).to eq(pre_html + html)
end
end
@@ -193,4 +194,27 @@ describe Ci::Ansi2html, lib: true do
it_behaves_like 'stateable converter'
end
end
+
+ describe "truncates" do
+ let(:text) { "Hello World" }
+ let(:stream) { StringIO.new(text) }
+ let(:subject) { described_class.convert(stream) }
+
+ before do
+ stream.seek(3, IO::SEEK_SET)
+ end
+
+ it "returns truncated output" do
+ expect(subject.truncated).to be_truthy
+ end
+
+ it "does not append output" do
+ expect(subject.append).to be_falsey
+ end
+ end
+
+ def convert_html(data)
+ stream = StringIO.new(data)
+ subject.convert(stream).html
+ end
end
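
The rewrite above moves Ci::Ansi2html from a hash-returning convert(text) to a stream-based convert(stream, state). A rough usage sketch derived only from the expectations in this spec; the result object's class is not shown here, only that it responds to html, state, append and truncated:

    require 'stringio'

    stream = StringIO.new("\e[31mHello\e[0m world")
    pass1  = Ci::Ansi2html.convert(stream)
    pass1.html   # => '<span class="term-fg-red">Hello</span> world'

    # Incremental update: pass the previous state so only the appended output is converted.
    stream2 = StringIO.new("\e[31mHello\e[0m world\nmore")
    pass2   = Ci::Ansi2html.convert(stream2, pass1.state)
    pass2.append # => true when the second pass merely extends the first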
diff --git a/spec/lib/container_registry/blob_spec.rb b/spec/lib/container_registry/blob_spec.rb
index bbacdc67ebd..f06e5fd54a2 100644
--- a/spec/lib/container_registry/blob_spec.rb
+++ b/spec/lib/container_registry/blob_spec.rb
@@ -1,110 +1,121 @@
require 'spec_helper'
describe ContainerRegistry::Blob do
- let(:digest) { 'sha256:0123456789012345' }
+ let(:group) { create(:group, name: 'group') }
+ let(:project) { create(:empty_project, path: 'test', group: group) }
+
+ let(:repository) do
+ create(:container_repository, name: 'image',
+ tags: %w[latest rc1],
+ project: project)
+ end
+
let(:config) do
- {
- 'digest' => digest,
+ { 'digest' => 'sha256:0123456789012345',
'mediaType' => 'binary',
- 'size' => 1000
- }
+ 'size' => 1000 }
+ end
+
+ let(:blob) { described_class.new(repository, config) }
+
+ before do
+ stub_container_registry_config(enabled: true,
+ api_url: 'http://registry.gitlab',
+ host_port: 'registry.gitlab')
end
- let(:token) { 'authorization-token' }
-
- let(:registry) { ContainerRegistry::Registry.new('http://example.com', token: token) }
- let(:repository) { registry.repository('group/test') }
- let(:blob) { repository.blob(config) }
it { expect(blob).to respond_to(:repository) }
it { expect(blob).to delegate_method(:registry).to(:repository) }
it { expect(blob).to delegate_method(:client).to(:repository) }
- context '#path' do
- subject { blob.path }
-
- it { is_expected.to eq('example.com/group/test@sha256:0123456789012345') }
+ describe '#path' do
+ it 'returns a valid path to the blob' do
+ expect(blob.path).to eq('group/test/image@sha256:0123456789012345')
+ end
end
- context '#digest' do
- subject { blob.digest }
-
- it { is_expected.to eq(digest) }
+ describe '#digest' do
+ it 'returns a correct digest value' do
+ expect(blob.digest).to eq 'sha256:0123456789012345'
+ end
end
- context '#type' do
- subject { blob.type }
-
- it { is_expected.to eq('binary') }
+ describe '#type' do
+ it 'returns a correct type' do
+ expect(blob.type).to eq 'binary'
+ end
end
- context '#revision' do
- subject { blob.revision }
-
- it { is_expected.to eq('0123456789012345') }
+ describe '#revision' do
+ it 'returns a correct blob SHA' do
+ expect(blob.revision).to eq '0123456789012345'
+ end
end
- context '#short_revision' do
- subject { blob.short_revision }
-
- it { is_expected.to eq('012345678') }
+ describe '#short_revision' do
+ it 'returns a short SHA' do
+ expect(blob.short_revision).to eq '012345678'
+ end
end
- context '#delete' do
+ describe '#delete' do
before do
- stub_request(:delete, 'http://example.com/v2/group/test/blobs/sha256:0123456789012345').
- to_return(status: 200)
+ stub_request(:delete, 'http://registry.gitlab/v2/group/test/image/blobs/sha256:0123456789012345')
+ .to_return(status: 200)
end
- subject { blob.delete }
-
- it { is_expected.to be_truthy }
+ it 'returns true when blob has been successfully deleted' do
+ expect(blob.delete).to be_truthy
+ end
end
- context '#data' do
- let(:data) { '{"key":"value"}' }
-
- subject { blob.data }
-
+ describe '#data' do
context 'when locally stored' do
before do
- stub_request(:get, 'http://example.com/v2/group/test/blobs/sha256:0123456789012345').
+ stub_request(:get, 'http://registry.gitlab/v2/group/test/image/blobs/sha256:0123456789012345').
to_return(
status: 200,
headers: { 'Content-Type' => 'application/json' },
- body: data)
+ body: '{"key":"value"}')
end
- it { is_expected.to eq(data) }
+ it 'returns correct blob data' do
+ expect(blob.data).to eq '{"key":"value"}'
+ end
end
context 'when externally stored' do
+ let(:location) { 'http://external.com/blob/file' }
+
before do
- stub_request(:get, 'http://example.com/v2/group/test/blobs/sha256:0123456789012345').
- with(headers: { 'Authorization' => "bearer #{token}" }).
- to_return(
+ stub_request(:get, 'http://registry.gitlab/v2/group/test/image/blobs/sha256:0123456789012345')
+ .with(headers: { 'Authorization' => 'bearer token' })
+ .to_return(
status: 307,
headers: { 'Location' => location })
end
context 'for a valid address' do
- let(:location) { 'http://external.com/blob/file' }
-
before do
stub_request(:get, location).
with(headers: { 'Authorization' => nil }).
to_return(
status: 200,
headers: { 'Content-Type' => 'application/json' },
- body: data)
+ body: '{"key":"value"}')
end
- it { is_expected.to eq(data) }
+ it 'returns correct data' do
+ expect(blob.data).to eq '{"key":"value"}'
+ end
end
context 'for invalid file' do
let(:location) { 'file:///etc/passwd' }
- it { expect{ subject }.to raise_error(ArgumentError, 'invalid address') }
+ it 'raises an error' do
+ expect { blob.data }.to raise_error(ArgumentError, 'invalid address')
+ end
end
end
end
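
The rewritten blob spec now builds blobs on top of a persisted container repository instead of a bare registry client. Condensed, the behaviour asserted above looks roughly like this (factory names and the registry stub mirror the spec; nothing beyond the expectations is assumed):

    repository = create(:container_repository, name: 'image', project: project)
    blob = ContainerRegistry::Blob.new(repository, 'digest'    => 'sha256:0123456789012345',
                                                   'mediaType' => 'binary',
                                                   'size'      => 1000)

    blob.path            # => "group/test/image@sha256:0123456789012345"
    blob.digest          # => "sha256:0123456789012345"
    blob.short_revision  # => "012345678"
    blob.delete          # DELETE /v2/group/test/image/blobs/sha256:... against the stubbed registry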
diff --git a/spec/lib/container_registry/path_spec.rb b/spec/lib/container_registry/path_spec.rb
new file mode 100644
index 00000000000..b9c4572c269
--- /dev/null
+++ b/spec/lib/container_registry/path_spec.rb
@@ -0,0 +1,212 @@
+require 'spec_helper'
+
+describe ContainerRegistry::Path do
+ subject { described_class.new(path) }
+
+ describe '#components' do
+ let(:path) { 'path/to/some/project' }
+
+ it 'splits components by a forward slash' do
+ expect(subject.components).to eq %w[path to some project]
+ end
+ end
+
+ describe '#nodes' do
+ context 'when repository path is valid' do
+ let(:path) { 'path/to/some/project' }
+
+ it 'returns all project-path-like nodes in reverse order' do
+ expect(subject.nodes).to eq %w[path/to/some/project
+ path/to/some
+ path/to]
+ end
+ end
+
+ context 'when repository path is invalid' do
+ let(:path) { '' }
+
+ it 'raises an error' do
+ expect { subject.nodes }
+ .to raise_error described_class::InvalidRegistryPathError
+ end
+ end
+ end
+
+ describe '#to_s' do
+ let(:path) { 'some/image' }
+
+ it 'returns the repository path as a string' do
+ expect(subject.to_s).to eq path
+ end
+ end
+
+ describe '#valid?' do
+ context 'when path has less than two components' do
+ let(:path) { 'something/' }
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when path has more than allowed number of components' do
+ let(:path) { 'a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/r/s/t/u/w/y/z' }
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when path has invalid characters' do
+ let(:path) { 'some\path' }
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when path has two or more components' do
+ let(:path) { 'some/path' }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when path is related to multi-level image' do
+ let(:path) { 'some/path/my/image' }
+
+ it { is_expected.to be_valid }
+ end
+ end
+
+ describe '#has_repository?' do
+ context 'when project exists' do
+ let(:project) { create(:empty_project) }
+ let(:path) { "#{project.full_path}/my/image" }
+
+ context 'when path already has matching repository' do
+ before do
+ create(:container_repository, project: project, name: 'my/image')
+ end
+
+ it { is_expected.to have_repository }
+ it { is_expected.to have_project }
+ end
+
+ context 'when path does not have matching repository' do
+ it { is_expected.not_to have_repository }
+ it { is_expected.to have_project }
+ end
+ end
+
+ context 'when project does not exist' do
+ let(:path) { 'some/project/my/image' }
+
+ it { is_expected.not_to have_repository }
+ it { is_expected.not_to have_project }
+ end
+ end
+
+ describe '#repository_project' do
+ let(:group) { create(:group, path: 'some_group') }
+
+ context 'when project for given path exists' do
+ let(:path) { 'some_group/some_project' }
+
+ before do
+ create(:empty_project, group: group, name: 'some_project')
+ create(:empty_project, name: 'some_project')
+ end
+
+ it 'returns a correct project' do
+ expect(subject.repository_project.group).to eq group
+ end
+ end
+
+ context 'when project for given path does not exist' do
+ let(:path) { 'not/matching' }
+
+ it 'returns nil' do
+ expect(subject.repository_project).to be_nil
+ end
+ end
+
+ context 'when matching multi-level path' do
+ let(:project) do
+ create(:empty_project, group: group, name: 'some_project')
+ end
+
+ context 'when using the zero-level path' do
+ let(:path) { project.full_path }
+
+ it 'supports zero-level path' do
+ expect(subject.repository_project).to eq project
+ end
+ end
+
+ context 'when using first-level path' do
+ let(:path) { "#{project.full_path}/repository" }
+
+ it 'supports first-level path' do
+ expect(subject.repository_project).to eq project
+ end
+ end
+
+ context 'when using second-level path' do
+ let(:path) { "#{project.full_path}/repository/name" }
+
+ it 'supports second-level path' do
+ expect(subject.repository_project).to eq project
+ end
+ end
+
+ context 'when using too deep nesting in the path' do
+ let(:path) { "#{project.full_path}/repository/name/invalid" }
+
+ it 'does not support three levels of nesting' do
+ expect(subject.repository_project).to be_nil
+ end
+ end
+ end
+ end
+
+ describe '#repository_name' do
+ context 'when project does not exist' do
+ let(:path) { 'some/name' }
+
+ it 'returns nil' do
+ expect(subject.repository_name).to be_nil
+ end
+ end
+
+ context 'when project exists' do
+ let(:group) { create(:group, path: 'some_group') }
+
+ let(:project) do
+ create(:empty_project, group: group, name: 'some_project')
+ end
+
+ before do
+ allow(path).to receive(:repository_project)
+ .and_return(project)
+ end
+
+ context 'when project path equal repository path' do
+ let(:path) { 'some_group/some_project' }
+
+ it 'returns an empty string' do
+ expect(subject.repository_name).to eq ''
+ end
+ end
+
+ context 'when repository path has one additional level' do
+ let(:path) { 'some_group/some_project/repository' }
+
+ it 'returns a correct repository name' do
+ expect(subject.repository_name).to eq 'repository'
+ end
+ end
+
+ context 'when repository path has two additional levels' do
+ let(:path) { 'some_group/some_project/repository/image' }
+
+ it 'returns a correct repository name' do
+ expect(subject.repository_name).to eq 'repository/image'
+ end
+ end
+ end
+ end
+end
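
Since ContainerRegistry::Path is a brand-new class, a compact restatement of the API exercised above may help; this adds no behaviour beyond what the expectations assert:

    path = ContainerRegistry::Path.new('some_group/some_project/repository/image')

    path.components          # => %w[some_group some_project repository image]
    path.nodes               # project-like path prefixes, longest first
    path.valid?              # => true -- two or more components, no invalid characters
    path.repository_project  # Project matching the longest existing prefix, or nil
    path.repository_name     # => "repository/image" once the project part is stripped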
diff --git a/spec/lib/container_registry/registry_spec.rb b/spec/lib/container_registry/registry_spec.rb
index 4f3f8b24fc4..4d6eea94bf0 100644
--- a/spec/lib/container_registry/registry_spec.rb
+++ b/spec/lib/container_registry/registry_spec.rb
@@ -10,7 +10,7 @@ describe ContainerRegistry::Registry do
it { is_expected.to respond_to(:uri) }
it { is_expected.to respond_to(:path) }
- it { expect(subject.repository('test')).not_to be_nil }
+ it { expect(subject).not_to be_nil }
context '#path' do
subject { registry.path }
diff --git a/spec/lib/container_registry/repository_spec.rb b/spec/lib/container_registry/repository_spec.rb
deleted file mode 100644
index c364e759108..00000000000
--- a/spec/lib/container_registry/repository_spec.rb
+++ /dev/null
@@ -1,65 +0,0 @@
-require 'spec_helper'
-
-describe ContainerRegistry::Repository do
- let(:registry) { ContainerRegistry::Registry.new('http://example.com') }
- let(:repository) { registry.repository('group/test') }
-
- it { expect(repository).to respond_to(:registry) }
- it { expect(repository).to delegate_method(:client).to(:registry) }
- it { expect(repository.tag('test')).not_to be_nil }
-
- context '#path' do
- subject { repository.path }
-
- it { is_expected.to eq('example.com/group/test') }
- end
-
- context 'manifest processing' do
- before do
- stub_request(:get, 'http://example.com/v2/group/test/tags/list').
- with(headers: { 'Accept' => 'application/vnd.docker.distribution.manifest.v2+json' }).
- to_return(
- status: 200,
- body: JSON.dump(tags: ['test']),
- headers: { 'Content-Type' => 'application/json' })
- end
-
- context '#manifest' do
- subject { repository.manifest }
-
- it { is_expected.not_to be_nil }
- end
-
- context '#valid?' do
- subject { repository.valid? }
-
- it { is_expected.to be_truthy }
- end
-
- context '#tags' do
- subject { repository.tags }
-
- it { is_expected.not_to be_empty }
- end
- end
-
- context '#delete_tags' do
- let(:tag) { ContainerRegistry::Tag.new(repository, 'tag') }
-
- before { expect(repository).to receive(:tags).twice.and_return([tag]) }
-
- subject { repository.delete_tags }
-
- context 'succeeds' do
- before { expect(tag).to receive(:delete).and_return(true) }
-
- it { is_expected.to be_truthy }
- end
-
- context 'any fails' do
- before { expect(tag).to receive(:delete).and_return(false) }
-
- it { is_expected.to be_falsey }
- end
- end
-end
diff --git a/spec/lib/container_registry/tag_spec.rb b/spec/lib/container_registry/tag_spec.rb
index c5e31ae82b6..bc1912d8e6c 100644
--- a/spec/lib/container_registry/tag_spec.rb
+++ b/spec/lib/container_registry/tag_spec.rb
@@ -1,25 +1,59 @@
require 'spec_helper'
describe ContainerRegistry::Tag do
- let(:registry) { ContainerRegistry::Registry.new('http://example.com') }
- let(:repository) { registry.repository('group/test') }
- let(:tag) { repository.tag('tag') }
- let(:headers) { { 'Accept' => 'application/vnd.docker.distribution.manifest.v2+json' } }
+ let(:group) { create(:group, name: 'group') }
+ let(:project) { create(:project, path: 'test', group: group) }
+
+ let(:repository) do
+ create(:container_repository, name: '', project: project)
+ end
+
+ let(:headers) do
+ { 'Accept' => 'application/vnd.docker.distribution.manifest.v2+json' }
+ end
+
+ let(:tag) { described_class.new(repository, 'tag') }
+
+ before do
+ stub_container_registry_config(enabled: true,
+ api_url: 'http://registry.gitlab',
+ host_port: 'registry.gitlab')
+ end
it { expect(tag).to respond_to(:repository) }
it { expect(tag).to delegate_method(:registry).to(:repository) }
it { expect(tag).to delegate_method(:client).to(:repository) }
- context '#path' do
- subject { tag.path }
+ describe '#path' do
+ context 'when tag belongs to zero-level repository' do
+ let(:repository) do
+ create(:container_repository, name: '',
+ tags: %w[rc1],
+ project: project)
+ end
+
+ it 'returns path to the image' do
+ expect(tag.path).to eq('group/test:tag')
+ end
+ end
+
+ context 'when tag belongs to first-level repository' do
+ let(:repository) do
+ create(:container_repository, name: 'my_image',
+ tags: %w[tag],
+ project: project)
+ end
- it { is_expected.to eq('example.com/group/test:tag') }
+ it 'returns path to the image' do
+ expect(tag.path).to eq('group/test/my_image:tag')
+ end
+ end
end
context 'manifest processing' do
context 'schema v1' do
before do
- stub_request(:get, 'http://example.com/v2/group/test/manifests/tag').
+ stub_request(:get, 'http://registry.gitlab/v2/group/test/manifests/tag').
with(headers: headers).
to_return(
status: 200,
@@ -56,7 +90,7 @@ describe ContainerRegistry::Tag do
context 'schema v2' do
before do
- stub_request(:get, 'http://example.com/v2/group/test/manifests/tag').
+ stub_request(:get, 'http://registry.gitlab/v2/group/test/manifests/tag').
with(headers: headers).
to_return(
status: 200,
@@ -93,7 +127,7 @@ describe ContainerRegistry::Tag do
context 'when locally stored' do
before do
- stub_request(:get, 'http://example.com/v2/group/test/blobs/sha256:d7a513a663c1a6dcdba9ed832ca53c02ac2af0c333322cd6ca92936d1d9917ac').
+ stub_request(:get, 'http://registry.gitlab/v2/group/test/blobs/sha256:d7a513a663c1a6dcdba9ed832ca53c02ac2af0c333322cd6ca92936d1d9917ac').
with(headers: { 'Accept' => 'application/octet-stream' }).
to_return(
status: 200,
@@ -105,7 +139,7 @@ describe ContainerRegistry::Tag do
context 'when externally stored' do
before do
- stub_request(:get, 'http://example.com/v2/group/test/blobs/sha256:d7a513a663c1a6dcdba9ed832ca53c02ac2af0c333322cd6ca92936d1d9917ac').
+ stub_request(:get, 'http://registry.gitlab/v2/group/test/blobs/sha256:d7a513a663c1a6dcdba9ed832ca53c02ac2af0c333322cd6ca92936d1d9917ac').
with(headers: { 'Accept' => 'application/octet-stream' }).
to_return(
status: 307,
@@ -123,29 +157,29 @@ describe ContainerRegistry::Tag do
end
end
- context 'manifest digest' do
+ context 'with stubbed digest' do
before do
- stub_request(:head, 'http://example.com/v2/group/test/manifests/tag').
- with(headers: headers).
- to_return(status: 200, headers: { 'Docker-Content-Digest' => 'sha256:digest' })
+ stub_request(:head, 'http://registry.gitlab/v2/group/test/manifests/tag')
+ .with(headers: headers)
+ .to_return(status: 200, headers: { 'Docker-Content-Digest' => 'sha256:digest' })
end
- context '#digest' do
- subject { tag.digest }
-
- it { is_expected.to eq('sha256:digest') }
+ describe '#digest' do
+ it 'returns a correct tag digest' do
+ expect(tag.digest).to eq 'sha256:digest'
+ end
end
- context '#delete' do
+ describe '#delete' do
before do
- stub_request(:delete, 'http://example.com/v2/group/test/manifests/sha256:digest').
- with(headers: headers).
- to_return(status: 200)
+ stub_request(:delete, 'http://registry.gitlab/v2/group/test/manifests/sha256:digest')
+ .with(headers: headers)
+ .to_return(status: 200)
end
- subject { tag.delete }
-
- it { is_expected.to be_truthy }
+ it 'correctly deletes the tag' do
+ expect(tag.delete).to be_truthy
+ end
end
end
end
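
As with the blob spec, the tag spec now starts from a container_repository factory. The behaviour asserted above, condensed to exactly what the expectations state:

    repository = create(:container_repository, name: 'my_image', project: project)
    tag = ContainerRegistry::Tag.new(repository, 'tag')

    tag.path    # => "group/test/my_image:tag"
    tag.digest  # => "sha256:digest", read from the Docker-Content-Digest header
    tag.delete  # deletes the manifest for that digest, true on HTTP 200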
diff --git a/spec/lib/gitlab/backend/shell_spec.rb b/spec/lib/gitlab/backend/shell_spec.rb
index 4b08a02ec73..6675d26734e 100644
--- a/spec/lib/gitlab/backend/shell_spec.rb
+++ b/spec/lib/gitlab/backend/shell_spec.rb
@@ -69,6 +69,15 @@ describe Gitlab::Shell, lib: true do
expect(io).to have_received(:puts).with("key-42\tssh-rsa foo")
end
+ it 'handles multiple spaces in the key' do
+ io = spy(:io)
+ adder = described_class.new(io)
+
+ adder.add_key('key-42', "ssh-rsa  foo")
+
+ expect(io).to have_received(:puts).with("key-42\tssh-rsa  foo")
+ end
+
it 'raises an exception if the key contains a tab' do
expect do
described_class.new(StringIO.new).add_key('key-42', "ssh-rsa\tfoobar")
diff --git a/spec/lib/gitlab/ci/cron_parser_spec.rb b/spec/lib/gitlab/ci/cron_parser_spec.rb
new file mode 100644
index 00000000000..0864bc7258d
--- /dev/null
+++ b/spec/lib/gitlab/ci/cron_parser_spec.rb
@@ -0,0 +1,116 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::CronParser do
+ shared_examples_for "returns time in the future" do
+ it { is_expected.to be > Time.now }
+ end
+
+ describe '#next_time_from' do
+ subject { described_class.new(cron, cron_timezone).next_time_from(Time.now) }
+
+ context 'when cron and cron_timezone are valid' do
+ context 'when specific time' do
+ let(:cron) { '3 4 5 6 *' }
+ let(:cron_timezone) { 'UTC' }
+
+ it_behaves_like "returns time in the future"
+
+ it 'returns exact time' do
+ expect(subject.min).to eq(3)
+ expect(subject.hour).to eq(4)
+ expect(subject.day).to eq(5)
+ expect(subject.month).to eq(6)
+ end
+ end
+
+ context 'when specific day of week' do
+ let(:cron) { '* * * * 0' }
+ let(:cron_timezone) { 'UTC' }
+
+ it_behaves_like "returns time in the future"
+
+ it 'returns exact day of week' do
+ expect(subject.wday).to eq(0)
+ end
+ end
+
+ context 'when slash used' do
+ let(:cron) { '*/10 */6 */10 */10 *' }
+ let(:cron_timezone) { 'UTC' }
+
+ it_behaves_like "returns time in the future"
+
+ it 'returns specific time' do
+ expect(subject.min).to be_in([0, 10, 20, 30, 40, 50])
+ expect(subject.hour).to be_in([0, 6, 12, 18])
+ expect(subject.day).to be_in([1, 11, 21, 31])
+ expect(subject.month).to be_in([1, 11])
+ end
+ end
+
+ context 'when range used' do
+ let(:cron) { '0,20,40 * 1-5 * *' }
+ let(:cron_timezone) { 'UTC' }
+
+ it_behaves_like "returns time in the future"
+
+ it 'returns specific time' do
+ expect(subject.min).to be_in([0, 20, 40])
+ expect(subject.day).to be_in((1..5).to_a)
+ end
+ end
+
+ context 'when cron_timezone is US/Pacific' do
+ let(:cron) { '0 0 * * *' }
+ let(:cron_timezone) { 'US/Pacific' }
+
+ it_behaves_like "returns time in the future"
+
+ it 'converts the time to the server time zone' do
+ expect(subject.hour).to eq((Time.zone.now.in_time_zone(cron_timezone).utc_offset / 60 / 60).abs)
+ end
+ end
+ end
+
+ context 'when cron and cron_timezone are invalid' do
+ let(:cron) { 'invalid_cron' }
+ let(:cron_timezone) { 'invalid_cron_timezone' }
+
+ it 'returns nil' do
+ is_expected.to be_nil
+ end
+ end
+ end
+
+ describe '#cron_valid?' do
+ subject { described_class.new(cron, Gitlab::Ci::CronParser::VALID_SYNTAX_SAMPLE_TIME_ZONE).cron_valid? }
+
+ context 'when cron is valid' do
+ let(:cron) { '* * * * *' }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when cron is invalid' do
+ let(:cron) { '*********' }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ describe '#cron_timezone_valid?' do
+ subject { described_class.new(Gitlab::Ci::CronParser::VALID_SYNTAX_SAMPLE_CRON, cron_timezone).cron_timezone_valid? }
+
+ context 'when cron_timezone is valid' do
+ let(:cron_timezone) { 'Europe/Istanbul' }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when cron_timezone is invalid' do
+ let(:cron_timezone) { 'Invalid-zone' }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+end
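
Gitlab::Ci::CronParser is new in this changeset; the spec pins down its three public calls. A short sketch using only what the examples assert:

    parser = Gitlab::Ci::CronParser.new('0 0 * * *', 'US/Pacific')

    parser.cron_valid?               # => true
    parser.cron_timezone_valid?      # => true
    parser.next_time_from(Time.now)  # next occurrence converted to server time; nil when cron or zone is invalid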
diff --git a/spec/lib/gitlab/ci/trace/stream_spec.rb b/spec/lib/gitlab/ci/trace/stream_spec.rb
new file mode 100644
index 00000000000..f1a1a71c528
--- /dev/null
+++ b/spec/lib/gitlab/ci/trace/stream_spec.rb
@@ -0,0 +1,201 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Trace::Stream do
+ describe 'delegates' do
+ subject { described_class.new { nil } }
+
+ it { is_expected.to delegate_method(:close).to(:stream) }
+ it { is_expected.to delegate_method(:tell).to(:stream) }
+ it { is_expected.to delegate_method(:seek).to(:stream) }
+ it { is_expected.to delegate_method(:size).to(:stream) }
+ it { is_expected.to delegate_method(:path).to(:stream) }
+ it { is_expected.to delegate_method(:truncate).to(:stream) }
+ it { is_expected.to delegate_method(:valid?).to(:stream).as(:present?) }
+ it { is_expected.to delegate_method(:file?).to(:path).as(:present?) }
+ end
+
+ describe '#limit' do
+ let(:stream) do
+ described_class.new do
+ StringIO.new("12345678")
+ end
+ end
+
+ it 'starts from the beginning if the limit is larger than the size' do
+ stream.limit(10)
+
+ expect(stream.tell).to eq(0)
+ end
+
+ it 'starts from the end if the limit is smaller than the size' do
+ stream.limit(2)
+
+ expect(stream.tell).to eq(6)
+ end
+ end
+
+ describe '#append' do
+ let(:stream) do
+ described_class.new do
+ StringIO.new("12345678")
+ end
+ end
+
+ it "truncates and append content" do
+ stream.append("89", 4)
+ stream.seek(0)
+
+ expect(stream.size).to eq(6)
+ expect(stream.raw).to eq("123489")
+ end
+ end
+
+ describe '#set' do
+ let(:stream) do
+ described_class.new do
+ StringIO.new("12345678")
+ end
+ end
+
+ before do
+ stream.set("8901")
+ end
+
+ it "overwrite content" do
+ stream.seek(0)
+
+ expect(stream.size).to eq(4)
+ expect(stream.raw).to eq("8901")
+ end
+ end
+
+ describe '#raw' do
+ let(:path) { __FILE__ }
+ let(:lines) { File.readlines(path) }
+ let(:stream) do
+ described_class.new do
+ File.open(path)
+ end
+ end
+
+ it 'returns all contents if last_lines is not specified' do
+ result = stream.raw
+
+ expect(result).to eq(lines.join)
+ expect(result.encoding).to eq(Encoding.default_external)
+ end
+
+ context 'limit max lines' do
+ before do
+ # specifying a small BUFFER_SIZE forces seeking backwards
+ allow(described_class).to receive(:BUFFER_SIZE)
+ .and_return(2)
+ end
+
+ it 'returns last few lines' do
+ result = stream.raw(last_lines: 2)
+
+ expect(result).to eq(lines.last(2).join)
+ expect(result.encoding).to eq(Encoding.default_external)
+ end
+
+ it 'returns everything if trying to get too many lines' do
+ result = stream.raw(last_lines: lines.size * 2)
+
+ expect(result).to eq(lines.join)
+ expect(result.encoding).to eq(Encoding.default_external)
+ end
+ end
+ end
+
+ describe '#html_with_state' do
+ let(:stream) do
+ described_class.new do
+ StringIO.new("1234")
+ end
+ end
+
+ it 'returns html content with state' do
+ result = stream.html_with_state
+
+ expect(result.html).to eq("1234")
+ end
+
+ context 'follow-up state' do
+ let!(:last_result) { stream.html_with_state }
+
+ before do
+ stream.append("5678", 4)
+ stream.seek(0)
+ end
+
+ it "returns appended trace" do
+ result = stream.html_with_state(last_result.state)
+
+ expect(result.append).to be_truthy
+ expect(result.html).to eq("5678")
+ end
+ end
+ end
+
+ describe '#html' do
+ let(:stream) do
+ described_class.new do
+ StringIO.new("12\n34\n56")
+ end
+ end
+
+ it "returns html" do
+ expect(stream.html).to eq("12<br>34<br>56")
+ end
+
+ it "returns html for last line only" do
+ expect(stream.html(last_lines: 1)).to eq("56")
+ end
+ end
+
+ describe '#extract_coverage' do
+ let(:stream) do
+ described_class.new do
+ StringIO.new(data)
+ end
+ end
+
+ subject { stream.extract_coverage(regex) }
+
+ context 'valid content & regex' do
+ let(:data) { 'Coverage 1033 / 1051 LOC (98.29%) covered' }
+ let(:regex) { '\(\d+.\d+\%\) covered' }
+
+ it { is_expected.to eq(98.29) }
+ end
+
+ context 'valid content & bad regex' do
+ let(:data) { 'Coverage 1033 / 1051 LOC (98.29%) covered\n' }
+ let(:regex) { 'very covered' }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'no coverage content & regex' do
+ let(:data) { 'No coverage for today :sad:' }
+ let(:regex) { '\(\d+.\d+\%\) covered' }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'multiple results in content & regex' do
+ let(:data) { ' (98.39%) covered. (98.29%) covered' }
+ let(:regex) { '\(\d+.\d+\%\) covered' }
+
+ it { is_expected.to eq(98.29) }
+ end
+
+ context 'using a regex capture' do
+ let(:data) { 'TOTAL 9926 3489 65%' }
+ let(:regex) { 'TOTAL\s+\d+\s+\d+\s+(\d{1,3}\%)' }
+
+ it { is_expected.to eq(65) }
+ end
+ end
+end
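
The new Gitlab::Ci::Trace::Stream wraps a lazily opened IO (the block form used throughout the spec). The operations covered above, each shown independently and limited to the calls the examples make:

    stream = Gitlab::Ci::Trace::Stream.new { StringIO.new("12345678") }

    stream.limit(2)            # seek so only the last 2 bytes remain readable
    stream.append("89", 4)     # truncate at offset 4, then write
    stream.set("8901")         # replace the whole content
    stream.raw(last_lines: 1)  # tail of the trace as plain text
    stream.html(last_lines: 1) # same, rendered as HTML
    stream.extract_coverage('\(\d+.\d+\%\) covered')  # => Float or nil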
diff --git a/spec/lib/gitlab/ci/trace_reader_spec.rb b/spec/lib/gitlab/ci/trace_reader_spec.rb
deleted file mode 100644
index ff5551bf703..00000000000
--- a/spec/lib/gitlab/ci/trace_reader_spec.rb
+++ /dev/null
@@ -1,52 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::Ci::TraceReader do
- let(:path) { __FILE__ }
- let(:lines) { File.readlines(path) }
- let(:bytesize) { lines.sum(&:bytesize) }
-
- it 'returns last few lines' do
- 10.times do
- subject = build_subject
- last_lines = random_lines
-
- expected = lines.last(last_lines).join
- result = subject.read(last_lines: last_lines)
-
- expect(result).to eq(expected)
- expect(result.encoding).to eq(Encoding.default_external)
- end
- end
-
- it 'returns everything if trying to get too many lines' do
- result = build_subject.read(last_lines: lines.size * 2)
-
- expect(result).to eq(lines.join)
- expect(result.encoding).to eq(Encoding.default_external)
- end
-
- it 'returns all contents if last_lines is not specified' do
- result = build_subject.read
-
- expect(result).to eq(lines.join)
- expect(result.encoding).to eq(Encoding.default_external)
- end
-
- it 'raises an error if not passing an integer for last_lines' do
- expect do
- build_subject.read(last_lines: lines)
- end.to raise_error(ArgumentError)
- end
-
- def random_lines
- Random.rand(lines.size) + 1
- end
-
- def random_buffer
- Random.rand(bytesize) + 1
- end
-
- def build_subject
- described_class.new(__FILE__, buffer_size: random_buffer)
- end
-end
diff --git a/spec/lib/gitlab/ci/trace_spec.rb b/spec/lib/gitlab/ci/trace_spec.rb
new file mode 100644
index 00000000000..69e8dc9220d
--- /dev/null
+++ b/spec/lib/gitlab/ci/trace_spec.rb
@@ -0,0 +1,216 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Trace do
+ let(:build) { create(:ci_build) }
+ let(:trace) { described_class.new(build) }
+
+ describe "associations" do
+ it { expect(trace).to respond_to(:job) }
+ it { expect(trace).to delegate_method(:old_trace).to(:job) }
+ end
+
+ describe '#html' do
+ before do
+ trace.set("12\n34")
+ end
+
+ it "returns formatted html" do
+ expect(trace.html).to eq("12<br>34")
+ end
+
+ it "returns last line of formatted html" do
+ expect(trace.html(last_lines: 1)).to eq("34")
+ end
+ end
+
+ describe '#raw' do
+ before do
+ trace.set("12\n34")
+ end
+
+ it "returns raw output" do
+ expect(trace.raw).to eq("12\n34")
+ end
+
+ it "returns last line of raw output" do
+ expect(trace.raw(last_lines: 1)).to eq("34")
+ end
+ end
+
+ describe '#extract_coverage' do
+ let(:regex) { '\(\d+.\d+\%\) covered' }
+
+ before do
+ trace.set('Coverage 1033 / 1051 LOC (98.29%) covered')
+ end
+
+ it "returns valid coverage" do
+ expect(trace.extract_coverage(regex)).to eq(98.29)
+ end
+ end
+
+ describe '#set' do
+ before do
+ trace.set("12")
+ end
+
+ it "returns trace" do
+ expect(trace.raw).to eq("12")
+ end
+
+ context 'overwrite trace' do
+ before do
+ trace.set("34")
+ end
+
+ it "returns new trace" do
+ expect(trace.raw).to eq("34")
+ end
+ end
+
+ context 'runners token' do
+ let(:token) { 'my_secret_token' }
+
+ before do
+ build.project.update(runners_token: token)
+ trace.set(token)
+ end
+
+ it "hides token" do
+ expect(trace.raw).not_to include(token)
+ end
+ end
+
+ context 'hides build token' do
+ let(:token) { 'my_secret_token' }
+
+ before do
+ build.update(token: token)
+ trace.set(token)
+ end
+
+ it "hides token" do
+ expect(trace.raw).not_to include(token)
+ end
+ end
+ end
+
+ describe '#append' do
+ before do
+ trace.set("1234")
+ end
+
+ it "returns correct trace" do
+ expect(trace.append("56", 4)).to eq(6)
+ expect(trace.raw).to eq("123456")
+ end
+
+ context 'when appending the trace at a different offset' do
+ it "fails to append" do
+ expect(trace.append("56", 2)).to eq(-4)
+ expect(trace.raw).to eq("1234")
+ end
+ end
+
+ context 'runners token' do
+ let(:token) { 'my_secret_token' }
+
+ before do
+ build.project.update(runners_token: token)
+ trace.append(token, 0)
+ end
+
+ it "hides token" do
+ expect(trace.raw).not_to include(token)
+ end
+ end
+
+ context 'build token' do
+ let(:token) { 'my_secret_token' }
+
+ before do
+ build.update(token: token)
+ trace.append(token, 0)
+ end
+
+ it "hides token" do
+ expect(trace.raw).not_to include(token)
+ end
+ end
+ end
+
+ describe 'trace handling' do
+ context 'trace does not exist' do
+ it { expect(trace.exist?).to be(false) }
+ end
+
+ context 'new trace path is used' do
+ before do
+ trace.send(:ensure_directory)
+
+ File.open(trace.send(:default_path), "w") do |file|
+ file.write("data")
+ end
+ end
+
+ it "trace exist" do
+ expect(trace.exist?).to be(true)
+ end
+
+ it "can be erased" do
+ trace.erase!
+ expect(trace.exist?).to be(false)
+ end
+ end
+
+ context 'deprecated path' do
+ let(:path) { trace.send(:deprecated_path) }
+
+ context 'with valid ci_id' do
+ before do
+ build.project.update(ci_id: 1000)
+
+ FileUtils.mkdir_p(File.dirname(path))
+
+ File.open(path, "w") do |file|
+ file.write("data")
+ end
+ end
+
+ it "trace exist" do
+ expect(trace.exist?).to be(true)
+ end
+
+ it "can be erased" do
+ trace.erase!
+ expect(trace.exist?).to be(false)
+ end
+ end
+
+ context 'without valid ci_id' do
+ it "does not return deprecated path" do
+ expect(path).to be_nil
+ end
+ end
+ end
+
+ context 'stored in database' do
+ before do
+ build.send(:write_attribute, :trace, "data")
+ end
+
+ it "trace exist" do
+ expect(trace.exist?).to be(true)
+ end
+
+ it "can be erased" do
+ trace.erase!
+ expect(trace.exist?).to be(false)
+ end
+
+ it "returns database data" do
+ expect(trace.raw).to eq("data")
+ end
+ end
+ end
+end
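
Gitlab::Ci::Trace is the higher-level wrapper a build exposes; the spec above covers writing, appending, rendering and erasing. Summarised with the same values the examples use:

    trace = Gitlab::Ci::Trace.new(build)

    trace.set("1234")          # overwrites any previous trace
    trace.append("56", 4)      # => 6 (new size); a negative offset signals an append mismatch
    trace.html(last_lines: 1)  # trace rendered as HTML, optionally tail only
    trace.extract_coverage('\(\d+.\d+\%\) covered')
    trace.exist?               # true for the new path, the deprecated ci_id path, or DB-stored traces
    trace.erase!               # removes the stored trace again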
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index e007044868c..4ac79454647 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -58,6 +58,48 @@ describe Gitlab::Database::MigrationHelpers, lib: true do
end
end
+ describe '#remove_concurrent_index' do
+ context 'outside a transaction' do
+ before do
+ allow(model).to receive(:transaction_open?).and_return(false)
+ end
+
+ context 'using PostgreSQL' do
+ before do
+ allow(Gitlab::Database).to receive(:postgresql?).and_return(true)
+ allow(model).to receive(:disable_statement_timeout)
+ end
+
+ it 'removes the index concurrently' do
+ expect(model).to receive(:remove_index).
+ with(:users, { algorithm: :concurrently, column: :foo })
+
+ model.remove_concurrent_index(:users, :foo)
+ end
+ end
+
+ context 'using MySQL' do
+ it 'removes an index' do
+ expect(Gitlab::Database).to receive(:postgresql?).and_return(false)
+
+ expect(model).to receive(:remove_index).
+ with(:users, { column: :foo })
+
+ model.remove_concurrent_index(:users, :foo)
+ end
+ end
+ end
+
+ context 'inside a transaction' do
+ it 'raises RuntimeError' do
+ expect(model).to receive(:transaction_open?).and_return(true)
+
+ expect { model.remove_concurrent_index(:users, :foo) }.
+ to raise_error(RuntimeError)
+ end
+ end
+ end
+
describe '#add_concurrent_foreign_key' do
context 'inside a transaction' do
it 'raises an error' do
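
The new #remove_concurrent_index examples encode the same contract as add_concurrent_index: it must run outside a transaction, uses algorithm: :concurrently on PostgreSQL, and falls back to a plain remove_index on MySQL. A hedged sketch of a migration using it; pairing it with disable_ddl_transaction! is an assumption drawn from the no-transaction requirement, not something this spec asserts:

    class RemoveFooIndexFromUsers < ActiveRecord::Migration
      include Gitlab::Database::MigrationHelpers

      disable_ddl_transaction!

      def up
        remove_concurrent_index :users, :foo
      end
    end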
diff --git a/spec/lib/gitlab/etag_caching/middleware_spec.rb b/spec/lib/gitlab/etag_caching/middleware_spec.rb
index 6ec4360adc2..c872d8232b0 100644
--- a/spec/lib/gitlab/etag_caching/middleware_spec.rb
+++ b/spec/lib/gitlab/etag_caching/middleware_spec.rb
@@ -47,9 +47,9 @@ describe Gitlab::EtagCaching::Middleware do
it 'tracks "etag_caching_key_not_found" event' do
expect(Gitlab::Metrics).to receive(:add_event)
- .with(:etag_caching_middleware_used)
+ .with(:etag_caching_middleware_used, endpoint: 'issue_notes')
expect(Gitlab::Metrics).to receive(:add_event)
- .with(:etag_caching_key_not_found)
+ .with(:etag_caching_key_not_found, endpoint: 'issue_notes')
middleware.call(build_env(path, if_none_match))
end
@@ -93,9 +93,9 @@ describe Gitlab::EtagCaching::Middleware do
it 'tracks "etag_caching_cache_hit" event' do
expect(Gitlab::Metrics).to receive(:add_event)
- .with(:etag_caching_middleware_used)
+ .with(:etag_caching_middleware_used, endpoint: 'issue_notes')
expect(Gitlab::Metrics).to receive(:add_event)
- .with(:etag_caching_cache_hit)
+ .with(:etag_caching_cache_hit, endpoint: 'issue_notes')
middleware.call(build_env(path, if_none_match))
end
@@ -132,9 +132,9 @@ describe Gitlab::EtagCaching::Middleware do
mock_app_response
expect(Gitlab::Metrics).to receive(:add_event)
- .with(:etag_caching_middleware_used)
+ .with(:etag_caching_middleware_used, endpoint: 'issue_notes')
expect(Gitlab::Metrics).to receive(:add_event)
- .with(:etag_caching_resource_changed)
+ .with(:etag_caching_resource_changed, endpoint: 'issue_notes')
middleware.call(build_env(path, if_none_match))
end
@@ -150,9 +150,9 @@ describe Gitlab::EtagCaching::Middleware do
it 'tracks "etag_caching_header_missing" event' do
expect(Gitlab::Metrics).to receive(:add_event)
- .with(:etag_caching_middleware_used)
+ .with(:etag_caching_middleware_used, endpoint: 'issue_notes')
expect(Gitlab::Metrics).to receive(:add_event)
- .with(:etag_caching_header_missing)
+ .with(:etag_caching_header_missing, endpoint: 'issue_notes')
middleware.call(build_env(path, if_none_match))
end
diff --git a/spec/lib/gitlab/git/attributes_spec.rb b/spec/lib/gitlab/git/attributes_spec.rb
index 9c011e34c11..1cfd8db09a5 100644
--- a/spec/lib/gitlab/git/attributes_spec.rb
+++ b/spec/lib/gitlab/git/attributes_spec.rb
@@ -2,7 +2,7 @@ require 'spec_helper'
describe Gitlab::Git::Attributes, seed_helper: true do
let(:path) do
- File.join(SEED_REPOSITORY_PATH, 'with-git-attributes.git')
+ File.join(SEED_STORAGE_PATH, 'with-git-attributes.git')
end
subject { described_class.new(path) }
@@ -141,7 +141,7 @@ describe Gitlab::Git::Attributes, seed_helper: true do
end
it 'does not yield when the attributes file has an unsupported encoding' do
- path = File.join(SEED_REPOSITORY_PATH, 'with-invalid-git-attributes.git')
+ path = File.join(SEED_STORAGE_PATH, 'with-invalid-git-attributes.git')
attrs = described_class.new(path)
expect { |b| attrs.each_line(&b) }.not_to yield_control
diff --git a/spec/lib/gitlab/git/blame_spec.rb b/spec/lib/gitlab/git/blame_spec.rb
index e169f5af6b6..8b041ac69b1 100644
--- a/spec/lib/gitlab/git/blame_spec.rb
+++ b/spec/lib/gitlab/git/blame_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
describe Gitlab::Git::Blame, seed_helper: true do
- let(:repository) { Gitlab::Git::Repository.new(TEST_REPO_PATH) }
+ let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH) }
let(:blame) do
Gitlab::Git::Blame.new(repository, SeedRepo::Commit::ID, "CONTRIBUTING.md")
end
diff --git a/spec/lib/gitlab/git/blob_spec.rb b/spec/lib/gitlab/git/blob_spec.rb
index b883526151e..3f494257545 100644
--- a/spec/lib/gitlab/git/blob_spec.rb
+++ b/spec/lib/gitlab/git/blob_spec.rb
@@ -3,7 +3,7 @@
require "spec_helper"
describe Gitlab::Git::Blob, seed_helper: true do
- let(:repository) { Gitlab::Git::Repository.new(TEST_REPO_PATH) }
+ let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH) }
describe 'initialize' do
let(:blob) { Gitlab::Git::Blob.new(name: 'test') }
diff --git a/spec/lib/gitlab/git/branch_spec.rb b/spec/lib/gitlab/git/branch_spec.rb
index 78234b396c5..cdf1b8beee3 100644
--- a/spec/lib/gitlab/git/branch_spec.rb
+++ b/spec/lib/gitlab/git/branch_spec.rb
@@ -1,7 +1,7 @@
require "spec_helper"
describe Gitlab::Git::Branch, seed_helper: true do
- let(:repository) { Gitlab::Git::Repository.new(TEST_REPO_PATH) }
+ let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH) }
subject { repository.branches }
diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb
index 5cf4631fbfc..3e44c577643 100644
--- a/spec/lib/gitlab/git/commit_spec.rb
+++ b/spec/lib/gitlab/git/commit_spec.rb
@@ -1,7 +1,7 @@
require "spec_helper"
describe Gitlab::Git::Commit, seed_helper: true do
- let(:repository) { Gitlab::Git::Repository.new(TEST_REPO_PATH) }
+ let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH) }
let(:commit) { Gitlab::Git::Commit.find(repository, SeedRepo::Commit::ID) }
let(:rugged_commit) do
repository.rugged.lookup(SeedRepo::Commit::ID)
@@ -9,7 +9,7 @@ describe Gitlab::Git::Commit, seed_helper: true do
describe "Commit info" do
before do
- repo = Gitlab::Git::Repository.new(TEST_REPO_PATH).rugged
+ repo = Gitlab::Git::Repository.new('default', TEST_REPO_PATH).rugged
@committer = {
email: 'mike@smith.com',
@@ -59,7 +59,7 @@ describe Gitlab::Git::Commit, seed_helper: true do
after do
# Erase the new commit so other tests get the original repo
- repo = Gitlab::Git::Repository.new(TEST_REPO_PATH).rugged
+ repo = Gitlab::Git::Repository.new('default', TEST_REPO_PATH).rugged
repo.references.update("refs/heads/master", SeedRepo::LastCommit::ID)
end
end
@@ -95,7 +95,7 @@ describe Gitlab::Git::Commit, seed_helper: true do
end
context 'with broken repo' do
- let(:repository) { Gitlab::Git::Repository.new(TEST_BROKEN_REPO_PATH) }
+ let(:repository) { Gitlab::Git::Repository.new('default', TEST_BROKEN_REPO_PATH) }
it 'returns nil' do
expect(Gitlab::Git::Commit.find(repository, SeedRepo::Commit::ID)).to be_nil
diff --git a/spec/lib/gitlab/git/compare_spec.rb b/spec/lib/gitlab/git/compare_spec.rb
index e28debe1494..7c45071ec45 100644
--- a/spec/lib/gitlab/git/compare_spec.rb
+++ b/spec/lib/gitlab/git/compare_spec.rb
@@ -1,7 +1,7 @@
require "spec_helper"
describe Gitlab::Git::Compare, seed_helper: true do
- let(:repository) { Gitlab::Git::Repository.new(TEST_REPO_PATH) }
+ let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH) }
let(:compare) { Gitlab::Git::Compare.new(repository, SeedRepo::BigCommit::ID, SeedRepo::Commit::ID, false) }
let(:compare_straight) { Gitlab::Git::Compare.new(repository, SeedRepo::BigCommit::ID, SeedRepo::Commit::ID, true) }
diff --git a/spec/lib/gitlab/git/diff_spec.rb b/spec/lib/gitlab/git/diff_spec.rb
index 992126ef153..7253a2edeff 100644
--- a/spec/lib/gitlab/git/diff_spec.rb
+++ b/spec/lib/gitlab/git/diff_spec.rb
@@ -1,7 +1,7 @@
require "spec_helper"
describe Gitlab::Git::Diff, seed_helper: true do
- let(:repository) { Gitlab::Git::Repository.new(TEST_REPO_PATH) }
+ let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH) }
before do
@raw_diff_hash = {
diff --git a/spec/lib/gitlab/git/encoding_helper_spec.rb b/spec/lib/gitlab/git/encoding_helper_spec.rb
index 83311536893..27bcc241b82 100644
--- a/spec/lib/gitlab/git/encoding_helper_spec.rb
+++ b/spec/lib/gitlab/git/encoding_helper_spec.rb
@@ -2,7 +2,7 @@ require "spec_helper"
describe Gitlab::Git::EncodingHelper do
let(:ext_class) { Class.new { extend Gitlab::Git::EncodingHelper } }
- let(:binary_string) { File.join(SEED_REPOSITORY_PATH, 'gitlab_logo.png') }
+ let(:binary_string) { File.join(SEED_STORAGE_PATH, 'gitlab_logo.png') }
describe '#encode!' do
[
diff --git a/spec/lib/gitlab/git/index_spec.rb b/spec/lib/gitlab/git/index_spec.rb
index d0c7ca60ddc..07d71f6777d 100644
--- a/spec/lib/gitlab/git/index_spec.rb
+++ b/spec/lib/gitlab/git/index_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe Gitlab::Git::Index, seed_helper: true do
- let(:repository) { Gitlab::Git::Repository.new(TEST_REPO_PATH) }
+ let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH) }
let(:index) { described_class.new(repository) }
before do
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index d4b7684adfd..7e8bb796e03 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -3,7 +3,7 @@ require "spec_helper"
describe Gitlab::Git::Repository, seed_helper: true do
include Gitlab::Git::EncodingHelper
- let(:repository) { Gitlab::Git::Repository.new(TEST_REPO_PATH) }
+ let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH) }
describe "Respond to" do
subject { repository }
@@ -14,6 +14,32 @@ describe Gitlab::Git::Repository, seed_helper: true do
it { is_expected.to respond_to(:tags) }
end
+ describe '#root_ref' do
+ context 'with gitaly disabled' do
+ before { allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_return(false) }
+
+ it 'calls #discover_default_branch' do
+ expect(repository).to receive(:discover_default_branch)
+ repository.root_ref
+ end
+ end
+
+ context 'with gitaly enabled' do
+ before { stub_gitaly }
+
+ it 'gets the branch name from GitalyClient' do
+ expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:default_branch_name)
+ repository.root_ref
+ end
+
+ it 'wraps GRPC exceptions' do
+ expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:default_branch_name).
+ and_raise(GRPC::Unknown)
+ expect { repository.root_ref }.to raise_error(Gitlab::Git::CommandError)
+ end
+ end
+ end
+
describe "#discover_default_branch" do
let(:master) { 'master' }
let(:feature) { 'feature' }
@@ -55,6 +81,21 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
it { is_expected.to include("master") }
it { is_expected.not_to include("branch-from-space") }
+
+ context 'with gitaly enabled' do
+ before { stub_gitaly }
+
+ it 'gets the branch names from GitalyClient' do
+ expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:branch_names)
+ subject
+ end
+
+ it 'wraps GRPC exceptions' do
+ expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:branch_names).
+ and_raise(GRPC::Unknown)
+ expect { subject }.to raise_error(Gitlab::Git::CommandError)
+ end
+ end
end
describe '#tag_names' do
@@ -71,6 +112,21 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
it { is_expected.to include("v1.0.0") }
it { is_expected.not_to include("v5.0.0") }
+
+ context 'with gitaly enabled' do
+ before { stub_gitaly }
+
+ it 'gets the tag names from GitalyClient' do
+ expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:tag_names)
+ subject
+ end
+
+ it 'wraps GRPC exceptions' do
+ expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:tag_names).
+ and_raise(GRPC::Unknown)
+ expect { subject }.to raise_error(Gitlab::Git::CommandError)
+ end
+ end
end
shared_examples 'archive check' do |extenstion|
@@ -221,7 +277,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
context '#submodules' do
- let(:repository) { Gitlab::Git::Repository.new(TEST_REPO_PATH) }
+ let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH) }
context 'where repo has submodules' do
let(:submodules) { repository.submodules('master') }
@@ -290,9 +346,9 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
describe "#reset" do
- change_path = File.join(TEST_NORMAL_REPO_PATH, "CHANGELOG")
- untracked_path = File.join(TEST_NORMAL_REPO_PATH, "UNTRACKED")
- tracked_path = File.join(TEST_NORMAL_REPO_PATH, "files", "ruby", "popen.rb")
+ change_path = File.join(SEED_STORAGE_PATH, TEST_NORMAL_REPO_PATH, "CHANGELOG")
+ untracked_path = File.join(SEED_STORAGE_PATH, TEST_NORMAL_REPO_PATH, "UNTRACKED")
+ tracked_path = File.join(SEED_STORAGE_PATH, TEST_NORMAL_REPO_PATH, "files", "ruby", "popen.rb")
change_text = "New changelog text"
untracked_text = "This file is untracked"
@@ -311,7 +367,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
f.write(untracked_text)
end
- @normal_repo = Gitlab::Git::Repository.new(TEST_NORMAL_REPO_PATH)
+ @normal_repo = Gitlab::Git::Repository.new('default', TEST_NORMAL_REPO_PATH)
@normal_repo.reset("HEAD", :hard)
end
@@ -354,7 +410,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
context "-b" do
before(:all) do
- @normal_repo = Gitlab::Git::Repository.new(TEST_NORMAL_REPO_PATH)
+ @normal_repo = Gitlab::Git::Repository.new('default', TEST_NORMAL_REPO_PATH)
@normal_repo.checkout(new_branch, { b: true }, "origin/feature")
end
@@ -382,7 +438,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
context "without -b" do
context "and specifying a nonexistent branch" do
it "should not do anything" do
- normal_repo = Gitlab::Git::Repository.new(TEST_NORMAL_REPO_PATH)
+ normal_repo = Gitlab::Git::Repository.new('default', TEST_NORMAL_REPO_PATH)
expect { normal_repo.checkout(new_branch) }.to raise_error(Rugged::ReferenceError)
expect(normal_repo.rugged.branches[new_branch]).to be_nil
@@ -402,7 +458,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
context "and with a valid branch" do
before(:all) do
- @normal_repo = Gitlab::Git::Repository.new(TEST_NORMAL_REPO_PATH)
+ @normal_repo = Gitlab::Git::Repository.new('default', TEST_NORMAL_REPO_PATH)
@normal_repo.rugged.branches.create("feature", "origin/feature")
@normal_repo.checkout("feature")
end
@@ -414,13 +470,13 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
it "should update the working directory" do
- File.open(File.join(TEST_NORMAL_REPO_PATH, ".gitignore"), "r") do |f|
+ File.open(File.join(SEED_STORAGE_PATH, TEST_NORMAL_REPO_PATH, ".gitignore"), "r") do |f|
expect(f.read.each_line.to_a).not_to include(".DS_Store\n")
end
end
after(:all) do
- FileUtils.rm_rf(TEST_NORMAL_REPO_PATH)
+ FileUtils.rm_rf(File.join(SEED_STORAGE_PATH, TEST_NORMAL_REPO_PATH))
ensure_seeds
end
end
@@ -429,7 +485,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
describe "#delete_branch" do
before(:all) do
- @repo = Gitlab::Git::Repository.new(TEST_MUTABLE_REPO_PATH)
+ @repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH)
@repo.delete_branch("feature")
end
@@ -449,7 +505,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
describe "#create_branch" do
before(:all) do
- @repo = Gitlab::Git::Repository.new(TEST_MUTABLE_REPO_PATH)
+ @repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH)
end
it "should create a new branch" do
@@ -496,7 +552,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
describe "#remote_delete" do
before(:all) do
- @repo = Gitlab::Git::Repository.new(TEST_MUTABLE_REPO_PATH)
+ @repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH)
@repo.remote_delete("expendable")
end
@@ -512,7 +568,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
describe "#remote_add" do
before(:all) do
- @repo = Gitlab::Git::Repository.new(TEST_MUTABLE_REPO_PATH)
+ @repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH)
@repo.remote_add("new_remote", SeedHelper::GITLAB_GIT_TEST_REPO_URL)
end
@@ -528,7 +584,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
describe "#remote_update" do
before(:all) do
- @repo = Gitlab::Git::Repository.new(TEST_MUTABLE_REPO_PATH)
+ @repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH)
@repo.remote_update("expendable", url: TEST_NORMAL_REPO_PATH)
end
@@ -551,7 +607,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
before(:context) do
# Add new commits so that there's a renamed file in the commit history
- repo = Gitlab::Git::Repository.new(TEST_REPO_PATH).rugged
+ repo = Gitlab::Git::Repository.new('default', TEST_REPO_PATH).rugged
commit_with_old_name = new_commit_edit_old_file(repo)
rename_commit = new_commit_move_file(repo)
@@ -560,7 +616,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
after(:context) do
# Erase our commits so other tests get the original repo
- repo = Gitlab::Git::Repository.new(TEST_REPO_PATH).rugged
+ repo = Gitlab::Git::Repository.new('default', TEST_REPO_PATH).rugged
repo.references.update("refs/heads/master", SeedRepo::LastCommit::ID)
end
@@ -885,7 +941,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
describe '#autocrlf' do
before(:all) do
- @repo = Gitlab::Git::Repository.new(TEST_MUTABLE_REPO_PATH)
+ @repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH)
@repo.rugged.config['core.autocrlf'] = true
end
@@ -900,14 +956,14 @@ describe Gitlab::Git::Repository, seed_helper: true do
describe '#autocrlf=' do
before(:all) do
- @repo = Gitlab::Git::Repository.new(TEST_MUTABLE_REPO_PATH)
+ @repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH)
@repo.rugged.config['core.autocrlf'] = false
end
it 'should set the autocrlf option to the provided option' do
@repo.autocrlf = :input
- File.open(File.join(TEST_MUTABLE_REPO_PATH, '.git', 'config')) do |config_file|
+ File.open(File.join(SEED_STORAGE_PATH, TEST_MUTABLE_REPO_PATH, '.git', 'config')) do |config_file|
expect(config_file.read).to match('autocrlf = input')
end
end
@@ -999,7 +1055,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
describe "#copy_gitattributes" do
- let(:attributes_path) { File.join(TEST_REPO_PATH, 'info/attributes') }
+ let(:attributes_path) { File.join(SEED_STORAGE_PATH, TEST_REPO_PATH, 'info/attributes') }
it "raises an error with invalid ref" do
expect { repository.copy_gitattributes("invalid") }.to raise_error(Gitlab::Git::Repository::InvalidRef)
@@ -1075,7 +1131,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
describe '#diffable' do
- info_dir_path = attributes_path = File.join(TEST_REPO_PATH, 'info')
+ info_dir_path = attributes_path = File.join(SEED_STORAGE_PATH, TEST_REPO_PATH, 'info')
attributes_path = File.join(info_dir_path, 'attributes')
before(:all) do
@@ -1143,7 +1199,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
describe '#local_branches' do
before(:all) do
- @repo = Gitlab::Git::Repository.new(TEST_MUTABLE_REPO_PATH)
+ @repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH)
end
after(:all) do
@@ -1235,4 +1291,11 @@ describe Gitlab::Git::Repository, seed_helper: true do
sha = Rugged::Commit.create(repo, options)
repo.lookup(sha)
end
+
+ def stub_gitaly
+ allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_return(true)
+
+ stub = double(:stub)
+ allow(Gitaly::Ref::Stub).to receive(:new).and_return(stub)
+ end
end
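The new Gitaly-enabled examples above all follow the same contract: ref data is fetched through Gitlab::GitalyClient::Ref and any GRPC status error is re-raised as Gitlab::Git::CommandError. A minimal sketch of that wrapping, assuming instance state (@storage, @relative_path) that the real class may name differently:

    def branch_names_via_gitaly
      Gitlab::GitalyClient::Ref.new(@storage, @relative_path).branch_names
    rescue GRPC::BadStatus => e
      # GRPC::Unknown (used in the specs) is a subclass of GRPC::BadStatus,
      # so a single rescue converts every Gitaly failure into one error class.
      raise Gitlab::Git::CommandError, e.message
    end
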
diff --git a/spec/lib/gitlab/git/tag_spec.rb b/spec/lib/gitlab/git/tag_spec.rb
index ad469e94735..67a9c974298 100644
--- a/spec/lib/gitlab/git/tag_spec.rb
+++ b/spec/lib/gitlab/git/tag_spec.rb
@@ -1,7 +1,7 @@
require "spec_helper"
describe Gitlab::Git::Tag, seed_helper: true do
- let(:repository) { Gitlab::Git::Repository.new(TEST_REPO_PATH) }
+ let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH) }
describe 'first tag' do
let(:tag) { repository.tags.first }
diff --git a/spec/lib/gitlab/git/tree_spec.rb b/spec/lib/gitlab/git/tree_spec.rb
index 82685712b5b..4b76a43e6b5 100644
--- a/spec/lib/gitlab/git/tree_spec.rb
+++ b/spec/lib/gitlab/git/tree_spec.rb
@@ -2,7 +2,7 @@ require "spec_helper"
describe Gitlab::Git::Tree, seed_helper: true do
context :repo do
- let(:repository) { Gitlab::Git::Repository.new(TEST_REPO_PATH) }
+ let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH) }
let(:tree) { Gitlab::Git::Tree.where(repository, SeedRepo::Commit::ID) }
it { expect(tree).to be_kind_of Array }
diff --git a/spec/lib/gitlab/gitaly_client/notifications_spec.rb b/spec/lib/gitlab/gitaly_client/notifications_spec.rb
index bb5d93994ad..39c2048fef8 100644
--- a/spec/lib/gitlab/gitaly_client/notifications_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/notifications_spec.rb
@@ -2,12 +2,15 @@ require 'spec_helper'
describe Gitlab::GitalyClient::Notifications do
describe '#post_receive' do
+ let(:project) { create(:empty_project) }
+ let(:repo_path) { project.repository.path_to_repo }
+ subject { described_class.new(project.repository_storage, project.full_path + '.git') }
+
it 'sends a post_receive message' do
- repo_path = create(:empty_project).repository.path_to_repo
expect_any_instance_of(Gitaly::Notifications::Stub).
- to receive(:post_receive).with(post_receive_request_with_repo_path(repo_path))
+ to receive(:post_receive).with(gitaly_request_with_repo_path(repo_path))
- described_class.new(repo_path).post_receive
+ subject.post_receive
end
end
end
diff --git a/spec/lib/gitlab/gitaly_client/ref_spec.rb b/spec/lib/gitlab/gitaly_client/ref_spec.rb
new file mode 100644
index 00000000000..79c9ca993e4
--- /dev/null
+++ b/spec/lib/gitlab/gitaly_client/ref_spec.rb
@@ -0,0 +1,41 @@
+require 'spec_helper'
+
+describe Gitlab::GitalyClient::Ref do
+ let(:project) { create(:empty_project) }
+ let(:repo_path) { project.repository.path_to_repo }
+ let(:client) { Gitlab::GitalyClient::Ref.new(project.repository_storage, project.full_path + '.git') }
+
+ before do
+ allow(Gitlab.config.gitaly).to receive(:enabled).and_return(true)
+ end
+
+ describe '#branch_names' do
+ it 'sends a find_all_branch_names message' do
+ expect_any_instance_of(Gitaly::Ref::Stub).
+ to receive(:find_all_branch_names).with(gitaly_request_with_repo_path(repo_path)).
+ and_return([])
+
+ client.branch_names
+ end
+ end
+
+ describe '#tag_names' do
+ it 'sends a find_all_tag_names message' do
+ expect_any_instance_of(Gitaly::Ref::Stub).
+ to receive(:find_all_tag_names).with(gitaly_request_with_repo_path(repo_path)).
+ and_return([])
+
+ client.tag_names
+ end
+ end
+
+ describe '#default_branch_name' do
+ it 'sends a find_default_branch_name message' do
+ expect_any_instance_of(Gitaly::Ref::Stub).
+ to receive(:find_default_branch_name).with(gitaly_request_with_repo_path(repo_path)).
+ and_return(double(name: 'foo'))
+
+ client.default_branch_name
+ end
+ end
+end
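gitaly_request_with_repo_path is a custom RSpec argument matcher from the spec support files; conceptually it only needs to compare the repository path carried by the outgoing Gitaly request. A hedged sketch (the exact request field is an assumption):

    RSpec::Matchers.define :gitaly_request_with_repo_path do |path|
      # Matches any Gitaly request whose repository points at the given path.
      match { |request| request.repository.path == path }
    end
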
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 5a7e5fd4360..549eac76ced 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -104,6 +104,9 @@ triggers:
- project
- trigger_requests
- owner
+- trigger_schedule
+trigger_schedule:
+- trigger
deploy_keys:
- user
- deploy_keys_projects
@@ -121,6 +124,9 @@ merge_access_levels:
- protected_branch
push_access_levels:
- protected_branch
+container_repositories:
+- project
+- name
project:
- taggings
- base_tags
@@ -161,6 +167,8 @@ project:
- external_wiki_service
- kubernetes_service
- mock_ci_service
+- mock_deployment_service
+- mock_monitoring_service
- forked_project_link
- forked_from_project
- forked_project_links
@@ -195,8 +203,10 @@ project:
- builds
- runner_projects
- runners
+- active_runners
- variables
- triggers
+- trigger_schedules
- environments
- deployments
- project_feature
@@ -205,6 +215,7 @@ project:
- project_authorizations
- route
- statistics
+- container_repositories
- uploads
award_emoji:
- awardable
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 1ad16a9b57d..0c43c5662e8 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -240,6 +240,17 @@ Ci::Trigger:
- updated_at
- owner_id
- description
+- ref
+Ci::TriggerSchedule:
+- id
+- project_id
+- trigger_id
+- deleted_at
+- created_at
+- updated_at
+- cron
+- cron_timezone
+- next_run_at
DeployKey:
- id
- user_id
diff --git a/spec/lib/gitlab/o_auth/user_spec.rb b/spec/lib/gitlab/o_auth/user_spec.rb
index 6c84a4c8b73..8f09266c3b3 100644
--- a/spec/lib/gitlab/o_auth/user_spec.rb
+++ b/spec/lib/gitlab/o_auth/user_spec.rb
@@ -40,6 +40,15 @@ describe Gitlab::OAuth::User, lib: true do
let(:provider) { 'twitter' }
describe 'signup' do
+ it 'marks user as having password_automatically_set' do
+ stub_omniauth_config(allow_single_sign_on: ['twitter'], external_providers: ['twitter'])
+
+ oauth_user.save
+
+ expect(gl_user).to be_persisted
+ expect(gl_user).to be_password_automatically_set
+ end
+
shared_examples 'to verify compliance with allow_single_sign_on' do
context 'provider is marked as external' do
it 'marks user as external' do
diff --git a/spec/lib/gitlab/polling_interval_spec.rb b/spec/lib/gitlab/polling_interval_spec.rb
index 56c2847e26a..5ea8ecb1c30 100644
--- a/spec/lib/gitlab/polling_interval_spec.rb
+++ b/spec/lib/gitlab/polling_interval_spec.rb
@@ -15,7 +15,7 @@ describe Gitlab::PollingInterval, lib: true do
it 'sets value to -1' do
polling_interval.set_header(response, interval: 10_000)
- expect(headers['Poll-Interval']).to eq(-1)
+ expect(headers['Poll-Interval']).to eq('-1')
end
end
@@ -27,7 +27,7 @@ describe Gitlab::PollingInterval, lib: true do
it 'applies modifier to base interval' do
polling_interval.set_header(response, interval: 10_000)
- expect(headers['Poll-Interval']).to eq(3333)
+ expect(headers['Poll-Interval']).to eq('3333')
end
end
end
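The expectations change from integers to strings because HTTP response header values are strings (Rack's SPEC requires it), so whatever interval the helper computes has to be stringified before it is written. Illustrative only:

    # A disabled poll writes '-1'; an enabled poll applies some multiplier first.
    interval = 10_000 * 0.33333                              # hypothetical multiplier from settings
    response.headers['Poll-Interval'] = interval.to_i.to_s   # => '3333'
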
diff --git a/spec/lib/gitlab/sidekiq_status/client_middleware_spec.rb b/spec/lib/gitlab/sidekiq_status/client_middleware_spec.rb
index 287bf62d9bd..6307f8c16a3 100644
--- a/spec/lib/gitlab/sidekiq_status/client_middleware_spec.rb
+++ b/spec/lib/gitlab/sidekiq_status/client_middleware_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe Gitlab::SidekiqStatus::ClientMiddleware do
describe '#call' do
it 'tracks the job in Redis' do
- expect(Gitlab::SidekiqStatus).to receive(:set).with('123')
+ expect(Gitlab::SidekiqStatus).to receive(:set).with('123', Gitlab::SidekiqStatus::DEFAULT_EXPIRATION)
described_class.new.
call('Foo', { 'jid' => '123' }, double(:queue), double(:pool)) { nil }
diff --git a/spec/lib/gitlab/sidekiq_status_spec.rb b/spec/lib/gitlab/sidekiq_status_spec.rb
index 56f06b61afb..496e50fbae4 100644
--- a/spec/lib/gitlab/sidekiq_status_spec.rb
+++ b/spec/lib/gitlab/sidekiq_status_spec.rb
@@ -73,4 +73,17 @@ describe Gitlab::SidekiqStatus do
expect(key).to include('123')
end
end
+
+ describe 'completed', :redis do
+ it 'returns the completed job' do
+ expect(described_class.completed_jids(%w(123))).to eq(['123'])
+ end
+
+ it 'returns only the jobs completed' do
+ described_class.set('123')
+ described_class.set('456')
+
+ expect(described_class.completed_jids(%w(123 456 789))).to eq(['789'])
+ end
+ end
end
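The new 'completed' examples pin down what completed_jids means: a job id whose tracking key is no longer present in Redis. A minimal sketch under that assumption (the key format is illustrative):

    def completed_jids(job_ids)
      Gitlab::Redis.with do |redis|
        exists = redis.pipelined do
          job_ids.each { |jid| redis.exists("gitlab-sidekiq-status:#{jid}") }
        end

        # Keep only the ids whose key has expired or was never set.
        job_ids.zip(exists).reject { |_jid, present| present }.map(&:first)
      end
    end
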
diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb
index 9fbcb1fee69..b703e9808a8 100644
--- a/spec/lib/gitlab/workhorse_spec.rb
+++ b/spec/lib/gitlab/workhorse_spec.rb
@@ -188,10 +188,8 @@ describe Gitlab::Workhorse, lib: true do
context 'when Gitaly is enabled' do
let(:gitaly_params) do
- address = Gitlab::GitalyClient.get_address('default')
{
- GitalySocketPath: URI(address).path,
- GitalyAddress: address,
+ GitalyAddress: Gitlab::GitalyClient.get_address('default'),
}
end
diff --git a/spec/mailers/previews/notify_preview.rb b/spec/mailers/previews/notify_preview.rb
new file mode 100644
index 00000000000..0e1ccb5b847
--- /dev/null
+++ b/spec/mailers/previews/notify_preview.rb
@@ -0,0 +1,11 @@
+class NotifyPreview < ActionMailer::Preview
+ def pipeline_success_email
+ pipeline = Ci::Pipeline.last
+ Notify.pipeline_success_email(pipeline, pipeline.user.try(:email))
+ end
+
+ def pipeline_failed_email
+ pipeline = Ci::Pipeline.last
+ Notify.pipeline_failed_email(pipeline, pipeline.user.try(:email))
+ end
+end
diff --git a/spec/models/award_emoji_spec.rb b/spec/models/award_emoji_spec.rb
index cb3c592f8cd..2a9a27752c1 100644
--- a/spec/models/award_emoji_spec.rb
+++ b/spec/models/award_emoji_spec.rb
@@ -25,6 +25,20 @@ describe AwardEmoji, models: true do
expect(new_award).not_to be_valid
end
+
+ # Assume User A and User B both created award emoji of the same name
+ # on the same awardable. When User A is deleted, User A's award emoji
+ # is moved to the ghost user. When User B is deleted, User B's award emoji
+ # also needs to be moved to the ghost user - this cannot happen unless
+ # the uniqueness validation is disabled for ghost users.
+ it "allows duplicate award emoji for ghost users" do
+ user = create(:user, :ghost)
+ issue = create(:issue)
+ create(:award_emoji, user: user, awardable: issue)
+ new_award = build(:award_emoji, user: user, awardable: issue)
+
+ expect(new_award).to be_valid
+ end
end
end
end
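The comment above spells out why the uniqueness rule has to be relaxed for the ghost user. One plausible shape for such a validation (scope and predicate are assumptions, not the actual model code):

    validates :name,
              uniqueness: { scope: [:user, :awardable_type, :awardable_id] },
              unless: -> { user&.ghost? }
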
diff --git a/spec/models/blob_spec.rb b/spec/models/blob_spec.rb
index 552229e9b07..0f29766db41 100644
--- a/spec/models/blob_spec.rb
+++ b/spec/models/blob_spec.rb
@@ -53,6 +53,20 @@ describe Blob do
end
end
+ describe '#pdf?' do
+ it 'is falsey when file extension is not .pdf' do
+ git_blob = double(name: 'git_blob.txt')
+
+ expect(described_class.decorate(git_blob)).not_to be_pdf
+ end
+
+ it 'is truthy when file extension is .pdf' do
+ git_blob = double(name: 'git_blob.pdf')
+
+ expect(described_class.decorate(git_blob)).to be_pdf
+ end
+ end
+
describe '#ipython_notebook?' do
it 'is falsey when language is not Jupyter Notebook' do
git_blob = double(text?: true, language: double(name: 'JSON'))
@@ -67,6 +81,20 @@ describe Blob do
end
end
+ describe '#sketch?' do
+ it 'is falsey with image extension' do
+ git_blob = Gitlab::Git::Blob.new(name: "design.png")
+
+ expect(described_class.decorate(git_blob)).not_to be_sketch
+ end
+
+ it 'is truthy with sketch extension' do
+ git_blob = Gitlab::Git::Blob.new(name: "design.sketch")
+
+ expect(described_class.decorate(git_blob)).to be_sketch
+ end
+ end
+
describe '#video?' do
it 'is falsey with image extension' do
git_blob = Gitlab::Git::Blob.new(name: 'image.png')
@@ -83,16 +111,33 @@ describe Blob do
end
end
+ describe '#stl?' do
+ it 'is falsey with image extension' do
+ git_blob = Gitlab::Git::Blob.new(name: 'file.png')
+
+ expect(described_class.decorate(git_blob)).not_to be_stl
+ end
+
+ it 'is truthy with STL extension' do
+ git_blob = Gitlab::Git::Blob.new(name: 'file.stl')
+
+ expect(described_class.decorate(git_blob)).to be_stl
+ end
+ end
+
describe '#to_partial_path' do
let(:project) { double(lfs_enabled?: true) }
def stubbed_blob(overrides = {})
overrides.reverse_merge!(
+ name: nil,
image?: false,
language: nil,
lfs_pointer?: false,
svg?: false,
- text?: false
+ text?: false,
+ binary?: false,
+ stl?: false
)
described_class.decorate(double).tap do |blob|
@@ -131,10 +176,25 @@ describe Blob do
expect(blob.to_partial_path(project)).to eq 'download'
end
+ it 'handles PDFs' do
+ blob = stubbed_blob(name: 'blob.pdf', pdf?: true)
+ expect(blob.to_partial_path(project)).to eq 'pdf'
+ end
+
it 'handles iPython notebooks' do
blob = stubbed_blob(text?: true, ipython_notebook?: true)
expect(blob.to_partial_path(project)).to eq 'notebook'
end
+
+ it 'handles Sketch files' do
+ blob = stubbed_blob(text?: true, sketch?: true, binary?: true)
+ expect(blob.to_partial_path(project)).to eq 'sketch'
+ end
+
+ it 'handles STLs' do
+ blob = stubbed_blob(text?: true, stl?: true)
+ expect(blob.to_partial_path(project)).to eq 'stl'
+ end
end
describe '#size_within_svg_limits?' do
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index ab7cf47bd2d..6e8845cdcf4 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -17,8 +17,9 @@ describe Ci::Build, :models do
it { is_expected.to belong_to(:trigger_request) }
it { is_expected.to belong_to(:erased_by) }
it { is_expected.to have_many(:deployments) }
- it { is_expected.to validate_presence_of :ref }
- it { is_expected.to respond_to :trace_html }
+ it { is_expected.to validate_presence_of(:ref) }
+ it { is_expected.to respond_to(:has_trace?) }
+ it { is_expected.to respond_to(:trace) }
describe '#actionize' do
context 'when build is a created' do
@@ -78,32 +79,6 @@ describe Ci::Build, :models do
end
end
- describe '#append_trace' do
- subject { build.trace_html }
-
- context 'when build.trace hides runners token' do
- let(:token) { 'my_secret_token' }
-
- before do
- build.project.update(runners_token: token)
- build.append_trace(token, 0)
- end
-
- it { is_expected.not_to include(token) }
- end
-
- context 'when build.trace hides build token' do
- let(:token) { 'my_secret_token' }
-
- before do
- build.update(token: token)
- build.append_trace(token, 0)
- end
-
- it { is_expected.not_to include(token) }
- end
- end
-
describe '#artifacts?' do
subject { build.artifacts? }
@@ -272,12 +247,98 @@ describe Ci::Build, :models do
describe '#update_coverage' do
context "regarding coverage_regex's value," do
- it "saves the correct extracted coverage value" do
+ before do
build.coverage_regex = '\(\d+.\d+\%\) covered'
- allow(build).to receive(:trace) { 'Coverage 1033 / 1051 LOC (98.29%) covered' }
- expect(build).to receive(:update_attributes).with(coverage: 98.29) { true }
- expect(build.update_coverage).to be true
+ build.trace.set('Coverage 1033 / 1051 LOC (98.29%) covered')
+ end
+
+ it "saves the correct extracted coverage value" do
+ expect(build.update_coverage).to be(true)
+ expect(build.coverage).to eq(98.29)
+ end
+ end
+ end
+
+ describe '#trace' do
+ subject { build.trace }
+
+ it { is_expected.to be_a(Gitlab::Ci::Trace) }
+ end
+
+ describe '#has_trace?' do
+ subject { build.has_trace? }
+
+ it "expect to call exist? method" do
+ expect_any_instance_of(Gitlab::Ci::Trace).to receive(:exist?)
+ .and_return(true)
+
+ is_expected.to be(true)
+ end
+ end
+
+ describe '#trace=' do
+ it "expect to fail trace=" do
+ expect { build.trace = "new" }.to raise_error(NotImplementedError)
+ end
+ end
+
+ describe '#old_trace' do
+ subject { build.old_trace }
+
+ before do
+ build.update_column(:trace, 'old trace')
+ end
+
+ it "expect to receive data from database" do
+ is_expected.to eq('old trace')
+ end
+ end
+
+ describe '#erase_old_trace!' do
+ subject { build.send(:read_attribute, :trace) }
+
+ before do
+ build.send(:write_attribute, :trace, 'old trace')
+ end
+
+ it "expect to receive data from database" do
+ build.erase_old_trace!
+
+ is_expected.to be_nil
+ end
+ end
+
+ describe '#hide_secrets' do
+ let(:subject) { build.hide_secrets(data) }
+
+ context 'hide runners token' do
+ let(:data) { 'new token data'}
+
+ before do
+ build.project.update(runners_token: 'token')
end
+
+ it { is_expected.to eq('new xxxxx data') }
+ end
+
+ context 'hide build token' do
+ let(:data) { 'new token data'}
+
+ before do
+ build.update(token: 'token')
+ end
+
+ it { is_expected.to eq('new xxxxx data') }
+ end
end
@@ -438,7 +499,7 @@ describe Ci::Build, :models do
end
it 'erases build trace in trace file' do
- expect(build.trace).to be_empty
+ expect(build).not_to have_trace
end
it 'sets erased to true' do
@@ -532,38 +593,6 @@ describe Ci::Build, :models do
end
end
- describe '#extract_coverage' do
- context 'valid content & regex' do
- subject { build.extract_coverage('Coverage 1033 / 1051 LOC (98.29%) covered', '\(\d+.\d+\%\) covered') }
-
- it { is_expected.to eq(98.29) }
- end
-
- context 'valid content & bad regex' do
- subject { build.extract_coverage('Coverage 1033 / 1051 LOC (98.29%) covered', 'very covered') }
-
- it { is_expected.to be_nil }
- end
-
- context 'no coverage content & regex' do
- subject { build.extract_coverage('No coverage for today :sad:', '\(\d+.\d+\%\) covered') }
-
- it { is_expected.to be_nil }
- end
-
- context 'multiple results in content & regex' do
- subject { build.extract_coverage(' (98.39%) covered. (98.29%) covered', '\(\d+.\d+\%\) covered') }
-
- it { is_expected.to eq(98.29) }
- end
-
- context 'using a regex capture' do
- subject { build.extract_coverage('TOTAL 9926 3489 65%', 'TOTAL\s+\d+\s+\d+\s+(\d{1,3}\%)') }
-
- it { is_expected.to eq(65) }
- end
- end
-
describe '#first_pending' do
let!(:first) { create(:ci_build, pipeline: pipeline, status: 'pending', created_at: Date.yesterday) }
let!(:second) { create(:ci_build, pipeline: pipeline, status: 'pending') }
@@ -949,32 +978,6 @@ describe Ci::Build, :models do
it { is_expected.to eq(project.name) }
end
- describe '#raw_trace' do
- subject { build.raw_trace }
-
- context 'when build.trace hides runners token' do
- let(:token) { 'my_secret_token' }
-
- before do
- build.project.update(runners_token: token)
- build.update(trace: token)
- end
-
- it { is_expected.not_to include(token) }
- end
-
- context 'when build.trace hides build token' do
- let(:token) { 'my_secret_token' }
-
- before do
- build.update(token: token)
- build.update(trace: token)
- end
-
- it { is_expected.not_to include(token) }
- end
- end
-
describe '#ref_slug' do
{
'master' => 'master',
@@ -1040,61 +1043,6 @@ describe Ci::Build, :models do
end
end
- describe '#trace' do
- it 'obfuscates project runners token' do
- allow(build).to receive(:raw_trace).and_return("Test: #{build.project.runners_token}")
-
- expect(build.trace).to eq("Test: xxxxxxxxxxxxxxxxxxxx")
- end
-
- it 'empty project runners token' do
- allow(build).to receive(:raw_trace).and_return(test_trace)
- # runners_token can't normally be set to nil
- allow(build.project).to receive(:runners_token).and_return(nil)
-
- expect(build.trace).to eq(test_trace)
- end
-
- context 'when build does not have trace' do
- it 'is is empty' do
- expect(build.trace).to be_nil
- end
- end
-
- context 'when trace contains text' do
- let(:text) { 'example output' }
- before do
- build.trace = text
- end
-
- it { expect(build.trace).to eq(text) }
- end
-
- context 'when trace hides runners token' do
- let(:token) { 'my_secret_token' }
-
- before do
- build.update(trace: token)
- build.project.update(runners_token: token)
- end
-
- it { expect(build.trace).not_to include(token) }
- it { expect(build.raw_trace).to include(token) }
- end
-
- context 'when build.trace hides build token' do
- let(:token) { 'my_secret_token' }
-
- before do
- build.update(trace: token)
- build.update(token: token)
- end
-
- it { expect(build.trace).not_to include(token) }
- it { expect(build.raw_trace).to include(token) }
- end
- end
-
describe '#has_expiring_artifacts?' do
context 'when artifacts have expiration date set' do
before { build.update(artifacts_expire_at: 1.day.from_now) }
@@ -1113,66 +1061,6 @@ describe Ci::Build, :models do
end
end
- describe '#has_trace_file?' do
- context 'when there is no trace' do
- it { expect(build.has_trace_file?).to be_falsey }
- it { expect(build.trace).to be_nil }
- end
-
- context 'when there is a trace' do
- context 'when trace is stored in file' do
- let(:build_with_trace) { create(:ci_build, :trace) }
-
- it { expect(build_with_trace.has_trace_file?).to be_truthy }
- it { expect(build_with_trace.trace).to eq('BUILD TRACE') }
- end
-
- context 'when trace is stored in old file' do
- before do
- allow(build.project).to receive(:ci_id).and_return(999)
- allow(File).to receive(:exist?).with(build.path_to_trace).and_return(false)
- allow(File).to receive(:exist?).with(build.old_path_to_trace).and_return(true)
- allow(File).to receive(:read).with(build.old_path_to_trace).and_return(test_trace)
- end
-
- it { expect(build.has_trace_file?).to be_truthy }
- it { expect(build.trace).to eq(test_trace) }
- end
-
- context 'when trace is stored in DB' do
- before do
- allow(build.project).to receive(:ci_id).and_return(nil)
- allow(build).to receive(:read_attribute).with(:trace).and_return(test_trace)
- allow(File).to receive(:exist?).with(build.path_to_trace).and_return(false)
- allow(File).to receive(:exist?).with(build.old_path_to_trace).and_return(false)
- end
-
- it { expect(build.has_trace_file?).to be_falsey }
- it { expect(build.trace).to eq(test_trace) }
- end
- end
- end
-
- describe '#trace_file_path' do
- context 'when trace is stored in file' do
- before do
- allow(build).to receive(:has_trace_file?).and_return(true)
- allow(build).to receive(:has_old_trace_file?).and_return(false)
- end
-
- it { expect(build.trace_file_path).to eq(build.path_to_trace) }
- end
-
- context 'when trace is stored in old file' do
- before do
- allow(build).to receive(:has_trace_file?).and_return(true)
- allow(build).to receive(:has_old_trace_file?).and_return(true)
- end
-
- it { expect(build.trace_file_path).to eq(build.old_path_to_trace) }
- end
- end
-
describe '#update_project_statistics' do
let!(:build) { create(:ci_build, artifacts_size: 23) }
@@ -1426,7 +1314,7 @@ describe Ci::Build, :models do
{ key: 'CI_REGISTRY', value: 'registry.example.com', public: true }
end
let(:ci_registry_image) do
- { key: 'CI_REGISTRY_IMAGE', value: project.container_registry_repository_url, public: true }
+ { key: 'CI_REGISTRY_IMAGE', value: project.container_registry_url, public: true }
end
context 'and is disabled for project' do
diff --git a/spec/models/ci/trigger_schedule_spec.rb b/spec/models/ci/trigger_schedule_spec.rb
new file mode 100644
index 00000000000..75d21541cee
--- /dev/null
+++ b/spec/models/ci/trigger_schedule_spec.rb
@@ -0,0 +1,76 @@
+require 'spec_helper'
+
+describe Ci::TriggerSchedule, models: true do
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:trigger) }
+ it { is_expected.to respond_to(:ref) }
+
+ describe '#set_next_run_at' do
+ context 'when a new TriggerSchedule is created' do
+ before do
+ trigger_schedule = create(:ci_trigger_schedule, :nightly)
+ @expected_next_run_at = Gitlab::Ci::CronParser.new(trigger_schedule.cron, trigger_schedule.cron_timezone)
+ .next_time_from(Time.now)
+ end
+
+ it 'updates next_run_at automatically' do
+ expect(Ci::TriggerSchedule.last.next_run_at).to eq(@expected_next_run_at)
+ end
+ end
+
+ context 'when the cron of an existing TriggerSchedule is updated' do
+ before do
+ trigger_schedule = create(:ci_trigger_schedule, :nightly)
+ new_cron = '0 0 1 1 *'
+ trigger_schedule.update!(cron: new_cron) # Subject
+ @expected_next_run_at = Gitlab::Ci::CronParser.new(new_cron, trigger_schedule.cron_timezone)
+ .next_time_from(Time.now)
+ end
+
+ it 'updates next_run_at automatically' do
+ expect(Ci::TriggerSchedule.last.next_run_at).to eq(@expected_next_run_at)
+ end
+ end
+ end
+
+ describe '#schedule_next_run!' do
+ context 'when rescheduling 10 days from now' do
+ before do
+ trigger_schedule = create(:ci_trigger_schedule, :nightly)
+ time_future = Time.now + 10.days
+ allow(Time).to receive(:now).and_return(time_future)
+ trigger_schedule.schedule_next_run! # Subject
+ @expected_next_run_at = Gitlab::Ci::CronParser.new(trigger_schedule.cron, trigger_schedule.cron_timezone)
+ .next_time_from(time_future)
+ end
+
+ it 'points to the proper next_run_at' do
+ expect(Ci::TriggerSchedule.last.next_run_at).to eq(@expected_next_run_at)
+ end
+ end
+
+ context 'when cron is invalid' do
+ before do
+ trigger_schedule = create(:ci_trigger_schedule, :nightly)
+ trigger_schedule.cron = 'Invalid-cron'
+ trigger_schedule.schedule_next_run! # Subject
+ end
+
+ it 'sets next_run_at to nil' do
+ expect(Ci::TriggerSchedule.last.next_run_at).to be_nil
+ end
+ end
+
+ context 'when cron_timezone is invalid' do
+ before do
+ trigger_schedule = create(:ci_trigger_schedule, :nightly)
+ trigger_schedule.cron_timezone = 'Invalid-cron_timezone'
+ trigger_schedule.schedule_next_run! # Subject
+ end
+
+ it 'sets next_run_at to nil' do
+ expect(Ci::TriggerSchedule.last.next_run_at).to be_nil
+ end
+ end
+ end
+end
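Taken together, these examples define the contract: next_run_at is always derived from cron and cron_timezone through Gitlab::Ci::CronParser, and an unparsable cron or timezone yields nil. A hedged sketch of the callback side of that contract:

    before_save :set_next_run_at

    def set_next_run_at
      # CronParser#next_time_from is assumed to return nil for invalid input,
      # which is exactly what the 'invalid cron' examples rely on.
      self.next_run_at = Gitlab::Ci::CronParser.new(cron, cron_timezone)
                                               .next_time_from(Time.now)
    end
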
diff --git a/spec/models/ci/trigger_spec.rb b/spec/models/ci/trigger_spec.rb
index 92c15c13c18..d26121018ce 100644
--- a/spec/models/ci/trigger_spec.rb
+++ b/spec/models/ci/trigger_spec.rb
@@ -7,6 +7,7 @@ describe Ci::Trigger, models: true do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:owner) }
it { is_expected.to have_many(:trigger_requests) }
+ it { is_expected.to have_one(:trigger_schedule) }
end
describe 'before_validation' do
diff --git a/spec/models/concerns/routable_spec.rb b/spec/models/concerns/routable_spec.rb
index 677e60e1282..f191605dbdb 100644
--- a/spec/models/concerns/routable_spec.rb
+++ b/spec/models/concerns/routable_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe Group, 'Routable' do
- let!(:group) { create(:group) }
+ let!(:group) { create(:group, name: 'foo') }
describe 'Validations' do
it { is_expected.to validate_presence_of(:route) }
@@ -81,6 +81,113 @@ describe Group, 'Routable' do
it { is_expected.to eq([nested_group]) }
end
+ describe '.member_self_and_descendants' do
+ let!(:user) { create(:user) }
+ let!(:nested_group) { create(:group, parent: group) }
+
+ before { group.add_owner(user) }
+ subject { described_class.member_self_and_descendants(user.id) }
+
+ it { is_expected.to match_array [group, nested_group] }
+ end
+
+ describe '.member_hierarchy' do
+ # A naive prefix match on foo/bar would also pick up foo/barbaz, not just foo/bar and foo/bar/baz
+ let!(:user) { create(:user) }
+
+ # group
+ # _______ (foo) _______
+ # | |
+ # | |
+ # nested_group_1 nested_group_2
+ # (bar) (barbaz)
+ # | |
+ # | |
+ # nested_group_1_1 nested_group_2_1
+ # (baz) (baz)
+ #
+ let!(:nested_group_1) { create :group, parent: group, name: 'bar' }
+ let!(:nested_group_1_1) { create :group, parent: nested_group_1, name: 'baz' }
+ let!(:nested_group_2) { create :group, parent: group, name: 'barbaz' }
+ let!(:nested_group_2_1) { create :group, parent: nested_group_2, name: 'baz' }
+
+ context 'user is not a member of any group' do
+ subject { described_class.member_hierarchy(user.id) }
+
+ it 'returns an empty array' do
+ is_expected.to eq []
+ end
+ end
+
+ context 'user is member of all groups' do
+ before do
+ group.add_owner(user)
+ nested_group_1.add_owner(user)
+ nested_group_1_1.add_owner(user)
+ nested_group_2.add_owner(user)
+ nested_group_2_1.add_owner(user)
+ end
+ subject { described_class.member_hierarchy(user.id) }
+
+ it 'returns all groups' do
+ is_expected.to match_array [
+ group,
+ nested_group_1, nested_group_1_1,
+ nested_group_2, nested_group_2_1
+ ]
+ end
+ end
+
+ context 'user is member of the top group' do
+ before { group.add_owner(user) }
+ subject { described_class.member_hierarchy(user.id) }
+
+ it 'returns all groups' do
+ is_expected.to match_array [
+ group,
+ nested_group_1, nested_group_1_1,
+ nested_group_2, nested_group_2_1
+ ]
+ end
+ end
+
+ context 'user is member of the first child (internal node), branch 1' do
+ before { nested_group_1.add_owner(user) }
+ subject { described_class.member_hierarchy(user.id) }
+
+ it 'returns the groups in the hierarchy' do
+ is_expected.to match_array [
+ group,
+ nested_group_1, nested_group_1_1
+ ]
+ end
+ end
+
+ context 'user is member of the first child (internal node), branch 2' do
+ before { nested_group_2.add_owner(user) }
+ subject { described_class.member_hierarchy(user.id) }
+
+ it 'returns the groups in the hierarchy' do
+ is_expected.to match_array [
+ group,
+ nested_group_2, nested_group_2_1
+ ]
+ end
+ end
+
+ context 'user is member of the last child (leaf node)' do
+ before { nested_group_1_1.add_owner(user) }
+ subject { described_class.member_hierarchy(user.id) }
+
+ it 'returns the groups in the hierarchy' do
+ is_expected.to match_array [
+ group,
+ nested_group_1, nested_group_1_1
+ ]
+ end
+ end
+ end
+
describe '#full_path' do
let(:group) { create(:group) }
let(:nested_group) { create(:group, parent: group) }
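The diagram and the branch-by-branch contexts above exist to rule out naive prefix matching, as the comment warns. A hedged Ruby illustration of the difference:

    # A bare prefix match also catches the sibling group:
    Route.where('path LIKE ?', 'foo/bar%')                          # foo/bar, foo/bar/baz, foo/barbaz
    # Matching the exact path or 'path/' keeps only the real subtree:
    Route.where('path = ? OR path LIKE ?', 'foo/bar', 'foo/bar/%')  # foo/bar, foo/bar/baz
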
diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb
new file mode 100644
index 00000000000..f7ee0b57072
--- /dev/null
+++ b/spec/models/container_repository_spec.rb
@@ -0,0 +1,209 @@
+require 'spec_helper'
+
+describe ContainerRepository do
+ let(:group) { create(:group, name: 'group') }
+ let(:project) { create(:project, path: 'test', group: group) }
+
+ let(:container_repository) do
+ create(:container_repository, name: 'my_image', project: project)
+ end
+
+ before do
+ stub_container_registry_config(enabled: true,
+ api_url: 'http://registry.gitlab',
+ host_port: 'registry.gitlab')
+
+ stub_request(:get, 'http://registry.gitlab/v2/group/test/my_image/tags/list')
+ .with(headers: { 'Accept' => 'application/vnd.docker.distribution.manifest.v2+json' })
+ .to_return(
+ status: 200,
+ body: JSON.dump(tags: ['test_tag']),
+ headers: { 'Content-Type' => 'application/json' })
+ end
+
+ describe 'associations' do
+ it 'belongs to the project' do
+ expect(container_repository).to belong_to(:project)
+ end
+ end
+
+ describe '#tag' do
+ it 'has a test tag' do
+ expect(container_repository.tag('test')).not_to be_nil
+ end
+ end
+
+ describe '#path' do
+ it 'returns a full path to the repository' do
+ expect(container_repository.path).to eq('group/test/my_image')
+ end
+ end
+
+ describe '#manifest' do
+ subject { container_repository.manifest }
+
+ it { is_expected.not_to be_nil }
+ end
+
+ describe '#valid?' do
+ subject { container_repository.valid? }
+
+ it { is_expected.to be_truthy }
+ end
+
+ describe '#tags' do
+ subject { container_repository.tags }
+
+ it { is_expected.not_to be_empty }
+ end
+
+ describe '#has_tags?' do
+ it 'has tags' do
+ expect(container_repository).to have_tags
+ end
+ end
+
+ describe '#delete_tags!' do
+ let(:container_repository) do
+ create(:container_repository, name: 'my_image',
+ tags: %w[latest rc1],
+ project: project)
+ end
+
+ context 'when action succeeds' do
+ it 'returns status that indicates success' do
+ expect(container_repository.client)
+ .to receive(:delete_repository_tag)
+ .and_return(true)
+
+ expect(container_repository.delete_tags!).to be_truthy
+ end
+ end
+
+ context 'when action fails' do
+ it 'returns status that indicates failure' do
+ expect(container_repository.client)
+ .to receive(:delete_repository_tag)
+ .and_return(false)
+
+ expect(container_repository.delete_tags!).to be_falsey
+ end
+ end
+ end
+
+ describe '#root_repository?' do
+ context 'when repository is a root repository' do
+ let(:repository) { create(:container_repository, :root) }
+
+ it 'returns true' do
+ expect(repository).to be_root_repository
+ end
+ end
+
+ context 'when repository is not a root repository' do
+ it 'returns false' do
+ expect(container_repository).not_to be_root_repository
+ end
+ end
+ end
+
+ describe '.build_from_path' do
+ let(:registry_path) do
+ ContainerRegistry::Path.new(project.full_path + '/some/image')
+ end
+
+ let(:repository) do
+ described_class.build_from_path(registry_path)
+ end
+
+ it 'fabricates repository assigned to a correct project' do
+ expect(repository.project).to eq project
+ end
+
+ it 'fabricates repository with a correct name' do
+ expect(repository.name).to eq 'some/image'
+ end
+
+ it 'is not persisted' do
+ expect(repository).not_to be_persisted
+ end
+ end
+
+ describe '.create_from_path!' do
+ let(:repository) do
+ described_class.create_from_path!(ContainerRegistry::Path.new(path))
+ end
+
+ let(:repository_path) { ContainerRegistry::Path.new(path) }
+
+ context 'when received multi-level repository path' do
+ let(:path) { project.full_path + '/some/image' }
+
+ it 'fabricates repository assigned to a correct project' do
+ expect(repository.project).to eq project
+ end
+
+ it 'fabricates repository with a correct name' do
+ expect(repository.name).to eq 'some/image'
+ end
+ end
+
+ context 'when path is too long' do
+ let(:path) do
+ project.full_path + '/a/b/c/d/e/f/g/h/i/j/k/l/n/o/p/s/t/u/x/y/z'
+ end
+
+ it 'does not create the repository and raises an error' do
+ expect { repository }.to raise_error(
+ ContainerRegistry::Path::InvalidRegistryPathError)
+ end
+ end
+
+ context 'when received multi-level repository with nested groups' do
+ let(:group) { create(:group, :nested, name: 'nested') }
+ let(:path) { project.full_path + '/some/image' }
+
+ it 'fabricates repository assigned to a correct project' do
+ expect(repository.project).to eq project
+ end
+
+ it 'fabricates repository with a correct name' do
+ expect(repository.name).to eq 'some/image'
+ end
+
+ it 'has path including a nested group' do
+ expect(repository.path).to include 'nested/test/some/image'
+ end
+ end
+
+ context 'when received root repository path' do
+ let(:path) { project.full_path }
+
+ it 'fabricates repository assigned to a correct project' do
+ expect(repository.project).to eq project
+ end
+
+ it 'fabricates repository with an empty name' do
+ expect(repository.name).to be_empty
+ end
+ end
+ end
+
+ describe '.build_root_repository' do
+ let(:repository) do
+ described_class.build_root_repository(project)
+ end
+
+ it 'fabricates a root repository object' do
+ expect(repository).to be_root_repository
+ end
+
+ it 'assigns it to the correct project' do
+ expect(repository.project).to eq project
+ end
+
+ it 'does not persist it' do
+ expect(repository).not_to be_persisted
+ end
+ end
+end
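The .build_from_path and .create_from_path! examples imply a simple split: the longest leading segment that resolves to a project becomes the owner, and the remainder becomes the repository name (empty for a root repository). Illustrative only, assuming ContainerRegistry::Path exposes the pieces roughly like this:

    path = ContainerRegistry::Path.new('group/test/some/image')
    project = Project.find_by_full_path('group/test')  # longest matching project path
    name = 'some/image'                                # remainder; '' for the root repository
    ContainerRepository.new(project: project, name: name)
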
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 9f0e7fbbe26..af7753caba6 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -100,12 +100,26 @@ describe Environment, models: true do
let(:head_commit) { project.commit }
let(:commit) { project.commit.parent }
- it 'returns deployment id for the environment' do
- expect(environment.first_deployment_for(commit)).to eq deployment1
+ context 'Gitaly find_ref_name feature disabled' do
+ it 'returns deployment id for the environment' do
+ expect(environment.first_deployment_for(commit)).to eq deployment1
+ end
+
+ it 'returns nil when no deployment is found' do
+ expect(environment.first_deployment_for(head_commit)).to eq nil
+ end
end
- it 'return nil when no deployment is found' do
- expect(environment.first_deployment_for(head_commit)).to eq nil
+ context 'Gitaly find_ref_name feature enabled' do
+ before do
+ allow(Gitlab::GitalyClient).to receive(:feature_enabled?).with(:find_ref_name).and_return(true)
+ end
+
+ it 'calls GitalyClient' do
+ expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:find_ref_name)
+
+ environment.first_deployment_for(commit)
+ end
end
end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 5d87938235a..8ffde6f7fbb 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -55,6 +55,8 @@ describe Group, models: true do
it { is_expected.to validate_uniqueness_of(:name).scoped_to(:parent_id) }
it { is_expected.to validate_presence_of :path }
it { is_expected.not_to validate_presence_of :owner }
+ it { is_expected.to validate_presence_of :two_factor_grace_period }
+ it { is_expected.to validate_numericality_of(:two_factor_grace_period).is_greater_than_or_equal_to(0) }
end
describe '.visible_to_user' do
@@ -315,4 +317,44 @@ describe Group, models: true do
to include(master.id, developer.id)
end
end
+
+ describe '#update_two_factor_requirement' do
+ let(:user) { create(:user) }
+
+ before do
+ group.add_user(user, GroupMember::OWNER)
+ end
+
+ it 'is called when require_two_factor_authentication is changed' do
+ expect_any_instance_of(User).to receive(:update_two_factor_requirement)
+
+ group.update!(require_two_factor_authentication: true)
+ end
+
+ it 'is called when two_factor_grace_period is changed' do
+ expect_any_instance_of(User).to receive(:update_two_factor_requirement)
+
+ group.update!(two_factor_grace_period: 23)
+ end
+
+ it 'is not called when other attributes are changed' do
+ expect_any_instance_of(User).not_to receive(:update_two_factor_requirement)
+
+ group.update!(description: 'foobar')
+ end
+
+ it 'calls #update_two_factor_requirement on each group member' do
+ other_user = create(:user)
+ group.add_user(other_user, GroupMember::OWNER)
+
+ calls = 0
+ allow_any_instance_of(User).to receive(:update_two_factor_requirement) do
+ calls += 1
+ end
+
+ group.update!(require_two_factor_authentication: true, two_factor_grace_period: 23)
+
+ expect(calls).to eq 2
+ end
+ end
end
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index b8584301baa..4bdd46a581d 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -51,14 +51,6 @@ describe Issue, models: true do
expect(issue.closed_at).to eq(now)
end
-
- it 'sets closed_at to nil when issue is reopened' do
- issue = create(:issue, state: 'closed')
-
- issue.reopen
-
- expect(issue.closed_at).to be_nil
- end
end
describe '#to_reference' do
diff --git a/spec/models/members/group_member_spec.rb b/spec/models/members/group_member_spec.rb
index 370aeb9e0a9..024380b7ebb 100644
--- a/spec/models/members/group_member_spec.rb
+++ b/spec/models/members/group_member_spec.rb
@@ -61,7 +61,7 @@ describe GroupMember, models: true do
describe '#after_accept_request' do
it 'calls NotificationService.accept_group_access_request' do
- member = create(:group_member, user: build_stubbed(:user), requested_at: Time.now)
+ member = create(:group_member, user: build(:user), requested_at: Time.now)
expect_any_instance_of(NotificationService).to receive(:new_group_member)
@@ -75,4 +75,19 @@ describe GroupMember, models: true do
it { is_expected.to eq 'Group' }
end
end
+
+ describe '#update_two_factor_requirement' do
+ let(:user) { build :user }
+ let(:group_member) { build :group_member, user: user }
+
+ it 'is called after creation and deletion' do
+ expect(user).to receive(:update_two_factor_requirement)
+
+ group_member.save
+
+ expect(user).to receive(:update_two_factor_requirement)
+
+ group_member.destroy
+ end
+ end
end
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index ccaf0d7abc7..e406d0a16bd 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -148,42 +148,62 @@ describe Namespace, models: true do
expect(@namespace.move_dir).to be_truthy
end
- context "when any project has container tags" do
+ context "when any project has container images" do
+ let(:container_repository) { create(:container_repository) }
+
before do
stub_container_registry_config(enabled: true)
- stub_container_registry_tags('tag')
+ stub_container_registry_tags(repository: :any, tags: ['tag'])
- create(:empty_project, namespace: @namespace)
+ create(:empty_project, namespace: @namespace, container_repositories: [container_repository])
allow(@namespace).to receive(:path_was).and_return(@namespace.path)
allow(@namespace).to receive(:path).and_return('new_path')
end
- it { expect { @namespace.move_dir }.to raise_error('Namespace cannot be moved, because at least one project has tags in container registry') }
+ it 'raises an error saying the namespace cannot be moved' do
+ expect { @namespace.move_dir }.to raise_error(/Namespace cannot be moved/)
+ end
end
- context 'renaming a sub-group' do
+ context 'with subgroups' do
let(:parent) { create(:group, name: 'parent', path: 'parent') }
let(:child) { create(:group, name: 'child', path: 'child', parent: parent) }
let!(:project) { create(:project_empty_repo, path: 'the-project', namespace: child) }
- let(:uploads_dir) { File.join(CarrierWave.root, 'uploads', 'parent') }
- let(:pages_dir) { File.join(TestEnv.pages_path, 'parent') }
+ let(:uploads_dir) { File.join(CarrierWave.root, 'uploads') }
+ let(:pages_dir) { TestEnv.pages_path }
before do
- FileUtils.mkdir_p(File.join(uploads_dir, 'child', 'the-project'))
- FileUtils.mkdir_p(File.join(pages_dir, 'child', 'the-project'))
+ FileUtils.mkdir_p(File.join(uploads_dir, 'parent', 'child', 'the-project'))
+ FileUtils.mkdir_p(File.join(pages_dir, 'parent', 'child', 'the-project'))
+ end
+
+ context 'renaming child' do
+ it 'correctly moves the repository, uploads and pages' do
+ expected_repository_path = File.join(TestEnv.repos_path, 'parent', 'renamed', 'the-project.git')
+ expected_upload_path = File.join(uploads_dir, 'parent', 'renamed', 'the-project')
+ expected_pages_path = File.join(pages_dir, 'parent', 'renamed', 'the-project')
+
+ child.update_attributes!(path: 'renamed')
+
+ expect(File.directory?(expected_repository_path)).to be(true)
+ expect(File.directory?(expected_upload_path)).to be(true)
+ expect(File.directory?(expected_pages_path)).to be(true)
+ end
end
- it 'correctly moves the repository, uploads and pages' do
- expected_repository_path = File.join(TestEnv.repos_path, 'parent', 'renamed', 'the-project.git')
- expected_upload_path = File.join(uploads_dir, 'renamed', 'the-project')
- expected_pages_path = File.join(pages_dir, 'renamed', 'the-project')
+ context 'renaming parent' do
+ it 'correctly moves the repository, uploads and pages' do
+ expected_repository_path = File.join(TestEnv.repos_path, 'renamed', 'child', 'the-project.git')
+ expected_upload_path = File.join(uploads_dir, 'renamed', 'child', 'the-project')
+ expected_pages_path = File.join(pages_dir, 'renamed', 'child', 'the-project')
- child.update_attributes!(path: 'renamed')
+ parent.update_attributes!(path: 'renamed')
- expect(File.directory?(expected_repository_path)).to be(true)
- expect(File.directory?(expected_upload_path)).to be(true)
- expect(File.directory?(expected_pages_path)).to be(true)
+ expect(File.directory?(expected_repository_path)).to be(true)
+ expect(File.directory?(expected_upload_path)).to be(true)
+ expect(File.directory?(expected_pages_path)).to be(true)
+ end
end
end
end
@@ -295,4 +315,13 @@ describe Namespace, models: true do
to eq([namespace.owner_id])
end
end
+
+ describe '#all_projects' do
+ let(:group) { create(:group) }
+ let(:child) { create(:group, parent: group) }
+ let!(:project1) { create(:project_empty_repo, namespace: group) }
+ let!(:project2) { create(:project_empty_repo, namespace: child) }
+
+ it { expect(group.all_projects.to_a).to eq([project2, project1]) }
+ end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index a899cfcceb7..abb61660e5d 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -1158,11 +1158,12 @@ describe Project, models: true do
# Project#gitlab_shell returns a new instance of Gitlab::Shell on every
# call. This makes testing a bit easier.
allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
-
allow(project).to receive(:previous_changes).and_return('path' => ['foo'])
end
it 'renames a repository' do
+ stub_container_registry_config(enabled: false)
+
expect(gitlab_shell).to receive(:mv_repository).
ordered.
with(project.repository_storage_path, "#{project.namespace.full_path}/foo", "#{project.full_path}").
@@ -1186,10 +1187,13 @@ describe Project, models: true do
project.rename_repo
end
- context 'container registry with tags' do
+ context 'container registry with images' do
+ let(:container_repository) { create(:container_repository) }
+
before do
stub_container_registry_config(enabled: true)
- stub_container_registry_tags('tag')
+ stub_container_registry_tags(repository: :any, tags: ['tag'])
+ project.container_repositories << container_repository
end
subject { project.rename_repo }
@@ -1387,38 +1391,17 @@ describe Project, models: true do
end
end
- describe '#container_registry_path_with_namespace' do
- let(:project) { create(:empty_project, path: 'PROJECT') }
-
- subject { project.container_registry_path_with_namespace }
-
- it { is_expected.not_to eq(project.path_with_namespace) }
- it { is_expected.to eq(project.path_with_namespace.downcase) }
- end
-
- describe '#container_registry_repository' do
- let(:project) { create(:empty_project) }
-
- before { stub_container_registry_config(enabled: true) }
-
- subject { project.container_registry_repository }
-
- it { is_expected.not_to be_nil }
- end
-
- describe '#container_registry_repository_url' do
+ describe '#container_registry_url' do
let(:project) { create(:empty_project) }
- subject { project.container_registry_repository_url }
+ subject { project.container_registry_url }
before { stub_container_registry_config(**registry_settings) }
context 'for enabled registry' do
let(:registry_settings) do
- {
- enabled: true,
- host_port: 'example.com',
- }
+ { enabled: true,
+ host_port: 'example.com' }
end
it { is_expected.not_to be_nil }
@@ -1426,9 +1409,7 @@ describe Project, models: true do
context 'for disabled registry' do
let(:registry_settings) do
- {
- enabled: false
- }
+ { enabled: false }
end
it { is_expected.to be_nil }
@@ -1438,28 +1419,60 @@ describe Project, models: true do
describe '#has_container_registry_tags?' do
let(:project) { create(:empty_project) }
- subject { project.has_container_registry_tags? }
-
- context 'for enabled registry' do
+ context 'when container registry is enabled' do
before { stub_container_registry_config(enabled: true) }
- context 'with tags' do
- before { stub_container_registry_tags('test', 'test2') }
+ context 'when tags are present for multi-level registries' do
+ before do
+ create(:container_repository, project: project, name: 'image')
+
+ stub_container_registry_tags(repository: /image/,
+ tags: %w[latest rc1])
+ end
- it { is_expected.to be_truthy }
+ it 'has image tags' do
+ expect(project).to have_container_registry_tags
+ end
end
- context 'when no tags' do
- before { stub_container_registry_tags }
+ context 'when tags are present for root repository' do
+ before do
+ stub_container_registry_tags(repository: project.full_path,
+ tags: %w[latest rc1 pre1])
+ end
- it { is_expected.to be_falsey }
+ it 'has image tags' do
+ expect(project).to have_container_registry_tags
+ end
+ end
+
+ context 'when there are no tags at all' do
+ before do
+ stub_container_registry_tags(repository: :any, tags: [])
+ end
+
+ it 'does not have image tags' do
+ expect(project).not_to have_container_registry_tags
+ end
end
end
- context 'for disabled registry' do
+ context 'when container registry is disabled' do
before { stub_container_registry_config(enabled: false) }
- it { is_expected.to be_falsey }
+ it 'does not have image tags' do
+ expect(project).not_to have_container_registry_tags
+ end
+
+ it 'does not check root repository tags' do
+ expect(project).not_to receive(:full_path)
+ expect(project).not_to have_container_registry_tags
+ end
+
+ it 'iterates through container repositories' do
+ expect(project).to receive(:container_repositories)
+ expect(project).not_to have_container_registry_tags
+ end
end
end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index a9e37be1157..6f7b9c2388a 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -28,7 +28,6 @@ describe User, models: true do
it { is_expected.to have_many(:merge_requests).dependent(:destroy) }
it { is_expected.to have_many(:assigned_merge_requests).dependent(:nullify) }
it { is_expected.to have_many(:identities).dependent(:destroy) }
- it { is_expected.to have_one(:abuse_report) }
it { is_expected.to have_many(:spam_logs).dependent(:destroy) }
it { is_expected.to have_many(:todos).dependent(:destroy) }
it { is_expected.to have_many(:award_emoji).dependent(:destroy) }
@@ -37,6 +36,34 @@ describe User, models: true do
it { is_expected.to have_many(:pipelines).dependent(:nullify) }
it { is_expected.to have_many(:chat_names).dependent(:destroy) }
it { is_expected.to have_many(:uploads).dependent(:destroy) }
+ it { is_expected.to have_many(:reported_abuse_reports).dependent(:destroy).class_name('AbuseReport') }
+
+ describe "#abuse_report" do
+ let(:current_user) { create(:user) }
+ let(:other_user) { create(:user) }
+
+ it { is_expected.to have_one(:abuse_report) }
+
+ it "refers to the abuse report whose user_id is the current user" do
+ abuse_report = create(:abuse_report, reporter: other_user, user: current_user)
+
+ expect(current_user.abuse_report).to eq(abuse_report)
+ end
+
+ it "does not refer to the abuse report whose reporter_id is the current user" do
+ create(:abuse_report, reporter: current_user, user: other_user)
+
+ expect(current_user.abuse_report).to be_nil
+ end
+
+ it "does not update the user_id of an abuse report when the user is updated" do
+ abuse_report = create(:abuse_report, reporter: current_user, user: other_user)
+
+ current_user.block
+
+ expect(abuse_report.reload.user).to eq(other_user)
+ end
+ end
describe '#group_members' do
it 'does not include group memberships for which user is a requester' do
@@ -1407,6 +1434,17 @@ describe User, models: true do
it { expect(user.nested_groups).to eq([nested_group]) }
end
+ describe '#all_expanded_groups' do
+ let!(:user) { create(:user) }
+ let!(:group) { create(:group) }
+ let!(:nested_group_1) { create(:group, parent: group) }
+ let!(:nested_group_2) { create(:group, parent: group) }
+
+ before { nested_group_1.add_owner(user) }
+
+ it { expect(user.all_expanded_groups).to match_array [group, nested_group_1] }
+ end
+
describe '#nested_groups_projects' do
let!(:user) { create(:user) }
let!(:group) { create(:group) }
@@ -1521,4 +1559,76 @@ describe User, models: true do
end
end
end
+
+ describe '#update_two_factor_requirement' do
+ let(:user) { create :user }
+
+ context 'with 2FA requirement on groups' do
+ let(:group1) { create :group, require_two_factor_authentication: true, two_factor_grace_period: 23 }
+ let(:group2) { create :group, require_two_factor_authentication: true, two_factor_grace_period: 32 }
+
+ before do
+ group1.add_user(user, GroupMember::OWNER)
+ group2.add_user(user, GroupMember::OWNER)
+
+ user.update_two_factor_requirement
+ end
+
+ it 'requires 2FA' do
+ expect(user.require_two_factor_authentication_from_group).to be true
+ end
+
+ it 'uses the shortest grace period' do
+ expect(user.two_factor_grace_period).to be 23
+ end
+ end
+
+ context 'with 2FA requirement on nested parent group' do
+ let!(:group1) { create :group, require_two_factor_authentication: true }
+ let!(:group1a) { create :group, require_two_factor_authentication: false, parent: group1 }
+
+ before do
+ group1a.add_user(user, GroupMember::OWNER)
+
+ user.update_two_factor_requirement
+ end
+
+ it 'requires 2FA' do
+ expect(user.require_two_factor_authentication_from_group).to be true
+ end
+ end
+
+ context 'with 2FA requirement on nested child group' do
+ let!(:group1) { create :group, require_two_factor_authentication: false }
+ let!(:group1a) { create :group, require_two_factor_authentication: true, parent: group1 }
+
+ before do
+ group1.add_user(user, GroupMember::OWNER)
+
+ user.update_two_factor_requirement
+ end
+
+ it 'requires 2FA' do
+ expect(user.require_two_factor_authentication_from_group).to be true
+ end
+ end
+
+ context 'without 2FA requirement on groups' do
+ let(:group) { create :group }
+
+ before do
+ group.add_user(user, GroupMember::OWNER)
+
+ user.update_two_factor_requirement
+ end
+
+ it 'does not require 2FA' do
+ expect(user.require_two_factor_authentication_from_group).to be false
+ end
+
+ it 'falls back to the default grace period' do
+ expect(user.two_factor_grace_period).to be 48
+ end
+ end
+ end
end
diff --git a/spec/requests/api/deploy_keys_spec.rb b/spec/requests/api/deploy_keys_spec.rb
index 4f4b18cf0e0..e1beac28dab 100644
--- a/spec/requests/api/deploy_keys_spec.rb
+++ b/spec/requests/api/deploy_keys_spec.rb
@@ -108,6 +108,15 @@ describe API::DeployKeys, api: true do
expect(response).to have_http_status(201)
end
+
+ it 'accepts can_push parameter' do
+ key_attrs = attributes_for :write_access_key
+
+ post api("/projects/#{project.id}/deploy_keys", admin), key_attrs
+
+ expect(response).to have_http_status(201)
+ expect(json_response['can_push']).to eq(true)
+ end
end
describe 'DELETE /projects/:id/deploy_keys/:key_id' do
diff --git a/spec/requests/api/issues_spec.rb b/spec/requests/api/issues_spec.rb
index 4729adba11c..2b27ce6390a 100644
--- a/spec/requests/api/issues_spec.rb
+++ b/spec/requests/api/issues_spec.rb
@@ -12,6 +12,8 @@ describe API::Issues, api: true do
let(:assignee) { create(:assignee) }
let(:admin) { create(:user, :admin) }
let!(:project) { create(:empty_project, :public, creator_id: user.id, namespace: user.namespace ) }
+ let(:issue_title) { 'foo' }
+ let(:issue_description) { 'closed' }
let!(:closed_issue) do
create :closed_issue,
author: user,
@@ -38,7 +40,9 @@ describe API::Issues, api: true do
project: project,
milestone: milestone,
created_at: generate(:past_time),
- updated_at: 1.hour.ago
+ updated_at: 1.hour.ago,
+ title: issue_title,
+ description: issue_description
end
let!(:label) do
create(:label, title: 'label', color: '#FFAABB', project: project)
@@ -61,6 +65,7 @@ describe API::Issues, api: true do
context "when unauthenticated" do
it "returns authentication error" do
get api("/issues")
+
expect(response).to have_http_status(401)
end
end
@@ -69,9 +74,7 @@ describe API::Issues, api: true do
it "returns an array of issues" do
get api("/issues", user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
+ expect_paginated_array_response(size: 2)
expect(json_response.first['title']).to eq(issue.title)
expect(json_response.last).to have_key('web_url')
end
@@ -79,41 +82,43 @@ describe API::Issues, api: true do
it 'returns an array of closed issues' do
get api('/issues?state=closed', user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response(size: 1)
expect(json_response.first['id']).to eq(closed_issue.id)
end
it 'returns an array of opened issues' do
get api('/issues?state=opened', user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response(size: 1)
expect(json_response.first['id']).to eq(issue.id)
end
it 'returns an array of all issues' do
get api('/issues?state=all', user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(2)
+ expect_paginated_array_response(size: 2)
expect(json_response.first['id']).to eq(issue.id)
expect(json_response.second['id']).to eq(closed_issue.id)
end
+ it 'returns issues matching given search string for title' do
+ get api("/issues?search=#{issue.title}", user)
+
+ expect_paginated_array_response(size: 1)
+ expect(json_response.first['id']).to eq(issue.id)
+ end
+
+ it 'returns issues matching given search string for description' do
+ get api("/issues?search=#{issue.description}", user)
+
+ expect_paginated_array_response(size: 1)
+ expect(json_response.first['id']).to eq(issue.id)
+ end
+
it 'returns an array of labeled issues' do
get api("/issues?labels=#{label.title}", user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response(size: 1)
expect(json_response.first['labels']).to eq([label.title])
end
@@ -126,29 +131,20 @@ describe API::Issues, api: true do
get api("/issues", user), labels: "#{label.title},#{label_b.title},#{label_c.title}"
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response(size: 1)
expect(json_response.first['labels']).to eq([label_c.title, label_b.title, label.title])
end
it 'returns an empty array if no issue matches labels' do
get api('/issues?labels=foo,bar', user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
+ expect_paginated_array_response(size: 0)
end
it 'returns an array of labeled issues matching given state' do
get api("/issues?labels=#{label.title}&state=opened", user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response(size: 1)
expect(json_response.first['labels']).to eq([label.title])
expect(json_response.first['state']).to eq('opened')
end
@@ -156,47 +152,32 @@ describe API::Issues, api: true do
it 'returns unlabeled issues for "No Label" label' do
get api("/issues", user), labels: 'No Label'
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response(size: 1)
expect(json_response.first['labels']).to be_empty
end
it 'returns an empty array if no issue matches labels and state filters' do
get api("/issues?labels=#{label.title}&state=closed", user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
+ expect_paginated_array_response(size: 0)
end
it 'returns an empty array if no issue matches milestone' do
get api("/issues?milestone=#{empty_milestone.title}", user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
+ expect_paginated_array_response(size: 0)
end
it 'returns an empty array if milestone does not exist' do
get api("/issues?milestone=foo", user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
+ expect_paginated_array_response(size: 0)
end
it 'returns an array of issues in given milestone' do
get api("/issues?milestone=#{milestone.title}", user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(2)
+ expect_paginated_array_response(size: 2)
expect(json_response.first['id']).to eq(issue.id)
expect(json_response.second['id']).to eq(closed_issue.id)
end
@@ -205,49 +186,36 @@ describe API::Issues, api: true do
get api("/issues?milestone=#{milestone.title}"\
'&state=closed', user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response(size: 1)
expect(json_response.first['id']).to eq(closed_issue.id)
end
it 'returns an array of issues with no milestone' do
get api("/issues?milestone=#{no_milestone_title}", author)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response(size: 1)
expect(json_response.first['id']).to eq(confidential_issue.id)
end
it 'returns an array of issues found by iids' do
get api('/issues', user), iids: [closed_issue.iid]
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response(size: 1)
expect(json_response.first['id']).to eq(closed_issue.id)
end
it 'returns an empty array if iid does not exist' do
get api("/issues", user), iids: [99999]
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
+ expect_paginated_array_response(size: 0)
end
it 'sorts by created_at descending by default' do
get api('/issues', user)
response_dates = json_response.map { |issue| issue['created_at'] }
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
+
+ expect_paginated_array_response(size: 2)
expect(response_dates).to eq(response_dates.sort.reverse)
end
@@ -255,9 +223,8 @@ describe API::Issues, api: true do
get api('/issues?sort=asc', user)
response_dates = json_response.map { |issue| issue['created_at'] }
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
+
+ expect_paginated_array_response(size: 2)
expect(response_dates).to eq(response_dates.sort)
end
@@ -265,9 +232,8 @@ describe API::Issues, api: true do
get api('/issues?order_by=updated_at', user)
response_dates = json_response.map { |issue| issue['updated_at'] }
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
+
+ expect_paginated_array_response(size: 2)
expect(response_dates).to eq(response_dates.sort.reverse)
end
@@ -275,9 +241,8 @@ describe API::Issues, api: true do
get api('/issues?order_by=updated_at&sort=asc', user)
response_dates = json_response.map { |issue| issue['updated_at'] }
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
+
+ expect_paginated_array_response(size: 2)
expect(response_dates).to eq(response_dates.sort)
end
@@ -316,7 +281,9 @@ describe API::Issues, api: true do
assignee: user,
project: group_project,
milestone: group_milestone,
- updated_at: 1.hour.ago
+ updated_at: 1.hour.ago,
+ title: issue_title,
+ description: issue_description
end
let!(:group_label) do
create(:label, title: 'group_lbl', color: '#FFAABB', project: group_project)
@@ -336,74 +303,65 @@ describe API::Issues, api: true do
it 'returns all group issues (including opened and closed)' do
get api(base_url, admin)
- expect(response).to have_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(3)
+ expect_paginated_array_response(size: 3)
end
it 'returns group issues without confidential issues for non project members' do
get api("#{base_url}?state=opened", non_member)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response(size: 1)
expect(json_response.first['title']).to eq(group_issue.title)
end
it 'returns group confidential issues for author' do
get api("#{base_url}?state=opened", author)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(2)
+ expect_paginated_array_response(size: 2)
end
it 'returns group confidential issues for assignee' do
get api("#{base_url}?state=opened", assignee)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(2)
+ expect_paginated_array_response(size: 2)
end
it 'returns group issues with confidential issues for project members' do
get api("#{base_url}?state=opened", user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(2)
+ expect_paginated_array_response(size: 2)
end
it 'returns group confidential issues for admin' do
get api("#{base_url}?state=opened", admin)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(2)
+ expect_paginated_array_response(size: 2)
end
it 'returns an array of labeled group issues' do
get api("#{base_url}?labels=#{group_label.title}", user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response(size: 1)
expect(json_response.first['labels']).to eq([group_label.title])
end
it 'returns an array of labeled group issues where all labels match' do
get api("#{base_url}?labels=#{group_label.title},foo,bar", user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
+ expect_paginated_array_response(size: 0)
+ end
+
+ it 'returns issues matching given search string for title' do
+ get api("#{base_url}?search=#{group_issue.title}", user)
+
+ expect_paginated_array_response(size: 1)
+ expect(json_response.first['id']).to eq(group_issue.id)
+ end
+
+ it 'returns issues matching given search string for description' do
+ get api("#{base_url}?search=#{group_issue.description}", user)
+
+ expect_paginated_array_response(size: 1)
+ expect(json_response.first['id']).to eq(group_issue.id)
end
it 'returns an array of labeled issues when all labels matches' do
@@ -415,65 +373,45 @@ describe API::Issues, api: true do
get api("#{base_url}", user), labels: "#{group_label.title},#{label_b.title},#{label_c.title}"
- expect(response).to have_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response(size: 1)
expect(json_response.first['labels']).to eq([label_c.title, label_b.title, group_label.title])
end
it 'returns an array of issues found by iids' do
get api(base_url, user), iids: [group_issue.iid]
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response(size: 1)
expect(json_response.first['id']).to eq(group_issue.id)
end
it 'returns an empty array if iid does not exist' do
get api(base_url, user), iids: [99999]
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
+ expect_paginated_array_response(size: 0)
end
it 'returns an empty array if no group issue matches labels' do
get api("#{base_url}?labels=foo,bar", user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
+ expect_paginated_array_response(size: 0)
end
it 'returns an empty array if no issue matches milestone' do
get api("#{base_url}?milestone=#{group_empty_milestone.title}", user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
+ expect_paginated_array_response(size: 0)
end
it 'returns an empty array if milestone does not exist' do
get api("#{base_url}?milestone=foo", user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
+ expect_paginated_array_response(size: 0)
end
it 'returns an array of issues in given milestone' do
get api("#{base_url}?state=opened&milestone=#{group_milestone.title}", user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response(size: 1)
expect(json_response.first['id']).to eq(group_issue.id)
end
@@ -481,10 +419,7 @@ describe API::Issues, api: true do
get api("#{base_url}?milestone=#{group_milestone.title}"\
'&state=closed', user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response(size: 1)
expect(json_response.first['id']).to eq(group_closed_issue.id)
end
@@ -492,9 +427,8 @@ describe API::Issues, api: true do
get api("#{base_url}?milestone=#{no_milestone_title}", user)
expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+
+ expect_paginated_array_response(size: 1)
expect(json_response.first['id']).to eq(group_confidential_issue.id)
end
@@ -502,9 +436,8 @@ describe API::Issues, api: true do
get api(base_url, user)
response_dates = json_response.map { |issue| issue['created_at'] }
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
+
+ expect_paginated_array_response(size: 3)
expect(response_dates).to eq(response_dates.sort.reverse)
end
@@ -512,9 +445,8 @@ describe API::Issues, api: true do
get api("#{base_url}?sort=asc", user)
response_dates = json_response.map { |issue| issue['created_at'] }
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
+
+ expect_paginated_array_response(size: 3)
expect(response_dates).to eq(response_dates.sort)
end
@@ -522,9 +454,8 @@ describe API::Issues, api: true do
get api("#{base_url}?order_by=updated_at", user)
response_dates = json_response.map { |issue| issue['updated_at'] }
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
+
+ expect_paginated_array_response(size: 3)
expect(response_dates).to eq(response_dates.sort.reverse)
end
@@ -532,9 +463,8 @@ describe API::Issues, api: true do
get api("#{base_url}?order_by=updated_at&sort=asc", user)
response_dates = json_response.map { |issue| issue['updated_at'] }
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
+
+ expect_paginated_array_response(size: 3)
expect(response_dates).to eq(response_dates.sort)
end
end
@@ -563,79 +493,55 @@ describe API::Issues, api: true do
get api("/projects/#{restricted_project.id}/issues", non_member)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response).to eq([])
+ expect_paginated_array_response(size: 0)
end
it 'returns project issues without confidential issues for non project members' do
get api("#{base_url}/issues", non_member)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(2)
+ expect_paginated_array_response(size: 2)
expect(json_response.first['title']).to eq(issue.title)
end
it 'returns project issues without confidential issues for project members with guest role' do
get api("#{base_url}/issues", guest)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(2)
+ expect_paginated_array_response(size: 2)
expect(json_response.first['title']).to eq(issue.title)
end
it 'returns project confidential issues for author' do
get api("#{base_url}/issues", author)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(3)
+ expect_paginated_array_response(size: 3)
expect(json_response.first['title']).to eq(issue.title)
end
it 'returns project confidential issues for assignee' do
get api("#{base_url}/issues", assignee)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(3)
+ expect_paginated_array_response(size: 3)
expect(json_response.first['title']).to eq(issue.title)
end
it 'returns project issues with confidential issues for project members' do
get api("#{base_url}/issues", user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(3)
+ expect_paginated_array_response(size: 3)
expect(json_response.first['title']).to eq(issue.title)
end
it 'returns project confidential issues for admin' do
get api("#{base_url}/issues", admin)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(3)
+ expect_paginated_array_response(size: 3)
expect(json_response.first['title']).to eq(issue.title)
end
it 'returns an array of labeled project issues' do
get api("#{base_url}/issues?labels=#{label.title}", user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response(size: 1)
expect(json_response.first['labels']).to eq([label.title])
end
@@ -648,74 +554,65 @@ describe API::Issues, api: true do
get api("#{base_url}/issues", user), labels: "#{label.title},#{label_b.title},#{label_c.title}"
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response(size: 1)
expect(json_response.first['labels']).to eq([label_c.title, label_b.title, label.title])
end
+ it 'returns issues matching given search string for title' do
+ get api("#{base_url}/issues?search=#{issue.title}", user)
+
+ expect_paginated_array_response(size: 1)
+ expect(json_response.first['id']).to eq(issue.id)
+ end
+
+ it 'returns issues matching given search string for description' do
+ get api("#{base_url}/issues?search=#{issue.description}", user)
+
+ expect_paginated_array_response(size: 1)
+ expect(json_response.first['id']).to eq(issue.id)
+ end
+
it 'returns an array of issues found by iids' do
get api("#{base_url}/issues", user), iids: [issue.iid]
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response(size: 1)
expect(json_response.first['id']).to eq(issue.id)
end
it 'returns an empty array if iid does not exist' do
get api("#{base_url}/issues", user), iids: [99999]
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
+ expect_paginated_array_response(size: 0)
end
it 'returns an empty array if not all labels matches' do
get api("#{base_url}/issues?labels=#{label.title},foo", user)
- expect(response).to have_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
+ expect_paginated_array_response(size: 0)
end
it 'returns an empty array if no project issue matches labels' do
get api("#{base_url}/issues?labels=foo,bar", user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
+ expect_paginated_array_response(size: 0)
end
it 'returns an empty array if no issue matches milestone' do
get api("#{base_url}/issues?milestone=#{empty_milestone.title}", user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
+ expect_paginated_array_response(size: 0)
end
it 'returns an empty array if milestone does not exist' do
get api("#{base_url}/issues?milestone=foo", user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
+ expect_paginated_array_response(size: 0)
end
it 'returns an array of issues in given milestone' do
get api("#{base_url}/issues?milestone=#{milestone.title}", user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(2)
+ expect_paginated_array_response(size: 2)
expect(json_response.first['id']).to eq(issue.id)
expect(json_response.second['id']).to eq(closed_issue.id)
end
@@ -723,20 +620,14 @@ describe API::Issues, api: true do
it 'returns an array of issues matching state in milestone' do
get api("#{base_url}/issues?milestone=#{milestone.title}&state=closed", user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response(size: 1)
expect(json_response.first['id']).to eq(closed_issue.id)
end
it 'returns an array of issues with no milestone' do
get api("#{base_url}/issues?milestone=#{no_milestone_title}", user)
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response(size: 1)
expect(json_response.first['id']).to eq(confidential_issue.id)
end
@@ -744,9 +635,8 @@ describe API::Issues, api: true do
get api("#{base_url}/issues", user)
response_dates = json_response.map { |issue| issue['created_at'] }
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
+
+ expect_paginated_array_response(size: 3)
expect(response_dates).to eq(response_dates.sort.reverse)
end
@@ -754,9 +644,8 @@ describe API::Issues, api: true do
get api("#{base_url}/issues?sort=asc", user)
response_dates = json_response.map { |issue| issue['created_at'] }
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
+
+ expect_paginated_array_response(size: 3)
expect(response_dates).to eq(response_dates.sort)
end
@@ -764,9 +653,8 @@ describe API::Issues, api: true do
get api("#{base_url}/issues?order_by=updated_at", user)
response_dates = json_response.map { |issue| issue['updated_at'] }
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
+
+ expect_paginated_array_response(size: 3)
expect(response_dates).to eq(response_dates.sort.reverse)
end
@@ -774,9 +662,8 @@ describe API::Issues, api: true do
get api("#{base_url}/issues?order_by=updated_at&sort=asc", user)
response_dates = json_response.map { |issue| issue['updated_at'] }
- expect(response).to have_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
+
+ expect_paginated_array_response(size: 3)
expect(response_dates).to eq(response_dates.sort)
end
end
@@ -1457,4 +1344,11 @@ describe API::Issues, api: true do
include_examples 'time tracking endpoints', 'issue'
end
+
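+ # Asserts a successful (200) paginated index response: pagination headers, an Array body, and the given size when provided.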
+ def expect_paginated_array_response(size: nil)
+ expect(response).to have_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(size) if size
+ end
end
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index 9450701064b..d8a56c02a63 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -320,7 +320,7 @@ describe API::Jobs, api: true do
context 'authorized user' do
it 'returns specific job trace' do
expect(response).to have_http_status(200)
- expect(response.body).to eq(build.trace)
+ expect(response.body).to eq(build.trace.raw)
end
end
@@ -408,7 +408,7 @@ describe API::Jobs, api: true do
it 'erases job content' do
expect(response).to have_http_status(201)
- expect(build.trace).to be_empty
+ expect(build).not_to have_trace
expect(build.artifacts_file.exists?).to be_falsy
expect(build.artifacts_metadata.exists?).to be_falsy
end
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index 044b989e5ba..409a59d6c23 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -461,6 +461,29 @@ describe API::Runner do
end
end
+ context 'when dependencies is an empty array' do
+ let!(:job) { create(:ci_build_tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
+ let!(:job2) { create(:ci_build_tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
+ let!(:empty_dependencies_job) do
+ create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'empty_dependencies_job',
+ stage: 'deploy', stage_idx: 1,
+ options: { dependencies: [] })
+ end
+
+ before do
+ job.success
+ job2.success
+ end
+
+ it 'returns an empty array' do
+ request_job
+
+ expect(response).to have_http_status(201)
+ expect(json_response['id']).to eq(empty_dependencies_job.id)
+ expect(json_response['dependencies'].count).to eq(0)
+ end
+ end
+
context 'when job has no tags' do
before { job.update(tags: []) }
@@ -569,7 +592,7 @@ describe API::Runner do
update_job(trace: 'BUILD TRACE UPDATED')
expect(response).to have_http_status(200)
- expect(job.reload.trace).to eq 'BUILD TRACE UPDATED'
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE UPDATED'
end
end
@@ -577,7 +600,7 @@ describe API::Runner do
it 'does not override trace information' do
update_job
- expect(job.reload.trace).to eq 'BUILD TRACE'
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE'
end
end
@@ -608,7 +631,7 @@ describe API::Runner do
context 'when request is valid' do
it 'gets correct response' do
expect(response.status).to eq 202
- expect(job.reload.trace).to eq 'BUILD TRACE appended'
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
expect(response.header).to have_key 'Range'
expect(response.header).to have_key 'Job-Status'
end
@@ -619,7 +642,7 @@ describe API::Runner do
it "changes the job's trace" do
patch_the_trace
- expect(job.reload.trace).to eq 'BUILD TRACE appended appended'
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
end
context 'when Runner makes a force-patch' do
@@ -628,7 +651,7 @@ describe API::Runner do
it "doesn't change the build.trace" do
force_patch_the_trace
- expect(job.reload.trace).to eq 'BUILD TRACE appended'
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
end
end
end
@@ -641,7 +664,7 @@ describe API::Runner do
it 'changes the job.trace' do
patch_the_trace
- expect(job.reload.trace).to eq 'BUILD TRACE appended appended'
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
end
context 'when Runner makes a force-patch' do
@@ -650,7 +673,7 @@ describe API::Runner do
it "doesn't change the job.trace" do
force_patch_the_trace
- expect(job.reload.trace).to eq 'BUILD TRACE appended'
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
end
end
end
@@ -675,7 +698,7 @@ describe API::Runner do
it 'gets correct response' do
expect(response.status).to eq 202
- expect(job.reload.trace).to eq 'BUILD TRACE appended'
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
expect(response.header).to have_key 'Range'
expect(response.header).to have_key 'Job-Status'
end
@@ -715,9 +738,11 @@ describe API::Runner do
def patch_the_trace(content = ' appended', request_headers = nil)
unless request_headers
- offset = job.trace_length
- limit = offset + content.length - 1
- request_headers = headers.merge({ 'Content-Range' => "#{offset}-#{limit}" })
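+ # Compute the Content-Range header from the current size of the trace stream.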
+ job.trace.read do |stream|
+ offset = stream.size
+ limit = offset + content.length - 1
+ request_headers = headers.merge({ 'Content-Range' => "#{offset}-#{limit}" })
+ end
end
Timecop.travel(job.updated_at + update_interval) do
diff --git a/spec/requests/api/v3/builds_spec.rb b/spec/requests/api/v3/builds_spec.rb
index a50c22a6dd1..e97d2b0cee0 100644
--- a/spec/requests/api/v3/builds_spec.rb
+++ b/spec/requests/api/v3/builds_spec.rb
@@ -330,7 +330,7 @@ describe API::V3::Builds, api: true do
context 'authorized user' do
it 'returns specific job trace' do
expect(response).to have_http_status(200)
- expect(response.body).to eq(build.trace)
+ expect(response.body).to eq(build.trace.raw)
end
end
@@ -418,7 +418,7 @@ describe API::V3::Builds, api: true do
it 'erases job content' do
expect(response.status).to eq 201
- expect(build.trace).to be_empty
+ expect(build).not_to have_trace
expect(build.artifacts_file.exists?).to be_falsy
expect(build.artifacts_metadata.exists?).to be_falsy
end
diff --git a/spec/requests/ci/api/builds_spec.rb b/spec/requests/ci/api/builds_spec.rb
index c879f37f50d..ef30d8638dd 100644
--- a/spec/requests/ci/api/builds_spec.rb
+++ b/spec/requests/ci/api/builds_spec.rb
@@ -285,7 +285,7 @@ describe Ci::API::Builds do
end
it 'does not override trace information when no trace is given' do
- expect(build.reload.trace).to eq 'BUILD TRACE'
+ expect(build.reload.trace.raw).to eq 'BUILD TRACE'
end
context 'job has been erased' do
@@ -309,9 +309,11 @@ describe Ci::API::Builds do
def patch_the_trace(content = ' appended', request_headers = nil)
unless request_headers
- offset = build.trace_length
- limit = offset + content.length - 1
- request_headers = headers.merge({ 'Content-Range' => "#{offset}-#{limit}" })
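+ # Compute the Content-Range header from the current size of the trace stream.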
+ build.trace.read do |stream|
+ offset = stream.size
+ limit = offset + content.length - 1
+ request_headers = headers.merge({ 'Content-Range' => "#{offset}-#{limit}" })
+ end
end
Timecop.travel(build.updated_at + update_interval) do
@@ -335,7 +337,7 @@ describe Ci::API::Builds do
context 'when request is valid' do
it 'gets correct response' do
expect(response.status).to eq 202
- expect(build.reload.trace).to eq 'BUILD TRACE appended'
+ expect(build.reload.trace.raw).to eq 'BUILD TRACE appended'
expect(response.header).to have_key 'Range'
expect(response.header).to have_key 'Build-Status'
end
@@ -346,7 +348,7 @@ describe Ci::API::Builds do
it 'changes the build trace' do
patch_the_trace
- expect(build.reload.trace).to eq 'BUILD TRACE appended appended'
+ expect(build.reload.trace.raw).to eq 'BUILD TRACE appended appended'
end
context 'when Runner makes a force-patch' do
@@ -355,7 +357,7 @@ describe Ci::API::Builds do
it "doesn't change the build.trace" do
force_patch_the_trace
- expect(build.reload.trace).to eq 'BUILD TRACE appended'
+ expect(build.reload.trace.raw).to eq 'BUILD TRACE appended'
end
end
end
@@ -368,7 +370,7 @@ describe Ci::API::Builds do
it 'changes the build.trace' do
patch_the_trace
- expect(build.reload.trace).to eq 'BUILD TRACE appended appended'
+ expect(build.reload.trace.raw).to eq 'BUILD TRACE appended appended'
end
context 'when Runner makes a force-patch' do
@@ -377,7 +379,7 @@ describe Ci::API::Builds do
it "doesn't change the build.trace" do
force_patch_the_trace
- expect(build.reload.trace).to eq 'BUILD TRACE appended'
+ expect(build.reload.trace.raw).to eq 'BUILD TRACE appended'
end
end
end
@@ -403,7 +405,7 @@ describe Ci::API::Builds do
it 'gets correct response' do
expect(response.status).to eq 202
- expect(build.reload.trace).to eq 'BUILD TRACE appended'
+ expect(build.reload.trace.raw).to eq 'BUILD TRACE appended'
expect(response.header).to have_key 'Range'
expect(response.header).to have_key 'Build-Status'
end
diff --git a/spec/rubocop/cop/migration/remove_concurrent_index_spec.rb b/spec/rubocop/cop/migration/remove_concurrent_index_spec.rb
new file mode 100644
index 00000000000..a714bf4e5d5
--- /dev/null
+++ b/spec/rubocop/cop/migration/remove_concurrent_index_spec.rb
@@ -0,0 +1,41 @@
+require 'spec_helper'
+
+require 'rubocop'
+require 'rubocop/rspec/support'
+
+require_relative '../../../../rubocop/cop/migration/remove_concurrent_index'
+
+describe RuboCop::Cop::Migration::RemoveConcurrentIndex do
+ include CopHelper
+
+ subject(:cop) { described_class.new }
+
+ context 'in migration' do
+ before do
+ allow(cop).to receive(:in_migration?).and_return(true)
+ end
+
+ it 'registers an offense when remove_concurrent_index is used inside a change method' do
+ inspect_source(cop, 'def change; remove_concurrent_index :table, :column; end')
+
+ aggregate_failures do
+ expect(cop.offenses.size).to eq(1)
+ expect(cop.offenses.map(&:line)).to eq([1])
+ end
+ end
+
+ it 'registers no offense when remove_concurrent_index is used inside an up method' do
+ inspect_source(cop, 'def up; remove_concurrent_index :table, :column; end')
+
+ expect(cop.offenses.size).to eq(0)
+ end
+ end
+
+ context 'outside of migration' do
+ it 'registers no offense' do
+ inspect_source(cop, 'def change; remove_concurrent_index :table, :column; end')
+
+ expect(cop.offenses.size).to eq(0)
+ end
+ end
+end
diff --git a/spec/rubocop/cop/migration/remove_index_spec.rb b/spec/rubocop/cop/migration/remove_index_spec.rb
new file mode 100644
index 00000000000..31923cb7429
--- /dev/null
+++ b/spec/rubocop/cop/migration/remove_index_spec.rb
@@ -0,0 +1,35 @@
+require 'spec_helper'
+
+require 'rubocop'
+require 'rubocop/rspec/support'
+
+require_relative '../../../../rubocop/cop/migration/remove_index'
+
+describe RuboCop::Cop::Migration::RemoveIndex do
+ include CopHelper
+
+ subject(:cop) { described_class.new }
+
+ context 'in migration' do
+ before do
+ allow(cop).to receive(:in_migration?).and_return(true)
+ end
+
+ it 'registers an offense when remove_index is used' do
+ inspect_source(cop, 'def change; remove_index :table, :column; end')
+
+ aggregate_failures do
+ expect(cop.offenses.size).to eq(1)
+ expect(cop.offenses.map(&:line)).to eq([1])
+ end
+ end
+ end
+
+ context 'outside of migration' do
+ it 'registers no offense' do
+ inspect_source(cop, 'def change; remove_index :table, :column; end')
+
+ expect(cop.offenses.size).to eq(0)
+ end
+ end
+end
diff --git a/spec/serializers/build_action_entity_spec.rb b/spec/serializers/build_action_entity_spec.rb
index 0f7be8b2c39..54ac17447b1 100644
--- a/spec/serializers/build_action_entity_spec.rb
+++ b/spec/serializers/build_action_entity_spec.rb
@@ -17,5 +17,9 @@ describe BuildActionEntity do
it 'contains path to the action play' do
expect(subject[:path]).to include "builds/#{build.id}/play"
end
+
+ it 'contains whether it is playable' do
+ expect(subject[:playable]).to eq build.playable?
+ end
end
end
diff --git a/spec/serializers/build_entity_spec.rb b/spec/serializers/build_entity_spec.rb
index 7dcdf54fd93..f76a5cf72d1 100644
--- a/spec/serializers/build_entity_spec.rb
+++ b/spec/serializers/build_entity_spec.rb
@@ -24,6 +24,10 @@ describe BuildEntity do
expect(subject).not_to include(/variables/)
end
+ it 'contains whether it is playable' do
+ expect(subject[:playable]).to eq build.playable?
+ end
+
it 'contains timestamps' do
expect(subject).to include(:created_at, :updated_at)
end
diff --git a/spec/services/auth/container_registry_authentication_service_spec.rb b/spec/services/auth/container_registry_authentication_service_spec.rb
index b91234ddb1e..e273dfe1552 100644
--- a/spec/services/auth/container_registry_authentication_service_spec.rb
+++ b/spec/services/auth/container_registry_authentication_service_spec.rb
@@ -6,14 +6,15 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do
let(:current_params) { {} }
let(:rsa_key) { OpenSSL::PKey::RSA.generate(512) }
let(:payload) { JWT.decode(subject[:token], rsa_key).first }
+
let(:authentication_abilities) do
- [
- :read_container_image,
- :create_container_image
- ]
+ [:read_container_image, :create_container_image]
end
- subject { described_class.new(current_project, current_user, current_params).execute(authentication_abilities: authentication_abilities) }
+ subject do
+ described_class.new(current_project, current_user, current_params)
+ .execute(authentication_abilities: authentication_abilities)
+ end
before do
allow(Gitlab.config.registry).to receive_messages(enabled: true, issuer: 'rspec', key: nil)
@@ -40,13 +41,11 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do
end
end
- shared_examples 'a accessible' do
+ shared_examples 'an accessible' do
let(:access) do
- [{
- 'type' => 'repository',
+ [{ 'type' => 'repository',
'name' => project.path_with_namespace,
- 'actions' => actions,
- }]
+ 'actions' => actions }]
end
it_behaves_like 'a valid token'
@@ -59,19 +58,19 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do
end
shared_examples 'a pullable' do
- it_behaves_like 'a accessible' do
+ it_behaves_like 'an accessible' do
let(:actions) { ['pull'] }
end
end
shared_examples 'a pushable' do
- it_behaves_like 'a accessible' do
+ it_behaves_like 'an accessible' do
let(:actions) { ['push'] }
end
end
shared_examples 'a pullable and pushable' do
- it_behaves_like 'a accessible' do
+ it_behaves_like 'an accessible' do
let(:actions) { %w(pull push) }
end
end
@@ -81,15 +80,30 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do
it { is_expected.not_to include(:token) }
end
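+ # Shared examples verifying whether executing the service creates a new ContainerRepository record.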
+ shared_examples 'container repository factory' do
+ it 'creates a new container repository resource' do
+ expect { subject }
+ .to change { project.container_repositories.count }.by(1)
+ end
+ end
+
+ shared_examples 'not a container repository factory' do
+ it 'does not create a new container repository resource' do
+ expect { subject }.not_to change { ContainerRepository.count }
+ end
+ end
+
describe '#full_access_token' do
let(:project) { create(:empty_project) }
let(:token) { described_class.full_access_token(project.path_with_namespace) }
subject { { token: token } }
- it_behaves_like 'a accessible' do
+ it_behaves_like 'an accessible' do
let(:actions) { ['*'] }
end
+
+ it_behaves_like 'not a container repository factory'
end
context 'user authorization' do
@@ -110,16 +124,20 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do
end
it_behaves_like 'a pushable'
+ it_behaves_like 'container repository factory'
end
context 'allow reporter to pull images' do
before { project.team << [current_user, :reporter] }
- let(:current_params) do
- { scope: "repository:#{project.path_with_namespace}:pull" }
- end
+ context 'when pulling from root level repository' do
+ let(:current_params) do
+ { scope: "repository:#{project.path_with_namespace}:pull" }
+ end
- it_behaves_like 'a pullable'
+ it_behaves_like 'a pullable'
+ it_behaves_like 'not a container repository factory'
+ end
end
context 'return a least of privileges' do
@@ -130,6 +148,7 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do
end
it_behaves_like 'a pullable'
+ it_behaves_like 'not a container repository factory'
end
context 'disallow guest to pull or push images' do
@@ -140,6 +159,7 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do
end
it_behaves_like 'an inaccessible'
+ it_behaves_like 'not a container repository factory'
end
end
@@ -152,6 +172,7 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do
end
it_behaves_like 'a pullable'
+ it_behaves_like 'not a container repository factory'
end
context 'disallow anyone to push images' do
@@ -160,6 +181,16 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do
end
it_behaves_like 'an inaccessible'
+ it_behaves_like 'not a container repository factory'
+ end
+
+ context 'when repository name is invalid' do
+ let(:current_params) do
+ { scope: 'repository:invalid:push' }
+ end
+
+ it_behaves_like 'an inaccessible'
+ it_behaves_like 'not a container repository factory'
end
end
@@ -173,6 +204,7 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do
end
it_behaves_like 'a pullable'
+ it_behaves_like 'not a container repository factory'
end
context 'disallow anyone to push images' do
@@ -181,6 +213,7 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do
end
it_behaves_like 'an inaccessible'
+ it_behaves_like 'not a container repository factory'
end
end
@@ -191,6 +224,7 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do
end
it_behaves_like 'an inaccessible'
+ it_behaves_like 'not a container repository factory'
end
end
end
@@ -198,11 +232,9 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do
context 'build authorized as user' do
let(:current_project) { create(:empty_project) }
let(:current_user) { create(:user) }
+
let(:authentication_abilities) do
- [
- :build_read_container_image,
- :build_create_container_image
- ]
+ [:build_read_container_image, :build_create_container_image]
end
before do
@@ -219,6 +251,10 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do
it_behaves_like 'a pullable and pushable' do
let(:project) { current_project }
end
+
+ it_behaves_like 'container repository factory' do
+ let(:project) { current_project }
+ end
end
context 'for other projects' do
@@ -231,11 +267,13 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do
let(:project) { create(:empty_project, :public) }
it_behaves_like 'a pullable'
+ it_behaves_like 'not a container repository factory'
end
shared_examples 'pullable for being team member' do
context 'when you are not member' do
it_behaves_like 'an inaccessible'
+ it_behaves_like 'not a container repository factory'
end
context 'when you are member' do
@@ -244,12 +282,14 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do
end
it_behaves_like 'a pullable'
+ it_behaves_like 'not a container repository factory'
end
context 'when you are owner' do
let(:project) { create(:empty_project, namespace: current_user.namespace) }
it_behaves_like 'a pullable'
+ it_behaves_like 'not a container repository factory'
end
end
@@ -263,6 +303,7 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do
context 'when you are not member' do
it_behaves_like 'an inaccessible'
+ it_behaves_like 'not a container repository factory'
end
context 'when you are member' do
@@ -271,12 +312,14 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do
end
it_behaves_like 'a pullable'
+ it_behaves_like 'not a container repository factory'
end
context 'when you are owner' do
let(:project) { create(:empty_project, namespace: current_user.namespace) }
it_behaves_like 'a pullable'
+ it_behaves_like 'not a container repository factory'
end
end
end
@@ -296,12 +339,14 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do
end
it_behaves_like 'an inaccessible'
+ it_behaves_like 'not a container repository factory'
end
context 'when you are owner' do
let(:project) { create(:empty_project, :public, namespace: current_user.namespace) }
it_behaves_like 'an inaccessible'
+ it_behaves_like 'not a container repository factory'
end
end
end
@@ -318,6 +363,7 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do
end
it_behaves_like 'an inaccessible'
+ it_behaves_like 'not a container repository factory'
end
end
end
@@ -325,6 +371,7 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do
context 'unauthorized' do
context 'disallow to use scope-less authentication' do
it_behaves_like 'a forbidden'
+ it_behaves_like 'not a container repository factory'
end
context 'for invalid scope' do
@@ -333,6 +380,7 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do
end
it_behaves_like 'a forbidden'
+ it_behaves_like 'not a container repository factory'
end
context 'for private project' do
@@ -354,6 +402,7 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do
end
it_behaves_like 'a pullable'
+ it_behaves_like 'not a container repository factory'
end
context 'when pushing' do
@@ -362,6 +411,7 @@ describe Auth::ContainerRegistryAuthenticationService, services: true do
end
it_behaves_like 'a forbidden'
+ it_behaves_like 'not a container repository factory'
end
end
end
diff --git a/spec/services/merge_requests/build_service_spec.rb b/spec/services/merge_requests/build_service_spec.rb
index c8bd4d4601a..be9f9ea2dec 100644
--- a/spec/services/merge_requests/build_service_spec.rb
+++ b/spec/services/merge_requests/build_service_spec.rb
@@ -4,6 +4,8 @@ describe MergeRequests::BuildService, services: true do
include RepoHelpers
let(:project) { create(:project, :repository) }
+ let(:source_project) { nil }
+ let(:target_project) { nil }
let(:user) { create(:user) }
let(:issue_confidential) { false }
let(:issue) { create(:issue, project: project, title: 'A bug', confidential: issue_confidential) }
@@ -20,7 +22,9 @@ describe MergeRequests::BuildService, services: true do
MergeRequests::BuildService.new(project, user,
description: description,
source_branch: source_branch,
- target_branch: target_branch)
+ target_branch: target_branch,
+ source_project: source_project,
+ target_project: target_project)
end
before do
@@ -256,5 +260,41 @@ describe MergeRequests::BuildService, services: true do
)
end
end
+
+ context 'target_project is set and accessible by current_user' do
+ let(:target_project) { create(:project, :public, :repository) }
+ let(:commits) { Commit.decorate([commit_1], project) }
+
+ it 'sets target project correctly' do
+ expect(merge_request.target_project).to eq(target_project)
+ end
+ end
+
+ context 'target_project is set but not accessible by current_user' do
+ let(:target_project) { create(:project, :private, :repository) }
+ let(:commits) { Commit.decorate([commit_1], project) }
+
+ it 'sets the target project to the current project' do
+ expect(merge_request.target_project).to eq(project)
+ end
+ end
+
+ context 'source_project is set and accessible by current_user' do
+ let(:source_project) { create(:project, :public, :repository) }
+ let(:commits) { Commit.decorate([commit_1], project) }
+
+ it 'sets the source project correctly' do
+ expect(merge_request.source_project).to eq(source_project)
+ end
+ end
+
+ context 'source_project is set but not accessible by current_user' do
+ let(:source_project) { create(:project, :private, :repository) }
+ let(:commits) { Commit.decorate([commit_1], project) }
+
+ it 'sets the source project to the current project' do
+ expect(merge_request.source_project).to eq(project)
+ end
+ end
end
end
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index c22d145ca5d..03215a4624a 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -49,6 +49,7 @@ describe MergeRequests::RefreshService, services: true do
context 'push to origin repo source branch' do
let(:refresh_service) { service.new(@project, @user) }
+
before do
allow(refresh_service).to receive(:execute_hooks)
refresh_service.execute(@oldrev, @newrev, 'refs/heads/master')
@@ -70,6 +71,32 @@ describe MergeRequests::RefreshService, services: true do
end
end
+ context 'push to origin repo source branch when an MR was reopened' do
+ let(:refresh_service) { service.new(@project, @user) }
+
+ before do
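+ # A reopened merge request should be refreshed exactly like an open one.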
+ @merge_request.update(state: :reopened)
+
+ allow(refresh_service).to receive(:execute_hooks)
+ refresh_service.execute(@oldrev, @newrev, 'refs/heads/master')
+ reload_mrs
+ end
+
+ it 'executes hooks with update action' do
+ expect(refresh_service).to have_received(:execute_hooks).
+ with(@merge_request, 'update', @oldrev)
+
+ expect(@merge_request.notes).not_to be_empty
+ expect(@merge_request).to be_open
+ expect(@merge_request.merge_when_pipeline_succeeds).to be_falsey
+ expect(@merge_request.diff_head_sha).to eq(@newrev)
+ expect(@fork_merge_request).to be_open
+ expect(@fork_merge_request.notes).to be_empty
+ expect(@build_failed_todo).to be_done
+ expect(@fork_build_failed_todo).to be_done
+ end
+ end
+
context 'push to origin repo target branch' do
before do
service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/feature')
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index b1e10f4562e..4b8589b2736 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -7,6 +7,11 @@ describe Projects::DestroyService, services: true do
let!(:remove_path) { path.sub(/\.git\Z/, "+#{project.id}+deleted.git") }
let!(:async) { false } # execute or async_execute
+ before do
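+ # The registry is enabled for every example; contexts below override the tag stubs as needed.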
+ stub_container_registry_config(enabled: true)
+ stub_container_registry_tags(repository: :any, tags: [])
+ end
+
shared_examples 'deleting the project' do
it 'deletes the project' do
expect(Project.unscoped.all).not_to include(project)
@@ -89,30 +94,64 @@ describe Projects::DestroyService, services: true do
it_behaves_like 'deleting the project with pipeline and build'
end
- context 'container registry' do
- before do
- stub_container_registry_config(enabled: true)
- stub_container_registry_tags('tag')
- end
+ describe 'container registry' do
+ context 'when there are regular container repositories' do
+ let(:container_repository) { create(:container_repository) }
+
+ before do
+ stub_container_registry_tags(repository: project.full_path + '/image',
+ tags: ['tag'])
+ project.container_repositories << container_repository
+ end
+
+ context 'when image repository deletion succeeds' do
+ it 'removes tags' do
+ expect_any_instance_of(ContainerRepository)
+ .to receive(:delete_tags!).and_return(true)
+
+ destroy_project(project, user)
+ end
+ end
- context 'tags deletion succeeds' do
- it do
- expect_any_instance_of(ContainerRegistry::Tag).to receive(:delete).and_return(true)
+ context 'when image repository deletion fails' do
+ it 'raises an exception' do
+ expect_any_instance_of(ContainerRepository)
+ .to receive(:delete_tags!).and_return(false)
- destroy_project(project, user, {})
+ expect { destroy_project(project, user) }
+ .to raise_error(ActiveRecord::RecordNotDestroyed)
+ end
end
end
- context 'tags deletion fails' do
- before { expect_any_instance_of(ContainerRegistry::Tag).to receive(:delete).and_return(false) }
+ context 'when there are tags for legacy root repository' do
+ before do
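+ # Legacy images were stored at the project's root path rather than in a named image repository, hence the root-path stub.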
+ stub_container_registry_tags(repository: project.full_path,
+ tags: ['tag'])
+ end
+
+ context 'when image repository tags deletion succeeds' do
+ it 'removes tags' do
+ expect_any_instance_of(ContainerRepository)
+ .to receive(:delete_tags!).and_return(true)
- subject { destroy_project(project, user, {}) }
+ destroy_project(project, user)
+ end
+ end
+
+ context 'when image repository tags deletion fails' do
+ it 'raises an exception' do
+ expect_any_instance_of(ContainerRepository)
+ .to receive(:delete_tags!).and_return(false)
- it { expect{subject}.to raise_error(Projects::DestroyService::DestroyError) }
+ expect { destroy_project(project, user) }
+ .to raise_error(Projects::DestroyService::DestroyError)
+ end
+ end
end
end
- def destroy_project(project, user, params)
+ def destroy_project(project, user, params = {})
if async
Projects::DestroyService.new(project, user, params).async_execute
else
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index f8187fefc14..29ccce59c53 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -29,9 +29,12 @@ describe Projects::TransferService, services: true do
end
context 'disallow transferring of project with tags' do
+ let(:container_repository) { create(:container_repository) }
+
before do
stub_container_registry_config(enabled: true)
- stub_container_registry_tags('tag')
+ stub_container_registry_tags(repository: :any, tags: ['tag'])
+ project.container_repositories << container_repository
end
subject { transfer_project(project, user, group) }
diff --git a/spec/services/users/create_service_spec.rb b/spec/services/users/create_service_spec.rb
index 66f68650f81..a111aec2f89 100644
--- a/spec/services/users/create_service_spec.rb
+++ b/spec/services/users/create_service_spec.rb
@@ -122,6 +122,32 @@ describe Users::CreateService, services: true do
end
end
+ context 'when password_automatically_set parameter is true' do
+ let(:params) do
+ {
+ name: 'John Doe',
+ username: 'jduser',
+ email: 'jd@example.com',
+ password: 'mydummypass',
+ password_automatically_set: true
+ }
+ end
+
+ it 'persists the given attributes' do
+ user = service.execute
+ user.reload
+
+ expect(user).to have_attributes(
+ name: params[:name],
+ username: params[:username],
+ email: params[:email],
+ password: params[:password],
+ created_by_id: admin_user.id,
+ password_automatically_set: params[:password_automatically_set]
+ )
+ end
+ end
+
context 'when skip_confirmation parameter is true' do
let(:params) do
{ name: 'John Doe', username: 'jduser', email: 'jd@example.com', password: 'mydummypass', skip_confirmation: true }
diff --git a/spec/services/users/destroy_spec.rb b/spec/services/users/destroy_service_spec.rb
index 66c61b7f8ff..43c18992d1a 100644
--- a/spec/services/users/destroy_spec.rb
+++ b/spec/services/users/destroy_service_spec.rb
@@ -46,43 +46,47 @@ describe Users::DestroyService, services: true do
project.add_developer(user)
end
- context "for an issue the user has created" do
- let!(:issue) { create(:issue, project: project, author: user) }
+ context "for an issue the user was assigned to" do
+ let!(:issue) { create(:issue, project: project, assignee: user) }
before do
service.execute(user)
end
- it 'does not delete the issue' do
+ it 'does not delete issues the user is assigned to' do
expect(Issue.find_by_id(issue.id)).to be_present
end
- it 'migrates the issue so that the "Ghost User" is the issue owner' do
+ it 'migrates the issue so that it is "Unassigned"' do
migrated_issue = Issue.find_by_id(issue.id)
- expect(migrated_issue.author).to eq(User.ghost)
+ expect(migrated_issue.assignee).to be_nil
end
+ end
+ end
- it 'blocks the user before migrating issues to the "Ghost User' do
- expect(user).to be_blocked
- end
+ context "a deleted user's merge_requests" do
+ let(:project) { create(:project) }
+
+ before do
+ project.add_developer(user)
end
- context "for an issue the user was assigned to" do
- let!(:issue) { create(:issue, project: project, assignee: user) }
+ context "for an merge request the user was assigned to" do
+ let!(:merge_request) { create(:merge_request, source_project: project, assignee: user) }
before do
service.execute(user)
end
- it 'does not delete issues the user is assigned to' do
- expect(Issue.find_by_id(issue.id)).to be_present
+ it 'does not delete merge requests the user is assigned to' do
+ expect(MergeRequest.find_by_id(merge_request.id)).to be_present
end
- it 'migrates the issue so that it is "Unassigned"' do
- migrated_issue = Issue.find_by_id(issue.id)
+ it 'migrates the merge request so that it is "Unassigned"' do
+ migrated_merge_request = MergeRequest.find_by_id(merge_request.id)
- expect(migrated_issue.assignee).to be_nil
+ expect(migrated_merge_request.assignee).to be_nil
end
end
end
@@ -141,5 +145,13 @@ describe Users::DestroyService, services: true do
expect(User.exists?(user.id)).to be(false)
end
end
+
+ context "migrating associated records" do
+ it 'delegates to the `MigrateToGhostUser` service to move associated records to the ghost user' do
+ expect_any_instance_of(Users::MigrateToGhostUserService).to receive(:execute).once
+
+ service.execute(user)
+ end
+ end
end
end
diff --git a/spec/services/users/migrate_to_ghost_user_service_spec.rb b/spec/services/users/migrate_to_ghost_user_service_spec.rb
new file mode 100644
index 00000000000..8c5b7e41c15
--- /dev/null
+++ b/spec/services/users/migrate_to_ghost_user_service_spec.rb
@@ -0,0 +1,64 @@
+require 'spec_helper'
+
+describe Users::MigrateToGhostUserService, services: true do
+ let!(:user) { create(:user) }
+ let!(:project) { create(:project) }
+ let(:service) { described_class.new(user) }
+
+ context "migrating a user's associated records to the ghost user" do
+ context 'issues' do
+ include_examples "migrating a deleted user's associated records to the ghost user", Issue do
+ let(:created_record) { create(:issue, project: project, author: user) }
+ let(:assigned_record) { create(:issue, project: project, assignee: user) }
+ end
+ end
+
+ context 'merge requests' do
+ include_examples "migrating a deleted user's associated records to the ghost user", MergeRequest do
+ let(:created_record) { create(:merge_request, source_project: project, author: user, target_branch: "first") }
+ let(:assigned_record) { create(:merge_request, source_project: project, assignee: user, target_branch: 'second') }
+ end
+ end
+
+ context 'notes' do
+ include_examples "migrating a deleted user's associated records to the ghost user", Note do
+ let(:created_record) { create(:note, project: project, author: user) }
+ end
+ end
+
+ context 'abuse reports' do
+ include_examples "migrating a deleted user's associated records to the ghost user", AbuseReport do
+ let(:created_record) { create(:abuse_report, reporter: user, user: create(:user)) }
+ end
+ end
+
+ context 'award emoji' do
+ include_examples "migrating a deleted user's associated records to the ghost user", AwardEmoji do
+ let(:created_record) { create(:award_emoji, user: user) }
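+ # AwardEmoji has no `author`; the shared examples read the attribute named by `author_alias` instead.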
+ let(:author_alias) { :user }
+
+ context "when the awardable already has an award emoji of the same name assigned to the ghost user" do
+ let(:awardable) { create(:issue) }
+ let!(:existing_award_emoji) { create(:award_emoji, user: User.ghost, name: "thumbsup", awardable: awardable) }
+ let!(:award_emoji) { create(:award_emoji, user: user, name: "thumbsup", awardable: awardable) }
+
+ it "migrates the award emoji regardless" do
+ service.execute
+
+ migrated_record = AwardEmoji.find_by_id(award_emoji.id)
+
+ expect(migrated_record.user).to eq(User.ghost)
+ end
+
+ it "does not leave the migrated award emoji in an invalid state" do
+ service.execute
+
+ migrated_record = AwardEmoji.find_by_id(award_emoji.id)
+
+ expect(migrated_record).to be_valid
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/controllers/githubish_import_controller_shared_examples.rb b/spec/support/controllers/githubish_import_controller_shared_examples.rb
index 51f1015f43c..c59b30c772d 100644
--- a/spec/support/controllers/githubish_import_controller_shared_examples.rb
+++ b/spec/support/controllers/githubish_import_controller_shared_examples.rb
@@ -180,7 +180,7 @@ shared_examples 'a GitHub-ish import controller: POST create' do
it "takes the new namespace" do
expect(Gitlab::GithubImport::ProjectCreator).
to receive(:new).with(provider_repo, provider_repo.name, an_instance_of(Group), user, access_params, type: provider).
- and_return(double(execute: true))
+ and_return(double(execute: true))
post :create, target_namespace: provider_repo.name, format: :js
end
@@ -201,7 +201,7 @@ shared_examples 'a GitHub-ish import controller: POST create' do
it "takes the current user's namespace" do
expect(Gitlab::GithubImport::ProjectCreator).
to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider).
- and_return(double(execute: true))
+ and_return(double(execute: true))
post :create, format: :js
end
@@ -229,7 +229,7 @@ shared_examples 'a GitHub-ish import controller: POST create' do
end
end
- context 'user has chosen a nested namespace and name for the project' do
+ context 'user has chosen an existing nested namespace and name for the project' do
let(:parent_namespace) { create(:namespace, name: 'foo', owner: user) }
let(:nested_namespace) { create(:namespace, name: 'bar', parent: parent_namespace, owner: user) }
let(:test_name) { 'test_name' }
@@ -242,5 +242,58 @@ shared_examples 'a GitHub-ish import controller: POST create' do
post :create, { target_namespace: nested_namespace.full_path, new_name: test_name, format: :js }
end
end
+
+ context 'user has chosen non-existent nested namespaces and a name for the project' do
+ let(:test_name) { 'test_name' }
+
+ it 'takes the selected namespace and name' do
+ expect(Gitlab::GithubImport::ProjectCreator).
+ to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider).
+ and_return(double(execute: true))
+
+ post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :js }
+ end
+
+ it 'creates the namespaces' do
+ allow(Gitlab::GithubImport::ProjectCreator).
+ to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider).
+ and_return(double(execute: true))
+
+ expect { post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :js } }
+ .to change { Namespace.count }.by(2)
+ end
+
+ it 'creates the new namespace under the correct parent' do
+ allow(Gitlab::GithubImport::ProjectCreator).
+ to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider).
+ and_return(double(execute: true))
+
+ post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :js }
+
+ expect(Namespace.find_by_path_or_name('bar').parent.path).to eq('foo')
+ end
+ end
+
+ context 'user has chosen existing and non-existent nested namespaces and a name for the project' do
+ let(:test_name) { 'test_name' }
+ let!(:parent_namespace) { create(:namespace, name: 'foo', owner: user) }
+
+ it 'takes the selected namespace and name' do
+ expect(Gitlab::GithubImport::ProjectCreator).
+ to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider).
+ and_return(double(execute: true))
+
+ post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :js }
+ end
+
+ it 'creates the namespaces' do
+ allow(Gitlab::GithubImport::ProjectCreator).
+ to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider).
+ and_return(double(execute: true))
+
+ expect { post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :js } }
+ .to change { Namespace.count }.by(2)
+ end
+ end
end
end
diff --git a/spec/support/filtered_search_helpers.rb b/spec/support/filtered_search_helpers.rb
index 6b009b132b6..36be0bb6bf8 100644
--- a/spec/support/filtered_search_helpers.rb
+++ b/spec/support/filtered_search_helpers.rb
@@ -30,7 +30,7 @@ module FilteredSearchHelpers
end
def clear_search_field
- find('.filtered-search-input-container .clear-search').click
+ find('.filtered-search-box .clear-search').click
end
def reset_filters
@@ -51,7 +51,7 @@ module FilteredSearchHelpers
# Iterates through each visual token inside
# .tokens-container to make sure the correct names and values are rendered
def expect_tokens(tokens)
- page.find '.filtered-search-input-container .tokens-container' do
+ page.find '.filtered-search-box .tokens-container' do
page.all(:css, '.tokens-container li').each_with_index do |el, index|
token_name = tokens[index][:name]
token_value = tokens[index][:value]
@@ -71,4 +71,18 @@ module FilteredSearchHelpers
def get_filtered_search_placeholder
find('.filtered-search')['placeholder']
end
+
+ def remove_recent_searches
+ execute_script("window.localStorage.removeItem('issue-recent-searches');")
+ end
+
+ def set_recent_searches(input)
+ execute_script("window.localStorage.setItem('issue-recent-searches', '#{input}');")
+ end
+
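+ # Polls the filtered search input until its (stripped) value equals `text`, bounded by Capybara's default wait time.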
+ def wait_for_filtered_search(text)
+ Timeout.timeout(Capybara.default_max_wait_time) do
+ loop until find('.filtered-search').value.strip == text
+ end
+ end
end
diff --git a/spec/support/matchers/gitaly_matchers.rb b/spec/support/matchers/gitaly_matchers.rb
index d7a53820684..65dbc01f6e4 100644
--- a/spec/support/matchers/gitaly_matchers.rb
+++ b/spec/support/matchers/gitaly_matchers.rb
@@ -1,3 +1,3 @@
-RSpec::Matchers.define :post_receive_request_with_repo_path do |path|
+RSpec::Matchers.define :gitaly_request_with_repo_path do |path|
match { |actual| actual.repository.path == path }
end
diff --git a/spec/support/seed_helper.rb b/spec/support/seed_helper.rb
index f55fee28ff9..47b5f556e66 100644
--- a/spec/support/seed_helper.rb
+++ b/spec/support/seed_helper.rb
@@ -1,20 +1,22 @@
+require_relative 'test_env'
+
# This file is specific to specs in spec/lib/gitlab/git/
-SEED_REPOSITORY_PATH = File.expand_path('../../tmp/repositories', __dir__)
-TEST_REPO_PATH = File.join(SEED_REPOSITORY_PATH, 'gitlab-git-test.git')
-TEST_NORMAL_REPO_PATH = File.join(SEED_REPOSITORY_PATH, "not-bare-repo.git")
-TEST_MUTABLE_REPO_PATH = File.join(SEED_REPOSITORY_PATH, "mutable-repo.git")
-TEST_BROKEN_REPO_PATH = File.join(SEED_REPOSITORY_PATH, "broken-repo.git")
+SEED_STORAGE_PATH = TestEnv.repos_path
+TEST_REPO_PATH = 'gitlab-git-test.git'.freeze
+TEST_NORMAL_REPO_PATH = 'not-bare-repo.git'.freeze
+TEST_MUTABLE_REPO_PATH = 'mutable-repo.git'.freeze
+TEST_BROKEN_REPO_PATH = 'broken-repo.git'.freeze
module SeedHelper
GITLAB_GIT_TEST_REPO_URL = ENV.fetch('GITLAB_GIT_TEST_REPO_URL', 'https://gitlab.com/gitlab-org/gitlab-git-test.git').freeze
def ensure_seeds
- if File.exist?(SEED_REPOSITORY_PATH)
- FileUtils.rm_r(SEED_REPOSITORY_PATH)
+ if File.exist?(SEED_STORAGE_PATH)
+ FileUtils.rm_r(SEED_STORAGE_PATH)
end
- FileUtils.mkdir_p(SEED_REPOSITORY_PATH)
+ FileUtils.mkdir_p(SEED_STORAGE_PATH)
create_bare_seeds
create_normal_seeds
@@ -26,41 +28,45 @@ module SeedHelper
def create_bare_seeds
system(git_env, *%W(#{Gitlab.config.git.bin_path} clone --bare #{GITLAB_GIT_TEST_REPO_URL}),
- chdir: SEED_REPOSITORY_PATH,
+ chdir: SEED_STORAGE_PATH,
out: '/dev/null',
err: '/dev/null')
end
def create_normal_seeds
system(git_env, *%W(#{Gitlab.config.git.bin_path} clone #{TEST_REPO_PATH} #{TEST_NORMAL_REPO_PATH}),
+ chdir: SEED_STORAGE_PATH,
out: '/dev/null',
err: '/dev/null')
end
def create_mutable_seeds
system(git_env, *%W(#{Gitlab.config.git.bin_path} clone #{TEST_REPO_PATH} #{TEST_MUTABLE_REPO_PATH}),
+ chdir: SEED_STORAGE_PATH,
out: '/dev/null',
err: '/dev/null')
- system(git_env, *%w(git branch -t feature origin/feature),
- chdir: TEST_MUTABLE_REPO_PATH, out: '/dev/null', err: '/dev/null')
+ mutable_repo_full_path = File.join(SEED_STORAGE_PATH, TEST_MUTABLE_REPO_PATH)
+ system(git_env, *%W(#{Gitlab.config.git.bin_path} branch -t feature origin/feature),
+ chdir: mutable_repo_full_path, out: '/dev/null', err: '/dev/null')
system(git_env, *%W(#{Gitlab.config.git.bin_path} remote add expendable #{GITLAB_GIT_TEST_REPO_URL}),
- chdir: TEST_MUTABLE_REPO_PATH, out: '/dev/null', err: '/dev/null')
+ chdir: mutable_repo_full_path, out: '/dev/null', err: '/dev/null')
end
def create_broken_seeds
system(git_env, *%W(#{Gitlab.config.git.bin_path} clone --bare #{TEST_REPO_PATH} #{TEST_BROKEN_REPO_PATH}),
+ chdir: SEED_STORAGE_PATH,
out: '/dev/null',
err: '/dev/null')
- refs_path = File.join(TEST_BROKEN_REPO_PATH, 'refs')
+ refs_path = File.join(SEED_STORAGE_PATH, TEST_BROKEN_REPO_PATH, 'refs')
FileUtils.rm_r(refs_path)
end
def create_git_attributes
- dir = File.join(SEED_REPOSITORY_PATH, 'with-git-attributes.git', 'info')
+ dir = File.join(SEED_STORAGE_PATH, 'with-git-attributes.git', 'info')
FileUtils.mkdir_p(dir)
@@ -85,7 +91,7 @@ bla/bla.txt
end
def create_invalid_git_attributes
- dir = File.join(SEED_REPOSITORY_PATH, 'with-invalid-git-attributes.git', 'info')
+ dir = File.join(SEED_STORAGE_PATH, 'with-invalid-git-attributes.git', 'info')
FileUtils.mkdir_p(dir)
diff --git a/spec/support/services/migrate_to_ghost_user_service_shared_examples.rb b/spec/support/services/migrate_to_ghost_user_service_shared_examples.rb
new file mode 100644
index 00000000000..0eac587e973
--- /dev/null
+++ b/spec/support/services/migrate_to_ghost_user_service_shared_examples.rb
@@ -0,0 +1,39 @@
+require "spec_helper"
+
+shared_examples "migrating a deleted user's associated records to the ghost user" do |record_class|
+ record_class_name = record_class.to_s.titleize.downcase
+
+ let(:project) { create(:project) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ context "for a #{record_class_name} the user has created" do
+ let!(:record) { created_record }
+
+ it "does not delete the #{record_class_name}" do
+ service.execute
+
+ expect(record_class.find_by_id(record.id)).to be_present
+ end
+
+ it "migrates the #{record_class_name} so that the 'Ghost User' is the #{record_class_name} owner" do
+ service.execute
+
+ migrated_record = record_class.find_by_id(record.id)
+
+ if migrated_record.respond_to?(:author)
+ expect(migrated_record.author).to eq(User.ghost)
+ else
+ expect(migrated_record.send(author_alias)).to eq(User.ghost)
+ end
+ end
+
+ it "blocks the user before migrating #{record_class_name}s to the 'Ghost User'" do
+ service.execute
+
+ expect(user).to be_blocked
+ end
+ end
+end
diff --git a/spec/support/stub_gitlab_calls.rb b/spec/support/stub_gitlab_calls.rb
index a01ef576234..ded2d593059 100644
--- a/spec/support/stub_gitlab_calls.rb
+++ b/spec/support/stub_gitlab_calls.rb
@@ -27,23 +27,40 @@ module StubGitlabCalls
def stub_container_registry_config(registry_settings)
allow(Gitlab.config.registry).to receive_messages(registry_settings)
- allow(Auth::ContainerRegistryAuthenticationService).to receive(:full_access_token).and_return('token')
+ allow(Auth::ContainerRegistryAuthenticationService)
+ .to receive(:full_access_token).and_return('token')
end
- def stub_container_registry_tags(*tags)
- allow_any_instance_of(ContainerRegistry::Client).to receive(:repository_tags).and_return(
- { "tags" => tags }
- )
- allow_any_instance_of(ContainerRegistry::Client).to receive(:repository_manifest).and_return(
- JSON.parse(File.read(Rails.root + 'spec/fixtures/container_registry/tag_manifest.json'))
- )
- allow_any_instance_of(ContainerRegistry::Client).to receive(:blob).and_return(
- File.read(Rails.root + 'spec/fixtures/container_registry/config_blob.json')
- )
+ def stub_container_registry_tags(repository: :any, tags:)
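+ # `:any` maps to RSpec's `any_args` so the stubs match calls for every repository path.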
+ repository = any_args if repository == :any
+
+ allow_any_instance_of(ContainerRegistry::Client)
+ .to receive(:repository_tags).with(repository)
+ .and_return({ 'tags' => tags })
+
+ allow_any_instance_of(ContainerRegistry::Client)
+ .to receive(:repository_manifest).with(repository)
+ .and_return(stub_container_registry_tag_manifest)
+
+ allow_any_instance_of(ContainerRegistry::Client)
+ .to receive(:blob).with(repository)
+ .and_return(stub_container_registry_blob)
end
private
+ def stub_container_registry_tag_manifest
+ fixture_path = 'spec/fixtures/container_registry/tag_manifest.json'
+
+ JSON.parse(File.read(Rails.root + fixture_path))
+ end
+
+ def stub_container_registry_blob
+ fixture_path = 'spec/fixtures/container_registry/config_blob.json'
+
+ File.read(Rails.root + fixture_path)
+ end
+
def gitlab_url
Gitlab.config.gitlab.url
end
diff --git a/spec/views/notify/pipeline_failed_email.html.haml_spec.rb b/spec/views/notify/pipeline_failed_email.html.haml_spec.rb
new file mode 100644
index 00000000000..f627f9165fb
--- /dev/null
+++ b/spec/views/notify/pipeline_failed_email.html.haml_spec.rb
@@ -0,0 +1,54 @@
+require 'spec_helper'
+
+describe 'notify/pipeline_failed_email.html.haml' do
+ include Devise::Test::ControllerHelpers
+
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let(:merge_request) { create(:merge_request, :simple, source_project: project) }
+
+ let(:pipeline) do
+ create(:ci_pipeline,
+ project: project,
+ user: user,
+ ref: project.default_branch,
+ sha: project.commit.sha,
+ status: :failed)
+ end
+
+ before do
+ assign(:project, project)
+ assign(:pipeline, pipeline)
+ assign(:merge_request, merge_request)
+ end
+
+ context 'pipeline with user' do
+ it 'renders the email correctly' do
+ render
+
+ expect(rendered).to have_content "Your pipeline has failed"
+ expect(rendered).to have_content pipeline.project.name
+ expect(rendered).to have_content pipeline.git_commit_message.truncate(50)
+ expect(rendered).to have_content pipeline.commit.author_name
+ expect(rendered).to have_content "##{pipeline.id}"
+ expect(rendered).to have_content pipeline.user.name
+ end
+ end
+
+ context 'pipeline without user' do
+ before do
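+ # Without a user the email is expected to attribute the pipeline to "by API" (see below).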
+ pipeline.update_attribute(:user, nil)
+ end
+
+ it 'renders the email correctly' do
+ render
+
+ expect(rendered).to have_content "Your pipeline has failed"
+ expect(rendered).to have_content pipeline.project.name
+ expect(rendered).to have_content pipeline.git_commit_message.truncate(50)
+ expect(rendered).to have_content pipeline.commit.author_name
+ expect(rendered).to have_content "##{pipeline.id}"
+ expect(rendered).to have_content "by API"
+ end
+ end
+end
diff --git a/spec/views/notify/pipeline_success_email.html.haml_spec.rb b/spec/views/notify/pipeline_success_email.html.haml_spec.rb
new file mode 100644
index 00000000000..ecd096ee579
--- /dev/null
+++ b/spec/views/notify/pipeline_success_email.html.haml_spec.rb
@@ -0,0 +1,54 @@
+require 'spec_helper'
+
+describe 'notify/pipeline_success_email.html.haml' do
+ include Devise::Test::ControllerHelpers
+
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let(:merge_request) { create(:merge_request, :simple, source_project: project) }
+
+ let(:pipeline) do
+ create(:ci_pipeline,
+ project: project,
+ user: user,
+ ref: project.default_branch,
+ sha: project.commit.sha,
+ status: :success)
+ end
+
+ before do
+ assign(:project, project)
+ assign(:pipeline, pipeline)
+ assign(:merge_request, merge_request)
+ end
+
+ context 'pipeline with user' do
+ it 'renders the email correctly' do
+ render
+
+ expect(rendered).to have_content "Your pipeline has passed"
+ expect(rendered).to have_content pipeline.project.name
+ expect(rendered).to have_content pipeline.git_commit_message.truncate(50)
+ expect(rendered).to have_content pipeline.commit.author_name
+ expect(rendered).to have_content "##{pipeline.id}"
+ expect(rendered).to have_content pipeline.user.name
+ end
+ end
+
+ context 'pipeline without user' do
+ before do
+ pipeline.update_attribute(:user, nil)
+ end
+
+ it 'renders the email correctly' do
+ render
+
+ expect(rendered).to have_content "Your pipeline has passed"
+ expect(rendered).to have_content pipeline.project.name
+ expect(rendered).to have_content pipeline.git_commit_message.truncate(50)
+ expect(rendered).to have_content pipeline.commit.author_name
+ expect(rendered).to have_content "##{pipeline.id}"
+ expect(rendered).to have_content "by API"
+ end
+ end
+end
diff --git a/spec/workers/repository_import_worker_spec.rb b/spec/workers/repository_import_worker_spec.rb
index fbb22439f33..5a2c0671dac 100644
--- a/spec/workers/repository_import_worker_spec.rb
+++ b/spec/workers/repository_import_worker_spec.rb
@@ -23,10 +23,12 @@ describe RepositoryImportWorker do
error = %q{remote: Not Found fatal: repository 'https://user:pass@test.com/root/repoC.git/' not found }
expect_any_instance_of(Projects::ImportService).to receive(:execute).
and_return({ status: :error, message: error })
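+ # Stub the Sidekiq jid so the worker can record it as the project's import_jid.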
+ allow(subject).to receive(:jid).and_return('123')
subject.perform(project.id)
expect(project.reload.import_error).to include("https://*****:*****@test.com/root/repoC.git/")
+ expect(project.reload.import_jid).not_to be_nil
end
end
end
diff --git a/spec/workers/stuck_import_jobs_worker_spec.rb b/spec/workers/stuck_import_jobs_worker_spec.rb
new file mode 100644
index 00000000000..466277a5e5e
--- /dev/null
+++ b/spec/workers/stuck_import_jobs_worker_spec.rb
@@ -0,0 +1,36 @@
+require 'spec_helper'
+
+describe StuckImportJobsWorker do
+ let(:worker) { described_class.new }
+ let(:exclusive_lease_uuid) { SecureRandom.uuid }
+
+ before do
+ allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).and_return(exclusive_lease_uuid)
+ end
+
+ describe 'long running import' do
+ let(:project) { create(:empty_project, import_jid: '123', import_status: 'started') }
+
+ before do
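+ # Sidekiq reports the job's jid as completed while the project is still 'started', i.e. the import is stuck.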
+ allow(Gitlab::SidekiqStatus).to receive(:completed_jids).and_return(['123'])
+ end
+
+ it 'marks the project as failed' do
+ expect { worker.perform }.to change { project.reload.import_status }.to('failed')
+ end
+ end
+
+ describe 'running import' do
+ let(:project) { create(:empty_project, import_jid: '123', import_status: 'started') }
+
+ before do
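+ # No completed jids: the import job is still running and must be left alone.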
+ allow(Gitlab::SidekiqStatus).to receive(:completed_jids).and_return([])
+ end
+
+ it 'does not mark the project as failed' do
+ worker.perform
+
+ expect(project.reload.import_status).to eq('started')
+ end
+ end
+end
diff --git a/spec/workers/trigger_schedule_worker_spec.rb b/spec/workers/trigger_schedule_worker_spec.rb
new file mode 100644
index 00000000000..151e1c2f7b9
--- /dev/null
+++ b/spec/workers/trigger_schedule_worker_spec.rb
@@ -0,0 +1,58 @@
+require 'spec_helper'
+
+describe TriggerScheduleWorker do
+ let(:worker) { described_class.new }
+
+ before do
+ stub_ci_pipeline_to_return_yaml_file
+ end
+
+ context 'when a trigger schedule is due (next_run_at is in the past)' do
+ before do
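+ # Jump 10 days into the future so the nightly schedule's next_run_at is in the past and the trigger is due.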
+ trigger_schedule = create(:ci_trigger_schedule, :nightly)
+ time_future = Time.now + 10.days
+ allow(Time).to receive(:now).and_return(time_future)
+ @next_time = Gitlab::Ci::CronParser.new(trigger_schedule.cron, trigger_schedule.cron_timezone).next_time_from(time_future)
+ end
+
+ it 'creates a new trigger request' do
+ expect { worker.perform }.to change { Ci::TriggerRequest.count }.by(1)
+ end
+
+ it 'creates a new pipeline' do
+ expect { worker.perform }.to change { Ci::Pipeline.count }.by(1)
+ expect(Ci::Pipeline.last).to be_pending
+ end
+
+ it 'updates next_run_at' do
+ expect { worker.perform }.to change { Ci::TriggerSchedule.last.next_run_at }.to(@next_time)
+ end
+ end
+
+ context 'when no trigger schedule is due' do
+ before { create(:ci_trigger_schedule, :nightly) }
+
+ it 'does not create a new pipeline' do
+ expect { worker.perform }.not_to change { Ci::Pipeline.count }
+ end
+
+ it 'does not update next_run_at' do
+ expect { worker.perform }.not_to change { Ci::TriggerSchedule.last.next_run_at }
+ end
+ end
+
+ context 'when next_run_at is nil' do
+ before do
+ trigger_schedule = create(:ci_trigger_schedule, :nightly)
+ trigger_schedule.update_attribute(:next_run_at, nil)
+ end
+
+ it 'does not create a new pipeline' do
+ expect { worker.perform }.not_to change { Ci::Pipeline.count }
+ end
+
+ it 'does not update next_run_at' do
+ expect { worker.perform }.not_to change { Ci::TriggerSchedule.last.next_run_at }
+ end
+ end
+end