Diffstat (limited to 'spec')
-rw-r--r--  spec/controllers/groups/variables_controller_spec.rb | 53
-rw-r--r--  spec/controllers/help_controller_spec.rb | 2
-rw-r--r--  spec/controllers/import/bitbucket_controller_spec.rb | 98
-rw-r--r--  spec/controllers/import/gitlab_controller_spec.rb | 95
-rw-r--r--  spec/controllers/profiles_controller_spec.rb | 40
-rw-r--r--  spec/controllers/projects/variables_controller_spec.rb | 56
-rw-r--r--  spec/controllers/projects_controller_spec.rb | 96
-rw-r--r--  spec/factories/ci/builds.rb | 4
-rw-r--r--  spec/factories/keys.rb | 4
-rw-r--r--  spec/factories/lfs_file_locks.rb | 7
-rw-r--r--  spec/factories/projects.rb | 9
-rw-r--r--  spec/factories/services.rb | 3
-rw-r--r--  spec/features/admin/admin_settings_spec.rb | 10
-rw-r--r--  spec/features/admin/admin_users_spec.rb | 8
-rw-r--r--  spec/features/ci_lint_spec.rb | 34
-rw-r--r--  spec/features/group_variables_spec.rb | 69
-rw-r--r--  spec/features/groups/members/search_members_spec.rb | 29
-rw-r--r--  spec/features/groups/milestone_spec.rb | 151
-rw-r--r--  spec/features/markdown/copy_as_gfm_spec.rb (renamed from spec/features/copy_as_gfm_spec.rb) | 0
-rw-r--r--  spec/features/markdown/gitlab_flavored_markdown_spec.rb (renamed from spec/features/gitlab_flavored_markdown_spec.rb) | 0
-rw-r--r--  spec/features/markdown/markdown_spec.rb (renamed from spec/features/markdown_spec.rb) | 0
-rw-r--r--  spec/features/markdown/math_spec.rb | 22
-rw-r--r--  spec/features/markdown/mermaid_spec.rb | 24
-rw-r--r--  spec/features/profiles/password_spec.rb | 76
-rw-r--r--  spec/features/profiles/user_edit_profile_spec.rb | 58
-rw-r--r--  spec/features/profiles/user_manages_applications_spec.rb | 39
-rw-r--r--  spec/features/profiles/user_visits_profile_authentication_log_spec.rb | 25
-rw-r--r--  spec/features/profiles/user_visits_profile_spec.rb | 51
-rw-r--r--  spec/features/project_variables_spec.rb | 18
-rw-r--r--  spec/features/projects/clusters/gcp_spec.rb | 4
-rw-r--r--  spec/features/projects/import_export/namespace_export_file_spec.rb | 6
-rw-r--r--  spec/features/projects/pipelines/pipelines_spec.rb | 7
-rw-r--r--  spec/features/projects/services/user_activates_issue_tracker_spec.rb | 92
-rw-r--r--  spec/features/projects/services/user_activates_jira_spec.rb | 31
-rw-r--r--  spec/features/projects/wiki/user_updates_wiki_page_spec.rb | 304
-rw-r--r--  spec/features/projects/wiki/user_views_wiki_page_spec.rb | 205
-rw-r--r--  spec/features/variables_spec.rb | 145
-rw-r--r--  spec/finders/snippets_finder_spec.rb | 67
-rw-r--r--  spec/fixtures/api/schemas/deployment.json | 45
-rw-r--r--  spec/fixtures/api/schemas/deployments.json | 44
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/blobs.json | 19
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/commit/detail.json | 7
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/commits_details.json | 4
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/issue.json | 96
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/issues.json | 95
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/merge_requests.json | 2
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/milestones.json | 24
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/notes.json | 34
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/projects.json | 36
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/snippets.json | 33
-rw-r--r--  spec/fixtures/api/schemas/variable.json | 17
-rw-r--r--  spec/fixtures/api/schemas/variables.json | 11
-rw-r--r--  spec/helpers/application_helper_spec.rb | 46
-rw-r--r--  spec/helpers/auto_devops_helper_spec.rb | 35
-rw-r--r--  spec/helpers/avatars_helper_spec.rb | 16
-rw-r--r--  spec/helpers/events_helper_spec.rb | 4
-rw-r--r--  spec/helpers/graph_helper_spec.rb | 6
-rw-r--r--  spec/helpers/projects_helper_spec.rb | 2
-rw-r--r--  spec/javascripts/awards_handler_spec.js | 13
-rw-r--r--  spec/javascripts/boards/board_list_spec.js | 2
-rw-r--r--  spec/javascripts/ci_variable_list/ajax_variable_list_spec.js | 213
-rw-r--r--  spec/javascripts/ci_variable_list/ci_variable_list_spec.js | 124
-rw-r--r--  spec/javascripts/commits_spec.js | 20
-rw-r--r--  spec/javascripts/datetime_utility_spec.js | 64
-rw-r--r--  spec/javascripts/droplab/drop_down_spec.js | 113
-rw-r--r--  spec/javascripts/filtered_search/components/recent_searches_dropdown_content_spec.js | 6
-rw-r--r--  spec/javascripts/filtered_search/dropdown_user_spec.js | 4
-rw-r--r--  spec/javascripts/filtered_search/dropdown_utils_spec.js | 3
-rw-r--r--  spec/javascripts/filtered_search/filtered_search_manager_spec.js | 11
-rw-r--r--  spec/javascripts/filtered_search/filtered_search_token_keys_spec.js | 40
-rw-r--r--  spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js | 4
-rw-r--r--  spec/javascripts/fixtures/groups.rb | 29
-rw-r--r--  spec/javascripts/fixtures/projects.rb | 51
-rw-r--r--  spec/javascripts/gl_form_spec.js | 20
-rw-r--r--  spec/javascripts/gpg_badges_spec.js | 50
-rw-r--r--  spec/javascripts/groups/components/app_spec.js | 12
-rw-r--r--  spec/javascripts/importer_status_spec.js | 107
-rw-r--r--  spec/javascripts/issuable_time_tracker_spec.js | 2
-rw-r--r--  spec/javascripts/job_spec.js | 45
-rw-r--r--  spec/javascripts/lib/utils/common_utils_spec.js | 29
-rw-r--r--  spec/javascripts/lib/utils/text_utility_spec.js | 6
-rw-r--r--  spec/javascripts/monitoring/dashboard_state_spec.js | 29
-rw-r--r--  spec/javascripts/monitoring/mock_data.js | 1
-rw-r--r--  spec/javascripts/notebook/cells/markdown_spec.js | 2
-rw-r--r--  spec/javascripts/notes/components/note_app_spec.js | 92
-rw-r--r--  spec/javascripts/notes/components/noteable_note_spec.js | 21
-rw-r--r--  spec/javascripts/notes/helpers.js | 12
-rw-r--r--  spec/javascripts/notes/mock_data.js | 2
-rw-r--r--  spec/javascripts/notes/stores/actions_spec.js | 71
-rw-r--r--  spec/javascripts/notes/stores/getters_spec.js | 6
-rw-r--r--  spec/javascripts/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js (renamed from spec/javascripts/pipeline_schedules/interval_pattern_input_spec.js) | 2
-rw-r--r--  spec/javascripts/pages/projects/pipeline_schedules/shared/components/pipeline_schedule_callout_spec.js (renamed from spec/javascripts/pipeline_schedules/pipeline_schedule_callout_spec.js) | 2
-rw-r--r--  spec/javascripts/pipelines/async_button_spec.js | 8
-rw-r--r--  spec/javascripts/settings_panels_spec.js | 29
-rw-r--r--  spec/javascripts/vue_mr_widget/components/states/mr_widget_missing_branch_spec.js | 34
-rw-r--r--  spec/javascripts/vue_mr_widget/components/states/mr_widget_not_allowed_spec.js | 29
-rw-r--r--  spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js | 27
-rw-r--r--  spec/javascripts/vue_mr_widget/mr_widget_options_spec.js | 9
-rw-r--r--  spec/javascripts/vue_shared/components/confirmation_input_spec.js | 63
-rw-r--r--  spec/javascripts/vue_shared/components/gl_modal_spec.js | 192
-rw-r--r--  spec/lib/backup/repository_spec.rb | 18
-rw-r--r--  spec/lib/banzai/filter/html_entity_filter_spec.rb | 9
-rw-r--r--  spec/lib/banzai/filter/syntax_highlight_filter_spec.rb | 57
-rw-r--r--  spec/lib/gitlab/asciidoc_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb | 43
-rw-r--r--  spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb | 189
-rw-r--r--  spec/lib/gitlab/bare_repository_import/importer_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/checks/change_access_spec.rb | 39
-rw-r--r--  spec/lib/gitlab/ci/config/loader_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/ci/yaml_processor_spec.rb | 43
-rw-r--r--  spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb | 11
-rw-r--r--  spec/lib/gitlab/email/attachment_uploader_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/encoding_helper_spec.rb | 5
-rw-r--r--  spec/lib/gitlab/gfm/uploads_rewriter_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/git/blob_spec.rb | 94
-rw-r--r--  spec/lib/gitlab/git/repository_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/git/wiki_spec.rb | 30
-rw-r--r--  spec/lib/gitlab/git_access_spec.rb | 19
-rw-r--r--  spec/lib/gitlab/gitaly_client/blobs_stitcher_spec.rb | 36
-rw-r--r--  spec/lib/gitlab/gitaly_client/commit_service_spec.rb | 26
-rw-r--r--  spec/lib/gitlab/import_export/all_models.yml | 3
-rw-r--r--  spec/lib/gitlab/import_export/safe_model_attributes.yml | 6
-rw-r--r--  spec/lib/gitlab/import_export/uploads_restorer_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/import_export/uploads_saver_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/import_export/wiki_restorer_spec.rb | 45
-rw-r--r--  spec/lib/gitlab/ldap/config_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/ldap/person_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/middleware/multipart_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/o_auth/user_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/profiler_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/prometheus_client_spec.rb | 24
-rw-r--r--  spec/lib/gitlab/query_limiting/transaction_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/query_limiting_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/regex_spec.rb | 5
-rw-r--r--  spec/lib/gitlab/repo_path_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/search_results_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/shell_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ssh_public_key_spec.rb | 35
-rw-r--r--  spec/lib/gitlab/workhorse_spec.rb | 2
-rw-r--r--  spec/migrations/README.md | 2
-rw-r--r--  spec/migrations/convert_custom_notification_settings_to_columns_spec.rb | 4
-rw-r--r--  spec/migrations/migrate_process_commit_worker_jobs_spec.rb | 2
-rw-r--r--  spec/migrations/remove_redundant_pipeline_stages_spec.rb | 59
-rw-r--r--  spec/migrations/rename_reserved_project_names_spec.rb | 12
-rw-r--r--  spec/migrations/turn_nested_groups_into_regular_groups_for_mysql_spec.rb | 2
-rw-r--r--  spec/models/application_setting_spec.rb | 34
-rw-r--r--  spec/models/ci/build_spec.rb | 9
-rw-r--r--  spec/models/ci/runner_spec.rb | 108
-rw-r--r--  spec/models/clusters/applications/prometheus_spec.rb | 72
-rw-r--r--  spec/models/concerns/redis_cacheable_spec.rb | 39
-rw-r--r--  spec/models/group_spec.rb | 8
-rw-r--r--  spec/models/identity_spec.rb | 33
-rw-r--r--  spec/models/key_spec.rb | 51
-rw-r--r--  spec/models/lfs_file_lock_spec.rb | 57
-rw-r--r--  spec/models/namespace_spec.rb | 206
-rw-r--r--  spec/models/project_auto_devops_spec.rb | 43
-rw-r--r--  spec/models/project_services/prometheus_service_spec.rb | 237
-rw-r--r--  spec/models/project_spec.rb | 78
-rw-r--r--  spec/models/repository_spec.rb | 34
-rw-r--r--  spec/models/user_spec.rb | 33
-rw-r--r--  spec/models/wiki_page_spec.rb | 229
-rw-r--r--  spec/policies/personal_snippet_policy_spec.rb | 1
-rw-r--r--  spec/policies/project_snippet_policy_spec.rb | 1
-rw-r--r--  spec/presenters/ci/group_variable_presenter_spec.rb | 17
-rw-r--r--  spec/presenters/ci/variable_presenter_spec.rb | 17
-rw-r--r--  spec/requests/api/commits_spec.rb | 66
-rw-r--r--  spec/requests/api/group_variables_spec.rb | 4
-rw-r--r--  spec/requests/api/internal_spec.rb | 66
-rw-r--r--  spec/requests/api/project_import_spec.rb | 102
-rw-r--r--  spec/requests/api/projects_spec.rb | 11
-rw-r--r--  spec/requests/api/runner_spec.rb | 3
-rw-r--r--  spec/requests/api/search_spec.rb | 318
-rw-r--r--  spec/requests/api/snippets_spec.rb | 21
-rw-r--r--  spec/requests/api/todos_spec.rb | 6
-rw-r--r--  spec/requests/api/v3/projects_spec.rb | 4
-rw-r--r--  spec/requests/api/v3/todos_spec.rb | 6
-rw-r--r--  spec/requests/api/variables_spec.rb | 4
-rw-r--r--  spec/requests/git_http_spec.rb | 2
-rw-r--r--  spec/requests/lfs_http_spec.rb | 2
-rw-r--r--  spec/requests/lfs_locks_api_spec.rb | 159
-rw-r--r--  spec/requests/openid_connect_spec.rb | 27
-rw-r--r--  spec/requests/rack_attack_global_spec.rb | 10
-rw-r--r--  spec/serializers/group_variable_entity_spec.rb | 14
-rw-r--r--  spec/serializers/lfs_file_lock_entity_spec.rb | 19
-rw-r--r--  spec/serializers/variable_entity_spec.rb | 14
-rw-r--r--  spec/services/ci/ensure_stage_service_spec.rb | 51
-rw-r--r--  spec/services/ci/retry_build_service_spec.rb | 36
-rw-r--r--  spec/services/groups/destroy_service_spec.rb | 6
-rw-r--r--  spec/services/issues/fetch_referenced_merge_requests_service_spec.rb | 35
-rw-r--r--  spec/services/lfs/lock_file_service_spec.rb | 62
-rw-r--r--  spec/services/lfs/locks_finder_service_spec.rb | 101
-rw-r--r--  spec/services/lfs/unlock_file_service_spec.rb | 105
-rw-r--r--  spec/services/members/authorized_destroy_service_spec.rb | 56
-rw-r--r--  spec/services/merge_requests/build_service_spec.rb | 162
-rw-r--r--  spec/services/projects/create_from_template_service_spec.rb | 8
-rw-r--r--  spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb | 2
-rw-r--r--  spec/services/projects/hashed_storage/migrate_repository_service_spec.rb | 2
-rw-r--r--  spec/services/projects/hashed_storage_migration_service_spec.rb | 2
-rw-r--r--  spec/services/projects/transfer_service_spec.rb | 4
-rw-r--r--  spec/services/projects/update_service_spec.rb | 2
-rw-r--r--  spec/services/search/snippet_service_spec.rb | 2
-rw-r--r--  spec/services/users/destroy_service_spec.rb | 4
-rw-r--r--  spec/support/controllers/githubish_import_controller_shared_examples.rb | 152
-rw-r--r--  spec/support/factory_bot.rb (renamed from spec/support/factory_girl.rb) | 0
-rw-r--r--  spec/support/features/variable_list_shared_examples.rb | 269
-rw-r--r--  spec/support/fixture_helpers.rb | 8
-rw-r--r--  spec/support/matchers/pagination_matcher.rb | 6
-rw-r--r--  spec/support/migrations_helpers.rb | 37
-rw-r--r--  spec/support/reactive_caching_helpers.rb | 6
-rw-r--r--  spec/support/shared_examples/controllers/variables_shared_examples.rb | 123
-rw-r--r--  spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb | 158
-rw-r--r--  spec/support/snippet_visibility.rb | 304
-rw-r--r--  spec/support/track_untracked_uploads_helpers.rb | 4
-rw-r--r--  spec/uploaders/file_uploader_spec.rb | 6
-rw-r--r--  spec/validators/variable_duplicates_validator_spec.rb | 67
-rw-r--r--  spec/workers/check_gcp_project_billing_worker_spec.rb | 65
-rw-r--r--  spec/workers/repository_fork_worker_spec.rb | 2
-rw-r--r--  spec/workers/storage_migrator_worker_spec.rb | 2
223 files changed, 6816 insertions(+), 2283 deletions(-)
diff --git a/spec/controllers/groups/variables_controller_spec.rb b/spec/controllers/groups/variables_controller_spec.rb
index 8ea98cd9e8f..39a36b92bb4 100644
--- a/spec/controllers/groups/variables_controller_spec.rb
+++ b/spec/controllers/groups/variables_controller_spec.rb
@@ -9,48 +9,27 @@ describe Groups::VariablesController do
group.add_master(user)
end
- describe 'POST #create' do
- context 'variable is valid' do
- it 'shows a success flash message' do
- post :create, group_id: group, variable: { key: "one", value: "two" }
-
- expect(flash[:notice]).to include 'Variable was successfully created.'
- expect(response).to redirect_to(group_settings_ci_cd_path(group))
- end
- end
-
- context 'variable is invalid' do
- it 'renders show' do
- post :create, group_id: group, variable: { key: "..one", value: "two" }
+ describe 'GET #show' do
+ let!(:variable) { create(:ci_group_variable, group: group) }
- expect(response).to render_template("groups/variables/show")
- end
+ subject do
+ get :show, group_id: group, format: :json
end
- end
-
- describe 'POST #update' do
- let(:variable) { create(:ci_group_variable) }
- context 'updating a variable with valid characters' do
- before do
- group.variables << variable
- end
-
- it 'shows a success flash message' do
- post :update, group_id: group,
- id: variable.id, variable: { key: variable.key, value: 'two' }
-
- expect(flash[:notice]).to include 'Variable was successfully updated.'
- expect(response).to redirect_to(group_variables_path(group))
- end
+ include_examples 'GET #show lists all variables'
+ end
- it 'renders the action #show if the variable key is invalid' do
- post :update, group_id: group,
- id: variable.id, variable: { key: '?', value: variable.value }
+ describe 'PATCH #update' do
+ let!(:variable) { create(:ci_group_variable, group: group) }
+ let(:owner) { group }
- expect(response).to have_gitlab_http_status(200)
- expect(response).to render_template :show
- end
+ subject do
+ patch :update,
+ group_id: group,
+ variables_attributes: variables_attributes,
+ format: :json
end
+
+ include_examples 'PATCH #update updates variables'
end
end
diff --git a/spec/controllers/help_controller_spec.rb b/spec/controllers/help_controller_spec.rb
index f75048f422c..21d59c62613 100644
--- a/spec/controllers/help_controller_spec.rb
+++ b/spec/controllers/help_controller_spec.rb
@@ -68,7 +68,7 @@ describe HelpController do
context 'when requested file exists' do
it 'renders the raw file' do
get :show,
- path: 'user/project/img/labels_filter',
+ path: 'user/project/img/labels_default',
format: :png
expect(response).to be_success
expect(response.content_type).to eq 'image/png'
diff --git a/spec/controllers/import/bitbucket_controller_spec.rb b/spec/controllers/import/bitbucket_controller_spec.rb
index e8707760a5a..2be46049aab 100644
--- a/spec/controllers/import/bitbucket_controller_spec.rb
+++ b/spec/controllers/import/bitbucket_controller_spec.rb
@@ -84,20 +84,42 @@ describe Import::BitbucketController do
double(slug: "vim", owner: bitbucket_username, name: 'vim')
end
+ let(:project) { create(:project) }
+
before do
allow_any_instance_of(Bitbucket::Client).to receive(:repo).and_return(bitbucket_repo)
allow_any_instance_of(Bitbucket::Client).to receive(:user).and_return(bitbucket_user)
assign_session_tokens
end
+ it 'returns 200 response when the project is imported successfully' do
+ allow(Gitlab::BitbucketImport::ProjectCreator)
+ .to receive(:new).with(bitbucket_repo, bitbucket_repo.name, user.namespace, user, access_params)
+ .and_return(double(execute: project))
+
+ post :create, format: :json
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+
+ it 'returns 422 response when the project could not be imported' do
+ allow(Gitlab::BitbucketImport::ProjectCreator)
+ .to receive(:new).with(bitbucket_repo, bitbucket_repo.name, user.namespace, user, access_params)
+ .and_return(double(execute: build(:project)))
+
+ post :create, format: :json
+
+ expect(response).to have_gitlab_http_status(422)
+ end
+
context "when the repository owner is the Bitbucket user" do
context "when the Bitbucket user and GitLab user's usernames match" do
it "takes the current user's namespace" do
expect(Gitlab::BitbucketImport::ProjectCreator)
.to receive(:new).with(bitbucket_repo, bitbucket_repo.name, user.namespace, user, access_params)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, format: :js
+ post :create, format: :json
end
end
@@ -107,9 +129,9 @@ describe Import::BitbucketController do
it "takes the current user's namespace" do
expect(Gitlab::BitbucketImport::ProjectCreator)
.to receive(:new).with(bitbucket_repo, bitbucket_repo.name, user.namespace, user, access_params)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, format: :js
+ post :create, format: :json
end
end
@@ -120,7 +142,7 @@ describe Import::BitbucketController do
allow(controller).to receive(:current_user).and_return(user)
allow(user).to receive(:can?).and_return(false)
- post :create, format: :js
+ post :create, format: :json
end
end
end
@@ -143,9 +165,9 @@ describe Import::BitbucketController do
it "takes the existing namespace" do
expect(Gitlab::BitbucketImport::ProjectCreator)
.to receive(:new).with(bitbucket_repo, bitbucket_repo.name, existing_namespace, user, access_params)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, format: :js
+ post :create, format: :json
end
end
@@ -154,7 +176,7 @@ describe Import::BitbucketController do
expect(Gitlab::BitbucketImport::ProjectCreator)
.not_to receive(:new)
- post :create, format: :js
+ post :create, format: :json
end
end
end
@@ -163,17 +185,17 @@ describe Import::BitbucketController do
context "when current user can create namespaces" do
it "creates the namespace" do
expect(Gitlab::BitbucketImport::ProjectCreator)
- .to receive(:new).and_return(double(execute: true))
+ .to receive(:new).and_return(double(execute: project))
- expect { post :create, format: :js }.to change(Namespace, :count).by(1)
+ expect { post :create, format: :json }.to change(Namespace, :count).by(1)
end
it "takes the new namespace" do
expect(Gitlab::BitbucketImport::ProjectCreator)
.to receive(:new).with(bitbucket_repo, bitbucket_repo.name, an_instance_of(Group), user, access_params)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, format: :js
+ post :create, format: :json
end
end
@@ -184,23 +206,23 @@ describe Import::BitbucketController do
it "doesn't create the namespace" do
expect(Gitlab::BitbucketImport::ProjectCreator)
- .to receive(:new).and_return(double(execute: true))
+ .to receive(:new).and_return(double(execute: project))
- expect { post :create, format: :js }.not_to change(Namespace, :count)
+ expect { post :create, format: :json }.not_to change(Namespace, :count)
end
it "takes the current user's namespace" do
expect(Gitlab::BitbucketImport::ProjectCreator)
.to receive(:new).with(bitbucket_repo, bitbucket_repo.name, user.namespace, user, access_params)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, format: :js
+ post :create, format: :json
end
end
end
end
- context 'user has chosen an existing nested namespace and name for the project' do
+ context 'user has chosen an existing nested namespace and name for the project', :postgresql do
let(:parent_namespace) { create(:group, name: 'foo', owner: user) }
let(:nested_namespace) { create(:group, name: 'bar', parent: parent_namespace) }
let(:test_name) { 'test_name' }
@@ -212,63 +234,77 @@ describe Import::BitbucketController do
it 'takes the selected namespace and name' do
expect(Gitlab::BitbucketImport::ProjectCreator)
.to receive(:new).with(bitbucket_repo, test_name, nested_namespace, user, access_params)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, { target_namespace: nested_namespace.full_path, new_name: test_name, format: :js }
+ post :create, { target_namespace: nested_namespace.full_path, new_name: test_name, format: :json }
end
end
- context 'user has chosen a non-existent nested namespaces and name for the project' do
+ context 'user has chosen a non-existent nested namespaces and name for the project', :postgresql do
let(:test_name) { 'test_name' }
it 'takes the selected namespace and name' do
expect(Gitlab::BitbucketImport::ProjectCreator)
.to receive(:new).with(bitbucket_repo, test_name, kind_of(Namespace), user, access_params)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :js }
+ post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :json }
end
it 'creates the namespaces' do
allow(Gitlab::BitbucketImport::ProjectCreator)
.to receive(:new).with(bitbucket_repo, test_name, kind_of(Namespace), user, access_params)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- expect { post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :js } }
+ expect { post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :json } }
.to change { Namespace.count }.by(2)
end
it 'new namespace has the right parent' do
allow(Gitlab::BitbucketImport::ProjectCreator)
.to receive(:new).with(bitbucket_repo, test_name, kind_of(Namespace), user, access_params)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :js }
+ post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :json }
expect(Namespace.find_by_path_or_name('bar').parent.path).to eq('foo')
end
end
- context 'user has chosen existent and non-existent nested namespaces and name for the project' do
+ context 'user has chosen existent and non-existent nested namespaces and name for the project', :postgresql do
let(:test_name) { 'test_name' }
let!(:parent_namespace) { create(:group, name: 'foo', owner: user) }
+ before do
+ parent_namespace.add_owner(user)
+ end
+
it 'takes the selected namespace and name' do
expect(Gitlab::BitbucketImport::ProjectCreator)
.to receive(:new).with(bitbucket_repo, test_name, kind_of(Namespace), user, access_params)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :js }
+ post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :json }
end
it 'creates the namespaces' do
allow(Gitlab::BitbucketImport::ProjectCreator)
.to receive(:new).with(bitbucket_repo, test_name, kind_of(Namespace), user, access_params)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- expect { post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :js } }
+ expect { post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :json } }
.to change { Namespace.count }.by(2)
end
end
+
+ context 'when user can not create projects in the chosen namespace' do
+ it 'returns 422 response' do
+ other_namespace = create(:group, name: 'other_namespace')
+
+ post :create, { target_namespace: other_namespace.name, format: :json }
+
+ expect(response).to have_gitlab_http_status(422)
+ end
+ end
end
end
diff --git a/spec/controllers/import/gitlab_controller_spec.rb b/spec/controllers/import/gitlab_controller_spec.rb
index faf1e6f63ea..e958be077c2 100644
--- a/spec/controllers/import/gitlab_controller_spec.rb
+++ b/spec/controllers/import/gitlab_controller_spec.rb
@@ -57,6 +57,7 @@ describe Import::GitlabController do
end
describe "POST create" do
+ let(:project) { create(:project) }
let(:gitlab_username) { user.username }
let(:gitlab_user) do
{ username: gitlab_username }.with_indifferent_access
@@ -75,14 +76,34 @@ describe Import::GitlabController do
assign_session_token
end
+ it 'returns 200 response when the project is imported successfully' do
+ allow(Gitlab::GitlabImport::ProjectCreator)
+ .to receive(:new).with(gitlab_repo, user.namespace, user, access_params)
+ .and_return(double(execute: project))
+
+ post :create, format: :json
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+
+ it 'returns 422 response when the project could not be imported' do
+ allow(Gitlab::GitlabImport::ProjectCreator)
+ .to receive(:new).with(gitlab_repo, user.namespace, user, access_params)
+ .and_return(double(execute: build(:project)))
+
+ post :create, format: :json
+
+ expect(response).to have_gitlab_http_status(422)
+ end
+
context "when the repository owner is the GitLab.com user" do
context "when the GitLab.com user and GitLab server user's usernames match" do
it "takes the current user's namespace" do
expect(Gitlab::GitlabImport::ProjectCreator)
.to receive(:new).with(gitlab_repo, user.namespace, user, access_params)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, format: :js
+ post :create, format: :json
end
end
@@ -92,9 +113,9 @@ describe Import::GitlabController do
it "takes the current user's namespace" do
expect(Gitlab::GitlabImport::ProjectCreator)
.to receive(:new).with(gitlab_repo, user.namespace, user, access_params)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, format: :js
+ post :create, format: :json
end
end
end
@@ -118,9 +139,9 @@ describe Import::GitlabController do
it "takes the existing namespace" do
expect(Gitlab::GitlabImport::ProjectCreator)
.to receive(:new).with(gitlab_repo, existing_namespace, user, access_params)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, format: :js
+ post :create, format: :json
end
end
@@ -129,7 +150,7 @@ describe Import::GitlabController do
expect(Gitlab::GitlabImport::ProjectCreator)
.not_to receive(:new)
- post :create, format: :js
+ post :create, format: :json
end
end
end
@@ -138,17 +159,17 @@ describe Import::GitlabController do
context "when current user can create namespaces" do
it "creates the namespace" do
expect(Gitlab::GitlabImport::ProjectCreator)
- .to receive(:new).and_return(double(execute: true))
+ .to receive(:new).and_return(double(execute: project))
- expect { post :create, format: :js }.to change(Namespace, :count).by(1)
+ expect { post :create, format: :json }.to change(Namespace, :count).by(1)
end
it "takes the new namespace" do
expect(Gitlab::GitlabImport::ProjectCreator)
.to receive(:new).with(gitlab_repo, an_instance_of(Group), user, access_params)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, format: :js
+ post :create, format: :json
end
end
@@ -159,22 +180,22 @@ describe Import::GitlabController do
it "doesn't create the namespace" do
expect(Gitlab::GitlabImport::ProjectCreator)
- .to receive(:new).and_return(double(execute: true))
+ .to receive(:new).and_return(double(execute: project))
- expect { post :create, format: :js }.not_to change(Namespace, :count)
+ expect { post :create, format: :json }.not_to change(Namespace, :count)
end
it "takes the current user's namespace" do
expect(Gitlab::GitlabImport::ProjectCreator)
.to receive(:new).with(gitlab_repo, user.namespace, user, access_params)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, format: :js
+ post :create, format: :json
end
end
end
- context 'user has chosen an existing nested namespace for the project' do
+ context 'user has chosen an existing nested namespace for the project', :postgresql do
let(:parent_namespace) { create(:group, name: 'foo', owner: user) }
let(:nested_namespace) { create(:group, name: 'bar', parent: parent_namespace) }
@@ -185,64 +206,78 @@ describe Import::GitlabController do
it 'takes the selected namespace and name' do
expect(Gitlab::GitlabImport::ProjectCreator)
.to receive(:new).with(gitlab_repo, nested_namespace, user, access_params)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, { target_namespace: nested_namespace.full_path, format: :js }
+ post :create, { target_namespace: nested_namespace.full_path, format: :json }
end
end
- context 'user has chosen a non-existent nested namespaces for the project' do
+ context 'user has chosen a non-existent nested namespaces for the project', :postgresql do
let(:test_name) { 'test_name' }
it 'takes the selected namespace and name' do
expect(Gitlab::GitlabImport::ProjectCreator)
.to receive(:new).with(gitlab_repo, kind_of(Namespace), user, access_params)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, { target_namespace: 'foo/bar', format: :js }
+ post :create, { target_namespace: 'foo/bar', format: :json }
end
it 'creates the namespaces' do
allow(Gitlab::GitlabImport::ProjectCreator)
.to receive(:new).with(gitlab_repo, kind_of(Namespace), user, access_params)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- expect { post :create, { target_namespace: 'foo/bar', format: :js } }
+ expect { post :create, { target_namespace: 'foo/bar', format: :json } }
.to change { Namespace.count }.by(2)
end
it 'new namespace has the right parent' do
allow(Gitlab::GitlabImport::ProjectCreator)
.to receive(:new).with(gitlab_repo, kind_of(Namespace), user, access_params)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, { target_namespace: 'foo/bar', format: :js }
+ post :create, { target_namespace: 'foo/bar', format: :json }
expect(Namespace.find_by_path_or_name('bar').parent.path).to eq('foo')
end
end
- context 'user has chosen existent and non-existent nested namespaces and name for the project' do
+ context 'user has chosen existent and non-existent nested namespaces and name for the project', :postgresql do
let(:test_name) { 'test_name' }
let!(:parent_namespace) { create(:group, name: 'foo', owner: user) }
+ before do
+ parent_namespace.add_owner(user)
+ end
+
it 'takes the selected namespace and name' do
expect(Gitlab::GitlabImport::ProjectCreator)
.to receive(:new).with(gitlab_repo, kind_of(Namespace), user, access_params)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, { target_namespace: 'foo/foobar/bar', format: :js }
+ post :create, { target_namespace: 'foo/foobar/bar', format: :json }
end
it 'creates the namespaces' do
allow(Gitlab::GitlabImport::ProjectCreator)
.to receive(:new).with(gitlab_repo, kind_of(Namespace), user, access_params)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- expect { post :create, { target_namespace: 'foo/foobar/bar', format: :js } }
+ expect { post :create, { target_namespace: 'foo/foobar/bar', format: :json } }
.to change { Namespace.count }.by(2)
end
end
+
+ context 'when user can not create projects in the chosen namespace' do
+ it 'returns 422 response' do
+ other_namespace = create(:group, name: 'other_namespace')
+
+ post :create, { target_namespace: other_namespace.name, format: :json }
+
+ expect(response).to have_gitlab_http_status(422)
+ end
+ end
end
end
end
diff --git a/spec/controllers/profiles_controller_spec.rb b/spec/controllers/profiles_controller_spec.rb
index d380978b86e..03cbbb21e62 100644
--- a/spec/controllers/profiles_controller_spec.rb
+++ b/spec/controllers/profiles_controller_spec.rb
@@ -69,9 +69,8 @@ describe ProfilesController, :request_store do
describe 'PUT update_username' do
let(:namespace) { user.namespace }
- let(:project) { create(:project_empty_repo, namespace: namespace) }
let(:gitlab_shell) { Gitlab::Shell.new }
- let(:new_username) { 'renamedtosomethingelse' }
+ let(:new_username) { generate(:username) }
it 'allows username change' do
sign_in(user)
@@ -85,16 +84,39 @@ describe ProfilesController, :request_store do
expect(user.username).to eq(new_username)
end
- it 'moves dependent projects to new namespace' do
- sign_in(user)
+ context 'with legacy storage' do
+ it 'moves dependent projects to new namespace' do
+ project = create(:project_empty_repo, :legacy_storage, namespace: namespace)
- put :update_username,
- user: { username: new_username }
+ sign_in(user)
- user.reload
+ put :update_username,
+ user: { username: new_username }
- expect(response.status).to eq(302)
- expect(gitlab_shell.exists?(project.repository_storage_path, "#{new_username}/#{project.path}.git")).to be_truthy
+ user.reload
+
+ expect(response.status).to eq(302)
+ expect(gitlab_shell.exists?(project.repository_storage_path, "#{new_username}/#{project.path}.git")).to be_truthy
+ end
+ end
+
+ context 'with hashed storage' do
+ it 'keeps repository location unchanged on disk' do
+ project = create(:project_empty_repo, namespace: namespace)
+
+ before_disk_path = project.disk_path
+
+ sign_in(user)
+
+ put :update_username,
+ user: { username: new_username }
+
+ user.reload
+
+ expect(response.status).to eq(302)
+ expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_truthy
+ expect(before_disk_path).to eq(project.disk_path)
+ end
end
end
end
diff --git a/spec/controllers/projects/variables_controller_spec.rb b/spec/controllers/projects/variables_controller_spec.rb
index 9fde6544215..68019743be0 100644
--- a/spec/controllers/projects/variables_controller_spec.rb
+++ b/spec/controllers/projects/variables_controller_spec.rb
@@ -9,50 +9,28 @@ describe Projects::VariablesController do
project.add_master(user)
end
- describe 'POST #create' do
- context 'variable is valid' do
- it 'shows a success flash message' do
- post :create, namespace_id: project.namespace.to_param, project_id: project,
- variable: { key: "one", value: "two" }
-
- expect(flash[:notice]).to include 'Variable was successfully created.'
- expect(response).to redirect_to(project_settings_ci_cd_path(project))
- end
- end
-
- context 'variable is invalid' do
- it 'renders show' do
- post :create, namespace_id: project.namespace.to_param, project_id: project,
- variable: { key: "..one", value: "two" }
+ describe 'GET #show' do
+ let!(:variable) { create(:ci_variable, project: project) }
- expect(response).to render_template("projects/variables/show")
- end
+ subject do
+ get :show, namespace_id: project.namespace.to_param, project_id: project, format: :json
end
- end
-
- describe 'POST #update' do
- let(:variable) { create(:ci_variable) }
- context 'updating a variable with valid characters' do
- before do
- project.variables << variable
- end
-
- it 'shows a success flash message' do
- post :update, namespace_id: project.namespace.to_param, project_id: project,
- id: variable.id, variable: { key: variable.key, value: 'two' }
-
- expect(flash[:notice]).to include 'Variable was successfully updated.'
- expect(response).to redirect_to(project_variables_path(project))
- end
+ include_examples 'GET #show lists all variables'
+ end
- it 'renders the action #show if the variable key is invalid' do
- post :update, namespace_id: project.namespace.to_param, project_id: project,
- id: variable.id, variable: { key: '?', value: variable.value }
+ describe 'PATCH #update' do
+ let!(:variable) { create(:ci_variable, project: project) }
+ let(:owner) { project }
- expect(response).to have_gitlab_http_status(200)
- expect(response).to render_template :show
- end
+ subject do
+ patch :update,
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ variables_attributes: variables_attributes,
+ format: :json
end
+
+ include_examples 'PATCH #update updates variables'
end
end
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index 5202ffdd8bb..994da3cd159 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -288,62 +288,82 @@ describe ProjectsController do
render_views
let(:admin) { create(:admin) }
- let(:project) { create(:project, :repository) }
before do
sign_in(admin)
end
- context 'when only renaming a project path' do
- it "sets the repository to the right path after a rename" do
- expect { update_project path: 'renamed_path' }
- .to change { project.reload.path }
+ shared_examples_for 'updating a project' do
+ context 'when only renaming a project path' do
+ it "sets the repository to the right path after a rename" do
+ original_repository_path = project.repository.path
- expect(project.path).to include 'renamed_path'
- expect(assigns(:repository).path).to include project.path
- expect(response).to have_gitlab_http_status(302)
- end
- end
+ expect { update_project path: 'renamed_path' }
+ .to change { project.reload.path }
+ expect(project.path).to include 'renamed_path'
- context 'when project has container repositories with tags' do
- before do
- stub_container_registry_config(enabled: true)
- stub_container_registry_tags(repository: /image/, tags: %w[rc1])
- create(:container_repository, project: project, name: :image)
+ if project.hashed_storage?(:repository)
+ expect(assigns(:repository).path).to eq(original_repository_path)
+ else
+ expect(assigns(:repository).path).to include(project.path)
+ end
+
+ expect(response).to have_gitlab_http_status(302)
+ end
end
- it 'does not allow to rename the project' do
- expect { update_project path: 'renamed_path' }
- .not_to change { project.reload.path }
+ context 'when project has container repositories with tags' do
+ before do
+ stub_container_registry_config(enabled: true)
+ stub_container_registry_tags(repository: /image/, tags: %w[rc1])
+ create(:container_repository, project: project, name: :image)
+ end
- expect(controller).to set_flash[:alert].to(/container registry tags/)
- expect(response).to have_gitlab_http_status(200)
+ it 'does not allow to rename the project' do
+ expect { update_project path: 'renamed_path' }
+ .not_to change { project.reload.path }
+
+ expect(controller).to set_flash[:alert].to(/container registry tags/)
+ expect(response).to have_gitlab_http_status(200)
+ end
end
- end
- it 'updates Fast Forward Merge attributes' do
- controller.instance_variable_set(:@project, project)
+ it 'updates Fast Forward Merge attributes' do
+ controller.instance_variable_set(:@project, project)
- params = {
- merge_method: :ff
- }
+ params = {
+ merge_method: :ff
+ }
- put :update,
- namespace_id: project.namespace,
- id: project.id,
- project: params
+ put :update,
+ namespace_id: project.namespace,
+ id: project.id,
+ project: params
- expect(response).to have_gitlab_http_status(302)
- params.each do |param, value|
- expect(project.public_send(param)).to eq(value)
+ expect(response).to have_gitlab_http_status(302)
+ params.each do |param, value|
+ expect(project.public_send(param)).to eq(value)
+ end
+ end
+
+ def update_project(**parameters)
+ put :update,
+ namespace_id: project.namespace.path,
+ id: project.path,
+ project: parameters
end
end
- def update_project(**parameters)
- put :update,
- namespace_id: project.namespace.path,
- id: project.path,
- project: parameters
+ context 'hashed storage' do
+ let(:project) { create(:project, :repository) }
+
+ it_behaves_like 'updating a project'
+ end
+
+ context 'legacy storage' do
+ let(:project) { create(:project, :repository, :legacy_storage) }
+
+ it_behaves_like 'updating a project'
end
end
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index 6ba599cdf83..f6ba3a581ca 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -180,8 +180,8 @@ FactoryBot.define do
trait :artifacts do
after(:create) do |build|
- create(:ci_job_artifact, :archive, job: build)
- create(:ci_job_artifact, :metadata, job: build)
+ create(:ci_job_artifact, :archive, job: build, expire_at: build.artifacts_expire_at)
+ create(:ci_job_artifact, :metadata, job: build, expire_at: build.artifacts_expire_at)
build.reload
end
end
diff --git a/spec/factories/keys.rb b/spec/factories/keys.rb
index 23a98a899f1..f0c43f3d6f5 100644
--- a/spec/factories/keys.rb
+++ b/spec/factories/keys.rb
@@ -5,10 +5,6 @@ FactoryBot.define do
title
key { Spec::Support::Helpers::KeyGeneratorHelper.new(1024).generate + ' dummy@gitlab.com' }
- factory :key_without_comment do
- key { Spec::Support::Helpers::KeyGeneratorHelper.new(1024).generate }
- end
-
factory :deploy_key, class: 'DeployKey'
factory :personal_key do
diff --git a/spec/factories/lfs_file_locks.rb b/spec/factories/lfs_file_locks.rb
new file mode 100644
index 00000000000..b9d24f82b65
--- /dev/null
+++ b/spec/factories/lfs_file_locks.rb
@@ -0,0 +1,7 @@
+FactoryBot.define do
+ factory :lfs_file_lock do
+ user
+ project
+ path 'README.md'
+ end
+end
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 20976977f21..1761b6e2a3b 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -81,8 +81,10 @@ FactoryBot.define do
archived true
end
- trait :hashed do
- storage_version Project::LATEST_STORAGE_VERSION
+ storage_version Project::LATEST_STORAGE_VERSION
+
+ trait :legacy_storage do
+ storage_version nil
end
trait :access_requestable do
@@ -249,7 +251,8 @@ FactoryBot.define do
project.create_prometheus_service(
active: true,
properties: {
- api_url: 'https://prometheus.example.com'
+ api_url: 'https://prometheus.example.com/',
+ manual_configuration: true
}
)
end
diff --git a/spec/factories/services.rb b/spec/factories/services.rb
index 110ef33c6f7..0d4fd49bf3a 100644
--- a/spec/factories/services.rb
+++ b/spec/factories/services.rb
@@ -30,7 +30,8 @@ FactoryBot.define do
project
active true
properties({
- api_url: 'https://prometheus.example.com/'
+ api_url: 'https://prometheus.example.com/',
+ manual_configuration: true
})
end
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index cc0849d1cc6..39b213988f0 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -47,6 +47,16 @@ feature 'Admin updates settings' do
expect(page).to have_content "Application settings saved successfully"
end
+ scenario 'Change AutoDevOps settings' do
+ check 'Enabled Auto DevOps (Beta) for projects by default'
+ fill_in 'Auto devops domain', with: 'domain.com'
+ click_button 'Save'
+
+ expect(Gitlab::CurrentSettings.auto_devops_enabled?).to be true
+ expect(Gitlab::CurrentSettings.auto_devops_domain).to eq('domain.com')
+ expect(page).to have_content "Application settings saved successfully"
+ end
+
scenario 'Change Slack Notifications Service template settings' do
first(:link, 'Service Templates').click
click_link 'Slack notifications'
diff --git a/spec/features/admin/admin_users_spec.rb b/spec/features/admin/admin_users_spec.rb
index a69b428d117..2307ba5985e 100644
--- a/spec/features/admin/admin_users_spec.rb
+++ b/spec/features/admin/admin_users_spec.rb
@@ -26,8 +26,8 @@ describe "Admin::Users" do
expect(page).to have_content(user.email)
expect(page).to have_content(user.name)
expect(page).to have_link('Block', href: block_admin_user_path(user))
- expect(page).to have_link('Remove user', href: admin_user_path(user))
- expect(page).to have_link('Remove user and contributions', href: admin_user_path(user, hard_delete: true))
+ expect(page).to have_button('Delete user')
+ expect(page).to have_button('Delete user and contributions')
end
describe 'Two-factor Authentication filters' do
@@ -122,8 +122,8 @@ describe "Admin::Users" do
expect(page).to have_content(user.email)
expect(page).to have_content(user.name)
expect(page).to have_link('Block user', href: block_admin_user_path(user))
- expect(page).to have_link('Remove user', href: admin_user_path(user))
- expect(page).to have_link('Remove user and contributions', href: admin_user_path(user, hard_delete: true))
+ expect(page).to have_button('Delete user')
+ expect(page).to have_button('Delete user and contributions')
end
describe 'Impersonation' do
diff --git a/spec/features/ci_lint_spec.rb b/spec/features/ci_lint_spec.rb
index 9bc23baf6cf..b1dceec9da8 100644
--- a/spec/features/ci_lint_spec.rb
+++ b/spec/features/ci_lint_spec.rb
@@ -3,16 +3,19 @@ require 'spec_helper'
describe 'CI Lint', :js do
before do
sign_in(create(:user))
+
+ visit ci_lint_path
+ find('#ci-editor')
+ execute_script("ace.edit('ci-editor').setValue(#{yaml_content.to_json});")
+
+ # Ace editor updates a hidden textarea and it happens asynchronously
+ wait_for('YAML content') do
+ find('.ace_content').text.present?
+ end
end
describe 'YAML parsing' do
before do
- visit ci_lint_path
- # Ace editor updates a hidden textarea and it happens asynchronously
- # `sleep 0.1` is actually needed here because of this
- find('#ci-editor')
- execute_script("ace.edit('ci-editor').setValue(" + yaml_content.to_json + ");")
- sleep 0.1
click_on 'Validate'
end
@@ -32,11 +35,10 @@ describe 'CI Lint', :js do
end
context 'YAML is incorrect' do
- let(:yaml_content) { '' }
+ let(:yaml_content) { 'value: cannot have :' }
it 'displays information about an error' do
expect(page).to have_content('Status: syntax is incorrect')
- expect(page).to have_content('Error: Please provide content of .gitlab-ci.yml')
end
end
@@ -48,4 +50,20 @@ describe 'CI Lint', :js do
end
end
end
+
+ describe 'YAML clearing' do
+ before do
+ click_on 'Clear'
+ end
+
+ context 'YAML is present' do
+ let(:yaml_content) do
+ File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml'))
+ end
+
+ it 'YAML content is cleared' do
+ expect(page).to have_field('content', with: '', visible: false, type: 'textarea')
+ end
+ end
+ end
end
diff --git a/spec/features/group_variables_spec.rb b/spec/features/group_variables_spec.rb
index e9b375f4c94..f7863807572 100644
--- a/spec/features/group_variables_spec.rb
+++ b/spec/features/group_variables_spec.rb
@@ -3,76 +3,15 @@ require 'spec_helper'
feature 'Group variables', :js do
let(:user) { create(:user) }
let(:group) { create(:group) }
+ let!(:variable) { create(:ci_group_variable, key: 'test_key', value: 'test value', group: group) }
+ let(:page_path) { group_settings_ci_cd_path(group) }
background do
group.add_master(user)
gitlab_sign_in(user)
- end
-
- context 'when user creates a new variable' do
- background do
- visit group_settings_ci_cd_path(group)
- fill_in 'variable_key', with: 'AAA'
- fill_in 'variable_value', with: 'AAA123'
- find(:css, "#variable_protected").set(true)
- click_on 'Add new variable'
- end
-
- scenario 'user sees the created variable' do
- page.within('.variables-table') do
- expect(find(".variable-key")).to have_content('AAA')
- expect(find(".variable-value")).to have_content('******')
- expect(find(".variable-protected")).to have_content('Yes')
- end
- click_on 'Reveal value'
- page.within('.variables-table') do
- expect(find(".variable-value")).to have_content('AAA123')
- end
- end
- end
-
- context 'when user edits a variable' do
- background do
- create(:ci_group_variable, key: 'AAA', value: 'AAA123', protected: true,
- group: group)
-
- visit group_settings_ci_cd_path(group)
- page.within('.variable-menu') do
- click_on 'Update'
- end
-
- fill_in 'variable_key', with: 'BBB'
- fill_in 'variable_value', with: 'BBB123'
- find(:css, "#variable_protected").set(false)
- click_on 'Save variable'
- end
-
- scenario 'user sees the updated variable' do
- page.within('.variables-table') do
- expect(find(".variable-key")).to have_content('BBB')
- expect(find(".variable-value")).to have_content('******')
- expect(find(".variable-protected")).to have_content('No')
- end
- end
+ visit page_path
end
- context 'when user deletes a variable' do
- background do
- create(:ci_group_variable, key: 'BBB', value: 'BBB123', protected: false,
- group: group)
-
- visit group_settings_ci_cd_path(group)
-
- page.within('.variable-menu') do
- page.accept_alert 'Are you sure?' do
- click_on 'Remove'
- end
- end
- end
-
- scenario 'user does not see the deleted variable' do
- expect(page).to have_no_css('.variables-table')
- end
- end
+ it_behaves_like 'variable list'
end
diff --git a/spec/features/groups/members/search_members_spec.rb b/spec/features/groups/members/search_members_spec.rb
new file mode 100644
index 00000000000..31fbbcf562c
--- /dev/null
+++ b/spec/features/groups/members/search_members_spec.rb
@@ -0,0 +1,29 @@
+require 'spec_helper'
+
+describe 'Search group member' do
+ let(:user) { create :user }
+ let(:member) { create :user }
+
+ let!(:guest_group) do
+ create(:group) do |group|
+ group.add_guest(user)
+ group.add_guest(member)
+ end
+ end
+
+ before do
+ sign_in(user)
+ visit group_group_members_path(guest_group)
+ end
+
+ it 'renders member users' do
+ page.within '.member-search-form' do
+ fill_in 'search', with: member.name
+ find('.member-search-btn').click
+ end
+
+ group_members_list = find(".panel .content-list")
+ expect(group_members_list).to have_content(member.name)
+ expect(group_members_list).not_to have_content(user.name)
+ end
+end
diff --git a/spec/features/groups/milestone_spec.rb b/spec/features/groups/milestone_spec.rb
index 1b41b3842c8..20337f1d3b0 100644
--- a/spec/features/groups/milestone_spec.rb
+++ b/spec/features/groups/milestone_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-feature 'Group milestones', :js do
+feature 'Group milestones' do
let(:group) { create(:group) }
let!(:project) { create(:project_empty_repo, group: group) }
let(:user) { create(:group_member, :master, user: create(:user), group: group ).user }
@@ -13,7 +13,7 @@ feature 'Group milestones', :js do
sign_in(user)
end
- context 'create a milestone' do
+ context 'create a milestone', :js do
before do
visit new_group_milestone_path(group)
end
@@ -61,55 +61,132 @@ feature 'Group milestones', :js do
end
context 'milestones list' do
- let!(:other_project) { create(:project_empty_repo, group: group) }
-
- let!(:active_project_milestone1) { create(:milestone, project: project, state: 'active', title: 'v1.0') }
- let!(:active_project_milestone2) { create(:milestone, project: other_project, state: 'active', title: 'v1.0') }
- let!(:closed_project_milestone1) { create(:milestone, project: project, state: 'closed', title: 'v2.0') }
- let!(:closed_project_milestone2) { create(:milestone, project: other_project, state: 'closed', title: 'v2.0') }
- let!(:active_group_milestone) { create(:milestone, group: group, state: 'active') }
- let!(:closed_group_milestone) { create(:milestone, group: group, state: 'closed') }
-
- before do
- visit group_milestones_path(group)
+ context 'when no milestones' do
+ it 'renders no milestones text' do
+ visit group_milestones_path(group)
+ expect(page).to have_content('No milestones to show')
+ end
end
- it 'counts milestones correctly' do
- expect(find('.top-area .active .badge').text).to eq("2")
- expect(find('.top-area .closed .badge').text).to eq("2")
- expect(find('.top-area .all .badge').text).to eq("4")
- end
+ context 'when milestones exists' do
+ let!(:other_project) { create(:project_empty_repo, group: group) }
+
+ let!(:active_project_milestone1) do
+ create(
+ :milestone,
+ project: project,
+ state: 'active',
+ title: 'v1.0',
+ due_date: '2114-08-20',
+ description: 'Lorem Ipsum is simply dummy text'
+ )
+ end
+ let!(:active_project_milestone2) { create(:milestone, project: other_project, state: 'active', title: 'v1.0') }
+ let!(:closed_project_milestone1) { create(:milestone, project: project, state: 'closed', title: 'v2.0') }
+ let!(:closed_project_milestone2) { create(:milestone, project: other_project, state: 'closed', title: 'v2.0') }
+ let!(:active_group_milestone) { create(:milestone, group: group, state: 'active', title: 'GL-113') }
+ let!(:closed_group_milestone) { create(:milestone, group: group, state: 'closed') }
+ let!(:issue) do
+ create :issue, project: project, assignees: [user], author: user, milestone: active_project_milestone1
+ end
- it 'lists legacy group milestones and group milestones' do
- legacy_milestone = GroupMilestone.build_collection(group, group.projects, { state: 'active' }).first
+ before do
+ visit group_milestones_path(group)
+ end
- expect(page).to have_selector("#milestone_#{active_group_milestone.id}", count: 1)
- expect(page).to have_selector("#milestone_#{legacy_milestone.milestones.first.id}", count: 1)
- end
+ it 'counts milestones correctly' do
+ expect(find('.top-area .active .badge').text).to eq("2")
+ expect(find('.top-area .closed .badge').text).to eq("2")
+ expect(find('.top-area .all .badge').text).to eq("4")
+ end
- it 'updates milestone' do
- page.within(".milestones #milestone_#{active_group_milestone.id}") do
- click_link('Edit')
+ it 'lists legacy group milestones and group milestones' do
+ legacy_milestone = GroupMilestone.build_collection(group, group.projects, { state: 'active' }).first
+
+ expect(page).to have_selector("#milestone_#{active_group_milestone.id}", count: 1)
+ expect(page).to have_selector("#milestone_#{legacy_milestone.milestones.first.id}", count: 1)
end
- page.within('.milestone-form') do
- fill_in 'milestone_title', with: 'new title'
- click_button('Update milestone')
+ it 'updates milestone' do
+ page.within(".milestones #milestone_#{active_group_milestone.id}") do
+ click_link('Edit')
+ end
+
+ page.within('.milestone-form') do
+ fill_in 'milestone_title', with: 'new title'
+ click_button('Update milestone')
+ end
+
+ expect(find('#content-body h2')).to have_content('new title')
end
- expect(find('#content-body h2')).to have_content('new title')
- end
+ it 'shows milestone detail and supports its edit' do
+ page.within(".milestones #milestone_#{active_group_milestone.id}") do
+ click_link(active_group_milestone.title)
+ end
+
+ page.within('.detail-page-header') do
+ click_link('Edit')
+ end
- it 'shows milestone detail and supports its edit' do
- page.within(".milestones #milestone_#{active_group_milestone.id}") do
- click_link(active_group_milestone.title)
+ expect(page).to have_selector('.milestone-form')
end
- page.within('.detail-page-header') do
- click_link('Edit')
+ it 'renders milestones' do
+ expect(page).to have_content('v1.0')
+ expect(page).to have_content('GL-113')
+ expect(page).to have_link(
+ '1 Issue',
+ href: issues_group_path(group, milestone_title: 'v1.0')
+ )
+ expect(page).to have_link(
+ '0 Merge Requests',
+ href: merge_requests_group_path(group, milestone_title: 'v1.0')
+ )
end
- expect(page).to have_selector('.milestone-form')
+ it 'renders group milestone details' do
+ click_link 'v1.0'
+
+ expect(page).to have_content('expires on Aug 20, 2114')
+ expect(page).to have_content('v1.0')
+ expect(page).to have_content('Issues 1 Open: 1 Closed: 0')
+ expect(page).to have_link(issue.title, href: project_issue_path(issue.project, issue))
+ end
+
+ describe 'labels' do
+ before do
+ create(:label, project: project, title: 'bug') do |label|
+ issue.labels << label
+ end
+
+ create(:label, project: project, title: 'feature') do |label|
+ issue.labels << label
+ end
+ end
+
+ it 'renders labels' do
+ click_link 'v1.0'
+
+ page.within('#tab-issues') do
+ expect(page).to have_content 'bug'
+ expect(page).to have_content 'feature'
+ end
+ end
+
+ it 'renders labels list', :js do
+ click_link 'v1.0'
+
+ page.within('.content .nav-links') do
+ page.find(:xpath, "//a[@href='#tab-labels']").click
+ end
+
+ page.within('#tab-labels') do
+ expect(page).to have_content 'bug'
+ expect(page).to have_content 'feature'
+ end
+ end
+ end
end
end
end
diff --git a/spec/features/copy_as_gfm_spec.rb b/spec/features/markdown/copy_as_gfm_spec.rb
index f82ed6300cc..f82ed6300cc 100644
--- a/spec/features/copy_as_gfm_spec.rb
+++ b/spec/features/markdown/copy_as_gfm_spec.rb
diff --git a/spec/features/gitlab_flavored_markdown_spec.rb b/spec/features/markdown/gitlab_flavored_markdown_spec.rb
index 3c2186b3598..3c2186b3598 100644
--- a/spec/features/gitlab_flavored_markdown_spec.rb
+++ b/spec/features/markdown/gitlab_flavored_markdown_spec.rb
diff --git a/spec/features/markdown_spec.rb b/spec/features/markdown/markdown_spec.rb
index f13d78d24e3..f13d78d24e3 100644
--- a/spec/features/markdown_spec.rb
+++ b/spec/features/markdown/markdown_spec.rb
diff --git a/spec/features/markdown/math_spec.rb b/spec/features/markdown/math_spec.rb
new file mode 100644
index 00000000000..6a23d6b78ab
--- /dev/null
+++ b/spec/features/markdown/math_spec.rb
@@ -0,0 +1,22 @@
+require 'spec_helper'
+
+describe 'Math rendering', :js do
+ it 'renders inline and display math correctly' do
+ description = <<~MATH
+ This math is inline $`a^2+b^2=c^2`$.
+
+ This is on a separate line
+ ```math
+ a^2+b^2=c^2
+ ```
+ MATH
+
+ project = create(:project, :public)
+ issue = create(:issue, project: project, description: description)
+
+ visit project_issue_path(project, issue)
+
+ expect(page).to have_selector('.katex .mord.mathit', text: 'b')
+ expect(page).to have_selector('.katex-display .mord.mathit', text: 'b')
+ end
+end
diff --git a/spec/features/markdown/mermaid_spec.rb b/spec/features/markdown/mermaid_spec.rb
new file mode 100644
index 00000000000..a25d701ee35
--- /dev/null
+++ b/spec/features/markdown/mermaid_spec.rb
@@ -0,0 +1,24 @@
+require 'spec_helper'
+
+describe 'Mermaid rendering', :js do
+ it 'renders Mermaid diagrams correctly' do
+ description = <<~MERMAID
+ ```mermaid
+ graph TD;
+ A-->B;
+ A-->C;
+ B-->D;
+ C-->D;
+ ```
+ MERMAID
+
+ project = create(:project, :public)
+ issue = create(:issue, project: project, description: description)
+
+ visit project_issue_path(project, issue)
+
+ %w[A B C D].each do |label|
+ expect(page).to have_selector('svg foreignObject', text: label)
+ end
+ end
+end
diff --git a/spec/features/profiles/password_spec.rb b/spec/features/profiles/password_spec.rb
index 4665626f114..1d7700b6767 100644
--- a/spec/features/profiles/password_spec.rb
+++ b/spec/features/profiles/password_spec.rb
@@ -1,6 +1,15 @@
require 'spec_helper'
describe 'Profile > Password' do
+ let(:user) { create(:user) }
+
+ def fill_passwords(password, confirmation)
+ fill_in 'New password', with: password
+ fill_in 'Password confirmation', with: confirmation
+
+ click_button 'Save password'
+ end
+
context 'Password authentication enabled' do
let(:user) { create(:user, password_automatically_set: true) }
@@ -9,13 +18,6 @@ describe 'Profile > Password' do
visit edit_profile_password_path
end
- def fill_passwords(password, confirmation)
- fill_in 'New password', with: password
- fill_in 'Password confirmation', with: confirmation
-
- click_button 'Save password'
- end
-
context 'User with password automatically set' do
describe 'User puts different passwords in the field and in the confirmation' do
it 'shows an error message' do
@@ -73,4 +75,64 @@ describe 'Profile > Password' do
end
end
end
+
+ context 'Change password' do
+ before do
+ sign_in(user)
+ visit(edit_profile_password_path)
+ end
+
+ it 'does not change user password without the old one' do
+ page.within '.update-password' do
+ fill_passwords('22233344', '22233344')
+ end
+
+ page.within '.flash-container' do
+ expect(page).to have_content 'You must provide a valid current password'
+ end
+ end
+
+ it 'does not change password with invalid old password' do
+ page.within '.update-password' do
+ fill_in 'user_current_password', with: 'invalid'
+ fill_passwords('password', 'confirmation')
+ end
+
+ page.within '.flash-container' do
+ expect(page).to have_content 'You must provide a valid current password'
+ end
+ end
+
+ it 'changes user password' do
+ page.within '.update-password' do
+ fill_in "user_current_password", with: user.password
+ fill_passwords('22233344', '22233344')
+ end
+
+ expect(current_path).to eq new_user_session_path
+ end
+ end
+
+ context 'when password is expired' do
+ before do
+ sign_in(user)
+
+ user.update_attributes(password_expires_at: 1.hour.ago)
+ user.identities.delete
+ expect(user.ldap_user?).to eq false
+ end
+
+ it 'needs to change user password' do
+ visit edit_profile_password_path
+
+ expect(current_path).to eq new_profile_password_path
+
+ fill_in :user_current_password, with: user.password
+ fill_in :user_password, with: '12345678'
+ fill_in :user_password_confirmation, with: '12345678'
+ click_button 'Set new password'
+
+ expect(current_path).to eq new_user_session_path
+ end
+ end
end
diff --git a/spec/features/profiles/user_edit_profile_spec.rb b/spec/features/profiles/user_edit_profile_spec.rb
new file mode 100644
index 00000000000..0b5eacbe916
--- /dev/null
+++ b/spec/features/profiles/user_edit_profile_spec.rb
@@ -0,0 +1,58 @@
+require 'spec_helper'
+
+describe 'User edit profile' do
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ visit(profile_path)
+ end
+
+ it 'changes user profile' do
+ fill_in 'user_skype', with: 'testskype'
+ fill_in 'user_linkedin', with: 'testlinkedin'
+ fill_in 'user_twitter', with: 'testtwitter'
+ fill_in 'user_website_url', with: 'testurl'
+ fill_in 'user_location', with: 'Ukraine'
+ fill_in 'user_bio', with: 'I <3 GitLab'
+ fill_in 'user_organization', with: 'GitLab'
+ click_button 'Update profile settings'
+
+ expect(user.reload).to have_attributes(
+ skype: 'testskype',
+ linkedin: 'testlinkedin',
+ twitter: 'testtwitter',
+ website_url: 'testurl',
+ bio: 'I <3 GitLab',
+ organization: 'GitLab'
+ )
+
+ expect(find('#user_location').value).to eq 'Ukraine'
+ expect(page).to have_content('Profile was successfully updated')
+ end
+
+ context 'user avatar' do
+ before do
+ attach_file(:user_avatar, Rails.root.join('spec', 'fixtures', 'banana_sample.gif'))
+ click_button 'Update profile settings'
+ end
+
+ it 'changes user avatar' do
+ expect(page).to have_link('Remove avatar')
+
+ user.reload
+ expect(user.avatar).to be_instance_of AvatarUploader
+ expect(user.avatar.url).to eq "/uploads/-/system/user/avatar/#{user.id}/banana_sample.gif"
+ end
+
+ it 'removes user avatar' do
+ click_link 'Remove avatar'
+
+ user.reload
+
+ expect(user.avatar?).to eq false
+ expect(page).not_to have_link('Remove avatar')
+ expect(page).to have_link('gravatar.com')
+ end
+ end
+end
diff --git a/spec/features/profiles/user_manages_applications_spec.rb b/spec/features/profiles/user_manages_applications_spec.rb
new file mode 100644
index 00000000000..387584fef62
--- /dev/null
+++ b/spec/features/profiles/user_manages_applications_spec.rb
@@ -0,0 +1,39 @@
+require 'spec_helper'
+
+describe 'User manages applications' do
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ visit applications_profile_path
+ end
+
+ it 'manages applications' do
+ expect(page).to have_content 'Add new application'
+
+ fill_in :doorkeeper_application_name, with: 'test'
+ fill_in :doorkeeper_application_redirect_uri, with: 'https://test.com'
+ click_on 'Save application'
+
+ expect(page).to have_content 'Application: test'
+ expect(page).to have_content 'Application Id'
+ expect(page).to have_content 'Secret'
+
+ click_on 'Edit'
+
+ expect(page).to have_content 'Edit application'
+ fill_in :doorkeeper_application_name, with: 'test_changed'
+ click_on 'Save application'
+
+ expect(page).to have_content 'test_changed'
+ expect(page).to have_content 'Application Id'
+ expect(page).to have_content 'Secret'
+
+ visit applications_profile_path
+
+ page.within '.oauth-applications' do
+ click_on 'Destroy'
+ end
+ expect(page.find('.oauth-applications')).not_to have_content 'test_changed'
+ end
+end
diff --git a/spec/features/profiles/user_visits_profile_authentication_log_spec.rb b/spec/features/profiles/user_visits_profile_authentication_log_spec.rb
index a50ebb29e01..0f419c3c2c0 100644
--- a/spec/features/profiles/user_visits_profile_authentication_log_spec.rb
+++ b/spec/features/profiles/user_visits_profile_authentication_log_spec.rb
@@ -3,13 +3,28 @@ require 'spec_helper'
describe 'User visits the authentication log' do
let(:user) { create(:user) }
- before do
- sign_in(user)
+ context 'when user signed in' do
+ before do
+ sign_in(user)
+ end
- visit(audit_log_profile_path)
+ it 'shows correct menu item' do
+ visit(audit_log_profile_path)
+
+ expect(page).to have_active_navigation('Authentication log')
+ end
end
- it 'shows correct menu item' do
- expect(page).to have_active_navigation('Authentication log')
+ context 'when user has activity' do
+ before do
+ create(:closed_issue_event, author: user)
+ gitlab_sign_in(user)
+ end
+
+ it 'shows user activity' do
+ visit(audit_log_profile_path)
+
+ expect(page).to have_content 'Signed in with standard authentication'
+ end
end
end
diff --git a/spec/features/profiles/user_visits_profile_spec.rb b/spec/features/profiles/user_visits_profile_spec.rb
index 6601d3039ed..713112477c8 100644
--- a/spec/features/profiles/user_visits_profile_spec.rb
+++ b/spec/features/profiles/user_visits_profile_spec.rb
@@ -5,11 +5,58 @@ describe 'User visits their profile' do
before do
sign_in(user)
-
- visit(profile_path)
end
it 'shows correct menu item' do
+ visit(profile_path)
+
expect(page).to have_active_navigation('Profile')
end
+
+ it 'shows profile info' do
+ visit(profile_path)
+
+ expect(page).to have_content "This information will appear on your profile"
+ end
+
+ context 'when user has groups' do
+ let(:group) do
+ create :group do |group|
+ group.add_owner(user)
+ end
+ end
+
+ let!(:project) do
+ create(:project, :repository, namespace: group) do |project|
+ create(:closed_issue_event, project: project)
+ project.add_master(user)
+ end
+ end
+
+ def click_on_profile_picture
+ find(:css, '.header-user-dropdown-toggle').click
+
+ page.within ".header-user" do
+ click_link "Profile"
+ end
+ end
+
+ it 'shows user groups', :js do
+ visit(profile_path)
+ click_on_profile_picture
+
+ page.within ".cover-block" do
+ expect(page).to have_content user.name
+ expect(page).to have_content user.username
+ end
+
+ page.within ".content" do
+ click_link "Groups"
+ end
+
+ page.within "#groups" do
+ expect(page).to have_content group.name
+ end
+ end
+ end
end
diff --git a/spec/features/project_variables_spec.rb b/spec/features/project_variables_spec.rb
new file mode 100644
index 00000000000..0ba2224359a
--- /dev/null
+++ b/spec/features/project_variables_spec.rb
@@ -0,0 +1,18 @@
+require 'spec_helper'
+
+describe 'Project variables', :js do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let(:variable) { create(:ci_variable, key: 'test_key', value: 'test value') }
+ let(:page_path) { project_settings_ci_cd_path(project) }
+
+ before do
+ sign_in(user)
+ project.add_master(user)
+ project.variables << variable
+
+ visit page_path
+ end
+
+ it_behaves_like 'variable list'
+end
diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb
index 02dbd3380b3..4d47cdb500c 100644
--- a/spec/features/projects/clusters/gcp_spec.rb
+++ b/spec/features/projects/clusters/gcp_spec.rb
@@ -25,7 +25,7 @@ feature 'Gcp Cluster', :js do
context 'when user has a GCP project with billing enabled' do
before do
allow_any_instance_of(Projects::Clusters::GcpController).to receive(:authorize_google_project_billing)
- allow_any_instance_of(Projects::Clusters::GcpController).to receive(:google_project_billing_status).and_return('true')
+ allow_any_instance_of(Projects::Clusters::GcpController).to receive(:google_project_billing_status).and_return(true)
end
context 'when user does not have a cluster and visits cluster index page' do
@@ -134,7 +134,7 @@ feature 'Gcp Cluster', :js do
context 'when user does not have a GCP project with billing enabled' do
before do
allow_any_instance_of(Projects::Clusters::GcpController).to receive(:authorize_google_project_billing)
- allow_any_instance_of(Projects::Clusters::GcpController).to receive(:google_project_billing_status).and_return('false')
+ allow_any_instance_of(Projects::Clusters::GcpController).to receive(:google_project_billing_status).and_return(false)
visit project_clusters_path(project)
diff --git a/spec/features/projects/import_export/namespace_export_file_spec.rb b/spec/features/projects/import_export/namespace_export_file_spec.rb
index c1fccf4a40b..7d056b0c140 100644
--- a/spec/features/projects/import_export/namespace_export_file_spec.rb
+++ b/spec/features/projects/import_export/namespace_export_file_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Import/Export - Namespace export file cleanup', :js do
+describe 'Import/Export - Namespace export file cleanup', :js do
let(:export_path) { Dir.mktmpdir('namespace_export_file_spec') }
before do
@@ -42,13 +42,13 @@ feature 'Import/Export - Namespace export file cleanup', :js do
end
describe 'legacy storage' do
- let(:project) { create(:project) }
+ let(:project) { create(:project, :legacy_storage) }
it_behaves_like 'handling project exports on namespace change'
end
describe 'hashed storage' do
- let(:project) { create(:project, :hashed) }
+ let(:project) { create(:project) }
it_behaves_like 'handling project exports on namespace change'
end
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index 592c99fc64a..37a06b65481 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -109,7 +109,8 @@ describe 'Pipelines', :js do
context 'when canceling' do
before do
- accept_confirm { find('.js-pipelines-cancel-button').click }
+ find('.js-pipelines-cancel-button').click
+ find('.js-primary-button').click
wait_for_requests
end
@@ -140,6 +141,7 @@ describe 'Pipelines', :js do
context 'when retrying' do
before do
find('.js-pipelines-retry-button').click
+ find('.js-primary-button').click
wait_for_requests
end
@@ -238,7 +240,8 @@ describe 'Pipelines', :js do
context 'when canceling' do
before do
- accept_alert { find('.js-pipelines-cancel-button').click }
+ find('.js-pipelines-cancel-button').click
+ find('.js-primary-button').click
end
it 'indicates that pipeline was canceled' do
diff --git a/spec/features/projects/services/user_activates_issue_tracker_spec.rb b/spec/features/projects/services/user_activates_issue_tracker_spec.rb
new file mode 100644
index 00000000000..e9502178bd7
--- /dev/null
+++ b/spec/features/projects/services/user_activates_issue_tracker_spec.rb
@@ -0,0 +1,92 @@
+require 'spec_helper'
+
+describe 'User activates issue tracker', :js do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+
+ let(:url) { 'http://tracker.example.com' }
+
+ def fill_form(active = true)
+ check 'Active' if active
+
+ fill_in 'service_project_url', with: url
+ fill_in 'service_issues_url', with: "#{url}/:id"
+ fill_in 'service_new_issue_url', with: url
+ end
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit project_settings_integrations_path(project)
+ end
+
+ shared_examples 'external issue tracker activation' do |tracker:|
+ describe 'user sets and activates the Service' do
+ context 'when the connection test succeeds' do
+ before do
+ stub_request(:head, url).to_return(headers: { 'Content-Type' => 'application/json' })
+
+ click_link(tracker)
+ fill_form
+ click_button('Test settings and save changes')
+ wait_for_requests
+ end
+
+ it 'activates the service' do
+ expect(page).to have_content("#{tracker} activated.")
+ expect(current_path).to eq(project_settings_integrations_path(project))
+ end
+
+ it 'shows the link in the menu' do
+ page.within('.nav-sidebar') do
+ expect(page).to have_link(tracker, href: url)
+ end
+ end
+ end
+
+ context 'when the connection test fails' do
+ it 'activates the service' do
+ stub_request(:head, url).to_raise(HTTParty::Error)
+
+ click_link(tracker)
+ fill_form
+ click_button('Test settings and save changes')
+ wait_for_requests
+
+ expect(find('.flash-container-page')).to have_content 'Test failed.'
+ expect(find('.flash-container-page')).to have_content 'Save anyway'
+
+ find('.flash-alert .flash-action').click
+ wait_for_requests
+
+ expect(page).to have_content("#{tracker} activated.")
+ expect(current_path).to eq(project_settings_integrations_path(project))
+ end
+ end
+ end
+
+ describe 'user sets the service but keeps it disabled' do
+ before do
+ click_link(tracker)
+ fill_form(false)
+ click_button('Save changes')
+ end
+
+ it 'saves but does not activate the service' do
+ expect(page).to have_content("#{tracker} settings saved, but not activated.")
+ expect(current_path).to eq(project_settings_integrations_path(project))
+ end
+
+ it 'does not show the external tracker link in the menu' do
+ page.within('.nav-sidebar') do
+ expect(page).not_to have_link(tracker, href: url)
+ end
+ end
+ end
+ end
+
+ it_behaves_like 'external issue tracker activation', tracker: 'Redmine'
+ it_behaves_like 'external issue tracker activation', tracker: 'Bugzilla'
+ it_behaves_like 'external issue tracker activation', tracker: 'Custom Issue Tracker'
+end
diff --git a/spec/features/projects/services/user_activates_jira_spec.rb b/spec/features/projects/services/user_activates_jira_spec.rb
index 028669eeaf2..429128ec096 100644
--- a/spec/features/projects/services/user_activates_jira_spec.rb
+++ b/spec/features/projects/services/user_activates_jira_spec.rb
@@ -3,7 +3,6 @@ require 'spec_helper'
describe 'User activates Jira', :js do
let(:user) { create(:user) }
let(:project) { create(:project) }
- let(:service) { project.create_jira_service }
let(:url) { 'http://jira.example.com' }
let(:test_url) { 'http://jira.example.com/rest/api/2/serverInfo' }
@@ -26,7 +25,7 @@ describe 'User activates Jira', :js do
describe 'user sets and activates Jira Service' do
context 'when Jira connection test succeeds' do
- it 'activates the JIRA service' do
+ before do
server_info = { key: 'value' }.to_json
WebMock.stub_request(:get, test_url).with(basic_auth: %w(username password)).to_return(body: server_info)
@@ -34,10 +33,18 @@ describe 'User activates Jira', :js do
fill_form
click_button('Test settings and save changes')
wait_for_requests
+ end
+ it 'activates the JIRA service' do
expect(page).to have_content('JIRA activated.')
expect(current_path).to eq(project_settings_integrations_path(project))
end
+
+ it 'shows the JIRA link in the menu' do
+ page.within('.nav-sidebar') do
+ expect(page).to have_link('JIRA', href: url)
+ end
+ end
end
context 'when Jira connection test fails' do
@@ -75,14 +82,20 @@ describe 'User activates Jira', :js do
end
describe 'user sets Jira Service but keeps it disabled' do
- context 'when Jira connection test succeeds' do
- it 'activates the JIRA service' do
- click_link('JIRA')
- fill_form(false)
- click_button('Save changes')
+ before do
+ click_link('JIRA')
+ fill_form(false)
+ click_button('Save changes')
+ end
- expect(page).to have_content('JIRA settings saved, but not activated.')
- expect(current_path).to eq(project_settings_integrations_path(project))
+ it 'saves but does not activate the JIRA service' do
+ expect(page).to have_content('JIRA settings saved, but not activated.')
+ expect(current_path).to eq(project_settings_integrations_path(project))
+ end
+
+ it 'does not show the JIRA link in the menu' do
+ page.within('.nav-sidebar') do
+ expect(page).not_to have_link('JIRA', href: url)
end
end
end
diff --git a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb b/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
index 4d2a08afecc..ef1bb712846 100644
--- a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
+++ b/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
@@ -1,226 +1,234 @@
require 'spec_helper'
-# Remove skip_gitaly_mock flag when gitaly_update_page implements moving pages
-describe 'User updates wiki page', :skip_gitaly_mock do
- let(:user) { create(:user) }
-
- before do
- project.add_master(user)
- sign_in(user)
- end
-
- context 'when wiki is empty' do
+describe 'User updates wiki page' do
+ shared_examples 'wiki page user update' do
+ let(:user) { create(:user) }
before do
- visit(project_wikis_path(project))
+ project.add_master(user)
+ sign_in(user)
end
- context 'in a user namespace' do
- let(:project) { create(:project, namespace: user.namespace) }
-
- it 'redirects back to the home edit page' do
- page.within(:css, '.wiki-form .form-actions') do
- click_on('Cancel')
- end
-
- expect(current_path).to eq project_wiki_path(project, :home)
+ context 'when wiki is empty' do
+ before do
+ visit(project_wikis_path(project))
end
- it 'updates a page that has a path', :js do
- click_on('New page')
+ context 'in a user namespace' do
+ let(:project) { create(:project, namespace: user.namespace) }
- page.within('#modal-new-wiki') do
- fill_in(:new_wiki_path, with: 'one/two/three-test')
- click_on('Create page')
- end
+ it 'redirects back to the home edit page' do
+ page.within(:css, '.wiki-form .form-actions') do
+ click_on('Cancel')
+ end
- page.within '.wiki-form' do
- fill_in(:wiki_content, with: 'wiki content')
- click_on('Create page')
+ expect(current_path).to eq project_wiki_path(project, :home)
end
- expect(current_path).to include('one/two/three-test')
- expect(find('.wiki-pages')).to have_content('Three')
+ it 'updates a page that has a path', :js do
+ click_on('New page')
- first(:link, text: 'Three').click
+ page.within('#modal-new-wiki') do
+ fill_in(:new_wiki_path, with: 'one/two/three-test')
+ click_on('Create page')
+ end
- expect(find('.nav-text')).to have_content('Three')
+ page.within '.wiki-form' do
+ fill_in(:wiki_content, with: 'wiki content')
+ click_on('Create page')
+ end
- click_on('Edit')
+ expect(current_path).to include('one/two/three-test')
+ expect(find('.wiki-pages')).to have_content('Three')
- expect(current_path).to include('one/two/three-test')
- expect(page).to have_content('Edit Page')
+ first(:link, text: 'Three').click
- fill_in('Content', with: 'Updated Wiki Content')
- click_on('Save changes')
+ expect(find('.nav-text')).to have_content('Three')
- expect(page).to have_content('Updated Wiki Content')
- end
- end
- end
+ click_on('Edit')
- context 'when wiki is not empty' do
- let(:project_wiki) { create(:project_wiki, project: project, user: project.creator) }
- let!(:wiki_page) { create(:wiki_page, wiki: project_wiki, attrs: { title: 'home', content: 'Home page' }) }
+ expect(current_path).to include('one/two/three-test')
+ expect(page).to have_content('Edit Page')
- before do
- visit(project_wikis_path(project))
+ fill_in('Content', with: 'Updated Wiki Content')
+ click_on('Save changes')
+
+ expect(page).to have_content('Updated Wiki Content')
+ end
+ end
end
- context 'in a user namespace' do
- let(:project) { create(:project, namespace: user.namespace) }
+ context 'when wiki is not empty' do
+ let(:project_wiki) { create(:project_wiki, project: project, user: project.creator) }
+ let!(:wiki_page) { create(:wiki_page, wiki: project_wiki, attrs: { title: 'home', content: 'Home page' }) }
- it 'updates a page' do
- click_link('Edit')
+ before do
+ visit(project_wikis_path(project))
+ end
- # Commit message field should have correct value.
- expect(page).to have_field('wiki[message]', with: 'Update home')
+ context 'in a user namespace' do
+ let(:project) { create(:project, namespace: user.namespace) }
- fill_in(:wiki_content, with: 'My awesome wiki!')
- click_button('Save changes')
+ it 'updates a page' do
+ click_link('Edit')
- expect(page).to have_content('Home')
- expect(page).to have_content("Last edited by #{user.name}")
- expect(page).to have_content('My awesome wiki!')
- end
+ # Commit message field should have correct value.
+ expect(page).to have_field('wiki[message]', with: 'Update home')
- it 'shows a validation error message' do
- click_link('Edit')
+ fill_in(:wiki_content, with: 'My awesome wiki!')
+ click_button('Save changes')
- fill_in(:wiki_content, with: '')
- click_button('Save changes')
+ expect(page).to have_content('Home')
+ expect(page).to have_content("Last edited by #{user.name}")
+ expect(page).to have_content('My awesome wiki!')
+ end
- expect(page).to have_selector('.wiki-form')
- expect(page).to have_content('Edit Page')
- expect(page).to have_content('The form contains the following error:')
- expect(page).to have_content("Content can't be blank")
- expect(find('textarea#wiki_content').value).to eq('')
- end
+ it 'shows a validation error message' do
+ click_link('Edit')
- it 'shows the autocompletion dropdown', :js do
- click_link('Edit')
+ fill_in(:wiki_content, with: '')
+ click_button('Save changes')
- find('#wiki_content').native.send_keys('')
- fill_in(:wiki_content, with: '@')
+ expect(page).to have_selector('.wiki-form')
+ expect(page).to have_content('Edit Page')
+ expect(page).to have_content('The form contains the following error:')
+ expect(page).to have_content("Content can't be blank")
+ expect(find('textarea#wiki_content').value).to eq('')
+ end
- expect(page).to have_selector('.atwho-view')
- end
+ it 'shows the autocompletion dropdown', :js do
+ click_link('Edit')
- it 'shows the error message' do
- click_link('Edit')
+ find('#wiki_content').native.send_keys('')
+ fill_in(:wiki_content, with: '@')
- wiki_page.update(content: 'Update')
+ expect(page).to have_selector('.atwho-view')
+ end
- click_button('Save changes')
+ it 'shows the error message' do
+ click_link('Edit')
- expect(page).to have_content('Someone edited the page the same time you did.')
- end
+ wiki_page.update(content: 'Update')
- it 'updates a page' do
- click_on('Edit')
- fill_in('Content', with: 'Updated Wiki Content')
- click_on('Save changes')
+ click_button('Save changes')
- expect(page).to have_content('Updated Wiki Content')
- end
+ expect(page).to have_content('Someone edited the page the same time you did.')
+ end
- it 'cancels edititng of a page' do
- click_on('Edit')
+ it 'updates a page' do
+ click_on('Edit')
+ fill_in('Content', with: 'Updated Wiki Content')
+ click_on('Save changes')
- page.within(:css, '.wiki-form .form-actions') do
- click_on('Cancel')
+ expect(page).to have_content('Updated Wiki Content')
end
- expect(current_path).to eq(project_wiki_path(project, wiki_page))
+ it 'cancels editing of a page' do
+ click_on('Edit')
+
+ page.within(:css, '.wiki-form .form-actions') do
+ click_on('Cancel')
+ end
+
+ expect(current_path).to eq(project_wiki_path(project, wiki_page))
+ end
end
- end
- context 'in a group namespace' do
- let(:project) { create(:project, namespace: create(:group, :public)) }
+ context 'in a group namespace' do
+ let(:project) { create(:project, namespace: create(:group, :public)) }
- it 'updates a page' do
- click_link('Edit')
+ it 'updates a page' do
+ click_link('Edit')
- # Commit message field should have correct value.
- expect(page).to have_field('wiki[message]', with: 'Update home')
+ # Commit message field should have correct value.
+ expect(page).to have_field('wiki[message]', with: 'Update home')
- fill_in(:wiki_content, with: 'My awesome wiki!')
+ fill_in(:wiki_content, with: 'My awesome wiki!')
- click_button('Save changes')
+ click_button('Save changes')
- expect(page).to have_content('Home')
- expect(page).to have_content("Last edited by #{user.name}")
- expect(page).to have_content('My awesome wiki!')
+ expect(page).to have_content('Home')
+ expect(page).to have_content("Last edited by #{user.name}")
+ expect(page).to have_content('My awesome wiki!')
+ end
end
end
- end
- context 'when the page is in a subdir' do
- let!(:project) { create(:project, namespace: user.namespace) }
- let(:project_wiki) { create(:project_wiki, project: project, user: project.creator) }
- let(:page_name) { 'page_name' }
- let(:page_dir) { "foo/bar/#{page_name}" }
- let!(:wiki_page) { create(:wiki_page, wiki: project_wiki, attrs: { title: page_dir, content: 'Home page' }) }
+ context 'when the page is in a subdir' do
+ let!(:project) { create(:project, namespace: user.namespace) }
+ let(:project_wiki) { create(:project_wiki, project: project, user: project.creator) }
+ let(:page_name) { 'page_name' }
+ let(:page_dir) { "foo/bar/#{page_name}" }
+ let!(:wiki_page) { create(:wiki_page, wiki: project_wiki, attrs: { title: page_dir, content: 'Home page' }) }
- before do
- visit(project_wiki_edit_path(project, wiki_page))
- end
+ before do
+ visit(project_wiki_edit_path(project, wiki_page))
+ end
- it 'moves the page to the root folder' do
- fill_in(:wiki_title, with: "/#{page_name}")
+ it 'moves the page to the root folder', :skip_gitaly_mock do
+ fill_in(:wiki_title, with: "/#{page_name}")
- click_button('Save changes')
+ click_button('Save changes')
- expect(current_path).to eq(project_wiki_path(project, page_name))
- end
+ expect(current_path).to eq(project_wiki_path(project, page_name))
+ end
- it 'moves the page to other dir' do
- new_page_dir = "foo1/bar1/#{page_name}"
+ it 'moves the page to other dir' do
+ new_page_dir = "foo1/bar1/#{page_name}"
- fill_in(:wiki_title, with: new_page_dir)
+ fill_in(:wiki_title, with: new_page_dir)
- click_button('Save changes')
+ click_button('Save changes')
- expect(current_path).to eq(project_wiki_path(project, new_page_dir))
- end
+ expect(current_path).to eq(project_wiki_path(project, new_page_dir))
+ end
- it 'remains in the same place if title has not changed' do
- original_path = project_wiki_path(project, wiki_page)
+ it 'remains in the same place if title has not changed' do
+ original_path = project_wiki_path(project, wiki_page)
- fill_in(:wiki_title, with: page_name)
+ fill_in(:wiki_title, with: page_name)
- click_button('Save changes')
+ click_button('Save changes')
- expect(current_path).to eq(original_path)
- end
+ expect(current_path).to eq(original_path)
+ end
- it 'can be moved to a different dir with a different name' do
- new_page_dir = "foo1/bar1/new_page_name"
+ it 'can be moved to a different dir with a different name' do
+ new_page_dir = "foo1/bar1/new_page_name"
- fill_in(:wiki_title, with: new_page_dir)
+ fill_in(:wiki_title, with: new_page_dir)
- click_button('Save changes')
+ click_button('Save changes')
- expect(current_path).to eq(project_wiki_path(project, new_page_dir))
- end
+ expect(current_path).to eq(project_wiki_path(project, new_page_dir))
+ end
- it 'can be renamed and moved to the root folder' do
- new_name = 'new_page_name'
+ it 'can be renamed and moved to the root folder' do
+ new_name = 'new_page_name'
- fill_in(:wiki_title, with: "/#{new_name}")
+ fill_in(:wiki_title, with: "/#{new_name}")
- click_button('Save changes')
+ click_button('Save changes')
- expect(current_path).to eq(project_wiki_path(project, new_name))
- end
+ expect(current_path).to eq(project_wiki_path(project, new_name))
+ end
- it 'squishes the title before creating the page' do
- new_page_dir = " foo1 / bar1 / #{page_name} "
+ it 'squishes the title before creating the page' do
+ new_page_dir = " foo1 / bar1 / #{page_name} "
- fill_in(:wiki_title, with: new_page_dir)
+ fill_in(:wiki_title, with: new_page_dir)
- click_button('Save changes')
+ click_button('Save changes')
- expect(current_path).to eq(project_wiki_path(project, "foo1/bar1/#{page_name}"))
+ expect(current_path).to eq(project_wiki_path(project, "foo1/bar1/#{page_name}"))
+ end
end
end
+
+ context 'when Gitaly is enabled' do
+ it_behaves_like 'wiki page user update'
+ end
+
+ context 'when Gitaly is disabled', :skip_gitaly_mock do
+ it_behaves_like 'wiki page user update'
+ end
end
diff --git a/spec/features/projects/wiki/user_views_wiki_page_spec.rb b/spec/features/projects/wiki/user_views_wiki_page_spec.rb
index e37436838fd..306e382119a 100644
--- a/spec/features/projects/wiki/user_views_wiki_page_spec.rb
+++ b/spec/features/projects/wiki/user_views_wiki_page_spec.rb
@@ -1,146 +1,155 @@
require 'spec_helper'
-# Remove skip_gitaly_mock flag when gitaly_update_page implements moving pages
-describe 'User views a wiki page', :skip_gitaly_mock do
- let(:user) { create(:user) }
- let(:project) { create(:project, namespace: user.namespace) }
- let(:wiki_page) do
- create(:wiki_page,
- wiki: project.wiki,
- attrs: { title: 'home', content: 'Look at this [image](image.jpg)\n\n ![alt text](image.jpg)' })
- end
-
- before do
- project.add_master(user)
- sign_in(user)
- end
+describe 'User views a wiki page' do
+ shared_examples 'wiki page user view' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, namespace: user.namespace) }
+ let(:wiki_page) do
+ create(:wiki_page,
+ wiki: project.wiki,
+ attrs: { title: 'home', content: 'Look at this [image](image.jpg)\n\n ![alt text](image.jpg)' })
+ end
- context 'when wiki is empty' do
before do
- visit(project_wikis_path(project))
+ project.add_master(user)
+ sign_in(user)
+ end
- click_on('New page')
+ context 'when wiki is empty' do
+ before do
+ visit(project_wikis_path(project))
- page.within('#modal-new-wiki') do
- fill_in(:new_wiki_path, with: 'one/two/three-test')
- click_on('Create page')
- end
+ click_on('New page')
- page.within('.wiki-form') do
- fill_in(:wiki_content, with: 'wiki content')
- click_on('Create page')
+ page.within('#modal-new-wiki') do
+ fill_in(:new_wiki_path, with: 'one/two/three-test')
+ click_on('Create page')
+ end
+
+ page.within('.wiki-form') do
+ fill_in(:wiki_content, with: 'wiki content')
+ click_on('Create page')
+ end
end
- end
- it 'shows the history of a page that has a path', :js do
- expect(current_path).to include('one/two/three-test')
+ it 'shows the history of a page that has a path', :js do
+ expect(current_path).to include('one/two/three-test')
- first(:link, text: 'Three').click
- click_on('Page history')
+ first(:link, text: 'Three').click
+ click_on('Page history')
- expect(current_path).to include('one/two/three-test')
+ expect(current_path).to include('one/two/three-test')
- page.within(:css, '.nav-text') do
- expect(page).to have_content('History')
+ page.within(:css, '.nav-text') do
+ expect(page).to have_content('History')
+ end
end
- end
- it 'shows an old version of a page', :js do
- expect(current_path).to include('one/two/three-test')
- expect(find('.wiki-pages')).to have_content('Three')
+ it 'shows an old version of a page', :js do
+ expect(current_path).to include('one/two/three-test')
+ expect(find('.wiki-pages')).to have_content('Three')
- first(:link, text: 'Three').click
+ first(:link, text: 'Three').click
- expect(find('.nav-text')).to have_content('Three')
+ expect(find('.nav-text')).to have_content('Three')
- click_on('Edit')
+ click_on('Edit')
- expect(current_path).to include('one/two/three-test')
- expect(page).to have_content('Edit Page')
+ expect(current_path).to include('one/two/three-test')
+ expect(page).to have_content('Edit Page')
- fill_in('Content', with: 'Updated Wiki Content')
+ fill_in('Content', with: 'Updated Wiki Content')
- click_on('Save changes')
- click_on('Page history')
+ click_on('Save changes')
+ click_on('Page history')
- page.within(:css, '.nav-text') do
- expect(page).to have_content('History')
- end
+ page.within(:css, '.nav-text') do
+ expect(page).to have_content('History')
+ end
- find('a[href*="?version_id"]')
+ find('a[href*="?version_id"]')
+ end
end
- end
- context 'when a page does not have history' do
- before do
- visit(project_wiki_path(project, wiki_page))
- end
+ context 'when a page does not have history' do
+ before do
+ visit(project_wiki_path(project, wiki_page))
+ end
- it 'shows all the pages' do
- expect(page).to have_content(user.name)
- expect(find('.wiki-pages')).to have_content(wiki_page.title.capitalize)
- end
+ it 'shows all the pages' do
+ expect(page).to have_content(user.name)
+ expect(find('.wiki-pages')).to have_content(wiki_page.title.capitalize)
+ end
- it 'shows a file stored in a page' do
- gollum_file_double = double('Gollum::File',
- mime_type: 'image/jpeg',
- name: 'images/image.jpg',
- path: 'images/image.jpg',
- raw_data: '')
- wiki_file = Gitlab::Git::WikiFile.new(gollum_file_double)
+ it 'shows a file stored in a page' do
+ gollum_file_double = double('Gollum::File',
+ mime_type: 'image/jpeg',
+ name: 'images/image.jpg',
+ path: 'images/image.jpg',
+ raw_data: '')
+ wiki_file = Gitlab::Git::WikiFile.new(gollum_file_double)
- allow(wiki_file).to receive(:mime_type).and_return('image/jpeg')
- allow_any_instance_of(ProjectWiki).to receive(:find_file).with('image.jpg', nil).and_return(wiki_file)
+ allow(wiki_file).to receive(:mime_type).and_return('image/jpeg')
+ allow_any_instance_of(ProjectWiki).to receive(:find_file).with('image.jpg', nil).and_return(wiki_file)
- expect(page).to have_xpath('//img[@data-src="image.jpg"]')
- expect(page).to have_link('image', href: "#{project.wiki.wiki_base_path}/image.jpg")
+ expect(page).to have_xpath('//img[@data-src="image.jpg"]')
+ expect(page).to have_link('image', href: "#{project.wiki.wiki_base_path}/image.jpg")
- click_on('image')
+ click_on('image')
- expect(current_path).to match('wikis/image.jpg')
- expect(page).not_to have_xpath('/html') # Page should render the image which means there is no html involved
- end
+ expect(current_path).to match('wikis/image.jpg')
+ expect(page).not_to have_xpath('/html') # Page should render the image which means there is no html involved
+ end
- it 'shows the creation page if file does not exist' do
- expect(page).to have_link('image', href: "#{project.wiki.wiki_base_path}/image.jpg")
+ it 'shows the creation page if file does not exist' do
+ expect(page).to have_link('image', href: "#{project.wiki.wiki_base_path}/image.jpg")
- click_on('image')
+ click_on('image')
- expect(current_path).to match('wikis/image.jpg')
- expect(page).to have_content('New Wiki Page')
- expect(page).to have_content('Create page')
+ expect(current_path).to match('wikis/image.jpg')
+ expect(page).to have_content('New Wiki Page')
+ expect(page).to have_content('Create page')
+ end
end
- end
- context 'when a page has history' do
- before do
- wiki_page.update(message: 'updated home', content: 'updated [some link](other-page)')
- end
+ context 'when a page has history' do
+ before do
+ wiki_page.update(message: 'updated home', content: 'updated [some link](other-page)')
+ end
- it 'shows the page history' do
- visit(project_wiki_path(project, wiki_page))
+ it 'shows the page history' do
+ visit(project_wiki_path(project, wiki_page))
- expect(page).to have_selector('a.btn', text: 'Edit')
+ expect(page).to have_selector('a.btn', text: 'Edit')
- click_on('Page history')
+ click_on('Page history')
- expect(page).to have_content(user.name)
- expect(page).to have_content("#{user.username} created page: home")
- expect(page).to have_content('updated home')
+ expect(page).to have_content(user.name)
+ expect(page).to have_content("#{user.username} created page: home")
+ expect(page).to have_content('updated home')
+ end
+
+ it 'does not show the "Edit" button' do
+ visit(project_wiki_path(project, wiki_page, version_id: wiki_page.versions.last.id))
+
+ expect(page).not_to have_selector('a.btn', text: 'Edit')
+ end
end
- it 'does not show the "Edit" button' do
- visit(project_wiki_path(project, wiki_page, version_id: wiki_page.versions.last.id))
+ it 'opens a default wiki page', :js do
+ visit(project_path(project))
- expect(page).not_to have_selector('a.btn', text: 'Edit')
+ find('.shortcuts-wiki').click
+
+ expect(page).to have_content('Home · Create Page')
end
end
- it 'opens a default wiki page', :js do
- visit(project_path(project))
-
- find('.shortcuts-wiki').click
+ context 'when Gitaly is enabled' do
+ it_behaves_like 'wiki page user view'
+ end
- expect(page).to have_content('Home · Create Page')
+ context 'when Gitaly is disabled', :skip_gitaly_mock do
+ it_behaves_like 'wiki page user view'
end
end
diff --git a/spec/features/variables_spec.rb b/spec/features/variables_spec.rb
deleted file mode 100644
index 79ca2b4bb4a..00000000000
--- a/spec/features/variables_spec.rb
+++ /dev/null
@@ -1,145 +0,0 @@
-require 'spec_helper'
-
-describe 'Project variables', :js do
- let(:user) { create(:user) }
- let(:project) { create(:project) }
- let(:variable) { create(:ci_variable, key: 'test_key', value: 'test value') }
-
- before do
- sign_in(user)
- project.add_master(user)
- project.variables << variable
-
- visit project_settings_ci_cd_path(project)
- end
-
- it 'shows list of variables' do
- page.within('.variables-table') do
- expect(page).to have_content(variable.key)
- end
- end
-
- it 'adds new secret variable' do
- fill_in('variable_key', with: 'key')
- fill_in('variable_value', with: 'key value')
- click_button('Add new variable')
-
- expect(page).to have_content('Variable was successfully created.')
- page.within('.variables-table') do
- expect(page).to have_content('key')
- expect(page).to have_content('No')
- end
- end
-
- it 'adds empty variable' do
- fill_in('variable_key', with: 'new_key')
- fill_in('variable_value', with: '')
- click_button('Add new variable')
-
- expect(page).to have_content('Variable was successfully created.')
- page.within('.variables-table') do
- expect(page).to have_content('new_key')
- end
- end
-
- it 'adds new protected variable' do
- fill_in('variable_key', with: 'key')
- fill_in('variable_value', with: 'value')
- check('Protected')
- click_button('Add new variable')
-
- expect(page).to have_content('Variable was successfully created.')
- page.within('.variables-table') do
- expect(page).to have_content('key')
- expect(page).to have_content('Yes')
- end
- end
-
- it 'reveals and hides new variable' do
- fill_in('variable_key', with: 'key')
- fill_in('variable_value', with: 'key value')
- click_button('Add new variable')
-
- page.within('.variables-table') do
- expect(page).to have_content('key')
- expect(page).to have_content('******')
- end
-
- click_button('Reveal values')
-
- page.within('.variables-table') do
- expect(page).to have_content('key')
- expect(page).to have_content('key value')
- end
-
- click_button('Hide values')
-
- page.within('.variables-table') do
- expect(page).to have_content('key')
- expect(page).to have_content('******')
- end
- end
-
- it 'deletes variable' do
- page.within('.variables-table') do
- accept_confirm { click_on 'Remove' }
- end
-
- expect(page).not_to have_selector('variables-table')
- end
-
- it 'edits variable' do
- page.within('.variables-table') do
- click_on 'Update'
- end
-
- expect(page).to have_content('Update variable')
- fill_in('variable_key', with: 'key')
- fill_in('variable_value', with: 'key value')
- click_button('Save variable')
-
- expect(page).to have_content('Variable was successfully updated.')
- expect(project.variables(true).first.value).to eq('key value')
- end
-
- it 'edits variable with empty value' do
- page.within('.variables-table') do
- click_on 'Update'
- end
-
- expect(page).to have_content('Update variable')
- fill_in('variable_value', with: '')
- click_button('Save variable')
-
- expect(page).to have_content('Variable was successfully updated.')
- expect(project.variables(true).first.value).to eq('')
- end
-
- it 'edits variable to be protected' do
- page.within('.variables-table') do
- click_on 'Update'
- end
-
- expect(page).to have_content('Update variable')
- check('Protected')
- click_button('Save variable')
-
- expect(page).to have_content('Variable was successfully updated.')
- expect(project.variables(true).first).to be_protected
- end
-
- it 'edits variable to be unprotected' do
- project.variables.first.update(protected: true)
-
- page.within('.variables-table') do
- click_on 'Update'
- end
-
- expect(page).to have_content('Update variable')
- uncheck('Protected')
- click_button('Save variable')
-
- expect(page).to have_content('Variable was successfully updated.')
- expect(project.variables(true).first).not_to be_protected
- end
-end
diff --git a/spec/finders/snippets_finder_spec.rb b/spec/finders/snippets_finder_spec.rb
index 0a018d2b417..54a07eccaba 100644
--- a/spec/finders/snippets_finder_spec.rb
+++ b/spec/finders/snippets_finder_spec.rb
@@ -1,57 +1,8 @@
require 'spec_helper'
describe SnippetsFinder do
- let(:user) { create :user }
- let(:user1) { create :user }
- let(:group) { create :group, :public }
-
- let(:project1) { create(:project, :public, group: group) }
- let(:project2) { create(:project, :private, group: group) }
-
- context 'all snippets visible to a user' do
- let!(:snippet1) { create(:personal_snippet, :private) }
- let!(:snippet2) { create(:personal_snippet, :internal) }
- let!(:snippet3) { create(:personal_snippet, :public) }
- let!(:project_snippet1) { create(:project_snippet, :private) }
- let!(:project_snippet2) { create(:project_snippet, :internal) }
- let!(:project_snippet3) { create(:project_snippet, :public) }
-
- it "returns all private and internal snippets" do
- snippets = described_class.new(user, scope: :all).execute
- expect(snippets).to include(snippet2, snippet3, project_snippet2, project_snippet3)
- expect(snippets).not_to include(snippet1, project_snippet1)
- end
-
- it "returns all public snippets" do
- snippets = described_class.new(nil, scope: :all).execute
- expect(snippets).to include(snippet3, project_snippet3)
- expect(snippets).not_to include(snippet1, snippet2, project_snippet1, project_snippet2)
- end
-
- it "returns all public and internal snippets for normal user" do
- snippets = described_class.new(user).execute
-
- expect(snippets).to include(snippet2, snippet3, project_snippet2, project_snippet3)
- expect(snippets).not_to include(snippet1, project_snippet1)
- end
-
- it "returns all public snippets for non authorized user" do
- snippets = described_class.new(nil).execute
-
- expect(snippets).to include(snippet3, project_snippet3)
- expect(snippets).not_to include(snippet1, snippet2, project_snippet1, project_snippet2)
- end
-
- it "returns all public and authored snippets for external user" do
- external_user = create(:user, :external)
- authored_snippet = create(:personal_snippet, :internal, author: external_user)
-
- snippets = described_class.new(external_user).execute
-
- expect(snippets).to include(snippet3, project_snippet3, authored_snippet)
- expect(snippets).not_to include(snippet1, snippet2, project_snippet1, project_snippet2)
- end
- end
+ include Gitlab::Allowable
+ using RSpec::Parameterized::TableSyntax
context 'filter by visibility' do
let!(:snippet1) { create(:personal_snippet, :private) }
@@ -67,6 +18,7 @@ describe SnippetsFinder do
end
context 'filter by scope' do
+ let(:user) { create :user }
let!(:snippet1) { create(:personal_snippet, :private, author: user) }
let!(:snippet2) { create(:personal_snippet, :internal, author: user) }
let!(:snippet3) { create(:personal_snippet, :public, author: user) }
@@ -84,7 +36,7 @@ describe SnippetsFinder do
expect(snippets).not_to include(snippet2, snippet3)
end
- it "returns all snippets for 'are_interna;' scope" do
+ it "returns all snippets for 'are_internal' scope" do
snippets = described_class.new(user, scope: :are_internal).execute
expect(snippets).to include(snippet2)
@@ -100,6 +52,8 @@ describe SnippetsFinder do
end
context 'filter by author' do
+ let(:user) { create :user }
+ let(:user1) { create :user }
let!(:snippet1) { create(:personal_snippet, :private, author: user) }
let!(:snippet2) { create(:personal_snippet, :internal, author: user) }
let!(:snippet3) { create(:personal_snippet, :public, author: user) }
@@ -147,6 +101,10 @@ describe SnippetsFinder do
end
context 'filter by project' do
+ let(:user) { create :user }
+ let(:group) { create :group, :public }
+ let(:project1) { create(:project, :public, group: group) }
+
before do
@snippet1 = create(:project_snippet, :private, project: project1)
@snippet2 = create(:project_snippet, :internal, project: project1)
@@ -203,4 +161,9 @@ describe SnippetsFinder do
expect(snippets).to include(@snippet1)
end
end
+
+ describe "#execute" do
+ # Snippet visibility scenarios are covered in more detail in spec/support/snippet_visibility.rb
+ include_examples 'snippet visibility', described_class
+ end
end
diff --git a/spec/fixtures/api/schemas/deployment.json b/spec/fixtures/api/schemas/deployment.json
new file mode 100644
index 00000000000..536e6475c23
--- /dev/null
+++ b/spec/fixtures/api/schemas/deployment.json
@@ -0,0 +1,45 @@
+{
+ "additionalProperties": false,
+ "properties": {
+ "created_at": {
+ "type": "string"
+ },
+ "id": {
+ "type": "integer"
+ },
+ "iid": {
+ "type": "integer"
+ },
+ "last?": {
+ "type": "boolean"
+ },
+ "ref": {
+ "additionalProperties": false,
+ "properties": {
+ "name": {
+ "type": "string"
+ }
+ },
+ "required": [
+ "name"
+ ],
+ "type": "object"
+ },
+ "sha": {
+ "type": "string"
+ },
+ "tag": {
+ "type": "boolean"
+ }
+ },
+ "required": [
+ "sha",
+ "created_at",
+ "iid",
+ "tag",
+ "last?",
+ "ref",
+ "id"
+ ],
+ "type": "object"
+}
diff --git a/spec/fixtures/api/schemas/deployments.json b/spec/fixtures/api/schemas/deployments.json
index 1112f23aab2..7bf50e4f859 100644
--- a/spec/fixtures/api/schemas/deployments.json
+++ b/spec/fixtures/api/schemas/deployments.json
@@ -3,49 +3,7 @@
"properties": {
"deployments": {
"items": {
- "additionalProperties": false,
- "properties": {
- "created_at": {
- "type": "string"
- },
- "id": {
- "type": "integer"
- },
- "iid": {
- "type": "integer"
- },
- "last?": {
- "type": "boolean"
- },
- "ref": {
- "additionalProperties": false,
- "properties": {
- "name": {
- "type": "string"
- }
- },
- "required": [
- "name"
- ],
- "type": "object"
- },
- "sha": {
- "type": "string"
- },
- "tag": {
- "type": "boolean"
- }
- },
- "required": [
- "sha",
- "created_at",
- "iid",
- "tag",
- "last?",
- "ref",
- "id"
- ],
- "type": "object"
+ "$ref": "deployment.json"
},
"minItems": 1,
"type": "array"
diff --git a/spec/fixtures/api/schemas/public_api/v4/blobs.json b/spec/fixtures/api/schemas/public_api/v4/blobs.json
new file mode 100644
index 00000000000..a812815838f
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/blobs.json
@@ -0,0 +1,19 @@
+{
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties" : {
+ "basename": { "type": "string" },
+ "data": { "type": "string" },
+ "filename": { "type": ["string"] },
+ "id": { "type": ["string", "null"] },
+ "project_id": { "type": "integer" },
+ "ref": { "type": "string" },
+ "startline": { "type": "integer" }
+ },
+ "required": [
+ "basename", "data", "filename", "id", "ref", "startline", "project_id"
+ ],
+ "additionalProperties": false
+ }
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/commit/detail.json b/spec/fixtures/api/schemas/public_api/v4/commit/detail.json
index 88a3cad62f6..477e776a804 100644
--- a/spec/fixtures/api/schemas/public_api/v4/commit/detail.json
+++ b/spec/fixtures/api/schemas/public_api/v4/commit/detail.json
@@ -4,9 +4,9 @@
{ "$ref": "basic.json" },
{
"required" : [
- "stats",
"status",
- "last_pipeline"
+ "last_pipeline",
+ "project_id"
],
"properties": {
"stats": { "$ref": "../commit_stats.json" },
@@ -16,7 +16,8 @@
{ "type": "null" },
{ "$ref": "../pipeline/basic.json" }
]
- }
+ },
+ "project_id": { "type": "integer" }
}
}
]
diff --git a/spec/fixtures/api/schemas/public_api/v4/commits_details.json b/spec/fixtures/api/schemas/public_api/v4/commits_details.json
new file mode 100644
index 00000000000..1f5b1ad86ef
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/commits_details.json
@@ -0,0 +1,4 @@
+{
+ "type": "array",
+ "items": { "$ref": "commit/detail.json" }
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/issue.json b/spec/fixtures/api/schemas/public_api/v4/issue.json
new file mode 100644
index 00000000000..147f53239e0
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/issue.json
@@ -0,0 +1,96 @@
+{
+ "type": "object",
+ "properties" : {
+ "id": { "type": "integer" },
+ "iid": { "type": "integer" },
+ "project_id": { "type": "integer" },
+ "title": { "type": "string" },
+ "description": { "type": ["string", "null"] },
+ "state": { "type": "string" },
+ "discussion_locked": { "type": ["boolean", "null"] },
+ "closed_at": { "type": "date" },
+ "created_at": { "type": "date" },
+ "updated_at": { "type": "date" },
+ "labels": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "milestone": {
+ "type": ["object", "null"],
+ "properties": {
+ "id": { "type": "integer" },
+ "iid": { "type": "integer" },
+ "project_id": { "type": ["integer", "null"] },
+ "group_id": { "type": ["integer", "null"] },
+ "title": { "type": "string" },
+ "description": { "type": ["string", "null"] },
+ "state": { "type": "string" },
+ "created_at": { "type": "date" },
+ "updated_at": { "type": "date" },
+ "due_date": { "type": "date" },
+ "start_date": { "type": "date" }
+ },
+ "additionalProperties": false
+ },
+ "assignees": {
+ "type": "array",
+ "items": {
+ "type": ["object", "null"],
+ "properties": {
+ "name": { "type": "string" },
+ "username": { "type": "string" },
+ "id": { "type": "integer" },
+ "state": { "type": "string" },
+ "avatar_url": { "type": "uri" },
+ "web_url": { "type": "uri" }
+ },
+ "additionalProperties": false
+ }
+ },
+ "assignee": {
+ "type": ["object", "null"],
+ "properties": {
+ "name": { "type": "string" },
+ "username": { "type": "string" },
+ "id": { "type": "integer" },
+ "state": { "type": "string" },
+ "avatar_url": { "type": "uri" },
+ "web_url": { "type": "uri" }
+ },
+ "additionalProperties": false
+ },
+ "author": {
+ "type": "object",
+ "properties": {
+ "name": { "type": "string" },
+ "username": { "type": "string" },
+ "id": { "type": "integer" },
+ "state": { "type": "string" },
+ "avatar_url": { "type": "uri" },
+ "web_url": { "type": "uri" }
+ },
+ "additionalProperties": false
+ },
+ "user_notes_count": { "type": "integer" },
+ "upvotes": { "type": "integer" },
+ "downvotes": { "type": "integer" },
+ "due_date": { "type": ["date", "null"] },
+ "confidential": { "type": "boolean" },
+ "web_url": { "type": "uri" },
+ "time_stats": {
+ "time_estimate": { "type": "integer" },
+ "total_time_spent": { "type": "integer" },
+ "human_time_estimate": { "type": ["string", "null"] },
+ "human_total_time_spent": { "type": ["string", "null"] }
+ }
+ },
+ "required": [
+ "id", "iid", "project_id", "title", "description",
+ "state", "created_at", "updated_at", "labels",
+ "milestone", "assignees", "author", "user_notes_count",
+ "upvotes", "downvotes", "due_date", "confidential",
+ "web_url"
+ ]
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/issues.json b/spec/fixtures/api/schemas/public_api/v4/issues.json
index 5c08dbc3b96..c76806705e8 100644
--- a/spec/fixtures/api/schemas/public_api/v4/issues.json
+++ b/spec/fixtures/api/schemas/public_api/v4/issues.json
@@ -3,98 +3,7 @@
"items": {
"type": "object",
"properties" : {
- "id": { "type": "integer" },
- "iid": { "type": "integer" },
- "project_id": { "type": "integer" },
- "title": { "type": "string" },
- "description": { "type": ["string", "null"] },
- "state": { "type": "string" },
- "discussion_locked": { "type": ["boolean", "null"] },
- "closed_at": { "type": "date" },
- "created_at": { "type": "date" },
- "updated_at": { "type": "date" },
- "labels": {
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- "milestone": {
- "type": "object",
- "properties": {
- "id": { "type": "integer" },
- "iid": { "type": "integer" },
- "project_id": { "type": ["integer", "null"] },
- "group_id": { "type": ["integer", "null"] },
- "title": { "type": "string" },
- "description": { "type": ["string", "null"] },
- "state": { "type": "string" },
- "created_at": { "type": "date" },
- "updated_at": { "type": "date" },
- "due_date": { "type": "date" },
- "start_date": { "type": "date" }
- },
- "additionalProperties": false
- },
- "assignees": {
- "type": "array",
- "items": {
- "type": ["object", "null"],
- "properties": {
- "name": { "type": "string" },
- "username": { "type": "string" },
- "id": { "type": "integer" },
- "state": { "type": "string" },
- "avatar_url": { "type": "uri" },
- "web_url": { "type": "uri" }
- },
- "additionalProperties": false
- }
- },
- "assignee": {
- "type": ["object", "null"],
- "properties": {
- "name": { "type": "string" },
- "username": { "type": "string" },
- "id": { "type": "integer" },
- "state": { "type": "string" },
- "avatar_url": { "type": "uri" },
- "web_url": { "type": "uri" }
- },
- "additionalProperties": false
- },
- "author": {
- "type": "object",
- "properties": {
- "name": { "type": "string" },
- "username": { "type": "string" },
- "id": { "type": "integer" },
- "state": { "type": "string" },
- "avatar_url": { "type": "uri" },
- "web_url": { "type": "uri" }
- },
- "additionalProperties": false
- },
- "user_notes_count": { "type": "integer" },
- "upvotes": { "type": "integer" },
- "downvotes": { "type": "integer" },
- "due_date": { "type": ["date", "null"] },
- "confidential": { "type": "boolean" },
- "web_url": { "type": "uri" },
- "time_stats": {
- "time_estimate": { "type": "integer" },
- "total_time_spent": { "type": "integer" },
- "human_time_estimate": { "type": ["string", "null"] },
- "human_total_time_spent": { "type": ["string", "null"] }
- }
- },
- "required": [
- "id", "iid", "project_id", "title", "description",
- "state", "created_at", "updated_at", "labels",
- "milestone", "assignees", "author", "user_notes_count",
- "upvotes", "downvotes", "due_date", "confidential",
- "web_url"
- ],
- "additionalProperties": false
+ "$ref": "./issue.json"
+ }
}
}
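Note: the collection schema above now delegates to issue.json via "$ref" instead of duplicating every property. A hypothetical request spec (not part of this diff) could exercise either fixture through the match_response_schema matcher along these lines; `project` and `user` are assumed to be records defined elsewhere in the spec:

    it 'returns issues matching the public API v4 schema' do
      get api("/projects/#{project.id}/issues", user)

      expect(response).to have_gitlab_http_status(200)
      expect(response).to match_response_schema('public_api/v4/issues')
    end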
diff --git a/spec/fixtures/api/schemas/public_api/v4/merge_requests.json b/spec/fixtures/api/schemas/public_api/v4/merge_requests.json
index 034509091a5..e86176e5316 100644
--- a/spec/fixtures/api/schemas/public_api/v4/merge_requests.json
+++ b/spec/fixtures/api/schemas/public_api/v4/merge_requests.json
@@ -28,7 +28,7 @@
"additionalProperties": false
},
"assignee": {
- "type": "object",
+ "type": ["object", "null"],
"properties": {
"name": { "type": "string" },
"username": { "type": "string" },
diff --git a/spec/fixtures/api/schemas/public_api/v4/milestones.json b/spec/fixtures/api/schemas/public_api/v4/milestones.json
new file mode 100644
index 00000000000..c3c42b6ee60
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/milestones.json
@@ -0,0 +1,24 @@
+{
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties" : {
+ "id": { "type": "integer" },
+ "iid": { "type": "integer" },
+ "project_id": { "type": ["integer", "null"] },
+ "group_id": { "type": ["integer", "null"] },
+ "title": { "type": "string" },
+ "description": { "type": ["string", "null"] },
+ "state": { "type": "string" },
+ "created_at": { "type": "date" },
+ "updated_at": { "type": "date" },
+ "start_date": { "type": "date" },
+ "due_date": { "type": "date" }
+ },
+ "required": [
+ "id", "iid", "title", "description", "state",
+ "state", "created_at", "updated_at", "start_date", "due_date"
+ ],
+ "additionalProperties": false
+ }
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/notes.json b/spec/fixtures/api/schemas/public_api/v4/notes.json
new file mode 100644
index 00000000000..6525f7c2c80
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/notes.json
@@ -0,0 +1,34 @@
+{
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties" : {
+ "id": { "type": "integer" },
+ "body": { "type": "string" },
+ "attachment": { "type": ["string", "null"] },
+ "author": {
+ "type": "object",
+ "properties": {
+ "name": { "type": "string" },
+ "username": { "type": "string" },
+ "id": { "type": "integer" },
+ "state": { "type": "string" },
+ "avatar_url": { "type": "uri" },
+ "web_url": { "type": "uri" }
+ },
+ "additionalProperties": false
+ },
+ "created_at": { "type": "date" },
+ "updated_at": { "type": "date" },
+ "system": { "type": "boolean" },
+ "noteable_id": { "type": "integer" },
+ "noteable_iid": { "type": "integer" },
+ "noteable_type": { "type": "string" }
+ },
+ "required": [
+ "id", "body", "attachment", "author", "created_at", "updated_at",
+ "system", "noteable_id", "noteable_type"
+ ],
+ "additionalProperties": false
+ }
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/projects.json b/spec/fixtures/api/schemas/public_api/v4/projects.json
new file mode 100644
index 00000000000..d89eeea89a5
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/projects.json
@@ -0,0 +1,36 @@
+{
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties" : {
+ "id": { "type": "integer" },
+ "name": { "type": "string" },
+ "name_with_namespace": { "type": "string" },
+ "description": { "type": ["string", "null"] },
+ "path": { "type": "string" },
+ "path_with_namespace": { "type": "string" },
+ "created_at": { "type": "date" },
+ "default_branch": { "type": ["string", "null"] },
+ "tag_list": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "ssh_url_to_repo": { "type": "string" },
+ "http_url_to_repo": { "type": "string" },
+ "web_url": { "type": "string" },
+ "avatar_url": { "type": ["string", "null"] },
+ "star_count": { "type": "integer" },
+ "forks_count": { "type": "integer" },
+ "last_activity_at": { "type": "date" }
+ },
+ "required": [
+ "id", "name", "name_with_namespace", "description", "path",
+ "path_with_namespace", "created_at", "default_branch", "tag_list",
+ "ssh_url_to_repo", "http_url_to_repo", "web_url", "avatar_url",
+ "star_count", "last_activity_at"
+ ],
+ "additionalProperties": false
+ }
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/snippets.json b/spec/fixtures/api/schemas/public_api/v4/snippets.json
new file mode 100644
index 00000000000..e37e9704649
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/snippets.json
@@ -0,0 +1,33 @@
+{
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties" : {
+ "id": { "type": "integer" },
+ "project_id": { "type": ["integer", "null"] },
+ "title": { "type": "string" },
+ "file_name": { "type": ["string", "null"] },
+ "description": { "type": ["string", "null"] },
+ "web_url": { "type": "string" },
+ "created_at": { "type": "date" },
+ "updated_at": { "type": "date" },
+ "author": {
+ "type": "object",
+ "properties": {
+ "name": { "type": "string" },
+ "username": { "type": "string" },
+ "id": { "type": "integer" },
+ "state": { "type": "string" },
+ "avatar_url": { "type": "uri" },
+ "web_url": { "type": "uri" }
+ },
+ "additionalProperties": false
+ }
+ },
+ "required": [
+ "id", "title", "file_name", "description", "web_url",
+ "created_at", "updated_at", "author"
+ ],
+ "additionalProperties": false
+ }
+}
diff --git a/spec/fixtures/api/schemas/variable.json b/spec/fixtures/api/schemas/variable.json
new file mode 100644
index 00000000000..6f6b044115b
--- /dev/null
+++ b/spec/fixtures/api/schemas/variable.json
@@ -0,0 +1,17 @@
+{
+ "type": "object",
+ "required": [
+ "id",
+ "key",
+ "value",
+ "protected"
+ ],
+ "properties": {
+ "id": { "type": "integer" },
+ "key": { "type": "string" },
+ "value": { "type": "string" },
+ "protected": { "type": "boolean" },
+ "environment_scope": { "type": "string", "optional": true }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/variables.json b/spec/fixtures/api/schemas/variables.json
new file mode 100644
index 00000000000..8002f39a7b8
--- /dev/null
+++ b/spec/fixtures/api/schemas/variables.json
@@ -0,0 +1,11 @@
+{
+ "type": "object",
+ "required": ["variables"],
+ "properties": {
+ "variables": {
+ "type": "array",
+ "items": { "$ref": "variable.json" }
+ }
+ },
+ "additionalProperties": false
+}
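Note: variable.json describes a single CI variable and variables.json wraps a list of them under a top-level "variables" key. As a rough, non-authoritative sketch, a controller spec could assert a JSON response against these fixtures like this (the :show action and params are assumptions, not taken from this hunk):

    it 'renders the CI variables as JSON matching the schema' do
      get :show, namespace_id: project.namespace.to_param, project_id: project, format: :json

      expect(response).to have_gitlab_http_status(200)
      expect(response).to match_response_schema('variables')
    end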
diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb
index f7a4a7afced..43cb0dfe163 100644
--- a/spec/helpers/application_helper_spec.rb
+++ b/spec/helpers/application_helper_spec.rb
@@ -63,13 +63,29 @@ describe ApplicationHelper do
end
end
- describe 'avatar_icon' do
+ describe 'avatar_icon_for' do
+ let!(:user) { create(:user, avatar: File.open(uploaded_image_temp_path), email: 'bar@example.com') }
+ let(:email) { 'foo@example.com' }
+ let!(:another_user) { create(:user, avatar: File.open(uploaded_image_temp_path), email: email) }
+
+ it 'prefers the user to retrieve the avatar_url' do
+ expect(helper.avatar_icon_for(user, email).to_s)
+ .to eq(user.avatar.url)
+ end
+
+ it 'falls back to email lookup if no user given' do
+ expect(helper.avatar_icon_for(nil, email).to_s)
+ .to eq(another_user.avatar.url)
+ end
+ end
+
+ describe 'avatar_icon_for_email' do
let(:user) { create(:user, avatar: File.open(uploaded_image_temp_path)) }
context 'using an email' do
context 'when there is a matching user' do
it 'returns a relative URL for the avatar' do
- expect(helper.avatar_icon(user.email).to_s)
+ expect(helper.avatar_icon_for_email(user.email).to_s)
.to eq(user.avatar.url)
end
end
@@ -78,17 +94,37 @@ describe ApplicationHelper do
it 'calls gravatar_icon' do
expect(helper).to receive(:gravatar_icon).with('foo@example.com', 20, 2)
- helper.avatar_icon('foo@example.com', 20, 2)
+ helper.avatar_icon_for_email('foo@example.com', 20, 2)
+ end
+ end
+
+ context 'without an email passed' do
+ it 'calls gravatar_icon' do
+ expect(helper).to receive(:gravatar_icon).with(nil, 20, 2)
+
+ helper.avatar_icon_for_email(nil, 20, 2)
end
end
end
+ end
+
+ describe 'avatar_icon_for_user' do
+ let(:user) { create(:user, avatar: File.open(uploaded_image_temp_path)) }
- describe 'using a user' do
+ context 'with a user object passed' do
it 'returns a relative URL for the avatar' do
- expect(helper.avatar_icon(user).to_s)
+ expect(helper.avatar_icon_for_user(user).to_s)
.to eq(user.avatar.url)
end
end
+
+ context 'without a user object passed' do
+ it 'calls gravatar_icon' do
+ expect(helper).to receive(:gravatar_icon).with(nil, 20, 2)
+
+ helper.avatar_icon_for_user(nil, 20, 2)
+ end
+ end
end
describe 'gravatar_icon' do
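Note: the examples above split the old avatar_icon helper into avatar_icon_for, avatar_icon_for_user and avatar_icon_for_email. A minimal sketch of the delegation they exercise (an illustration only, not the helper code, which is outside this diff):

    # Prefer the user record; fall back to an email lookup when no user is given.
    def avatar_icon_for(user = nil, email = nil, size = nil, scale = 2)
      if user
        avatar_icon_for_user(user, size, scale)
      else
        avatar_icon_for_email(email, size, scale)
      end
    end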
diff --git a/spec/helpers/auto_devops_helper_spec.rb b/spec/helpers/auto_devops_helper_spec.rb
index 5e272af6073..1950c2b129b 100644
--- a/spec/helpers/auto_devops_helper_spec.rb
+++ b/spec/helpers/auto_devops_helper_spec.rb
@@ -82,4 +82,39 @@ describe AutoDevopsHelper do
it { is_expected.to eq(false) }
end
end
+
+ describe '.auto_devops_warning_message' do
+ subject { helper.auto_devops_warning_message(project) }
+
+ context 'when the service is missing' do
+ before do
+ allow(helper).to receive(:missing_auto_devops_service?).and_return(true)
+ end
+
+ context 'when the domain is missing' do
+ before do
+ allow(helper).to receive(:missing_auto_devops_domain?).and_return(true)
+ end
+
+ it { is_expected.to match(/Auto Review Apps and Auto Deploy need a domain name and a .* to work correctly./) }
+ end
+
+ context 'when the domain is not missing' do
+ before do
+ allow(helper).to receive(:missing_auto_devops_domain?).and_return(false)
+ end
+
+ it { is_expected.to match(/Auto Review Apps and Auto Deploy need a .* to work correctly./) }
+ end
+ end
+
+ context 'when the domain is missing' do
+ before do
+ allow(helper).to receive(:missing_auto_devops_service?).and_return(false)
+ allow(helper).to receive(:missing_auto_devops_domain?).and_return(true)
+ end
+
+ it { is_expected.to eq('Auto Review Apps and Auto Deploy need a domain name to work correctly.') }
+ end
+ end
end
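Note: the new examples cover three warning variants. A hedged sketch of an auto_devops_warning_message helper that would satisfy them (the plain 'Kubernetes cluster' wording stands in for whatever link the real message renders; the predicate names come from the stubs above):

    def auto_devops_warning_message(project)
      missing_service = missing_auto_devops_service?(project)
      missing_domain = missing_auto_devops_domain?(project)

      if missing_service && missing_domain
        'Auto Review Apps and Auto Deploy need a domain name and a Kubernetes cluster to work correctly.'
      elsif missing_service
        'Auto Review Apps and Auto Deploy need a Kubernetes cluster to work correctly.'
      elsif missing_domain
        'Auto Review Apps and Auto Deploy need a domain name to work correctly.'
      end
    end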
diff --git a/spec/helpers/avatars_helper_spec.rb b/spec/helpers/avatars_helper_spec.rb
index f44e7ef6843..04c6d259135 100644
--- a/spec/helpers/avatars_helper_spec.rb
+++ b/spec/helpers/avatars_helper_spec.rb
@@ -29,7 +29,7 @@ describe AvatarsHelper do
is_expected.to eq tag(
:img,
alt: "#{user.name}'s avatar",
- src: avatar_icon(user, 16),
+ src: avatar_icon_for_user(user, 16),
data: { container: 'body' },
class: 'avatar s16 has-tooltip',
title: user.name
@@ -43,7 +43,7 @@ describe AvatarsHelper do
is_expected.to eq tag(
:img,
alt: "#{user.name}'s avatar",
- src: avatar_icon(user, 16),
+ src: avatar_icon_for_user(user, 16),
data: { container: 'body' },
class: "avatar s16 #{options[:css_class]} has-tooltip",
title: user.name
@@ -58,7 +58,7 @@ describe AvatarsHelper do
is_expected.to eq tag(
:img,
alt: "#{user.name}'s avatar",
- src: avatar_icon(user, options[:size]),
+ src: avatar_icon_for_user(user, options[:size]),
data: { container: 'body' },
class: "avatar s#{options[:size]} has-tooltip",
title: user.name
@@ -89,7 +89,7 @@ describe AvatarsHelper do
:img,
alt: "#{user.name}'s avatar",
src: LazyImageTagHelper.placeholder_image,
- data: { container: 'body', src: avatar_icon(user, 16) },
+ data: { container: 'body', src: avatar_icon_for_user(user, 16) },
class: "avatar s16 has-tooltip lazy",
title: user.name
)
@@ -104,7 +104,7 @@ describe AvatarsHelper do
is_expected.to eq tag(
:img,
alt: "#{user.name}'s avatar",
- src: avatar_icon(user, 16),
+ src: avatar_icon_for_user(user, 16),
data: { container: 'body' },
class: "avatar s16 has-tooltip",
title: user.name
@@ -119,7 +119,7 @@ describe AvatarsHelper do
is_expected.to eq tag(
:img,
alt: "#{user.name}'s avatar",
- src: avatar_icon(user, 16),
+ src: avatar_icon_for_user(user, 16),
class: "avatar s16",
title: user.name
)
@@ -137,7 +137,7 @@ describe AvatarsHelper do
is_expected.to eq tag(
:img,
alt: "#{user.name}'s avatar",
- src: avatar_icon(user, 16),
+ src: avatar_icon_for_user(user, 16),
data: { container: 'body' },
class: "avatar s16 has-tooltip",
title: user.name
@@ -149,7 +149,7 @@ describe AvatarsHelper do
is_expected.to eq tag(
:img,
alt: "#{options[:user_name]}'s avatar",
- src: avatar_icon(options[:user_email], 16),
+ src: avatar_icon_for_email(options[:user_email], 16),
data: { container: 'body' },
class: "avatar s16 has-tooltip",
title: options[:user_name]
diff --git a/spec/helpers/events_helper_spec.rb b/spec/helpers/events_helper_spec.rb
index 8a80b88da5d..fccde8b7eba 100644
--- a/spec/helpers/events_helper_spec.rb
+++ b/spec/helpers/events_helper_spec.rb
@@ -20,5 +20,9 @@ describe EventsHelper do
it 'handles nil values' do
expect(helper.event_commit_title(nil)).to eq('')
end
+
+ it 'does not escape HTML entities' do
+ expect(helper.event_commit_title("foo & bar")).to eq("foo & bar")
+ end
end
end
diff --git a/spec/helpers/graph_helper_spec.rb b/spec/helpers/graph_helper_spec.rb
index 400635abdde..1f8a38dc697 100644
--- a/spec/helpers/graph_helper_spec.rb
+++ b/spec/helpers/graph_helper_spec.rb
@@ -7,10 +7,10 @@ describe GraphHelper do
let(:graph) { Network::Graph.new(project, 'master', commit, '') }
it 'filters out refs used by GitLab' do
- allow(commit).to receive(:ref_names).and_return(['refs/merge-requests/abc', 'master', 'refs/tmp/xyz'])
self.instance_variable_set(:@graph, graph)
- refs = get_refs(project.repository, commit)
- expect(refs).to eq('master')
+ refs = refs(project.repository, commit)
+
+ expect(refs).to match('master')
end
end
end
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index c0251bf7dc0..b67fee2fcc0 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -215,7 +215,7 @@ describe ProjectsHelper do
let(:expected) { double }
before do
- expect(helper).to receive(:avatar_icon).with(user, 16).and_return(expected)
+ expect(helper).to receive(:avatar_icon_for_user).with(user, 16).and_return(expected)
end
it 'returns image tag for member avatar' do
diff --git a/spec/javascripts/awards_handler_spec.js b/spec/javascripts/awards_handler_spec.js
index 268b5b83b73..1c1b3f3ced3 100644
--- a/spec/javascripts/awards_handler_spec.js
+++ b/spec/javascripts/awards_handler_spec.js
@@ -79,7 +79,7 @@ import '~/lib/utils/common_utils';
return expect($emojiMenu.length).toBe(1);
});
});
- return it('should remove emoji menu when body is clicked', function(done) {
+ it('should remove emoji menu when body is clicked', function(done) {
$('.js-add-award').eq(0).click();
return lazyAssert(done, function() {
var $emojiMenu;
@@ -90,6 +90,17 @@ import '~/lib/utils/common_utils';
return expect($('.js-awards-block.current').length).toBe(0);
});
});
+ it('should not remove emoji menu when search is clicked', function(done) {
+ $('.js-add-award').eq(0).click();
+ return lazyAssert(done, function() {
+ var $emojiMenu;
+ $emojiMenu = $('.emoji-menu');
+ $('.emoji-search').click();
+ expect($emojiMenu.length).toBe(1);
+ expect($emojiMenu.hasClass('is-visible')).toBe(true);
+ return expect($('.js-awards-block.current').length).toBe(1);
+ });
+ });
});
describe('::addAwardToEmojiBar', function() {
it('should add emoji to votes block', function() {
diff --git a/spec/javascripts/boards/board_list_spec.js b/spec/javascripts/boards/board_list_spec.js
index a5fcb10b9dd..03df6c06691 100644
--- a/spec/javascripts/boards/board_list_spec.js
+++ b/spec/javascripts/boards/board_list_spec.js
@@ -5,7 +5,7 @@ import Vue from 'vue';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import Sortable from 'vendor/Sortable';
-import BoardList from '~/boards/components/board_list';
+import BoardList from '~/boards/components/board_list.vue';
import eventHub from '~/boards/eventhub';
import '~/boards/mixins/sortable_default_options';
import '~/boards/models/issue';
diff --git a/spec/javascripts/ci_variable_list/ajax_variable_list_spec.js b/spec/javascripts/ci_variable_list/ajax_variable_list_spec.js
new file mode 100644
index 00000000000..ee457a9c48c
--- /dev/null
+++ b/spec/javascripts/ci_variable_list/ajax_variable_list_spec.js
@@ -0,0 +1,213 @@
+import $ from 'jquery';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import AjaxFormVariableList from '~/ci_variable_list/ajax_variable_list';
+
+const VARIABLE_PATCH_ENDPOINT = 'http://test.host/frontend-fixtures/builds-project/variables';
+const HIDE_CLASS = 'hide';
+
+describe('AjaxFormVariableList', () => {
+ preloadFixtures('projects/ci_cd_settings.html.raw');
+ preloadFixtures('projects/ci_cd_settings_with_variables.html.raw');
+
+ let container;
+ let saveButton;
+ let errorBox;
+
+ let mock;
+ let ajaxVariableList;
+
+ beforeEach(() => {
+ loadFixtures('projects/ci_cd_settings.html.raw');
+ container = document.querySelector('.js-ci-variable-list-section');
+
+ mock = new MockAdapter(axios);
+
+ const ajaxVariableListEl = document.querySelector('.js-ci-variable-list-section');
+ saveButton = ajaxVariableListEl.querySelector('.js-secret-variables-save-button');
+ errorBox = container.querySelector('.js-ci-variable-error-box');
+ ajaxVariableList = new AjaxFormVariableList({
+ container,
+ formField: 'variables',
+ saveButton,
+ errorBox,
+ saveEndpoint: container.dataset.saveEndpoint,
+ });
+
+ spyOn(ajaxVariableList, 'updateRowsWithPersistedVariables').and.callThrough();
+ spyOn(ajaxVariableList.variableList, 'toggleEnableRow').and.callThrough();
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('onSaveClicked', () => {
+ it('shows loading spinner while waiting for the request', (done) => {
+ const loadingIcon = saveButton.querySelector('.js-secret-variables-save-loading-icon');
+
+ mock.onPatch(VARIABLE_PATCH_ENDPOINT).reply(() => {
+ expect(loadingIcon.classList.contains(HIDE_CLASS)).toEqual(false);
+
+ return [200, {}];
+ });
+
+ expect(loadingIcon.classList.contains(HIDE_CLASS)).toEqual(true);
+
+ ajaxVariableList.onSaveClicked()
+ .then(() => {
+ expect(loadingIcon.classList.contains(HIDE_CLASS)).toEqual(true);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('calls `updateRowsWithPersistedVariables` with the persisted variables', (done) => {
+ const variablesResponse = [{ id: 1, key: 'foo', value: 'bar' }];
+ mock.onPatch(VARIABLE_PATCH_ENDPOINT).reply(200, {
+ variables: variablesResponse,
+ });
+
+ ajaxVariableList.onSaveClicked()
+ .then(() => {
+ expect(ajaxVariableList.updateRowsWithPersistedVariables)
+ .toHaveBeenCalledWith(variablesResponse);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('hides any previous error box', (done) => {
+ mock.onPatch(VARIABLE_PATCH_ENDPOINT).reply(200);
+
+ expect(errorBox.classList.contains(HIDE_CLASS)).toEqual(true);
+
+ ajaxVariableList.onSaveClicked()
+ .then(() => {
+ expect(errorBox.classList.contains(HIDE_CLASS)).toEqual(true);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('disables remove buttons while waiting for the request', (done) => {
+ mock.onPatch(VARIABLE_PATCH_ENDPOINT).reply(() => {
+ expect(ajaxVariableList.variableList.toggleEnableRow).toHaveBeenCalledWith(false);
+
+ return [200, {}];
+ });
+
+ ajaxVariableList.onSaveClicked()
+ .then(() => {
+ expect(ajaxVariableList.variableList.toggleEnableRow).toHaveBeenCalledWith(true);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('hides secret values', (done) => {
+ mock.onPatch(VARIABLE_PATCH_ENDPOINT).reply(200, {});
+
+ const row = container.querySelector('.js-row:first-child');
+ const valueInput = row.querySelector('.js-ci-variable-input-value');
+ const valuePlaceholder = row.querySelector('.js-secret-value-placeholder');
+
+ valueInput.value = 'bar';
+ $(valueInput).trigger('input');
+
+ expect(valuePlaceholder.classList.contains(HIDE_CLASS)).toBe(true);
+ expect(valueInput.classList.contains(HIDE_CLASS)).toBe(false);
+
+ ajaxVariableList.onSaveClicked()
+ .then(() => {
+ expect(valuePlaceholder.classList.contains(HIDE_CLASS)).toBe(false);
+ expect(valueInput.classList.contains(HIDE_CLASS)).toBe(true);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('shows error box with validation errors', (done) => {
+ const validationError = 'some validation error';
+ mock.onPatch(VARIABLE_PATCH_ENDPOINT).reply(400, [
+ validationError,
+ ]);
+
+ expect(errorBox.classList.contains(HIDE_CLASS)).toEqual(true);
+
+ ajaxVariableList.onSaveClicked()
+ .then(() => {
+ expect(errorBox.classList.contains(HIDE_CLASS)).toEqual(false);
+ expect(errorBox.textContent.trim().replace(/\n+\s+/m, ' ')).toEqual(`Validation failed ${validationError}`);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('shows flash message when request fails', (done) => {
+ mock.onPatch(VARIABLE_PATCH_ENDPOINT).reply(500);
+
+ expect(errorBox.classList.contains(HIDE_CLASS)).toEqual(true);
+
+ ajaxVariableList.onSaveClicked()
+ .then(() => {
+ expect(errorBox.classList.contains(HIDE_CLASS)).toEqual(true);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
+ describe('updateRowsWithPersistedVariables', () => {
+ beforeEach(() => {
+ loadFixtures('projects/ci_cd_settings_with_variables.html.raw');
+ container = document.querySelector('.js-ci-variable-list-section');
+
+ const ajaxVariableListEl = document.querySelector('.js-ci-variable-list-section');
+ saveButton = ajaxVariableListEl.querySelector('.js-secret-variables-save-button');
+ errorBox = container.querySelector('.js-ci-variable-error-box');
+ ajaxVariableList = new AjaxFormVariableList({
+ container,
+ formField: 'variables',
+ saveButton,
+ errorBox,
+ saveEndpoint: container.dataset.saveEndpoint,
+ });
+ });
+
+ it('removes variable that was removed', () => {
+ expect(container.querySelectorAll('.js-row').length).toBe(3);
+
+ container.querySelector('.js-row-remove-button').click();
+
+ expect(container.querySelectorAll('.js-row').length).toBe(3);
+
+ ajaxVariableList.updateRowsWithPersistedVariables([]);
+
+ expect(container.querySelectorAll('.js-row').length).toBe(2);
+ });
+
+ it('updates new variable row with persisted ID', () => {
+ const row = container.querySelector('.js-row:last-child');
+ const idInput = row.querySelector('.js-ci-variable-input-id');
+ const keyInput = row.querySelector('.js-ci-variable-input-key');
+ const valueInput = row.querySelector('.js-ci-variable-input-value');
+
+ keyInput.value = 'foo';
+ $(keyInput).trigger('input');
+ valueInput.value = 'bar';
+ $(valueInput).trigger('input');
+
+ expect(idInput.value).toEqual('');
+
+ ajaxVariableList.updateRowsWithPersistedVariables([{
+ id: 3,
+ key: 'foo',
+ value: 'bar',
+ }]);
+
+ expect(idInput.value).toEqual('3');
+ expect(row.dataset.isPersisted).toEqual('true');
+ });
+ });
+});
diff --git a/spec/javascripts/ci_variable_list/ci_variable_list_spec.js b/spec/javascripts/ci_variable_list/ci_variable_list_spec.js
index 0170ab458d4..cac785fd3c6 100644
--- a/spec/javascripts/ci_variable_list/ci_variable_list_spec.js
+++ b/spec/javascripts/ci_variable_list/ci_variable_list_spec.js
@@ -1,9 +1,12 @@
import VariableList from '~/ci_variable_list/ci_variable_list';
import getSetTimeoutPromise from '../helpers/set_timeout_promise_helper';
+const HIDE_CLASS = 'hide';
+
describe('VariableList', () => {
preloadFixtures('pipeline_schedules/edit.html.raw');
preloadFixtures('pipeline_schedules/edit_with_variables.html.raw');
+ preloadFixtures('projects/ci_cd_settings.html.raw');
let $wrapper;
let variableList;
@@ -91,51 +94,22 @@ describe('VariableList', () => {
const $inputValue = $row.find('.js-ci-variable-input-value');
const $placeholder = $row.find('.js-secret-value-placeholder');
- expect($placeholder.hasClass('hide')).toBe(false);
- expect($inputValue.hasClass('hide')).toBe(true);
+ expect($placeholder.hasClass(HIDE_CLASS)).toBe(false);
+ expect($inputValue.hasClass(HIDE_CLASS)).toBe(true);
// Reveal values
$wrapper.find('.js-secret-value-reveal-button').click();
- expect($placeholder.hasClass('hide')).toBe(true);
- expect($inputValue.hasClass('hide')).toBe(false);
+ expect($placeholder.hasClass(HIDE_CLASS)).toBe(true);
+ expect($inputValue.hasClass(HIDE_CLASS)).toBe(false);
});
});
});
describe('with all inputs(key, value, protected)', () => {
beforeEach(() => {
- // This markup will be replaced with a fixture when we can render the
- // CI/CD settings page with the new dynamic variable list in https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/4110
- $wrapper = $(`<form class="js-variable-list">
- <ul>
- <li class="js-row">
- <div class="ci-variable-body-item">
- <input class="js-ci-variable-input-key" name="variables[variables_attributes][][key]">
- </div>
-
- <div class="ci-variable-body-item">
- <textarea class="js-ci-variable-input-value" name="variables[variables_attributes][][value]"></textarea>
- </div>
-
- <div class="ci-variable-body-item ci-variable-protected-item">
- <button type="button" class="js-project-feature-toggle project-feature-toggle">
- <input
- type="hidden"
- class="js-ci-variable-input-protected js-project-feature-toggle-input"
- name="variables[variables_attributes][][protected]"
- value="true"
- />
- </button>
- </div>
-
- <button type="button" class="js-row-remove-button"></button>
- </li>
- </ul>
- <button type="button" class="js-secret-value-reveal-button">
- Reveal values
- </button>
- </form>`);
+ loadFixtures('projects/ci_cd_settings.html.raw');
+ $wrapper = $('.js-ci-variable-list-section');
variableList = new VariableList({
container: $wrapper,
@@ -154,10 +128,88 @@ describe('VariableList', () => {
// Check for the correct default in the new row
const $protectedInput = $wrapper.find('.js-row:last-child').find('.js-ci-variable-input-protected');
- expect($protectedInput.val()).toBe('true');
+ expect($protectedInput.val()).toBe('false');
})
.then(done)
.catch(done.fail);
});
});
+
+ describe('toggleEnableRow method', () => {
+ beforeEach(() => {
+ loadFixtures('pipeline_schedules/edit_with_variables.html.raw');
+ $wrapper = $('.js-ci-variable-list-section');
+
+ variableList = new VariableList({
+ container: $wrapper,
+ formField: 'variables',
+ });
+ variableList.init();
+ });
+
+ it('should disable all key inputs', () => {
+ expect($wrapper.find('.js-ci-variable-input-key:not([disabled])').length).toBe(3);
+
+ variableList.toggleEnableRow(false);
+
+ expect($wrapper.find('.js-ci-variable-input-key[disabled]').length).toBe(3);
+ });
+
+ it('should disable all remove buttons', () => {
+ expect($wrapper.find('.js-row-remove-button:not([disabled])').length).toBe(3);
+
+ variableList.toggleEnableRow(false);
+
+ expect($wrapper.find('.js-row-remove-button[disabled]').length).toBe(3);
+ });
+
+ it('should enable all remove buttons', () => {
+ variableList.toggleEnableRow(false);
+ expect($wrapper.find('.js-row-remove-button[disabled]').length).toBe(3);
+
+ variableList.toggleEnableRow(true);
+
+ expect($wrapper.find('.js-row-remove-button:not([disabled])').length).toBe(3);
+ });
+
+ it('should enable all key inputs', () => {
+ variableList.toggleEnableRow(false);
+ expect($wrapper.find('.js-ci-variable-input-key[disabled]').length).toBe(3);
+
+ variableList.toggleEnableRow(true);
+
+ expect($wrapper.find('.js-ci-variable-input-key:not([disabled])').length).toBe(3);
+ });
+ });
+
+ describe('hideValues', () => {
+ beforeEach(() => {
+ loadFixtures('projects/ci_cd_settings.html.raw');
+ $wrapper = $('.js-ci-variable-list-section');
+
+ variableList = new VariableList({
+ container: $wrapper,
+ formField: 'variables',
+ });
+ variableList.init();
+ });
+
+ it('should hide value input and show placeholder stars', () => {
+ const $row = $wrapper.find('.js-row');
+ const $inputValue = $row.find('.js-ci-variable-input-value');
+ const $placeholder = $row.find('.js-secret-value-placeholder');
+
+ $row.find('.js-ci-variable-input-value')
+ .val('foo')
+ .trigger('input');
+
+ expect($placeholder.hasClass(HIDE_CLASS)).toBe(true);
+ expect($inputValue.hasClass(HIDE_CLASS)).toBe(false);
+
+ variableList.hideValues();
+
+ expect($placeholder.hasClass(HIDE_CLASS)).toBe(false);
+ expect($inputValue.hasClass(HIDE_CLASS)).toBe(true);
+ });
+ });
});
diff --git a/spec/javascripts/commits_spec.js b/spec/javascripts/commits_spec.js
index 44ec9e4eabf..1daccc8dd02 100644
--- a/spec/javascripts/commits_spec.js
+++ b/spec/javascripts/commits_spec.js
@@ -4,6 +4,8 @@ import axios from '~/lib/utils/axios_utils';
import CommitsList from '~/commits';
describe('Commits List', () => {
+ let commitsList;
+
beforeEach(() => {
setFixtures(`
<form class="commits-search-form" action="/h5bp/html5-boilerplate/commits/master">
@@ -11,6 +13,7 @@ describe('Commits List', () => {
</form>
<ol id="commits-list"></ol>
`);
+ commitsList = new CommitsList(25);
});
it('should be defined', () => {
@@ -19,7 +22,7 @@ describe('Commits List', () => {
describe('processCommits', () => {
it('should join commit headers', () => {
- CommitsList.$contentList = $(`
+ commitsList.$contentList = $(`
<div>
<li class="commit-header" data-day="2016-09-20">
<span class="day">20 Sep, 2016</span>
@@ -39,7 +42,7 @@ describe('Commits List', () => {
// The last commit header should be removed
// since the previous one has the same data-day value.
- expect(CommitsList.processCommits(data).find('li.commit-header').length).toBe(0);
+ expect(commitsList.processCommits(data).find('li.commit-header').length).toBe(0);
});
});
@@ -48,8 +51,7 @@ describe('Commits List', () => {
let mock;
beforeEach(() => {
- CommitsList.init(25);
- CommitsList.searchField.val('');
+ commitsList.searchField.val('');
spyOn(history, 'replaceState').and.stub();
mock = new MockAdapter(axios);
@@ -66,11 +68,11 @@ describe('Commits List', () => {
});
it('should save the last search string', (done) => {
- CommitsList.searchField.val('GitLab');
- CommitsList.filterResults()
+ commitsList.searchField.val('GitLab');
+ commitsList.filterResults()
.then(() => {
expect(ajaxSpy).toHaveBeenCalled();
- expect(CommitsList.lastSearch).toEqual('GitLab');
+ expect(commitsList.lastSearch).toEqual('GitLab');
done();
})
@@ -78,10 +80,10 @@ describe('Commits List', () => {
});
it('should not make ajax call if the input does not change', (done) => {
- CommitsList.filterResults()
+ commitsList.filterResults()
.then(() => {
expect(ajaxSpy).not.toHaveBeenCalled();
- expect(CommitsList.lastSearch).toEqual('');
+ expect(commitsList.lastSearch).toEqual('');
done();
})
diff --git a/spec/javascripts/datetime_utility_spec.js b/spec/javascripts/datetime_utility_spec.js
index 2e5b65f5610..a8d09202154 100644
--- a/spec/javascripts/datetime_utility_spec.js
+++ b/spec/javascripts/datetime_utility_spec.js
@@ -105,4 +105,68 @@ describe('dateInWords', () => {
it('should return abbreviated month name', () => {
expect(datetimeUtility.dateInWords(date, true)).toEqual('Jul 1, 2016');
});
+
+ it('should return date in words without year', () => {
+ expect(datetimeUtility.dateInWords(date, true, true)).toEqual('Jul 1');
+ });
+});
+
+describe('monthInWords', () => {
+ const date = new Date('2017-01-20');
+
+ it('returns month name from provided date', () => {
+ expect(datetimeUtility.monthInWords(date)).toBe('January');
+ });
+
+ it('returns abbreviated month name from provided date', () => {
+ expect(datetimeUtility.monthInWords(date, true)).toBe('Jan');
+ });
+});
+
+describe('totalDaysInMonth', () => {
+ it('returns number of days in a month for given date', () => {
+ // 1st Feb, 2016 (leap year)
+ expect(datetimeUtility.totalDaysInMonth(new Date(2016, 1, 1))).toBe(29);
+
+ // 1st Feb, 2017
+ expect(datetimeUtility.totalDaysInMonth(new Date(2017, 1, 1))).toBe(28);
+
+ // 1st Jan, 2017
+ expect(datetimeUtility.totalDaysInMonth(new Date(2017, 0, 1))).toBe(31);
+ });
+});
+
+describe('getSundays', () => {
+ it('returns array of dates representing all Sundays of the month', () => {
+ // December, 2017 (it has 5 Sundays)
+ const dateOfSundays = [3, 10, 17, 24, 31];
+ const sundays = datetimeUtility.getSundays(new Date(2017, 11, 1));
+
+ expect(sundays.length).toBe(5);
+ sundays.forEach((sunday, index) => {
+ expect(sunday.getDate()).toBe(dateOfSundays[index]);
+ });
+ });
+});
+
+describe('getTimeframeWindow', () => {
+ it('returns array of dates representing a timeframe based on provided length and date', () => {
+ const date = new Date(2018, 0, 1);
+ const mockTimeframe = [
+ new Date(2017, 9, 1),
+ new Date(2017, 10, 1),
+ new Date(2017, 11, 1),
+ new Date(2018, 0, 1),
+ new Date(2018, 1, 1),
+ new Date(2018, 2, 31),
+ ];
+ const timeframe = datetimeUtility.getTimeframeWindow(6, date);
+
+ expect(timeframe.length).toBe(6);
+ timeframe.forEach((timeframeItem, index) => {
+ expect(timeframeItem.getFullYear() === mockTimeframe[index].getFullYear()).toBeTruthy();
+ expect(timeframeItem.getMonth() === mockTimeframe[index].getMonth()).toBeTruthy();
+ expect(timeframeItem.getDate() === mockTimeframe[index].getDate()).toBeTruthy();
+ });
+ });
});
diff --git a/spec/javascripts/droplab/drop_down_spec.js b/spec/javascripts/droplab/drop_down_spec.js
index 1225fe2cb66..896a04a1a07 100644
--- a/spec/javascripts/droplab/drop_down_spec.js
+++ b/spec/javascripts/droplab/drop_down_spec.js
@@ -1,8 +1,8 @@
import DropDown from '~/droplab/drop_down';
import utils from '~/droplab/utils';
-import { SELECTED_CLASS, IGNORE_CLASS } from '~/droplab/constants';
+import { SELECTED_CLASS } from '~/droplab/constants';
-describe('DropDown', function () {
+describe('DropLab DropDown', function () {
describe('class constructor', function () {
beforeEach(function () {
spyOn(DropDown.prototype, 'getItems');
@@ -128,93 +128,131 @@ describe('DropDown', function () {
beforeEach(function () {
this.classList = jasmine.createSpyObj('classList', ['contains']);
this.list = { dispatchEvent: () => {} };
- this.dropdown = { hide: () => {}, list: this.list, addSelectedClass: () => {} };
- this.event = { preventDefault: () => {}, target: { classList: this.classList } };
+ this.dropdown = {
+ hideOnClick: true,
+ hide: () => {},
+ list: this.list,
+ addSelectedClass: () => {},
+ };
+ this.event = {
+ preventDefault: () => {},
+ target: {
+ classList: this.classList,
+ closest: () => null,
+ },
+ };
this.customEvent = {};
- this.closestElement = {};
+ this.dummyListItem = document.createElement('li');
+ spyOn(this.event.target, 'closest').and.callFake((selector) => {
+ if (selector === 'li') {
+ return this.dummyListItem;
+ }
+
+ return null;
+ });
spyOn(this.dropdown, 'hide');
spyOn(this.dropdown, 'addSelectedClass');
spyOn(this.list, 'dispatchEvent');
spyOn(this.event, 'preventDefault');
spyOn(window, 'CustomEvent').and.returnValue(this.customEvent);
- spyOn(utils, 'closest').and.returnValues(this.closestElement, undefined);
this.classList.contains.and.returnValue(false);
+ });
+ it('should call event.target.closest', function () {
DropDown.prototype.clickEvent.call(this.dropdown, this.event);
- });
- it('should call utils.closest', function () {
- expect(utils.closest).toHaveBeenCalledWith(this.event.target, 'LI');
+ expect(this.event.target.closest).toHaveBeenCalledWith('.droplab-item-ignore');
+ expect(this.event.target.closest).toHaveBeenCalledWith('li');
});
it('should call addSelectedClass', function () {
- expect(this.dropdown.addSelectedClass).toHaveBeenCalledWith(this.closestElement);
+ DropDown.prototype.clickEvent.call(this.dropdown, this.event);
+
+ expect(this.dropdown.addSelectedClass).toHaveBeenCalledWith(this.dummyListItem);
});
it('should call .preventDefault', function () {
+ DropDown.prototype.clickEvent.call(this.dropdown, this.event);
+
expect(this.event.preventDefault).toHaveBeenCalled();
});
it('should call .hide', function () {
+ DropDown.prototype.clickEvent.call(this.dropdown, this.event);
+
expect(this.dropdown.hide).toHaveBeenCalled();
});
it('should construct CustomEvent', function () {
- expect(window.CustomEvent).toHaveBeenCalledWith('click.dl', jasmine.any(Object));
- });
+ DropDown.prototype.clickEvent.call(this.dropdown, this.event);
- it('should call .classList.contains checking for IGNORE_CLASS', function () {
- expect(this.classList.contains).toHaveBeenCalledWith(IGNORE_CLASS);
+ expect(window.CustomEvent).toHaveBeenCalledWith('click.dl', jasmine.any(Object));
});
it('should call .dispatchEvent with the customEvent', function () {
+ DropDown.prototype.clickEvent.call(this.dropdown, this.event);
+
expect(this.list.dispatchEvent).toHaveBeenCalledWith(this.customEvent);
});
describe('if the target is a UL element', function () {
beforeEach(function () {
- this.event = { preventDefault: () => {}, target: { tagName: 'UL', classList: this.classList } };
-
- spyOn(this.event, 'preventDefault');
- utils.closest.calls.reset();
+ this.event.target = document.createElement('ul');
- DropDown.prototype.clickEvent.call(this.dropdown, this.event);
+ spyOn(this.event.target, 'closest');
});
it('should return immediately', function () {
- expect(utils.closest).not.toHaveBeenCalled();
+ DropDown.prototype.clickEvent.call(this.dropdown, this.event);
+
+ expect(this.event.target.closest).not.toHaveBeenCalled();
+ expect(this.dropdown.addSelectedClass).not.toHaveBeenCalled();
});
});
- describe('if the target has the IGNORE_CLASS class', function () {
+ describe('if the target has the droplab-item-ignore class', function () {
beforeEach(function () {
- this.event = { preventDefault: () => {}, target: { tagName: 'LI', classList: this.classList } };
+ this.ignoredButton = document.createElement('button');
+ this.ignoredButton.classList.add('droplab-item-ignore');
+ this.event.target = this.ignoredButton;
- spyOn(this.event, 'preventDefault');
- this.classList.contains.and.returnValue(true);
- utils.closest.calls.reset();
+ spyOn(this.ignoredButton, 'closest').and.callThrough();
+ });
+ it('does not select element', function () {
DropDown.prototype.clickEvent.call(this.dropdown, this.event);
- });
- it('should return immediately', function () {
- expect(utils.closest).not.toHaveBeenCalled();
+ expect(this.ignoredButton.closest.calls.count()).toBe(1);
+ expect(this.ignoredButton.closest).toHaveBeenCalledWith('.droplab-item-ignore');
+ expect(this.dropdown.addSelectedClass).not.toHaveBeenCalled();
});
});
describe('if no selected element exists', function () {
beforeEach(function () {
this.event.preventDefault.calls.reset();
- this.clickEvent = DropDown.prototype.clickEvent.call(this.dropdown, this.event);
- });
-
- it('should return undefined', function () {
- expect(this.clickEvent).toBe(undefined);
+ this.dummyListItem = null;
});
it('should return before .preventDefault is called', function () {
+ DropDown.prototype.clickEvent.call(this.dropdown, this.event);
+
expect(this.event.preventDefault).not.toHaveBeenCalled();
+ expect(this.dropdown.addSelectedClass).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('if hideOnClick is false', () => {
+ beforeEach(function () {
+ this.dropdown.hideOnClick = false;
+ this.dropdown.hide.calls.reset();
+ });
+
+ it('should not call .hide', function () {
+ DropDown.prototype.clickEvent.call(this.dropdown, this.event);
+
+ expect(this.dropdown.hide).not.toHaveBeenCalled();
});
});
});
@@ -278,20 +316,23 @@ describe('DropDown', function () {
describe('addEvents', function () {
beforeEach(function () {
- this.list = { addEventListener: () => {} };
+ this.list = {
+ addEventListener: () => {},
+ querySelectorAll: () => [],
+ };
this.dropdown = {
list: this.list,
clickEvent: () => {},
closeDropdown: () => {},
eventWrapper: {},
};
+ });
+ it('should call .addEventListener', function () {
spyOn(this.list, 'addEventListener');
DropDown.prototype.addEvents.call(this.dropdown);
- });
- it('should call .addEventListener', function () {
expect(this.list.addEventListener).toHaveBeenCalledWith('click', jasmine.any(Function));
expect(this.list.addEventListener).toHaveBeenCalledWith('keyup', jasmine.any(Function));
});
diff --git a/spec/javascripts/filtered_search/components/recent_searches_dropdown_content_spec.js b/spec/javascripts/filtered_search/components/recent_searches_dropdown_content_spec.js
index 79447787fc9..4a516c517ef 100644
--- a/spec/javascripts/filtered_search/components/recent_searches_dropdown_content_spec.js
+++ b/spec/javascripts/filtered_search/components/recent_searches_dropdown_content_spec.js
@@ -2,7 +2,7 @@ import Vue from 'vue';
import eventHub from '~/filtered_search/event_hub';
import RecentSearchesDropdownContent from '~/filtered_search/components/recent_searches_dropdown_content';
-import '~/filtered_search/filtered_search_token_keys';
+import FilteredSearchTokenKeys from '~/filtered_search/filtered_search_token_keys';
const createComponent = (propsData) => {
const Component = Vue.extend(RecentSearchesDropdownContent);
@@ -19,14 +19,14 @@ const trimMarkupWhitespace = text => text.replace(/(\n|\s)+/gm, ' ').trim();
describe('RecentSearchesDropdownContent', () => {
const propsDataWithoutItems = {
items: [],
- allowedKeys: gl.FilteredSearchTokenKeys.getKeys(),
+ allowedKeys: FilteredSearchTokenKeys.getKeys(),
};
const propsDataWithItems = {
items: [
'foo',
'author:@root label:~foo bar',
],
- allowedKeys: gl.FilteredSearchTokenKeys.getKeys(),
+ allowedKeys: FilteredSearchTokenKeys.getKeys(),
};
let vm;
diff --git a/spec/javascripts/filtered_search/dropdown_user_spec.js b/spec/javascripts/filtered_search/dropdown_user_spec.js
index 02415485d19..f1e6119253e 100644
--- a/spec/javascripts/filtered_search/dropdown_user_spec.js
+++ b/spec/javascripts/filtered_search/dropdown_user_spec.js
@@ -3,6 +3,8 @@ import '~/filtered_search/filtered_search_tokenizer';
import '~/filtered_search/filtered_search_dropdown';
import '~/filtered_search/dropdown_user';
+import FilteredSearchTokenKeys from '~/filtered_search/filtered_search_token_keys';
+
describe('Dropdown User', () => {
describe('getSearchInput', () => {
let dropdownUser;
@@ -14,7 +16,7 @@ describe('Dropdown User', () => {
spyOn(gl.DropdownUtils, 'getSearchInput').and.callFake(() => {});
dropdownUser = new gl.DropdownUser({
- tokenKeys: gl.FilteredSearchTokenKeys,
+ tokenKeys: FilteredSearchTokenKeys,
});
});
diff --git a/spec/javascripts/filtered_search/dropdown_utils_spec.js b/spec/javascripts/filtered_search/dropdown_utils_spec.js
index b1b3d43f241..d6e1af105f1 100644
--- a/spec/javascripts/filtered_search/dropdown_utils_spec.js
+++ b/spec/javascripts/filtered_search/dropdown_utils_spec.js
@@ -1,6 +1,7 @@
import '~/filtered_search/dropdown_utils';
import '~/filtered_search/filtered_search_tokenizer';
import '~/filtered_search/filtered_search_dropdown_manager';
+import FilteredSearchTokenKeys from '~/filtered_search/filtered_search_token_keys';
import FilteredSearchSpecHelper from '../helpers/filtered_search_spec_helper';
describe('Dropdown Utils', () => {
@@ -137,7 +138,7 @@ describe('Dropdown Utils', () => {
`);
input = document.getElementById('test');
- allowedKeys = gl.FilteredSearchTokenKeys.getKeys();
+ allowedKeys = FilteredSearchTokenKeys.getKeys();
});
function config() {
diff --git a/spec/javascripts/filtered_search/filtered_search_manager_spec.js b/spec/javascripts/filtered_search/filtered_search_manager_spec.js
index b8890e4cda1..0ed9a587dc1 100644
--- a/spec/javascripts/filtered_search/filtered_search_manager_spec.js
+++ b/spec/javascripts/filtered_search/filtered_search_manager_spec.js
@@ -3,8 +3,8 @@ import * as recentSearchesStoreSrc from '~/filtered_search/stores/recent_searche
import RecentSearchesService from '~/filtered_search/services/recent_searches_service';
import RecentSearchesServiceError from '~/filtered_search/services/recent_searches_service_error';
import RecentSearchesRoot from '~/filtered_search/recent_searches_root';
+import FilteredSearchTokenKeys from '~/filtered_search/filtered_search_token_keys';
import '~/lib/utils/common_utils';
-import '~/filtered_search/filtered_search_token_keys';
import '~/filtered_search/filtered_search_tokenizer';
import '~/filtered_search/filtered_search_dropdown_manager';
import '~/filtered_search/filtered_search_manager';
@@ -14,6 +14,7 @@ describe('Filtered Search Manager', () => {
let input;
let manager;
let tokensContainer;
+ const page = 'issues';
const placeholder = 'Search or filter results...';
function dispatchBackspaceEvent(element, eventType) {
@@ -62,7 +63,7 @@ describe('Filtered Search Manager', () => {
input = document.querySelector('.filtered-search');
tokensContainer = document.querySelector('.tokens-container');
- manager = new gl.FilteredSearchManager();
+ manager = new gl.FilteredSearchManager({ page });
manager.setup();
};
@@ -80,19 +81,19 @@ describe('Filtered Search Manager', () => {
});
it('should instantiate RecentSearchesStore with isLocalStorageAvailable', () => {
- manager = new gl.FilteredSearchManager();
+ manager = new gl.FilteredSearchManager({ page });
expect(RecentSearchesService.isAvailable).toHaveBeenCalled();
expect(recentSearchesStoreSrc.default).toHaveBeenCalledWith({
isLocalStorageAvailable,
- allowedKeys: gl.FilteredSearchTokenKeys.getKeys(),
+ allowedKeys: FilteredSearchTokenKeys.getKeys(),
});
});
});
describe('setup', () => {
beforeEach(() => {
- manager = new gl.FilteredSearchManager();
+ manager = new gl.FilteredSearchManager({ page });
});
it('should not instantiate Flash if an RecentSearchesServiceError is caught', () => {
diff --git a/spec/javascripts/filtered_search/filtered_search_token_keys_spec.js b/spec/javascripts/filtered_search/filtered_search_token_keys_spec.js
index 69b424c3af5..fbc3926d332 100644
--- a/spec/javascripts/filtered_search/filtered_search_token_keys_spec.js
+++ b/spec/javascripts/filtered_search/filtered_search_token_keys_spec.js
@@ -1,11 +1,11 @@
-import '~/filtered_search/filtered_search_token_keys';
+import FilteredSearchTokenKeys from '~/filtered_search/filtered_search_token_keys';
describe('Filtered Search Token Keys', () => {
describe('get', () => {
let tokenKeys;
beforeEach(() => {
- tokenKeys = gl.FilteredSearchTokenKeys.get();
+ tokenKeys = FilteredSearchTokenKeys.get();
});
it('should return tokenKeys', () => {
@@ -21,7 +21,7 @@ describe('Filtered Search Token Keys', () => {
let conditions;
beforeEach(() => {
- conditions = gl.FilteredSearchTokenKeys.getConditions();
+ conditions = FilteredSearchTokenKeys.getConditions();
});
it('should return conditions', () => {
@@ -35,71 +35,71 @@ describe('Filtered Search Token Keys', () => {
describe('searchByKey', () => {
it('should return null when key not found', () => {
- const tokenKey = gl.FilteredSearchTokenKeys.searchByKey('notakey');
+ const tokenKey = FilteredSearchTokenKeys.searchByKey('notakey');
expect(tokenKey === null).toBe(true);
});
it('should return tokenKey when found by key', () => {
- const tokenKeys = gl.FilteredSearchTokenKeys.get();
- const result = gl.FilteredSearchTokenKeys.searchByKey(tokenKeys[0].key);
+ const tokenKeys = FilteredSearchTokenKeys.get();
+ const result = FilteredSearchTokenKeys.searchByKey(tokenKeys[0].key);
expect(result).toEqual(tokenKeys[0]);
});
});
describe('searchBySymbol', () => {
it('should return null when symbol not found', () => {
- const tokenKey = gl.FilteredSearchTokenKeys.searchBySymbol('notasymbol');
+ const tokenKey = FilteredSearchTokenKeys.searchBySymbol('notasymbol');
expect(tokenKey === null).toBe(true);
});
it('should return tokenKey when found by symbol', () => {
- const tokenKeys = gl.FilteredSearchTokenKeys.get();
- const result = gl.FilteredSearchTokenKeys.searchBySymbol(tokenKeys[0].symbol);
+ const tokenKeys = FilteredSearchTokenKeys.get();
+ const result = FilteredSearchTokenKeys.searchBySymbol(tokenKeys[0].symbol);
expect(result).toEqual(tokenKeys[0]);
});
});
describe('searchByKeyParam', () => {
it('should return null when key param not found', () => {
- const tokenKey = gl.FilteredSearchTokenKeys.searchByKeyParam('notakeyparam');
+ const tokenKey = FilteredSearchTokenKeys.searchByKeyParam('notakeyparam');
expect(tokenKey === null).toBe(true);
});
it('should return tokenKey when found by key param', () => {
- const tokenKeys = gl.FilteredSearchTokenKeys.get();
- const result = gl.FilteredSearchTokenKeys.searchByKeyParam(`${tokenKeys[0].key}_${tokenKeys[0].param}`);
+ const tokenKeys = FilteredSearchTokenKeys.get();
+ const result = FilteredSearchTokenKeys.searchByKeyParam(`${tokenKeys[0].key}_${tokenKeys[0].param}`);
expect(result).toEqual(tokenKeys[0]);
});
it('should return alternative tokenKey when found by key param', () => {
- const tokenKeys = gl.FilteredSearchTokenKeys.getAlternatives();
- const result = gl.FilteredSearchTokenKeys.searchByKeyParam(`${tokenKeys[0].key}_${tokenKeys[0].param}`);
+ const tokenKeys = FilteredSearchTokenKeys.getAlternatives();
+ const result = FilteredSearchTokenKeys.searchByKeyParam(`${tokenKeys[0].key}_${tokenKeys[0].param}`);
expect(result).toEqual(tokenKeys[0]);
});
});
describe('searchByConditionUrl', () => {
it('should return null when condition url not found', () => {
- const condition = gl.FilteredSearchTokenKeys.searchByConditionUrl(null);
+ const condition = FilteredSearchTokenKeys.searchByConditionUrl(null);
expect(condition === null).toBe(true);
});
it('should return condition when found by url', () => {
- const conditions = gl.FilteredSearchTokenKeys.getConditions();
- const result = gl.FilteredSearchTokenKeys.searchByConditionUrl(conditions[0].url);
+ const conditions = FilteredSearchTokenKeys.getConditions();
+ const result = FilteredSearchTokenKeys.searchByConditionUrl(conditions[0].url);
expect(result).toBe(conditions[0]);
});
});
describe('searchByConditionKeyValue', () => {
it('should return null when condition tokenKey and value not found', () => {
- const condition = gl.FilteredSearchTokenKeys.searchByConditionKeyValue(null, null);
+ const condition = FilteredSearchTokenKeys.searchByConditionKeyValue(null, null);
expect(condition === null).toBe(true);
});
it('should return condition when found by tokenKey and value', () => {
- const conditions = gl.FilteredSearchTokenKeys.getConditions();
- const result = gl.FilteredSearchTokenKeys
+ const conditions = FilteredSearchTokenKeys.getConditions();
+ const result = FilteredSearchTokenKeys
.searchByConditionKeyValue(conditions[0].tokenKey, conditions[0].value);
expect(result).toEqual(conditions[0]);
});
diff --git a/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js b/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js
index 585bea9b499..bf8b66f1110 100644
--- a/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js
+++ b/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js
@@ -1,8 +1,8 @@
-import '~/filtered_search/filtered_search_token_keys';
+import FilteredSearchTokenKeys from '~/filtered_search/filtered_search_token_keys';
import '~/filtered_search/filtered_search_tokenizer';
describe('Filtered Search Tokenizer', () => {
- const allowedKeys = gl.FilteredSearchTokenKeys.getKeys();
+ const allowedKeys = FilteredSearchTokenKeys.getKeys();
describe('processTokens', () => {
it('returns for input containing only search value', () => {
diff --git a/spec/javascripts/fixtures/groups.rb b/spec/javascripts/fixtures/groups.rb
new file mode 100644
index 00000000000..35be52fbf97
--- /dev/null
+++ b/spec/javascripts/fixtures/groups.rb
@@ -0,0 +1,29 @@
+require 'spec_helper'
+
+describe 'Groups (JavaScript fixtures)', type: :controller do
+ include JavaScriptFixturesHelpers
+
+ let(:admin) { create(:admin) }
+ let(:group) { create(:group, name: 'frontend-fixtures-group' )}
+
+ render_views
+
+ before(:all) do
+ clean_frontend_fixtures('groups/')
+ end
+
+ before do
+ group.add_master(admin)
+ sign_in(admin)
+ end
+
+ describe Groups::Settings::CiCdController, '(JavaScript fixtures)', type: :controller do
+ it 'groups/ci_cd_settings.html.raw' do |example|
+ get :show,
+ group_id: group
+
+ expect(response).to be_success
+ store_frontend_fixture(response, example.description)
+ end
+ end
+end
diff --git a/spec/javascripts/fixtures/projects.rb b/spec/javascripts/fixtures/projects.rb
index 2a100e7fab5..b344b389241 100644
--- a/spec/javascripts/fixtures/projects.rb
+++ b/spec/javascripts/fixtures/projects.rb
@@ -1,11 +1,14 @@
require 'spec_helper'
-describe ProjectsController, '(JavaScript fixtures)', type: :controller do
+describe 'Projects (JavaScript fixtures)', type: :controller do
include JavaScriptFixturesHelpers
let(:admin) { create(:admin) }
let(:namespace) { create(:namespace, name: 'frontend-fixtures' )}
let(:project) { create(:project, namespace: namespace, path: 'builds-project') }
+ let(:project_variable_populated) { create(:project, namespace: namespace, path: 'builds-project2') }
+ let!(:variable1) { create(:ci_variable, project: project_variable_populated) }
+ let!(:variable2) { create(:ci_variable, project: project_variable_populated) }
render_views
@@ -14,6 +17,9 @@ describe ProjectsController, '(JavaScript fixtures)', type: :controller do
end
before do
+ # EE-specific start
+ # EE specific end
+ project.add_master(admin)
sign_in(admin)
end
@@ -21,12 +27,43 @@ describe ProjectsController, '(JavaScript fixtures)', type: :controller do
remove_repository(project)
end
- it 'projects/dashboard.html.raw' do |example|
- get :show,
- namespace_id: project.namespace.to_param,
- id: project
+ describe ProjectsController, '(JavaScript fixtures)', type: :controller do
+ it 'projects/dashboard.html.raw' do |example|
+ get :show,
+ namespace_id: project.namespace.to_param,
+ id: project
- expect(response).to be_success
- store_frontend_fixture(response, example.description)
+ expect(response).to be_success
+ store_frontend_fixture(response, example.description)
+ end
+
+ it 'projects/edit.html.raw' do |example|
+ get :edit,
+ namespace_id: project.namespace.to_param,
+ id: project
+
+ expect(response).to be_success
+ store_frontend_fixture(response, example.description)
+ end
+ end
+
+ describe Projects::Settings::CiCdController, '(JavaScript fixtures)', type: :controller do
+ it 'projects/ci_cd_settings.html.raw' do |example|
+ get :show,
+ namespace_id: project.namespace.to_param,
+ project_id: project
+
+ expect(response).to be_success
+ store_frontend_fixture(response, example.description)
+ end
+
+ it 'projects/ci_cd_settings_with_variables.html.raw' do |example|
+ get :show,
+ namespace_id: project_variable_populated.namespace.to_param,
+ project_id: project_variable_populated
+
+ expect(response).to be_success
+ store_frontend_fixture(response, example.description)
+ end
end
end
diff --git a/spec/javascripts/gl_form_spec.js b/spec/javascripts/gl_form_spec.js
index 5a8009e57fd..9c1fc0fda9e 100644
--- a/spec/javascripts/gl_form_spec.js
+++ b/spec/javascripts/gl_form_spec.js
@@ -1,10 +1,8 @@
-import Autosize from 'autosize';
+import autosize from 'autosize';
import GLForm from '~/gl_form';
import '~/lib/utils/text_utility';
import '~/lib/utils/common_utils';
-window.autosize = Autosize;
-
describe('GLForm', () => {
describe('when instantiated', function () {
beforeEach((done) => {
@@ -13,14 +11,12 @@ describe('GLForm', () => {
spyOn($.prototype, 'off').and.returnValue(this.textarea);
spyOn($.prototype, 'on').and.returnValue(this.textarea);
spyOn($.prototype, 'css');
- spyOn(window, 'autosize');
- this.glForm = new GLForm(this.form);
+ this.glForm = new GLForm(this.form, false);
setTimeout(() => {
$.prototype.off.calls.reset();
$.prototype.on.calls.reset();
$.prototype.css.calls.reset();
- window.autosize.calls.reset();
done();
});
});
@@ -43,10 +39,6 @@ describe('GLForm', () => {
expect($.prototype.on).toHaveBeenCalledWith('mouseup.autosize', jasmine.any(Function));
});
- it('should autosize the textarea', () => {
- expect(window.autosize).toHaveBeenCalledWith(jasmine.any(Object));
- });
-
it('should set the resize css property to vertical', () => {
expect($.prototype.css).toHaveBeenCalledWith('resize', 'vertical');
});
@@ -74,7 +66,7 @@ describe('GLForm', () => {
spyOn($.prototype, 'data');
spyOn($.prototype, 'outerHeight').and.returnValue(200);
spyOn(window, 'outerHeight').and.returnValue(400);
- spyOn(window.autosize, 'destroy');
+ spyOn(autosize, 'destroy');
this.glForm.destroyAutosize();
});
@@ -88,7 +80,7 @@ describe('GLForm', () => {
});
it('should call autosize destroy', () => {
- expect(window.autosize.destroy).toHaveBeenCalledWith(this.textarea);
+ expect(autosize.destroy).toHaveBeenCalledWith(this.textarea);
});
it('should set the data-height attribute', () => {
@@ -107,9 +99,9 @@ describe('GLForm', () => {
it('should return undefined if the data-height equals the outerHeight', () => {
spyOn($.prototype, 'outerHeight').and.returnValue(200);
spyOn($.prototype, 'data').and.returnValue(200);
- spyOn(window.autosize, 'destroy');
+ spyOn(autosize, 'destroy');
expect(this.glForm.destroyAutosize()).toBeUndefined();
- expect(window.autosize.destroy).not.toHaveBeenCalled();
+ expect(autosize.destroy).not.toHaveBeenCalled();
});
});
});
diff --git a/spec/javascripts/gpg_badges_spec.js b/spec/javascripts/gpg_badges_spec.js
index 7a826487bf9..5decb5e6bbd 100644
--- a/spec/javascripts/gpg_badges_spec.js
+++ b/spec/javascripts/gpg_badges_spec.js
@@ -1,6 +1,9 @@
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
import GpgBadges from '~/gpg_badges';
describe('GpgBadges', () => {
+ let mock;
const dummyCommitSha = 'n0m0rec0ffee';
const dummyBadgeHtml = 'dummy html';
const dummyResponse = {
@@ -11,38 +14,43 @@ describe('GpgBadges', () => {
};
beforeEach(() => {
+ mock = new MockAdapter(axios);
setFixtures(`
+ <form
+ class="commits-search-form" data-signatures-path="/hello" action="/hello"
+ method="get">
+ <input name="utf8" type="hidden" value="✓">
+        <input type="search" name="search" id="commits-search" class="form-control search-text-input input-short">
+ </form>
<div class="parent-container">
<div class="js-loading-gpg-badge" data-commit-sha="${dummyCommitSha}"></div>
</div>
`);
});
- it('displays a loading spinner', () => {
- spyOn($, 'get').and.returnValue({
- done() {
- // intentionally left blank
- },
- });
+ afterEach(() => {
+ mock.restore();
+ });
- GpgBadges.fetch();
+ it('displays a loading spinner', (done) => {
+ mock.onGet('/hello').reply(200);
- expect(document.querySelector('.js-loading-gpg-badge:empty')).toBe(null);
- const spinners = document.querySelectorAll('.js-loading-gpg-badge i.fa.fa-spinner.fa-spin');
- expect(spinners.length).toBe(1);
+ GpgBadges.fetch().then(() => {
+ expect(document.querySelector('.js-loading-gpg-badge:empty')).toBe(null);
+ const spinners = document.querySelectorAll('.js-loading-gpg-badge i.fa.fa-spinner.fa-spin');
+ expect(spinners.length).toBe(1);
+ done();
+ }).catch(done.fail);
});
- it('replaces the loading spinner', () => {
- spyOn($, 'get').and.returnValue({
- done(callback) {
- callback(dummyResponse);
- },
- });
-
- GpgBadges.fetch();
+ it('replaces the loading spinner', (done) => {
+ mock.onGet('/hello').reply(200, dummyResponse);
- expect(document.querySelector('.js-loading-gpg-badge')).toBe(null);
- const parentContainer = document.querySelector('.parent-container');
- expect(parentContainer.innerHTML.trim()).toEqual(dummyBadgeHtml);
+ GpgBadges.fetch().then(() => {
+ expect(document.querySelector('.js-loading-gpg-badge')).toBe(null);
+ const parentContainer = document.querySelector('.parent-container');
+ expect(parentContainer.innerHTML.trim()).toEqual(dummyBadgeHtml);
+ done();
+ }).catch(done.fail);
});
});
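
Editor's note: the gpg_badges change illustrates the recurring migration in this patch from spied jQuery AJAX calls to axios plus axios-mock-adapter, with the fetch method returning a promise the spec can chain on. A minimal self-contained sketch of the pattern ('/endpoint' and the payload are placeholders, not paths used by GitLab):

import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';

describe('axios-backed fetch', () => {
  let mock;

  beforeEach(() => {
    // Intercept requests made through the shared axios instance.
    mock = new MockAdapter(axios);
  });

  afterEach(() => {
    // Remove the interceptor so other specs hit the real adapter again.
    mock.restore();
  });

  it('resolves with the stubbed payload', (done) => {
    mock.onGet('/endpoint').reply(200, { ok: true });

    axios.get('/endpoint')
      .then(({ data }) => {
        expect(data.ok).toBe(true);
        done();
      })
      .catch(done.fail);
  });
});
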
diff --git a/spec/javascripts/groups/components/app_spec.js b/spec/javascripts/groups/components/app_spec.js
index 8338efe915b..3adc29262f3 100644
--- a/spec/javascripts/groups/components/app_spec.js
+++ b/spec/javascripts/groups/components/app_spec.js
@@ -268,10 +268,10 @@ describe('AppComponent', () => {
it('updates props which show modal confirmation dialog', () => {
const group = Object.assign({}, mockParentGroupItem);
- expect(vm.showModal).toBeFalsy();
+ expect(vm.updateModal).toBeFalsy();
expect(vm.groupLeaveConfirmationMessage).toBe('');
vm.showLeaveGroupModal(group, mockParentGroupItem);
- expect(vm.showModal).toBeTruthy();
+ expect(vm.updateModal).toBeTruthy();
expect(vm.groupLeaveConfirmationMessage).toBe(`Are you sure you want to leave the "${group.fullName}" group?`);
});
});
@@ -280,9 +280,9 @@ describe('AppComponent', () => {
it('hides modal confirmation which is shown before leaving the group', () => {
const group = Object.assign({}, mockParentGroupItem);
vm.showLeaveGroupModal(group, mockParentGroupItem);
- expect(vm.showModal).toBeTruthy();
+ expect(vm.updateModal).toBeTruthy();
vm.hideLeaveGroupModal();
- expect(vm.showModal).toBeFalsy();
+ expect(vm.updateModal).toBeFalsy();
});
});
@@ -307,7 +307,7 @@ describe('AppComponent', () => {
spyOn($, 'scrollTo');
vm.leaveGroup();
- expect(vm.showModal).toBeFalsy();
+ expect(vm.updateModal).toBeFalsy();
expect(vm.targetGroup.isBeingRemoved).toBeTruthy();
expect(vm.service.leaveGroup).toHaveBeenCalledWith(vm.targetGroup.leavePath);
setTimeout(() => {
@@ -475,7 +475,7 @@ describe('AppComponent', () => {
it('renders modal confirmation dialog', () => {
vm.groupLeaveConfirmationMessage = 'Are you sure you want to leave the "foo" group?';
- vm.showModal = true;
+ vm.updateModal = true;
const modalDialogEl = vm.$el.querySelector('.modal');
expect(modalDialogEl).not.toBe(null);
expect(modalDialogEl.querySelector('.modal-title').innerText.trim()).toBe('Are you sure?');
diff --git a/spec/javascripts/importer_status_spec.js b/spec/javascripts/importer_status_spec.js
new file mode 100644
index 00000000000..71a2cd51f63
--- /dev/null
+++ b/spec/javascripts/importer_status_spec.js
@@ -0,0 +1,107 @@
+import { ImporterStatus } from '~/importer_status';
+import axios from '~/lib/utils/axios_utils';
+import MockAdapter from 'axios-mock-adapter';
+
+describe('Importer Status', () => {
+ let instance;
+ let mock;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('addToImport', () => {
+ const importUrl = '/import_url';
+
+ beforeEach(() => {
+ setFixtures(`
+ <tr id="repo_123">
+ <td class="import-target"></td>
+ <td class="import-actions job-status">
+ <button name="button" type="submit" class="btn btn-import js-add-to-import">
+ </button>
+ </td>
+ </tr>
+ `);
+ spyOn(ImporterStatus.prototype, 'initStatusPage').and.callFake(() => {});
+ spyOn(ImporterStatus.prototype, 'setAutoUpdate').and.callFake(() => {});
+ instance = new ImporterStatus('', importUrl);
+ });
+
+ it('sets table row to active after post request', (done) => {
+ mock.onPost(importUrl).reply(200, {
+ id: 1,
+ full_path: '/full_path',
+ });
+
+ instance.addToImport({
+ currentTarget: document.querySelector('.js-add-to-import'),
+ })
+ .then(() => {
+ expect(document.querySelector('tr').classList.contains('active')).toEqual(true);
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('autoUpdate', () => {
+ const jobsUrl = '/jobs_url';
+
+ beforeEach(() => {
+ const div = document.createElement('div');
+ div.innerHTML = `
+ <div id="project_1">
+ <div class="job-status">
+ </div>
+ </div>
+ `;
+
+ document.body.appendChild(div);
+
+ spyOn(ImporterStatus.prototype, 'initStatusPage').and.callFake(() => {});
+ spyOn(ImporterStatus.prototype, 'setAutoUpdate').and.callFake(() => {});
+ instance = new ImporterStatus(jobsUrl);
+ });
+
+ function setupMock(importStatus) {
+ mock.onGet(jobsUrl).reply(200, [{
+ id: 1,
+ import_status: importStatus,
+ }]);
+ }
+
+ function expectJobStatus(done, status) {
+ instance.autoUpdate()
+ .then(() => {
+ expect(document.querySelector('#project_1').innerText.trim()).toEqual(status);
+ done();
+ })
+ .catch(done.fail);
+ }
+
+ it('sets the job status to done', (done) => {
+ setupMock('finished');
+ expectJobStatus(done, 'done');
+ });
+
+ it('sets the job status to scheduled', (done) => {
+ setupMock('scheduled');
+ expectJobStatus(done, 'scheduled');
+ });
+
+ it('sets the job status to started', (done) => {
+ setupMock('started');
+ expectJobStatus(done, 'started');
+ });
+
+ it('sets the job status to custom status', (done) => {
+ setupMock('custom status');
+ expectJobStatus(done, 'custom status');
+ });
+ });
+});
diff --git a/spec/javascripts/issuable_time_tracker_spec.js b/spec/javascripts/issuable_time_tracker_spec.js
index 8ff93c4f918..365e9fe6a4b 100644
--- a/spec/javascripts/issuable_time_tracker_spec.js
+++ b/spec/javascripts/issuable_time_tracker_spec.js
@@ -2,7 +2,7 @@
import Vue from 'vue';
-import timeTracker from '~/sidebar/components/time_tracking/time_tracker';
+import timeTracker from '~/sidebar/components/time_tracking/time_tracker.vue';
function initTimeTrackingComponent(opts) {
setFixtures(`
diff --git a/spec/javascripts/job_spec.js b/spec/javascripts/job_spec.js
index 03b58e9c1d0..b4599688c6d 100644
--- a/spec/javascripts/job_spec.js
+++ b/spec/javascripts/job_spec.js
@@ -10,6 +10,7 @@ describe('Job', () => {
const JOB_URL = `${gl.TEST_HOST}/frontend-fixtures/builds-project/-/jobs/1`;
let mock;
let response;
+ let job;
function waitForPromise() {
return new Promise(resolve => requestAnimationFrame(resolve));
@@ -22,6 +23,8 @@ describe('Job', () => {
spyOn(urlUtils, 'visitUrl');
+ response = {};
+
mock = new MockAdapter(axios);
mock.onGet(new RegExp(`${JOB_URL}/trace.json?(.*)`)).reply(() => [200, response]);
@@ -30,7 +33,7 @@ describe('Job', () => {
afterEach(() => {
mock.restore();
- response = {};
+ clearTimeout(job.timeout);
});
describe('class constructor', () => {
@@ -43,15 +46,19 @@ describe('Job', () => {
});
describe('setup', () => {
- beforeEach(function () {
- this.job = new Job();
+ beforeEach(function (done) {
+ job = new Job();
+
+ waitForPromise()
+ .then(done)
+ .catch(done.fail);
});
it('copies build options', function () {
- expect(this.job.pagePath).toBe(JOB_URL);
- expect(this.job.buildStatus).toBe('success');
- expect(this.job.buildStage).toBe('test');
- expect(this.job.state).toBe('');
+ expect(job.pagePath).toBe(JOB_URL);
+ expect(job.buildStatus).toBe('success');
+ expect(job.buildStage).toBe('test');
+ expect(job.state).toBe('');
});
it('only shows the jobs matching the current stage', () => {
@@ -84,12 +91,12 @@ describe('Job', () => {
complete: false,
};
- this.job = new Job();
+ job = new Job();
waitForPromise()
.then(() => {
expect($('#build-trace .js-build-output').text()).toMatch(/Update/);
- expect(this.job.state).toBe('newstate');
+ expect(job.state).toBe('newstate');
response = {
html: '<span>More</span>',
@@ -103,7 +110,7 @@ describe('Job', () => {
.then(waitForPromise)
.then(() => {
expect($('#build-trace .js-build-output').text()).toMatch(/UpdateMore/);
- expect(this.job.state).toBe('finalstate');
+ expect(job.state).toBe('finalstate');
})
.then(done)
.catch(done.fail);
@@ -117,7 +124,7 @@ describe('Job', () => {
complete: false,
};
- this.job = new Job();
+ job = new Job();
waitForPromise()
.then(() => {
@@ -151,7 +158,7 @@ describe('Job', () => {
total: 100,
};
- this.job = new Job();
+ job = new Job();
waitForPromise()
.then(() => {
@@ -172,7 +179,7 @@ describe('Job', () => {
total: 100,
};
- this.job = new Job();
+ job = new Job();
waitForPromise()
.then(() => {
@@ -191,9 +198,10 @@ describe('Job', () => {
append: false,
size: 50,
total: 100,
+ complete: false,
};
- this.job = new Job();
+ job = new Job();
waitForPromise()
.then(() => {
@@ -207,6 +215,7 @@ describe('Job', () => {
append: true,
size: 10,
total: 100,
+ complete: true,
};
})
.then(() => jasmine.clock().tick(4001))
@@ -229,7 +238,7 @@ describe('Job', () => {
total: 100,
};
- this.job = new Job();
+ job = new Job();
expect(
document.querySelector('.js-raw-link').textContent.trim(),
@@ -247,7 +256,7 @@ describe('Job', () => {
total: 100,
};
- this.job = new Job();
+ job = new Job();
waitForPromise()
.then(() => {
@@ -269,7 +278,7 @@ describe('Job', () => {
total: 100,
};
- this.job = new Job();
+ job = new Job();
waitForPromise()
.then(done)
@@ -296,7 +305,7 @@ describe('Job', () => {
it('should request build trace with state parameter', (done) => {
spyOn(axios, 'get').and.callThrough();
// eslint-disable-next-line no-new
- new Job();
+ job = new Job();
setTimeout(() => {
expect(axios.get).toHaveBeenCalledWith(
diff --git a/spec/javascripts/lib/utils/common_utils_spec.js b/spec/javascripts/lib/utils/common_utils_spec.js
index 80430011aed..49799c31995 100644
--- a/spec/javascripts/lib/utils/common_utils_spec.js
+++ b/spec/javascripts/lib/utils/common_utils_spec.js
@@ -480,4 +480,33 @@ describe('common_utils', () => {
expect(commonUtils.spriteIcon('test', 'fa fa-test')).toEqual('<svg class="fa fa-test"><use xlink:href="icons.svg#test" /></svg>');
});
});
+
+ describe('convertObjectPropsToCamelCase', () => {
+ it('returns new object with camelCase property names by converting object with snake_case names', () => {
+ const snakeRegEx = /(_\w)/g;
+ const mockObj = {
+ id: 1,
+ group_name: 'GitLab.org',
+ absolute_web_url: 'https://gitlab.com/gitlab-org/',
+ };
+ const mappings = {
+ id: 'id',
+ groupName: 'group_name',
+ absoluteWebUrl: 'absolute_web_url',
+ };
+
+ const convertedObj = commonUtils.convertObjectPropsToCamelCase(mockObj);
+
+ Object.keys(convertedObj).forEach((prop) => {
+ expect(snakeRegEx.test(prop)).toBeFalsy();
+ expect(convertedObj[prop]).toBe(mockObj[mappings[prop]]);
+ });
+ });
+
+    it('returns an empty object if called with null, undefined or an empty object', () => {
+ expect(Object.keys(commonUtils.convertObjectPropsToCamelCase(null)).length).toBe(0);
+ expect(Object.keys(commonUtils.convertObjectPropsToCamelCase()).length).toBe(0);
+ expect(Object.keys(commonUtils.convertObjectPropsToCamelCase({})).length).toBe(0);
+ });
+ });
});
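
Editor's note: the new convertObjectPropsToCamelCase specs describe the utility only through its behaviour. A rough sketch of an implementation that would satisfy both these tests and the convertToCamelCase test below (an assumption for illustration, not GitLab's actual code):

// Convert a single snake_case string to camelCase.
const convertToCamelCase = string =>
  string.replace(/(_\w)/g, match => match[1].toUpperCase());

// Return a new object whose keys are camelCased copies of the input's
// snake_case keys; null or undefined input yields an empty object.
const convertObjectPropsToCamelCase = (obj = {}) => {
  if (obj === null) {
    return {};
  }

  return Object.keys(obj).reduce((acc, key) => {
    acc[convertToCamelCase(key)] = obj[key];
    return acc;
  }, {});
};

// convertObjectPropsToCamelCase({ group_name: 'GitLab.org' })
// => { groupName: 'GitLab.org' }
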
diff --git a/spec/javascripts/lib/utils/text_utility_spec.js b/spec/javascripts/lib/utils/text_utility_spec.js
index 69a23d7b2f3..e57a55fa71a 100644
--- a/spec/javascripts/lib/utils/text_utility_spec.js
+++ b/spec/javascripts/lib/utils/text_utility_spec.js
@@ -72,4 +72,10 @@ describe('text_utility', () => {
expect(textUtils.stripHtml('This is a text with <p>html</p>.', ' ')).toEqual('This is a text with html .');
});
});
+
+ describe('convertToCamelCase', () => {
+ it('converts snake_case string to camelCase string', () => {
+ expect(textUtils.convertToCamelCase('snake_case')).toBe('snakeCase');
+ });
+ });
});
diff --git a/spec/javascripts/monitoring/dashboard_state_spec.js b/spec/javascripts/monitoring/dashboard_state_spec.js
index 3319eeb3f31..df3198dd3e2 100644
--- a/spec/javascripts/monitoring/dashboard_state_spec.js
+++ b/spec/javascripts/monitoring/dashboard_state_spec.js
@@ -29,34 +29,6 @@ describe('EmptyState', () => {
expect(component.currentState).toBe(component.states.gettingStarted);
});
- it('buttonPath returns settings path for the state "gettingStarted"', () => {
- const component = createComponent({
- selectedState: 'gettingStarted',
- settingsPath: statePaths.settingsPath,
- documentationPath: statePaths.documentationPath,
- emptyGettingStartedSvgPath: 'foo',
- emptyLoadingSvgPath: 'foo',
- emptyUnableToConnectSvgPath: 'foo',
- });
-
- expect(component.buttonPath).toEqual(statePaths.settingsPath);
- expect(component.buttonPath).not.toEqual(statePaths.documentationPath);
- });
-
- it('buttonPath returns documentation path for any of the other states', () => {
- const component = createComponent({
- selectedState: 'loading',
- settingsPath: statePaths.settingsPath,
- documentationPath: statePaths.documentationPath,
- emptyGettingStartedSvgPath: 'foo',
- emptyLoadingSvgPath: 'foo',
- emptyUnableToConnectSvgPath: 'foo',
- });
-
- expect(component.buttonPath).toEqual(statePaths.documentationPath);
- expect(component.buttonPath).not.toEqual(statePaths.settingsPath);
- });
-
it('showButtonDescription returns a description with a link for the unableToConnect state', () => {
const component = createComponent({
selectedState: 'unableToConnect',
@@ -88,6 +60,7 @@ describe('EmptyState', () => {
const component = createComponent({
selectedState: 'gettingStarted',
settingsPath: statePaths.settingsPath,
+ clustersPath: statePaths.clustersPath,
documentationPath: statePaths.documentationPath,
emptyGettingStartedSvgPath: 'foo',
emptyLoadingSvgPath: 'foo',
diff --git a/spec/javascripts/monitoring/mock_data.js b/spec/javascripts/monitoring/mock_data.js
index 2bbe963e393..f30208b27b6 100644
--- a/spec/javascripts/monitoring/mock_data.js
+++ b/spec/javascripts/monitoring/mock_data.js
@@ -2471,6 +2471,7 @@ export const deploymentData = [
export const statePaths = {
settingsPath: '/root/hello-prometheus/services/prometheus/edit',
+ clustersPath: '/root/hello-prometheus/clusters',
documentationPath: '/help/administration/monitoring/prometheus/index.md',
};
diff --git a/spec/javascripts/notebook/cells/markdown_spec.js b/spec/javascripts/notebook/cells/markdown_spec.js
index 02304bf5d7d..8f8ba231ae8 100644
--- a/spec/javascripts/notebook/cells/markdown_spec.js
+++ b/spec/javascripts/notebook/cells/markdown_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
import MarkdownComponent from '~/notebook/cells/markdown.vue';
-import katex from 'vendor/katex';
+import katex from 'katex';
const Component = Vue.extend(MarkdownComponent);
diff --git a/spec/javascripts/notes/components/note_app_spec.js b/spec/javascripts/notes/components/note_app_spec.js
index 36c56cd3862..12d180137a0 100644
--- a/spec/javascripts/notes/components/note_app_spec.js
+++ b/spec/javascripts/notes/components/note_app_spec.js
@@ -2,14 +2,29 @@ import _ from 'underscore';
import Vue from 'vue';
import notesApp from '~/notes/components/notes_app.vue';
import service from '~/notes/services/notes_service';
+import '~/render_gfm';
import * as mockData from '../mock_data';
-import getSetTimeoutPromise from '../../helpers/set_timeout_promise_helper';
+
+const vueMatchers = {
+ toIncludeElement() {
+ return {
+ compare(vm, selector) {
+ const result = {
+ pass: vm.$el.querySelector(selector) !== null,
+ };
+ return result;
+ },
+ };
+ },
+};
describe('note_app', () => {
let mountComponent;
let vm;
beforeEach(() => {
+ jasmine.addMatchers(vueMatchers);
+
const IssueNotesApp = Vue.extend(notesApp);
mountComponent = (data) => {
@@ -105,7 +120,7 @@ describe('note_app', () => {
});
it('should render loading icon', () => {
- expect(vm.$el.querySelector('.js-loading')).toBeDefined();
+ expect(vm).toIncludeElement('.js-loading');
});
it('should render form', () => {
@@ -118,10 +133,14 @@ describe('note_app', () => {
describe('update note', () => {
describe('individual note', () => {
- beforeEach(() => {
+ beforeEach((done) => {
Vue.http.interceptors.push(mockData.individualNoteInterceptor);
spyOn(service, 'updateNote').and.callThrough();
vm = mountComponent();
+ setTimeout(() => {
+ vm.$el.querySelector('.js-note-edit').click();
+ Vue.nextTick(done);
+ }, 0);
});
afterEach(() => {
@@ -131,40 +150,32 @@ describe('note_app', () => {
);
});
- it('renders edit form', (done) => {
- setTimeout(() => {
- vm.$el.querySelector('.js-note-edit').click();
- Vue.nextTick(() => {
- expect(vm.$el.querySelector('.js-vue-issue-note-form')).toBeDefined();
- done();
- });
- }, 0);
+ it('renders edit form', () => {
+ expect(vm).toIncludeElement('.js-vue-issue-note-form');
});
it('calls the service to update the note', (done) => {
- getSetTimeoutPromise()
- .then(() => {
- vm.$el.querySelector('.js-note-edit').click();
- })
- .then(Vue.nextTick)
- .then(() => {
- vm.$el.querySelector('.js-vue-issue-note-form').value = 'this is a note';
- vm.$el.querySelector('.js-vue-issue-save').click();
-
- expect(service.updateNote).toHaveBeenCalled();
- })
- // Wait for the requests to finish before destroying
- .then(Vue.nextTick)
+ vm.$el.querySelector('.js-vue-issue-note-form').value = 'this is a note';
+ vm.$el.querySelector('.js-vue-issue-save').click();
+
+ expect(service.updateNote).toHaveBeenCalled();
+ // Wait for the requests to finish before destroying
+ Vue.nextTick()
.then(done)
.catch(done.fail);
});
});
- describe('dicussion note', () => {
- beforeEach(() => {
+ describe('discussion note', () => {
+ beforeEach((done) => {
Vue.http.interceptors.push(mockData.discussionNoteInterceptor);
spyOn(service, 'updateNote').and.callThrough();
vm = mountComponent();
+
+ setTimeout(() => {
+ vm.$el.querySelector('.js-note-edit').click();
+ Vue.nextTick(done);
+ }, 0);
});
afterEach(() => {
@@ -174,30 +185,17 @@ describe('note_app', () => {
);
});
- it('renders edit form', (done) => {
- setTimeout(() => {
- vm.$el.querySelector('.js-note-edit').click();
- Vue.nextTick(() => {
- expect(vm.$el.querySelector('.js-vue-issue-note-form')).toBeDefined();
- done();
- });
- }, 0);
+ it('renders edit form', () => {
+ expect(vm).toIncludeElement('.js-vue-issue-note-form');
});
it('updates the note and resets the edit form', (done) => {
- getSetTimeoutPromise()
- .then(() => {
- vm.$el.querySelector('.js-note-edit').click();
- })
- .then(Vue.nextTick)
- .then(() => {
- vm.$el.querySelector('.js-vue-issue-note-form').value = 'this is a note';
- vm.$el.querySelector('.js-vue-issue-save').click();
-
- expect(service.updateNote).toHaveBeenCalled();
- })
- // Wait for the requests to finish before destroying
- .then(Vue.nextTick)
+ vm.$el.querySelector('.js-vue-issue-note-form').value = 'this is a note';
+ vm.$el.querySelector('.js-vue-issue-save').click();
+
+ expect(service.updateNote).toHaveBeenCalled();
+ // Wait for the requests to finish before destroying
+ Vue.nextTick()
.then(done)
.catch(done.fail);
});
diff --git a/spec/javascripts/notes/components/noteable_note_spec.js b/spec/javascripts/notes/components/noteable_note_spec.js
index cb63b64724d..88a7ffb0b9c 100644
--- a/spec/javascripts/notes/components/noteable_note_spec.js
+++ b/spec/javascripts/notes/components/noteable_note_spec.js
@@ -56,4 +56,25 @@ describe('issue_note', () => {
done();
}, 0);
});
+
+ describe('cancel edit', () => {
+ it('restores content of updated note', (done) => {
+ const noteBody = 'updated note text';
+ vm.updateNote = () => Promise.resolve();
+
+ vm.formUpdateHandler(noteBody, null, $.noop);
+
+ setTimeout(() => {
+ expect(vm.note.note_html).toEqual(noteBody);
+
+ vm.formCancelHandler();
+
+ setTimeout(() => {
+ expect(vm.note.note_html).toEqual(noteBody);
+
+ done();
+ });
+ });
+ });
+ });
});
diff --git a/spec/javascripts/notes/helpers.js b/spec/javascripts/notes/helpers.js
new file mode 100644
index 00000000000..a7663710a56
--- /dev/null
+++ b/spec/javascripts/notes/helpers.js
@@ -0,0 +1,12 @@
+// eslint-disable-next-line import/prefer-default-export
+export const resetStore = (store) => {
+ store.replaceState({
+ notes: [],
+ targetNoteHash: null,
+ lastFetchedAt: null,
+
+ notesData: {},
+ userData: {},
+ noteableData: {},
+ });
+};
diff --git a/spec/javascripts/notes/mock_data.js b/spec/javascripts/notes/mock_data.js
index f0c800c759d..ccf4bd070c2 100644
--- a/spec/javascripts/notes/mock_data.js
+++ b/spec/javascripts/notes/mock_data.js
@@ -7,6 +7,8 @@ export const notesDataMock = {
notesPath: '/gitlab-org/gitlab-ce/noteable/issue/98/notes',
quickActionsDocsPath: '/help/user/project/quick_actions',
registerPath: '/users/sign_in?redirect_to_referer=yes#register-pane',
+ closeIssuePath: '/twitter/flight/issues/9.json?issue%5Bstate_event%5D=close',
+ reopenIssuePath: '/twitter/flight/issues/9.json?issue%5Bstate_event%5D=reopen',
};
export const userDataMock = {
diff --git a/spec/javascripts/notes/stores/actions_spec.js b/spec/javascripts/notes/stores/actions_spec.js
index e092320f9a3..ab80ed7bbfb 100644
--- a/spec/javascripts/notes/stores/actions_spec.js
+++ b/spec/javascripts/notes/stores/actions_spec.js
@@ -1,8 +1,16 @@
+import Vue from 'vue';
+import _ from 'underscore';
import * as actions from '~/notes/stores/actions';
+import store from '~/notes/stores';
import testAction from '../../helpers/vuex_action_helper';
+import { resetStore } from '../helpers';
import { discussionMock, notesDataMock, userDataMock, noteableDataMock, individualNote } from '../mock_data';
describe('Actions Notes Store', () => {
+ afterEach(() => {
+ resetStore(store);
+ });
+
describe('setNotesData', () => {
it('should set received notes data', (done) => {
testAction(actions.setNotesData, null, { notesData: {} }, [
@@ -58,4 +66,67 @@ describe('Actions Notes Store', () => {
], done);
});
});
+
+ describe('async methods', () => {
+ const interceptor = (request, next) => {
+ next(request.respondWith(JSON.stringify({}), {
+ status: 200,
+ }));
+ };
+
+ beforeEach(() => {
+ Vue.http.interceptors.push(interceptor);
+ });
+
+ afterEach(() => {
+ Vue.http.interceptors = _.without(Vue.http.interceptors, interceptor);
+ });
+
+ describe('closeIssue', () => {
+ it('sets state as closed', (done) => {
+ store.dispatch('closeIssue', { notesData: { closeIssuePath: '' } })
+ .then(() => {
+ expect(store.state.noteableData.state).toEqual('closed');
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('reopenIssue', () => {
+ it('sets state as reopened', (done) => {
+ store.dispatch('reopenIssue', { notesData: { reopenIssuePath: '' } })
+ .then(() => {
+ expect(store.state.noteableData.state).toEqual('reopened');
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+ });
+
+ describe('emitStateChangedEvent', () => {
+ it('emits an event on the document', () => {
+ document.addEventListener('issuable_vue_app:change', (event) => {
+ expect(event.detail.data).toEqual({ id: '1', state: 'closed' });
+ expect(event.detail.isClosed).toEqual(false);
+ });
+
+ store.dispatch('emitStateChangedEvent', { id: '1', state: 'closed' });
+ });
+ });
+
+ describe('toggleIssueLocalState', () => {
+ it('sets issue state as closed', (done) => {
+ testAction(actions.toggleIssueLocalState, 'closed', {}, [
+ { type: 'CLOSE_ISSUE', payload: 'closed' },
+ ], done);
+ });
+
+ it('sets issue state as reopened', (done) => {
+ testAction(actions.toggleIssueLocalState, 'reopened', {}, [
+ { type: 'REOPEN_ISSUE', payload: 'reopened' },
+ ], done);
+ });
+ });
});
diff --git a/spec/javascripts/notes/stores/getters_spec.js b/spec/javascripts/notes/stores/getters_spec.js
index c5a84b71788..919ffbfdef0 100644
--- a/spec/javascripts/notes/stores/getters_spec.js
+++ b/spec/javascripts/notes/stores/getters_spec.js
@@ -55,4 +55,10 @@ describe('Getters Notes Store', () => {
expect(getters.getCurrentUserLastNote(state)).toEqual(individualNote.notes[0]);
});
});
+
+ describe('issueState', () => {
+ it('should return the issue state', () => {
+ expect(getters.issueState(state)).toEqual(noteableDataMock.state);
+ });
+ });
});
diff --git a/spec/javascripts/pipeline_schedules/interval_pattern_input_spec.js b/spec/javascripts/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
index 040d14efed2..4655e29eed0 100644
--- a/spec/javascripts/pipeline_schedules/interval_pattern_input_spec.js
+++ b/spec/javascripts/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
import Translate from '~/vue_shared/translate';
-import IntervalPatternInput from '~/pipeline_schedules/components/interval_pattern_input.vue';
+import IntervalPatternInput from '~/pages/projects/pipeline_schedules/shared/components/interval_pattern_input.vue';
Vue.use(Translate);
diff --git a/spec/javascripts/pipeline_schedules/pipeline_schedule_callout_spec.js b/spec/javascripts/pages/projects/pipeline_schedules/shared/components/pipeline_schedule_callout_spec.js
index ed481cb60a1..f95a7cef18a 100644
--- a/spec/javascripts/pipeline_schedules/pipeline_schedule_callout_spec.js
+++ b/spec/javascripts/pages/projects/pipeline_schedules/shared/components/pipeline_schedule_callout_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
import Cookies from 'js-cookie';
-import PipelineSchedulesCallout from '~/pipeline_schedules/components/pipeline_schedules_callout.vue';
+import PipelineSchedulesCallout from '~/pages/projects/pipeline_schedules/shared/components/pipeline_schedules_callout.vue';
const PipelineSchedulesCalloutComponent = Vue.extend(PipelineSchedulesCallout);
const cookieKey = 'pipeline_schedules_callout_dismissed';
diff --git a/spec/javascripts/pipelines/async_button_spec.js b/spec/javascripts/pipelines/async_button_spec.js
index d010d897642..8ce33d410a7 100644
--- a/spec/javascripts/pipelines/async_button_spec.js
+++ b/spec/javascripts/pipelines/async_button_spec.js
@@ -15,6 +15,7 @@ describe('Pipelines Async Button', () => {
title: 'Foo',
icon: 'repeat',
cssClass: 'bar',
+ id: 123,
},
}).$mount();
});
@@ -38,9 +39,8 @@ describe('Pipelines Async Button', () => {
describe('With confirm dialog', () => {
it('should call the service when confirmation is positive', () => {
- spyOn(window, 'confirm').and.returnValue(true);
- eventHub.$on('postAction', (endpoint) => {
- expect(endpoint).toEqual('/foo');
+ eventHub.$on('actionConfirmationModal', (data) => {
+ expect(data.id).toEqual(123);
});
component = new AsyncButtonComponent({
@@ -49,7 +49,7 @@ describe('Pipelines Async Button', () => {
title: 'Foo',
icon: 'fa fa-foo',
cssClass: 'bar',
- confirmActionMessage: 'bar',
+ id: 123,
},
}).$mount();
diff --git a/spec/javascripts/settings_panels_spec.js b/spec/javascripts/settings_panels_spec.js
new file mode 100644
index 00000000000..d433f8c3e07
--- /dev/null
+++ b/spec/javascripts/settings_panels_spec.js
@@ -0,0 +1,29 @@
+import initSettingsPanels from '~/settings_panels';
+
+describe('Settings Panels', () => {
+ preloadFixtures('projects/ci_cd_settings.html.raw');
+
+ beforeEach(() => {
+ loadFixtures('projects/ci_cd_settings.html.raw');
+ });
+
+ describe('initSettingsPane', () => {
+ afterEach(() => {
+ location.hash = '';
+ });
+
+ it('should expand linked hash fragment panel', () => {
+ location.hash = '#js-general-pipeline-settings';
+
+ const pipelineSettingsPanel = document.querySelector('#js-general-pipeline-settings');
+ // Our test environment automatically expands everything so we need to clear that out first
+ pipelineSettingsPanel.classList.remove('expanded');
+
+ expect(pipelineSettingsPanel.classList.contains('expanded')).toBe(false);
+
+ initSettingsPanels();
+
+ expect(pipelineSettingsPanel.classList.contains('expanded')).toBe(true);
+ });
+ });
+});
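
Editor's note: the spec above pins down initSettingsPanels only through the CI/CD settings fixture. A hedged sketch of the hash-expansion logic it exercises (an illustrative approximation, not the real ~/settings_panels implementation):

// Expand the settings panel whose id matches the current location hash,
// e.g. visiting #js-general-pipeline-settings opens that section.
export default function initSettingsPanels() {
  if (!window.location.hash) return;

  const panel = document.querySelector(window.location.hash);

  if (panel && !panel.classList.contains('expanded')) {
    panel.classList.add('expanded');
  }
}
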
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_missing_branch_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_missing_branch_spec.js
index 720effb5c1c..3d7f4abd420 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_missing_branch_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_missing_branch_spec.js
@@ -1,38 +1,22 @@
import Vue from 'vue';
-import missingBranchComponent from '~/vue_merge_request_widget/components/states/mr_widget_missing_branch';
-
-const createComponent = () => {
- const Component = Vue.extend(missingBranchComponent);
- const mr = {
- sourceBranchRemoved: true,
- };
-
- return new Component({
- el: document.createElement('div'),
- propsData: { mr },
- });
-};
+import missingBranchComponent from '~/vue_merge_request_widget/components/states/mr_widget_missing_branch.vue';
+import mountComponent from '../../../helpers/vue_mount_component_helper';
describe('MRWidgetMissingBranch', () => {
- describe('props', () => {
- it('should have props', () => {
- const mrProp = missingBranchComponent.props.mr;
+ let vm;
- expect(mrProp.type instanceof Object).toBeTruthy();
- expect(mrProp.required).toBeTruthy();
- });
+ beforeEach(() => {
+ const Component = Vue.extend(missingBranchComponent);
+ vm = mountComponent(Component, { mr: { sourceBranchRemoved: true } });
});
- describe('components', () => {
- it('should have components added', () => {
- expect(missingBranchComponent.components['mr-widget-merge-help']).toBeDefined();
- });
+ afterEach(() => {
+ vm.$destroy();
});
describe('computed', () => {
describe('missingBranchName', () => {
it('should return proper branch name', () => {
- const vm = createComponent();
expect(vm.missingBranchName).toEqual('source');
vm.mr.sourceBranchRemoved = false;
@@ -43,7 +27,7 @@ describe('MRWidgetMissingBranch', () => {
describe('template', () => {
it('should have correct elements', () => {
- const el = createComponent().$el;
+ const el = vm.$el;
const content = el.textContent.replace(/\n(\s)+/g, ' ').trim();
expect(el.classList.contains('mr-widget-body')).toBeTruthy();
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_not_allowed_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_not_allowed_spec.js
index 33f20ab132d..c89e863d904 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_not_allowed_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_not_allowed_spec.js
@@ -1,17 +1,24 @@
import Vue from 'vue';
-import notAllowedComponent from '~/vue_merge_request_widget/components/states/mr_widget_not_allowed';
+import notAllowedComponent from '~/vue_merge_request_widget/components/states/mr_widget_not_allowed.vue';
+import mountComponent from '../../../helpers/vue_mount_component_helper';
describe('MRWidgetNotAllowed', () => {
- describe('template', () => {
+ let vm;
+ beforeEach(() => {
const Component = Vue.extend(notAllowedComponent);
- const vm = new Component({
- el: document.createElement('div'),
- });
- it('should have correct elements', () => {
- expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy();
- expect(vm.$el.querySelector('button').getAttribute('disabled')).toBeTruthy();
- expect(vm.$el.innerText).toContain('Ready to be merged automatically.');
- expect(vm.$el.innerText).toContain('Ask someone with write access to this repository to merge this request');
- });
+ vm = mountComponent(Component);
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ it('renders success icon', () => {
+ expect(vm.$el.querySelector('.ci-status-icon-success')).not.toBe(null);
+ });
+
+ it('renders informative text', () => {
+ expect(vm.$el.innerText).toContain('Ready to be merged automatically.');
+ expect(vm.$el.innerText).toContain('Ask someone with write access to this repository to merge this request');
});
});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js
index d0702f9f503..edab26286bc 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js
@@ -1,16 +1,23 @@
import Vue from 'vue';
-import pipelineBlockedComponent from '~/vue_merge_request_widget/components/states/mr_widget_pipeline_blocked';
+import pipelineBlockedComponent from '~/vue_merge_request_widget/components/states/mr_widget_pipeline_blocked.vue';
+import mountComponent from '../../../helpers/vue_mount_component_helper';
describe('MRWidgetPipelineBlocked', () => {
- describe('template', () => {
+ let vm;
+ beforeEach(() => {
const Component = Vue.extend(pipelineBlockedComponent);
- const vm = new Component({
- el: document.createElement('div'),
- });
- it('should have correct elements', () => {
- expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy();
- expect(vm.$el.querySelector('button').getAttribute('disabled')).toBeTruthy();
- expect(vm.$el.innerText).toContain('Pipeline blocked. The pipeline for this merge request requires a manual action to proceed');
- });
+ vm = mountComponent(Component);
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ it('renders warning icon', () => {
+ expect(vm.$el.querySelector('.ci-status-icon-warning')).not.toBe(null);
+ });
+
+ it('renders information text', () => {
+ expect(vm.$el.textContent.trim().replace(/[\r\n]+/g, ' ')).toContain('Pipeline blocked. The pipeline for this merge request requires a manual action to proceed');
});
});
diff --git a/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js b/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
index cd00d0a39a3..45035effe81 100644
--- a/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
+++ b/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
@@ -295,6 +295,15 @@ describe('mrWidgetOptions', () => {
expect(notify.notifyMe).not.toHaveBeenCalled();
});
+
+ it('should not notify if no pipeline provided', () => {
+ vm.handleNotification({
+ ...data,
+ pipeline: undefined,
+ });
+
+ expect(notify.notifyMe).not.toHaveBeenCalled();
+ });
});
describe('resumePolling', () => {
diff --git a/spec/javascripts/vue_shared/components/confirmation_input_spec.js b/spec/javascripts/vue_shared/components/confirmation_input_spec.js
deleted file mode 100644
index a6a12614e77..00000000000
--- a/spec/javascripts/vue_shared/components/confirmation_input_spec.js
+++ /dev/null
@@ -1,63 +0,0 @@
-import Vue from 'vue';
-import confirmationInput from '~/vue_shared/components/confirmation_input.vue';
-import mountComponent from '../../helpers/vue_mount_component_helper';
-
-describe('Confirmation input component', () => {
- const Component = Vue.extend(confirmationInput);
- const props = {
- inputId: 'dummy-id',
- confirmationKey: 'confirmation-key',
- confirmationValue: 'confirmation-value',
- };
- let vm;
-
- afterEach(() => {
- vm.$destroy();
- });
-
- describe('props', () => {
- beforeEach(() => {
- vm = mountComponent(Component, props);
- });
-
- it('sets id of the input field to inputId', () => {
- expect(vm.$refs.enteredValue.id).toBe(props.inputId);
- });
-
- it('sets name of the input field to confirmationKey', () => {
- expect(vm.$refs.enteredValue.name).toBe(props.confirmationKey);
- });
- });
-
- describe('computed', () => {
- describe('inputLabel', () => {
- it('escapes confirmationValue by default', () => {
- vm = mountComponent(Component, { ...props, confirmationValue: 'n<e></e>ds escap"ng' });
- expect(vm.inputLabel).toBe('Type <code>n&lt;e&gt;&lt;/e&gt;ds escap&quot;ng</code> to confirm:');
- });
-
- it('does not escape confirmationValue if escapeValue is false', () => {
- vm = mountComponent(Component, { ...props, confirmationValue: 'n<e></e>ds escap"ng', shouldEscapeConfirmationValue: false });
- expect(vm.inputLabel).toBe('Type <code>n<e></e>ds escap"ng</code> to confirm:');
- });
- });
- });
-
- describe('methods', () => {
- describe('hasCorrectValue', () => {
- beforeEach(() => {
- vm = mountComponent(Component, props);
- });
-
- it('returns false if entered value is incorrect', () => {
- vm.$refs.enteredValue.value = 'incorrect';
- expect(vm.hasCorrectValue()).toBe(false);
- });
-
- it('returns true if entered value is correct', () => {
- vm.$refs.enteredValue.value = props.confirmationValue;
- expect(vm.hasCorrectValue()).toBe(true);
- });
- });
- });
-});
diff --git a/spec/javascripts/vue_shared/components/gl_modal_spec.js b/spec/javascripts/vue_shared/components/gl_modal_spec.js
new file mode 100644
index 00000000000..d6148cb785b
--- /dev/null
+++ b/spec/javascripts/vue_shared/components/gl_modal_spec.js
@@ -0,0 +1,192 @@
+import Vue from 'vue';
+import GlModal from '~/vue_shared/components/gl_modal.vue';
+import mountComponent from '../../helpers/vue_mount_component_helper';
+
+const modalComponent = Vue.extend(GlModal);
+
+describe('GlModal', () => {
+ let vm;
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ describe('props', () => {
+ describe('with id', () => {
+ const props = {
+ id: 'my-modal',
+ };
+
+ beforeEach(() => {
+ vm = mountComponent(modalComponent, props);
+ });
+
+ it('assigns the id to the modal', () => {
+ expect(vm.$el.id).toBe(props.id);
+ });
+ });
+
+ describe('without id', () => {
+ beforeEach(() => {
+ vm = mountComponent(modalComponent, { });
+ });
+
+ it('does not add an id attribute to the modal', () => {
+ expect(vm.$el.hasAttribute('id')).toBe(false);
+ });
+ });
+
+ describe('with headerTitleText', () => {
+ const props = {
+ headerTitleText: 'my title text',
+ };
+
+ beforeEach(() => {
+ vm = mountComponent(modalComponent, props);
+ });
+
+ it('sets the modal title', () => {
+ const modalTitle = vm.$el.querySelector('.modal-title');
+ expect(modalTitle.innerHTML.trim()).toBe(props.headerTitleText);
+ });
+ });
+
+ describe('with footerPrimaryButtonVariant', () => {
+ const props = {
+ footerPrimaryButtonVariant: 'danger',
+ };
+
+ beforeEach(() => {
+ vm = mountComponent(modalComponent, props);
+ });
+
+ it('sets the primary button class', () => {
+ const primaryButton = vm.$el.querySelector('.modal-footer button:last-of-type');
+ expect(primaryButton).toHaveClass(`btn-${props.footerPrimaryButtonVariant}`);
+ });
+ });
+
+ describe('with footerPrimaryButtonText', () => {
+ const props = {
+ footerPrimaryButtonText: 'my button text',
+ };
+
+ beforeEach(() => {
+ vm = mountComponent(modalComponent, props);
+ });
+
+ it('sets the primary button text', () => {
+ const primaryButton = vm.$el.querySelector('.modal-footer button:last-of-type');
+ expect(primaryButton.innerHTML.trim()).toBe(props.footerPrimaryButtonText);
+ });
+ });
+ });
+
+ it('works with data-toggle="modal"', (done) => {
+ setFixtures(`
+ <button id="modal-button" data-toggle="modal" data-target="#my-modal"></button>
+ <div id="modal-container"></div>
+ `);
+
+ const modalContainer = document.getElementById('modal-container');
+ const modalButton = document.getElementById('modal-button');
+ vm = mountComponent(modalComponent, {
+ id: 'my-modal',
+ }, modalContainer);
+ $(vm.$el).on('shown.bs.modal', () => done());
+
+ modalButton.click();
+ });
+
+ describe('methods', () => {
+ const dummyEvent = 'not really an event';
+
+ beforeEach(() => {
+ vm = mountComponent(modalComponent, { });
+ spyOn(vm, '$emit');
+ });
+
+ describe('emitCancel', () => {
+ it('emits a cancel event', () => {
+ vm.emitCancel(dummyEvent);
+
+ expect(vm.$emit).toHaveBeenCalledWith('cancel', dummyEvent);
+ });
+ });
+
+ describe('emitSubmit', () => {
+ it('emits a submit event', () => {
+ vm.emitSubmit(dummyEvent);
+
+ expect(vm.$emit).toHaveBeenCalledWith('submit', dummyEvent);
+ });
+ });
+ });
+
+ describe('slots', () => {
+ const slotContent = 'this should go into the slot';
+ const modalWithSlot = (slotName) => {
+ let template;
+ if (slotName) {
+ template = `
+ <gl-modal>
+ <template slot="${slotName}">${slotContent}</template>
+ </gl-modal>
+ `;
+ } else {
+ template = `<gl-modal>${slotContent}</gl-modal>`;
+ }
+
+ return Vue.extend({
+ components: {
+ GlModal,
+ },
+ template,
+ });
+ };
+
+ describe('default slot', () => {
+ beforeEach(() => {
+ vm = mountComponent(modalWithSlot());
+ });
+
+ it('sets the modal body', () => {
+ const modalBody = vm.$el.querySelector('.modal-body');
+ expect(modalBody.innerHTML).toBe(slotContent);
+ });
+ });
+
+ describe('header slot', () => {
+ beforeEach(() => {
+ vm = mountComponent(modalWithSlot('header'));
+ });
+
+ it('sets the modal header', () => {
+ const modalHeader = vm.$el.querySelector('.modal-header');
+ expect(modalHeader.innerHTML).toBe(slotContent);
+ });
+ });
+
+ describe('title slot', () => {
+ beforeEach(() => {
+ vm = mountComponent(modalWithSlot('title'));
+ });
+
+ it('sets the modal title', () => {
+ const modalTitle = vm.$el.querySelector('.modal-title');
+ expect(modalTitle.innerHTML).toBe(slotContent);
+ });
+ });
+
+ describe('footer slot', () => {
+ beforeEach(() => {
+ vm = mountComponent(modalWithSlot('footer'));
+ });
+
+ it('sets the modal footer', () => {
+ const modalFooter = vm.$el.querySelector('.modal-footer');
+ expect(modalFooter.innerHTML).toBe(slotContent);
+ });
+ });
+ });
+});
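
Editor's note: taken together, the GlModal spec documents the component's public surface: the id, header and footer props, the cancel and submit events, and the default/header/title/footer slots. A hypothetical parent component consistent with those specs (the confirm-delete wording and handler names are placeholders):

import Vue from 'vue';
import GlModal from '~/vue_shared/components/gl_modal.vue';

export default Vue.extend({
  components: { GlModal },
  methods: {
    onCancel() {
      // dismissed without changes
    },
    onDelete() {
      // perform the destructive action
    },
  },
  template: `
    <gl-modal
      id="confirm-delete-modal"
      header-title-text="Delete this item?"
      footer-primary-button-variant="danger"
      footer-primary-button-text="Delete"
      @cancel="onCancel"
      @submit="onDelete"
    >
      This action cannot be undone.
    </gl-modal>
  `,
});
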
diff --git a/spec/lib/backup/repository_spec.rb b/spec/lib/backup/repository_spec.rb
index 6ee3d531d6e..f7b1a61f4f8 100644
--- a/spec/lib/backup/repository_spec.rb
+++ b/spec/lib/backup/repository_spec.rb
@@ -33,10 +33,22 @@ describe Backup::Repository do
allow(Gitlab::Popen).to receive(:popen).and_return(['error', 1])
end
- it 'shows the appropriate error' do
- described_class.new.restore
+ context 'hashed storage' do
+ it 'shows the appropriate error' do
+ described_class.new.restore
- expect(progress).to have_received(:puts).with("Ignoring error on #{project.full_path} - error")
+ expect(progress).to have_received(:puts).with("Ignoring error on #{project.full_path} (#{project.disk_path}) - error")
+ end
+ end
+
+ context 'legacy storage' do
+ let!(:project) { create(:project, :legacy_storage) }
+
+ it 'shows the appropriate error' do
+ described_class.new.restore
+
+ expect(progress).to have_received(:puts).with("Ignoring error on #{project.full_path} - error")
+ end
end
end
end
diff --git a/spec/lib/banzai/filter/html_entity_filter_spec.rb b/spec/lib/banzai/filter/html_entity_filter_spec.rb
index 91e18d876d5..1d98fc0d5db 100644
--- a/spec/lib/banzai/filter/html_entity_filter_spec.rb
+++ b/spec/lib/banzai/filter/html_entity_filter_spec.rb
@@ -3,17 +3,12 @@ require 'spec_helper'
describe Banzai::Filter::HtmlEntityFilter do
include FilterSpecHelper
- let(:unescaped) { 'foo <strike attr="foo">&&&</strike>' }
- let(:escaped) { 'foo &lt;strike attr=&quot;foo&quot;&gt;&amp;&amp;&amp;&lt;/strike&gt;' }
+ let(:unescaped) { 'foo <strike attr="foo">&&amp;&</strike>' }
+ let(:escaped) { 'foo &lt;strike attr=&quot;foo&quot;&gt;&amp;&amp;amp;&amp;&lt;/strike&gt;' }
it 'converts common entities to their HTML-escaped equivalents' do
output = filter(unescaped)
expect(output).to eq(escaped)
end
-
- it 'does not double-escape' do
- escaped = ERB::Util.html_escape("Merge branch 'blabla' into 'master'")
- expect(filter(escaped)).to eq(escaped)
- end
end
diff --git a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
index 9f2efa05a01..ef52c572898 100644
--- a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
+++ b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
@@ -3,35 +3,86 @@ require 'spec_helper'
describe Banzai::Filter::SyntaxHighlightFilter do
include FilterSpecHelper
+ shared_examples "XSS prevention" do |lang|
+ it "escapes HTML tags" do
+ # This is how a script tag inside a code block is presented to this filter
+ # after Markdown rendering.
+ result = filter(%{<pre lang="#{lang}"><code>&lt;script&gt;alert(1)&lt;/script&gt;</code></pre>})
+
+ expect(result.to_html).not_to include("<script>alert(1)</script>")
+ expect(result.to_html).to include("alert(1)")
+ end
+ end
+
context "when no language is specified" do
it "highlights as plaintext" do
result = filter('<pre><code>def fun end</code></pre>')
+
expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">def fun end</span></code></pre>')
end
+
+ include_examples "XSS prevention", ""
end
context "when a valid language is specified" do
it "highlights as that language" do
result = filter('<pre><code lang="ruby">def fun end</code></pre>')
+
expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight ruby" lang="ruby" v-pre="true"><code><span id="LC1" class="line" lang="ruby"><span class="k">def</span> <span class="nf">fun</span> <span class="k">end</span></span></code></pre>')
end
+
+ include_examples "XSS prevention", "ruby"
end
context "when an invalid language is specified" do
it "highlights as plaintext" do
result = filter('<pre><code lang="gnuplot">This is a test</code></pre>')
+
expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre>')
end
+
+ include_examples "XSS prevention", "gnuplot"
end
- context "when Rouge formatting fails" do
+ context "languages that should be passed through" do
+ %w(math mermaid plantuml).each do |lang|
+ context "when #{lang} is specified" do
+ it "highlights as plaintext but with the correct language attribute and class" do
+ result = filter(%{<pre><code lang="#{lang}">This is a test</code></pre>})
+
+ expect(result.to_html).to eq(%{<pre class="code highlight js-syntax-highlight #{lang}" lang="#{lang}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>})
+ end
+
+ include_examples "XSS prevention", lang
+ end
+ end
+ end
+
+ context "when Rouge lexing fails" do
before do
- allow_any_instance_of(Rouge::Formatter).to receive(:format).and_raise(StandardError)
+ allow_any_instance_of(Rouge::Lexers::Ruby).to receive(:stream_tokens).and_raise(StandardError)
end
it "highlights as plaintext" do
result = filter('<pre><code lang="ruby">This is a test</code></pre>')
- expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight" lang="" v-pre="true"><code>This is a test</code></pre>')
+
+ expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight" lang="" v-pre="true"><code><span id="LC1" class="line" lang="">This is a test</span></code></pre>')
+ end
+
+ include_examples "XSS prevention", "ruby"
+ end
+
+ context "when Rouge lexing fails after a retry" do
+ before do
+ allow_any_instance_of(Rouge::Lexers::PlainText).to receive(:stream_tokens).and_raise(StandardError)
+ end
+
+ it "does not add highlighting classes" do
+ result = filter('<pre><code>This is a test</code></pre>')
+
+ expect(result.to_html).to eq('<pre><code>This is a test</code></pre>')
end
+
+ include_examples "XSS prevention", "ruby"
end
end
diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb
index f668f78c2b8..2a0e19ae796 100644
--- a/spec/lib/gitlab/asciidoc_spec.rb
+++ b/spec/lib/gitlab/asciidoc_spec.rb
@@ -95,6 +95,14 @@ module Gitlab
expect(render(input, context)).to include('<p><code data-math-style="inline" class="code math js-render-math">2+2</code> is 4</p>')
end
end
+
+ context 'outfilesuffix' do
+ it 'defaults to adoc' do
+ output = render("Inter-document reference <<README.adoc#>>", context)
+
+ expect(output).to include("a href=\"README.adoc\"")
+ end
+ end
end
def render(*args)
diff --git a/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb b/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb
index c8df6dd2118..007e93c1db6 100644
--- a/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb
+++ b/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb
@@ -15,10 +15,6 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :m
.to receive(:commits_count=).and_return(nil)
end
- after do
- [Project, MergeRequest, MergeRequestDiff].each(&:reset_column_information)
- end
-
def diffs_to_hashes(diffs)
diffs.as_json(only: Gitlab::Git::Diff::SERIALIZE_KEYS).map(&:with_indifferent_access)
end
diff --git a/spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_spec.rb b/spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_spec.rb
index 2b69e718e08..e99257e3481 100644
--- a/spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_spec.rb
+++ b/spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_spec.rb
@@ -7,10 +7,6 @@ describe Gitlab::BackgroundMigration::PopulateMergeRequestMetricsWithEventsData,
.to receive(:commits_count=).and_return(nil)
end
- after do
- [Project, MergeRequest, MergeRequestDiff].each(&:reset_column_information)
- end
-
describe '#perform' do
let(:mr_with_event) { create(:merge_request) }
let!(:merged_event) { create(:event, :merged, target: mr_with_event) }
diff --git a/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb b/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb
index be45c00dfe6..c8fa252439a 100644
--- a/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb
+++ b/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb
@@ -1,6 +1,11 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::PopulateUntrackedUploads, :sidekiq do
+# This migration is using UploadService, which sets uploads.secret that is only
+# added to the DB schema in 20180129193323. Since the test isn't isolated, we
+# just use the latest schema when testing this migration.
+# Ideally, the test should not use factories nor UploadService, and rely on the
+# `table` helper instead.
+describe Gitlab::BackgroundMigration::PopulateUntrackedUploads, :sidekiq, :migration, schema: 20180129193323 do
include TrackUntrackedUploadsHelpers
subject { described_class.new }
@@ -9,22 +14,16 @@ describe Gitlab::BackgroundMigration::PopulateUntrackedUploads, :sidekiq do
let!(:uploads) { described_class::Upload }
before do
- DatabaseCleaner.clean
- drop_temp_table_if_exists
ensure_temporary_tracking_table_exists
uploads.delete_all
end
- after(:all) do
- drop_temp_table_if_exists
- end
-
context 'with untracked files and tracked files in untracked_files_for_uploads' do
let!(:appearance) { create_or_update_appearance(logo: uploaded_file, header_logo: uploaded_file) }
let!(:user1) { create(:user, :with_avatar) }
let!(:user2) { create(:user, :with_avatar) }
- let!(:project1) { create(:project, :with_avatar) }
- let!(:project2) { create(:project, :with_avatar) }
+ let!(:project1) { create(:project, :legacy_storage, :with_avatar) }
+ let!(:project2) { create(:project, :legacy_storage, :with_avatar) }
before do
UploadService.new(project1, uploaded_file, FileUploader).execute # Markdown upload
@@ -48,7 +47,7 @@ describe Gitlab::BackgroundMigration::PopulateUntrackedUploads, :sidekiq do
it 'adds untracked files to the uploads table' do
expect do
- subject.perform(1, untracked_files_for_uploads.last.id)
+ subject.perform(1, untracked_files_for_uploads.reorder(:id).last.id)
end.to change { uploads.count }.from(4).to(8)
expect(user2.uploads.count).to eq(1)
@@ -117,9 +116,9 @@ describe Gitlab::BackgroundMigration::PopulateUntrackedUploads, :sidekiq do
end
it 'drops the temporary tracking table after processing the batch, if there are no untracked rows left' do
- subject.perform(1, untracked_files_for_uploads.last.id)
+ expect(subject).to receive(:drop_temp_table_if_finished)
- expect(ActiveRecord::Base.connection.table_exists?(:untracked_files_for_uploads)).to be_falsey
+ subject.perform(1, untracked_files_for_uploads.last.id)
end
it 'does not block a whole batch because of one bad path' do
@@ -213,13 +212,13 @@ describe Gitlab::BackgroundMigration::PopulateUntrackedUploads, :sidekiq do
end
context 'for a project avatar file path' do
- let(:model) { create(:project, :with_avatar) }
+ let(:model) { create(:project, :legacy_storage, :with_avatar) }
it_behaves_like 'non_markdown_file'
end
context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do
- let(:model) { create(:project) }
+ let(:model) { create(:project, :legacy_storage) }
before do
# Upload the file
@@ -255,10 +254,6 @@ describe Gitlab::BackgroundMigration::PopulateUntrackedUploads::UntrackedFile do
ensure_temporary_tracking_table_exists
end
- after(:all) do
- drop_temp_table_if_exists
- end
-
describe '#upload_path' do
def assert_upload_path(file_path, expected_upload_path)
untracked_file = create_untracked_file(file_path)
@@ -304,7 +299,7 @@ describe Gitlab::BackgroundMigration::PopulateUntrackedUploads::UntrackedFile do
context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do
it 'returns the file path relative to the project directory in uploads' do
- project = create(:project)
+ project = create(:project, :legacy_storage)
random_hex = SecureRandom.hex
assert_upload_path("/#{project.full_path}/#{random_hex}/Some file.jpg", "#{random_hex}/Some file.jpg")
@@ -357,7 +352,7 @@ describe Gitlab::BackgroundMigration::PopulateUntrackedUploads::UntrackedFile do
context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do
it 'returns FileUploader as a string' do
- project = create(:project)
+ project = create(:project, :legacy_storage)
assert_uploader("/#{project.full_path}/#{SecureRandom.hex}/Some file.jpg", 'FileUploader')
end
@@ -409,7 +404,7 @@ describe Gitlab::BackgroundMigration::PopulateUntrackedUploads::UntrackedFile do
context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do
it 'returns Project as a string' do
- project = create(:project)
+ project = create(:project, :legacy_storage)
assert_model_type("/#{project.full_path}/#{SecureRandom.hex}/Some file.jpg", 'Project')
end
@@ -461,7 +456,7 @@ describe Gitlab::BackgroundMigration::PopulateUntrackedUploads::UntrackedFile do
context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do
it 'returns the ID as a string' do
- project = create(:project)
+ project = create(:project, :legacy_storage)
assert_model_id("/#{project.full_path}/#{SecureRandom.hex}/Some file.jpg", project.id)
end
@@ -483,7 +478,7 @@ describe Gitlab::BackgroundMigration::PopulateUntrackedUploads::UntrackedFile do
end
context 'for a project avatar file path' do
- let(:project) { create(:project, avatar: uploaded_file) }
+ let(:project) { create(:project, :legacy_storage, avatar: uploaded_file) }
let(:untracked_file) { described_class.create!(path: project.uploads.first.path) }
it 'returns the file size' do
@@ -496,7 +491,7 @@ describe Gitlab::BackgroundMigration::PopulateUntrackedUploads::UntrackedFile do
end
context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do
- let(:project) { create(:project) }
+ let(:project) { create(:project, :legacy_storage) }
let(:untracked_file) { create_untracked_file("/#{project.full_path}/#{project.uploads.first.path}") }
before do
diff --git a/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
index 370c2490b97..ca77e64ae40 100644
--- a/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
+++ b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
@@ -1,21 +1,11 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do
+describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq, :migration, schema: 20180129193323 do
include TrackUntrackedUploadsHelpers
include MigrationsHelpers
let!(:untracked_files_for_uploads) { described_class::UntrackedFile }
- before do
- DatabaseCleaner.clean
-
- drop_temp_table_if_exists
- end
-
- after do
- drop_temp_table_if_exists
- end
-
around do |example|
# Especially important so the follow-up migration does not get run
Sidekiq::Testing.fake! do
@@ -23,61 +13,11 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do
end
end
- # E.g. The installation is in use at the time of migration, and someone has
- # just uploaded a file
- shared_examples 'does not add files in /uploads/tmp' do
- let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
-
- before do
- FileUtils.mkdir(File.dirname(tmp_file))
- FileUtils.touch(tmp_file)
- end
-
- after do
- FileUtils.rm(tmp_file)
- end
-
- it 'does not add files from /uploads/tmp' do
- described_class.new.perform
-
- expect(untracked_files_for_uploads.count).to eq(5)
- end
- end
-
- it 'ensures the untracked_files_for_uploads table exists' do
- expect do
- described_class.new.perform
- end.to change { ActiveRecord::Base.connection.table_exists?(:untracked_files_for_uploads) }.from(false).to(true)
- end
-
- it 'has a path field long enough for really long paths' do
- described_class.new.perform
-
- component = 'a' * 255
-
- long_path = [
- 'uploads',
- component, # project.full_path
- component # filename
- ].flatten.join('/')
-
- record = untracked_files_for_uploads.create!(path: long_path)
- expect(record.reload.path.size).to eq(519)
- end
-
- context "test bulk insert with ON CONFLICT DO NOTHING or IGNORE" do
- around do |example|
- # If this is CI, we use Postgres 9.2 so this whole context should be
- # skipped since we're unable to use ON CONFLICT DO NOTHING or IGNORE.
- if described_class.new.send(:can_bulk_insert_and_ignore_duplicates?)
- example.run
- end
- end
-
+ shared_examples 'prepares the untracked_files_for_uploads table' do
context 'when files were uploaded before and after hashed storage was enabled' do
let!(:appearance) { create_or_update_appearance(logo: uploaded_file, header_logo: uploaded_file) }
let!(:user) { create(:user, :with_avatar) }
- let!(:project1) { create(:project, :with_avatar) }
+ let!(:project1) { create(:project, :with_avatar, :legacy_storage) }
let(:project2) { create(:project) } # instantiate after enabling hashed_storage
before do
@@ -90,6 +30,21 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do
UploadService.new(project2, uploaded_file, FileUploader).execute
end
+ it 'has a path field long enough for really long paths' do
+ described_class.new.perform
+
+ component = 'a' * 255
+
+ long_path = [
+ 'uploads',
+ component, # project.full_path
+ component # filename
+ ].flatten.join('/')
+
+ record = untracked_files_for_uploads.create!(path: long_path)
+ expect(record.reload.path.size).to eq(519)
+ end
+
it 'adds unhashed files to the untracked_files_for_uploads table' do
described_class.new.perform
@@ -113,7 +68,8 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do
it 'correctly schedules the follow-up background migration jobs' do
described_class.new.perform
- expect(described_class::FOLLOW_UP_MIGRATION).to be_scheduled_migration(1, 5)
+ ids = described_class::UntrackedFile.all.order(:id).pluck(:id)
+ expect(described_class::FOLLOW_UP_MIGRATION).to be_scheduled_migration(ids.first, ids.last)
expect(BackgroundMigrationWorker.jobs.size).to eq(1)
end
@@ -130,91 +86,68 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do
end
end
+ # E.g. The installation is in use at the time of migration, and someone has
+ # just uploaded a file
context 'when there are files in /uploads/tmp' do
- it_behaves_like 'does not add files in /uploads/tmp'
- end
- end
- end
-
- context 'test bulk insert without ON CONFLICT DO NOTHING or IGNORE' do
- before do
- # If this is CI, we use Postgres 9.2 so this stub has no effect.
- #
- # If this is being run on Postgres 9.5+ or MySQL, then this stub allows us
- # to test the bulk insert functionality without ON CONFLICT DO NOTHING or
- # IGNORE.
- allow_any_instance_of(described_class).to receive(:postgresql_pre_9_5?).and_return(true)
- end
-
- context 'when files were uploaded before and after hashed storage was enabled' do
- let!(:appearance) { create_or_update_appearance(logo: uploaded_file, header_logo: uploaded_file) }
- let!(:user) { create(:user, :with_avatar) }
- let!(:project1) { create(:project, :with_avatar) }
- let(:project2) { create(:project) } # instantiate after enabling hashed_storage
-
- before do
- # Markdown upload before enabling hashed_storage
- UploadService.new(project1, uploaded_file, FileUploader).execute
+ let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
- stub_application_setting(hashed_storage_enabled: true)
-
- # Markdown upload after enabling hashed_storage
- UploadService.new(project2, uploaded_file, FileUploader).execute
- end
-
- it 'adds unhashed files to the untracked_files_for_uploads table' do
- described_class.new.perform
-
- expect(untracked_files_for_uploads.count).to eq(5)
- end
-
- it 'adds files with paths relative to CarrierWave.root' do
- described_class.new.perform
- untracked_files_for_uploads.all.each do |file|
- expect(file.path.start_with?('uploads/')).to be_truthy
+ before do
+ FileUtils.mkdir(File.dirname(tmp_file))
+ FileUtils.touch(tmp_file)
end
- end
-
- it 'does not add hashed files to the untracked_files_for_uploads table' do
- described_class.new.perform
- hashed_file_path = project2.uploads.where(uploader: 'FileUploader').first.path
- expect(untracked_files_for_uploads.where("path like '%#{hashed_file_path}%'").exists?).to be_falsey
- end
+ after do
+ FileUtils.rm(tmp_file)
+ end
- it 'correctly schedules the follow-up background migration jobs' do
- described_class.new.perform
+ it 'does not add files from /uploads/tmp' do
+ described_class.new.perform
- expect(described_class::FOLLOW_UP_MIGRATION).to be_scheduled_migration(1, 5)
- expect(BackgroundMigrationWorker.jobs.size).to eq(1)
+ expect(untracked_files_for_uploads.count).to eq(5)
+ end
end
- # E.g. from a previous failed run of this background migration
- context 'when there is existing data in untracked_files_for_uploads' do
- before do
- described_class.new.perform
- end
+ context 'when the last batch size exactly matches the max batch size' do
+ it 'does not raise error' do
+ stub_const("#{described_class}::FIND_BATCH_SIZE", 5)
- it 'does not error or produce duplicates of existing data' do
expect do
described_class.new.perform
- end.not_to change { untracked_files_for_uploads.count }.from(5)
+ end.not_to raise_error
+
+ expect(untracked_files_for_uploads.count).to eq(5)
end
end
+ end
+ end
- context 'when there are files in /uploads/tmp' do
- it_behaves_like 'does not add files in /uploads/tmp'
- end
+ # If running on Postgres 9.2 (like on CI), this whole context is skipped
+ # since we're unable to use ON CONFLICT DO NOTHING or IGNORE.
+ context "test bulk insert with ON CONFLICT DO NOTHING or IGNORE", if: described_class.new.send(:can_bulk_insert_and_ignore_duplicates?) do
+ it_behaves_like 'prepares the untracked_files_for_uploads table'
+ end
+
+ # If running on Postgres 9.2 (like on CI), the stubbed method has no effect.
+ #
+ # If running on Postgres 9.5+ or MySQL, then this context effectively tests
+ # the bulk insert functionality without ON CONFLICT DO NOTHING or IGNORE.
+ context 'test bulk insert without ON CONFLICT DO NOTHING or IGNORE' do
+ before do
+ allow_any_instance_of(described_class).to receive(:postgresql_pre_9_5?).and_return(true)
end
+
+ it_behaves_like 'prepares the untracked_files_for_uploads table'
end
# Very new or lightly-used installations that are running this migration
# may not have an upload directory because they have no uploads.
context 'when no files were ever uploaded' do
- it 'does not add to the untracked_files_for_uploads table (and does not raise error)' do
- described_class.new.perform
+ it 'deletes the `untracked_files_for_uploads` table (and does not raise error)' do
+ background_migration = described_class.new
+
+ expect(background_migration).to receive(:drop_temp_table)
- expect(untracked_files_for_uploads.count).to eq(0)
+ background_migration.perform
end
end
end
diff --git a/spec/lib/gitlab/bare_repository_import/importer_spec.rb b/spec/lib/gitlab/bare_repository_import/importer_spec.rb
index f302e412a6e..eb4b9d8b12f 100644
--- a/spec/lib/gitlab/bare_repository_import/importer_spec.rb
+++ b/spec/lib/gitlab/bare_repository_import/importer_spec.rb
@@ -8,11 +8,15 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do
subject(:importer) { described_class.new(admin, bare_repository) }
before do
+ @rainbow = Rainbow.enabled
+ Rainbow.enabled = false
+
allow(described_class).to receive(:log)
end
after do
FileUtils.rm_rf(base_dir)
+ Rainbow.enabled = @rainbow
end
shared_examples 'importing a repository' do
@@ -148,7 +152,7 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do
# This is a quick way to get a valid repository instead of copying an
# existing one. Since it's not persisted, the importer will try to
# create the project.
- project = build(:project, :repository)
+ project = build(:project, :legacy_storage, :repository)
original_commit_count = project.repository.commit_count
bare_repo = Gitlab::BareRepositoryImport::Repository.new(project.repository_storage_path, project.repository.path)
diff --git a/spec/lib/gitlab/checks/change_access_spec.rb b/spec/lib/gitlab/checks/change_access_spec.rb
index c2bca816aae..475b5c5cfb2 100644
--- a/spec/lib/gitlab/checks/change_access_spec.rb
+++ b/spec/lib/gitlab/checks/change_access_spec.rb
@@ -177,5 +177,44 @@ describe Gitlab::Checks::ChangeAccess do
expect { subject.exec }.not_to raise_error
end
end
+
+ context 'LFS file lock check' do
+ let(:owner) { create(:user) }
+ let!(:lock) { create(:lfs_file_lock, user: owner, project: project, path: 'README') }
+
+ before do
+ allow(project.repository).to receive(:new_commits).and_return(
+ project.repository.commits_between('be93687618e4b132087f430a4d8fc3a609c9b77c', '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51')
+ )
+ end
+
+ context 'with LFS not enabled' do
+ it 'skips the validation' do
+ expect_any_instance_of(described_class).not_to receive(:lfs_file_locks_validation)
+
+ subject.exec
+ end
+ end
+
+ context 'with LFS enabled' do
+ before do
+ allow(project).to receive(:lfs_enabled?).and_return(true)
+ end
+
+ context 'when change is sent by a different user' do
+ it 'raises an error if the user is not allowed to update the file' do
+ expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "The path 'README' is locked in Git LFS by #{lock.user.name}")
+ end
+ end
+
+ context 'when change is sent by the author of the lock' do
+ let(:user) { owner }
+
+ it "doesn't raise any error" do
+ expect { subject.exec }.not_to raise_error
+ end
+ end
+ end
+ end
end
end
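
The new 'LFS file lock check' examples above rely on an :lfs_file_lock factory (spec/factories/lfs_file_locks.rb in the diffstat, not shown here). Given the attributes whitelisted for LfsFileLock in safe_model_attributes.yml further down, a plausible minimal factory would be the following sketch (hypothetical contents, not the actual file):

FactoryBot.define do
  factory :lfs_file_lock do
    user
    project
    path 'README.md' # hypothetical default; the spec above passes its own path
  end
end
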
diff --git a/spec/lib/gitlab/ci/config/loader_spec.rb b/spec/lib/gitlab/ci/config/loader_spec.rb
index 2d44b1f60f1..590fc8904c1 100644
--- a/spec/lib/gitlab/ci/config/loader_spec.rb
+++ b/spec/lib/gitlab/ci/config/loader_spec.rb
@@ -38,6 +38,16 @@ describe Gitlab::Ci::Config::Loader do
end
end
+ context 'when there is an unknown alias' do
+ let(:yml) { 'steps: *bad_alias' }
+
+ describe '#initialize' do
+ it 'raises FormatError' do
+ expect { loader }.to raise_error(Gitlab::Ci::Config::Loader::FormatError, 'Unknown alias: bad_alias')
+ end
+ end
+ end
+
context 'when yaml config is empty' do
let(:yml) { '' }
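
For reference, the 'Unknown alias' message in the new example comes from Psych itself: resolving an alias with no matching anchor raises Psych::BadAlias. A minimal, self-contained sketch of how a loader can surface that as its own FormatError (illustrative only, not the actual Gitlab::Ci::Config::Loader code):

require 'yaml'

FormatError = Class.new(StandardError)

# Re-raise Psych's alias error as a domain-specific error, which is the
# behaviour the new spec example pins down.
def load_ci_yaml(content)
  YAML.load(content)
rescue Psych::BadAlias => e
  raise FormatError, e.message
end

begin
  load_ci_yaml('steps: *bad_alias')
rescue FormatError => e
  puts e.message # => "Unknown alias: bad_alias"
end
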
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index 98880fe9f28..f83f932e61e 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -1394,11 +1394,15 @@ EOT
describe "Error handling" do
it "fails to parse YAML" do
- expect {Gitlab::Ci::YamlProcessor.new("invalid: yaml: test")}.to raise_error(Psych::SyntaxError)
+ expect do
+ Gitlab::Ci::YamlProcessor.new("invalid: yaml: test")
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError)
end
it "indicates that object is invalid" do
- expect {Gitlab::Ci::YamlProcessor.new("invalid_yaml")}.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError)
+ expect do
+ Gitlab::Ci::YamlProcessor.new("invalid_yaml")
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError)
end
it "returns errors if tags parameter is invalid" do
@@ -1688,37 +1692,36 @@ EOT
end
describe "#validation_message" do
+ subject { Gitlab::Ci::YamlProcessor.validation_message(content) }
+
context "when the YAML could not be parsed" do
- it "returns an error about invalid configutaion" do
- content = YAML.dump("invalid: yaml: test")
+ let(:content) { YAML.dump("invalid: yaml: test") }
- expect(Gitlab::Ci::YamlProcessor.validation_message(content))
- .to eq "Invalid configuration format"
- end
+ it { is_expected.to eq "Invalid configuration format" }
end
context "when the tags parameter is invalid" do
- it "returns an error about invalid tags" do
- content = YAML.dump({ rspec: { script: "test", tags: "mysql" } })
+ let(:content) { YAML.dump({ rspec: { script: "test", tags: "mysql" } }) }
- expect(Gitlab::Ci::YamlProcessor.validation_message(content))
- .to eq "jobs:rspec tags should be an array of strings"
- end
+ it { is_expected.to eq "jobs:rspec tags should be an array of strings" }
end
context "when YAML content is empty" do
- it "returns an error about missing content" do
- expect(Gitlab::Ci::YamlProcessor.validation_message(''))
- .to eq "Please provide content of .gitlab-ci.yml"
- end
+ let(:content) { '' }
+
+ it { is_expected.to eq "Please provide content of .gitlab-ci.yml" }
+ end
+
+ context 'when the YAML contains an unknown alias' do
+ let(:content) { 'steps: *bad_alias' }
+
+ it { is_expected.to eq "Unknown alias: bad_alias" }
end
context "when the YAML is valid" do
- it "does not return any errors" do
- content = File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml'))
+ let(:content) { File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')) }
- expect(Gitlab::Ci::YamlProcessor.validation_message(content)).to be_nil
- end
+ it { is_expected.to be_nil }
end
end
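
The #validation_message refactor above is a common RSpec pattern: hoist the call under test into a named subject, let each context override only let(:content), and collapse the assertions into one-liners. A generic, runnable sketch of the pattern (not tied to the YamlProcessor API):

RSpec.describe 'validation message pattern' do
  subject { content.to_s.strip.empty? ? 'Please provide content' : nil }

  context 'when content is empty' do
    let(:content) { '' }

    it { is_expected.to eq 'Please provide content' }
  end

  context 'when content is present' do
    let(:content) { 'rspec: { script: test }' }

    it { is_expected.to be_nil }
  end
end
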
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
index f31475dbd71..b411aaa19da 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
@@ -94,7 +94,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :
describe '#move_repositories' do
let(:namespace) { create(:group, name: 'hello-group') }
it 'moves a project for a namespace' do
- create(:project, :repository, namespace: namespace, path: 'hello-project')
+ create(:project, :repository, :legacy_storage, namespace: namespace, path: 'hello-project')
expected_path = File.join(TestEnv.repos_path, 'bye-group', 'hello-project.git')
subject.move_repositories(namespace, 'hello-group', 'bye-group')
@@ -104,7 +104,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :
it 'moves a namespace in a subdirectory correctly' do
child_namespace = create(:group, name: 'sub-group', parent: namespace)
- create(:project, :repository, namespace: child_namespace, path: 'hello-project')
+ create(:project, :repository, :legacy_storage, namespace: child_namespace, path: 'hello-project')
expected_path = File.join(TestEnv.repos_path, 'hello-group', 'renamed-sub-group', 'hello-project.git')
@@ -115,7 +115,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :
it 'moves a parent namespace with subdirectories' do
child_namespace = create(:group, name: 'sub-group', parent: namespace)
- create(:project, :repository, namespace: child_namespace, path: 'hello-project')
+ create(:project, :repository, :legacy_storage, namespace: child_namespace, path: 'hello-project')
expected_path = File.join(TestEnv.repos_path, 'renamed-group', 'sub-group', 'hello-project.git')
subject.move_repositories(child_namespace, 'hello-group', 'renamed-group')
@@ -166,7 +166,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :
describe '#rename_namespace_dependencies' do
it "moves the the repository for a project in the namespace" do
- create(:project, :repository, namespace: namespace, path: "the-path-project")
+ create(:project, :repository, :legacy_storage, namespace: namespace, path: "the-path-project")
expected_repo = File.join(TestEnv.repos_path, "the-path0", "the-path-project.git")
subject.rename_namespace_dependencies(namespace, 'the-path', 'the-path0')
@@ -187,7 +187,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :
end
it 'invalidates the markdown cache of related projects' do
- project = create(:project, namespace: namespace, path: "the-path-project")
+ project = create(:project, :legacy_storage, namespace: namespace, path: "the-path-project")
expect(subject).to receive(:remove_cached_html_for_projects).with([project.id])
@@ -243,7 +243,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :
describe '#revert_renames', :redis do
it 'renames the routes back to the previous values' do
- project = create(:project, :repository, path: 'a-project', namespace: namespace)
+ project = create(:project, :legacy_storage, :repository, path: 'a-project', namespace: namespace)
subject.rename_namespace(namespace)
expect(subject).to receive(:perform_rename)
@@ -261,7 +261,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :
end
it 'moves the repositories back to their original place' do
- project = create(:project, :repository, path: 'a-project', namespace: namespace)
+ project = create(:project, :repository, :legacy_storage, path: 'a-project', namespace: namespace)
project.create_repository
subject.rename_namespace(namespace)
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
index 0958144643b..b4896d69077 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
@@ -5,6 +5,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :de
let(:subject) { described_class.new(['the-path'], migration) }
let(:project) do
create(:project,
+ :legacy_storage,
path: 'the-path',
namespace: create(:namespace, path: 'known-parent' ))
end
@@ -17,7 +18,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :de
describe '#projects_for_paths' do
it 'searches using nested paths' do
namespace = create(:namespace, path: 'hello')
- project = create(:project, path: 'THE-path', namespace: namespace)
+ project = create(:project, :legacy_storage, path: 'THE-path', namespace: namespace)
result_ids = described_class.new(['Hello/the-path'], migration)
.projects_for_paths.map(&:id)
@@ -26,8 +27,8 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :de
end
it 'includes the correct projects' do
- project = create(:project, path: 'THE-path')
- _other_project = create(:project)
+ project = create(:project, :legacy_storage, path: 'THE-path')
+ _other_project = create(:project, :legacy_storage)
result_ids = subject.projects_for_paths.map(&:id)
@@ -36,7 +37,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :de
end
describe '#rename_projects' do
- let!(:projects) { create_list(:project, 2, path: 'the-path') }
+ let!(:projects) { create_list(:project, 2, :legacy_storage, path: 'the-path') }
it 'renames each project' do
expect(subject).to receive(:rename_project).twice
@@ -120,7 +121,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :de
describe '#move_repository' do
let(:known_parent) { create(:namespace, path: 'known-parent') }
- let(:project) { create(:project, :repository, path: 'the-path', namespace: known_parent) }
+ let(:project) { create(:project, :repository, :legacy_storage, path: 'the-path', namespace: known_parent) }
it 'moves the repository for a project' do
expected_path = File.join(TestEnv.repos_path, 'known-parent', 'new-repo.git')
diff --git a/spec/lib/gitlab/email/attachment_uploader_spec.rb b/spec/lib/gitlab/email/attachment_uploader_spec.rb
index f61dbc67ad1..45c690842bc 100644
--- a/spec/lib/gitlab/email/attachment_uploader_spec.rb
+++ b/spec/lib/gitlab/email/attachment_uploader_spec.rb
@@ -2,7 +2,7 @@ require "spec_helper"
describe Gitlab::Email::AttachmentUploader do
describe "#execute" do
- let(:project) { build(:project) }
+ let(:project) { create(:project) }
let(:message_raw) { fixture_file("emails/attachment.eml") }
let(:message) { Mail::Message.new(message_raw) }
diff --git a/spec/lib/gitlab/encoding_helper_spec.rb b/spec/lib/gitlab/encoding_helper_spec.rb
index 4e9367323cb..83d431a7458 100644
--- a/spec/lib/gitlab/encoding_helper_spec.rb
+++ b/spec/lib/gitlab/encoding_helper_spec.rb
@@ -24,6 +24,11 @@ describe Gitlab::EncodingHelper do
'removes invalid bytes from ASCII-8bit encoded multibyte string. This can occur when a git diff match line truncates in the middle of a multibyte character. This occurs after the second word in this example. The test string is as short as we can get while still triggering the error condition when not looking at `detect[:confidence]`.',
"mu ns\xC3\n Lorem ipsum dolor sit amet, consectetur adipisicing ut\xC3\xA0y\xC3\xB9abcd\xC3\xB9efg kia elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non p\n {: .normal_pn}\n \n-Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in\n# *Lorem ipsum\xC3\xB9l\xC3\xB9l\xC3\xA0 dolor\xC3\xB9k\xC3\xB9 sit\xC3\xA8b\xC3\xA8 N\xC3\xA8 amet b\xC3\xA0d\xC3\xAC*\n+# *consectetur\xC3\xB9l\xC3\xB9l\xC3\xA0 adipisicing\xC3\xB9k\xC3\xB9 elit\xC3\xA8b\xC3\xA8 N\xC3\xA8 sed do\xC3\xA0d\xC3\xAC*{: .italic .smcaps}\n \n \xEF\x9B\xA1 eiusmod tempor incididunt, ut\xC3\xAAn\xC3\xB9 labore et dolore. Tw\xC4\x83nj\xC3\xAC magna aliqua. Ut enim ad minim veniam\n {: .normal}\n@@ -9,5 +9,5 @@ quis nostrud\xC3\xAAt\xC3\xB9 exercitiation ullamco laboris m\xC3\xB9s\xC3\xB9k\xC3\xB9abc\xC3\xB9 nisi ".force_encoding('ASCII-8BIT'),
"mu ns\n Lorem ipsum dolor sit amet, consectetur adipisicing ut\xC3\xA0y\xC3\xB9abcd\xC3\xB9efg kia elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non p\n {: .normal_pn}\n \n-Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in\n# *Lorem ipsum\xC3\xB9l\xC3\xB9l\xC3\xA0 dolor\xC3\xB9k\xC3\xB9 sit\xC3\xA8b\xC3\xA8 N\xC3\xA8 amet b\xC3\xA0d\xC3\xAC*\n+# *consectetur\xC3\xB9l\xC3\xB9l\xC3\xA0 adipisicing\xC3\xB9k\xC3\xB9 elit\xC3\xA8b\xC3\xA8 N\xC3\xA8 sed do\xC3\xA0d\xC3\xAC*{: .italic .smcaps}\n \n \xEF\x9B\xA1 eiusmod tempor incididunt, ut\xC3\xAAn\xC3\xB9 labore et dolore. Tw\xC4\x83nj\xC3\xAC magna aliqua. Ut enim ad minim veniam\n {: .normal}\n@@ -9,5 +9,5 @@ quis nostrud\xC3\xAAt\xC3\xB9 exercitiation ullamco laboris m\xC3\xB9s\xC3\xB9k\xC3\xB9abc\xC3\xB9 nisi "
+ ],
+ [
+ 'string with detected encoding that is not supported in Ruby',
+ "\xFFe,i\xFF,\xB8oi,'\xB8,\xFF,-",
+ "--broken encoding: IBM420_ltr"
]
].each do |description, test_string, xpect|
it description do
diff --git a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
index 326ed2f2ecf..13df8531b63 100644
--- a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
+++ b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
@@ -39,8 +39,8 @@ describe Gitlab::Gfm::UploadsRewriter do
it 'copies files' do
expect(new_files).to all(exist)
expect(old_paths).not_to match_array new_paths
- expect(old_paths).to all(include(old_project.full_path))
- expect(new_paths).to all(include(new_project.full_path))
+ expect(old_paths).to all(include(old_project.disk_path))
+ expect(new_paths).to all(include(new_project.disk_path))
end
it 'does not remove old files' do
diff --git a/spec/lib/gitlab/git/blob_spec.rb b/spec/lib/gitlab/git/blob_spec.rb
index 8ac960133c5..59e9e1cc94c 100644
--- a/spec/lib/gitlab/git/blob_spec.rb
+++ b/spec/lib/gitlab/git/blob_spec.rb
@@ -178,67 +178,77 @@ describe Gitlab::Git::Blob, seed_helper: true do
end
describe '.batch' do
- let(:blob_references) do
- [
- [SeedRepo::Commit::ID, "files/ruby/popen.rb"],
- [SeedRepo::Commit::ID, 'six']
- ]
- end
+ shared_examples 'loading blobs in batch' do
+ let(:blob_references) do
+ [
+ [SeedRepo::Commit::ID, "files/ruby/popen.rb"],
+ [SeedRepo::Commit::ID, 'six']
+ ]
+ end
- subject { described_class.batch(repository, blob_references) }
+ subject { described_class.batch(repository, blob_references) }
- it { expect(subject.size).to eq(blob_references.size) }
+ it { expect(subject.size).to eq(blob_references.size) }
- context 'first blob' do
- let(:blob) { subject[0] }
+ context 'first blob' do
+ let(:blob) { subject[0] }
- it { expect(blob.id).to eq(SeedRepo::RubyBlob::ID) }
- it { expect(blob.name).to eq(SeedRepo::RubyBlob::NAME) }
- it { expect(blob.path).to eq("files/ruby/popen.rb") }
- it { expect(blob.commit_id).to eq(SeedRepo::Commit::ID) }
- it { expect(blob.data[0..10]).to eq(SeedRepo::RubyBlob::CONTENT[0..10]) }
- it { expect(blob.size).to eq(669) }
- it { expect(blob.mode).to eq("100644") }
- end
+ it { expect(blob.id).to eq(SeedRepo::RubyBlob::ID) }
+ it { expect(blob.name).to eq(SeedRepo::RubyBlob::NAME) }
+ it { expect(blob.path).to eq("files/ruby/popen.rb") }
+ it { expect(blob.commit_id).to eq(SeedRepo::Commit::ID) }
+ it { expect(blob.data[0..10]).to eq(SeedRepo::RubyBlob::CONTENT[0..10]) }
+ it { expect(blob.size).to eq(669) }
+ it { expect(blob.mode).to eq("100644") }
+ end
- context 'second blob' do
- let(:blob) { subject[1] }
+ context 'second blob' do
+ let(:blob) { subject[1] }
- it { expect(blob.id).to eq('409f37c4f05865e4fb208c771485f211a22c4c2d') }
- it { expect(blob.data).to eq('') }
- it 'does not mark the blob as binary' do
- expect(blob).not_to be_binary
+ it { expect(blob.id).to eq('409f37c4f05865e4fb208c771485f211a22c4c2d') }
+ it { expect(blob.data).to eq('') }
+ it 'does not mark the blob as binary' do
+ expect(blob).not_to be_binary
+ end
end
- end
- context 'limiting' do
- subject { described_class.batch(repository, blob_references, blob_size_limit: blob_size_limit) }
+ context 'limiting' do
+ subject { described_class.batch(repository, blob_references, blob_size_limit: blob_size_limit) }
- context 'positive' do
- let(:blob_size_limit) { 10 }
+ context 'positive' do
+ let(:blob_size_limit) { 10 }
- it { expect(subject.first.data.size).to eq(10) }
- end
+ it { expect(subject.first.data.size).to eq(10) }
+ end
- context 'zero' do
- let(:blob_size_limit) { 0 }
+ context 'zero' do
+ let(:blob_size_limit) { 0 }
- it 'only loads the metadata' do
- expect(subject.first.size).not_to be(0)
- expect(subject.first.data).to eq('')
+ it 'only loads the metadata' do
+ expect(subject.first.size).not_to be(0)
+ expect(subject.first.data).to eq('')
+ end
end
- end
- context 'negative' do
- let(:blob_size_limit) { -1 }
+ context 'negative' do
+ let(:blob_size_limit) { -1 }
- it 'ignores MAX_DATA_DISPLAY_SIZE' do
- stub_const('Gitlab::Git::Blob::MAX_DATA_DISPLAY_SIZE', 100)
+ it 'ignores MAX_DATA_DISPLAY_SIZE' do
+ stub_const('Gitlab::Git::Blob::MAX_DATA_DISPLAY_SIZE', 100)
- expect(subject.first.data.size).to eq(669)
+ expect(subject.first.data.size).to eq(669)
+ end
end
end
end
+
+ context 'when Gitaly list_blobs_by_sha_path feature is enabled' do
+ it_behaves_like 'loading blobs in batch'
+ end
+
+ context 'when Gitaly list_blobs_by_sha_path feature is disabled', :disable_gitaly do
+ it_behaves_like 'loading blobs in batch'
+ end
end
describe '.batch_lfs_pointers' do
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index ec1c7a96f92..edcf8889c27 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -600,12 +600,16 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
describe "#refs_hash" do
- let(:refs) { repository.refs_hash }
+ subject { repository.refs_hash }
it "should have as many entries as branches and tags" do
expected_refs = SeedRepo::Repo::BRANCHES + SeedRepo::Repo::TAGS
# We flatten in case a commit is pointed at by more than one branch and/or tag
- expect(refs.values.flatten.size).to eq(expected_refs.size)
+ expect(subject.values.flatten.size).to eq(expected_refs.size)
+ end
+
+ it 'has valid commit ids as keys' do
+ expect(subject.keys).to all( match(Commit::COMMIT_SHA_PATTERN) )
end
end
diff --git a/spec/lib/gitlab/git/wiki_spec.rb b/spec/lib/gitlab/git/wiki_spec.rb
index bd8dbf07fa7..761f7732036 100644
--- a/spec/lib/gitlab/git/wiki_spec.rb
+++ b/spec/lib/gitlab/git/wiki_spec.rb
@@ -3,34 +3,38 @@ require 'spec_helper'
describe Gitlab::Git::Wiki do
let(:project) { create(:project) }
let(:user) { project.owner }
- let(:wiki) { ProjectWiki.new(project, user) }
- let(:gollum_wiki) { wiki.wiki }
+ let(:project_wiki) { ProjectWiki.new(project, user) }
+ subject { project_wiki.wiki }
# Remove the skip_gitaly_mock flag from gitaly_find_page when
- # https://gitlab.com/gitlab-org/gitaly/merge_requests/539 gets merged
+ # https://gitlab.com/gitlab-org/gitlab-ce/issues/42039 is solved
describe '#page', :skip_gitaly_mock do
- it 'returns the right page' do
+ before do
create_page('page1', 'content')
- create_page('foo/page1', 'content')
-
- expect(gollum_wiki.page(title: 'page1', dir: '').url_path).to eq 'page1'
- expect(gollum_wiki.page(title: 'page1', dir: 'foo').url_path).to eq 'foo/page1'
+ create_page('foo/page1', 'content foo/page1')
+ end
+ after do
destroy_page('page1')
destroy_page('page1', 'foo')
end
+
+ it 'returns the right page' do
+ expect(subject.page(title: 'page1', dir: '').url_path).to eq 'page1'
+ expect(subject.page(title: 'page1', dir: 'foo').url_path).to eq 'foo/page1'
+ end
end
def create_page(name, content)
- gollum_wiki.write_page(name, :markdown, content, commit_details)
+ subject.write_page(name, :markdown, content, commit_details(name))
end
- def commit_details
- Gitlab::Git::Wiki::CommitDetails.new(user.name, user.email, "test commit")
+ def commit_details(name)
+ Gitlab::Git::Wiki::CommitDetails.new(user.name, user.email, "created page #{name}")
end
def destroy_page(title, dir = '')
- page = gollum_wiki.page(title: title, dir: dir)
- wiki.delete_page(page, "test commit")
+ page = subject.page(title: title, dir: dir)
+ project_wiki.delete_page(page, "test commit")
end
end
diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb
index 3c3697e7aa9..19d3f55501e 100644
--- a/spec/lib/gitlab/git_access_spec.rb
+++ b/spec/lib/gitlab/git_access_spec.rb
@@ -18,8 +18,9 @@ describe Gitlab::GitAccess do
redirected_path: redirected_path)
end
- let(:push_access_check) { access.check('git-receive-pack', '_any') }
- let(:pull_access_check) { access.check('git-upload-pack', '_any') }
+ let(:changes) { '_any' }
+ let(:push_access_check) { access.check('git-receive-pack', changes) }
+ let(:pull_access_check) { access.check('git-upload-pack', changes) }
describe '#check with single protocols allowed' do
def disable_protocol(protocol)
@@ -646,6 +647,20 @@ describe Gitlab::GitAccess do
end
end
+ describe 'check LFS integrity' do
+ let(:changes) { ['6f6d7e7ed 570e7b2ab refs/heads/master', '6f6d7e7ed 570e7b2ab refs/heads/feature'] }
+
+ before do
+ project.add_developer(user)
+ end
+
+ it 'checks LFS integrity only for the first change' do
+ expect_any_instance_of(Gitlab::Checks::LfsIntegrity).to receive(:objects_missing?).exactly(1).times
+
+ push_access_check
+ end
+ end
+
describe '#check_push_access!' do
before do
merge_into_protected_branch
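
The new 'check LFS integrity' example asserts that the check runs once per push rather than once per ref: missing LFS objects are a property of the pushed pack, so inspecting the first change line is enough. A rough sketch of that shape (an assumption about intent; checker and its method are hypothetical stand-ins, not the real GitAccess code):

# Changes arrive as one "oldrev newrev refname" line per ref.
def check_lfs_integrity!(changes, checker)
  first_change = Array(changes).first
  return unless first_change

  raise 'LFS objects are missing' if checker.objects_missing?(first_change)
end
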
diff --git a/spec/lib/gitlab/gitaly_client/blobs_stitcher_spec.rb b/spec/lib/gitlab/gitaly_client/blobs_stitcher_spec.rb
new file mode 100644
index 00000000000..9db710e759e
--- /dev/null
+++ b/spec/lib/gitlab/gitaly_client/blobs_stitcher_spec.rb
@@ -0,0 +1,36 @@
+require 'spec_helper'
+
+describe Gitlab::GitalyClient::BlobsStitcher do
+ describe 'enumeration' do
+ it 'combines segregated blob messages together' do
+ messages = [
+ OpenStruct.new(oid: 'abcdef1', path: 'path/to/file', size: 1642, revision: 'f00ba7', mode: 0100644, data: "first-line\n"),
+ OpenStruct.new(oid: '', data: 'second-line'),
+ OpenStruct.new(oid: '', data: '', revision: 'f00ba7', path: 'path/to/non-existent/file'),
+ OpenStruct.new(oid: 'abcdef2', path: 'path/to/another-file', size: 2461, revision: 'f00ba8', mode: 0100644, data: "GIF87a\x90\x01".b)
+ ]
+
+ blobs = described_class.new(messages).to_a
+
+ expect(blobs.size).to be(2)
+
+ expect(blobs[0].id).to eq('abcdef1')
+ expect(blobs[0].mode).to eq('100644')
+ expect(blobs[0].name).to eq('file')
+ expect(blobs[0].path).to eq('path/to/file')
+ expect(blobs[0].size).to eq(1642)
+ expect(blobs[0].commit_id).to eq('f00ba7')
+ expect(blobs[0].data).to eq("first-line\nsecond-line")
+ expect(blobs[0].binary?).to be false
+
+ expect(blobs[1].id).to eq('abcdef2')
+ expect(blobs[1].mode).to eq('100644')
+ expect(blobs[1].name).to eq('another-file')
+ expect(blobs[1].path).to eq('path/to/another-file')
+ expect(blobs[1].size).to eq(2461)
+ expect(blobs[1].commit_id).to eq('f00ba8')
+ expect(blobs[1].data).to eq("GIF87a\x90\x01".b)
+ expect(blobs[1].binary?).to be true
+ end
+ end
+end
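
The new BlobsStitcher spec illustrates how Gitaly streams blob data: a message with a non-empty oid starts a blob and carries its metadata, while follow-up messages with an empty oid only append data. A simplified, self-contained sketch of that stitching idea (illustrative; the real class yields Gitlab::Git::Blob objects):

require 'ostruct'

def stitch(messages)
  blobs = []

  messages.each do |msg|
    if msg.oid.to_s.empty?
      # Continuation (or empty placeholder) message: append to the current blob.
      blobs.last.data << msg.data.to_s if blobs.last
    else
      # Header message: starts a new blob with its metadata.
      blobs << OpenStruct.new(oid: msg.oid, path: msg.path, size: msg.size, data: msg.data.dup)
    end
  end

  blobs
end
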
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index 3722a91c050..001c4d3e10a 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -166,6 +166,32 @@ describe Gitlab::GitalyClient::CommitService do
described_class.new(repository).find_commit(revision)
end
+
+ describe 'caching', :request_store do
+ let(:commit_dbl) { double(id: 'f01b' * 10) }
+
+ context 'when passed revision is a branch name' do
+ it 'calls Gitaly' do
+ expect_any_instance_of(Gitaly::CommitService::Stub).to receive(:find_commit).twice.and_return(double(commit: commit_dbl))
+
+ commit = nil
+ 2.times { commit = described_class.new(repository).find_commit('master') }
+
+ expect(commit).to eq(commit_dbl)
+ end
+ end
+
+ context 'when passed revision is a commit ID' do
+ it 'returns a cached commit' do
+ expect_any_instance_of(Gitaly::CommitService::Stub).to receive(:find_commit).once.and_return(double(commit: commit_dbl))
+
+ commit = nil
+ 2.times { commit = described_class.new(repository).find_commit('f01b' * 10) }
+
+ expect(commit).to eq(commit_dbl)
+ end
+ end
+ end
end
describe '#patch' do
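
The new caching examples describe a request-scoped memoisation rule: a revision is only cached when it is a full commit SHA, because branch names such as 'master' may move between calls. A tiny sketch of that rule (assumed shape, not the actual GitalyClient implementation):

SHA_PATTERN = /\A[0-9a-f]{40}\z/

def find_commit(revision, cache)
  # Only immutable revisions (full SHAs) are safe to memoise per request.
  return yield(revision) unless revision =~ SHA_PATTERN

  cache[revision] ||= yield(revision)
end

cache = {}
2.times { find_commit('f01b' * 10, cache) { :gitaly_roundtrip } } # block runs once
2.times { find_commit('master', cache)    { :gitaly_roundtrip } } # block runs twice
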
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 0ecb50f7110..41a55027f4d 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -276,6 +276,7 @@ project:
- fork_network_member
- fork_network
- custom_attributes
+- lfs_file_locks
award_emoji:
- awardable
- user
@@ -290,3 +291,5 @@ push_event_payload:
issue_assignees:
- issue
- assignee
+lfs_file_locks:
+- user
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 5a33fa3fd53..feaab6673cd 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -530,3 +530,9 @@ ProjectCustomAttribute:
- project_id
- key
- value
+LfsFileLock:
+- id
+- path
+- user_id
+- project_id
+- created_at
diff --git a/spec/lib/gitlab/import_export/uploads_restorer_spec.rb b/spec/lib/gitlab/import_export/uploads_restorer_spec.rb
index a685521cbf0..8a3a244be21 100644
--- a/spec/lib/gitlab/import_export/uploads_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/uploads_restorer_spec.rb
@@ -16,7 +16,7 @@ describe Gitlab::ImportExport::UploadsRestorer do
end
describe 'legacy storage' do
- let(:project) { create(:project) }
+ let(:project) { create(:project, :legacy_storage) }
subject(:restorer) { described_class.new(project: project, shared: shared) }
@@ -34,7 +34,7 @@ describe Gitlab::ImportExport::UploadsRestorer do
end
describe 'hashed storage' do
- let(:project) { create(:project, :hashed) }
+ let(:project) { create(:project) }
subject(:restorer) { described_class.new(project: project, shared: shared) }
diff --git a/spec/lib/gitlab/import_export/uploads_saver_spec.rb b/spec/lib/gitlab/import_export/uploads_saver_spec.rb
index 959779523f4..177036c109b 100644
--- a/spec/lib/gitlab/import_export/uploads_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/uploads_saver_spec.rb
@@ -15,7 +15,7 @@ describe Gitlab::ImportExport::UploadsSaver do
end
describe 'legacy storage' do
- let(:project) { create(:project) }
+ let(:project) { create(:project, :legacy_storage) }
subject(:saver) { described_class.new(shared: shared, project: project) }
@@ -37,7 +37,7 @@ describe Gitlab::ImportExport::UploadsSaver do
end
describe 'hashed storage' do
- let(:project) { create(:project, :hashed) }
+ let(:project) { create(:project) }
subject(:saver) { described_class.new(shared: shared, project: project) }
diff --git a/spec/lib/gitlab/import_export/wiki_restorer_spec.rb b/spec/lib/gitlab/import_export/wiki_restorer_spec.rb
new file mode 100644
index 00000000000..81b654e9c5f
--- /dev/null
+++ b/spec/lib/gitlab/import_export/wiki_restorer_spec.rb
@@ -0,0 +1,45 @@
+require 'spec_helper'
+
+describe Gitlab::ImportExport::WikiRestorer do
+ describe 'restore a wiki Git repo' do
+ let!(:project_with_wiki) { create(:project, :wiki_repo) }
+ let!(:project_without_wiki) { create(:project) }
+ let!(:project) { create(:project) }
+ let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
+ let(:shared) { Gitlab::ImportExport::Shared.new(relative_path: project.full_path) }
+ let(:bundler) { Gitlab::ImportExport::WikiRepoSaver.new(project: project_with_wiki, shared: shared) }
+ let(:bundle_path) { File.join(shared.export_path, Gitlab::ImportExport.project_bundle_filename) }
+ let(:restorer) do
+ described_class.new(path_to_bundle: bundle_path,
+ shared: shared,
+ project: project.wiki,
+ wiki_enabled: true)
+ end
+
+ before do
+ allow(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
+
+ bundler.save
+ end
+
+ after do
+ FileUtils.rm_rf(export_path)
+ Gitlab::Shell.new.remove_repository(project_with_wiki.wiki.repository_storage_path, project_with_wiki.wiki.disk_path)
+ Gitlab::Shell.new.remove_repository(project.wiki.repository_storage_path, project.wiki.disk_path)
+ end
+
+ it 'restores the wiki repo successfully' do
+ expect(restorer.restore).to be true
+ end
+
+ describe "no wiki in the bundle" do
+ let(:bundler) { Gitlab::ImportExport::WikiRepoSaver.new(project: project_without_wiki, shared: shared) }
+
+ it 'creates an empty wiki' do
+ expect(restorer.restore).to be true
+
+ expect(project.wiki_repository_exists?).to be true
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ldap/config_spec.rb b/spec/lib/gitlab/ldap/config_spec.rb
index ca2213cd112..e10837578a8 100644
--- a/spec/lib/gitlab/ldap/config_spec.rb
+++ b/spec/lib/gitlab/ldap/config_spec.rb
@@ -5,6 +5,14 @@ describe Gitlab::LDAP::Config do
let(:config) { described_class.new('ldapmain') }
+ describe '.servers' do
+ it 'returns empty array if no server information is available' do
+ allow(Gitlab.config).to receive(:ldap).and_return('enabled' => false)
+
+ expect(described_class.servers).to eq []
+ end
+ end
+
describe '#initialize' do
it 'requires a provider' do
expect { described_class.new }.to raise_error ArgumentError
diff --git a/spec/lib/gitlab/ldap/person_spec.rb b/spec/lib/gitlab/ldap/person_spec.rb
index b54d4000b53..05e1e394bb1 100644
--- a/spec/lib/gitlab/ldap/person_spec.rb
+++ b/spec/lib/gitlab/ldap/person_spec.rb
@@ -66,15 +66,6 @@ describe Gitlab::LDAP::Person do
end
end
- describe '.validate_entry' do
- it 'raises InvalidEntryError' do
- entry['foo'] = 'bar'
-
- expect { described_class.new(entry, 'ldapmain') }
- .to raise_error(Gitlab::LDAP::Person::InvalidEntryError)
- end
- end
-
describe '#name' do
it 'uses the configured name attribute and handles values as an array' do
name = 'John Doe'
diff --git a/spec/lib/gitlab/middleware/multipart_spec.rb b/spec/lib/gitlab/middleware/multipart_spec.rb
index 8d925460f01..a2ba91dae80 100644
--- a/spec/lib/gitlab/middleware/multipart_spec.rb
+++ b/spec/lib/gitlab/middleware/multipart_spec.rb
@@ -5,15 +5,17 @@ require 'tempfile'
describe Gitlab::Middleware::Multipart do
let(:app) { double(:app) }
let(:middleware) { described_class.new(app) }
+ let(:original_filename) { 'filename' }
it 'opens top-level files' do
Tempfile.open('top-level') do |tempfile|
- env = post_env({ 'file' => tempfile.path }, { 'file.name' => 'filename' }, Gitlab::Workhorse.secret, 'gitlab-workhorse')
+ env = post_env({ 'file' => tempfile.path }, { 'file.name' => original_filename }, Gitlab::Workhorse.secret, 'gitlab-workhorse')
expect(app).to receive(:call) do |env|
file = Rack::Request.new(env).params['file']
expect(file).to be_a(::UploadedFile)
expect(file.path).to eq(tempfile.path)
+ expect(file.original_filename).to eq(original_filename)
end
middleware.call(env)
@@ -34,13 +36,14 @@ describe Gitlab::Middleware::Multipart do
it 'opens files one level deep' do
Tempfile.open('one-level') do |tempfile|
- in_params = { 'user' => { 'avatar' => { '.name' => 'filename' } } }
+ in_params = { 'user' => { 'avatar' => { '.name' => original_filename } } }
env = post_env({ 'user[avatar]' => tempfile.path }, in_params, Gitlab::Workhorse.secret, 'gitlab-workhorse')
expect(app).to receive(:call) do |env|
file = Rack::Request.new(env).params['user']['avatar']
expect(file).to be_a(::UploadedFile)
expect(file.path).to eq(tempfile.path)
+ expect(file.original_filename).to eq(original_filename)
end
middleware.call(env)
@@ -49,13 +52,14 @@ describe Gitlab::Middleware::Multipart do
it 'opens files two levels deep' do
Tempfile.open('two-levels') do |tempfile|
- in_params = { 'project' => { 'milestone' => { 'themesong' => { '.name' => 'filename' } } } }
+ in_params = { 'project' => { 'milestone' => { 'themesong' => { '.name' => original_filename } } } }
env = post_env({ 'project[milestone][themesong]' => tempfile.path }, in_params, Gitlab::Workhorse.secret, 'gitlab-workhorse')
expect(app).to receive(:call) do |env|
file = Rack::Request.new(env).params['project']['milestone']['themesong']
expect(file).to be_a(::UploadedFile)
expect(file.path).to eq(tempfile.path)
+ expect(file.original_filename).to eq(original_filename)
end
middleware.call(env)
diff --git a/spec/lib/gitlab/o_auth/user_spec.rb b/spec/lib/gitlab/o_auth/user_spec.rb
index 03e0a9e2a03..b8455403bdb 100644
--- a/spec/lib/gitlab/o_auth/user_spec.rb
+++ b/spec/lib/gitlab/o_auth/user_spec.rb
@@ -724,6 +724,10 @@ describe Gitlab::OAuth::User do
it "does not update the user location" do
expect(gl_user.location).not_to eq(info_hash[:address][:country])
end
+
+ it 'does not create associated user synced attributes metadata' do
+ expect(gl_user.user_synced_attributes_metadata).to be_nil
+ end
end
end
diff --git a/spec/lib/gitlab/profiler_spec.rb b/spec/lib/gitlab/profiler_spec.rb
index 4a43dbb2371..f02b1cf55fb 100644
--- a/spec/lib/gitlab/profiler_spec.rb
+++ b/spec/lib/gitlab/profiler_spec.rb
@@ -53,6 +53,15 @@ describe Gitlab::Profiler do
described_class.profile('/', user: user)
end
+ context 'when providing a user without a personal access token' do
+ it 'raises an error' do
+ user = double(:user)
+ allow(user).to receive_message_chain(:personal_access_tokens, :active, :pluck).and_return([])
+
+ expect { described_class.profile('/', user: user) }.to raise_error('Your user must have a personal_access_token')
+ end
+ end
+
it 'uses the private_token for auth if both it and user are set' do
user = double(:user)
user_token = 'user'
diff --git a/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb b/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb
index c7169717fc1..0697cb2def6 100644
--- a/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb
+++ b/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb
@@ -7,7 +7,7 @@ describe Gitlab::Prometheus::Queries::AdditionalMetricsDeploymentQuery do
include_examples 'additional metrics query' do
let(:deployment) { create(:deployment, environment: environment) }
- let(:query_params) { [deployment.id] }
+ let(:query_params) { [environment.id, deployment.id] }
it 'queries using specific time' do
expect(client).to receive(:query_range).with(anything,
diff --git a/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb b/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb
index ffe3ad85baa..84dc31d9732 100644
--- a/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb
+++ b/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb
@@ -31,7 +31,7 @@ describe Gitlab::Prometheus::Queries::DeploymentQuery do
expect(client).to receive(:query).with('avg(rate(container_cpu_usage_seconds_total{container_name!="POD",environment="environment-slug"}[30m])) * 100',
time: stop_time)
- expect(subject.query(deployment.id)).to eq(memory_values: nil, memory_before: nil, memory_after: nil,
- cpu_values: nil, cpu_before: nil, cpu_after: nil)
+ expect(subject.query(environment.id, deployment.id)).to eq(memory_values: nil, memory_before: nil, memory_after: nil,
+ cpu_values: nil, cpu_before: nil, cpu_after: nil)
end
end
diff --git a/spec/lib/gitlab/prometheus_client_spec.rb b/spec/lib/gitlab/prometheus_client_spec.rb
index de625324092..5d86007f71f 100644
--- a/spec/lib/gitlab/prometheus_client_spec.rb
+++ b/spec/lib/gitlab/prometheus_client_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe Gitlab::PrometheusClient do
include PrometheusHelpers
- subject { described_class.new(api_url: 'https://prometheus.example.com') }
+ subject { described_class.new(RestClient::Resource.new('https://prometheus.example.com')) }
describe '#ping' do
it 'issues a "query" request to the API endpoint' do
@@ -47,16 +47,28 @@ describe Gitlab::PrometheusClient do
expect(req_stub).to have_been_requested
end
end
+
+ context 'when request returns non json data' do
+ it 'raises a Gitlab::PrometheusError error' do
+ req_stub = stub_prometheus_request(query_url, status: 200, body: 'not json')
+
+ expect { execute_query }
+ .to raise_error(Gitlab::PrometheusError, 'Parsing response failed')
+ expect(req_stub).to have_been_requested
+ end
+ end
end
describe 'failure to reach a provided prometheus url' do
let(:prometheus_url) {"https://prometheus.invalid.example.com"}
+ subject { described_class.new(RestClient::Resource.new(prometheus_url)) }
+
context 'exceptions are raised' do
it 'raises a Gitlab::PrometheusError error when a SocketError is rescued' do
req_stub = stub_prometheus_request_with_exception(prometheus_url, SocketError)
- expect { subject.send(:get, prometheus_url) }
+ expect { subject.send(:get, '/', {}) }
.to raise_error(Gitlab::PrometheusError, "Can't connect to #{prometheus_url}")
expect(req_stub).to have_been_requested
end
@@ -64,15 +76,15 @@ describe Gitlab::PrometheusClient do
it 'raises a Gitlab::PrometheusError error when a SSLError is rescued' do
req_stub = stub_prometheus_request_with_exception(prometheus_url, OpenSSL::SSL::SSLError)
- expect { subject.send(:get, prometheus_url) }
+ expect { subject.send(:get, '/', {}) }
.to raise_error(Gitlab::PrometheusError, "#{prometheus_url} contains invalid SSL data")
expect(req_stub).to have_been_requested
end
- it 'raises a Gitlab::PrometheusError error when a HTTParty::Error is rescued' do
- req_stub = stub_prometheus_request_with_exception(prometheus_url, HTTParty::Error)
+ it 'raises a Gitlab::PrometheusError error when a RestClient::Exception is rescued' do
+ req_stub = stub_prometheus_request_with_exception(prometheus_url, RestClient::Exception)
- expect { subject.send(:get, prometheus_url) }
+ expect { subject.send(:get, '/', {}) }
.to raise_error(Gitlab::PrometheusError, "Network connection error")
expect(req_stub).to have_been_requested
end
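
These hunks track a client that now wraps a RestClient::Resource instead of taking an api_url keyword, and that maps transport and parsing failures onto a Prometheus-specific error. A sketch of that shape (illustrative only; the real client's methods and messages may differ beyond what the spec shows):

require 'rest-client'
require 'json'

PrometheusError = Class.new(StandardError)

class TinyPrometheusClient
  def initialize(rest_client)
    @rest_client = rest_client
  end

  def get(path, params)
    response = @rest_client[path].get(params: params)
    JSON.parse(response.body)
  rescue JSON::ParserError
    raise PrometheusError, 'Parsing response failed'
  rescue RestClient::Exception
    raise PrometheusError, 'Network connection error'
  end
end

client = TinyPrometheusClient.new(RestClient::Resource.new('https://prometheus.example.com'))
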
diff --git a/spec/lib/gitlab/query_limiting/transaction_spec.rb b/spec/lib/gitlab/query_limiting/transaction_spec.rb
index b4231fcd0fa..b72b8574174 100644
--- a/spec/lib/gitlab/query_limiting/transaction_spec.rb
+++ b/spec/lib/gitlab/query_limiting/transaction_spec.rb
@@ -59,18 +59,6 @@ describe Gitlab::QueryLimiting::Transaction do
expect { transaction.act_upon_results }
.to raise_error(described_class::ThresholdExceededError)
end
-
- it 'reports the error in Sentry if raising an error is disabled' do
- expect(transaction)
- .to receive(:raise_error?)
- .and_return(false)
-
- expect(Raven)
- .to receive(:capture_exception)
- .with(an_instance_of(described_class::ThresholdExceededError))
-
- transaction.act_upon_results
- end
end
end
diff --git a/spec/lib/gitlab/query_limiting_spec.rb b/spec/lib/gitlab/query_limiting_spec.rb
index 2eddab0b8c3..42877b1e2dd 100644
--- a/spec/lib/gitlab/query_limiting_spec.rb
+++ b/spec/lib/gitlab/query_limiting_spec.rb
@@ -12,14 +12,16 @@ describe Gitlab::QueryLimiting do
expect(described_class.enable?).to eq(true)
end
- it 'returns true on GitLab.com' do
+ it 'returns false on GitLab.com' do
+ expect(Rails.env).to receive(:development?).and_return(false)
+ expect(Rails.env).to receive(:test?).and_return(false)
allow(Gitlab).to receive(:com?).and_return(true)
- expect(described_class.enable?).to eq(true)
+ expect(described_class.enable?).to eq(false)
end
- it 'returns true in a non GitLab.com' do
- expect(Gitlab).to receive(:com?).and_return(false)
+ it 'returns false in a non GitLab.com' do
+ allow(Gitlab).to receive(:com?).and_return(false)
expect(Rails.env).to receive(:development?).and_return(false)
expect(Rails.env).to receive(:test?).and_return(false)
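
The updated expectations here say query limiting is off on GitLab.com and outside of development/test. One plausible predicate matching those expectations (a sketch, not the real Gitlab::QueryLimiting.enable?):

def enable_query_limiting?(gitlab_com:, rails_env:)
  return false if gitlab_com

  %w[development test].include?(rails_env)
end

enable_query_limiting?(gitlab_com: true,  rails_env: 'production') # => false
enable_query_limiting?(gitlab_com: false, rails_env: 'production') # => false
enable_query_limiting?(gitlab_com: false, rails_env: 'test')       # => true
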
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index 8b54d72d6f7..a1079e54975 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -18,6 +18,7 @@ describe Gitlab::Regex do
subject { described_class.environment_name_regex }
it { is_expected.to match('foo') }
+ it { is_expected.to match('a') }
it { is_expected.to match('foo-1') }
it { is_expected.to match('FOO') }
it { is_expected.to match('foo/1') }
@@ -25,6 +26,10 @@ describe Gitlab::Regex do
it { is_expected.not_to match('9&foo') }
it { is_expected.not_to match('foo-^') }
it { is_expected.not_to match('!!()()') }
+ it { is_expected.not_to match('/foo') }
+ it { is_expected.not_to match('foo/') }
+ it { is_expected.not_to match('/foo/') }
+ it { is_expected.not_to match('/') }
end
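
The added examples tighten environment_name_regex: single-character names and internal slashes are valid, while leading or trailing slashes are not. A regex with that shape, covering only the examples shown (an assumption for illustration, not the actual Gitlab::Regex.environment_name_regex):

ENV_NAME = %r{\A[a-zA-Z0-9]([a-zA-Z0-9_/-]*[a-zA-Z0-9])?\z}

%w[foo a foo-1 FOO foo/1].all?   { |name| name =~ ENV_NAME } # => true
%w[/foo foo/ /foo/ / 9&foo].any? { |name| name =~ ENV_NAME } # => false
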
describe '.environment_slug_regex' do
diff --git a/spec/lib/gitlab/repo_path_spec.rb b/spec/lib/gitlab/repo_path_spec.rb
index 1a925a15e0c..b67bcc77bd4 100644
--- a/spec/lib/gitlab/repo_path_spec.rb
+++ b/spec/lib/gitlab/repo_path_spec.rb
@@ -6,11 +6,11 @@ describe ::Gitlab::RepoPath do
context 'a repository storage path' do
it 'parses a full repository path' do
- expect(described_class.parse(project.repository.path)).to eq([project, false, nil])
+ expect(described_class.parse(project.repository.full_path)).to eq([project, false, nil])
end
it 'parses a full wiki path' do
- expect(described_class.parse(project.wiki.repository.path)).to eq([project, true, nil])
+ expect(described_class.parse(project.wiki.repository.full_path)).to eq([project, true, nil])
end
end
diff --git a/spec/lib/gitlab/search_results_spec.rb b/spec/lib/gitlab/search_results_spec.rb
index 17b48b3d062..9dbab95f70e 100644
--- a/spec/lib/gitlab/search_results_spec.rb
+++ b/spec/lib/gitlab/search_results_spec.rb
@@ -20,9 +20,13 @@ describe Gitlab::SearchResults do
end
describe '#objects' do
- it 'returns without_page collection by default' do
+ it 'returns without_counts collection by default' do
expect(results.objects('projects')).to be_kind_of(Kaminari::PaginatableWithoutCount)
end
+
+ it 'returns with counts collection when requested' do
+ expect(results.objects('projects', 1, false)).not_to be_kind_of(Kaminari::PaginatableWithoutCount)
+ end
end
describe '#projects_count' do
diff --git a/spec/lib/gitlab/shell_spec.rb b/spec/lib/gitlab/shell_spec.rb
index 2b61ce38418..4506cbc3982 100644
--- a/spec/lib/gitlab/shell_spec.rb
+++ b/spec/lib/gitlab/shell_spec.rb
@@ -443,7 +443,7 @@ describe Gitlab::Shell do
end
describe '#remove_repository' do
- let!(:project) { create(:project, :repository) }
+ let!(:project) { create(:project, :repository, :legacy_storage) }
let(:disk_path) { "#{project.disk_path}.git" }
it 'returns true when the command succeeds' do
diff --git a/spec/lib/gitlab/ssh_public_key_spec.rb b/spec/lib/gitlab/ssh_public_key_spec.rb
index c15e29774b6..93d538141ce 100644
--- a/spec/lib/gitlab/ssh_public_key_spec.rb
+++ b/spec/lib/gitlab/ssh_public_key_spec.rb
@@ -37,41 +37,6 @@ describe Gitlab::SSHPublicKey, lib: true do
end
end
- describe '.sanitize(key_content)' do
- let(:content) { build(:key).key }
-
- context 'when key has blank space characters' do
- it 'removes the extra blank space characters' do
- unsanitized = content.insert(100, "\n")
- .insert(40, "\r\n")
- .insert(30, ' ')
-
- sanitized = described_class.sanitize(unsanitized)
- _, body = sanitized.split
-
- expect(sanitized).not_to eq(unsanitized)
- expect(body).not_to match(/\s/)
- end
- end
-
- context "when key doesn't have blank space characters" do
- it "doesn't modify the content" do
- sanitized = described_class.sanitize(content)
-
- expect(sanitized).to eq(content)
- end
- end
-
- context "when key is invalid" do
- it 'returns the original content' do
- unsanitized = "ssh-foo any content=="
- sanitized = described_class.sanitize(unsanitized)
-
- expect(sanitized).to eq(unsanitized)
- end
- end
- end
-
describe '#valid?' do
subject { public_key }
diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb
index dc2bb5b9747..37a0bf1ad36 100644
--- a/spec/lib/gitlab/workhorse_spec.rb
+++ b/spec/lib/gitlab/workhorse_spec.rb
@@ -324,7 +324,7 @@ describe Gitlab::Workhorse do
it 'includes a Repository param' do
repo_param = {
storage_name: 'default',
- relative_path: project.full_path + '.git',
+ relative_path: project.disk_path + '.git',
gl_repository: "project-#{project.id}"
}
diff --git a/spec/migrations/README.md b/spec/migrations/README.md
index 45cf25b96de..49760fa62b8 100644
--- a/spec/migrations/README.md
+++ b/spec/migrations/README.md
@@ -89,5 +89,5 @@ end
## Best practices
1. Note that these tests do not run within a transaction; we use
-a truncation database cleanup strategy. Do not depend on transaction being
+a deletion database cleanup strategy. Do not depend on transaction being
present.
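
Because these specs run outside a wrapping transaction, DatabaseCleaner has to clean up by deleting rows instead of rolling back. A hedged sketch of how a suite might wire that up for :migration-tagged examples (names are illustrative, not necessarily GitLab's exact support code):

require 'database_cleaner'

RSpec.configure do |config|
  config.around(:each, :migration) do |example|
    # Deletion strategy: no transaction to roll back, so remove rows afterwards.
    DatabaseCleaner.strategy = :deletion

    example.run

    DatabaseCleaner.clean
  end
end
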
diff --git a/spec/migrations/convert_custom_notification_settings_to_columns_spec.rb b/spec/migrations/convert_custom_notification_settings_to_columns_spec.rb
index 759e77ac9db..d1bf6bdf9d6 100644
--- a/spec/migrations/convert_custom_notification_settings_to_columns_spec.rb
+++ b/spec/migrations/convert_custom_notification_settings_to_columns_spec.rb
@@ -21,7 +21,7 @@ describe ConvertCustomNotificationSettingsToColumns, :migration do
events[event] = true
end
- user = build(:user).becomes(user_class).tap(&:save!)
+ user = user_class.create!(email: "user-#{SecureRandom.hex}@example.org", username: "user-#{SecureRandom.hex}", encrypted_password: '12345678')
create_params = { user_id: user.id, level: params[:level], events: events }
notification_setting = described_class::NotificationSetting.create(create_params)
@@ -37,7 +37,7 @@ describe ConvertCustomNotificationSettingsToColumns, :migration do
events[event] = true
end
- user = build(:user).becomes(user_class).tap(&:save!)
+ user = user_class.create!(email: "user-#{SecureRandom.hex}@example.org", username: "user-#{SecureRandom.hex}", encrypted_password: '12345678')
create_params = events.merge(user_id: user.id, level: params[:level])
notification_setting = described_class::NotificationSetting.create(create_params)
diff --git a/spec/migrations/migrate_process_commit_worker_jobs_spec.rb b/spec/migrations/migrate_process_commit_worker_jobs_spec.rb
index e5793a3c0ee..657113812bd 100644
--- a/spec/migrations/migrate_process_commit_worker_jobs_spec.rb
+++ b/spec/migrations/migrate_process_commit_worker_jobs_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
require Rails.root.join('db', 'migrate', '20161124141322_migrate_process_commit_worker_jobs.rb')
describe MigrateProcessCommitWorkerJobs do
- let(:project) { create(:project, :repository) }
+ let(:project) { create(:project, :legacy_storage, :repository) }
let(:user) { create(:user) }
let(:commit) { project.commit.raw.rugged_commit }
diff --git a/spec/migrations/remove_redundant_pipeline_stages_spec.rb b/spec/migrations/remove_redundant_pipeline_stages_spec.rb
new file mode 100644
index 00000000000..8325f986594
--- /dev/null
+++ b/spec/migrations/remove_redundant_pipeline_stages_spec.rb
@@ -0,0 +1,59 @@
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20180119121225_remove_redundant_pipeline_stages.rb')
+
+describe RemoveRedundantPipelineStages, :migration do
+ let(:projects) { table(:projects) }
+ let(:pipelines) { table(:ci_pipelines) }
+ let(:stages) { table(:ci_stages) }
+ let(:builds) { table(:ci_builds) }
+
+ before do
+ projects.create!(id: 123, name: 'gitlab', path: 'gitlab-ce')
+ pipelines.create!(id: 234, project_id: 123, ref: 'master', sha: 'adf43c3a')
+
+ stages.create!(id: 6, project_id: 123, pipeline_id: 234, name: 'build')
+ stages.create!(id: 10, project_id: 123, pipeline_id: 234, name: 'build')
+ stages.create!(id: 21, project_id: 123, pipeline_id: 234, name: 'build')
+ stages.create!(id: 41, project_id: 123, pipeline_id: 234, name: 'test')
+ stages.create!(id: 62, project_id: 123, pipeline_id: 234, name: 'test')
+ stages.create!(id: 102, project_id: 123, pipeline_id: 234, name: 'deploy')
+
+ builds.create!(id: 1, commit_id: 234, project_id: 123, stage_id: 10)
+ builds.create!(id: 2, commit_id: 234, project_id: 123, stage_id: 21)
+ builds.create!(id: 3, commit_id: 234, project_id: 123, stage_id: 21)
+ builds.create!(id: 4, commit_id: 234, project_id: 123, stage_id: 41)
+ builds.create!(id: 5, commit_id: 234, project_id: 123, stage_id: 62)
+ builds.create!(id: 6, commit_id: 234, project_id: 123, stage_id: 102)
+ end
+
+ it 'removes ambiguous stages and preserves builds' do
+ expect(stages.all.count).to eq 6
+ expect(builds.all.count).to eq 6
+
+ migrate!
+
+ expect(stages.all.count).to eq 1
+ expect(builds.all.count).to eq 6
+ expect(builds.all.pluck(:stage_id).compact).to eq [102]
+ end
+
+ it 'retries when the incorrectly added index exception is caught' do
+ allow_any_instance_of(described_class)
+ .to receive(:remove_redundant_pipeline_stages!)
+
+ expect_any_instance_of(described_class)
+ .to receive(:remove_outdated_index!)
+ .exactly(100).times.and_call_original
+
+ expect { migrate! }
+ .to raise_error StandardError, /Failed to add an unique index/
+ end
+
+ it 'does not retry when an unknown exception is raised' do
+ allow(subject).to receive(:remove_outdated_index!)
+ expect(subject).to receive(:remove_redundant_pipeline_stages!).once
+ allow(subject).to receive(:add_unique_index!).and_raise(StandardError)
+
+ expect { subject.up(attempts: 3) }.to raise_error StandardError
+ end
+end
diff --git a/spec/migrations/rename_reserved_project_names_spec.rb b/spec/migrations/rename_reserved_project_names_spec.rb
index e6555b1fe6b..34336d705b1 100644
--- a/spec/migrations/rename_reserved_project_names_spec.rb
+++ b/spec/migrations/rename_reserved_project_names_spec.rb
@@ -3,10 +3,14 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20161221153951_rename_reserved_project_names.rb')
-# This migration uses multiple threads, and thus different transactions. This
-# means data created in this spec may not be visible to some threads. To work
-# around this we use the DELETE cleaning strategy.
-describe RenameReservedProjectNames, :delete do
+# This migration is using factories, which set fields that don't actually
+# exist in the DB schema previous to 20161221153951. Thus we just use the
+# latest schema when testing this migration.
+# This is ok-ish because:
+# 1. This migration is a data migration
+# 2. It only relies on very stable DB fields: routes.id, routes.path, namespaces.id, projects.namespace_id
+# Ideally, the test should not use factories and rely on the `table` helper instead.
+describe RenameReservedProjectNames, :migration, schema: :latest do
let(:migration) { described_class.new }
let!(:project) { create(:project) }
diff --git a/spec/migrations/turn_nested_groups_into_regular_groups_for_mysql_spec.rb b/spec/migrations/turn_nested_groups_into_regular_groups_for_mysql_spec.rb
index 6f7a730edff..528dc54781d 100644
--- a/spec/migrations/turn_nested_groups_into_regular_groups_for_mysql_spec.rb
+++ b/spec/migrations/turn_nested_groups_into_regular_groups_for_mysql_spec.rb
@@ -4,7 +4,7 @@ require Rails.root.join('db', 'migrate', '20170503140202_turn_nested_groups_into
describe TurnNestedGroupsIntoRegularGroupsForMysql do
let!(:parent_group) { create(:group) }
let!(:child_group) { create(:group, parent: parent_group) }
- let!(:project) { create(:project, :empty_repo, namespace: child_group) }
+ let!(:project) { create(:project, :legacy_storage, :empty_repo, namespace: child_group) }
let!(:member) { create(:user) }
let(:migration) { described_class.new }
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index ef480e7a80a..ae2d34750a7 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -114,6 +114,40 @@ describe ApplicationSetting do
it { expect(setting.repository_storages).to eq(['default']) }
end
+ context 'auto_devops_domain setting' do
+ context 'when auto_devops_enabled? is true' do
+ before do
+ setting.update(auto_devops_enabled: true)
+ end
+
+ it 'can be blank' do
+ setting.update(auto_devops_domain: '')
+
+ expect(setting).to be_valid
+ end
+
+ context 'with a valid value' do
+ before do
+ setting.update(auto_devops_domain: 'domain.com')
+ end
+
+ it 'is valid' do
+ expect(setting).to be_valid
+ end
+ end
+
+ context 'with an invalid value' do
+ before do
+ setting.update(auto_devops_domain: 'definitelynotahostname')
+ end
+
+ it 'is invalid' do
+ expect(setting).to be_invalid
+ end
+ end
+ end
+ end
+
context 'circuitbreaker settings' do
[:circuitbreaker_failure_count_threshold,
:circuitbreaker_check_interval,
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 0b3d5c6a0bd..2b6b6a61182 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -1413,6 +1413,7 @@ describe Ci::Build do
[
{ key: 'CI', value: 'true', public: true },
{ key: 'GITLAB_CI', value: 'true', public: true },
+ { key: 'GITLAB_FEATURES', value: project.namespace.features.join(','), public: true },
{ key: 'CI_SERVER_NAME', value: 'GitLab', public: true },
{ key: 'CI_SERVER_VERSION', value: Gitlab::VERSION, public: true },
{ key: 'CI_SERVER_REVISION', value: Gitlab::REVISION, public: true },
@@ -1589,7 +1590,7 @@ describe Ci::Build do
context 'when the branch is protected' do
before do
- create(:protected_branch, project: build.project, name: build.ref)
+ allow(build.project).to receive(:protected_for?).with(build.ref).and_return(true)
end
it { is_expected.to include(protected_variable) }
@@ -1597,7 +1598,7 @@ describe Ci::Build do
context 'when the tag is protected' do
before do
- create(:protected_tag, project: build.project, name: build.ref)
+ allow(build.project).to receive(:protected_for?).with(build.ref).and_return(true)
end
it { is_expected.to include(protected_variable) }
@@ -1634,7 +1635,7 @@ describe Ci::Build do
context 'when the branch is protected' do
before do
- create(:protected_branch, project: build.project, name: build.ref)
+ allow(build.project).to receive(:protected_for?).with(build.ref).and_return(true)
end
it { is_expected.to include(protected_variable) }
@@ -1642,7 +1643,7 @@ describe Ci::Build do
context 'when the tag is protected' do
before do
- create(:protected_tag, project: build.project, name: build.ref)
+ allow(build.project).to receive(:protected_for?).with(build.ref).and_return(true)
end
it { is_expected.to include(protected_variable) }
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index b2b64e6ff48..ab170e6351c 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -95,28 +95,68 @@ describe Ci::Runner do
subject { runner.online? }
- context 'never contacted' do
+ before do
+ allow_any_instance_of(described_class).to receive(:cached_attribute).and_call_original
+ allow_any_instance_of(described_class).to receive(:cached_attribute)
+ .with(:platform).and_return("darwin")
+ end
+
+ context 'no cache value' do
before do
- runner.contacted_at = nil
+ stub_redis_runner_contacted_at(nil)
end
- it { is_expected.to be_falsey }
- end
+ context 'never contacted' do
+ before do
+ runner.contacted_at = nil
+ end
- context 'contacted long time ago time' do
- before do
- runner.contacted_at = 1.year.ago
+ it { is_expected.to be_falsey }
+ end
+
+ context 'contacted a long time ago' do
+ before do
+ runner.contacted_at = 1.year.ago
+ end
+
+ it { is_expected.to be_falsey }
end
- it { is_expected.to be_falsey }
+ context 'contacted 1s ago' do
+ before do
+ runner.contacted_at = 1.second.ago
+ end
+
+ it { is_expected.to be_truthy }
+ end
end
- context 'contacted 1s ago' do
- before do
- runner.contacted_at = 1.second.ago
+ context 'with cache value' do
+ context 'contacted a long time ago' do
+ before do
+ runner.contacted_at = 1.year.ago
+ stub_redis_runner_contacted_at(1.year.ago.to_s)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'contacted 1s ago' do
+ before do
+ runner.contacted_at = 50.minutes.ago
+ stub_redis_runner_contacted_at(1.second.ago.to_s)
+ end
+
+ it { is_expected.to be_truthy }
end
+ end
- it { is_expected.to be_truthy }
+ def stub_redis_runner_contacted_at(value)
+ Gitlab::Redis::SharedState.with do |redis|
+ cache_key = runner.send(:cache_attribute_key)
+ expect(redis).to receive(:get).with(cache_key)
+ .and_return({ contacted_at: value }.to_json).at_least(:once)
+ end
end
end
@@ -361,6 +401,50 @@ describe Ci::Runner do
end
end
+ describe '#update_cached_info' do
+ let(:runner) { create(:ci_runner) }
+
+ subject { runner.update_cached_info(architecture: '18-bit') }
+
+ context 'when database was updated recently' do
+ before do
+ runner.contacted_at = Time.now
+ end
+
+ it 'updates cache' do
+ expect_redis_update
+
+ subject
+ end
+ end
+
+ context 'when database was not updated recently' do
+ before do
+ runner.contacted_at = 2.hours.ago
+ end
+
+ it 'updates database' do
+ expect_redis_update
+
+ expect { subject }.to change { runner.reload.read_attribute(:contacted_at) }
+ .and change { runner.reload.read_attribute(:architecture) }
+ end
+
+ it 'updates cache' do
+ expect_redis_update
+
+ subject
+ end
+ end
+
+ def expect_redis_update
+ Gitlab::Redis::SharedState.with do |redis|
+ redis_key = runner.send(:cache_attribute_key)
+ expect(redis).to receive(:set).with(redis_key, anything, any_args)
+ end
+ end
+ end
+
describe '#destroy' do
let(:runner) { create(:ci_runner) }
diff --git a/spec/models/clusters/applications/prometheus_spec.rb b/spec/models/clusters/applications/prometheus_spec.rb
index 696099f7cf7..01037919530 100644
--- a/spec/models/clusters/applications/prometheus_spec.rb
+++ b/spec/models/clusters/applications/prometheus_spec.rb
@@ -6,6 +6,24 @@ describe Clusters::Applications::Prometheus do
include_examples 'cluster application specs', described_class
+ describe 'transition to installed' do
+ let(:project) { create(:project) }
+ let(:cluster) { create(:cluster, projects: [project]) }
+ let(:prometheus_service) { double('prometheus_service') }
+
+ subject { create(:clusters_applications_prometheus, :installing, cluster: cluster) }
+
+ before do
+ allow(project).to receive(:find_or_initialize_service).with('prometheus').and_return prometheus_service
+ end
+
+ it 'ensures Prometheus service is activated' do
+ expect(prometheus_service).to receive(:update).with(active: true)
+
+ subject.make_installed
+ end
+ end
+
describe "#chart_values_file" do
subject { create(:clusters_applications_prometheus).chart_values_file }
@@ -13,4 +31,58 @@ describe Clusters::Applications::Prometheus do
expect(subject).to eq("#{Rails.root}/vendor/prometheus/values.yaml")
end
end
+
+ describe '#proxy_client' do
+ context 'cluster is nil' do
+ it 'returns nil' do
+ expect(subject.cluster).to be_nil
+ expect(subject.proxy_client).to be_nil
+ end
+ end
+
+ context "cluster doesn't have kubeclient" do
+ let(:cluster) { create(:cluster) }
+ subject { create(:clusters_applications_prometheus, cluster: cluster) }
+
+ it 'returns nil' do
+ expect(subject.proxy_client).to be_nil
+ end
+ end
+
+ context 'cluster has kubeclient' do
+ let(:kubernetes_url) { 'http://example.com' }
+ let(:k8s_discover_response) do
+ {
+ resources: [
+ {
+ name: 'service',
+ kind: 'Service'
+ }
+ ]
+ }
+ end
+
+ let(:kube_client) { Kubeclient::Client.new(kubernetes_url) }
+
+ let(:cluster) { create(:cluster) }
+ subject { create(:clusters_applications_prometheus, cluster: cluster) }
+
+ before do
+ allow(kube_client.rest_client).to receive(:get).and_return(k8s_discover_response.to_json)
+ allow(subject.cluster).to receive(:kubeclient).and_return(kube_client)
+ end
+
+ it 'creates proxy prometheus rest client' do
+ expect(subject.proxy_client).to be_instance_of(RestClient::Resource)
+ end
+
+ it 'creates proper url' do
+ expect(subject.proxy_client.url).to eq('http://example.com/api/v1/proxy/namespaces/gitlab-managed-apps/service/prometheus-prometheus-server:80')
+ end
+
+ it 'copies options and headers from kube client to proxy client' do
+ expect(subject.proxy_client.options).to eq(kube_client.rest_client.options.merge(headers: kube_client.headers))
+ end
+ end
+ end
end
diff --git a/spec/models/concerns/redis_cacheable_spec.rb b/spec/models/concerns/redis_cacheable_spec.rb
new file mode 100644
index 00000000000..3d7963120b6
--- /dev/null
+++ b/spec/models/concerns/redis_cacheable_spec.rb
@@ -0,0 +1,39 @@
+require 'spec_helper'
+
+describe RedisCacheable do
+ let(:model) { double }
+
+ before do
+ model.extend(described_class)
+ allow(model).to receive(:cache_attribute_key).and_return('key')
+ end
+
+ describe '#cached_attribute' do
+ let(:payload) { { attribute: 'value' } }
+
+ subject { model.cached_attribute(payload.keys.first) }
+
+ it 'gets the cache attribute' do
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis).to receive(:get).with('key')
+ .and_return(payload.to_json)
+ end
+
+ expect(subject).to eq(payload.values.first)
+ end
+ end
+
+ describe '#cache_attributes' do
+ let(:values) { { name: 'new_name' } }
+
+ subject { model.cache_attributes(values) }
+
+ it 'sets the cache attributes' do
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis).to receive(:set).with('key', values.to_json, anything)
+ end
+
+ subject
+ end
+ end
+end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 338fb314ee9..4f16b73ef38 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -549,7 +549,7 @@ describe Group do
context 'when the ref is a protected branch' do
before do
- create(:protected_branch, name: 'ref', project: project)
+ allow(project).to receive(:protected_for?).with('ref').and_return(true)
end
it_behaves_like 'ref is protected'
@@ -557,7 +557,7 @@ describe Group do
context 'when the ref is a protected tag' do
before do
- create(:protected_tag, name: 'ref', project: project)
+ allow(project).to receive(:protected_for?).with('ref').and_return(true)
end
it_behaves_like 'ref is protected'
@@ -571,6 +571,10 @@ describe Group do
let(:variable_child_2) { create(:ci_group_variable, group: group_child_2) }
let(:variable_child_3) { create(:ci_group_variable, group: group_child_3) }
+ before do
+ allow(project).to receive(:protected_for?).with('ref').and_return(true)
+ end
+
it 'returns all variables belong to the group and parent groups' do
expected_array1 = [protected_variable, secret_variable]
expected_array2 = [variable_child, variable_child_2, variable_child_3]
diff --git a/spec/models/identity_spec.rb b/spec/models/identity_spec.rb
index 7c66c98231b..a5ce245c21d 100644
--- a/spec/models/identity_spec.rb
+++ b/spec/models/identity_spec.rb
@@ -70,5 +70,38 @@ describe Identity do
end
end
end
+
+ context 'after_destroy' do
+ let!(:user) { create(:user) }
+ let(:ldap_identity) { create(:identity, provider: 'ldapmain', extern_uid: 'uid=john smith,ou=people,dc=example,dc=com', user: user) }
+ let(:ldap_user_synced_attributes) { { provider: 'ldapmain', name_synced: true, email_synced: true } }
+ let(:other_provider_user_synced_attributes) { { provider: 'other', name_synced: true, email_synced: true } }
+
+ describe 'if user synced attributes metadata provider' do
+ context 'matches the identity provider' do
+ it 'removes the user synced attributes' do
+ user.create_user_synced_attributes_metadata(ldap_user_synced_attributes)
+
+ expect(user.user_synced_attributes_metadata.provider).to eq 'ldapmain'
+
+ ldap_identity.destroy
+
+ expect(user.reload.user_synced_attributes_metadata).to be_nil
+ end
+ end
+
+ context 'does not match the identity provider' do
+ it 'does not remove the user synced attributes' do
+ user.create_user_synced_attributes_metadata(other_provider_user_synced_attributes)
+
+ expect(user.user_synced_attributes_metadata.provider).to eq 'other'
+
+ ldap_identity.destroy
+
+ expect(user.reload.user_synced_attributes_metadata.provider).to eq 'other'
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/models/key_spec.rb b/spec/models/key_spec.rb
index bf5703ac986..7398fd25aa8 100644
--- a/spec/models/key_spec.rb
+++ b/spec/models/key_spec.rb
@@ -72,52 +72,15 @@ describe Key, :mailer do
expect(build(:key)).to be_valid
end
- it 'rejects the unfingerprintable key (not a key)' do
- expect(build(:key, key: 'ssh-rsa an-invalid-key==')).not_to be_valid
- end
-
- where(:factory, :chars, :expected_sections) do
- [
- [:key, ["\n", "\r\n"], 3],
- [:key, [' ', ' '], 3],
- [:key_without_comment, [' ', ' '], 2]
- ]
- end
-
- with_them do
- let!(:key) { create(factory) }
- let!(:original_fingerprint) { key.fingerprint }
-
- it 'accepts a key with blank space characters after stripping them' do
- modified_key = key.key.insert(100, chars.first).insert(40, chars.last)
- _, content = modified_key.split
-
- key.update!(key: modified_key)
-
- expect(key).to be_valid
- expect(key.key.split.size).to eq(expected_sections)
-
- expect(content).not_to match(/\s/)
- expect(original_fingerprint).to eq(key.fingerprint)
- end
- end
- end
-
- context 'validate size' do
- where(:key_content, :result) do
- [
- [Spec::Support::Helpers::KeyGeneratorHelper.new(512).generate, false],
- [Spec::Support::Helpers::KeyGeneratorHelper.new(8192).generate, false],
- [Spec::Support::Helpers::KeyGeneratorHelper.new(1024).generate, true]
- ]
+ it 'accepts a key with newline characters after stripping them' do
+ key = build(:key)
+ key.key = key.key.insert(100, "\n")
+ key.key = key.key.insert(40, "\r\n")
+ expect(key).to be_valid
end
- with_them do
- it 'validates the size of the key' do
- key = build(:key, key: key_content)
-
- expect(key.valid?).to eq(result)
- end
+ it 'rejects the unfingerprintable key (not a key)' do
+ expect(build(:key, key: 'ssh-rsa an-invalid-key==')).not_to be_valid
end
end
diff --git a/spec/models/lfs_file_lock_spec.rb b/spec/models/lfs_file_lock_spec.rb
new file mode 100644
index 00000000000..ce87b01b49c
--- /dev/null
+++ b/spec/models/lfs_file_lock_spec.rb
@@ -0,0 +1,57 @@
+require 'rails_helper'
+
+describe LfsFileLock do
+ set(:lfs_file_lock) { create(:lfs_file_lock) }
+ subject { lfs_file_lock }
+
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:user) }
+
+ it { is_expected.to validate_presence_of(:project_id) }
+ it { is_expected.to validate_presence_of(:user_id) }
+ it { is_expected.to validate_presence_of(:path) }
+
+ describe '#can_be_unlocked_by?' do
+ let(:developer) { create(:user) }
+ let(:master) { create(:user) }
+
+ before do
+ project = lfs_file_lock.project
+
+ project.add_developer(developer)
+ project.add_master(master)
+ end
+
+ context "when it's forced" do
+ it 'can be unlocked by the author' do
+ user = lfs_file_lock.user
+
+ expect(lfs_file_lock.can_be_unlocked_by?(user, true)).to eq(true)
+ end
+
+ it 'can be unlocked by a master' do
+ expect(lfs_file_lock.can_be_unlocked_by?(master, true)).to eq(true)
+ end
+
+ it "can't be unlocked by other user" do
+ expect(lfs_file_lock.can_be_unlocked_by?(developer, true)).to eq(false)
+ end
+ end
+
+ context "when it isn't forced" do
+ it 'can be unlocked by the author' do
+ user = lfs_file_lock.user
+
+ expect(lfs_file_lock.can_be_unlocked_by?(user)).to eq(true)
+ end
+
+ it "can't be unlocked by a master" do
+ expect(lfs_file_lock.can_be_unlocked_by?(master)).to eq(false)
+ end
+
+ it "can't be unlocked by other user" do
+ expect(lfs_file_lock.can_be_unlocked_by?(developer)).to eq(false)
+ end
+ end
+ end
+end
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 191b60e4383..e626efd054d 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -168,84 +168,105 @@ describe Namespace do
end
describe '#move_dir', :request_store do
- let(:namespace) { create(:namespace) }
- let!(:project) { create(:project_empty_repo, namespace: namespace) }
+ shared_examples "namespace restrictions" do
+ context "when any project has container images" do
+ let(:container_repository) { create(:container_repository) }
- it "raises error when directory exists" do
- expect { namespace.move_dir }.to raise_error("namespace directory cannot be moved")
- end
+ before do
+ stub_container_registry_config(enabled: true)
+ stub_container_registry_tags(repository: :any, tags: ['tag'])
- it "moves dir if path changed" do
- namespace.update_attributes(path: namespace.full_path + '_new')
+ create(:project, namespace: namespace, container_repositories: [container_repository])
- expect(gitlab_shell.exists?(project.repository_storage_path, "#{namespace.path}/#{project.path}.git")).to be_truthy
- end
+ allow(namespace).to receive(:path_was).and_return(namespace.path)
+ allow(namespace).to receive(:path).and_return('new_path')
+ end
- context "when any project has container images" do
- let(:container_repository) { create(:container_repository) }
+ it 'raises an error about the non-movable project' do
+ expect { namespace.move_dir }.to raise_error(/Namespace cannot be moved/)
+ end
+ end
+ end
- before do
- stub_container_registry_config(enabled: true)
- stub_container_registry_tags(repository: :any, tags: ['tag'])
+ context 'legacy storage' do
+ let(:namespace) { create(:namespace) }
+ let!(:project) { create(:project_empty_repo, :legacy_storage, namespace: namespace) }
- create(:project, namespace: namespace, container_repositories: [container_repository])
+ it_behaves_like 'namespace restrictions'
- allow(namespace).to receive(:path_was).and_return(namespace.path)
- allow(namespace).to receive(:path).and_return('new_path')
+ it "raises error when directory exists" do
+ expect { namespace.move_dir }.to raise_error("namespace directory cannot be moved")
end
- it 'raises an error about not movable project' do
- expect { namespace.move_dir }.to raise_error(/Namespace cannot be moved/)
+ it "moves dir if path changed" do
+ namespace.update_attributes(path: namespace.full_path + '_new')
+
+ expect(gitlab_shell.exists?(project.repository_storage_path, "#{namespace.path}/#{project.path}.git")).to be_truthy
end
- end
- context 'with subgroups' do
- let(:parent) { create(:group, name: 'parent', path: 'parent') }
- let(:child) { create(:group, name: 'child', path: 'child', parent: parent) }
- let!(:project) { create(:project_empty_repo, path: 'the-project', namespace: child, skip_disk_validation: true) }
- let(:uploads_dir) { FileUploader.root }
- let(:pages_dir) { File.join(TestEnv.pages_path) }
+ context 'with subgroups' do
+ let(:parent) { create(:group, name: 'parent', path: 'parent') }
+ let(:child) { create(:group, name: 'child', path: 'child', parent: parent) }
+ let!(:project) { create(:project_empty_repo, :legacy_storage, path: 'the-project', namespace: child, skip_disk_validation: true) }
+ let(:uploads_dir) { FileUploader.root }
+ let(:pages_dir) { File.join(TestEnv.pages_path) }
- before do
- FileUtils.mkdir_p(File.join(uploads_dir, 'parent', 'child', 'the-project'))
- FileUtils.mkdir_p(File.join(pages_dir, 'parent', 'child', 'the-project'))
- end
+ before do
+ FileUtils.mkdir_p(File.join(uploads_dir, project.full_path))
+ FileUtils.mkdir_p(File.join(pages_dir, project.full_path))
+ end
+
+ context 'renaming child' do
+ it 'correctly moves the repository, uploads and pages' do
+ expected_repository_path = File.join(TestEnv.repos_path, 'parent', 'renamed', 'the-project.git')
+ expected_upload_path = File.join(uploads_dir, 'parent', 'renamed', 'the-project')
+ expected_pages_path = File.join(pages_dir, 'parent', 'renamed', 'the-project')
- context 'renaming child' do
- it 'correctly moves the repository, uploads and pages' do
- expected_repository_path = File.join(TestEnv.repos_path, 'parent', 'renamed', 'the-project.git')
- expected_upload_path = File.join(uploads_dir, 'parent', 'renamed', 'the-project')
- expected_pages_path = File.join(pages_dir, 'parent', 'renamed', 'the-project')
+ child.update_attributes!(path: 'renamed')
- child.update_attributes!(path: 'renamed')
+ expect(File.directory?(expected_repository_path)).to be(true)
+ expect(File.directory?(expected_upload_path)).to be(true)
+ expect(File.directory?(expected_pages_path)).to be(true)
+ end
+ end
+
+ context 'renaming parent' do
+ it 'correctly moves the repository, uploads and pages' do
+ expected_repository_path = File.join(TestEnv.repos_path, 'renamed', 'child', 'the-project.git')
+ expected_upload_path = File.join(uploads_dir, 'renamed', 'child', 'the-project')
+ expected_pages_path = File.join(pages_dir, 'renamed', 'child', 'the-project')
- expect(File.directory?(expected_repository_path)).to be(true)
- expect(File.directory?(expected_upload_path)).to be(true)
- expect(File.directory?(expected_pages_path)).to be(true)
+ parent.update_attributes!(path: 'renamed')
+
+ expect(File.directory?(expected_repository_path)).to be(true)
+ expect(File.directory?(expected_upload_path)).to be(true)
+ expect(File.directory?(expected_pages_path)).to be(true)
+ end
end
end
+ end
- context 'renaming parent' do
- it 'correctly moves the repository, uploads and pages' do
- expected_repository_path = File.join(TestEnv.repos_path, 'renamed', 'child', 'the-project.git')
- expected_upload_path = File.join(uploads_dir, 'renamed', 'child', 'the-project')
- expected_pages_path = File.join(pages_dir, 'renamed', 'child', 'the-project')
+ context 'hashed storage' do
+ let(:namespace) { create(:namespace) }
+ let!(:project) { create(:project_empty_repo, namespace: namespace) }
- parent.update_attributes!(path: 'renamed')
+ it_behaves_like 'namespace restrictions'
- expect(File.directory?(expected_repository_path)).to be(true)
- expect(File.directory?(expected_upload_path)).to be(true)
- expect(File.directory?(expected_pages_path)).to be(true)
- end
+ it "repository directory remains unchanged if path changed" do
+ before_disk_path = project.disk_path
+ namespace.update_attributes(path: namespace.full_path + '_new')
+
+ expect(before_disk_path).to eq(project.disk_path)
+ expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_truthy
end
end
it 'updates project full path in .git/config for each project inside namespace' do
parent = create(:group, name: 'mygroup', path: 'mygroup')
subgroup = create(:group, name: 'mysubgroup', path: 'mysubgroup', parent: parent)
- project_in_parent_group = create(:project, :repository, namespace: parent, name: 'foo1')
- hashed_project_in_subgroup = create(:project, :repository, :hashed, namespace: subgroup, name: 'foo2')
- legacy_project_in_subgroup = create(:project, :repository, namespace: subgroup, name: 'foo3')
+ project_in_parent_group = create(:project, :legacy_storage, :repository, namespace: parent, name: 'foo1')
+ hashed_project_in_subgroup = create(:project, :repository, namespace: subgroup, name: 'foo2')
+ legacy_project_in_subgroup = create(:project, :legacy_storage, :repository, namespace: subgroup, name: 'foo3')
parent.update(path: 'mygroup_new')
@@ -260,38 +281,18 @@ describe Namespace do
end
describe '#rm_dir', 'callback' do
- let!(:project) { create(:project_empty_repo, namespace: namespace) }
let(:repository_storage_path) { Gitlab.config.repositories.storages.default['path'] }
let(:path_in_dir) { File.join(repository_storage_path, namespace.full_path) }
let(:deleted_path) { namespace.full_path.gsub(namespace.path, "#{namespace.full_path}+#{namespace.id}+deleted") }
let(:deleted_path_in_dir) { File.join(repository_storage_path, deleted_path) }
- it 'renames its dirs when deleted' do
- allow(GitlabShellWorker).to receive(:perform_in)
-
- namespace.destroy
-
- expect(File.exist?(deleted_path_in_dir)).to be(true)
- end
-
- it 'schedules the namespace for deletion' do
- expect(GitlabShellWorker).to receive(:perform_in).with(5.minutes, :rm_namespace, repository_storage_path, deleted_path)
-
- namespace.destroy
- end
-
- context 'in sub-groups' do
- let(:parent) { create(:group, path: 'parent') }
- let(:child) { create(:group, parent: parent, path: 'child') }
- let!(:project) { create(:project_empty_repo, namespace: child) }
- let(:path_in_dir) { File.join(repository_storage_path, 'parent', 'child') }
- let(:deleted_path) { File.join('parent', "child+#{child.id}+deleted") }
- let(:deleted_path_in_dir) { File.join(repository_storage_path, deleted_path) }
+ context 'legacy storage' do
+ let!(:project) { create(:project_empty_repo, :legacy_storage, namespace: namespace) }
it 'renames its dirs when deleted' do
allow(GitlabShellWorker).to receive(:perform_in)
- child.destroy
+ namespace.destroy
expect(File.exist?(deleted_path_in_dir)).to be(true)
end
@@ -299,14 +300,57 @@ describe Namespace do
it 'schedules the namespace for deletion' do
expect(GitlabShellWorker).to receive(:perform_in).with(5.minutes, :rm_namespace, repository_storage_path, deleted_path)
- child.destroy
+ namespace.destroy
+ end
+
+ context 'in sub-groups' do
+ let(:parent) { create(:group, path: 'parent') }
+ let(:child) { create(:group, parent: parent, path: 'child') }
+ let!(:project) { create(:project_empty_repo, :legacy_storage, namespace: child) }
+ let(:path_in_dir) { File.join(repository_storage_path, 'parent', 'child') }
+ let(:deleted_path) { File.join('parent', "child+#{child.id}+deleted") }
+ let(:deleted_path_in_dir) { File.join(repository_storage_path, deleted_path) }
+
+ it 'renames its dirs when deleted' do
+ allow(GitlabShellWorker).to receive(:perform_in)
+
+ child.destroy
+
+ expect(File.exist?(deleted_path_in_dir)).to be(true)
+ end
+
+ it 'schedules the namespace for deletion' do
+ expect(GitlabShellWorker).to receive(:perform_in).with(5.minutes, :rm_namespace, repository_storage_path, deleted_path)
+
+ child.destroy
+ end
+ end
+
+ it 'removes the exports folder' do
+ expect(namespace).to receive(:remove_exports!)
+
+ namespace.destroy
end
end
- it 'removes the exports folder' do
- expect(namespace).to receive(:remove_exports!)
+ context 'hashed storage' do
+ let!(:project) { create(:project_empty_repo, namespace: namespace) }
+
+ it 'has no repository base directories to remove' do
+ allow(GitlabShellWorker).to receive(:perform_in)
+
+ expect(File.exist?(path_in_dir)).to be(false)
- namespace.destroy
+ namespace.destroy
+
+ expect(File.exist?(deleted_path_in_dir)).to be(false)
+ end
+
+ it 'removes the exports folder' do
+ expect(namespace).to receive(:remove_exports!)
+
+ namespace.destroy
+ end
end
end
@@ -567,8 +611,8 @@ describe Namespace do
end
describe '#remove_exports' do
- let(:legacy_project) { create(:project, :with_export, namespace: namespace) }
- let(:hashed_project) { create(:project, :with_export, :hashed, namespace: namespace) }
+ let(:legacy_project) { create(:project, :with_export, :legacy_storage, namespace: namespace) }
+ let(:hashed_project) { create(:project, :with_export, namespace: namespace) }
let(:export_path) { Dir.mktmpdir('namespace_remove_exports_spec') }
let(:legacy_export) { legacy_project.export_project_path }
let(:hashed_export) { hashed_project.export_project_path }
diff --git a/spec/models/project_auto_devops_spec.rb b/spec/models/project_auto_devops_spec.rb
index 12069575866..296b91a771c 100644
--- a/spec/models/project_auto_devops_spec.rb
+++ b/spec/models/project_auto_devops_spec.rb
@@ -18,7 +18,21 @@ describe ProjectAutoDevops do
context 'when domain is empty' do
let(:auto_devops) { build_stubbed(:project_auto_devops, project: project, domain: '') }
- it { expect(auto_devops).not_to have_domain }
+ context 'when there is an instance domain specified' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:auto_devops_domain).and_return('example.com')
+ end
+
+ it { expect(auto_devops).to have_domain }
+ end
+
+ context 'when there is no instance domain specified' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:auto_devops_domain).and_return(nil)
+ end
+
+ it { expect(auto_devops).not_to have_domain }
+ end
end
end
@@ -29,9 +43,32 @@ describe ProjectAutoDevops do
let(:domain) { 'example.com' }
it 'returns AUTO_DEVOPS_DOMAIN' do
- expect(auto_devops.variables).to include(
- { key: 'AUTO_DEVOPS_DOMAIN', value: 'example.com', public: true })
+ expect(auto_devops.variables).to include(domain_variable)
end
end
+
+ context 'when domain is not defined' do
+ let(:domain) { nil }
+
+ context 'when there is an instance domain specified' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:auto_devops_domain).and_return('example.com')
+ end
+
+ it { expect(auto_devops.variables).to include(domain_variable) }
+ end
+
+ context 'when there is no instance domain specified' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:auto_devops_domain).and_return(nil)
+ end
+
+ it { expect(auto_devops.variables).not_to include(domain_variable) }
+ end
+ end
+
+ def domain_variable
+ { key: 'AUTO_DEVOPS_DOMAIN', value: 'example.com', public: true }
+ end
end
end
diff --git a/spec/models/project_services/prometheus_service_spec.rb b/spec/models/project_services/prometheus_service_spec.rb
index bf39e8d7a39..ed17e019d42 100644
--- a/spec/models/project_services/prometheus_service_spec.rb
+++ b/spec/models/project_services/prometheus_service_spec.rb
@@ -13,17 +13,17 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
end
describe 'Validations' do
- context 'when service is active' do
+ context 'when manual_configuration is enabled' do
before do
- subject.active = true
+ subject.manual_configuration = true
end
it { is_expected.to validate_presence_of(:api_url) }
end
- context 'when service is inactive' do
+ context 'when manual configuration is disabled' do
before do
- subject.active = false
+ subject.manual_configuration = false
end
it { is_expected.not_to validate_presence_of(:api_url) }
@@ -31,12 +31,17 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
end
describe '#test' do
+ before do
+ service.manual_configuration = true
+ end
+
let!(:req_stub) { stub_prometheus_request(prometheus_query_url('1'), body: prometheus_value_body('vector')) }
context 'success' do
it 'reads the discovery endpoint' do
+ expect(service.test[:result]).to eq('Checked API endpoint')
expect(service.test[:success]).to be_truthy
- expect(req_stub).to have_been_requested
+ expect(req_stub).to have_been_requested.twice
end
end
@@ -70,6 +75,25 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
end
end
+ describe '#matched_metrics' do
+ let(:matched_metrics_query) { Gitlab::Prometheus::Queries::MatchedMetricsQuery }
+ let(:client) { double(:client, label_values: nil) }
+
+ context 'with valid data' do
+ subject { service.matched_metrics }
+
+ before do
+ allow(service).to receive(:client).and_return(client)
+ synchronous_reactive_cache(service)
+ end
+
+ it 'returns reactive data' do
+ expect(subject[:success]).to be_truthy
+ expect(subject[:data]).to eq([])
+ end
+ end
+ end
+
describe '#deployment_metrics' do
let(:deployment) { build_stubbed(:deployment) }
let(:deployment_query) { Gitlab::Prometheus::Queries::DeploymentQuery }
@@ -83,7 +107,7 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
let(:fake_deployment_time) { 10 }
before do
- stub_reactive_cache(service, prometheus_data, deployment_query, deployment.id)
+ stub_reactive_cache(service, prometheus_data, deployment_query, deployment.environment.id, deployment.id)
end
it 'returns reactive data' do
@@ -96,13 +120,17 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
describe '#calculate_reactive_cache' do
let(:environment) { create(:environment, slug: 'env-slug') }
-
- around do |example|
- Timecop.freeze { example.run }
+ before do
+ service.manual_configuration = true
+ service.active = true
end
subject do
- service.calculate_reactive_cache(environment_query.to_s, environment.id)
+ service.calculate_reactive_cache(environment_query.name, environment.id)
+ end
+
+ around do |example|
+ Timecop.freeze { example.run }
end
context 'when service is inactive' do
@@ -132,4 +160,193 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
end
end
end
+
+ describe '#client' do
+ context 'manual configuration is enabled' do
+ let(:api_url) { 'http://some_url' }
+ before do
+ subject.manual_configuration = true
+ subject.api_url = api_url
+ end
+
+ it 'returns simple rest client from api_url' do
+ expect(subject.client).to be_instance_of(Gitlab::PrometheusClient)
+ expect(subject.client.rest_client.url).to eq(api_url)
+ end
+ end
+
+ context 'manual configuration is disabled' do
+ let!(:cluster_for_all) { create(:cluster, environment_scope: '*', projects: [project]) }
+ let!(:cluster_for_dev) { create(:cluster, environment_scope: 'dev', projects: [project]) }
+
+ let!(:prometheus_for_dev) { create(:clusters_applications_prometheus, :installed, cluster: cluster_for_dev) }
+ let(:proxy_client) { double('proxy_client') }
+
+ before do
+ service.manual_configuration = false
+ end
+
+ context 'with cluster for all environments with prometheus installed' do
+ let!(:prometheus_for_all) { create(:clusters_applications_prometheus, :installed, cluster: cluster_for_all) }
+
+ context 'without environment supplied' do
+ it 'returns client handling all environments' do
+ expect(service).to receive(:client_from_cluster).with(cluster_for_all).and_return(proxy_client).twice
+
+ expect(service.client).to be_instance_of(Gitlab::PrometheusClient)
+ expect(service.client.rest_client).to eq(proxy_client)
+ end
+ end
+
+ context 'with dev environment supplied' do
+ let!(:environment) { create(:environment, project: project, name: 'dev') }
+
+ it 'returns dev cluster client' do
+ expect(service).to receive(:client_from_cluster).with(cluster_for_dev).and_return(proxy_client).twice
+
+ expect(service.client(environment.id)).to be_instance_of(Gitlab::PrometheusClient)
+ expect(service.client(environment.id).rest_client).to eq(proxy_client)
+ end
+ end
+
+ context 'with prod environment supplied' do
+ let!(:environment) { create(:environment, project: project, name: 'prod') }
+
+ it 'returns the all-environments cluster client' do
+ expect(service).to receive(:client_from_cluster).with(cluster_for_all).and_return(proxy_client).twice
+
+ expect(service.client(environment.id)).to be_instance_of(Gitlab::PrometheusClient)
+ expect(service.client(environment.id).rest_client).to eq(proxy_client)
+ end
+ end
+ end
+
+ context 'with cluster for all environments without prometheus installed' do
+ context 'without environment supplied' do
+ it 'raises PrometheusError because cluster was not found' do
+ expect { service.client }.to raise_error(Gitlab::PrometheusError, /couldn't find cluster with Prometheus installed/)
+ end
+ end
+
+ context 'with dev environment supplied' do
+ let!(:environment) { create(:environment, project: project, name: 'dev') }
+
+ it 'returns dev cluster client' do
+ expect(service).to receive(:client_from_cluster).with(cluster_for_dev).and_return(proxy_client).twice
+
+ expect(service.client(environment.id)).to be_instance_of(Gitlab::PrometheusClient)
+ expect(service.client(environment.id).rest_client).to eq(proxy_client)
+ end
+ end
+
+ context 'with prod environment supplied' do
+ let!(:environment) { create(:environment, project: project, name: 'prod') }
+
+ it 'raises PrometheusError because cluster was not found' do
+ expect { service.client }.to raise_error(Gitlab::PrometheusError, /couldn't find cluster with Prometheus installed/)
+ end
+ end
+ end
+ end
+ end
+
+ describe '#prometheus_installed?' do
+ context 'clusters with installed prometheus' do
+ let!(:cluster) { create(:cluster, projects: [project]) }
+ let!(:prometheus) { create(:clusters_applications_prometheus, :installed, cluster: cluster) }
+
+ it 'returns true' do
+ expect(service.prometheus_installed?).to be(true)
+ end
+ end
+
+ context 'clusters without prometheus installed' do
+ let(:cluster) { create(:cluster, projects: [project]) }
+ let!(:prometheus) { create(:clusters_applications_prometheus, cluster: cluster) }
+
+ it 'returns false' do
+ expect(service.prometheus_installed?).to be(false)
+ end
+ end
+
+ context 'clusters without prometheus' do
+ let(:cluster) { create(:cluster, projects: [project]) }
+
+ it 'returns false' do
+ expect(service.prometheus_installed?).to be(false)
+ end
+ end
+
+ context 'no clusters' do
+ it 'returns false' do
+ expect(service.prometheus_installed?).to be(false)
+ end
+ end
+ end
+
+ describe '#synchronize_service_state! before_save callback' do
+ context 'no clusters with prometheus are installed' do
+ context 'when service is inactive' do
+ before do
+ service.active = false
+ end
+
+ it 'activates service when manual_configuration is enabled' do
+ expect { service.update!(manual_configuration: true) }.to change { service.active }.from(false).to(true)
+ end
+
+ it 'keeps service inactive when manual_configuration is disabled' do
+ expect { service.update!(manual_configuration: false) }.not_to change { service.active }.from(false)
+ end
+ end
+
+ context 'when service is active' do
+ before do
+ service.active = true
+ end
+
+ it 'keeps the service active when manual_configuration is enabled' do
+ expect { service.update!(manual_configuration: true) }.not_to change { service.active }.from(true)
+ end
+
+ it 'inactivates the service when manual_configuration is disabled' do
+ expect { service.update!(manual_configuration: false) }.to change { service.active }.from(true).to(false)
+ end
+ end
+ end
+
+ context 'with prometheus installed in the cluster' do
+ before do
+ allow(service).to receive(:prometheus_installed?).and_return(true)
+ end
+
+ context 'when service is inactive' do
+ before do
+ service.active = false
+ end
+
+ it 'activates service when manual_configuration is enabled' do
+ expect { service.update!(manual_configuration: true) }.to change { service.active }.from(false).to(true)
+ end
+
+ it 'activates service when manual_configuration is disabled' do
+ expect { service.update!(manual_configuration: false) }.to change { service.active }.from(false).to(true)
+ end
+ end
+
+ context 'when service is active' do
+ before do
+ service.active = true
+ end
+
+ it 'keeps service active when manual_configuration is enabled' do
+ expect { service.update!(manual_configuration: true) }.not_to change { service.active }.from(true)
+ end
+
+ it 'keeps service active when manual_configuration is disabled' do
+ expect { service.update!(manual_configuration: false) }.not_to change { service.active }.from(true)
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index a63f5d6d5a1..ee04d74d848 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -80,6 +80,7 @@ describe Project do
it { is_expected.to have_many(:members_and_requesters) }
it { is_expected.to have_many(:clusters) }
it { is_expected.to have_many(:custom_attributes).class_name('ProjectCustomAttribute') }
+ it { is_expected.to have_many(:lfs_file_locks) }
context 'after initialized' do
it "has a project_feature" do
@@ -2070,7 +2071,7 @@ describe Project do
create(:ci_variable, :protected, value: 'protected', project: project)
end
- subject { project.secret_variables_for(ref: 'ref') }
+ subject { project.reload.secret_variables_for(ref: 'ref') }
before do
stub_application_setting(
@@ -2091,7 +2092,7 @@ describe Project do
context 'when the ref is a protected branch' do
before do
- create(:protected_branch, name: 'ref', project: project)
+ allow(project).to receive(:protected_for?).with('ref').and_return(true)
end
it_behaves_like 'ref is protected'
@@ -2099,7 +2100,7 @@ describe Project do
context 'when the ref is a protected tag' do
before do
- create(:protected_tag, name: 'ref', project: project)
+ allow(project).to receive(:protected_for?).with('ref').and_return(true)
end
it_behaves_like 'ref is protected'
@@ -2124,6 +2125,8 @@ describe Project do
context 'when the ref is a protected branch' do
before do
+ allow(project).to receive(:repository).and_call_original
+ allow(project).to receive_message_chain(:repository, :branch_exists?).and_return(true)
create(:protected_branch, name: 'ref', project: project)
end
@@ -2134,6 +2137,8 @@ describe Project do
context 'when the ref is a protected tag' do
before do
+ allow(project).to receive_message_chain(:repository, :branch_exists?).and_return(false)
+ allow(project).to receive_message_chain(:repository, :tag_exists?).and_return(true)
create(:protected_tag, name: 'ref', project: project)
end
@@ -2503,6 +2508,7 @@ describe Project do
end
describe '#remove_exports' do
+ let(:legacy_project) { create(:project, :legacy_storage, :with_export) }
let(:project) { create(:project, :with_export) }
it 'removes the exports directory for the project' do
@@ -2515,15 +2521,29 @@ describe Project do
expect(File.exist?(project.export_path)).to be_falsy
end
- it 'is a no-op when there is no namespace' do
+ it 'is a no-op on legacy projects when there is no namespace' do
+ export_path = legacy_project.export_path
+
+ legacy_project.update_column(:namespace_id, nil)
+
+ expect(FileUtils).not_to receive(:rm_rf).with(export_path)
+
+ legacy_project.remove_exports
+
+ expect(File.exist?(export_path)).to be_truthy
+ end
+
+ it 'runs on hashed storage projects when there is no namespace' do
export_path = project.export_path
+
project.update_column(:namespace_id, nil)
- expect(FileUtils).not_to receive(:rm_rf).with(export_path)
+ allow(FileUtils).to receive(:rm_rf).and_call_original
+ expect(FileUtils).to receive(:rm_rf).with(export_path).and_call_original
project.remove_exports
- expect(File.exist?(export_path)).to be_truthy
+ expect(File.exist?(export_path)).to be_falsy
end
it 'is run when the project is destroyed' do
@@ -2544,7 +2564,7 @@ describe Project do
end
context 'legacy storage' do
- let(:project) { create(:project, :repository) }
+ let(:project) { create(:project, :repository, :legacy_storage) }
let(:gitlab_shell) { Gitlab::Shell.new }
let(:project_storage) { project.send(:storage) }
@@ -2718,6 +2738,8 @@ describe Project do
let(:project) { create(:project, :repository, skip_disk_validation: true) }
let(:gitlab_shell) { Gitlab::Shell.new }
let(:hash) { Digest::SHA2.hexdigest(project.id.to_s) }
+ let(:hashed_prefix) { File.join('@hashed', hash[0..1], hash[2..3]) }
+ let(:hashed_path) { File.join(hashed_prefix, hash) }
before do
stub_application_setting(hashed_storage_enabled: true)
@@ -2743,14 +2765,12 @@ describe Project do
describe '#base_dir' do
it 'returns base_dir based on hash of project id' do
- expect(project.base_dir).to eq("@hashed/#{hash[0..1]}/#{hash[2..3]}")
+ expect(project.base_dir).to eq(hashed_prefix)
end
end
describe '#disk_path' do
it 'returns disk_path based on hash of project id' do
- hashed_path = "@hashed/#{hash[0..1]}/#{hash[2..3]}/#{hash}"
-
expect(project.disk_path).to eq(hashed_path)
end
end
@@ -2759,7 +2779,7 @@ describe Project do
it 'delegates to gitlab_shell to ensure namespace is created' do
allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
- expect(gitlab_shell).to receive(:add_namespace).with(project.repository_storage_path, "@hashed/#{hash[0..1]}/#{hash[2..3]}")
+ expect(gitlab_shell).to receive(:add_namespace).with(project.repository_storage_path, hashed_prefix)
project.ensure_storage_path_exists
end
@@ -3009,18 +3029,40 @@ describe Project do
subject { project.auto_devops_variables }
- context 'when enabled in settings' do
+ context 'when enabled in instance settings' do
before do
stub_application_setting(auto_devops_enabled: true)
end
context 'when domain is empty' do
before do
+ stub_application_setting(auto_devops_domain: nil)
+ end
+
+ it 'variables do not include AUTO_DEVOPS_DOMAIN' do
+ is_expected.not_to include(domain_variable)
+ end
+ end
+
+ context 'when domain is configured' do
+ before do
+ stub_application_setting(auto_devops_domain: 'example.com')
+ end
+
+ it 'variables include AUTO_DEVOPS_DOMAIN' do
+ is_expected.to include(domain_variable)
+ end
+ end
+ end
+
+ context 'when explicitly enabled' do
+ context 'when domain is empty' do
+ before do
create(:project_auto_devops, project: project, domain: nil)
end
- it 'variables are empty' do
- is_expected.to be_empty
+ it 'variables do not include AUTO_DEVOPS_DOMAIN' do
+ is_expected.not_to include(domain_variable)
end
end
@@ -3029,11 +3071,15 @@ describe Project do
create(:project_auto_devops, project: project, domain: 'example.com')
end
- it "variables are not empty" do
- is_expected.not_to be_empty
+ it 'variables include AUTO_DEVOPS_DOMAIN' do
+ is_expected.to include(domain_variable)
end
end
end
+
+ def domain_variable
+ { key: 'AUTO_DEVOPS_DOMAIN', value: 'example.com', public: true }
+ end
end
describe '#latest_successful_builds_for' do
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 02a5ee54262..0bc07dc7a85 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -262,6 +262,28 @@ describe Repository do
end
end
+ describe '#new_commits' do
+ let(:new_refs) do
+ double(:git_rev_list, new_refs: %w[
+ c1acaa58bbcbc3eafe538cb8274ba387047b69f8
+ 5937ac0a7beb003549fc5fd26fc247adbce4a52e
+ ])
+ end
+
+ it 'delegates to Gitlab::Git::RevList' do
+ expect(Gitlab::Git::RevList).to receive(:new).with(
+ repository.raw,
+ newrev: 'aaaabbbbccccddddeeeeffffgggghhhhiiiijjjj').and_return(new_refs)
+
+ commits = repository.new_commits('aaaabbbbccccddddeeeeffffgggghhhhiiiijjjj')
+
+ expect(commits).to eq([
+ repository.commit('c1acaa58bbcbc3eafe538cb8274ba387047b69f8'),
+ repository.commit('5937ac0a7beb003549fc5fd26fc247adbce4a52e')
+ ])
+ end
+ end
+
describe '#commits_by' do
set(:project) { create(:project, :repository) }
@@ -851,6 +873,18 @@ describe Repository do
expect(repository.license_key).to be_nil
end
+ it 'returns nil when the commit SHA does not exist' do
+ allow(repository.head_commit).to receive(:sha).and_return('1' * 40)
+
+ expect(repository.license_key).to be_nil
+ end
+
+ it 'returns nil when master does not exist' do
+ repository.rm_branch(user, 'master')
+
+ expect(repository.license_key).to be_nil
+ end
+
it 'returns the license key' do
repository.create_file(user, 'LICENSE',
Licensee::License.new('mit').content,
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index cb02d526a98..1815696a8a0 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -893,6 +893,14 @@ describe User do
end
end
+ describe '.find_for_database_authentication' do
+ it 'strips whitespace from login' do
+ user = create(:user)
+
+ expect(described_class.find_for_database_authentication({ login: " #{user.username} " })).to eq user
+ end
+ end
+
describe '.find_by_any_email' do
it 'finds by primary email' do
user = create(:user, email: 'foo@example.com')
@@ -1586,14 +1594,37 @@ describe User do
describe '#authorized_groups' do
let!(:user) { create(:user) }
let!(:private_group) { create(:group) }
+ let!(:child_group) { create(:group, parent: private_group) }
+
+ let!(:project_group) { create(:group) }
+ let!(:project) { create(:project, group: project_group) }
before do
private_group.add_user(user, Gitlab::Access::MASTER)
+ project.add_master(user)
end
subject { user.authorized_groups }
- it { is_expected.to eq([private_group]) }
+ it { is_expected.to contain_exactly private_group, project_group }
+ end
+
+ describe '#membership_groups' do
+ let!(:user) { create(:user) }
+ let!(:parent_group) { create(:group) }
+ let!(:child_group) { create(:group, parent: parent_group) }
+
+ before do
+ parent_group.add_user(user, Gitlab::Access::MASTER)
+ end
+
+ subject { user.membership_groups }
+
+ if Group.supports_nested_groups?
+ it { is_expected.to contain_exactly parent_group, child_group }
+ else
+ it { is_expected.to contain_exactly parent_group }
+ end
end
describe '#authorized_projects', :delete do
diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb
index d53ba497ed1..b2b7721674c 100644
--- a/spec/models/wiki_page_spec.rb
+++ b/spec/models/wiki_page_spec.rb
@@ -188,162 +188,181 @@ describe WikiPage do
end
end
- describe '#create', :skip_gitaly_mock do
- context 'with valid attributes' do
- it 'raises an error if a page with the same path already exists' do
- create_page('New Page', 'content')
- create_page('foo/bar', 'content')
- expect { create_page('New Page', 'other content') }.to raise_error Gitlab::Git::Wiki::DuplicatePageError
- expect { create_page('foo/bar', 'other content') }.to raise_error Gitlab::Git::Wiki::DuplicatePageError
-
- destroy_page('New Page')
- destroy_page('bar', 'foo')
- end
+ describe '#create' do
+ shared_examples 'create method' do
+ context 'with valid attributes' do
+ it 'raises an error if a page with the same path already exists' do
+ create_page('New Page', 'content')
+ create_page('foo/bar', 'content')
+ expect { create_page('New Page', 'other content') }.to raise_error Gitlab::Git::Wiki::DuplicatePageError
+ expect { create_page('foo/bar', 'other content') }.to raise_error Gitlab::Git::Wiki::DuplicatePageError
+
+ destroy_page('New Page')
+ destroy_page('bar', 'foo')
+ end
- it 'if the title is preceded by a / it is removed' do
- create_page('/New Page', 'content')
+ it 'removes a leading / from the title' do
+ create_page('/New Page', 'content')
- expect(wiki.find_page('New Page')).not_to be_nil
+ expect(wiki.find_page('New Page')).not_to be_nil
- destroy_page('New Page')
+ destroy_page('New Page')
+ end
end
end
- end
- # Remove skip_gitaly_mock flag when gitaly_update_page implements moving pages
- describe "#update", :skip_gitaly_mock do
- before do
- create_page("Update", "content")
- @page = wiki.find_page("Update")
+ context 'when Gitaly is enabled' do
+ it_behaves_like 'create method'
end
- after do
- destroy_page(@page.title, @page.directory)
+ context 'when Gitaly is disabled', :skip_gitaly_mock do
+ it_behaves_like 'create method'
end
+ end
- context "with valid attributes" do
- it "updates the content of the page" do
- new_content = "new content"
-
- @page.update(content: new_content)
+ describe "#update" do
+ shared_examples 'update method' do
+ before do
+ create_page("Update", "content")
@page = wiki.find_page("Update")
+ end
- expect(@page.content).to eq("new content")
+ after do
+ destroy_page(@page.title, @page.directory)
end
- it "updates the title of the page" do
- new_title = "Index v.1.2.4"
+ context "with valid attributes" do
+ it "updates the content of the page" do
+ new_content = "new content"
- @page.update(title: new_title)
- @page = wiki.find_page(new_title)
+ @page.update(content: new_content)
+ @page = wiki.find_page("Update")
- expect(@page.title).to eq(new_title)
- end
+ expect(@page.content).to eq("new content")
+ end
- it "returns true" do
- expect(@page.update(content: "more content")).to be_truthy
+ it "updates the title of the page" do
+ new_title = "Index v.1.2.4"
+
+ @page.update(title: new_title)
+ @page = wiki.find_page(new_title)
+
+ expect(@page.title).to eq(new_title)
+ end
+
+ it "returns true" do
+ expect(@page.update(content: "more content")).to be_truthy
+ end
end
- end
- context 'with same last commit sha' do
- it 'returns true' do
- expect(@page.update(content: 'more content', last_commit_sha: @page.last_commit_sha)).to be_truthy
+ context 'with same last commit sha' do
+ it 'returns true' do
+ expect(@page.update(content: 'more content', last_commit_sha: @page.last_commit_sha)).to be_truthy
+ end
end
- end
- context 'with different last commit sha' do
- it 'raises exception' do
- expect { @page.update(content: 'more content', last_commit_sha: 'xxx') }.to raise_error(WikiPage::PageChangedError)
+ context 'with different last commit sha' do
+ it 'raises exception' do
+ expect { @page.update(content: 'more content', last_commit_sha: 'xxx') }.to raise_error(WikiPage::PageChangedError)
+ end
end
- end
- context 'when renaming a page' do
- it 'raises an error if the page already exists' do
- create_page('Existing Page', 'content')
+ context 'when renaming a page' do
+ it 'raises an error if the page already exists' do
+ create_page('Existing Page', 'content')
- expect { @page.update(title: 'Existing Page', content: 'new_content') }.to raise_error(WikiPage::PageRenameError)
- expect(@page.title).to eq 'Update'
- expect(@page.content).to eq 'new_content'
+ expect { @page.update(title: 'Existing Page', content: 'new_content') }.to raise_error(WikiPage::PageRenameError)
+ expect(@page.title).to eq 'Update'
+ expect(@page.content).to eq 'new_content'
- destroy_page('Existing Page')
- end
+ destroy_page('Existing Page')
+ end
- it 'updates the content and rename the file' do
- new_title = 'Renamed Page'
- new_content = 'updated content'
+ it 'updates the content and rename the file' do
+ new_title = 'Renamed Page'
+ new_content = 'updated content'
- expect(@page.update(title: new_title, content: new_content)).to be_truthy
+ expect(@page.update(title: new_title, content: new_content)).to be_truthy
- @page = wiki.find_page(new_title)
+ @page = wiki.find_page(new_title)
- expect(@page).not_to be_nil
- expect(@page.content).to eq new_content
+ expect(@page).not_to be_nil
+ expect(@page.content).to eq new_content
+ end
end
- end
-
- context 'when moving a page' do
- it 'raises an error if the page already exists' do
- create_page('foo/Existing Page', 'content')
- expect { @page.update(title: 'foo/Existing Page', content: 'new_content') }.to raise_error(WikiPage::PageRenameError)
- expect(@page.title).to eq 'Update'
- expect(@page.content).to eq 'new_content'
+ context 'when moving a page' do
+ it 'raises an error if the page already exists' do
+ create_page('foo/Existing Page', 'content')
- destroy_page('Existing Page', 'foo')
- end
+ expect { @page.update(title: 'foo/Existing Page', content: 'new_content') }.to raise_error(WikiPage::PageRenameError)
+ expect(@page.title).to eq 'Update'
+ expect(@page.content).to eq 'new_content'
- it 'updates the content and moves the file' do
- new_title = 'foo/Other Page'
- new_content = 'new_content'
+ destroy_page('Existing Page', 'foo')
+ end
- expect(@page.update(title: new_title, content: new_content)).to be_truthy
+ it 'updates the content and moves the file' do
+ new_title = 'foo/Other Page'
+ new_content = 'new_content'
- page = wiki.find_page(new_title)
+ expect(@page.update(title: new_title, content: new_content)).to be_truthy
- expect(page).not_to be_nil
- expect(page.content).to eq new_content
- end
+ page = wiki.find_page(new_title)
- context 'in subdir' do
- before do
- create_page('foo/Existing Page', 'content')
- @page = wiki.find_page('foo/Existing Page')
+ expect(page).not_to be_nil
+ expect(page.content).to eq new_content
end
- it 'moves the page to the root folder if the title is preceded by /' do
- expect(@page.slug).to eq 'foo/Existing-Page'
- expect(@page.update(title: '/Existing Page', content: 'new_content')).to be_truthy
- expect(@page.slug).to eq 'Existing-Page'
+ context 'in subdir' do
+ before do
+ create_page('foo/Existing Page', 'content')
+ @page = wiki.find_page('foo/Existing Page')
+ end
+
+ it 'moves the page to the root folder if the title is preceded by /', :skip_gitaly_mock do
+ expect(@page.slug).to eq 'foo/Existing-Page'
+ expect(@page.update(title: '/Existing Page', content: 'new_content')).to be_truthy
+ expect(@page.slug).to eq 'Existing-Page'
+ end
+
+ it 'does nothing if it has the same title' do
+ original_path = @page.slug
+
+ expect(@page.update(title: 'Existing Page', content: 'new_content')).to be_truthy
+ expect(@page.slug).to eq original_path
+ end
end
- it 'does nothing if it has the same title' do
- original_path = @page.slug
+ context 'in root dir' do
+ it 'does nothing if the title is preceded by /' do
+ original_path = @page.slug
- expect(@page.update(title: 'Existing Page', content: 'new_content')).to be_truthy
- expect(@page.slug).to eq original_path
+ expect(@page.update(title: '/Update', content: 'new_content')).to be_truthy
+ expect(@page.slug).to eq original_path
+ end
end
end
- context 'in root dir' do
- it 'does nothing if the title is preceded by /' do
- original_path = @page.slug
+ context "with invalid attributes" do
+ it 'aborts update if title blank' do
+ expect(@page.update(title: '', content: 'new_content')).to be_falsey
+ expect(@page.content).to eq 'new_content'
- expect(@page.update(title: '/Update', content: 'new_content')).to be_truthy
- expect(@page.slug).to eq original_path
+ page = wiki.find_page('Update')
+ expect(page.content).to eq 'content'
+
+ @page.title = 'Update'
end
end
end
- context "with invalid attributes" do
- it 'aborts update if title blank' do
- expect(@page.update(title: '', content: 'new_content')).to be_falsey
- expect(@page.content).to eq 'new_content'
-
- page = wiki.find_page('Update')
- expect(page.content).to eq 'content'
+ context 'when Gitaly is enabled' do
+ it_behaves_like 'update method'
+ end
- @page.title = 'Update'
- end
+ context 'when Gitaly is disabled', :skip_gitaly_mock do
+ it_behaves_like 'update method'
end
end
diff --git a/spec/policies/personal_snippet_policy_spec.rb b/spec/policies/personal_snippet_policy_spec.rb
index b70c8646a3d..50bb0899eba 100644
--- a/spec/policies/personal_snippet_policy_spec.rb
+++ b/spec/policies/personal_snippet_policy_spec.rb
@@ -1,5 +1,6 @@
require 'spec_helper'
+# Snippet visibility scenarios are covered in more detail in spec/support/snippet_visibility.rb
describe PersonalSnippetPolicy do
let(:regular_user) { create(:user) }
let(:external_user) { create(:user, :external) }
diff --git a/spec/policies/project_snippet_policy_spec.rb b/spec/policies/project_snippet_policy_spec.rb
index cdba1b09fc1..4d32e06b553 100644
--- a/spec/policies/project_snippet_policy_spec.rb
+++ b/spec/policies/project_snippet_policy_spec.rb
@@ -1,5 +1,6 @@
require 'spec_helper'
+# Snippet visibility scenarios are covered in more detail in spec/support/snippet_visibility.rb
describe ProjectSnippetPolicy do
let(:regular_user) { create(:user) }
let(:external_user) { create(:user, :external) }
diff --git a/spec/presenters/ci/group_variable_presenter_spec.rb b/spec/presenters/ci/group_variable_presenter_spec.rb
index d404028405b..cb58a757564 100644
--- a/spec/presenters/ci/group_variable_presenter_spec.rb
+++ b/spec/presenters/ci/group_variable_presenter_spec.rb
@@ -35,29 +35,20 @@ describe Ci::GroupVariablePresenter do
end
describe '#form_path' do
- context 'when variable is persisted' do
- subject { described_class.new(variable).form_path }
+ subject { described_class.new(variable).form_path }
- it { is_expected.to eq(group_variable_path(group, variable)) }
- end
-
- context 'when variable is not persisted' do
- let(:variable) { build(:ci_group_variable, group: group) }
- subject { described_class.new(variable).form_path }
-
- it { is_expected.to eq(group_variables_path(group)) }
- end
+ it { is_expected.to eq(group_settings_ci_cd_path(group)) }
end
describe '#edit_path' do
subject { described_class.new(variable).edit_path }
- it { is_expected.to eq(group_variable_path(group, variable)) }
+ it { is_expected.to eq(group_variables_path(group)) }
end
describe '#delete_path' do
subject { described_class.new(variable).delete_path }
- it { is_expected.to eq(group_variable_path(group, variable)) }
+ it { is_expected.to eq(group_variables_path(group)) }
end
end
diff --git a/spec/presenters/ci/variable_presenter_spec.rb b/spec/presenters/ci/variable_presenter_spec.rb
index db62f86edb0..e3ce88372ea 100644
--- a/spec/presenters/ci/variable_presenter_spec.rb
+++ b/spec/presenters/ci/variable_presenter_spec.rb
@@ -35,29 +35,20 @@ describe Ci::VariablePresenter do
end
describe '#form_path' do
- context 'when variable is persisted' do
- subject { described_class.new(variable).form_path }
+ subject { described_class.new(variable).form_path }
- it { is_expected.to eq(project_variable_path(project, variable)) }
- end
-
- context 'when variable is not persisted' do
- let(:variable) { build(:ci_variable, project: project) }
- subject { described_class.new(variable).form_path }
-
- it { is_expected.to eq(project_variables_path(project)) }
- end
+ it { is_expected.to eq(project_settings_ci_cd_path(project)) }
end
describe '#edit_path' do
subject { described_class.new(variable).edit_path }
- it { is_expected.to eq(project_variable_path(project, variable)) }
+ it { is_expected.to eq(project_variables_path(project)) }
end
describe '#delete_path' do
subject { described_class.new(variable).delete_path }
- it { is_expected.to eq(project_variable_path(project, variable)) }
+ it { is_expected.to eq(project_variables_path(project)) }
end
end
diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb
index ff5f207487b..31959d28fee 100644
--- a/spec/requests/api/commits_spec.rb
+++ b/spec/requests/api/commits_spec.rb
@@ -465,6 +465,72 @@ describe API::Commits do
end
end
+ describe 'GET /projects/:id/repository/commits/:sha/refs' do
+ let(:project) { create(:project, :public, :repository) }
+ let(:tag) { project.repository.find_tag('v1.1.0') }
+ let(:commit_id) { tag.dereferenced_target.id }
+ let(:route) { "/projects/#{project_id}/repository/commits/#{commit_id}/refs" }
+
+ context 'when ref does not exist' do
+ let(:commit_id) { 'unknown' }
+
+ it_behaves_like '404 response' do
+ let(:request) { get api(route, current_user) }
+ let(:message) { '404 Commit Not Found' }
+ end
+ end
+
+ context 'when repository is disabled' do
+ include_context 'disabled repository'
+
+ it_behaves_like '403 response' do
+ let(:request) { get api(route, current_user) }
+ end
+ end
+
+ context 'for a valid commit' do
+ it 'returns all refs with no scope' do
+ get api(route, current_user), per_page: 100
+
+ refs = project.repository.branch_names_contains(commit_id).map {|name| ['branch', name]}
+ refs.concat(project.repository.tag_names_contains(commit_id).map {|name| ['tag', name]})
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.map { |r| [r['type'], r['name']] }.compact).to eq(refs)
+ end
+
+ it 'returns all refs' do
+ get api(route, current_user), type: 'all', per_page: 100
+
+ refs = project.repository.branch_names_contains(commit_id).map {|name| ['branch', name]}
+ refs.concat(project.repository.tag_names_contains(commit_id).map {|name| ['tag', name]})
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response.map { |r| [r['type'], r['name']] }.compact).to eq(refs)
+ end
+
+ it 'returns the branch refs' do
+ get api(route, current_user), type: 'branch', per_page: 100
+
+ refs = project.repository.branch_names_contains(commit_id).map {|name| ['branch', name]}
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response.map { |r| [r['type'], r['name']] }.compact).to eq(refs)
+ end
+
+ it 'returns the tag refs' do
+ get api(route, current_user), type: 'tag', per_page: 100
+
+ refs = project.repository.tag_names_contains(commit_id).map {|name| ['tag', name]}
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response.map { |r| [r['type'], r['name']] }.compact).to eq(refs)
+ end
+ end
+ end
+
describe 'GET /projects/:id/repository/commits/:sha' do
let(:commit) { project.repository.commit }
let(:commit_id) { commit.id }
diff --git a/spec/requests/api/group_variables_spec.rb b/spec/requests/api/group_variables_spec.rb
index a4f198eb5c9..64fa7dc824c 100644
--- a/spec/requests/api/group_variables_spec.rb
+++ b/spec/requests/api/group_variables_spec.rb
@@ -142,12 +142,12 @@ describe API::GroupVariables do
end
it 'updates variable data' do
- initial_variable = group.variables.first
+ initial_variable = group.variables.reload.first
value_before = initial_variable.value
put api("/groups/#{group.id}/variables/#{variable.key}", user), value: 'VALUE_1_UP', protected: true
- updated_variable = group.variables.first
+ updated_variable = group.variables.reload.first
expect(response).to have_gitlab_http_status(200)
expect(value_before).to eq(variable.value)
diff --git a/spec/requests/api/internal_spec.rb b/spec/requests/api/internal_spec.rb
index ea6b0a71849..c7df6251d74 100644
--- a/spec/requests/api/internal_spec.rb
+++ b/spec/requests/api/internal_spec.rb
@@ -366,20 +366,9 @@ describe API::Internal do
end
end
- context 'project as /namespace/project' do
- it do
- push(key, project_with_repo_path('/' + project.full_path))
-
- expect(response).to have_gitlab_http_status(200)
- expect(json_response["status"]).to be_truthy
- expect(json_response["repository_path"]).to eq(project.repository.path_to_repo)
- expect(json_response["gl_repository"]).to eq("project-#{project.id}")
- end
- end
-
context 'project as namespace/project' do
it do
- push(key, project_with_repo_path(project.full_path))
+ push(key, project)
expect(response).to have_gitlab_http_status(200)
expect(json_response["status"]).to be_truthy
@@ -496,8 +485,10 @@ describe API::Internal do
end
context 'project does not exist' do
- it do
- pull(key, project_with_repo_path('gitlab/notexist'))
+ it 'returns a 200 response with status: false' do
+ project.destroy
+
+ pull(key, project)
expect(response).to have_gitlab_http_status(200)
expect(json_response["status"]).to be_falsey
@@ -569,6 +560,7 @@ describe API::Internal do
end
context 'the project path was changed' do
+ let(:project) { create(:project, :repository, :legacy_storage) }
let!(:old_path_to_repo) { project.repository.path_to_repo }
let!(:repository) { project.repository }
@@ -858,9 +850,14 @@ describe API::Internal do
end
end
- def project_with_repo_path(path)
- double().tap do |fake_project|
- allow(fake_project).to receive_message_chain('repository.path_to_repo' => path)
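+ # Maps a Project or ProjectWiki to the gl_repository identifier (e.g. "project-<id>")
+ # that the internal API now receives instead of a repository path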
+ def gl_repository_for(project_or_wiki)
+ case project_or_wiki
+ when ProjectWiki
+ project_or_wiki.project.gl_repository(is_wiki: true)
+ when Project
+ project_or_wiki.gl_repository(is_wiki: false)
+ else
+ nil
end
end
@@ -868,18 +865,8 @@ describe API::Internal do
post(
api("/internal/allowed"),
key_id: key.id,
- project: project.repository.path_to_repo,
- action: 'git-upload-pack',
- secret_token: secret_token,
- protocol: protocol
- )
- end
-
- def pull_with_path(key, path_to_repo, protocol = 'ssh')
- post(
- api("/internal/allowed"),
- key_id: key.id,
- project: path_to_repo,
+ project: project.full_path,
+ gl_repository: gl_repository_for(project),
action: 'git-upload-pack',
secret_token: secret_token,
protocol: protocol
@@ -891,20 +878,8 @@ describe API::Internal do
api("/internal/allowed"),
changes: 'd14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/master',
key_id: key.id,
- project: project.repository.path_to_repo,
- action: 'git-receive-pack',
- secret_token: secret_token,
- protocol: protocol,
- env: env
- )
- end
-
- def push_with_path(key, path_to_repo, protocol = 'ssh', env: nil)
- post(
- api("/internal/allowed"),
- changes: 'd14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/master',
- key_id: key.id,
- project: path_to_repo,
+ project: project.full_path,
+ gl_repository: gl_repository_for(project),
action: 'git-receive-pack',
secret_token: secret_token,
protocol: protocol,
@@ -917,7 +892,8 @@ describe API::Internal do
api("/internal/allowed"),
ref: 'master',
key_id: key.id,
- project: project.repository.path_to_repo,
+ project: project.full_path,
+ gl_repository: gl_repository_for(project),
action: 'git-upload-archive',
secret_token: secret_token,
protocol: 'ssh'
@@ -929,7 +905,7 @@ describe API::Internal do
api("/internal/lfs_authenticate"),
key_id: key_id,
secret_token: secret_token,
- project: project.repository.path_to_repo
+ project: project.full_path
)
end
end
diff --git a/spec/requests/api/project_import_spec.rb b/spec/requests/api/project_import_spec.rb
new file mode 100644
index 00000000000..987f6e26971
--- /dev/null
+++ b/spec/requests/api/project_import_spec.rb
@@ -0,0 +1,102 @@
+require 'spec_helper'
+
+describe API::ProjectImport do
+ let(:export_path) { "#{Dir.tmpdir}/project_export_spec" }
+ let(:user) { create(:user) }
+ let(:file) { File.join(Rails.root, 'spec', 'features', 'projects', 'import_export', 'test_project_export.tar.gz') }
+ let(:namespace) { create(:group) }
+ before do
+ allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
+
+ namespace.add_owner(user)
+ end
+
+ after do
+ FileUtils.rm_rf(export_path, secure: true)
+ end
+
+ describe 'POST /projects/import' do
+ it 'schedules an import using a namespace' do
+ stub_import(namespace)
+
+ post api('/projects/import', user), path: 'test-import', file: fixture_file_upload(file), namespace: namespace.id
+
+ expect(response).to have_gitlab_http_status(201)
+ end
+
+ it 'schedules an import using the namespace path' do
+ stub_import(namespace)
+
+ post api('/projects/import', user), path: 'test-import', file: fixture_file_upload(file), namespace: namespace.full_path
+
+ expect(response).to have_gitlab_http_status(201)
+ end
+
+ it 'schedules an import at the user namespace level' do
+ stub_import(user.namespace)
+
+ post api('/projects/import', user), path: 'test-import2', file: fixture_file_upload(file)
+
+ expect(response).to have_gitlab_http_status(201)
+ end
+
+ it 'does not schedule an import for a non-existent namespace' do
+ expect_any_instance_of(Project).not_to receive(:import_schedule)
+ expect(::Projects::CreateService).not_to receive(:new)
+
+ post api('/projects/import', user), namespace: 'nonexistent', path: 'test-import2', file: fixture_file_upload(file)
+
+ expect(response).to have_gitlab_http_status(404)
+ expect(json_response['message']).to eq('404 Namespace Not Found')
+ end
+
+ it 'does not schedule an import if the user has no permission to the namespace' do
+ expect_any_instance_of(Project).not_to receive(:import_schedule)
+
+ post(api('/projects/import', create(:user)),
+ path: 'test-import3',
+ file: fixture_file_upload(file),
+ namespace: namespace.full_path)
+
+ expect(response).to have_gitlab_http_status(404)
+ expect(json_response['message']).to eq('404 Namespace Not Found')
+ end
+
+ it 'does not schedule an import if the user does not upload a valid file' do
+ expect_any_instance_of(Project).not_to receive(:import_schedule)
+
+ post api('/projects/import', user), path: 'test-import3', file: './random/test'
+
+ expect(response).to have_gitlab_http_status(400)
+ expect(json_response['error']).to eq('file is invalid')
+ end
+
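+ # Sets expectations that stand in for the actual import, so these examples only
+ # exercise request handling and namespace resolution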
+ def stub_import(namespace)
+ expect_any_instance_of(Project).to receive(:import_schedule)
+ expect(::Projects::CreateService).to receive(:new).with(user, hash_including(namespace_id: namespace.id)).and_call_original
+ end
+ end
+
+ describe 'GET /projects/:id/import' do
+ it 'returns the import status' do
+ project = create(:project, import_status: 'started')
+ project.add_master(user)
+
+ get api("/projects/#{project.id}/import", user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to include('import_status' => 'started')
+ end
+
+ it 'returns the import status and the error if failed' do
+ project = create(:project, import_status: 'failed', import_error: 'error')
+ project.add_master(user)
+
+ get api("/projects/#{project.id}/import", user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to include('import_status' => 'failed',
+ 'import_error' => 'error')
+ end
+ end
+end
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index f11cd638d96..00dd8897e6a 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -460,7 +460,7 @@ describe API::Projects do
expect(response).to have_gitlab_http_status(201)
project.each_pair do |k, v|
- next if %i[has_external_issue_tracker issues_enabled merge_requests_enabled wiki_enabled].include?(k)
+ next if %i[has_external_issue_tracker issues_enabled merge_requests_enabled wiki_enabled storage_version].include?(k)
expect(json_response[k.to_s]).to eq(v)
end
@@ -622,12 +622,8 @@ describe API::Projects do
end
describe 'POST /projects/user/:id' do
- before do
- expect(project).to be_persisted
- end
-
it 'creates new project without path but with name and return 201' do
- expect { post api("/projects/user/#{user.id}", admin), name: 'Foo Project' }.to change {Project.count}.by(1)
+ expect { post api("/projects/user/#{user.id}", admin), name: 'Foo Project' }.to change { Project.count }.by(1)
expect(response).to have_gitlab_http_status(201)
project = Project.last
@@ -666,8 +662,9 @@ describe API::Projects do
post api("/projects/user/#{user.id}", admin), project
expect(response).to have_gitlab_http_status(201)
+
project.each_pair do |k, v|
- next if %i[has_external_issue_tracker path].include?(k)
+ next if %i[has_external_issue_tracker path storage_version].include?(k)
expect(json_response[k.to_s]).to eq(v)
end
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index 0bd88748479..f10b6e43d09 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -8,6 +8,7 @@ describe API::Runner do
before do
stub_gitlab_calls
stub_application_setting(runners_registration_token: registration_token)
+ allow_any_instance_of(Ci::Runner).to receive(:cache_attributes)
end
describe '/api/v4/runners' do
@@ -408,7 +409,7 @@ describe API::Runner do
expect { request_job }.to change { runner.reload.contacted_at }
end
- %w(name version revision platform architecture).each do |param|
+ %w(version revision platform architecture).each do |param|
context "when info parameter '#{param}' is present" do
let(:value) { "#{param}_value" }
diff --git a/spec/requests/api/search_spec.rb b/spec/requests/api/search_spec.rb
new file mode 100644
index 00000000000..9052a18c60b
--- /dev/null
+++ b/spec/requests/api/search_spec.rb
@@ -0,0 +1,318 @@
+require 'spec_helper'
+
+describe API::Search do
+ set(:user) { create(:user) }
+ set(:group) { create(:group) }
+ set(:project) { create(:project, :public, name: 'awesome project', group: group) }
+ set(:repo_project) { create(:project, :public, :repository, group: group) }
+
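+ # Shared assertions for search responses; `size` is the expected number of results in the JSON payload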
+ shared_examples 'response is correct' do |schema:, size: 1|
+ it { expect(response).to have_gitlab_http_status(200) }
+ it { expect(response).to match_response_schema(schema) }
+ it { expect(response).to include_limited_pagination_headers }
+ it { expect(json_response.size).to eq(size) }
+ end
+
+ describe 'GET /search' do
+ context 'when user is not authenticated' do
+ it 'returns 401 error' do
+ get api('/search'), scope: 'projects', search: 'awesome'
+
+ expect(response).to have_gitlab_http_status(401)
+ end
+ end
+
+ context 'when scope is not supported' do
+ it 'returns 400 error' do
+ get api('/search', user), scope: 'unsupported', search: 'awesome'
+
+ expect(response).to have_gitlab_http_status(400)
+ end
+ end
+
+ context 'when scope is missing' do
+ it 'returns 400 error' do
+ get api('/search', user), search: 'awesome'
+
+ expect(response).to have_gitlab_http_status(400)
+ end
+ end
+
+ context 'with correct params' do
+ context 'for projects scope' do
+ before do
+ get api('/search', user), scope: 'projects', search: 'awesome'
+ end
+
+ it_behaves_like 'response is correct', schema: 'public_api/v4/projects'
+ end
+
+ context 'for issues scope' do
+ before do
+ create(:issue, project: project, title: 'awesome issue')
+
+ get api('/search', user), scope: 'issues', search: 'awesome'
+ end
+
+ it_behaves_like 'response is correct', schema: 'public_api/v4/issues'
+ end
+
+ context 'for merge_requests scope' do
+ before do
+ create(:merge_request, source_project: repo_project, title: 'awesome mr')
+
+ get api('/search', user), scope: 'merge_requests', search: 'awesome'
+ end
+
+ it_behaves_like 'response is correct', schema: 'public_api/v4/merge_requests'
+ end
+
+ context 'for milestones scope' do
+ before do
+ create(:milestone, project: project, title: 'awesome milestone')
+
+ get api('/search', user), scope: 'milestones', search: 'awesome'
+ end
+
+ it_behaves_like 'response is correct', schema: 'public_api/v4/milestones'
+ end
+
+ context 'for snippet_titles scope' do
+ before do
+ create(:snippet, :public, title: 'awesome snippet', content: 'snippet content')
+
+ get api('/search', user), scope: 'snippet_titles', search: 'awesome'
+ end
+
+ it_behaves_like 'response is correct', schema: 'public_api/v4/snippets'
+ end
+
+ context 'for snippet_blobs scope' do
+ before do
+ create(:snippet, :public, title: 'awesome snippet', content: 'snippet content')
+
+ get api('/search', user), scope: 'snippet_blobs', search: 'content'
+ end
+
+ it_behaves_like 'response is correct', schema: 'public_api/v4/snippets'
+ end
+ end
+ end
+
+ describe "GET /groups/:id/-/search" do
+ context 'when user is not authenticated' do
+ it 'returns 401 error' do
+ get api("/groups/#{group.id}/-/search"), scope: 'projects', search: 'awesome'
+
+ expect(response).to have_gitlab_http_status(401)
+ end
+ end
+
+ context 'when scope is not supported' do
+ it 'returns 400 error' do
+ get api("/groups/#{group.id}/-/search", user), scope: 'unsupported', search: 'awesome'
+
+ expect(response).to have_gitlab_http_status(400)
+ end
+ end
+
+ context 'when scope is missing' do
+ it 'returns 400 error' do
+ get api("/groups/#{group.id}/-/search", user), search: 'awesome'
+
+ expect(response).to have_gitlab_http_status(400)
+ end
+ end
+
+ context 'when group does not exist' do
+ it 'returns 404 error' do
+ get api('/groups/9999/-/search', user), scope: 'issues', search: 'awesome'
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+
+ context 'when user cannot see the group' do
+ it 'returns 404 error' do
+ private_group = create(:group, :private)
+
+ get api("/groups/#{private_group.id}/-/search", user), scope: 'issues', search: 'awesome'
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+
+ context 'with correct params' do
+ context 'for projects scope' do
+ before do
+ get api("/groups/#{group.id}/-/search", user), scope: 'projects', search: 'awesome'
+ end
+
+ it_behaves_like 'response is correct', schema: 'public_api/v4/projects'
+ end
+
+ context 'for issues scope' do
+ before do
+ create(:issue, project: project, title: 'awesome issue')
+
+ get api("/groups/#{group.id}/-/search", user), scope: 'issues', search: 'awesome'
+ end
+
+ it_behaves_like 'response is correct', schema: 'public_api/v4/issues'
+ end
+
+ context 'for merge_requests scope' do
+ before do
+ create(:merge_request, source_project: repo_project, title: 'awesome mr')
+
+ get api("/groups/#{group.id}/-/search", user), scope: 'merge_requests', search: 'awesome'
+ end
+
+ it_behaves_like 'response is correct', schema: 'public_api/v4/merge_requests'
+ end
+
+ context 'for milestones scope' do
+ before do
+ create(:milestone, project: project, title: 'awesome milestone')
+
+ get api("/groups/#{group.id}/-/search", user), scope: 'milestones', search: 'awesome'
+ end
+
+ it_behaves_like 'response is correct', schema: 'public_api/v4/milestones'
+ end
+
+ context 'for milestones scope with group path as id' do
+ before do
+ another_project = create(:project, :public)
+ create(:milestone, project: project, title: 'awesome milestone')
+ create(:milestone, project: another_project, title: 'awesome milestone other project')
+
+ get api("/groups/#{CGI.escape(group.full_path)}/-/search", user), scope: 'milestones', search: 'awesome'
+ end
+
+ it_behaves_like 'response is correct', schema: 'public_api/v4/milestones'
+ end
+ end
+ end
+
+ describe "GET /projects/:id/search" do
+ context 'when user is not authenticated' do
+ it 'returns 401 error' do
+ get api("/projects/#{project.id}/-/search"), scope: 'issues', search: 'awesome'
+
+ expect(response).to have_gitlab_http_status(401)
+ end
+ end
+
+ context 'when scope is not supported' do
+ it 'returns 400 error' do
+ get api("/projects/#{project.id}/-/search", user), scope: 'unsupported', search: 'awesome'
+
+ expect(response).to have_gitlab_http_status(400)
+ end
+ end
+
+ context 'when scope is missing' do
+ it 'returns 400 error' do
+ get api("/projects/#{project.id}/-/search", user), search: 'awesome'
+
+ expect(response).to have_gitlab_http_status(400)
+ end
+ end
+
+ context 'when project does not exist' do
+ it 'returns 404 error' do
+ get api('/projects/9999/-/search', user), scope: 'issues', search: 'awesome'
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+
+ context 'when user cannot see the project' do
+ it 'returns 404 error' do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+
+ get api("/projects/#{project.id}/-/search", user), scope: 'issues', search: 'awesome'
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+
+ context 'with correct params' do
+ context 'for issues scope' do
+ before do
+ create(:issue, project: project, title: 'awesome issue')
+
+ get api("/projects/#{project.id}/-/search", user), scope: 'issues', search: 'awesome'
+ end
+
+ it_behaves_like 'response is correct', schema: 'public_api/v4/issues'
+ end
+
+ context 'for merge_requests scope' do
+ before do
+ create(:merge_request, source_project: repo_project, title: 'awesome mr')
+
+ get api("/projects/#{repo_project.id}/-/search", user), scope: 'merge_requests', search: 'awesome'
+ end
+
+ it_behaves_like 'response is correct', schema: 'public_api/v4/merge_requests'
+ end
+
+ context 'for milestones scope' do
+ before do
+ create(:milestone, project: project, title: 'awesome milestone')
+
+ get api("/projects/#{project.id}/-/search", user), scope: 'milestones', search: 'awesome'
+ end
+
+ it_behaves_like 'response is correct', schema: 'public_api/v4/milestones'
+ end
+
+ context 'for notes scope' do
+ before do
+ create(:note_on_merge_request, project: project, note: 'awesome note')
+
+ get api("/projects/#{project.id}/-/search", user), scope: 'notes', search: 'awesome'
+ end
+
+ it_behaves_like 'response is correct', schema: 'public_api/v4/notes'
+ end
+
+ context 'for wiki_blobs scope' do
+ before do
+ wiki = create(:project_wiki, project: project)
+ create(:wiki_page, wiki: wiki, attrs: { title: 'home', content: "Awesome page" })
+
+ get api("/projects/#{project.id}/-/search", user), scope: 'wiki_blobs', search: 'awesome'
+ end
+
+ it_behaves_like 'response is correct', schema: 'public_api/v4/blobs'
+ end
+
+ context 'for commits scope' do
+ before do
+ get api("/projects/#{repo_project.id}/-/search", user), scope: 'commits', search: '498214de67004b1da3d820901307bed2a68a8ef6'
+ end
+
+ it_behaves_like 'response is correct', schema: 'public_api/v4/commits_details'
+ end
+
+ context 'for commits scope with project path as id' do
+ before do
+ get api("/projects/#{CGI.escape(repo_project.full_path)}/-/search", user), scope: 'commits', search: '498214de67004b1da3d820901307bed2a68a8ef6'
+ end
+
+ it_behaves_like 'response is correct', schema: 'public_api/v4/commits_details'
+ end
+
+ context 'for blobs scope' do
+ before do
+ get api("/projects/#{repo_project.id}/-/search", user), scope: 'blobs', search: 'monitors'
+ end
+
+ it_behaves_like 'response is correct', schema: 'public_api/v4/blobs', size: 2
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/snippets_spec.rb b/spec/requests/api/snippets_spec.rb
index 74198c8eb4f..b3e253befc6 100644
--- a/spec/requests/api/snippets_spec.rb
+++ b/spec/requests/api/snippets_spec.rb
@@ -32,6 +32,27 @@ describe API::Snippets do
expect(json_response).to be_an Array
expect(json_response.size).to eq(0)
end
+
+ it 'returns 401 for non-authenticated users' do
+ create(:personal_snippet, :internal)
+
+ get api("/snippets/")
+
+ expect(response).to have_gitlab_http_status(401)
+ end
+
+ it 'does not return snippets related to a project with disabled feature visibility' do
+ project = create(:project)
+ create(:project_member, project: project, user: user)
+ public_snippet = create(:personal_snippet, :public, author: user, project: project)
+ project.project_feature.update_attribute(:snippets_access_level, 0)
+
+ get api("/snippets/", user)
+
+ json_response.each do |snippet|
+ expect(snippet["id"]).not_to eq(public_snippet.id)
+ end
+ end
end
describe 'GET /snippets/public' do
diff --git a/spec/requests/api/todos_spec.rb b/spec/requests/api/todos_spec.rb
index fb3a33cadff..2ee8d150dc8 100644
--- a/spec/requests/api/todos_spec.rb
+++ b/spec/requests/api/todos_spec.rb
@@ -129,6 +129,12 @@ describe API::Todos do
post api("/todos/#{pending_1.id}/mark_as_done", john_doe)
end
+
+ it 'returns 404 if the todo does not belong to the current user' do
+ post api("/todos/#{pending_1.id}/mark_as_done", author_1)
+
+ expect(response.status).to eq(404)
+ end
end
end
diff --git a/spec/requests/api/v3/projects_spec.rb b/spec/requests/api/v3/projects_spec.rb
index 5d99d9495f3..bf36d3e245a 100644
--- a/spec/requests/api/v3/projects_spec.rb
+++ b/spec/requests/api/v3/projects_spec.rb
@@ -401,7 +401,7 @@ describe API::V3::Projects do
post v3_api('/projects', user), project
project.each_pair do |k, v|
- next if %i[has_external_issue_tracker issues_enabled merge_requests_enabled wiki_enabled].include?(k)
+ next if %i[storage_version has_external_issue_tracker issues_enabled merge_requests_enabled wiki_enabled].include?(k)
expect(json_response[k.to_s]).to eq(v)
end
@@ -545,7 +545,7 @@ describe API::V3::Projects do
expect(response).to have_gitlab_http_status(201)
project.each_pair do |k, v|
- next if %i[has_external_issue_tracker path].include?(k)
+ next if %i[storage_version has_external_issue_tracker path].include?(k)
expect(json_response[k.to_s]).to eq(v)
end
diff --git a/spec/requests/api/v3/todos_spec.rb b/spec/requests/api/v3/todos_spec.rb
index 53fd962272a..ea648e3917f 100644
--- a/spec/requests/api/v3/todos_spec.rb
+++ b/spec/requests/api/v3/todos_spec.rb
@@ -38,6 +38,12 @@ describe API::V3::Todos do
delete v3_api("/todos/#{pending_1.id}", john_doe)
end
+
+ it 'returns 404 if the todo does not belong to the current user' do
+ delete v3_api("/todos/#{pending_1.id}", author_1)
+
+ expect(response.status).to eq(404)
+ end
end
end
diff --git a/spec/requests/api/variables_spec.rb b/spec/requests/api/variables_spec.rb
index 79ee6c126f6..62215ea3d7d 100644
--- a/spec/requests/api/variables_spec.rb
+++ b/spec/requests/api/variables_spec.rb
@@ -122,12 +122,12 @@ describe API::Variables do
describe 'PUT /projects/:id/variables/:key' do
context 'authorized user with proper permissions' do
it 'updates variable data' do
- initial_variable = project.variables.first
+ initial_variable = project.variables.reload.first
value_before = initial_variable.value
put api("/projects/#{project.id}/variables/#{variable.key}", user), value: 'VALUE_1_UP', protected: true
- updated_variable = project.variables.first
+ updated_variable = project.variables.reload.first
expect(response).to have_gitlab_http_status(200)
expect(value_before).to eq(variable.value)
diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb
index 2e2dccdafad..942e5b2bb1b 100644
--- a/spec/requests/git_http_spec.rb
+++ b/spec/requests/git_http_spec.rb
@@ -163,7 +163,7 @@ describe 'Git HTTP requests' do
download(path) do |response|
json_body = ActiveSupport::JSON.decode(response.body)
- expect(json_body['RepoPath']).to include(wiki.repository.full_path)
+ expect(json_body['RepoPath']).to include(wiki.repository.disk_path)
end
end
end
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index 930ef49b7f3..971b45c411d 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -1208,7 +1208,7 @@ describe 'Git LFS API and storage' do
end
def post_lfs_json(url, body = nil, headers = nil)
- post(url, body.try(:to_json), (headers || {}).merge('Content-Type' => 'application/vnd.git-lfs+json'))
+ post(url, body.try(:to_json), (headers || {}).merge('Content-Type' => LfsRequest::CONTENT_TYPE))
end
def json_response
diff --git a/spec/requests/lfs_locks_api_spec.rb b/spec/requests/lfs_locks_api_spec.rb
new file mode 100644
index 00000000000..e44a11a7232
--- /dev/null
+++ b/spec/requests/lfs_locks_api_spec.rb
@@ -0,0 +1,159 @@
+require 'spec_helper'
+
+describe 'Git LFS File Locking API' do
+ include WorkhorseHelpers
+
+ let(:project) { create(:project) }
+ let(:master) { create(:user) }
+ let(:developer) { create(:user) }
+ let(:guest) { create(:user) }
+ let(:path) { 'README.md' }
+ let(:headers) do
+ {
+ 'Authorization' => authorization
+ }.compact
+ end
+
+ shared_examples 'unauthorized request' do
+ context 'when user is not authorized' do
+ let(:authorization) { authorize_user(guest) }
+
+ it 'returns a forbidden 403 response' do
+ post_lfs_json url, body, headers
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+ end
+ end
+
+ before do
+ allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
+
+ project.add_developer(master)
+ project.add_developer(developer)
+ project.add_guest(guest)
+ end
+
+ describe 'Create File Lock endpoint' do
+ let(:url) { "#{project.http_url_to_repo}/info/lfs/locks" }
+ let(:authorization) { authorize_user(developer) }
+ let(:body) { { path: path } }
+
+ include_examples 'unauthorized request'
+
+ context 'with an existent lock' do
+ before do
+ lock_file('README.md', developer)
+ end
+
+ it 'returns an error message' do
+ post_lfs_json url, body, headers
+
+ expect(response).to have_gitlab_http_status(409)
+
+ expect(json_response.keys).to match_array(%w(lock message documentation_url))
+ expect(json_response['message']).to match(/already locked/)
+ end
+
+ it 'returns the existing lock' do
+ post_lfs_json url, body, headers
+
+ expect(json_response['lock']['path']).to eq('README.md')
+ end
+ end
+
+ context 'without an existent lock' do
+ it 'creates the lock' do
+ post_lfs_json url, body, headers
+
+ expect(response).to have_gitlab_http_status(201)
+
+ expect(json_response['lock'].keys).to match_array(%w(id path locked_at owner))
+ end
+ end
+ end
+
+ describe 'Listing File Locks endpoint' do
+ let(:url) { "#{project.http_url_to_repo}/info/lfs/locks" }
+ let(:authorization) { authorize_user(developer) }
+
+ include_examples 'unauthorized request'
+
+ it 'returns the list of locked files' do
+ lock_file('README.md', developer)
+ lock_file('README', developer)
+
+ do_get url, nil, headers
+
+ expect(response).to have_gitlab_http_status(200)
+
+ expect(json_response['locks'].size).to eq(2)
+ expect(json_response['locks'].first.keys).to match_array(%w(id path locked_at owner))
+ end
+ end
+
+ describe 'List File Locks for verification endpoint' do
+ let(:url) { "#{project.http_url_to_repo}/info/lfs/locks/verify" }
+ let(:authorization) { authorize_user(developer) }
+
+ include_examples 'unauthorized request'
+
+ it 'returns the list of locked files grouped by owner' do
+ lock_file('README.md', master)
+ lock_file('README', developer)
+
+ post_lfs_json url, nil, headers
+
+ expect(response).to have_gitlab_http_status(200)
+
+ expect(json_response['ours'].size).to eq(1)
+ expect(json_response['ours'].first['path']).to eq('README')
+ expect(json_response['theirs'].size).to eq(1)
+ expect(json_response['theirs'].first['path']).to eq('README.md')
+ end
+ end
+
+ describe 'Delete File Lock endpoint' do
+ let!(:lock) { lock_file('README.md', developer) }
+ let(:url) { "#{project.http_url_to_repo}/info/lfs/locks/#{lock[:id]}/unlock" }
+ let(:authorization) { authorize_user(developer) }
+
+ include_examples 'unauthorized request'
+
+ context 'with an existent lock' do
+ it 'deletes the lock' do
+ post_lfs_json url, nil, headers
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+
+ it 'returns the deleted lock' do
+ post_lfs_json url, nil, headers
+
+ expect(json_response['lock'].keys).to match_array(%w(id path locked_at owner))
+ end
+ end
+ end
+
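+ # Creates a lock through the service layer directly, so these specs do not depend on the endpoint under test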
+ def lock_file(path, author)
+ result = Lfs::LockFileService.new(project, author, { path: path }).execute
+
+ result[:lock]
+ end
+
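+ # Basic auth credentials for the given user; password authentication is assumed to be enabled in the test environment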
+ def authorize_user(user)
+ ActionController::HttpAuthentication::Basic.encode_credentials(user.username, user.password)
+ end
+
+ def post_lfs_json(url, body = nil, headers = nil)
+ post(url, body.try(:to_json), (headers || {}).merge('Content-Type' => LfsRequest::CONTENT_TYPE))
+ end
+
+ def do_get(url, params = nil, headers = nil)
+ get(url, (params || {}), (headers || {}).merge('Content-Type' => LfsRequest::CONTENT_TYPE))
+ end
+
+ def json_response
+ @json_response ||= JSON.parse(response.body)
+ end
+end
diff --git a/spec/requests/openid_connect_spec.rb b/spec/requests/openid_connect_spec.rb
index 1a5ad9b04e4..de829011e58 100644
--- a/spec/requests/openid_connect_spec.rb
+++ b/spec/requests/openid_connect_spec.rb
@@ -65,13 +65,23 @@ describe 'OpenID Connect requests' do
)
end
- let(:public_email) { build :email, email: 'public@example.com' }
- let(:private_email) { build :email, email: 'private@example.com' }
+ let!(:public_email) { build :email, email: 'public@example.com' }
+ let!(:private_email) { build :email, email: 'private@example.com' }
- it 'includes all user information' do
+ let!(:group1) { create :group, path: 'group1' }
+ let!(:group2) { create :group, path: 'group2' }
+ let!(:group3) { create :group, path: 'group3', parent: group2 }
+ let!(:group4) { create :group, path: 'group4', parent: group3 }
+
+ before do
+ group1.add_user(user, GroupMember::OWNER)
+ group3.add_user(user, Gitlab::Access::DEVELOPER)
+ end
+
+ it 'includes all user information and group memberships' do
request_user_info
- expect(json_response).to eq({
+ expect(json_response).to match(a_hash_including({
'sub' => hashed_subject,
'name' => 'Alice',
'nickname' => 'alice',
@@ -79,8 +89,13 @@ describe 'OpenID Connect requests' do
'email_verified' => true,
'website' => 'https://example.com',
'profile' => 'http://localhost/alice',
- 'picture' => "http://localhost/uploads/-/system/user/avatar/#{user.id}/dk.png"
- })
+ 'picture' => "http://localhost/uploads/-/system/user/avatar/#{user.id}/dk.png",
+ 'groups' => anything
+ }))
+
+ expected_groups = %w[group1 group2/group3]
+ expected_groups << 'group2/group3/group4' if Group.supports_nested_groups?
+ expect(json_response['groups']).to match_array(expected_groups)
end
end
diff --git a/spec/requests/rack_attack_global_spec.rb b/spec/requests/rack_attack_global_spec.rb
index 0fec14d0cce..b18e922b063 100644
--- a/spec/requests/rack_attack_global_spec.rb
+++ b/spec/requests/rack_attack_global_spec.rb
@@ -22,6 +22,7 @@ describe 'Rack Attack global throttles' do
let(:url_that_does_not_require_authentication) { '/users/sign_in' }
let(:url_that_requires_authentication) { '/dashboard/snippets' }
+ let(:url_api_internal) { '/api/v4/internal/check' }
let(:api_partial_url) { '/todos' }
around do |example|
@@ -172,6 +173,15 @@ describe 'Rack Attack global throttles' do
get url_that_does_not_require_authentication
expect(response).to have_http_status 200
end
+
+ context 'when the request is to the api internal endpoints' do
+ it 'allows requests over the rate limit' do
+ (1 + requests_per_period).times do
+ get url_api_internal, secret_token: Gitlab::Shell.secret_token
+ expect(response).to have_http_status 200
+ end
+ end
+ end
end
context 'when the throttle is disabled' do
diff --git a/spec/serializers/group_variable_entity_spec.rb b/spec/serializers/group_variable_entity_spec.rb
new file mode 100644
index 00000000000..f6de7d01f98
--- /dev/null
+++ b/spec/serializers/group_variable_entity_spec.rb
@@ -0,0 +1,14 @@
+require 'spec_helper'
+
+describe GroupVariableEntity do
+ let(:variable) { create(:ci_group_variable) }
+ let(:entity) { described_class.new(variable) }
+
+ describe '#as_json' do
+ subject { entity.as_json }
+
+ it 'contains required fields' do
+ expect(subject).to include(:id, :key, :value, :protected)
+ end
+ end
+end
diff --git a/spec/serializers/lfs_file_lock_entity_spec.rb b/spec/serializers/lfs_file_lock_entity_spec.rb
new file mode 100644
index 00000000000..5919f473a90
--- /dev/null
+++ b/spec/serializers/lfs_file_lock_entity_spec.rb
@@ -0,0 +1,19 @@
+require 'spec_helper'
+
+describe LfsFileLockEntity do
+ let(:user) { create(:user) }
+ let(:resource) { create(:lfs_file_lock, user: user) }
+
+ let(:request) { double('request', current_user: user) }
+
+ subject { described_class.new(resource, request: request).as_json }
+
+ it 'exposes basic attrs of the lock' do
+ expect(subject).to include(:id, :path, :locked_at)
+ end
+
+ it 'exposes the owner info' do
+ expect(subject).to include(:owner)
+ expect(subject[:owner][:name]).to eq(user.name)
+ end
+end
diff --git a/spec/serializers/variable_entity_spec.rb b/spec/serializers/variable_entity_spec.rb
new file mode 100644
index 00000000000..effc0022633
--- /dev/null
+++ b/spec/serializers/variable_entity_spec.rb
@@ -0,0 +1,14 @@
+require 'spec_helper'
+
+describe VariableEntity do
+ let(:variable) { create(:ci_variable) }
+ let(:entity) { described_class.new(variable) }
+
+ describe '#as_json' do
+ subject { entity.as_json }
+
+ it 'contains required fields' do
+ expect(subject).to include(:id, :key, :value, :protected)
+ end
+ end
+end
diff --git a/spec/services/ci/ensure_stage_service_spec.rb b/spec/services/ci/ensure_stage_service_spec.rb
new file mode 100644
index 00000000000..d17e30763d7
--- /dev/null
+++ b/spec/services/ci/ensure_stage_service_spec.rb
@@ -0,0 +1,51 @@
+require 'spec_helper'
+
+describe Ci::EnsureStageService, '#execute' do
+ set(:project) { create(:project) }
+ set(:user) { create(:user) }
+
+ let(:stage) { create(:ci_stage_entity) }
+ let(:job) { build(:ci_build) }
+
+ let(:service) { described_class.new(project, user) }
+
+ context 'when build has a stage assigned' do
+ it 'does not create a new stage' do
+ job.assign_attributes(stage_id: stage.id)
+
+ expect { service.execute(job) }.not_to change { Ci::Stage.count }
+ end
+ end
+
+ context 'when build does not have a stage assigned' do
+ it 'creates a new stage' do
+ job.assign_attributes(stage_id: nil, stage: 'test')
+
+ expect { service.execute(job) }.to change { Ci::Stage.count }.by(1)
+ end
+ end
+
+ context 'when build is invalid' do
+ it 'does not create a new stage' do
+ job.assign_attributes(stage_id: nil, ref: nil)
+
+ expect { service.execute(job) }.not_to change { Ci::Stage.count }
+ end
+ end
+
+ context 'when new stage can not be created because of an exception' do
+ before do
+ allow(Ci::Stage).to receive(:create!)
+ .and_raise(ActiveRecord::RecordNotUnique.new('Duplicates!'))
+ end
+
+ it 'retries up to two times' do
+ job.assign_attributes(stage_id: nil)
+
+ expect(service).to receive(:find_stage).exactly(2).times
+
+ expect { service.execute(job) }
+ .to raise_error(Ci::EnsureStageService::EnsureStageError)
+ end
+ end
+end
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index 2c2f48e323d..db9c216d3f4 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -5,7 +5,11 @@ describe Ci::RetryBuildService do
set(:project) { create(:project) }
set(:pipeline) { create(:ci_pipeline, project: project) }
- let(:build) { create(:ci_build, pipeline: pipeline) }
+ let(:stage) do
+ Ci::Stage.create!(project: project, pipeline: pipeline, name: 'test')
+ end
+
+ let(:build) { create(:ci_build, pipeline: pipeline, stage_id: stage.id) }
let(:service) do
described_class.new(project, user)
@@ -27,29 +31,27 @@ describe Ci::RetryBuildService do
user_id auto_canceled_by_id retried failure_reason].freeze
shared_examples 'build duplication' do
- let(:stage) do
- # TODO, we still do not have factory for new stages, we will need to
- # switch existing factory to persist stages, instead of using LegacyStage
- #
- Ci::Stage.create!(project: project, pipeline: pipeline, name: 'test')
- end
+ let(:another_pipeline) { create(:ci_empty_pipeline, project: project) }
let(:build) do
create(:ci_build, :failed, :artifacts, :expired, :erased,
:queued, :coverage, :tags, :allowed_to_fail, :on_tag,
:triggered, :trace_artifact, :teardown_environment,
- description: 'my-job', stage: 'test', pipeline: pipeline,
- auto_canceled_by: create(:ci_empty_pipeline, project: project)) do |build|
- ##
- # TODO, workaround for FactoryBot limitation when having both
- # stage (text) and stage_id (integer) columns in the table.
- build.stage_id = stage.id
- end
+ description: 'my-job', stage: 'test', stage_id: stage.id,
+ pipeline: pipeline, auto_canceled_by: another_pipeline)
+ end
+
+ before do
+ # Make sure that build has both `stage_id` and `stage` because FactoryBot
+ # can reset one of the fields when assigning another. We plan to deprecate
+ # and remove legacy `stage` column in the future.
+ build.update_attributes(stage: 'test', stage_id: stage.id)
end
describe 'clone accessors' do
CLONE_ACCESSORS.each do |attribute|
it "clones #{attribute} build attribute" do
+ expect(build.send(attribute)).not_to be_nil
expect(new_build.send(attribute)).not_to be_nil
expect(new_build.send(attribute)).to eq build.send(attribute)
end
@@ -122,10 +124,12 @@ describe Ci::RetryBuildService do
context 'when there are subsequent builds that are skipped' do
let!(:subsequent_build) do
- create(:ci_build, :skipped, stage_idx: 1, pipeline: pipeline)
+ create(:ci_build, :skipped, stage_idx: 2,
+ pipeline: pipeline,
+ stage: 'deploy')
end
- it 'resumes pipeline processing in subsequent stages' do
+ it 'resumes pipeline processing in a subsequent stage' do
service.execute(build)
expect(subsequent_build.reload).to be_created
diff --git a/spec/services/groups/destroy_service_spec.rb b/spec/services/groups/destroy_service_spec.rb
index ac4b9c02ba7..e8216abb08b 100644
--- a/spec/services/groups/destroy_service_spec.rb
+++ b/spec/services/groups/destroy_service_spec.rb
@@ -6,7 +6,7 @@ describe Groups::DestroyService do
let!(:user) { create(:user) }
let!(:group) { create(:group) }
let!(:nested_group) { create(:group, parent: group) }
- let!(:project) { create(:project, namespace: group) }
+ let!(:project) { create(:project, :legacy_storage, namespace: group) }
let!(:notification_setting) { create(:notification_setting, source: group)}
let(:gitlab_shell) { Gitlab::Shell.new }
let(:remove_path) { group.path + "+#{group.id}+deleted" }
@@ -141,7 +141,7 @@ describe Groups::DestroyService do
end
context 'legacy storage' do
- let!(:project) { create(:project, :empty_repo, namespace: group) }
+ let!(:project) { create(:project, :legacy_storage, :empty_repo, namespace: group) }
it 'removes repository' do
expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_falsey
@@ -149,7 +149,7 @@ describe Groups::DestroyService do
end
context 'hashed storage' do
- let!(:project) { create(:project, :hashed, :empty_repo, namespace: group) }
+ let!(:project) { create(:project, :empty_repo, namespace: group) }
it 'removes repository' do
expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_falsey
diff --git a/spec/services/issues/fetch_referenced_merge_requests_service_spec.rb b/spec/services/issues/fetch_referenced_merge_requests_service_spec.rb
new file mode 100644
index 00000000000..4e58179f45f
--- /dev/null
+++ b/spec/services/issues/fetch_referenced_merge_requests_service_spec.rb
@@ -0,0 +1,35 @@
+require 'spec_helper.rb'
+
+describe Issues::FetchReferencedMergeRequestsService do
+ let(:project) { create(:project) }
+ let(:issue) { create(:issue, project: project) }
+ let(:other_project) { create(:project) }
+
+ let(:mr) { create(:merge_request, source_project: project, target_project: project, id: 2)}
+ let(:other_mr) { create(:merge_request, source_project: other_project, target_project: other_project, id: 1)}
+
+ let(:user) { create(:user) }
+ let(:service) { described_class.new(project, user) }
+
+ context 'with mentioned merge requests' do
+ it 'returns a list of sorted merge requests' do
+ allow(issue).to receive(:referenced_merge_requests).with(user).and_return([other_mr, mr])
+
+ mrs, closed_by_mrs = service.execute(issue)
+
+ expect(mrs).to match_array([mr, other_mr])
+ expect(closed_by_mrs).to match_array([])
+ end
+ end
+
+ context 'with closed-by merge requests' do
+ it 'returns a list of sorted merge requests' do
+ allow(issue).to receive(:closed_by_merge_requests).with(user).and_return([other_mr, mr])
+
+ mrs, closed_by_mrs = service.execute(issue)
+
+ expect(mrs).to match_array([])
+ expect(closed_by_mrs).to match_array([mr, other_mr])
+ end
+ end
+end
diff --git a/spec/services/lfs/lock_file_service_spec.rb b/spec/services/lfs/lock_file_service_spec.rb
new file mode 100644
index 00000000000..3e58eea2501
--- /dev/null
+++ b/spec/services/lfs/lock_file_service_spec.rb
@@ -0,0 +1,62 @@
+require 'spec_helper'
+
+describe Lfs::LockFileService do
+ let(:project) { create(:project) }
+ let(:current_user) { create(:user) }
+
+ subject { described_class.new(project, current_user, params) }
+
+ describe '#execute' do
+ let(:params) { { path: 'README.md' } }
+
+ context 'when not authorized' do
+ it "doesn't succeed" do
+ result = subject.execute
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:http_status]).to eq(403)
+ expect(result[:message]).to eq('You have no permissions')
+ end
+ end
+
+ context 'when authorized' do
+ before do
+ project.add_developer(current_user)
+ end
+
+ context 'with an existent lock' do
+ let!(:lock) { create(:lfs_file_lock, project: project) }
+
+ it "doesn't succeed" do
+ expect(subject.execute[:status]).to eq(:error)
+ end
+
+ it "doesn't create the Lock" do
+ expect do
+ subject.execute
+ end.not_to change { LfsFileLock.count }
+ end
+ end
+
+ context 'without an existent lock' do
+ it "succeeds" do
+ expect(subject.execute[:status]).to eq(:success)
+ end
+
+ it "creates the Lock" do
+ expect do
+ subject.execute
+ end.to change { LfsFileLock.count }.by(1)
+ end
+ end
+
+ context 'when an error is raised' do
+ it "doesn't succeed" do
+ allow_any_instance_of(described_class).to receive(:create_lock!).and_raise(StandardError)
+
+ expect(subject.execute[:status]).to eq(:error)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/lfs/locks_finder_service_spec.rb b/spec/services/lfs/locks_finder_service_spec.rb
new file mode 100644
index 00000000000..e409b77babf
--- /dev/null
+++ b/spec/services/lfs/locks_finder_service_spec.rb
@@ -0,0 +1,101 @@
+require 'spec_helper'
+
+describe Lfs::LocksFinderService do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+ let(:params) { {} }
+
+ subject { described_class.new(project, user, params) }
+
+ shared_examples 'no results' do
+ it 'returns an empty list' do
+ result = subject.execute
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:locks]).to be_blank
+ end
+ end
+
+ describe '#execute' do
+ let!(:lock_1) { create(:lfs_file_lock, project: project) }
+ let!(:lock_2) { create(:lfs_file_lock, project: project, path: 'README') }
+
+ context 'find by id' do
+ context 'with results' do
+ let(:params) do
+ { id: lock_1.id }
+ end
+
+ it 'returns the record' do
+ result = subject.execute
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:locks].size).to eq(1)
+ expect(result[:locks].first).to eq(lock_1)
+ end
+ end
+
+ context 'without results' do
+ let(:params) do
+ { id: 123 }
+ end
+
+ include_examples 'no results'
+ end
+ end
+
+ context 'find by path' do
+ context 'with results' do
+ let(:params) do
+ { path: lock_1.path }
+ end
+
+ it 'returns the record' do
+ result = subject.execute
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:locks].size).to eq(1)
+ expect(result[:locks].first).to eq(lock_1)
+ end
+ end
+
+ context 'without results' do
+ let(:params) do
+ { path: 'not-found' }
+ end
+
+ include_examples 'no results'
+ end
+ end
+
+ context 'find all' do
+ context 'with results' do
+ it 'returns all the records' do
+ result = subject.execute
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:locks].size).to eq(2)
+ end
+ end
+
+ context 'without results' do
+ before do
+ LfsFileLock.delete_all
+ end
+
+ include_examples 'no results'
+ end
+ end
+
+ context 'when an error is raised' do
+ it "doesn't succeed" do
+ allow_any_instance_of(described_class).to receive(:find_locks).and_raise(StandardError)
+
+ result = subject.execute
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:locks]).to be_blank
+ end
+ end
+ end
+end
diff --git a/spec/services/lfs/unlock_file_service_spec.rb b/spec/services/lfs/unlock_file_service_spec.rb
new file mode 100644
index 00000000000..4bea112b9c6
--- /dev/null
+++ b/spec/services/lfs/unlock_file_service_spec.rb
@@ -0,0 +1,105 @@
+require 'spec_helper'
+
+describe Lfs::UnlockFileService do
+ let(:project) { create(:project) }
+ let(:current_user) { create(:user) }
+ let(:lock_author) { create(:user) }
+ let!(:lock) { create(:lfs_file_lock, user: lock_author, project: project) }
+ let(:params) { {} }
+
+ subject { described_class.new(project, current_user, params) }
+
+ describe '#execute' do
+ context 'when not authorized' do
+ it "doesn't succeed" do
+ result = subject.execute
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:http_status]).to eq(403)
+ expect(result[:message]).to eq('You have no permissions')
+ end
+ end
+
+ context 'when authorized' do
+ before do
+ project.add_developer(current_user)
+ end
+
+      context 'when lock does not exist' do
+ let(:params) { { id: 123 } }
+ it "doesn't succeed" do
+ result = subject.execute
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:http_status]).to eq(404)
+ end
+ end
+
+ context 'when unlocked by the author' do
+ let(:current_user) { lock_author }
+ let(:params) { { id: lock.id } }
+
+ it "succeeds" do
+ result = subject.execute
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:lock]).to be_present
+ end
+ end
+
+ context 'when unlocked by a different user' do
+ let(:current_user) { create(:user) }
+ let(:params) { { id: lock.id } }
+
+ it "doesn't succeed" do
+ result = subject.execute
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to match(/is locked by GitLab User #{lock_author.id}/)
+ expect(result[:http_status]).to eq(403)
+ end
+ end
+
+ context 'when forced' do
+ let(:developer) { create(:user) }
+ let(:master) { create(:user) }
+
+ before do
+ project.add_developer(developer)
+ project.add_master(master)
+ end
+
+ context 'by a regular user' do
+ let(:current_user) { developer }
+ let(:params) do
+ { id: lock.id,
+ force: true }
+ end
+
+ it "doesn't succeed" do
+ result = subject.execute
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to match(/You must have master access/)
+ expect(result[:http_status]).to eq(403)
+ end
+ end
+
+ context 'by a master user' do
+ let(:current_user) { master }
+ let(:params) do
+ { id: lock.id,
+ force: true }
+ end
+
+ it "succeeds" do
+ result = subject.execute
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:lock]).to be_present
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/members/authorized_destroy_service_spec.rb b/spec/services/members/authorized_destroy_service_spec.rb
index 757c45708b9..9cf6f64a078 100644
--- a/spec/services/members/authorized_destroy_service_spec.rb
+++ b/spec/services/members/authorized_destroy_service_spec.rb
@@ -21,6 +21,15 @@ describe Members::AuthorizedDestroyService do
.to change { Member.count }.from(3).to(2)
end
+ it "doesn't destroy invited project member notification_settings" do
+ project.add_developer(member_user)
+
+ member = create :project_member, :invited, project: project
+
+ expect { described_class.new(member, member_user).execute }
+ .not_to change { NotificationSetting.count }
+ end
+
it 'destroys invited group member' do
group.add_developer(member_user)
@@ -29,38 +38,73 @@ describe Members::AuthorizedDestroyService do
expect { described_class.new(member, member_user).execute }
.to change { Member.count }.from(2).to(1)
end
+
+ it "doesn't destroy invited group member notification_settings" do
+ group.add_developer(member_user)
+
+ member = create :group_member, :invited, group: group
+
+ expect { described_class.new(member, member_user).execute }
+ .not_to change { NotificationSetting.count }
+ end
+ end
+
+ context 'Requested user' do
+ it "doesn't destroy member notification_settings" do
+ member = create(:project_member, user: member_user, requested_at: Time.now)
+
+ expect { described_class.new(member, member_user).execute }
+ .not_to change { NotificationSetting.count }
+ end
end
context 'Group member' do
- it "unassigns issues and merge requests" do
+ let(:member) { group.members.find_by(user_id: member_user.id) }
+
+ before do
group.add_developer(member_user)
+ end
+ it "unassigns issues and merge requests" do
issue = create :issue, project: group_project, assignees: [member_user]
create :issue, assignees: [member_user]
merge_request = create :merge_request, target_project: group_project, source_project: group_project, assignee: member_user
create :merge_request, target_project: project, source_project: project, assignee: member_user
- member = group.members.find_by(user_id: member_user.id)
-
expect { described_class.new(member, member_user).execute }
.to change { number_of_assigned_issuables(member_user) }.from(4).to(2)
expect(issue.reload.assignee_ids).to be_empty
expect(merge_request.reload.assignee_id).to be_nil
end
+
+ it 'destroys member notification_settings' do
+ group.add_developer(member_user)
+ member = group.members.find_by(user_id: member_user.id)
+
+ expect { described_class.new(member, member_user).execute }
+ .to change { member_user.notification_settings.count }.by(-1)
+ end
end
context 'Project member' do
- it "unassigns issues and merge requests" do
+ let(:member) { project.members.find_by(user_id: member_user.id) }
+
+ before do
project.add_developer(member_user)
+ end
+ it "unassigns issues and merge requests" do
create :issue, project: project, assignees: [member_user]
create :merge_request, target_project: project, source_project: project, assignee: member_user
- member = project.members.find_by(user_id: member_user.id)
-
expect { described_class.new(member, member_user).execute }
.to change { number_of_assigned_issuables(member_user) }.from(2).to(0)
end
+
+ it 'destroys member notification_settings' do
+ expect { described_class.new(member, member_user).execute }
+ .to change { member_user.notification_settings.count }.by(-1)
+ end
end
end
diff --git a/spec/services/merge_requests/build_service_spec.rb b/spec/services/merge_requests/build_service_spec.rb
index e56d335a7d6..3a935d98540 100644
--- a/spec/services/merge_requests/build_service_spec.rb
+++ b/spec/services/merge_requests/build_service_spec.rb
@@ -1,6 +1,7 @@
require 'spec_helper'
describe MergeRequests::BuildService do
+ using RSpec::Parameterized::TableSyntax
include RepoHelpers
let(:project) { create(:project, :repository) }
@@ -111,6 +112,7 @@ describe MergeRequests::BuildService do
context 'one commit in the diff' do
let(:commits) { Commit.decorate([commit_1], project) }
+ let(:commit_description) { commit_1.safe_message.split(/\n+/, 2).last }
before do
stub_compare
@@ -125,7 +127,7 @@ describe MergeRequests::BuildService do
end
it 'uses the description of the commit as the description of the merge request' do
- expect(merge_request.description).to eq(commit_1.safe_message.split(/\n+/, 2).last)
+ expect(merge_request.description).to eq(commit_description)
end
context 'merge request already has a description set' do
@@ -148,68 +150,32 @@ describe MergeRequests::BuildService do
end
end
- context 'branch starts with issue IID followed by a hyphen' do
- let(:source_branch) { "#{issue.iid}-fix-issue" }
-
- it 'appends "Closes #$issue-iid" to the description' do
- expect(merge_request.description).to eq("#{commit_1.safe_message.split(/\n+/, 2).last}\n\nCloses ##{issue.iid}")
+ context 'when the source branch matches an issue' do
+ where(:issue_tracker, :source_branch, :closing_message) do
+ :jira | 'FOO-123-fix-issue' | 'Closes FOO-123'
+ :jira | 'fix-issue' | nil
+ :custom_issue_tracker | '123-fix-issue' | 'Closes #123'
+ :custom_issue_tracker | 'fix-issue' | nil
+ :internal | '123-fix-issue' | 'Closes #123'
+ :internal | 'fix-issue' | nil
end
- context 'merge request already has a description set' do
- let(:description) { 'Merge request description' }
-
- it 'appends "Closes #$issue-iid" to the description' do
- expect(merge_request.description).to eq("#{description}\n\nCloses ##{issue.iid}")
+ with_them do
+ before do
+ if issue_tracker == :internal
+ issue.update!(iid: 123)
+ else
+ create(:"#{issue_tracker}_service", project: project)
+ end
end
- end
- context 'commit has no description' do
- let(:commits) { Commit.decorate([commit_2], project) }
+ it 'appends the closing description' do
+ expected_description = [commit_description, closing_message].compact.join("\n\n")
- it 'sets the description to "Closes #$issue-iid"' do
- expect(merge_request.description).to eq("Closes ##{issue.iid}")
+ expect(merge_request.description).to eq(expected_description)
end
end
end
-
- context 'branch starts with numeric characters followed by a hyphen with no issue tracker' do
- let(:source_branch) { '12345-fix-issue' }
-
- before do
- allow(project).to receive(:external_issue_tracker).and_return(false)
- allow(project).to receive(:issues_enabled?).and_return(false)
- end
-
- it 'uses the title of the commit as the title of the merge request' do
- expect(merge_request.title).to eq(commit_1.safe_message.split("\n").first)
- end
-
- it 'uses the description of the commit as the description of the merge request' do
- commit_description = commit_1.safe_message.split(/\n+/, 2).last
-
- expect(merge_request.description).to eq("#{commit_description}")
- end
- end
-
- context 'branch starts with JIRA-formatted external issue IID followed by a hyphen' do
- let(:source_branch) { 'EXMPL-12345-fix-issue' }
-
- before do
- allow(project).to receive(:external_issue_tracker).and_return(true)
- allow(project).to receive(:issues_enabled?).and_return(false)
- allow(project).to receive(:external_issue_reference_pattern).and_return(IssueTrackerService.reference_pattern)
- end
-
- it 'uses the title of the commit as the title of the merge request' do
- expect(merge_request.title).to eq(commit_1.safe_message.split("\n").first)
- end
-
- it 'uses the description of the commit as the description of the merge request and appends the closes text' do
- commit_description = commit_1.safe_message.split(/\n+/, 2).last
-
- expect(merge_request.description).to eq("#{commit_description}\n\nCloses EXMPL-12345")
- end
- end
end
context 'more than one commit in the diff' do
@@ -239,80 +205,62 @@ describe MergeRequests::BuildService do
end
end
- context 'branch starts with GitLab issue IID followed by a hyphen' do
- let(:source_branch) { "#{issue.iid}-fix-issue" }
-
- it 'sets the title to: Resolves "$issue-title"' do
- expect(merge_request.title).to eq("Resolve \"#{issue.title}\"")
+ context 'when the source branch matches an issue' do
+ where(:issue_tracker, :source_branch, :title, :closing_message) do
+ :jira | 'FOO-123-fix-issue' | 'Resolve FOO-123 "Fix issue"' | 'Closes FOO-123'
+ :jira | 'fix-issue' | 'Fix issue' | nil
+ :custom_issue_tracker | '123-fix-issue' | 'Resolve #123 "Fix issue"' | 'Closes #123'
+ :custom_issue_tracker | 'fix-issue' | 'Fix issue' | nil
+ :internal | '123-fix-issue' | 'Resolve "A bug"' | 'Closes #123'
+ :internal | 'fix-issue' | 'Fix issue' | nil
+ :internal | '124-fix-issue' | '124 fix issue' | nil
end
- context 'when issue is not accessible to user' do
+ with_them do
before do
- project.team.truncate
- end
-
- it 'uses branch title as the merge request title' do
- expect(merge_request.title).to eq("#{issue.iid} fix issue")
+ if issue_tracker == :internal
+ issue.update!(iid: 123)
+ else
+ create(:"#{issue_tracker}_service", project: project)
+ end
end
- end
-
- context 'issue does not exist' do
- let(:source_branch) { "#{issue.iid.succ}-fix-issue" }
- it 'uses the title of the branch as the merge request title' do
- expect(merge_request.title).to eq("#{issue.iid.succ} fix issue")
+ it 'sets the correct title' do
+ expect(merge_request.title).to eq(title)
end
- end
-
- context 'issue is confidential' do
- let(:issue_confidential) { true }
- it 'uses the title of the branch as the merge request title' do
- expect(merge_request.title).to eq("#{issue.iid} fix issue")
+ it 'sets the closing description' do
+ expect(merge_request.description).to eq(closing_message)
end
end
end
- context 'branch starts with numeric characters followed by a hyphen with no issue tracker' do
- let(:source_branch) { '12345-fix-issue' }
+  context 'when the issue is not accessible to the user' do
+ let(:source_branch) { "#{issue.iid}-fix-issue" }
before do
- allow(project).to receive(:external_issue_tracker).and_return(false)
- allow(project).to receive(:issues_enabled?).and_return(false)
+ project.team.truncate
end
- it 'sets the title to the humanized branch title' do
- expect(merge_request.title).to eq('12345 fix issue')
+ it 'uses branch title as the merge request title' do
+ expect(merge_request.title).to eq("#{issue.iid} fix issue")
end
- end
-
- context 'branch starts with JIRA-formatted external issue IID' do
- let(:source_branch) { 'EXMPL-12345' }
- before do
- allow(project).to receive(:external_issue_tracker).and_return(true)
- allow(project).to receive(:issues_enabled?).and_return(false)
- allow(project).to receive(:external_issue_reference_pattern).and_return(IssueTrackerService.reference_pattern)
+ it 'does not set a description' do
+ expect(merge_request.description).to be_nil
end
+ end
- it 'sets the title to the humanized branch title' do
- expect(merge_request.title).to eq('Resolve EXMPL-12345')
- end
+ context 'when the issue is confidential' do
+ let(:source_branch) { "#{issue.iid}-fix-issue" }
+ let(:issue_confidential) { true }
- it 'appends the closes text' do
- expect(merge_request.description).to eq('Closes EXMPL-12345')
+ it 'uses the title of the branch as the merge request title' do
+ expect(merge_request.title).to eq("#{issue.iid} fix issue")
end
- context 'followed by hyphenated text' do
- let(:source_branch) { 'EXMPL-12345-fix-issue' }
-
- it 'sets the title to the humanized branch title' do
- expect(merge_request.title).to eq('Resolve EXMPL-12345 "Fix issue"')
- end
-
- it 'appends the closes text' do
- expect(merge_request.description).to eq('Closes EXMPL-12345')
- end
+ it 'does not set a description' do
+ expect(merge_request.description).to be_nil
end
end
end
diff --git a/spec/services/projects/create_from_template_service_spec.rb b/spec/services/projects/create_from_template_service_spec.rb
index 9919ec254c6..609d678caea 100644
--- a/spec/services/projects/create_from_template_service_spec.rb
+++ b/spec/services/projects/create_from_template_service_spec.rb
@@ -4,8 +4,10 @@ describe Projects::CreateFromTemplateService do
let(:user) { create(:user) }
let(:project_params) do
{
- path: user.to_param,
- template_name: 'rails'
+ path: user.to_param,
+ template_name: 'rails',
+ description: 'project description',
+ visibility_level: Gitlab::VisibilityLevel::PRIVATE
}
end
@@ -22,5 +24,7 @@ describe Projects::CreateFromTemplateService do
expect(project).to be_saved
expect(project.scheduled?).to be(true)
+ expect(project.description).to match('project description')
+ expect(project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
end
end
diff --git a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
index 15699574b3a..fb6d7171ac3 100644
--- a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
+++ b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
@@ -2,7 +2,7 @@ require 'spec_helper'
describe Projects::HashedStorage::MigrateAttachmentsService do
subject(:service) { described_class.new(project) }
- let(:project) { create(:project) }
+ let(:project) { create(:project, :legacy_storage) }
let(:legacy_storage) { Storage::LegacyProject.new(project) }
let(:hashed_storage) { Storage::HashedProject.new(project) }
diff --git a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
index 7b536cc05cb..747bd4529a0 100644
--- a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
+++ b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
@@ -2,7 +2,7 @@ require 'spec_helper'
describe Projects::HashedStorage::MigrateRepositoryService do
let(:gitlab_shell) { Gitlab::Shell.new }
- let(:project) { create(:project, :repository, :wiki_repo) }
+ let(:project) { create(:project, :legacy_storage, :repository, :wiki_repo) }
let(:service) { described_class.new(project) }
let(:legacy_storage) { Storage::LegacyProject.new(project) }
let(:hashed_storage) { Storage::HashedProject.new(project) }
diff --git a/spec/services/projects/hashed_storage_migration_service_spec.rb b/spec/services/projects/hashed_storage_migration_service_spec.rb
index 466f0b5d7c2..e8e18bb3ac0 100644
--- a/spec/services/projects/hashed_storage_migration_service_spec.rb
+++ b/spec/services/projects/hashed_storage_migration_service_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe Projects::HashedStorageMigrationService do
- let(:project) { create(:project, :empty_repo, :wiki_repo) }
+ let(:project) { create(:project, :empty_repo, :wiki_repo, :legacy_storage) }
subject(:service) { described_class.new(project) }
describe '#execute' do
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index ef68742a463..ae0e22e3dc0 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -4,7 +4,7 @@ describe Projects::TransferService do
let(:gitlab_shell) { Gitlab::Shell.new }
let(:user) { create(:user) }
let(:group) { create(:group) }
- let(:project) { create(:project, :repository, namespace: user.namespace) }
+ let(:project) { create(:project, :repository, :legacy_storage, namespace: user.namespace) }
context 'namespace -> namespace' do
before do
@@ -214,7 +214,7 @@ describe Projects::TransferService do
end
context 'when hashed storage in use' do
- let(:hashed_project) { create(:project, :repository, :hashed, namespace: user.namespace) }
+ let(:hashed_project) { create(:project, :repository, namespace: user.namespace) }
before do
group.add_owner(user)
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index fc6aa713d6f..a0b97ceead9 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -150,6 +150,8 @@ describe Projects::UpdateService do
let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage]['path'] }
context 'with legacy storage' do
+ let(:project) { create(:project, :legacy_storage, :repository, creator: user, namespace: user.namespace) }
+
before do
gitlab_shell.add_repository(repository_storage, "#{user.namespace.full_path}/existing")
end
diff --git a/spec/services/search/snippet_service_spec.rb b/spec/services/search/snippet_service_spec.rb
index bc7885b03d9..8ad162ad66e 100644
--- a/spec/services/search/snippet_service_spec.rb
+++ b/spec/services/search/snippet_service_spec.rb
@@ -2,7 +2,7 @@ require 'spec_helper'
describe Search::SnippetService do
let(:author) { create(:author) }
- let(:project) { create(:project) }
+ let(:project) { create(:project, :public) }
let!(:public_snippet) { create(:snippet, :public, content: 'password: XXX') }
let!(:internal_snippet) { create(:snippet, :internal, content: 'password: XXX') }
diff --git a/spec/services/users/destroy_service_spec.rb b/spec/services/users/destroy_service_spec.rb
index bb3d73edf8e..11c75ddfcf8 100644
--- a/spec/services/users/destroy_service_spec.rb
+++ b/spec/services/users/destroy_service_spec.rb
@@ -173,7 +173,7 @@ describe Users::DestroyService do
end
context 'legacy storage' do
- let!(:project) { create(:project, :empty_repo, namespace: user.namespace) }
+ let!(:project) { create(:project, :empty_repo, :legacy_storage, namespace: user.namespace) }
it 'removes repository' do
expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_falsey
@@ -181,7 +181,7 @@ describe Users::DestroyService do
end
context 'hashed storage' do
- let!(:project) { create(:project, :empty_repo, :hashed, namespace: user.namespace) }
+ let!(:project) { create(:project, :empty_repo, namespace: user.namespace) }
it 'removes repository' do
expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_falsey
diff --git a/spec/support/controllers/githubish_import_controller_shared_examples.rb b/spec/support/controllers/githubish_import_controller_shared_examples.rb
index a0839eefe6c..3321f920666 100644
--- a/spec/support/controllers/githubish_import_controller_shared_examples.rb
+++ b/spec/support/controllers/githubish_import_controller_shared_examples.rb
@@ -92,6 +92,7 @@ end
shared_examples 'a GitHub-ish import controller: POST create' do
let(:user) { create(:user) }
+ let(:project) { create(:project) }
let(:provider_username) { user.username }
let(:provider_user) { OpenStruct.new(login: provider_username) }
let(:provider_repo) do
@@ -107,14 +108,34 @@ shared_examples 'a GitHub-ish import controller: POST create' do
assign_session_token(provider)
end
+  it 'returns a 200 response when the project is imported successfully' do
+ allow(Gitlab::LegacyGithubImport::ProjectCreator)
+ .to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider)
+ .and_return(double(execute: project))
+
+ post :create, format: :json
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+
+  it 'returns a 422 response when the project could not be imported' do
+ allow(Gitlab::LegacyGithubImport::ProjectCreator)
+ .to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider)
+ .and_return(double(execute: build(:project)))
+
+ post :create, format: :json
+
+ expect(response).to have_gitlab_http_status(422)
+ end
+
context "when the repository owner is the provider user" do
context "when the provider user and GitLab user's usernames match" do
it "takes the current user's namespace" do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, format: :js
+ post :create, format: :json
end
end
@@ -124,9 +145,9 @@ shared_examples 'a GitHub-ish import controller: POST create' do
it "takes the current user's namespace" do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, format: :js
+ post :create, format: :json
end
end
end
@@ -151,9 +172,9 @@ shared_examples 'a GitHub-ish import controller: POST create' do
it "takes the existing namespace" do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, provider_repo.name, existing_namespace, user, access_params, type: provider)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, format: :js
+ post :create, format: :json
end
end
@@ -163,9 +184,9 @@ shared_examples 'a GitHub-ish import controller: POST create' do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, format: :js
+ post :create, format: :json
end
end
end
@@ -174,17 +195,17 @@ shared_examples 'a GitHub-ish import controller: POST create' do
context "when current user can create namespaces" do
it "creates the namespace" do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
- .to receive(:new).and_return(double(execute: true))
+ .to receive(:new).and_return(double(execute: project))
- expect { post :create, target_namespace: provider_repo.name, format: :js }.to change(Namespace, :count).by(1)
+ expect { post :create, target_namespace: provider_repo.name, format: :json }.to change(Namespace, :count).by(1)
end
it "takes the new namespace" do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, provider_repo.name, an_instance_of(Group), user, access_params, type: provider)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, target_namespace: provider_repo.name, format: :js
+ post :create, target_namespace: provider_repo.name, format: :json
end
end
@@ -195,17 +216,17 @@ shared_examples 'a GitHub-ish import controller: POST create' do
it "doesn't create the namespace" do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
- .to receive(:new).and_return(double(execute: true))
+ .to receive(:new).and_return(double(execute: project))
- expect { post :create, format: :js }.not_to change(Namespace, :count)
+ expect { post :create, format: :json }.not_to change(Namespace, :count)
end
it "takes the current user's namespace" do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, format: :js
+ post :create, format: :json
end
end
end
@@ -221,21 +242,21 @@ shared_examples 'a GitHub-ish import controller: POST create' do
it 'takes the selected namespace and name' do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, test_name, test_namespace, user, access_params, type: provider)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, { target_namespace: test_namespace.name, new_name: test_name, format: :js }
+ post :create, { target_namespace: test_namespace.name, new_name: test_name, format: :json }
end
it 'takes the selected name and default namespace' do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, test_name, user.namespace, user, access_params, type: provider)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, { new_name: test_name, format: :js }
+ post :create, { new_name: test_name, format: :json }
end
end
- context 'user has chosen an existing nested namespace and name for the project' do
+ context 'user has chosen an existing nested namespace and name for the project', :postgresql do
let(:parent_namespace) { create(:group, name: 'foo', owner: user) }
let(:nested_namespace) { create(:group, name: 'bar', parent: parent_namespace) }
let(:test_name) { 'test_name' }
@@ -247,63 +268,124 @@ shared_examples 'a GitHub-ish import controller: POST create' do
it 'takes the selected namespace and name' do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, test_name, nested_namespace, user, access_params, type: provider)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, { target_namespace: nested_namespace.full_path, new_name: test_name, format: :js }
+ post :create, { target_namespace: nested_namespace.full_path, new_name: test_name, format: :json }
end
end
- context 'user has chosen a non-existent nested namespaces and name for the project' do
+  context 'user has chosen non-existent nested namespaces and a name for the project', :postgresql do
let(:test_name) { 'test_name' }
it 'takes the selected namespace and name' do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :js }
+ post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :json }
end
it 'creates the namespaces' do
allow(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- expect { post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :js } }
+ expect { post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :json } }
.to change { Namespace.count }.by(2)
end
it 'new namespace has the right parent' do
allow(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :js }
+ post :create, { target_namespace: 'foo/bar', new_name: test_name, format: :json }
expect(Namespace.find_by_path_or_name('bar').parent.path).to eq('foo')
end
end
- context 'user has chosen existent and non-existent nested namespaces and name for the project' do
+  context 'user has chosen existing and non-existent nested namespaces and a name for the project', :postgresql do
let(:test_name) { 'test_name' }
let!(:parent_namespace) { create(:group, name: 'foo', owner: user) }
+ before do
+ parent_namespace.add_owner(user)
+ end
+
it 'takes the selected namespace and name' do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :js }
+ post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :json }
end
it 'creates the namespaces' do
allow(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider)
- .and_return(double(execute: true))
+ .and_return(double(execute: project))
- expect { post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :js } }
+ expect { post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :json } }
.to change { Namespace.count }.by(2)
end
+
+ it 'does not create a new namespace under the user namespace' do
+ expect(Gitlab::LegacyGithubImport::ProjectCreator)
+ .to receive(:new).with(provider_repo, test_name, user.namespace, user, access_params, type: provider)
+ .and_return(double(execute: build_stubbed(:project)))
+
+ expect { post :create, { target_namespace: "#{user.namespace_path}/test_group", new_name: test_name, format: :js } }
+ .not_to change { Namespace.count }
+ end
+ end
+
+  context 'user cannot create a subgroup inside a group they are not a member of' do
+ let(:test_name) { 'test_name' }
+ let!(:parent_namespace) { create(:group, name: 'foo') }
+
+ it 'does not take the selected namespace and name' do
+ expect(Gitlab::LegacyGithubImport::ProjectCreator)
+ .to receive(:new).with(provider_repo, test_name, user.namespace, user, access_params, type: provider)
+ .and_return(double(execute: build_stubbed(:project)))
+
+ post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :js }
+ end
+
+ it 'does not create the namespaces' do
+ allow(Gitlab::LegacyGithubImport::ProjectCreator)
+ .to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider)
+ .and_return(double(execute: build_stubbed(:project)))
+
+ expect { post :create, { target_namespace: 'foo/foobar/bar', new_name: test_name, format: :js } }
+ .not_to change { Namespace.count }
+ end
+ end
+
+ context 'user can use a group without having permissions to create a group' do
+ let(:test_name) { 'test_name' }
+ let!(:group) { create(:group, name: 'foo') }
+
+ it 'takes the selected namespace and name' do
+ group.add_owner(user)
+ user.update!(can_create_group: false)
+
+ expect(Gitlab::LegacyGithubImport::ProjectCreator)
+ .to receive(:new).with(provider_repo, test_name, group, user, access_params, type: provider)
+ .and_return(double(execute: build_stubbed(:project)))
+
+ post :create, { target_namespace: 'foo', new_name: test_name, format: :js }
+ end
+ end
+
+  context 'when the user cannot create projects in the chosen namespace' do
+    it 'returns a 422 response' do
+ other_namespace = create(:group, name: 'other_namespace')
+
+ post :create, { target_namespace: other_namespace.name, format: :json }
+
+ expect(response).to have_gitlab_http_status(422)
+ end
end
end
end
diff --git a/spec/support/factory_girl.rb b/spec/support/factory_bot.rb
index c7890e49c66..c7890e49c66 100644
--- a/spec/support/factory_girl.rb
+++ b/spec/support/factory_bot.rb
diff --git a/spec/support/features/variable_list_shared_examples.rb b/spec/support/features/variable_list_shared_examples.rb
new file mode 100644
index 00000000000..0d8f7a7aae6
--- /dev/null
+++ b/spec/support/features/variable_list_shared_examples.rb
@@ -0,0 +1,269 @@
+shared_examples 'variable list' do
+ it 'shows list of variables' do
+ page.within('.js-ci-variable-list-section') do
+ expect(first('.js-ci-variable-input-key').value).to eq(variable.key)
+ end
+ end
+
+ it 'adds new secret variable' do
+ page.within('.js-ci-variable-list-section .js-row:last-child') do
+ find('.js-ci-variable-input-key').set('key')
+ find('.js-ci-variable-input-value').set('key value')
+ end
+
+ click_button('Save variables')
+ wait_for_requests
+
+ visit page_path
+
+ # We check the first row because it re-sorts to alphabetical order on refresh
+ page.within('.js-ci-variable-list-section .js-row:nth-child(1)') do
+ expect(find('.js-ci-variable-input-key').value).to eq('key')
+ expect(find('.js-ci-variable-input-value', visible: false).value).to eq('key value')
+ end
+ end
+
+ it 'adds empty variable' do
+ page.within('.js-ci-variable-list-section .js-row:last-child') do
+ find('.js-ci-variable-input-key').set('key')
+ find('.js-ci-variable-input-value').set('')
+ end
+
+ click_button('Save variables')
+ wait_for_requests
+
+ visit page_path
+
+ # We check the first row because it re-sorts to alphabetical order on refresh
+ page.within('.js-ci-variable-list-section .js-row:nth-child(1)') do
+ expect(find('.js-ci-variable-input-key').value).to eq('key')
+ expect(find('.js-ci-variable-input-value', visible: false).value).to eq('')
+ end
+ end
+
+ it 'adds new protected variable' do
+ page.within('.js-ci-variable-list-section .js-row:last-child') do
+ find('.js-ci-variable-input-key').set('key')
+ find('.js-ci-variable-input-value').set('key value')
+ find('.ci-variable-protected-item .js-project-feature-toggle').click
+
+ expect(find('.js-ci-variable-input-protected', visible: false).value).to eq('true')
+ end
+
+ click_button('Save variables')
+ wait_for_requests
+
+ visit page_path
+
+ # We check the first row because it re-sorts to alphabetical order on refresh
+ page.within('.js-ci-variable-list-section .js-row:nth-child(1)') do
+ expect(find('.js-ci-variable-input-key').value).to eq('key')
+ expect(find('.js-ci-variable-input-value', visible: false).value).to eq('key value')
+ expect(find('.js-ci-variable-input-protected', visible: false).value).to eq('true')
+ end
+ end
+
+ it 'reveals and hides variables' do
+ page.within('.js-ci-variable-list-section') do
+ expect(first('.js-ci-variable-input-key').value).to eq(variable.key)
+ expect(first('.js-ci-variable-input-value', visible: false).value).to eq(variable.value)
+ expect(page).to have_content('*' * 20)
+
+ click_button('Reveal value')
+
+ expect(first('.js-ci-variable-input-key').value).to eq(variable.key)
+ expect(first('.js-ci-variable-input-value').value).to eq(variable.value)
+ expect(page).not_to have_content('*' * 20)
+
+ click_button('Hide value')
+
+ expect(first('.js-ci-variable-input-key').value).to eq(variable.key)
+ expect(first('.js-ci-variable-input-value', visible: false).value).to eq(variable.value)
+ expect(page).to have_content('*' * 20)
+ end
+ end
+
+ it 'deletes variable' do
+ page.within('.js-ci-variable-list-section') do
+ expect(page).to have_selector('.js-row', count: 2)
+
+ first('.js-row-remove-button').click
+
+ click_button('Save variables')
+ wait_for_requests
+
+ expect(page).to have_selector('.js-row', count: 1)
+ end
+ end
+
+ it 'edits variable' do
+ page.within('.js-ci-variable-list-section') do
+ click_button('Reveal value')
+
+ page.within('.js-row:nth-child(1)') do
+ find('.js-ci-variable-input-key').set('new_key')
+ find('.js-ci-variable-input-value').set('new_value')
+ end
+
+ click_button('Save variables')
+ wait_for_requests
+
+ visit page_path
+
+ page.within('.js-row:nth-child(1)') do
+ expect(find('.js-ci-variable-input-key').value).to eq('new_key')
+ expect(find('.js-ci-variable-input-value', visible: false).value).to eq('new_value')
+ end
+ end
+ end
+
+ it 'edits variable with empty value' do
+ page.within('.js-ci-variable-list-section') do
+ click_button('Reveal value')
+
+ page.within('.js-row:nth-child(1)') do
+ find('.js-ci-variable-input-key').set('new_key')
+ find('.js-ci-variable-input-value').set('')
+ end
+
+ click_button('Save variables')
+ wait_for_requests
+
+ visit page_path
+
+ page.within('.js-row:nth-child(1)') do
+ expect(find('.js-ci-variable-input-key').value).to eq('new_key')
+ expect(find('.js-ci-variable-input-value', visible: false).value).to eq('')
+ end
+ end
+ end
+
+ it 'edits variable to be protected' do
+ # Create the unprotected variable
+ page.within('.js-ci-variable-list-section .js-row:last-child') do
+ find('.js-ci-variable-input-key').set('unprotected_key')
+ find('.js-ci-variable-input-value').set('unprotected_value')
+
+ expect(find('.js-ci-variable-input-protected', visible: false).value).to eq('false')
+ end
+
+ click_button('Save variables')
+ wait_for_requests
+
+ visit page_path
+
+    # We check the second row because the list re-sorts to alphabetical order on refresh
+ page.within('.js-ci-variable-list-section .js-row:nth-child(2)') do
+ find('.ci-variable-protected-item .js-project-feature-toggle').click
+
+ expect(find('.js-ci-variable-input-protected', visible: false).value).to eq('true')
+ end
+
+ click_button('Save variables')
+ wait_for_requests
+
+ visit page_path
+
+    # We check the second row because the list re-sorts to alphabetical order on refresh
+ page.within('.js-ci-variable-list-section .js-row:nth-child(2)') do
+ expect(find('.js-ci-variable-input-key').value).to eq('unprotected_key')
+ expect(find('.js-ci-variable-input-value', visible: false).value).to eq('unprotected_value')
+ expect(find('.js-ci-variable-input-protected', visible: false).value).to eq('true')
+ end
+ end
+
+ it 'edits variable to be unprotected' do
+ # Create the protected variable
+ page.within('.js-ci-variable-list-section .js-row:last-child') do
+ find('.js-ci-variable-input-key').set('protected_key')
+ find('.js-ci-variable-input-value').set('protected_value')
+ find('.ci-variable-protected-item .js-project-feature-toggle').click
+
+ expect(find('.js-ci-variable-input-protected', visible: false).value).to eq('true')
+ end
+
+ click_button('Save variables')
+ wait_for_requests
+
+ visit page_path
+
+ page.within('.js-ci-variable-list-section .js-row:nth-child(1)') do
+ find('.ci-variable-protected-item .js-project-feature-toggle').click
+
+ expect(find('.js-ci-variable-input-protected', visible: false).value).to eq('false')
+ end
+
+ click_button('Save variables')
+ wait_for_requests
+
+ visit page_path
+
+ page.within('.js-ci-variable-list-section .js-row:nth-child(1)') do
+ expect(find('.js-ci-variable-input-key').value).to eq('protected_key')
+ expect(find('.js-ci-variable-input-value', visible: false).value).to eq('protected_value')
+ expect(find('.js-ci-variable-input-protected', visible: false).value).to eq('false')
+ end
+ end
+
+ it 'handles multiple edits and deletion in the middle' do
+ page.within('.js-ci-variable-list-section') do
+ # Create 2 variables
+ page.within('.js-row:last-child') do
+ find('.js-ci-variable-input-key').set('akey')
+ find('.js-ci-variable-input-value').set('akeyvalue')
+ end
+ page.within('.js-row:last-child') do
+ find('.js-ci-variable-input-key').set('zkey')
+ find('.js-ci-variable-input-value').set('zkeyvalue')
+ end
+
+ click_button('Save variables')
+ wait_for_requests
+
+ expect(page).to have_selector('.js-row', count: 4)
+
+ # Remove the `akey` variable
+ page.within('.js-row:nth-child(2)') do
+ first('.js-row-remove-button').click
+ end
+
+ # Add another variable
+ page.within('.js-row:last-child') do
+ find('.js-ci-variable-input-key').set('ckey')
+ find('.js-ci-variable-input-value').set('ckeyvalue')
+ end
+
+ click_button('Save variables')
+ wait_for_requests
+
+ visit page_path
+
+      # Expect to find 3 variables (4 rows) in alphabetical order
+ expect(page).to have_selector('.js-row', count: 4)
+ row_keys = all('.js-ci-variable-input-key')
+ expect(row_keys[0].value).to eq('ckey')
+ expect(row_keys[1].value).to eq('test_key')
+ expect(row_keys[2].value).to eq('zkey')
+ expect(row_keys[3].value).to eq('')
+ end
+ end
+
+ it 'shows validation error box about duplicate keys' do
+ page.within('.js-ci-variable-list-section .js-row:last-child') do
+ find('.js-ci-variable-input-key').set('samekey')
+ find('.js-ci-variable-input-value').set('value1')
+ end
+ page.within('.js-ci-variable-list-section .js-row:last-child') do
+ find('.js-ci-variable-input-key').set('samekey')
+ find('.js-ci-variable-input-value').set('value2')
+ end
+
+ click_button('Save variables')
+ wait_for_requests
+
+    # Saving duplicate keys surfaces a validation error box
+ page.within('.js-ci-variable-list-section') do
+ expect(find('.js-ci-variable-error-box')).to have_content(/Validation failed Variables have duplicate values \(.+\)/)
+ end
+ end
+end
diff --git a/spec/support/fixture_helpers.rb b/spec/support/fixture_helpers.rb
index 128aaaf25fe..8854382dc6b 100644
--- a/spec/support/fixture_helpers.rb
+++ b/spec/support/fixture_helpers.rb
@@ -1,12 +1,12 @@
module FixtureHelpers
- def fixture_file(filename)
+ def fixture_file(filename, dir: '')
return '' if filename.blank?
- File.read(expand_fixture_path(filename))
+ File.read(expand_fixture_path(filename, dir: dir))
end
- def expand_fixture_path(filename)
- File.expand_path(Rails.root.join('spec/fixtures/', filename))
+ def expand_fixture_path(filename, dir: '')
+ File.expand_path(Rails.root.join(dir, 'spec', 'fixtures', filename))
end
end
diff --git a/spec/support/matchers/pagination_matcher.rb b/spec/support/matchers/pagination_matcher.rb
index 60f5e8239a7..9a7697e2bfc 100644
--- a/spec/support/matchers/pagination_matcher.rb
+++ b/spec/support/matchers/pagination_matcher.rb
@@ -3,3 +3,9 @@ RSpec::Matchers.define :include_pagination_headers do |expected|
expect(actual.headers).to include('X-Total', 'X-Total-Pages', 'X-Per-Page', 'X-Page', 'X-Next-Page', 'X-Prev-Page', 'Link')
end
end
+
+RSpec::Matchers.define :include_limited_pagination_headers do |expected|
+ match do |actual|
+ expect(actual.headers).to include('X-Per-Page', 'X-Page', 'X-Next-Page', 'X-Prev-Page', 'Link')
+ end
+end
diff --git a/spec/support/migrations_helpers.rb b/spec/support/migrations_helpers.rb
index 6522d74ba89..6bf976a2cf9 100644
--- a/spec/support/migrations_helpers.rb
+++ b/spec/support/migrations_helpers.rb
@@ -15,18 +15,27 @@ module MigrationsHelpers
ActiveRecord::Migrator.migrations(migrations_paths)
end
- def reset_column_in_migration_models
+ def clear_schema_cache!
ActiveRecord::Base.connection_pool.connections.each do |conn|
conn.schema_cache.clear!
end
+ end
- described_class.constants.sort.each do |name|
- const = described_class.const_get(name)
+ def reset_column_in_all_models
+ clear_schema_cache!
- if const.is_a?(Class) && const < ActiveRecord::Base
- const.reset_column_information
- end
- end
+ # Reset column information for the most offending classes **after** we
+    # have migrated the schema up; otherwise, column information could be
+ # outdated. We have a separate method for this so we can override it in EE.
+ ActiveRecord::Base.descendants.each(&method(:reset_column_information))
+
+ # Without that, we get errors because of missing attributes, e.g.
+ # super: no superclass method `elasticsearch_indexing' for #<ApplicationSetting:0x00007f85628508d8>
+ ApplicationSetting.define_attribute_methods
+ end
+
+ def reset_column_information(klass)
+ klass.reset_column_information
end
def previous_migration
@@ -36,7 +45,13 @@ module MigrationsHelpers
end
def migration_schema_version
- self.class.metadata[:schema] || previous_migration.version
+ metadata_schema = self.class.metadata[:schema]
+
+ if metadata_schema == :latest
+ migrations.last.version
+ else
+ metadata_schema || previous_migration.version
+ end
end
def schema_migrate_down!
@@ -45,15 +60,17 @@ module MigrationsHelpers
migration_schema_version)
end
- reset_column_in_migration_models
+ reset_column_in_all_models
end
def schema_migrate_up!
+ reset_column_in_all_models
+
disable_migrations_output do
ActiveRecord::Migrator.migrate(migrations_paths)
end
- reset_column_in_migration_models
+ reset_column_in_all_models
end
def disable_migrations_output
diff --git a/spec/support/reactive_caching_helpers.rb b/spec/support/reactive_caching_helpers.rb
index 34124f02133..e22dd974c6a 100644
--- a/spec/support/reactive_caching_helpers.rb
+++ b/spec/support/reactive_caching_helpers.rb
@@ -13,6 +13,12 @@ module ReactiveCachingHelpers
write_reactive_cache(subject, data, *qualifiers) if data
end
+ def synchronous_reactive_cache(subject)
+ allow(service).to receive(:with_reactive_cache) do |*args, &block|
+ block.call(service.calculate_reactive_cache(*args))
+ end
+ end
+
def read_reactive_cache(subject, *qualifiers)
Rails.cache.read(reactive_cache_key(subject, *qualifiers))
end
diff --git a/spec/support/shared_examples/controllers/variables_shared_examples.rb b/spec/support/shared_examples/controllers/variables_shared_examples.rb
new file mode 100644
index 00000000000..d7acf8c0032
--- /dev/null
+++ b/spec/support/shared_examples/controllers/variables_shared_examples.rb
@@ -0,0 +1,123 @@
+shared_examples 'GET #show lists all variables' do
+ it 'renders the variables as json' do
+ subject
+
+ expect(response).to match_response_schema('variables')
+ end
+
+ it 'has only one variable' do
+ subject
+
+ expect(json_response['variables'].count).to eq(1)
+ end
+end
+
+shared_examples 'PATCH #update updates variables' do
+ let(:variable_attributes) do
+ { id: variable.id,
+ key: variable.key,
+ value: variable.value,
+ protected: variable.protected?.to_s }
+ end
+ let(:new_variable_attributes) do
+ { key: 'new_key',
+ value: 'dummy_value',
+ protected: 'false' }
+ end
+
+ context 'with invalid new variable parameters' do
+ let(:variables_attributes) do
+ [
+ variable_attributes.merge(value: 'other_value'),
+ new_variable_attributes.merge(key: '...?')
+ ]
+ end
+
+ it 'does not update the existing variable' do
+ expect { subject }.not_to change { variable.reload.value }
+ end
+
+ it 'does not create the new variable' do
+ expect { subject }.not_to change { owner.variables.count }
+ end
+
+ it 'returns a bad request response' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'with duplicate new variable parameters' do
+ let(:variables_attributes) do
+ [
+ new_variable_attributes,
+ new_variable_attributes.merge(value: 'other_value')
+ ]
+ end
+
+ it 'does not update the existing variable' do
+ expect { subject }.not_to change { variable.reload.value }
+ end
+
+ it 'does not create the new variable' do
+ expect { subject }.not_to change { owner.variables.count }
+ end
+
+ it 'returns a bad request response' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'with valid new variable parameters' do
+ let(:variables_attributes) do
+ [
+ variable_attributes.merge(value: 'other_value'),
+ new_variable_attributes
+ ]
+ end
+
+ it 'updates the existing variable' do
+ expect { subject }.to change { variable.reload.value }.to('other_value')
+ end
+
+ it 'creates the new variable' do
+ expect { subject }.to change { owner.variables.count }.by(1)
+ end
+
+ it 'returns a successful response' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'has all variables in response' do
+ subject
+
+ expect(response).to match_response_schema('variables')
+ end
+ end
+
+ context 'with a deleted variable' do
+ let(:variables_attributes) { [variable_attributes.merge(_destroy: 'true')] }
+
+ it 'destroys the variable' do
+ expect { subject }.to change { owner.variables.count }.by(-1)
+ expect { variable.reload }.to raise_error ActiveRecord::RecordNotFound
+ end
+
+ it 'returns a successful response' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'has all variables in response' do
+ subject
+
+ expect(response).to match_response_schema('variables')
+ end
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb b/spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb
index 4e18804b937..9fc2fbef449 100644
--- a/spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb
@@ -17,12 +17,88 @@ shared_examples 'custom attributes endpoints' do |attributable_name|
end
end
- it 'filters by custom attributes' do
- get api("/#{attributable_name}", admin), custom_attributes: { foo: 'foo', bar: 'bar' }
+ context 'with an authorized user' do
+ it 'filters by custom attributes' do
+ get api("/#{attributable_name}", admin), custom_attributes: { foo: 'foo', bar: 'bar' }
- expect(response).to have_gitlab_http_status(200)
- expect(json_response.size).to be 1
- expect(json_response.first['id']).to eq attributable.id
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response.size).to be 1
+ expect(json_response.first['id']).to eq attributable.id
+ end
+ end
+ end
+
+ describe "GET /#{attributable_name} with custom attributes" do
+ before do
+ other_attributable
+ end
+
+ context 'with an unauthorized user' do
+ it 'does not include custom attributes' do
+ get api("/#{attributable_name}", user), with_custom_attributes: true
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response.size).to be 2
+ expect(json_response.first).not_to include 'custom_attributes'
+ end
+ end
+
+ context 'with an authorized user' do
+ it 'does not include custom attributes by default' do
+ get api("/#{attributable_name}", admin)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response.size).to be 2
+ expect(json_response.first).not_to include 'custom_attributes'
+ expect(json_response.second).not_to include 'custom_attributes'
+ end
+
+ it 'includes custom attributes if requested' do
+ get api("/#{attributable_name}", admin), with_custom_attributes: true
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response.size).to be 2
+
+ attributable_response = json_response.find { |r| r['id'] == attributable.id }
+ other_attributable_response = json_response.find { |r| r['id'] == other_attributable.id }
+
+ expect(attributable_response['custom_attributes']).to contain_exactly(
+ { 'key' => 'foo', 'value' => 'foo' },
+ { 'key' => 'bar', 'value' => 'bar' }
+ )
+
+ expect(other_attributable_response['custom_attributes']).to eq []
+ end
+ end
+ end
+
+ describe "GET /#{attributable_name}/:id with custom attributes" do
+ context 'with an unauthorized user' do
+ it 'does not include custom attributes' do
+ get api("/#{attributable_name}/#{attributable.id}", user), with_custom_attributes: true
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).not_to include 'custom_attributes'
+ end
+ end
+
+ context 'with an authorized user' do
+ it 'does not include custom attributes by default' do
+ get api("/#{attributable_name}/#{attributable.id}", admin)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).not_to include 'custom_attributes'
+ end
+
+ it 'includes custom attributes if requested' do
+ get api("/#{attributable_name}/#{attributable.id}", admin), with_custom_attributes: true
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response['custom_attributes']).to contain_exactly(
+ { 'key' => 'foo', 'value' => 'foo' },
+ { 'key' => 'bar', 'value' => 'bar' }
+ )
+ end
end
end
@@ -33,14 +109,16 @@ shared_examples 'custom attributes endpoints' do |attributable_name|
it_behaves_like 'an unauthorized API user'
end
- it 'returns all custom attributes' do
- get api("/#{attributable_name}/#{attributable.id}/custom_attributes", admin)
+ context 'with an authorized user' do
+ it 'returns all custom attributes' do
+ get api("/#{attributable_name}/#{attributable.id}/custom_attributes", admin)
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to contain_exactly(
- { 'key' => 'foo', 'value' => 'foo' },
- { 'key' => 'bar', 'value' => 'bar' }
- )
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to contain_exactly(
+ { 'key' => 'foo', 'value' => 'foo' },
+ { 'key' => 'bar', 'value' => 'bar' }
+ )
+ end
end
end
@@ -51,11 +129,13 @@ shared_examples 'custom attributes endpoints' do |attributable_name|
it_behaves_like 'an unauthorized API user'
end
- it 'returns a single custom attribute' do
- get api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin)
+ context 'with an authorized user' do
+      it 'returns a single custom attribute' do
+ get api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin)
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to eq({ 'key' => 'foo', 'value' => 'foo' })
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to eq({ 'key' => 'foo', 'value' => 'foo' })
+ end
end
end
@@ -66,24 +146,26 @@ shared_examples 'custom attributes endpoints' do |attributable_name|
it_behaves_like 'an unauthorized API user'
end
- it 'creates a new custom attribute' do
- expect do
- put api("/#{attributable_name}/#{attributable.id}/custom_attributes/new", admin), value: 'new'
- end.to change { attributable.custom_attributes.count }.by(1)
+ context 'with an authorized user' do
+ it 'creates a new custom attribute' do
+ expect do
+ put api("/#{attributable_name}/#{attributable.id}/custom_attributes/new", admin), value: 'new'
+ end.to change { attributable.custom_attributes.count }.by(1)
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to eq({ 'key' => 'new', 'value' => 'new' })
- expect(attributable.custom_attributes.find_by(key: 'new').value).to eq 'new'
- end
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to eq({ 'key' => 'new', 'value' => 'new' })
+ expect(attributable.custom_attributes.find_by(key: 'new').value).to eq 'new'
+ end
- it 'updates an existing custom attribute' do
- expect do
- put api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin), value: 'new'
- end.not_to change { attributable.custom_attributes.count }
+ it 'updates an existing custom attribute' do
+ expect do
+ put api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin), value: 'new'
+ end.not_to change { attributable.custom_attributes.count }
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to eq({ 'key' => 'foo', 'value' => 'new' })
- expect(custom_attribute1.reload.value).to eq 'new'
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to eq({ 'key' => 'foo', 'value' => 'new' })
+ expect(custom_attribute1.reload.value).to eq 'new'
+ end
end
end
@@ -94,13 +176,15 @@ shared_examples 'custom attributes endpoints' do |attributable_name|
it_behaves_like 'an unauthorized API user'
end
- it 'deletes an existing custom attribute' do
- expect do
- delete api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin)
- end.to change { attributable.custom_attributes.count }.by(-1)
+ context 'with an authorized user' do
+ it 'deletes an existing custom attribute' do
+ expect do
+ delete api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin)
+ end.to change { attributable.custom_attributes.count }.by(-1)
- expect(response).to have_gitlab_http_status(204)
- expect(attributable.custom_attributes.find_by(key: 'foo')).to be_nil
+ expect(response).to have_gitlab_http_status(204)
+ expect(attributable.custom_attributes.find_by(key: 'foo')).to be_nil
+ end
end
end
end
diff --git a/spec/support/snippet_visibility.rb b/spec/support/snippet_visibility.rb
new file mode 100644
index 00000000000..1cb904823d2
--- /dev/null
+++ b/spec/support/snippet_visibility.rb
@@ -0,0 +1,304 @@
+RSpec.shared_examples 'snippet visibility' do
+ let!(:author) { create(:user) }
+ let!(:member) { create(:user) }
+ let!(:external) { create(:user, :external) }
+
+ let!(:snippet_type_visibilities) do
+ {
+ public: Snippet::PUBLIC,
+ internal: Snippet::INTERNAL,
+ private: Snippet::PRIVATE
+ }
+ end
+
+ context "For project snippets" do
+ let!(:users) do
+ {
+ unauthenticated: nil,
+ external: external,
+ non_member: create(:user),
+ member: member,
+ author: author
+ }
+ end
+
+ let!(:project_type_visibilities) do
+ {
+ public: Gitlab::VisibilityLevel::PUBLIC,
+ internal: Gitlab::VisibilityLevel::INTERNAL,
+ private: Gitlab::VisibilityLevel::PRIVATE
+ }
+ end
+
+ let(:project_feature_visibilities) do
+ {
+ enabled: ProjectFeature::ENABLED,
+ private: ProjectFeature::PRIVATE,
+ disabled: ProjectFeature::DISABLED
+ }
+ end
+
+ where(:project_type, :feature_visibility, :user_type, :snippet_type, :outcome) do
+ [
+ # Public projects
+ [:public, :enabled, :unauthenticated, :public, true],
+ [:public, :enabled, :unauthenticated, :internal, false],
+ [:public, :enabled, :unauthenticated, :private, false],
+
+ [:public, :enabled, :external, :public, true],
+ [:public, :enabled, :external, :internal, false],
+ [:public, :enabled, :external, :private, false],
+
+ [:public, :enabled, :non_member, :public, true],
+ [:public, :enabled, :non_member, :internal, true],
+ [:public, :enabled, :non_member, :private, false],
+
+ [:public, :enabled, :member, :public, true],
+ [:public, :enabled, :member, :internal, true],
+ [:public, :enabled, :member, :private, true],
+
+ [:public, :enabled, :author, :public, true],
+ [:public, :enabled, :author, :internal, true],
+ [:public, :enabled, :author, :private, true],
+
+ [:public, :private, :unauthenticated, :public, false],
+ [:public, :private, :unauthenticated, :internal, false],
+ [:public, :private, :unauthenticated, :private, false],
+
+ [:public, :private, :external, :public, false],
+ [:public, :private, :external, :internal, false],
+ [:public, :private, :external, :private, false],
+
+ [:public, :private, :non_member, :public, false],
+ [:public, :private, :non_member, :internal, false],
+ [:public, :private, :non_member, :private, false],
+
+ [:public, :private, :member, :public, true],
+ [:public, :private, :member, :internal, true],
+ [:public, :private, :member, :private, true],
+
+ [:public, :private, :author, :public, true],
+ [:public, :private, :author, :internal, true],
+ [:public, :private, :author, :private, true],
+
+ [:public, :disabled, :unauthenticated, :public, false],
+ [:public, :disabled, :unauthenticated, :internal, false],
+ [:public, :disabled, :unauthenticated, :private, false],
+
+ [:public, :disabled, :external, :public, false],
+ [:public, :disabled, :external, :internal, false],
+ [:public, :disabled, :external, :private, false],
+
+ [:public, :disabled, :non_member, :public, false],
+ [:public, :disabled, :non_member, :internal, false],
+ [:public, :disabled, :non_member, :private, false],
+
+ [:public, :disabled, :member, :public, false],
+ [:public, :disabled, :member, :internal, false],
+ [:public, :disabled, :member, :private, false],
+
+ [:public, :disabled, :author, :public, false],
+ [:public, :disabled, :author, :internal, false],
+ [:public, :disabled, :author, :private, false],
+
+ # Internal projects
+ [:internal, :enabled, :unauthenticated, :public, false],
+ [:internal, :enabled, :unauthenticated, :internal, false],
+ [:internal, :enabled, :unauthenticated, :private, false],
+
+ [:internal, :enabled, :external, :public, false],
+ [:internal, :enabled, :external, :internal, false],
+ [:internal, :enabled, :external, :private, false],
+
+ [:internal, :enabled, :non_member, :public, true],
+ [:internal, :enabled, :non_member, :internal, true],
+ [:internal, :enabled, :non_member, :private, false],
+
+ [:internal, :enabled, :member, :public, true],
+ [:internal, :enabled, :member, :internal, true],
+ [:internal, :enabled, :member, :private, true],
+
+ [:internal, :enabled, :author, :public, true],
+ [:internal, :enabled, :author, :internal, true],
+ [:internal, :enabled, :author, :private, true],
+
+ [:internal, :private, :unauthenticated, :public, false],
+ [:internal, :private, :unauthenticated, :internal, false],
+ [:internal, :private, :unauthenticated, :private, false],
+
+ [:internal, :private, :external, :public, false],
+ [:internal, :private, :external, :internal, false],
+ [:internal, :private, :external, :private, false],
+
+ [:internal, :private, :non_member, :public, false],
+ [:internal, :private, :non_member, :internal, false],
+ [:internal, :private, :non_member, :private, false],
+
+ [:internal, :private, :member, :public, true],
+ [:internal, :private, :member, :internal, true],
+ [:internal, :private, :member, :private, true],
+
+ [:internal, :private, :author, :public, true],
+ [:internal, :private, :author, :internal, true],
+ [:internal, :private, :author, :private, true],
+
+ [:internal, :disabled, :unauthenticated, :public, false],
+ [:internal, :disabled, :unauthenticated, :internal, false],
+ [:internal, :disabled, :unauthenticated, :private, false],
+
+ [:internal, :disabled, :external, :public, false],
+ [:internal, :disabled, :external, :internal, false],
+ [:internal, :disabled, :external, :private, false],
+
+ [:internal, :disabled, :non_member, :public, false],
+ [:internal, :disabled, :non_member, :internal, false],
+ [:internal, :disabled, :non_member, :private, false],
+
+ [:internal, :disabled, :member, :public, false],
+ [:internal, :disabled, :member, :internal, false],
+ [:internal, :disabled, :member, :private, false],
+
+ [:internal, :disabled, :author, :public, false],
+ [:internal, :disabled, :author, :internal, false],
+ [:internal, :disabled, :author, :private, false],
+
+ # Private projects
+ [:private, :enabled, :unauthenticated, :public, false],
+ [:private, :enabled, :unauthenticated, :internal, false],
+ [:private, :enabled, :unauthenticated, :private, false],
+
+ [:private, :enabled, :external, :public, true],
+ [:private, :enabled, :external, :internal, true],
+ [:private, :enabled, :external, :private, true],
+
+ [:private, :enabled, :non_member, :public, false],
+ [:private, :enabled, :non_member, :internal, false],
+ [:private, :enabled, :non_member, :private, false],
+
+ [:private, :enabled, :member, :public, true],
+ [:private, :enabled, :member, :internal, true],
+ [:private, :enabled, :member, :private, true],
+
+ [:private, :enabled, :author, :public, true],
+ [:private, :enabled, :author, :internal, true],
+ [:private, :enabled, :author, :private, true],
+
+ [:private, :private, :unauthenticated, :public, false],
+ [:private, :private, :unauthenticated, :internal, false],
+ [:private, :private, :unauthenticated, :private, false],
+
+ [:private, :private, :external, :public, true],
+ [:private, :private, :external, :internal, true],
+ [:private, :private, :external, :private, true],
+
+ [:private, :private, :non_member, :public, false],
+ [:private, :private, :non_member, :internal, false],
+ [:private, :private, :non_member, :private, false],
+
+ [:private, :private, :member, :public, true],
+ [:private, :private, :member, :internal, true],
+ [:private, :private, :member, :private, true],
+
+ [:private, :private, :author, :public, true],
+ [:private, :private, :author, :internal, true],
+ [:private, :private, :author, :private, true],
+
+ [:private, :disabled, :unauthenticated, :public, false],
+ [:private, :disabled, :unauthenticated, :internal, false],
+ [:private, :disabled, :unauthenticated, :private, false],
+
+ [:private, :disabled, :external, :public, false],
+ [:private, :disabled, :external, :internal, false],
+ [:private, :disabled, :external, :private, false],
+
+ [:private, :disabled, :non_member, :public, false],
+ [:private, :disabled, :non_member, :internal, false],
+ [:private, :disabled, :non_member, :private, false],
+
+ [:private, :disabled, :member, :public, false],
+ [:private, :disabled, :member, :internal, false],
+ [:private, :disabled, :member, :private, false],
+
+ [:private, :disabled, :author, :public, false],
+ [:private, :disabled, :author, :internal, false],
+ [:private, :disabled, :author, :private, false]
+ ]
+ end
+
+ with_them do
+ let!(:project) { create(:project, visibility_level: project_type_visibilities[project_type]) }
+ let!(:project_feature) { project.project_feature.update_column(:snippets_access_level, project_feature_visibilities[feature_visibility]) }
+ let!(:user) { users[user_type] }
+ let!(:snippet) { create(:project_snippet, visibility_level: snippet_type_visibilities[snippet_type], project: project, author: author) }
+ let!(:members) do
+ project.add_developer(author)
+ project.add_developer(member)
+ project.add_developer(external) if project.private?
+ end
+
+ context "For #{params[:project_type]} project and #{params[:user_type]} users" do
+ it 'should agree with the read_project_snippet policy' do
+ expect(can?(user, :read_project_snippet, snippet)).to eq(outcome)
+ end
+
+ it 'should return proper outcome' do
+ results = described_class.new(user, project: project).execute
+ expect(results.include?(snippet)).to eq(outcome)
+ end
+ end
+
+ context "Without a given project and #{params[:user_type]} users" do
+ it 'should return proper outcome' do
+ results = described_class.new(user).execute
+ expect(results.include?(snippet)).to eq(outcome)
+ end
+ end
+ end
+ end
+
+ context 'For personal snippets' do
+ let!(:users) do
+ {
+ unauthenticated: nil,
+ external: external,
+ non_member: create(:user),
+ author: author
+ }
+ end
+
+ where(:snippet_visibility, :user_type, :outcome) do
+ [
+ [:public, :unauthenticated, true],
+ [:public, :external, true],
+ [:public, :non_member, true],
+ [:public, :author, true],
+
+ [:internal, :unauthenticated, false],
+ [:internal, :external, false],
+ [:internal, :non_member, true],
+ [:internal, :author, true],
+
+ [:private, :unauthenticated, false],
+ [:private, :external, false],
+ [:private, :non_member, false],
+ [:private, :author, true]
+ ]
+ end
+
+ with_them do
+ let!(:user) { users[user_type] }
+ let!(:snippet) { create(:personal_snippet, visibility_level: snippet_type_visibilities[snippet_visibility], author: author) }
+
+ context "For personal and #{params[:snippet_visibility]} snippets with #{params[:user_type]} user" do
+ it 'should agree with read_personal_snippet policy' do
+ expect(can?(user, :read_personal_snippet, snippet)).to eq(outcome)
+ end
+
+ it 'should return proper outcome' do
+ results = described_class.new(user).execute
+ expect(results.include?(snippet)).to eq(outcome)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/track_untracked_uploads_helpers.rb b/spec/support/track_untracked_uploads_helpers.rb
index 5752078d2a0..a8b3ed1f41c 100644
--- a/spec/support/track_untracked_uploads_helpers.rb
+++ b/spec/support/track_untracked_uploads_helpers.rb
@@ -8,10 +8,6 @@ module TrackUntrackedUploadsHelpers
Gitlab::BackgroundMigration::PrepareUntrackedUploads.new.send(:ensure_temporary_tracking_table_exists)
end
- def drop_temp_table_if_exists
- ActiveRecord::Base.connection.drop_table(:untracked_files_for_uploads) if ActiveRecord::Base.connection.table_exists?(:untracked_files_for_uploads)
- end
-
def create_or_update_appearance(attrs)
a = Appearance.first_or_initialize(title: 'foo', description: 'bar')
a.update!(attrs)
diff --git a/spec/uploaders/file_uploader_spec.rb b/spec/uploaders/file_uploader_spec.rb
index 6a92e7fae51..b42ce982b27 100644
--- a/spec/uploaders/file_uploader_spec.rb
+++ b/spec/uploaders/file_uploader_spec.rb
@@ -2,7 +2,7 @@ require 'spec_helper'
describe FileUploader do
let(:group) { create(:group, name: 'awesome') }
- let(:project) { create(:project, namespace: group, name: 'project') }
+ let(:project) { create(:project, :legacy_storage, namespace: group, name: 'project') }
let(:uploader) { described_class.new(project) }
let(:upload) { double(model: project, path: 'secret/foo.jpg') }
@@ -16,12 +16,12 @@ describe FileUploader do
shared_examples 'uses hashed storage' do
context 'when rolled out attachments' do
+ let(:project) { build_stubbed(:project, namespace: group, name: 'project') }
+
before do
allow(project).to receive(:disk_path).and_return('ca/fe/fe/ed')
end
- let(:project) { build_stubbed(:project, :hashed, namespace: group, name: 'project') }
-
it_behaves_like 'builds correct paths',
store_dir: %r{ca/fe/fe/ed/\h+},
absolute_path: %r{#{described_class.root}/ca/fe/fe/ed/secret/foo.jpg}
diff --git a/spec/validators/variable_duplicates_validator_spec.rb b/spec/validators/variable_duplicates_validator_spec.rb
new file mode 100644
index 00000000000..0b71a67f94d
--- /dev/null
+++ b/spec/validators/variable_duplicates_validator_spec.rb
@@ -0,0 +1,67 @@
+require 'spec_helper'
+
+describe VariableDuplicatesValidator do
+ let(:validator) { described_class.new(attributes: [:variables], **options) }
+
+ describe '#validate_each' do
+ let(:project) { build(:project) }
+
+ subject { validator.validate_each(project, :variables, project.variables) }
+
+ context 'with no scope' do
+ let(:options) { {} }
+ let(:variables) { build_list(:ci_variable, 2, project: project) }
+
+ before do
+ project.variables << variables
+ end
+
+ it 'does not have any errors' do
+ subject
+
+ expect(project.errors.empty?).to be true
+ end
+
+ context 'with duplicates' do
+ before do
+ project.variables.build(key: variables.first.key, value: 'dummy_value')
+ end
+
+ it 'has a duplicate key error' do
+ subject
+
+ expect(project.errors).to have_key(:variables)
+ end
+ end
+ end
+
+ context 'with a scope attribute' do
+ let(:options) { { scope: :environment_scope } }
+ let(:first_variable) { build(:ci_variable, key: 'test_key', environment_scope: '*', project: project) }
+ let(:second_variable) { build(:ci_variable, key: 'test_key', environment_scope: 'prod', project: project) }
+
+ before do
+ project.variables << first_variable
+ project.variables << second_variable
+ end
+
+ it 'does not have any errors' do
+ subject
+
+ expect(project.errors.empty?).to be true
+ end
+
+ context 'with duplicates' do
+ before do
+ project.variables.build(key: second_variable.key, value: 'dummy_value', environment_scope: second_variable.environment_scope)
+ end
+
+ it 'has a duplicate key error' do
+ subject
+
+ expect(project.errors).to have_key(:variables)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/workers/check_gcp_project_billing_worker_spec.rb b/spec/workers/check_gcp_project_billing_worker_spec.rb
index 7b7a7c1bc44..526ecf75921 100644
--- a/spec/workers/check_gcp_project_billing_worker_spec.rb
+++ b/spec/workers/check_gcp_project_billing_worker_spec.rb
@@ -6,6 +6,11 @@ describe CheckGcpProjectBillingWorker do
subject { described_class.new.perform('token_key') }
+ before do
+ allow(described_class).to receive(:get_billing_state)
+ allow_any_instance_of(described_class).to receive(:update_billing_change_counter)
+ end
+
context 'when there is a token in redis' do
before do
allow(described_class).to receive(:get_session_token).and_return(token)
@@ -23,11 +28,8 @@ describe CheckGcpProjectBillingWorker do
end
it 'stores billing status in redis' do
- redis_double = double
-
expect(CheckGcpProjectBillingService).to receive_message_chain(:new, :execute).and_return([double])
- expect(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis_double)
- expect(redis_double).to receive(:set).with(described_class.redis_shared_state_key_for(token), anything, anything)
+ expect(described_class).to receive(:set_billing_state).with(token, true)
subject
end
@@ -48,7 +50,7 @@ describe CheckGcpProjectBillingWorker do
context 'when there is no token in redis' do
before do
- allow_any_instance_of(described_class).to receive(:get_session_token).and_return(nil)
+ allow(described_class).to receive(:get_session_token).and_return(nil)
end
it 'does not call the service' do
@@ -58,4 +60,57 @@ describe CheckGcpProjectBillingWorker do
end
end
end
+
+ describe 'billing change counter' do
+ subject { described_class.new.perform('token_key') }
+
+ before do
+ allow(described_class).to receive(:get_session_token).and_return('bogustoken')
+ allow_any_instance_of(described_class).to receive(:try_obtain_lease_for).and_return('randomuuid')
+ allow(described_class).to receive(:set_billing_state)
+ end
+
+ context 'when previous state was false' do
+ before do
+ expect(described_class).to receive(:get_billing_state).and_return(false)
+ end
+
+ context 'when the current state is false' do
+ before do
+ expect(CheckGcpProjectBillingService).to receive_message_chain(:new, :execute).and_return([])
+ end
+
+ it 'increments the billing change counter' do
+ expect_any_instance_of(described_class).to receive_message_chain(:billing_changed_counter, :increment)
+
+ subject
+ end
+ end
+
+ context 'when the current state is true' do
+ before do
+ expect(CheckGcpProjectBillingService).to receive_message_chain(:new, :execute).and_return([double])
+ end
+
+ it 'increments the billing change counter' do
+ expect_any_instance_of(described_class).to receive_message_chain(:billing_changed_counter, :increment)
+
+ subject
+ end
+ end
+ end
+
+ context 'when previous state was true' do
+ before do
+ expect(described_class).to receive(:get_billing_state).and_return(true)
+ expect(CheckGcpProjectBillingService).to receive_message_chain(:new, :execute).and_return([double])
+ end
+
+ it 'increments the billing change counter' do
+ expect_any_instance_of(described_class).to receive_message_chain(:billing_changed_counter, :increment)
+
+ subject
+ end
+ end
+ end
end
diff --git a/spec/workers/repository_fork_worker_spec.rb b/spec/workers/repository_fork_worker_spec.rb
index 4912baa348c..6c66658d8c3 100644
--- a/spec/workers/repository_fork_worker_spec.rb
+++ b/spec/workers/repository_fork_worker_spec.rb
@@ -68,7 +68,7 @@ describe RepositoryForkWorker do
end
it "handles bad fork" do
- error_message = "Unable to fork project #{fork_project.id} for repository #{project.full_path} -> #{fork_project.full_path}"
+ error_message = "Unable to fork project #{fork_project.id} for repository #{project.disk_path} -> #{fork_project.disk_path}"
expect_fork_repository.and_return(false)
diff --git a/spec/workers/storage_migrator_worker_spec.rb b/spec/workers/storage_migrator_worker_spec.rb
index 8619ff2f7da..ff625164142 100644
--- a/spec/workers/storage_migrator_worker_spec.rb
+++ b/spec/workers/storage_migrator_worker_spec.rb
@@ -2,7 +2,7 @@ require 'spec_helper'
describe StorageMigratorWorker do
subject(:worker) { described_class.new }
- let(:projects) { create_list(:project, 2) }
+ let(:projects) { create_list(:project, 2, :legacy_storage) }
describe '#perform' do
let(:ids) { projects.map(&:id) }