author     Robert Speicher <rspeicher@gmail.com>  2021-01-20 13:34:23 -0600
committer  Robert Speicher <rspeicher@gmail.com>  2021-01-20 13:34:23 -0600
commit     6438df3a1e0fb944485cebf07976160184697d72 (patch)
tree       00b09bfd170e77ae9391b1a2f5a93ef6839f2597 /spec
parent     42bcd54d971da7ef2854b896a7b34f4ef8601067 (diff)
download   gitlab-ce-6438df3a1e0fb944485cebf07976160184697d72.tar.gz

Add latest changes from gitlab-org/gitlab@13-8-stable-ee (tag: v13.8.0-rc42)
Diffstat (limited to 'spec'); each entry shows the file mode, the file path, and the number of lines changed:
-rw-r--r--spec/controllers/admin/dev_ops_report_controller_spec.rb6
-rw-r--r--spec/controllers/admin/projects_controller_spec.rb30
-rw-r--r--spec/controllers/application_controller_spec.rb43
-rw-r--r--spec/controllers/concerns/redis_tracking_spec.rb2
-rw-r--r--spec/controllers/concerns/spammable_actions_spec.rb198
-rw-r--r--spec/controllers/dashboard/projects_controller_spec.rb9
-rw-r--r--spec/controllers/dashboard/snippets_controller_spec.rb19
-rw-r--r--spec/controllers/explore/projects_controller_spec.rb2
-rw-r--r--spec/controllers/groups/labels_controller_spec.rb41
-rw-r--r--spec/controllers/import/bulk_imports_controller_spec.rb36
-rw-r--r--spec/controllers/metrics_controller_spec.rb75
-rw-r--r--spec/controllers/projects/blob_controller_spec.rb76
-rw-r--r--spec/controllers/projects/environments_controller_spec.rb6
-rw-r--r--spec/controllers/projects/feature_flags_controller_spec.rb418
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb24
-rw-r--r--spec/controllers/projects/jobs_controller_spec.rb34
-rw-r--r--spec/controllers/projects/labels_controller_spec.rb44
-rw-r--r--spec/controllers/projects/merge_requests/diffs_controller_spec.rb74
-rw-r--r--spec/controllers/projects/pipelines_controller_spec.rb66
-rw-r--r--spec/controllers/projects/project_members_controller_spec.rb133
-rw-r--r--spec/controllers/projects_controller_spec.rb25
-rw-r--r--spec/controllers/registrations_controller_spec.rb8
-rw-r--r--spec/controllers/repositories/git_http_controller_spec.rb2
-rw-r--r--spec/db/schema_spec.rb2
-rw-r--r--spec/factories/alerts_service_data.rb8
-rw-r--r--spec/factories/boards.rb4
-rw-r--r--spec/factories/ci/builds.rb8
-rw-r--r--spec/factories/ci_platform_metrics.rb2
-rw-r--r--spec/factories/namespace_onboarding_actions.rb8
-rw-r--r--spec/factories/namespace_package_settings.rb14
-rw-r--r--spec/factories/onboarding_progresses.rb7
-rw-r--r--spec/factories/packages.rb22
-rw-r--r--spec/factories/packages/debian/distribution.rb25
-rw-r--r--spec/factories/packages/debian/file_metadatum.rb60
-rw-r--r--spec/factories/packages/debian/group_architecture.rb9
-rw-r--r--spec/factories/packages/debian/project_architecture.rb9
-rw-r--r--spec/factories/packages/package_file.rb91
-rw-r--r--spec/factories/project_group_links.rb2
-rw-r--r--spec/factories/project_members.rb4
-rw-r--r--spec/factories/projects.rb4
-rw-r--r--spec/factories/services.rb18
-rw-r--r--spec/factories/usage_data.rb4
-rw-r--r--spec/factories_spec.rb2
-rw-r--r--spec/features/admin/admin_groups_spec.rb1
-rw-r--r--spec/features/admin/admin_runners_spec.rb6
-rw-r--r--spec/features/admin/admin_settings_spec.rb31
-rw-r--r--spec/features/alert_management/user_filters_alerts_by_status_spec.rb1
-rw-r--r--spec/features/alert_management/user_searches_alerts_spec.rb1
-rw-r--r--spec/features/alert_management/user_updates_alert_status_spec.rb1
-rw-r--r--spec/features/dashboard/merge_requests_spec.rb6
-rw-r--r--spec/features/dashboard/projects_spec.rb2
-rw-r--r--spec/features/groups/board_spec.rb11
-rw-r--r--spec/features/groups/import_export/connect_instance_spec.rb2
-rw-r--r--spec/features/groups/members/manage_groups_spec.rb1
-rw-r--r--spec/features/groups/members/manage_members_spec.rb28
-rw-r--r--spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb1
-rw-r--r--spec/features/groups/members/master_manages_access_requests_spec.rb1
-rw-r--r--spec/features/groups/members/sort_members_spec.rb196
-rw-r--r--spec/features/groups/settings/ci_cd_spec.rb2
-rw-r--r--spec/features/groups/settings/packages_and_registries_spec.rb59
-rw-r--r--spec/features/incidents/incident_details_spec.rb2
-rw-r--r--spec/features/issues/gfm_autocomplete_spec.rb163
-rw-r--r--spec/features/issues/issue_sidebar_spec.rb358
-rw-r--r--spec/features/issues/issue_state_spec.rb10
-rw-r--r--spec/features/issues/user_comments_on_issue_spec.rb1
-rw-r--r--spec/features/issues/user_creates_issue_spec.rb4
-rw-r--r--spec/features/merge_request/user_closes_reopens_merge_request_state_spec.rb2
-rw-r--r--spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb3
-rw-r--r--spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb1
-rw-r--r--spec/features/merge_request/user_scrolls_to_note_on_load_spec.rb9
-rw-r--r--spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb22
-rw-r--r--spec/features/merge_request/user_sees_merge_widget_spec.rb46
-rw-r--r--spec/features/merge_request/user_sees_pipelines_spec.rb10
-rw-r--r--spec/features/merge_request/user_sees_versions_spec.rb4
-rw-r--r--spec/features/merge_request/user_suggests_changes_on_diff_spec.rb3
-rw-r--r--spec/features/merge_request/user_uses_quick_actions_spec.rb1
-rw-r--r--spec/features/projects/ci/lint_spec.rb4
-rw-r--r--spec/features/projects/commit/user_reverts_commit_spec.rb38
-rw-r--r--spec/features/projects/environments_pod_logs_spec.rb2
-rw-r--r--spec/features/projects/feature_flags/user_sees_feature_flag_list_spec.rb36
-rw-r--r--spec/features/projects/feature_flags/user_updates_feature_flag_spec.rb117
-rw-r--r--spec/features/projects/files/dockerfile_dropdown_spec.rb2
-rw-r--r--spec/features/projects/files/gitlab_ci_syntax_yml_dropdown_spec.rb56
-rw-r--r--spec/features/projects/issues/design_management/user_uploads_designs_spec.rb2
-rw-r--r--spec/features/projects/issues/email_participants_spec.rb34
-rw-r--r--spec/features/projects/jobs/permissions_spec.rb57
-rw-r--r--spec/features/projects/jobs/user_browses_jobs_spec.rb2
-rw-r--r--spec/features/projects/members/group_members_spec.rb55
-rw-r--r--spec/features/projects/members/groups_with_access_list_spec.rb17
-rw-r--r--spec/features/projects/members/invite_group_spec.rb10
-rw-r--r--spec/features/projects/members/list_spec.rb5
-rw-r--r--spec/features/projects/members/master_manages_access_requests_spec.rb1
-rw-r--r--spec/features/projects/members/tabs_spec.rb73
-rw-r--r--spec/features/projects/releases/user_creates_release_spec.rb2
-rw-r--r--spec/features/projects/services/user_activates_alerts_spec.rb74
-rw-r--r--spec/features/projects/settings/pipelines_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/user_manages_project_members_spec.rb6
-rw-r--r--spec/features/projects/snippets/user_deletes_snippet_spec.rb4
-rw-r--r--spec/features/promotion_spec.rb2
-rw-r--r--spec/features/protected_tags_spec.rb2
-rw-r--r--spec/features/runners_spec.rb34
-rw-r--r--spec/features/security/dashboard_access_spec.rb2
-rw-r--r--spec/features/security/profile_access_spec.rb12
-rw-r--r--spec/features/users/login_spec.rb67
-rw-r--r--spec/features/users/signup_spec.rb6
-rw-r--r--spec/finders/alert_management/alerts_finder_spec.rb6
-rw-r--r--spec/finders/ci/daily_build_group_report_results_finder_spec.rb6
-rw-r--r--spec/finders/ci/pipelines_for_merge_request_finder_spec.rb18
-rw-r--r--spec/finders/cluster_ancestors_finder_spec.rb12
-rw-r--r--spec/finders/concerns/packages/finder_helper_spec.rb161
-rw-r--r--spec/finders/group_projects_finder_spec.rb38
-rw-r--r--spec/finders/groups_finder_spec.rb23
-rw-r--r--spec/finders/issues_finder_spec.rb176
-rw-r--r--spec/finders/merge_requests_finder_spec.rb55
-rw-r--r--spec/finders/packages/debian/distributions_finder_spec.rb28
-rw-r--r--spec/finders/packages/group_packages_finder_spec.rb8
-rw-r--r--spec/finders/packages/maven/package_finder_spec.rb80
-rw-r--r--spec/finders/packages/nuget/package_finder_spec.rb119
-rw-r--r--spec/finders/packages/packages_finder_spec.rb6
-rw-r--r--spec/finders/projects_finder_spec.rb4
-rw-r--r--spec/finders/snippets_finder_spec.rb36
-rw-r--r--spec/finders/template_finder_spec.rb2
-rw-r--r--spec/finders/users_finder_spec.rb27
-rw-r--r--spec/fixtures/api/schemas/entities/discussion.json1
-rw-r--r--spec/fixtures/api/schemas/entities/member.json (renamed from spec/fixtures/api/schemas/group_member.json)49
-rw-r--r--spec/fixtures/api/schemas/entities/member_user.json22
-rw-r--r--spec/fixtures/api/schemas/environment.json6
-rw-r--r--spec/fixtures/api/schemas/graphql/namespace/package_settings.json12
-rw-r--r--spec/fixtures/api/schemas/graphql/packages/package_composer_details.json12
-rw-r--r--spec/fixtures/api/schemas/graphql/packages/package_details.json36
-rw-r--r--spec/fixtures/api/schemas/members.json (renamed from spec/fixtures/api/schemas/group_members.json)2
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/notes.json1
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/snippet_repository_storage_move.json20
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/snippet_repository_storage_moves.json6
-rw-r--r--spec/fixtures/api/schemas/rollout_status.json52
-rw-r--r--spec/fixtures/emails/valid_note_on_issuable.eml24
-rw-r--r--spec/fixtures/whats_new/blank.yml9
-rw-r--r--spec/fixtures/whats_new/invalid.yml20
-rw-r--r--spec/fixtures/whats_new/valid.yml20
-rw-r--r--spec/frontend/__helpers__/README.md5
-rw-r--r--spec/frontend/__helpers__/backoff_helper.js (renamed from spec/frontend/helpers/backoff_helper.js)4
-rw-r--r--spec/frontend/__helpers__/class_spec_helper.js (renamed from spec/frontend/helpers/class_spec_helper.js)0
-rw-r--r--spec/frontend/__helpers__/class_spec_helper_spec.js (renamed from spec/frontend/helpers/class_spec_helper_spec.js)0
-rw-r--r--spec/frontend/__helpers__/datetime_helpers.js6
-rw-r--r--spec/frontend/__helpers__/dom_events_helper.js (renamed from spec/frontend/helpers/dom_events_helper.js)2
-rw-r--r--spec/frontend/__helpers__/dom_shims/README.md (renamed from spec/frontend/helpers/dom_shims/README.md)0
-rw-r--r--spec/frontend/__helpers__/dom_shims/create_object_url.js (renamed from spec/frontend/helpers/dom_shims/create_object_url.js)0
-rw-r--r--spec/frontend/__helpers__/dom_shims/element_scroll_by.js (renamed from spec/frontend/helpers/dom_shims/element_scroll_by.js)0
-rw-r--r--spec/frontend/__helpers__/dom_shims/element_scroll_into_view.js (renamed from spec/frontend/helpers/dom_shims/element_scroll_into_view.js)0
-rw-r--r--spec/frontend/__helpers__/dom_shims/element_scroll_to.js (renamed from spec/frontend/helpers/dom_shims/element_scroll_to.js)0
-rw-r--r--spec/frontend/__helpers__/dom_shims/form_element.js (renamed from spec/frontend/helpers/dom_shims/form_element.js)0
-rw-r--r--spec/frontend/__helpers__/dom_shims/get_client_rects.js (renamed from spec/frontend/helpers/dom_shims/get_client_rects.js)0
-rw-r--r--spec/frontend/__helpers__/dom_shims/get_client_rects_spec.js (renamed from spec/frontend/helpers/dom_shims/get_client_rects_spec.js)0
-rw-r--r--spec/frontend/__helpers__/dom_shims/image_element_properties.js (renamed from spec/frontend/helpers/dom_shims/image_element_properties.js)0
-rw-r--r--spec/frontend/__helpers__/dom_shims/index.js (renamed from spec/frontend/helpers/dom_shims/index.js)0
-rw-r--r--spec/frontend/__helpers__/dom_shims/inner_text.js (renamed from spec/frontend/helpers/dom_shims/inner_text.js)0
-rw-r--r--spec/frontend/__helpers__/dom_shims/range.js (renamed from spec/frontend/helpers/dom_shims/range.js)0
-rw-r--r--spec/frontend/__helpers__/dom_shims/scroll_by.js (renamed from spec/frontend/helpers/dom_shims/scroll_by.js)0
-rw-r--r--spec/frontend/__helpers__/dom_shims/size_properties.js (renamed from spec/frontend/helpers/dom_shims/size_properties.js)2
-rw-r--r--spec/frontend/__helpers__/dom_shims/window_scroll_to.js (renamed from spec/frontend/helpers/dom_shims/window_scroll_to.js)0
-rw-r--r--spec/frontend/__helpers__/emoji.js (renamed from spec/frontend/helpers/emoji.js)10
-rw-r--r--spec/frontend/__helpers__/experimentation_helper.js (renamed from spec/frontend/helpers/experimentation_helper.js)0
-rw-r--r--spec/frontend/__helpers__/fake_date.js (renamed from spec/frontend/helpers/fake_date.js)6
-rw-r--r--spec/frontend/__helpers__/fake_date_spec.js (renamed from spec/frontend/helpers/fake_date_spec.js)0
-rw-r--r--spec/frontend/__helpers__/fake_request_animation_frame.js (renamed from spec/frontend/helpers/fake_request_animation_frame.js)2
-rw-r--r--spec/frontend/__helpers__/filtered_search_spec_helper.js (renamed from spec/frontend/helpers/filtered_search_spec_helper.js)0
-rw-r--r--spec/frontend/__helpers__/fixtures.js (renamed from spec/frontend/helpers/fixtures.js)2
-rw-r--r--spec/frontend/__helpers__/init_vue_mr_page_helper.js (renamed from spec/frontend/helpers/init_vue_mr_page_helper.js)0
-rw-r--r--spec/frontend/__helpers__/jest_helpers.js (renamed from spec/frontend/helpers/jest_helpers.js)2
-rw-r--r--spec/frontend/__helpers__/jquery.js (renamed from spec/frontend/helpers/jquery.js)0
-rw-r--r--spec/frontend/__helpers__/keep_alive_component_helper.js (renamed from spec/frontend/helpers/keep_alive_component_helper.js)0
-rw-r--r--spec/frontend/__helpers__/keep_alive_component_helper_spec.js (renamed from spec/frontend/helpers/keep_alive_component_helper_spec.js)0
-rw-r--r--spec/frontend/__helpers__/local_storage_helper.js (renamed from spec/frontend/helpers/local_storage_helper.js)6
-rw-r--r--spec/frontend/__helpers__/local_storage_helper_spec.js (renamed from spec/frontend/helpers/local_storage_helper_spec.js)0
-rw-r--r--spec/frontend/__helpers__/locale_helper.js (renamed from spec/frontend/helpers/locale_helper.js)2
-rw-r--r--spec/frontend/__helpers__/mock_apollo_helper.js (renamed from spec/frontend/helpers/mock_apollo_helper.js)0
-rw-r--r--spec/frontend/__helpers__/mock_dom_observer.js (renamed from spec/frontend/helpers/mock_dom_observer.js)0
-rw-r--r--spec/frontend/__helpers__/mock_window_location_helper.js (renamed from spec/frontend/helpers/mock_window_location_helper.js)2
-rw-r--r--spec/frontend/__helpers__/set_timeout_promise_helper.js (renamed from spec/frontend/helpers/set_timeout_promise_helper.js)2
-rw-r--r--spec/frontend/__helpers__/set_window_location_helper.js (renamed from spec/frontend/helpers/set_window_location_helper.js)0
-rw-r--r--spec/frontend/__helpers__/set_window_location_helper_spec.js (renamed from spec/frontend/helpers/set_window_location_helper_spec.js)2
-rw-r--r--spec/frontend/__helpers__/stub_children.js (renamed from spec/frontend/helpers/stub_children.js)2
-rw-r--r--spec/frontend/__helpers__/stub_component.js (renamed from spec/frontend/helpers/stub_component.js)0
-rw-r--r--spec/frontend/__helpers__/stub_transition.js8
-rw-r--r--spec/frontend/__helpers__/test_constants.js (renamed from spec/frontend/helpers/test_constants.js)0
-rw-r--r--spec/frontend/__helpers__/text_helper.js (renamed from spec/frontend/helpers/text_helper.js)6
-rw-r--r--spec/frontend/__helpers__/timeout.js (renamed from spec/frontend/helpers/timeout.js)6
-rw-r--r--spec/frontend/__helpers__/tracking_helper.js (renamed from spec/frontend/helpers/tracking_helper.js)2
-rw-r--r--spec/frontend/__helpers__/user_mock_data_helper.js (renamed from spec/frontend/helpers/user_mock_data_helper.js)7
-rw-r--r--spec/frontend/__helpers__/vue_mock_directive.js (renamed from spec/frontend/helpers/vue_mock_directive.js)2
-rw-r--r--spec/frontend/__helpers__/vue_mount_component_helper.js (renamed from spec/frontend/helpers/vue_mount_component_helper.js)0
-rw-r--r--spec/frontend/__helpers__/vue_test_utils_helper.js (renamed from spec/frontend/helpers/vue_test_utils_helper.js)10
-rw-r--r--spec/frontend/__helpers__/vue_test_utils_helper_spec.js (renamed from spec/frontend/helpers/vue_test_utils_helper_spec.js)0
-rw-r--r--spec/frontend/__helpers__/vuex_action_helper.js (renamed from spec/frontend/helpers/vuex_action_helper.js)6
-rw-r--r--spec/frontend/__helpers__/vuex_action_helper_spec.js (renamed from spec/frontend/helpers/vuex_action_helper_spec.js)24
-rw-r--r--spec/frontend/__helpers__/wait_for_promises.js1
-rw-r--r--spec/frontend/__helpers__/wait_for_text.js (renamed from spec/frontend/helpers/wait_for_text.js)0
-rw-r--r--spec/frontend/__helpers__/wait_using_real_timer.js (renamed from spec/frontend/helpers/wait_using_real_timer.js)4
-rw-r--r--spec/frontend/__helpers__/web_worker_mock.js (renamed from spec/frontend/helpers/web_worker_mock.js)0
-rw-r--r--spec/frontend/__mocks__/@gitlab/ui.js6
-rw-r--r--spec/frontend/__mocks__/lodash/debounce.js2
-rw-r--r--spec/frontend/__mocks__/lodash/defer.js5
-rw-r--r--spec/frontend/__mocks__/lodash/throttle.js2
-rw-r--r--spec/frontend/activities_spec.js10
-rw-r--r--spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap1
-rw-r--r--spec/frontend/add_context_commits_modal/store/actions_spec.js26
-rw-r--r--spec/frontend/admin/statistics_panel/components/app_spec.js2
-rw-r--r--spec/frontend/admin/statistics_panel/store/actions_spec.js10
-rw-r--r--spec/frontend/admin/users/components/user_avatar_spec.js63
-rw-r--r--spec/frontend/admin/users/components/users_table_spec.js9
-rw-r--r--spec/frontend/admin/users/mock_data.js5
-rw-r--r--spec/frontend/alert_handler_spec.js2
-rw-r--r--spec/frontend/alert_management/components/alert_details_spec.js13
-rw-r--r--spec/frontend/alert_management/components/alert_management_empty_state_spec.js12
-rw-r--r--spec/frontend/alert_management/components/alert_management_table_spec.js94
-rw-r--r--spec/frontend/alert_management/components/alert_status_spec.js2
-rw-r--r--spec/frontend/alert_management/components/sidebar/alert_managment_sidebar_assignees_spec.js18
-rw-r--r--spec/frontend/alert_management/components/sidebar/alert_sidebar_status_spec.js2
-rw-r--r--spec/frontend/alert_management/mocks/alerts_provide_config.json4
-rw-r--r--spec/frontend/alerts_service_settings/components/alerts_service_form_spec.js49
-rw-r--r--spec/frontend/alerts_settings/__snapshots__/alerts_settings_form_spec.js.snap121
-rw-r--r--spec/frontend/alerts_settings/alert_mapping_builder_spec.js10
-rw-r--r--spec/frontend/alerts_settings/alerts_integrations_list_spec.js6
-rw-r--r--spec/frontend/alerts_settings/alerts_settings_form_spec.js43
-rw-r--r--spec/frontend/alerts_settings/alerts_settings_wrapper_spec.js17
-rw-r--r--spec/frontend/alerts_settings/util.js6
-rw-r--r--spec/frontend/analytics/instance_statistics/components/app_spec.js4
-rw-r--r--spec/frontend/analytics/instance_statistics/components/instance_statistics_count_chart_spec.js2
-rw-r--r--spec/frontend/analytics/instance_statistics/components/projects_and_groups_chart_spec.js2
-rw-r--r--spec/frontend/analytics/instance_statistics/components/users_chart_spec.js2
-rw-r--r--spec/frontend/api/api_utils_spec.js35
-rw-r--r--spec/frontend/api_spec.js194
-rw-r--r--spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap29
-rw-r--r--spec/frontend/artifacts_settings/components/keep_latest_artifact_checkbox_spec.js88
-rw-r--r--spec/frontend/authentication/two_factor_auth/components/recovery_codes_spec.js6
-rw-r--r--spec/frontend/authentication/u2f/authenticate_spec.js9
-rw-r--r--spec/frontend/authentication/u2f/register_spec.js7
-rw-r--r--spec/frontend/awards_handler_spec.js14
-rw-r--r--spec/frontend/badges/components/badge_form_spec.js6
-rw-r--r--spec/frontend/badges/components/badge_list_row_spec.js8
-rw-r--r--spec/frontend/badges/components/badge_list_spec.js12
-rw-r--r--spec/frontend/badges/components/badge_spec.js12
-rw-r--r--spec/frontend/badges/store/actions_spec.js77
-rw-r--r--spec/frontend/batch_comments/components/diff_file_drafts_spec.js16
-rw-r--r--spec/frontend/batch_comments/components/draft_note_spec.js6
-rw-r--r--spec/frontend/batch_comments/components/drafts_count_spec.js2
-rw-r--r--spec/frontend/batch_comments/components/preview_item_spec.js2
-rw-r--r--spec/frontend/batch_comments/components/publish_button_spec.js2
-rw-r--r--spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js26
-rw-r--r--spec/frontend/behaviors/autosize_spec.js2
-rw-r--r--spec/frontend/behaviors/bind_in_out_spec.js2
-rw-r--r--spec/frontend/behaviors/copy_as_gfm_spec.js10
-rw-r--r--spec/frontend/behaviors/gl_emoji_spec.js2
-rw-r--r--spec/frontend/behaviors/load_startup_css_spec.js2
-rw-r--r--spec/frontend/behaviors/markdown/paste_markdown_table_spec.js10
-rw-r--r--spec/frontend/behaviors/quick_submit_spec.js2
-rw-r--r--spec/frontend/behaviors/requires_input_spec.js24
-rw-r--r--spec/frontend/behaviors/secret_values_spec.js22
-rw-r--r--spec/frontend/behaviors/shortcuts/keybindings_spec.js2
-rw-r--r--spec/frontend/behaviors/shortcuts/shortcuts_issuable_spec.js36
-rw-r--r--spec/frontend/blob/balsamiq/balsamiq_viewer_spec.js14
-rw-r--r--spec/frontend/blob/components/blob_content_error_spec.js4
-rw-r--r--spec/frontend/blob/components/blob_edit_content_spec.js2
-rw-r--r--spec/frontend/blob/components/blob_edit_header_spec.js2
-rw-r--r--spec/frontend/blob/components/blob_header_filepath_spec.js14
-rw-r--r--spec/frontend/blob/components/blob_header_spec.js2
-rw-r--r--spec/frontend/blob/pipeline_tour_success_modal_spec.js15
-rw-r--r--spec/frontend/blob/sketch/index_spec.js6
-rw-r--r--spec/frontend/blob/utils_spec.js2
-rw-r--r--spec/frontend/blob/viewer/index_spec.js10
-rw-r--r--spec/frontend/blob_edit/edit_blob_spec.js10
-rw-r--r--spec/frontend/boards/board_list_deprecated_spec.js275
-rw-r--r--spec/frontend/boards/board_list_helper.js2
-rw-r--r--spec/frontend/boards/board_list_new_spec.js268
-rw-r--r--spec/frontend/boards/board_list_spec.js346
-rw-r--r--spec/frontend/boards/board_new_issue_deprecated_spec.js (renamed from spec/frontend/boards/board_new_issue_spec.js)2
-rw-r--r--spec/frontend/boards/boards_store_spec.js32
-rw-r--r--spec/frontend/boards/components/board_assignee_dropdown_spec.js15
-rw-r--r--spec/frontend/boards/components/board_card_layout_spec.js70
-rw-r--r--spec/frontend/boards/components/board_card_spec.js7
-rw-r--r--spec/frontend/boards/components/board_column_deprecated_spec.js (renamed from spec/frontend/boards/components/board_column_new_spec.js)49
-rw-r--r--spec/frontend/boards/components/board_column_spec.js49
-rw-r--r--spec/frontend/boards/components/board_configuration_options_spec.js49
-rw-r--r--spec/frontend/boards/components/board_content_spec.js18
-rw-r--r--spec/frontend/boards/components/board_form_spec.js216
-rw-r--r--spec/frontend/boards/components/board_list_header_deprecated_spec.js (renamed from spec/frontend/boards/components/board_list_header_new_spec.js)140
-rw-r--r--spec/frontend/boards/components/board_list_header_spec.js136
-rw-r--r--spec/frontend/boards/components/board_new_issue_spec.js (renamed from spec/frontend/boards/components/board_new_issue_new_spec.js)6
-rw-r--r--spec/frontend/boards/components/boards_selector_spec.js8
-rw-r--r--spec/frontend/boards/components/issue_count_spec.js2
-rw-r--r--spec/frontend/boards/components/issue_due_date_spec.js2
-rw-r--r--spec/frontend/boards/components/issue_time_estimate_deprecated_spec.js64
-rw-r--r--spec/frontend/boards/components/issue_time_estimate_spec.js54
-rw-r--r--spec/frontend/boards/components/sidebar/board_editable_item_spec.js25
-rw-r--r--spec/frontend/boards/components/sidebar/board_sidebar_issue_title_spec.js182
-rw-r--r--spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js18
-rw-r--r--spec/frontend/boards/components/sidebar/board_sidebar_subscription_spec.js2
-rw-r--r--spec/frontend/boards/components/sidebar/remove_issue_spec.js2
-rw-r--r--spec/frontend/boards/issue_card_deprecated_spec.js (renamed from spec/frontend/boards/issue_card_spec.js)38
-rw-r--r--spec/frontend/boards/issue_card_inner_spec.js372
-rw-r--r--spec/frontend/boards/list_spec.js4
-rw-r--r--spec/frontend/boards/mock_data.js36
-rw-r--r--spec/frontend/boards/project_select_deprecated_spec.js261
-rw-r--r--spec/frontend/boards/project_select_spec.js154
-rw-r--r--spec/frontend/boards/stores/actions_spec.js307
-rw-r--r--spec/frontend/boards/stores/getters_spec.js6
-rw-r--r--spec/frontend/boards/stores/mutations_spec.js100
-rw-r--r--spec/frontend/branches/branches_delete_modal_spec.js2
-rw-r--r--spec/frontend/ci_settings_pipeline_triggers/components/triggers_list_spec.js23
-rw-r--r--spec/frontend/ci_variable_list/ci_variable_list/ci_variable_list_spec.js21
-rw-r--r--spec/frontend/ci_variable_list/components/ci_environments_dropdown_spec.js8
-rw-r--r--spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js6
-rw-r--r--spec/frontend/ci_variable_list/components/ci_variable_settings_spec.js2
-rw-r--r--spec/frontend/ci_variable_list/components/ci_variable_table_spec.js2
-rw-r--r--spec/frontend/ci_variable_list/store/actions_spec.js20
-rw-r--r--spec/frontend/clusters/clusters_bundle_spec.js2
-rw-r--r--spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap186
-rw-r--r--spec/frontend/clusters/components/application_row_spec.js2
-rw-r--r--spec/frontend/clusters/components/applications_spec.js10
-rw-r--r--spec/frontend/clusters/components/fluentd_output_settings_spec.js12
-rw-r--r--spec/frontend/clusters/components/uninstall_application_button_spec.js2
-rw-r--r--spec/frontend/clusters/services/application_state_machine_spec.js18
-rw-r--r--spec/frontend/clusters/stores/clusters_store_spec.js23
-rw-r--r--spec/frontend/clusters_list/components/ancestor_notice_spec.js4
-rw-r--r--spec/frontend/clusters_list/components/node_error_help_text_spec.js2
-rw-r--r--spec/frontend/clusters_list/store/actions_spec.js10
-rw-r--r--spec/frontend/code_navigation/store/actions_spec.js14
-rw-r--r--spec/frontend/collapsed_sidebar_todo_spec.js14
-rw-r--r--spec/frontend/commit/commit_pipeline_status_component_spec.js4
-rw-r--r--spec/frontend/commit/pipelines/pipelines_spec.js30
-rw-r--r--spec/frontend/commits_spec.js4
-rw-r--r--spec/frontend/commons/nav/user_merge_requests_spec.js25
-rw-r--r--spec/frontend/confirm_modal_spec.js14
-rw-r--r--spec/frontend/contributors/store/actions_spec.js4
-rw-r--r--spec/frontend/create_cluster/components/cluster_form_dropdown_spec.js39
-rw-r--r--spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js2
-rw-r--r--spec/frontend/create_cluster/eks_cluster/services/aws_services_facade_spec.js5
-rw-r--r--spec/frontend/create_cluster/eks_cluster/store/actions_spec.js2
-rw-r--r--spec/frontend/create_cluster/eks_cluster/store/getters_spec.js2
-rw-r--r--spec/frontend/create_cluster/eks_cluster/store/mutations_spec.js2
-rw-r--r--spec/frontend/create_cluster/gke_cluster/gapi_loader_spec.js47
-rw-r--r--spec/frontend/create_cluster/gke_cluster/helpers.js8
-rw-r--r--spec/frontend/create_cluster/gke_cluster/stores/actions_spec.js12
-rw-r--r--spec/frontend/create_cluster/gke_cluster_namespace/gke_cluster_namespace_spec.js4
-rw-r--r--spec/frontend/create_cluster/store/cluster_dropdown/mutations_spec.js2
-rw-r--r--spec/frontend/create_item_dropdown_spec.js17
-rw-r--r--spec/frontend/create_merge_request_dropdown_spec.js4
-rw-r--r--spec/frontend/custom_metrics/components/custom_metrics_form_fields_spec.js170
-rw-r--r--spec/frontend/cycle_analytics/limit_warning_component_spec.js2
-rw-r--r--spec/frontend/deploy_freeze/components/deploy_freeze_modal_spec.js7
-rw-r--r--spec/frontend/deploy_freeze/components/deploy_freeze_table_spec.js2
-rw-r--r--spec/frontend/deploy_freeze/components/timezone_dropdown_spec.js2
-rw-r--r--spec/frontend/deploy_keys/components/app_spec.js20
-rw-r--r--spec/frontend/deploy_keys/components/key_spec.js13
-rw-r--r--spec/frontend/deploy_keys/components/keys_panel_spec.js11
-rw-r--r--spec/frontend/deprecated_jquery_dropdown_spec.js9
-rw-r--r--spec/frontend/design_management/components/design_notes/design_discussion_spec.js24
-rw-r--r--spec/frontend/design_management/components/design_notes/design_reply_form_spec.js2
-rw-r--r--spec/frontend/design_management/components/design_overlay_spec.js6
-rw-r--r--spec/frontend/design_management/components/design_presentation_spec.js42
-rw-r--r--spec/frontend/design_management/components/design_scaler_spec.js2
-rw-r--r--spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap6
-rw-r--r--spec/frontend/design_management/components/toolbar/index_spec.js2
-rw-r--r--spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap2
-rw-r--r--spec/frontend/design_management/components/upload/design_version_dropdown_spec.js4
-rw-r--r--spec/frontend/design_management/pages/index_spec.js284
-rw-r--r--spec/frontend/design_management/router_spec.js4
-rw-r--r--spec/frontend/design_management/utils/error_messages_spec.js4
-rw-r--r--spec/frontend/diffs/components/app_spec.js388
-rw-r--r--spec/frontend/diffs/components/commit_item_spec.js14
-rw-r--r--spec/frontend/diffs/components/compare_dropdown_layout_spec.js2
-rw-r--r--spec/frontend/diffs/components/compare_versions_spec.js92
-rw-r--r--spec/frontend/diffs/components/diff_content_spec.js2
-rw-r--r--spec/frontend/diffs/components/diff_discussions_spec.js9
-rw-r--r--spec/frontend/diffs/components/diff_expansion_cell_spec.js53
-rw-r--r--spec/frontend/diffs/components/diff_file_header_spec.js35
-rw-r--r--spec/frontend/diffs/components/diff_file_spec.js28
-rw-r--r--spec/frontend/diffs/components/diff_gutter_avatars_spec.js4
-rw-r--r--spec/frontend/diffs/components/diff_line_note_form_spec.js41
-rw-r--r--spec/frontend/diffs/components/diff_row_spec.js101
-rw-r--r--spec/frontend/diffs/components/diff_row_utils_spec.js16
-rw-r--r--spec/frontend/diffs/components/diff_stats_spec.js6
-rw-r--r--spec/frontend/diffs/components/diff_view_spec.js83
-rw-r--r--spec/frontend/diffs/components/file_row_stats_spec.js24
-rw-r--r--spec/frontend/diffs/components/hidden_files_warning_spec.js6
-rw-r--r--spec/frontend/diffs/components/image_diff_overlay_spec.js24
-rw-r--r--spec/frontend/diffs/components/no_changes_spec.js75
-rw-r--r--spec/frontend/diffs/components/parallel_diff_table_row_spec.js14
-rw-r--r--spec/frontend/diffs/components/settings_dropdown_spec.js16
-rw-r--r--spec/frontend/diffs/components/tree_list_spec.js16
-rw-r--r--spec/frontend/diffs/store/actions_spec.js123
-rw-r--r--spec/frontend/diffs/store/getters_spec.js33
-rw-r--r--spec/frontend/diffs/store/getters_versions_dropdowns_spec.js10
-rw-r--r--spec/frontend/diffs/store/mutations_spec.js17
-rw-r--r--spec/frontend/diffs/store/utils_spec.js79
-rw-r--r--spec/frontend/diffs/utils/diff_file_spec.js30
-rw-r--r--spec/frontend/diffs/utils/file_reviews_spec.js146
-rw-r--r--spec/frontend/diffs/utils/merge_request_spec.js31
-rw-r--r--spec/frontend/diffs/utils/uuids_spec.js2
-rw-r--r--spec/frontend/dirty_submit/dirty_submit_collection_spec.js6
-rw-r--r--spec/frontend/dirty_submit/dirty_submit_form_spec.js8
-rw-r--r--spec/frontend/droplab/drop_down_spec.js8
-rw-r--r--spec/frontend/droplab/plugins/ajax_filter_spec.js12
-rw-r--r--spec/frontend/editor/editor_ci_schema_ext_spec.js107
-rw-r--r--spec/frontend/editor/editor_lite_extension_base_spec.js4
-rw-r--r--spec/frontend/editor/editor_lite_spec.js14
-rw-r--r--spec/frontend/editor/editor_markdown_ext_spec.js4
-rw-r--r--spec/frontend/emoji/emoji_spec.js14
-rw-r--r--spec/frontend/environment.js4
-rw-r--r--spec/frontend/environments/canary_ingress_spec.js102
-rw-r--r--spec/frontend/environments/canary_update_modal_spec.js126
-rw-r--r--spec/frontend/environments/deploy_board_component_spec.js139
-rw-r--r--spec/frontend/environments/environment_actions_spec.js4
-rw-r--r--spec/frontend/environments/environment_monitoring_spec.js3
-rw-r--r--spec/frontend/environments/environment_table_spec.js128
-rw-r--r--spec/frontend/environments/environment_terminal_button_spec.js2
-rw-r--r--spec/frontend/environments/environments_app_spec.js32
-rw-r--r--spec/frontend/environments/environments_folder_view_spec.js59
-rw-r--r--spec/frontend/environments/environments_store_spec.js56
-rw-r--r--spec/frontend/environments/folder/environments_folder_view_spec.js4
-rw-r--r--spec/frontend/environments/mock_data.js41
-rw-r--r--spec/frontend/error_tracking/components/error_details_spec.js6
-rw-r--r--spec/frontend/error_tracking/components/error_tracking_actions_spec.js24
-rw-r--r--spec/frontend/error_tracking/components/error_tracking_list_spec.js20
-rw-r--r--spec/frontend/error_tracking/store/actions_spec.js4
-rw-r--r--spec/frontend/error_tracking/store/details/actions_spec.js6
-rw-r--r--spec/frontend/error_tracking/store/list/actions_spec.js19
-rw-r--r--spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js16
-rw-r--r--spec/frontend/error_tracking_settings/store/actions_spec.js22
-rw-r--r--spec/frontend/feature_flags/components/configure_feature_flags_modal_spec.js2
-rw-r--r--spec/frontend/feature_flags/components/edit_feature_flag_spec.js2
-rw-r--r--spec/frontend/feature_flags/components/environments_dropdown_spec.js15
-rw-r--r--spec/frontend/feature_flags/components/feature_flags_spec.js20
-rw-r--r--spec/frontend/feature_flags/components/feature_flags_table_spec.js23
-rw-r--r--spec/frontend/feature_flags/components/form_spec.js11
-rw-r--r--spec/frontend/feature_flags/components/new_environments_dropdown_spec.js2
-rw-r--r--spec/frontend/feature_flags/components/strategies/gitlab_user_list_spec.js2
-rw-r--r--spec/frontend/feature_flags/components/strategy_parameters_spec.js2
-rw-r--r--spec/frontend/feature_flags/components/user_lists_table_spec.js2
-rw-r--r--spec/frontend/feature_flags/store/edit/actions_spec.js24
-rw-r--r--spec/frontend/feature_flags/store/index/actions_spec.js58
-rw-r--r--spec/frontend/feature_flags/store/index/mutations_spec.js8
-rw-r--r--spec/frontend/feature_flags/store/new/actions_spec.js12
-rw-r--r--spec/frontend/feature_highlight/feature_highlight_helper_spec.js4
-rw-r--r--spec/frontend/filterable_list_spec.js2
-rw-r--r--spec/frontend/filtered_search/components/recent_searches_dropdown_content_spec.js13
-rw-r--r--spec/frontend/filtered_search/dropdown_utils_spec.js2
-rw-r--r--spec/frontend/filtered_search/filtered_search_manager_spec.js28
-rw-r--r--spec/frontend/filtered_search/filtered_search_token_keys_spec.js2
-rw-r--r--spec/frontend/filtered_search/filtered_search_visual_tokens_spec.js4
-rw-r--r--spec/frontend/filtered_search/issues_filtered_search_token_keys_spec.js25
-rw-r--r--spec/frontend/filtered_search/recent_searches_root_spec.js2
-rw-r--r--spec/frontend/filtered_search/services/recent_searches_service_spec.js20
-rw-r--r--spec/frontend/filtered_search/visual_token_value_spec.js38
-rw-r--r--spec/frontend/fixtures/static/create_item_dropdown.html29
-rw-r--r--spec/frontend/fixtures/static/deprecated_jquery_dropdown.html12
-rw-r--r--spec/frontend/fixtures/static/line_highlighter.html100
-rw-r--r--spec/frontend/fixtures/static/project_select_combo_button.html6
-rw-r--r--spec/frontend/flash_spec.js2
-rw-r--r--spec/frontend/frequent_items/components/app_spec.js14
-rw-r--r--spec/frontend/frequent_items/components/frequent_items_list_item_spec.js2
-rw-r--r--spec/frontend/frequent_items/components/frequent_items_search_input_spec.js4
-rw-r--r--spec/frontend/frequent_items/store/actions_spec.js28
-rw-r--r--spec/frontend/gfm_auto_complete_spec.js41
-rw-r--r--spec/frontend/gl_field_errors_spec.js32
-rw-r--r--spec/frontend/gl_form_spec.js24
-rw-r--r--spec/frontend/gpg_badges_spec.js10
-rw-r--r--spec/frontend/group_settings/components/shared_runners_form_spec.js2
-rw-r--r--spec/frontend/groups/components/app_spec.js13
-rw-r--r--spec/frontend/groups/components/group_item_spec.js8
-rw-r--r--spec/frontend/groups/components/invite_members_banner_spec.js10
-rw-r--r--spec/frontend/groups/components/item_stats_spec.js2
-rw-r--r--spec/frontend/groups/components/visibility_level_dropdown_spec.js9
-rw-r--r--spec/frontend/groups/members/components/app_spec.js22
-rw-r--r--spec/frontend/helpers/README.md3
-rw-r--r--spec/frontend/helpers/event_hub_factory_spec.js10
-rw-r--r--spec/frontend/helpers/help_page_helper_spec.js29
-rw-r--r--spec/frontend/helpers/monitor_helper_spec.js6
-rw-r--r--spec/frontend/helpers/startup_css_helper_spec.js19
-rw-r--r--spec/frontend/helpers/wait_for_promises.js1
-rw-r--r--spec/frontend/ide/components/activity_bar_spec.js4
-rw-r--r--spec/frontend/ide/components/branches/search_list_spec.js2
-rw-r--r--spec/frontend/ide/components/commit_sidebar/actions_spec.js6
-rw-r--r--spec/frontend/ide/components/commit_sidebar/empty_state_spec.js2
-rw-r--r--spec/frontend/ide/components/commit_sidebar/list_item_spec.js10
-rw-r--r--spec/frontend/ide/components/commit_sidebar/list_spec.js2
-rw-r--r--spec/frontend/ide/components/commit_sidebar/message_field_spec.js16
-rw-r--r--spec/frontend/ide/components/commit_sidebar/new_merge_request_option_spec.js14
-rw-r--r--spec/frontend/ide/components/commit_sidebar/radio_group_spec.js14
-rw-r--r--spec/frontend/ide/components/commit_sidebar/success_message_spec.js4
-rw-r--r--spec/frontend/ide/components/error_message_spec.js4
-rw-r--r--spec/frontend/ide/components/file_row_extra_spec.js35
-rw-r--r--spec/frontend/ide/components/file_templates/bar_spec.js11
-rw-r--r--spec/frontend/ide/components/file_templates/dropdown_spec.js8
-rw-r--r--spec/frontend/ide/components/ide_review_spec.js4
-rw-r--r--spec/frontend/ide/components/ide_sidebar_nav_spec.js7
-rw-r--r--spec/frontend/ide/components/ide_spec.js2
-rw-r--r--spec/frontend/ide/components/ide_status_bar_spec.js4
-rw-r--r--spec/frontend/ide/components/ide_status_mr_spec.js2
-rw-r--r--spec/frontend/ide/components/ide_tree_list_spec.js4
-rw-r--r--spec/frontend/ide/components/ide_tree_spec.js2
-rw-r--r--spec/frontend/ide/components/jobs/detail/description_spec.js2
-rw-r--r--spec/frontend/ide/components/jobs/detail/scroll_button_spec.js2
-rw-r--r--spec/frontend/ide/components/jobs/detail_spec.js6
-rw-r--r--spec/frontend/ide/components/jobs/item_spec.js4
-rw-r--r--spec/frontend/ide/components/jobs/list_spec.js4
-rw-r--r--spec/frontend/ide/components/jobs/stage_spec.js7
-rw-r--r--spec/frontend/ide/components/merge_requests/list_spec.js12
-rw-r--r--spec/frontend/ide/components/nav_dropdown_button_spec.js8
-rw-r--r--spec/frontend/ide/components/nav_dropdown_spec.js6
-rw-r--r--spec/frontend/ide/components/new_dropdown/button_spec.js4
-rw-r--r--spec/frontend/ide/components/new_dropdown/index_spec.js2
-rw-r--r--spec/frontend/ide/components/new_dropdown/modal_spec.js2
-rw-r--r--spec/frontend/ide/components/new_dropdown/upload_spec.js10
-rw-r--r--spec/frontend/ide/components/panes/collapsible_sidebar_spec.js4
-rw-r--r--spec/frontend/ide/components/panes/right_spec.js2
-rw-r--r--spec/frontend/ide/components/pipelines/list_spec.js22
-rw-r--r--spec/frontend/ide/components/preview/clientside_spec.js54
-rw-r--r--spec/frontend/ide/components/repo_commit_section_spec.js28
-rw-r--r--spec/frontend/ide/components/repo_editor_spec.js68
-rw-r--r--spec/frontend/ide/components/repo_tabs_spec.js2
-rw-r--r--spec/frontend/ide/components/resizable_panel_spec.js2
-rw-r--r--spec/frontend/ide/components/shared/tokened_input_spec.js6
-rw-r--r--spec/frontend/ide/components/terminal/session_spec.js4
-rw-r--r--spec/frontend/ide/components/terminal/terminal_controls_spec.js2
-rw-r--r--spec/frontend/ide/components/terminal/terminal_spec.js57
-rw-r--r--spec/frontend/ide/components/terminal_sync/terminal_sync_status_spec.js2
-rw-r--r--spec/frontend/ide/file_helpers.js2
-rw-r--r--spec/frontend/ide/helpers.js11
-rw-r--r--spec/frontend/ide/ide_router_extension_spec.js4
-rw-r--r--spec/frontend/ide/ide_router_spec.js2
-rw-r--r--spec/frontend/ide/lib/common/model_spec.js2
-rw-r--r--spec/frontend/ide/lib/create_diff_spec.js8
-rw-r--r--spec/frontend/ide/lib/create_file_diff_spec.js14
-rw-r--r--spec/frontend/ide/lib/diff/controller_spec.js4
-rw-r--r--spec/frontend/ide/lib/editor_spec.js2
-rw-r--r--spec/frontend/ide/lib/editorconfig/parser_spec.js6
-rw-r--r--spec/frontend/ide/lib/errors_spec.js2
-rw-r--r--spec/frontend/ide/lib/files_spec.js11
-rw-r--r--spec/frontend/ide/lib/languages/hcl_spec.js36
-rw-r--r--spec/frontend/ide/lib/mirror_spec.js4
-rw-r--r--spec/frontend/ide/services/index_spec.js33
-rw-r--r--spec/frontend/ide/stores/actions/file_spec.js38
-rw-r--r--spec/frontend/ide/stores/actions/merge_request_spec.js38
-rw-r--r--spec/frontend/ide/stores/actions/project_spec.js30
-rw-r--r--spec/frontend/ide/stores/actions/tree_spec.js29
-rw-r--r--spec/frontend/ide/stores/actions_spec.js90
-rw-r--r--spec/frontend/ide/stores/extend_spec.js4
-rw-r--r--spec/frontend/ide/stores/getters_spec.js2
-rw-r--r--spec/frontend/ide/stores/integration_spec.js2
-rw-r--r--spec/frontend/ide/stores/modules/branches/actions_spec.js12
-rw-r--r--spec/frontend/ide/stores/modules/branches/mutations_spec.js2
-rw-r--r--spec/frontend/ide/stores/modules/clientside/actions_spec.js2
-rw-r--r--spec/frontend/ide/stores/modules/commit/actions_spec.js95
-rw-r--r--spec/frontend/ide/stores/modules/commit/getters_spec.js2
-rw-r--r--spec/frontend/ide/stores/modules/editor/setup_spec.js11
-rw-r--r--spec/frontend/ide/stores/modules/file_templates/actions_spec.js22
-rw-r--r--spec/frontend/ide/stores/modules/merge_requests/actions_spec.js16
-rw-r--r--spec/frontend/ide/stores/modules/pane/actions_spec.js12
-rw-r--r--spec/frontend/ide/stores/modules/pipelines/actions_spec.js48
-rw-r--r--spec/frontend/ide/stores/modules/pipelines/mutations_spec.js2
-rw-r--r--spec/frontend/ide/stores/modules/terminal/actions/checks_spec.js2
-rw-r--r--spec/frontend/ide/stores/modules/terminal/actions/session_controls_spec.js2
-rw-r--r--spec/frontend/ide/stores/modules/terminal/actions/session_status_spec.js2
-rw-r--r--spec/frontend/ide/stores/modules/terminal_sync/actions_spec.js8
-rw-r--r--spec/frontend/ide/stores/mutations/file_spec.js4
-rw-r--r--spec/frontend/ide/stores/plugins/terminal_sync_spec.js9
-rw-r--r--spec/frontend/ide/sync_router_and_store_spec.js4
-rw-r--r--spec/frontend/ide/utils_spec.js2
-rw-r--r--spec/frontend/image_diff/helpers/comment_indicator_helper_spec.js2
-rw-r--r--spec/frontend/image_diff/helpers/utils_helper_spec.js2
-rw-r--r--spec/frontend/image_diff/image_diff_spec.js2
-rw-r--r--spec/frontend/image_diff/init_discussion_tab_spec.js4
-rw-r--r--spec/frontend/image_diff/replaced_image_diff_spec.js18
-rw-r--r--spec/frontend/import_entities/import_groups/components/import_table_row_spec.js6
-rw-r--r--spec/frontend/import_entities/import_groups/components/import_table_spec.js2
-rw-r--r--spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js8
-rw-r--r--spec/frontend/import_entities/import_groups/graphql/services/status_poller_spec.js18
-rw-r--r--spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js29
-rw-r--r--spec/frontend/import_entities/import_projects/components/provider_repo_table_row_spec.js2
-rw-r--r--spec/frontend/import_entities/import_projects/store/actions_spec.js2
-rw-r--r--spec/frontend/incidents/components/incidents_list_spec.js14
-rw-r--r--spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap1
-rw-r--r--spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap1
-rw-r--r--spec/frontend/incidents_settings/components/incidents_settings_tabs_spec.js13
-rw-r--r--spec/frontend/integrations/edit/components/dynamic_field_spec.js251
-rw-r--r--spec/frontend/integrations/edit/components/jira_issues_fields_spec.js2
-rw-r--r--spec/frontend/integrations/edit/components/jira_trigger_fields_spec.js8
-rw-r--r--spec/frontend/integrations/edit/components/trigger_fields_spec.js2
-rw-r--r--spec/frontend/invite_member/components/invite_member_modal_spec.js10
-rw-r--r--spec/frontend/invite_members/components/invite_members_modal_spec.js227
-rw-r--r--spec/frontend/invite_members/components/members_token_select_spec.js39
-rw-r--r--spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js17
-rw-r--r--spec/frontend/issuable/related_issues/components/related_issues_block_spec.js7
-rw-r--r--spec/frontend/issuable/related_issues/components/related_issues_list_spec.js7
-rw-r--r--spec/frontend/issuable/related_issues/components/related_issues_root_spec.js4
-rw-r--r--spec/frontend/issuable_list/components/issuable_item_spec.js9
-rw-r--r--spec/frontend/issuable_show/components/issuable_body_spec.js7
-rw-r--r--spec/frontend/issuable_show/components/issuable_edit_form_spec.js2
-rw-r--r--spec/frontend/issuable_show/components/issuable_header_spec.js2
-rw-r--r--spec/frontend/issuable_show/components/issuable_show_root_spec.js2
-rw-r--r--spec/frontend/issuable_spec.js2
-rw-r--r--spec/frontend/issuable_suggestions/components/app_spec.js14
-rw-r--r--spec/frontend/issuable_suggestions/components/item_spec.js2
-rw-r--r--spec/frontend/issuable_suggestions/mock_data.js2
-rw-r--r--spec/frontend/issue_show/components/app_spec.js12
-rw-r--r--spec/frontend/issue_show/components/description_spec.js2
-rw-r--r--spec/frontend/issue_show/components/edit_actions_spec.js12
-rw-r--r--spec/frontend/issue_show/components/fields/description_spec.js4
-rw-r--r--spec/frontend/issue_show/components/form_spec.js2
-rw-r--r--spec/frontend/issue_show/components/header_actions_spec.js9
-rw-r--r--spec/frontend/issue_show/components/incidents/highlight_bar_spec.js2
-rw-r--r--spec/frontend/issue_show/components/incidents/incident_tabs_spec.js75
-rw-r--r--spec/frontend/issue_show/components/pinned_links_spec.js14
-rw-r--r--spec/frontend/issue_show/issue_spec.js2
-rw-r--r--spec/frontend/issue_spec.js2
-rw-r--r--spec/frontend/issues_list/components/issuable_spec.js16
-rw-r--r--spec/frontend/issues_list/components/issuables_list_app_spec.js16
-rw-r--r--spec/frontend/jira_connect/api_spec.js107
-rw-r--r--spec/frontend/jira_connect/components/app_spec.js80
-rw-r--r--spec/frontend/jira_connect/components/groups_list_item_spec.js46
-rw-r--r--spec/frontend/jira_connect/components/groups_list_spec.js71
-rw-r--r--spec/frontend/jira_connect/mock_data.js15
-rw-r--r--spec/frontend/jira_connect/store/mutations_spec.js18
-rw-r--r--spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap164
-rw-r--r--spec/frontend/jira_import/components/jira_import_form_spec.js2
-rw-r--r--spec/frontend/jira_import/components/jira_import_progress_spec.js2
-rw-r--r--spec/frontend/jira_import/components/jira_import_setup_spec.js2
-rw-r--r--spec/frontend/jobs/components/artifacts_block_spec.js4
-rw-r--r--spec/frontend/jobs/components/commit_block_spec.js2
-rw-r--r--spec/frontend/jobs/components/empty_state_spec.js14
-rw-r--r--spec/frontend/jobs/components/erased_block_spec.js2
-rw-r--r--spec/frontend/jobs/components/job_app_spec.js22
-rw-r--r--spec/frontend/jobs/components/job_container_item_spec.js2
-rw-r--r--spec/frontend/jobs/components/job_log_controllers_spec.js2
-rw-r--r--spec/frontend/jobs/components/job_sidebar_details_container_spec.js9
-rw-r--r--spec/frontend/jobs/components/jobs_container_spec.js2
-rw-r--r--spec/frontend/jobs/components/log/line_spec.js5
-rw-r--r--spec/frontend/jobs/components/log/log_spec.js6
-rw-r--r--spec/frontend/jobs/components/manual_variables_form_spec.js8
-rw-r--r--spec/frontend/jobs/components/sidebar_spec.js2
-rw-r--r--spec/frontend/jobs/components/stages_dropdown_spec.js2
-rw-r--r--spec/frontend/jobs/components/stuck_block_spec.js2
-rw-r--r--spec/frontend/jobs/components/trigger_block_spec.js4
-rw-r--r--spec/frontend/jobs/store/actions_spec.js56
-rw-r--r--spec/frontend/jobs/store/helpers.js2
-rw-r--r--spec/frontend/lazy_loader_spec.js6
-rw-r--r--spec/frontend/lib/dompurify_spec.js10
-rw-r--r--spec/frontend/lib/utils/ajax_cache_spec.js8
-rw-r--r--spec/frontend/lib/utils/apollo_startup_js_link_spec.js72
-rw-r--r--spec/frontend/lib/utils/chart_utils_spec.js6
-rw-r--r--spec/frontend/lib/utils/common_utils_spec.js140
-rw-r--r--spec/frontend/lib/utils/datetime_range_spec.js2
-rw-r--r--spec/frontend/lib/utils/datetime_utility_spec.js168
-rw-r--r--spec/frontend/lib/utils/dom_utils_spec.js4
-rw-r--r--spec/frontend/lib/utils/favicon_ci_spec.js50
-rw-r--r--spec/frontend/lib/utils/favicon_spec.js39
-rw-r--r--spec/frontend/lib/utils/forms_spec.js8
-rw-r--r--spec/frontend/lib/utils/highlight_spec.js4
-rw-r--r--spec/frontend/lib/utils/icon_utils_spec.js12
-rw-r--r--spec/frontend/lib/utils/mock_data.js3
-rw-r--r--spec/frontend/lib/utils/poll_spec.js20
-rw-r--r--spec/frontend/lib/utils/poll_until_complete_spec.js2
-rw-r--r--spec/frontend/lib/utils/text_utility_spec.js4
-rw-r--r--spec/frontend/lib/utils/url_utility_spec.js18
-rw-r--r--spec/frontend/lib/utils/users_cache_spec.js60
-rw-r--r--spec/frontend/line_highlighter_spec.js9
-rw-r--r--spec/frontend/logs/components/environment_logs_spec.js12
-rw-r--r--spec/frontend/logs/components/log_advanced_filters_spec.js6
-rw-r--r--spec/frontend/logs/components/log_control_buttons_spec.js2
-rw-r--r--spec/frontend/logs/components/log_simple_filters_spec.js2
-rw-r--r--spec/frontend/logs/stores/actions_spec.js2
-rw-r--r--spec/frontend/logs/stores/mutations_spec.js2
-rw-r--r--spec/frontend/matchers.js9
-rw-r--r--spec/frontend/matchers_spec.js4
-rw-r--r--spec/frontend/members/components/action_buttons/access_request_action_buttons_spec.js4
-rw-r--r--spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js2
-rw-r--r--spec/frontend/members/components/action_buttons/resend_invite_button_spec.js8
-rw-r--r--spec/frontend/members/components/action_buttons/user_action_buttons_spec.js4
-rw-r--r--spec/frontend/members/components/avatars/user_avatar_spec.js2
-rw-r--r--spec/frontend/members/components/filter_sort/filter_sort_container_spec.js2
-rw-r--r--spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js7
-rw-r--r--spec/frontend/members/components/filter_sort/sort_dropdown_spec.js8
-rw-r--r--spec/frontend/members/components/modals/leave_modal_spec.js4
-rw-r--r--spec/frontend/members/components/modals/remove_group_link_modal_spec.js2
-rw-r--r--spec/frontend/members/components/table/created_at_spec.js2
-rw-r--r--spec/frontend/members/components/table/expiration_datepicker_spec.js10
-rw-r--r--spec/frontend/members/components/table/expires_at_spec.js4
-rw-r--r--spec/frontend/members/components/table/member_avatar_spec.js2
-rw-r--r--spec/frontend/members/components/table/member_source_spec.js6
-rw-r--r--spec/frontend/members/components/table/members_table_spec.js51
-rw-r--r--spec/frontend/members/components/table/role_dropdown_spec.js8
-rw-r--r--spec/frontend/members/mock_data.js2
-rw-r--r--spec/frontend/members/utils_spec.js2
-rw-r--r--spec/frontend/merge_request/components/status_box_spec.js81
-rw-r--r--spec/frontend/merge_request_spec.js18
-rw-r--r--spec/frontend/merge_request_tabs_spec.js5
-rw-r--r--spec/frontend/milestones/milestone_combobox_spec.js126
-rw-r--r--spec/frontend/mini_pipeline_graph_dropdown_spec.js6
-rw-r--r--spec/frontend/mocks/ce/lib/utils/axios_utils.js14
-rw-r--r--spec/frontend/mocks/mocks_helper.js6
-rw-r--r--spec/frontend/mocks/mocks_helper_spec.js10
-rw-r--r--spec/frontend/monitoring/alert_widget_spec.js29
-rw-r--r--spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap1
-rw-r--r--spec/frontend/monitoring/components/alert_widget_form_spec.js4
-rw-r--r--spec/frontend/monitoring/components/charts/annotations_spec.js6
-rw-r--r--spec/frontend/monitoring/components/charts/anomaly_spec.js4
-rw-r--r--spec/frontend/monitoring/components/charts/bar_spec.js2
-rw-r--r--spec/frontend/monitoring/components/charts/column_spec.js4
-rw-r--r--spec/frontend/monitoring/components/charts/gauge_spec.js4
-rw-r--r--spec/frontend/monitoring/components/charts/options_spec.js12
-rw-r--r--spec/frontend/monitoring/components/charts/stacked_column_spec.js6
-rw-r--r--spec/frontend/monitoring/components/charts/time_series_spec.js22
-rw-r--r--spec/frontend/monitoring/components/dashboard_actions_menu_spec.js8
-rw-r--r--spec/frontend/monitoring/components/dashboard_header_spec.js12
-rw-r--r--spec/frontend/monitoring/components/dashboard_panel_spec.js12
-rw-r--r--spec/frontend/monitoring/components/dashboard_spec.js38
-rw-r--r--spec/frontend/monitoring/components/dashboards_dropdown_spec.js22
-rw-r--r--spec/frontend/monitoring/components/duplicate_dashboard_form_spec.js52
-rw-r--r--spec/frontend/monitoring/components/duplicate_dashboard_modal_spec.js75
-rw-r--r--spec/frontend/monitoring/components/embeds/embed_group_spec.js4
-rw-r--r--spec/frontend/monitoring/components/graph_group_spec.js2
-rw-r--r--spec/frontend/monitoring/components/group_empty_state_spec.js2
-rw-r--r--spec/frontend/monitoring/components/links_section_spec.js2
-rw-r--r--spec/frontend/monitoring/components/refresh_button_spec.js4
-rw-r--r--spec/frontend/monitoring/components/variables/dropdown_field_spec.js11
-rw-r--r--spec/frontend/monitoring/fixture_data.js4
-rw-r--r--spec/frontend/monitoring/graph_data.js4
-rw-r--r--spec/frontend/monitoring/mock_data.js3
-rw-r--r--spec/frontend/monitoring/requests/index_spec.js20
-rw-r--r--spec/frontend/monitoring/router_spec.js14
-rw-r--r--spec/frontend/monitoring/store/actions_spec.js44
-rw-r--r--spec/frontend/monitoring/store/getters_spec.js2
-rw-r--r--spec/frontend/monitoring/store/mutations_spec.js7
-rw-r--r--spec/frontend/monitoring/store/utils_spec.js26
-rw-r--r--spec/frontend/monitoring/store_utils.js8
-rw-r--r--spec/frontend/monitoring/utils_spec.js6
-rw-r--r--spec/frontend/mr_popover/index_spec.js2
-rw-r--r--spec/frontend/new_branch_spec.js6
-rw-r--r--spec/frontend/notebook/cells/code_spec.js6
-rw-r--r--spec/frontend/notebook/cells/output/index_spec.js54
-rw-r--r--spec/frontend/notebook/cells/output/latex_spec.js40
-rw-r--r--spec/frontend/notebook/cells/prompt_spec.js4
-rw-r--r--spec/frontend/notebook/index_spec.js6
-rw-r--r--spec/frontend/notes/components/__snapshots__/discussion_jump_to_next_button_spec.js.snap21
-rw-r--r--spec/frontend/notes/components/comment_field_layout_spec.js137
-rw-r--r--spec/frontend/notes/components/comment_form_spec.js15
-rw-r--r--spec/frontend/notes/components/diff_discussion_header_spec.js10
-rw-r--r--spec/frontend/notes/components/discussion_actions_spec.js33
-rw-r--r--spec/frontend/notes/components/discussion_counter_spec.js2
-rw-r--r--spec/frontend/notes/components/discussion_filter_note_spec.js10
-rw-r--r--spec/frontend/notes/components/discussion_filter_spec.js26
-rw-r--r--spec/frontend/notes/components/discussion_jump_to_next_button_spec.js43
-rw-r--r--spec/frontend/notes/components/discussion_notes_spec.js5
-rw-r--r--spec/frontend/notes/components/discussion_resolve_button_spec.js2
-rw-r--r--spec/frontend/notes/components/email_participants_warning_spec.js70
-rw-r--r--spec/frontend/notes/components/multiline_comment_form_spec.js89
-rw-r--r--spec/frontend/notes/components/multiline_comment_utils_spec.js4
-rw-r--r--spec/frontend/notes/components/note_actions_spec.js28
-rw-r--r--spec/frontend/notes/components/note_attachment_spec.js2
-rw-r--r--spec/frontend/notes/components/note_awards_list_spec.js8
-rw-r--r--spec/frontend/notes/components/note_body_spec.js2
-rw-r--r--spec/frontend/notes/components/note_form_spec.js86
-rw-r--r--spec/frontend/notes/components/note_header_spec.js4
-rw-r--r--spec/frontend/notes/components/noteable_discussion_spec.js80
-rw-r--r--spec/frontend/notes/components/noteable_note_spec.js4
-rw-r--r--spec/frontend/notes/components/notes_app_spec.js44
-rw-r--r--spec/frontend/notes/components/toggle_replies_widget_spec.js2
-rw-r--r--spec/frontend/notes/helpers.js2
-rw-r--r--spec/frontend/notes/mixins/discussion_navigation_spec.js2
-rw-r--r--spec/frontend/notes/old_notes_spec.js74
-rw-r--r--spec/frontend/notes/stores/actions_spec.js130
-rw-r--r--spec/frontend/notes/stores/getters_spec.js25
-rw-r--r--spec/frontend/notes/stores/mutation_spec.js8
-rw-r--r--spec/frontend/oauth_remember_me_spec.js6
-rw-r--r--spec/frontend/operation_settings/components/metrics_settings_spec.js1
-rw-r--r--spec/frontend/packages/details/components/__snapshots__/package_title_spec.js.snap8
-rw-r--r--spec/frontend/packages/details/components/additional_metadata_spec.js4
-rw-r--r--spec/frontend/packages/details/components/conan_installation_spec.js12
-rw-r--r--spec/frontend/packages/details/components/maven_installation_spec.js18
-rw-r--r--spec/frontend/packages/details/components/npm_installation_spec.js24
-rw-r--r--spec/frontend/packages/details/components/nuget_installation_spec.js12
-rw-r--r--spec/frontend/packages/details/components/package_files_spec.js1
-rw-r--r--spec/frontend/packages/details/components/package_history_spec.js10
-rw-r--r--spec/frontend/packages/details/store/actions_spec.js20
-rw-r--r--spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap16
-rw-r--r--spec/frontend/packages/list/components/packages_sort_spec.js2
-rw-r--r--spec/frontend/packages/list/stores/actions_spec.js34
-rw-r--r--spec/frontend/packages/shared/components/package_path_spec.js8
-rw-r--r--spec/frontend/packages/shared/components/package_tags_spec.js12
-rw-r--r--spec/frontend/packages/shared/utils_spec.js2
-rw-r--r--spec/frontend/pager_spec.js12
-rw-r--r--spec/frontend/pages/admin/abuse_reports/abuse_reports_spec.js4
-rw-r--r--spec/frontend/pages/admin/application_settings/account_and_limits_spec.js2
-rw-r--r--spec/frontend/pages/admin/jobs/index/components/stop_jobs_modal_spec.js12
-rw-r--r--spec/frontend/pages/admin/users/components/delete_user_modal_spec.js6
-rw-r--r--spec/frontend/pages/admin/users/new/index_spec.js4
-rw-r--r--spec/frontend/pages/dashboard/todos/index/todos_spec.js8
-rw-r--r--spec/frontend/pages/import/bitbucket_server/components/bitbucket_server_status_table_spec.js2
-rw-r--r--spec/frontend/pages/labels/components/promote_label_modal_spec.js12
-rw-r--r--spec/frontend/pages/milestones/shared/components/delete_milestone_modal_spec.js12
-rw-r--r--spec/frontend/pages/milestones/shared/components/promote_milestone_modal_spec.js120
-rw-r--r--spec/frontend/pages/profiles/show/emoji_menu_spec.js4
-rw-r--r--spec/frontend/pages/projects/edit/mount_search_settings_spec.js25
-rw-r--r--spec/frontend/pages/projects/forks/new/components/fork_groups_list_item_spec.js4
-rw-r--r--spec/frontend/pages/projects/forks/new/components/fork_groups_list_spec.js19
-rw-r--r--spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap1
-rw-r--r--spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js4
-rw-r--r--spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js14
-rw-r--r--spec/frontend/pages/projects/shared/permissions/components/project_feature_settings_spec.js29
-rw-r--r--spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js49
-rw-r--r--spec/frontend/pages/search/show/refresh_counts_spec.js7
-rw-r--r--spec/frontend/pages/sessions/new/preserve_url_fragment_spec.js6
-rw-r--r--spec/frontend/pages/sessions/new/signin_tabs_memoizer_spec.js2
-rw-r--r--spec/frontend/pdf/index_spec.js6
-rw-r--r--spec/frontend/pdf/page_spec.js2
-rw-r--r--spec/frontend/performance_bar/components/detailed_metric_spec.js8
-rw-r--r--spec/frontend/performance_bar/index_spec.js11
-rw-r--r--spec/frontend/performance_bar/stores/performance_bar_store_spec.js2
-rw-r--r--spec/frontend/pipeline_editor/components/commit/commit_form_spec.js4
-rw-r--r--spec/frontend/pipeline_editor/components/info/validation_segment_spec.js113
-rw-r--r--spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js33
-rw-r--r--spec/frontend/pipeline_editor/components/lint/ci_lint_spec.js81
-rw-r--r--spec/frontend/pipeline_editor/components/text_editor_spec.js67
-rw-r--r--spec/frontend/pipeline_editor/components/ui/editor_tab_spec.js91
-rw-r--r--spec/frontend/pipeline_editor/graphql/__snapshots__/resolvers_spec.js.snap4
-rw-r--r--spec/frontend/pipeline_editor/graphql/resolvers_spec.js6
-rw-r--r--spec/frontend/pipeline_editor/mock_data.js129
-rw-r--r--spec/frontend/pipeline_editor/pipeline_editor_app_spec.js235
-rw-r--r--spec/frontend/pipeline_new/components/pipeline_new_form_spec.js31
-rw-r--r--spec/frontend/pipelines/blank_state_spec.js2
-rw-r--r--spec/frontend/pipelines/components/dag/dag_annotations_spec.js6
-rw-r--r--spec/frontend/pipelines/components/dag/dag_graph_spec.js26
-rw-r--r--spec/frontend/pipelines/components/dag/dag_spec.js2
-rw-r--r--spec/frontend/pipelines/components/dag/parsing_utils_spec.js5
-rw-r--r--spec/frontend/pipelines/components/pipelines_filtered_search_spec.js6
-rw-r--r--spec/frontend/pipelines/empty_state_spec.js105
-rw-r--r--spec/frontend/pipelines/graph/action_component_spec.js6
-rw-r--r--spec/frontend/pipelines/graph/graph_component_legacy_spec.js80
-rw-r--r--spec/frontend/pipelines/graph/graph_component_spec.js45
-rw-r--r--spec/frontend/pipelines/graph/graph_component_wrapper_spec.js4
-rw-r--r--spec/frontend/pipelines/graph/job_item_spec.js4
-rw-r--r--spec/frontend/pipelines/graph/linked_pipelines_column_spec.js4
-rw-r--r--spec/frontend/pipelines/graph/mock_data.js76
-rw-r--r--spec/frontend/pipelines/graph/stage_column_component_legacy_spec.js7
-rw-r--r--spec/frontend/pipelines/graph/stage_column_component_spec.js66
-rw-r--r--spec/frontend/pipelines/nav_controls_spec.js2
-rw-r--r--spec/frontend/pipelines/pipeline_graph/mock_data.js10
-rw-r--r--spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js18
-rw-r--r--spec/frontend/pipelines/pipeline_graph/utils_spec.js44
-rw-r--r--spec/frontend/pipelines/pipeline_url_spec.js2
-rw-r--r--spec/frontend/pipelines/pipelines_actions_spec.js28
-rw-r--r--spec/frontend/pipelines/pipelines_artifacts_spec.js16
-rw-r--r--spec/frontend/pipelines/pipelines_spec.js98
-rw-r--r--spec/frontend/pipelines/pipelines_store_spec.js5
-rw-r--r--spec/frontend/pipelines/pipelines_table_row_spec.js21
-rw-r--r--spec/frontend/pipelines/pipelines_table_spec.js2
-rw-r--r--spec/frontend/pipelines/shared/links_layer_spec.js99
-rw-r--r--spec/frontend/pipelines/stage_spec.js77
-rw-r--r--spec/frontend/pipelines/stores/pipeline_store_spec.js4
-rw-r--r--spec/frontend/pipelines/test_reports/stores/actions_spec.js22
-rw-r--r--spec/frontend/pipelines/test_reports/stores/getters_spec.js4
-rw-r--r--spec/frontend/pipelines/test_reports/test_case_details_spec.js2
-rw-r--r--spec/frontend/pipelines/test_reports/test_suite_table_spec.js6
-rw-r--r--spec/frontend/pipelines/test_reports/test_summary_spec.js2
-rw-r--r--spec/frontend/pipelines/tokens/pipeline_trigger_author_token_spec.js2
-rw-r--r--spec/frontend/pipelines/unwrapping_utils_spec.js26
-rw-r--r--spec/frontend/profile/account/components/delete_account_modal_spec.js20
-rw-r--r--spec/frontend/profile/account/components/update_username_spec.js2
-rw-r--r--spec/frontend/project_find_file_spec.js11
-rw-r--r--spec/frontend/projects/commit/components/branches_dropdown_spec.js166
-rw-r--r--spec/frontend/projects/commit/components/form_modal_spec.js155
-rw-r--r--spec/frontend/projects/commit/components/form_trigger_spec.js44
-rw-r--r--spec/frontend/projects/commit/mock_data.js27
-rw-r--r--spec/frontend/projects/commit/store/actions_spec.js111
-rw-r--r--spec/frontend/projects/commit/store/getters_spec.js21
-rw-r--r--spec/frontend/projects/commit/store/mutations_spec.js57
-rw-r--r--spec/frontend/projects/commits/components/author_select_spec.js26
-rw-r--r--spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap1
-rw-r--r--spec/frontend/projects/components/shared/__snapshots__/delete_button_spec.js.snap67
-rw-r--r--spec/frontend/projects/components/shared/delete_button_spec.js16
-rw-r--r--spec/frontend/projects/experiment_new_project_creation/components/app_spec.js24
-rw-r--r--spec/frontend/projects/experiment_new_project_creation/components/legacy_container_spec.js2
-rw-r--r--spec/frontend/projects/experiment_new_project_creation/components/welcome_spec.js2
-rw-r--r--spec/frontend/projects/pipelines/charts/components/__snapshots__/ci_cd_analytics_area_chart_spec.js.snap (renamed from spec/frontend/projects/pipelines/charts/components/__snapshots__/pipelines_area_chart_spec.js.snap)2
-rw-r--r--spec/frontend/projects/pipelines/charts/components/__snapshots__/statistics_list_spec.js.snap9
-rw-r--r--spec/frontend/projects/pipelines/charts/components/app_spec.js134
-rw-r--r--spec/frontend/projects/pipelines/charts/components/ci_cd_analytics_area_chart_spec.js (renamed from spec/frontend/projects/pipelines/charts/components/pipelines_area_chart_spec.js)15
-rw-r--r--spec/frontend/projects/pipelines/charts/components/pipeline_charts_spec.js (renamed from spec/frontend/projects/pipelines/charts/components/app_legacy_spec.js)36
-rw-r--r--spec/frontend/projects/pipelines/charts/mock_data.js16
-rw-r--r--spec/frontend/projects/project_import_gitlab_project_spec.js2
-rw-r--r--spec/frontend/projects/project_new_spec.js7
-rw-r--r--spec/frontend/projects/projects_filterable_list_spec.js2
-rw-r--r--spec/frontend/projects/settings/access_dropdown_spec.js10
-rw-r--r--spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js4
-rw-r--r--spec/frontend/projects/settings_service_desk/components/service_desk_setting_spec.js4
-rw-r--r--spec/frontend/projects/settings_service_desk/services/service_desk_service_spec.js8
-rw-r--r--spec/frontend/prometheus_metrics/prometheus_metrics_spec.js13
-rw-r--r--spec/frontend/ref/components/ref_selector_spec.js24
-rw-r--r--spec/frontend/registry/explorer/components/__snapshots__/registry_breadcrumb_spec.js.snap87
-rw-r--r--spec/frontend/registry/explorer/components/delete_button_spec.js2
-rw-r--r--spec/frontend/registry/explorer/components/details_page/delete_alert_spec.js2
-rw-r--r--spec/frontend/registry/explorer/components/details_page/delete_modal_spec.js2
-rw-r--r--spec/frontend/registry/explorer/components/details_page/details_header_spec.js121
-rw-r--r--spec/frontend/registry/explorer/components/details_page/tags_list_row_spec.js6
-rw-r--r--spec/frontend/registry/explorer/components/details_page/tags_list_spec.js24
-rw-r--r--spec/frontend/registry/explorer/components/details_page/tags_loader_spec.js6
-rw-r--r--spec/frontend/registry/explorer/components/list_page/__snapshots__/project_empty_state_spec.js.snap2
-rw-r--r--spec/frontend/registry/explorer/components/list_page/image_list_row_spec.js21
-rw-r--r--spec/frontend/registry/explorer/components/list_page/image_list_spec.js20
-rw-r--r--spec/frontend/registry/explorer/components/list_page/registry_header_spec.js7
-rw-r--r--spec/frontend/registry/explorer/components/registry_breadcrumb_spec.js93
-rw-r--r--spec/frontend/registry/explorer/mock_data.js32
-rw-r--r--spec/frontend/registry/explorer/pages/details_spec.js27
-rw-r--r--spec/frontend/registry/explorer/pages/list_spec.js100
-rw-r--r--spec/frontend/registry/settings/components/__snapshots__/settings_form_spec.js.snap4
-rw-r--r--spec/frontend/registry/settings/components/expiration_dropdown_spec.js7
-rw-r--r--spec/frontend/registry/settings/components/expiration_input_spec.js2
-rw-r--r--spec/frontend/registry/settings/components/expiration_run_text_spec.js2
-rw-r--r--spec/frontend/registry/settings/components/expiration_toggle_spec.js2
-rw-r--r--spec/frontend/registry/settings/components/registry_settings_app_spec.js4
-rw-r--r--spec/frontend/registry/settings/components/settings_form_spec.js4
-rw-r--r--spec/frontend/registry/settings/mock_data.js2
-rw-r--r--spec/frontend/registry/settings/utils_spec.js5
-rw-r--r--spec/frontend/related_merge_requests/components/related_merge_requests_spec.js7
-rw-r--r--spec/frontend/related_merge_requests/store/actions_spec.js12
-rw-r--r--spec/frontend/releases/__snapshots__/util_spec.js.snap48
-rw-r--r--spec/frontend/releases/components/app_edit_new_spec.js6
-rw-r--r--spec/frontend/releases/components/app_index_spec.js34
-rw-r--r--spec/frontend/releases/components/app_show_spec.js2
-rw-r--r--spec/frontend/releases/components/asset_links_form_spec.js6
-rw-r--r--spec/frontend/releases/components/issuable_stats_spec.js2
-rw-r--r--spec/frontend/releases/components/release_block_assets_spec.js14
-rw-r--r--spec/frontend/releases/components/release_block_milestone_info_spec.js32
-rw-r--r--spec/frontend/releases/components/releases_pagination_graphql_spec.js4
-rw-r--r--spec/frontend/releases/components/releases_pagination_rest_spec.js2
-rw-r--r--spec/frontend/releases/components/releases_pagination_spec.js2
-rw-r--r--spec/frontend/releases/components/tag_field_new_spec.js2
-rw-r--r--spec/frontend/releases/stores/modules/detail/actions_spec.js4
-rw-r--r--spec/frontend/releases/stores/modules/list/helpers.js2
-rw-r--r--spec/frontend/reports/accessibility_report/store/actions_spec.js12
-rw-r--r--spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js12
-rw-r--r--spec/frontend/reports/codequality_report/store/actions_spec.js12
-rw-r--r--spec/frontend/reports/codequality_report/store/utils/codequality_comparison_spec.js6
-rw-r--r--spec/frontend/reports/components/grouped_issues_list_spec.js6
-rw-r--r--spec/frontend/reports/components/grouped_test_reports_app_spec.js70
-rw-r--r--spec/frontend/reports/components/issue_status_icon_spec.js2
-rw-r--r--spec/frontend/reports/components/modal_open_name_spec.js47
-rw-r--r--spec/frontend/reports/components/modal_spec.js57
-rw-r--r--spec/frontend/reports/components/report_link_spec.js2
-rw-r--r--spec/frontend/reports/components/report_section_spec.js18
-rw-r--r--spec/frontend/reports/components/test_issue_body_spec.js4
-rw-r--r--spec/frontend/reports/store/actions_spec.js30
-rw-r--r--spec/frontend/reports/store/mutations_spec.js27
-rw-r--r--spec/frontend/repository/components/breadcrumbs_spec.js16
-rw-r--r--spec/frontend/repository/components/directory_download_links_spec.js5
-rw-r--r--spec/frontend/repository/components/last_commit_spec.js10
-rw-r--r--spec/frontend/repository/utils/dom_spec.js2
-rw-r--r--spec/frontend/right_sidebar_spec.js6
-rw-r--r--spec/frontend/search/index_spec.js2
-rw-r--r--spec/frontend/search/sidebar/components/app_spec.js2
-rw-r--r--spec/frontend/search/sidebar/components/confidentiality_filter_spec.js2
-rw-r--r--spec/frontend/search/sidebar/components/radio_filter_spec.js6
-rw-r--r--spec/frontend/search/sidebar/components/status_filter_spec.js2
-rw-r--r--spec/frontend/search/topbar/components/searchable_dropdown_spec.js4
-rw-r--r--spec/frontend/search_autocomplete_spec.js10
-rw-r--r--spec/frontend/search_settings/components/search_settings_spec.js106
-rw-r--r--spec/frontend/search_settings/index_spec.js36
-rw-r--r--spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap1
-rw-r--r--spec/frontend/self_monitor/components/self_monitor_form_spec.js5
-rw-r--r--spec/frontend/self_monitor/store/actions_spec.js20
-rw-r--r--spec/frontend/sentry/sentry_config_spec.js4
-rw-r--r--spec/frontend/sentry_error_stack_trace/components/sentry_error_stack_trace_spec.js5
-rw-r--r--spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap2
-rw-r--r--spec/frontend/serverless/components/area_spec.js4
-rw-r--r--spec/frontend/serverless/components/function_row_spec.js2
-rw-r--r--spec/frontend/serverless/components/missing_prometheus_spec.js2
-rw-r--r--spec/frontend/serverless/components/pod_box_spec.js2
-rw-r--r--spec/frontend/serverless/components/url_spec.js2
-rw-r--r--spec/frontend/serverless/mock_data.js5
-rw-r--r--spec/frontend/serverless/store/actions_spec.js12
-rw-r--r--spec/frontend/serverless/utils.js6
-rw-r--r--spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js24
-rw-r--r--spec/frontend/shared/popover_spec.js20
-rw-r--r--spec/frontend/shortcuts_spec.js6
-rw-r--r--spec/frontend/sidebar/assignee_title_spec.js2
-rw-r--r--spec/frontend/sidebar/assignees_spec.js2
-rw-r--r--spec/frontend/sidebar/components/assignees/collapsed_assignee_list_spec.js2
-rw-r--r--spec/frontend/sidebar/components/assignees/collapsed_assignee_spec.js7
-rw-r--r--spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js2
-rw-r--r--spec/frontend/sidebar/components/copy_email_to_clipboard_spec.js22
-rw-r--r--spec/frontend/sidebar/components/severity/severity_spec.js2
-rw-r--r--spec/frontend/sidebar/components/severity/sidebar_severity_spec.js22
-rw-r--r--spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js12
-rw-r--r--spec/frontend/sidebar/confidential/edit_form_spec.js2
-rw-r--r--spec/frontend/sidebar/lock/edit_form_buttons_spec.js2
-rw-r--r--spec/frontend/sidebar/lock/edit_form_spec.js2
-rw-r--r--spec/frontend/sidebar/lock/issuable_lock_form_spec.js4
-rw-r--r--spec/frontend/sidebar/participants_spec.js2
-rw-r--r--spec/frontend/sidebar/reviewer_title_spec.js23
-rw-r--r--spec/frontend/sidebar/reviewers_spec.js2
-rw-r--r--spec/frontend/sidebar/sidebar_assignees_spec.js2
-rw-r--r--spec/frontend/sidebar/sidebar_mediator_spec.js2
-rw-r--r--spec/frontend/sidebar/sidebar_move_issue_spec.js18
-rw-r--r--spec/frontend/sidebar/sidebar_store_spec.js2
-rw-r--r--spec/frontend/sidebar/subscriptions_spec.js2
-rw-r--r--spec/frontend/sidebar/todo_spec.js7
-rw-r--r--spec/frontend/snippet/collapsible_input_spec.js12
-rw-r--r--spec/frontend/snippets/components/edit_spec.js14
-rw-r--r--spec/frontend/snippets/components/embed_dropdown_spec.js2
-rw-r--r--spec/frontend/snippets/components/snippet_blob_actions_edit_spec.js22
-rw-r--r--spec/frontend/snippets/components/snippet_header_spec.js39
-rw-r--r--spec/frontend/snippets/components/snippet_visibility_edit_spec.js2
-rw-r--r--spec/frontend/snippets/utils/blob_spec.js2
-rw-r--r--spec/frontend/static_site_editor/components/edit_area_spec.js67
-rw-r--r--spec/frontend/static_site_editor/components/edit_meta_controls_spec.js2
-rw-r--r--spec/frontend/static_site_editor/graphql/resolvers/file_spec.js2
-rw-r--r--spec/frontend/static_site_editor/services/load_source_content_spec.js2
-rw-r--r--spec/frontend/static_site_editor/services/submit_content_changes_spec.js2
-rw-r--r--spec/frontend/syntax_highlight_spec.js2
-rw-r--r--spec/frontend/task_list_spec.js4
-rw-r--r--spec/frontend/terraform/components/states_table_actions_spec.js2
-rw-r--r--spec/frontend/terraform/components/terraform_list_spec.js2
-rw-r--r--spec/frontend/test_setup.js27
-rw-r--r--spec/frontend/toggle_buttons_spec.js2
-rw-r--r--spec/frontend/tooltips/components/tooltips_spec.js12
-rw-r--r--spec/frontend/tooltips/index_spec.js8
-rw-r--r--spec/frontend/tracking_spec.js2
-rw-r--r--spec/frontend/transfer_edit_spec.js38
-rw-r--r--spec/frontend/user_lists/components/add_user_modal_spec.js2
-rw-r--r--spec/frontend/user_lists/components/edit_user_list_spec.js4
-rw-r--r--spec/frontend/user_lists/components/new_user_list_spec.js4
-rw-r--r--spec/frontend/user_lists/components/user_list_spec.js16
-rw-r--r--spec/frontend/user_lists/store/show/mutations_spec.js6
-rw-r--r--spec/frontend/user_popovers_spec.js4
-rw-r--r--spec/frontend/version_check_image_spec.js4
-rw-r--r--spec/frontend/vue_alerts_spec.js8
-rw-r--r--spec/frontend/vue_mr_widget/components/approvals/approvals_spec.js39
-rw-r--r--spec/frontend/vue_mr_widget/components/approvals/approvals_summary_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/components/artifacts_list_app_spec.js50
-rw-r--r--spec/frontend/vue_mr_widget/components/artifacts_list_spec.js20
-rw-r--r--spec/frontend/vue_mr_widget/components/mock_data.js14
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_collapsible_extension_spec.js16
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_alert_message_spec.js10
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js4
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_icon_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_memory_usage_spec.js12
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_pipeline_container_spec.js10
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_pipeline_spec.js20
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js268
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_related_links_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/components/review_app_link_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap183
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js421
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js64
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_closed_spec.js10
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_commit_message_dropdown_spec.js3
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js10
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js11
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js8
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js10
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_missing_branch_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js47
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_squash_before_merge_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_wip_spec.js6
-rw-r--r--spec/frontend/vue_mr_widget/components/terraform/mr_widget_terraform_container_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/components/terraform/terraform_plan_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/deployment/deployment_spec.js4
-rw-r--r--spec/frontend/vue_mr_widget/deployment/deployment_view_button_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/mock_data.js24
-rw-r--r--spec/frontend/vue_mr_widget/mr_widget_how_to_merge_modal_spec.js12
-rw-r--r--spec/frontend/vue_mr_widget/mr_widget_options_spec.js444
-rw-r--r--spec/frontend/vue_mr_widget/stores/artifacts_list/actions_spec.js14
-rw-r--r--spec/frontend/vue_mr_widget/stores/artifacts_list/getters_spec.js4
-rw-r--r--spec/frontend/vue_mr_widget/stores/mr_widget_store_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap1
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap1
-rw-r--r--spec/frontend/vue_shared/components/actions_button_spec.js8
-rw-r--r--spec/frontend/vue_shared/components/alert_details_table_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/awards_list_spec.js14
-rw-r--r--spec/frontend/vue_shared/components/ci_badge_link_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/clipboard_button_spec.js22
-rw-r--r--spec/frontend/vue_shared/components/color_picker/color_picker_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/commit_spec.js13
-rw-r--r--spec/frontend/vue_shared/components/confirm_modal_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/content_viewer/viewers/markdown_viewer_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/date_time_picker/date_time_picker_lib_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/date_time_picker/date_time_picker_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/deployment_instance/deployment_instance_spec.js103
-rw-r--r--spec/frontend/vue_shared/components/deployment_instance/mock_data.js144
-rw-r--r--spec/frontend/vue_shared/components/deprecated_modal_2_spec.js258
-rw-r--r--spec/frontend/vue_shared/components/diff_viewer/diff_viewer_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js18
-rw-r--r--spec/frontend/vue_shared/components/dismissible_container_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/editor_lite_spec.js42
-rw-r--r--spec/frontend/vue_shared/components/expand_button_spec.js57
-rw-r--r--spec/frontend/vue_shared/components/file_finder/index_spec.js30
-rw-r--r--spec/frontend/vue_shared/components/file_finder/item_spec.js14
-rw-r--r--spec/frontend/vue_shared/components/file_tree_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js51
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js13
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/test_helper.js4
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js13
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js9
-rw-r--r--spec/frontend/vue_shared/components/gfm_autocomplete/__snapshots__/utils_spec.js.snap13
-rw-r--r--spec/frontend/vue_shared/components/gfm_autocomplete/utils_spec.js83
-rw-r--r--spec/frontend/vue_shared/components/gl_countdown_spec.js14
-rw-r--r--spec/frontend/vue_shared/components/gl_modal_vuex_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/issuable/issuable_header_warnings_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/issue/issue_assignees_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/issue/issue_milestone_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/issue/related_issuable_mock_data.js2
-rw-r--r--spec/frontend/vue_shared/components/local_storage_sync_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap1
-rw-r--r--spec/frontend/vue_shared/components/markdown/apply_suggestion_spec.js11
-rw-r--r--spec/frontend/vue_shared/components/markdown/header_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js3
-rw-r--r--spec/frontend/vue_shared/components/markdown/suggestion_diff_row_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/markdown/suggestion_diff_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/markdown/suggestions_spec.js5
-rw-r--r--spec/frontend/vue_shared/components/markdown/toolbar_button_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/navigation_tabs_spec.js82
-rw-r--r--spec/frontend/vue_shared/components/notes/noteable_warning_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/ordered_layout_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js19
-rw-r--r--spec/frontend/vue_shared/components/pagination_links_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/pikaday_spec.js45
-rw-r--r--spec/frontend/vue_shared/components/project_avatar/default_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/project_selector/project_selector_spec.js22
-rw-r--r--spec/frontend/vue_shared/components/registry/details_row_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/registry/list_item_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/registry/metadata_item_spec.js23
-rw-r--r--spec/frontend/vue_shared/components/registry/title_area_spec.js42
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/editor_service_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/modals/add_image/upload_image_tab_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/modals/insert_video_modal_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/rich_content_editor_spec.js14
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/services/build_html_to_markdown_renderer_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/services/renderers/mock_data.js8
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_identifier_instance_text_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_identifier_paragraph_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/toolbar_item_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/security_reports/help_icon_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/security_reports/security_summary_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/sidebar/collapsed_grouped_date_picker_spec.js8
-rw-r--r--spec/frontend/vue_shared/components/sidebar/date_picker_spec.js35
-rw-r--r--spec/frontend/vue_shared/components/sidebar/issuable_move_dropdown_spec.js32
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select/base_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_create_label_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_collapsed_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_spec.js7
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_create_view_spec.js22
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js30
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/getters_spec.js5
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/slot_switch_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/split_button_spec.js11
-rw-r--r--spec/frontend/vue_shared/components/stacked_progress_bar_spec.js11
-rw-r--r--spec/frontend/vue_shared/components/table_pagination_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/tabs/tab_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/tooltip_on_truncate_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_list_spec.js8
-rw-r--r--spec/frontend/vue_shared/components/user_popover/user_popover_spec.js11
-rw-r--r--spec/frontend/vue_shared/directives/tooltip_spec.js8
-rw-r--r--spec/frontend/vue_shared/directives/validation_spec.js6
-rw-r--r--spec/frontend/vue_shared/security_reports/components/security_report_download_dropdown_spec.js2
-rw-r--r--spec/frontend/vue_shared/security_reports/mock_data.js75
-rw-r--r--spec/frontend/vue_shared/security_reports/security_reports_app_spec.js22
-rw-r--r--spec/frontend/vue_shared/security_reports/store/getters_spec.js2
-rw-r--r--spec/frontend/vue_shared/security_reports/store/modules/sast/actions_spec.js16
-rw-r--r--spec/frontend/vue_shared/security_reports/store/modules/secret_detection/actions_spec.js16
-rw-r--r--spec/frontend/vue_shared/security_reports/utils_spec.js7
-rw-r--r--spec/frontend/vuex_shared/modules/modal/actions_spec.js8
-rw-r--r--spec/frontend/whats_new/components/app_spec.js4
-rw-r--r--spec/frontend/wikis_spec.js222
-rw-r--r--spec/frontend/zen_mode_spec.js5
-rw-r--r--spec/frontend_integration/ide/helpers/ide_helper.js66
-rw-r--r--spec/frontend_integration/ide/helpers/start.js14
-rw-r--r--spec/frontend_integration/ide/ide_integration_spec.js25
-rw-r--r--spec/frontend_integration/ide/user_opens_file_spec.js4
-rw-r--r--spec/frontend_integration/test_helpers/factories/commit_id.js4
-rw-r--r--spec/frontend_integration/test_helpers/fixtures.js6
-rw-r--r--spec/frontend_integration/test_helpers/mock_server/index.js2
-rw-r--r--spec/frontend_integration/test_helpers/mock_server/routes/404.js4
-rw-r--r--spec/frontend_integration/test_helpers/mock_server/routes/ci.js2
-rw-r--r--spec/frontend_integration/test_helpers/mock_server/routes/graphql.js2
-rw-r--r--spec/frontend_integration/test_helpers/mock_server/routes/index.js2
-rw-r--r--spec/frontend_integration/test_helpers/mock_server/routes/projects.js2
-rw-r--r--spec/frontend_integration/test_helpers/mock_server/routes/repository.js20
-rw-r--r--spec/frontend_integration/test_helpers/setup/index.js1
-rw-r--r--spec/frontend_integration/test_helpers/setup/setup_testing_library.js3
-rw-r--r--spec/frontend_integration/test_helpers/snapshot_serializer.js2
-rw-r--r--spec/frontend_integration/test_helpers/utils/obj_spec.js6
-rw-r--r--spec/graphql/mutations/namespace/package_settings/update_spec.rb98
-rw-r--r--spec/graphql/mutations/releases/create_spec.rb7
-rw-r--r--spec/graphql/mutations/releases/update_spec.rb7
-rw-r--r--spec/graphql/resolvers/ci/config_resolver_spec.rb29
-rw-r--r--spec/graphql/resolvers/ci/jobs_resolver_spec.rb3
-rw-r--r--spec/graphql/resolvers/issues_resolver_spec.rb6
-rw-r--r--spec/graphql/resolvers/merge_requests_resolver_spec.rb10
-rw-r--r--spec/graphql/resolvers/package_details_resolver_spec.rb21
-rw-r--r--spec/graphql/resolvers/projects_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/release_milestones_resolver_spec.rb23
-rw-r--r--spec/graphql/resolvers/user_notes_count_resolver_spec.rb4
-rw-r--r--spec/graphql/resolvers/users_resolver_spec.rb26
-rw-r--r--spec/graphql/types/admin/analytics/instance_statistics/measurement_type_spec.rb44
-rw-r--r--spec/graphql/types/base_enum_spec.rb70
-rw-r--r--spec/graphql/types/board_type_spec.rb2
-rw-r--r--spec/graphql/types/ci/config/job_restriction_type_spec.rb13
-rw-r--r--spec/graphql/types/ci/config/job_type_spec.rb9
-rw-r--r--spec/graphql/types/ci/job_type_spec.rb1
-rw-r--r--spec/graphql/types/issue_type_spec.rb3
-rw-r--r--spec/graphql/types/merge_request_type_spec.rb14
-rw-r--r--spec/graphql/types/namespace/package_settings_type_spec.rb17
-rw-r--r--spec/graphql/types/notes/note_type_spec.rb1
-rw-r--r--spec/graphql/types/packages/composer/details_type_spec.rb23
-rw-r--r--spec/graphql/types/packages/composer/json_type_spec.rb15
-rw-r--r--spec/graphql/types/packages/composer/metadatum_type_spec.rb15
-rw-r--r--spec/graphql/types/packages/package_type_enum_spec.rb (renamed from spec/graphql/types/package_type_enum_spec.rb)0
-rw-r--r--spec/graphql/types/packages/package_type_spec.rb (renamed from spec/graphql/types/package_type_spec.rb)2
-rw-r--r--spec/graphql/types/packages/tag_type_spec.rb15
-rw-r--r--spec/graphql/types/project_type_spec.rb2
-rw-r--r--spec/graphql/types/projects/service_type_spec.rb2
-rw-r--r--spec/graphql/types/query_type_spec.rb6
-rw-r--r--spec/graphql/types/repository_type_spec.rb2
-rw-r--r--spec/helpers/blob_helper_spec.rb11
-rw-r--r--spec/helpers/ci/pipelines_helper_spec.rb19
-rw-r--r--spec/helpers/ci/triggers_helper_spec.rb31
-rw-r--r--spec/helpers/commits_helper_spec.rb33
-rw-r--r--spec/helpers/dashboard_helper_spec.rb6
-rw-r--r--spec/helpers/graph_helper_spec.rb12
-rw-r--r--spec/helpers/groups/group_members_helper_spec.rb12
-rw-r--r--spec/helpers/invite_members_helper_spec.rb65
-rw-r--r--spec/helpers/jira_connect_helper_spec.rb15
-rw-r--r--spec/helpers/merge_requests_helper_spec.rb33
-rw-r--r--spec/helpers/projects/project_members_helper_spec.rb145
-rw-r--r--spec/helpers/projects_helper_spec.rb29
-rw-r--r--spec/helpers/services_helper_spec.rb50
-rw-r--r--spec/helpers/tree_helper_spec.rb3
-rw-r--r--spec/helpers/users_helper_spec.rb6
-rw-r--r--spec/helpers/visibility_level_helper_spec.rb38
-rw-r--r--spec/initializers/carrierwave_patch_spec.rb97
-rw-r--r--spec/javascripts/fly_out_nav_browser_spec.js9
-rw-r--r--spec/javascripts/lib/utils/browser_spec.js106
-rw-r--r--spec/javascripts/matchers.js7
-rw-r--r--spec/javascripts/test_bundle.js20
-rw-r--r--spec/javascripts/test_constants.js2
-rw-r--r--spec/lib/api/entities/snippet_repository_storage_move_spec.rb25
-rw-r--r--spec/lib/api/helpers/authentication_spec.rb207
-rw-r--r--spec/lib/api/helpers_spec.rb47
-rw-r--r--spec/lib/atlassian/jira_connect/client_spec.rb300
-rw-r--r--spec/lib/atlassian/jira_connect/serializers/build_entity_spec.rb4
-rw-r--r--spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb95
-rw-r--r--spec/lib/atlassian/jira_connect/serializers/feature_flag_entity_spec.rb85
-rw-r--r--spec/lib/banzai/filter/asset_proxy_filter_spec.rb10
-rw-r--r--spec/lib/banzai/filter/broadcast_message_sanitization_filter_spec.rb12
-rw-r--r--spec/lib/banzai/filter/reference_redactor_filter_spec.rb31
-rw-r--r--spec/lib/banzai/filter/sanitization_filter_spec.rb24
-rw-r--r--spec/lib/banzai/filter/truncate_source_filter_spec.rb31
-rw-r--r--spec/lib/banzai/pipeline/description_pipeline_spec.rb2
-rw-r--r--spec/lib/banzai/pipeline/gfm_pipeline_spec.rb4
-rw-r--r--spec/lib/banzai/pipeline/pre_process_pipeline_spec.rb8
-rw-r--r--spec/lib/bulk_imports/common/extractors/graphql_extractor_spec.rb7
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb16
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb10
-rw-r--r--spec/lib/bulk_imports/importers/group_importer_spec.rb5
-rw-r--r--spec/lib/bulk_imports/importers/groups_importer_spec.rb36
-rw-r--r--spec/lib/bulk_imports/pipeline_spec.rb14
-rw-r--r--spec/lib/constraints/admin_constrainer_spec.rb2
-rw-r--r--spec/lib/container_registry/client_spec.rb100
-rw-r--r--spec/lib/declarative_enum_spec.rb147
-rw-r--r--spec/lib/expand_variables_spec.rb37
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_deployed_to_production_spec.rb7
-rw-r--r--spec/lib/gitlab/api_authentication/builder_spec.rb76
-rw-r--r--spec/lib/gitlab/api_authentication/sent_through_builder_spec.rb17
-rw-r--r--spec/lib/gitlab/api_authentication/token_locator_spec.rb55
-rw-r--r--spec/lib/gitlab/api_authentication/token_resolver_spec.rb117
-rw-r--r--spec/lib/gitlab/api_authentication/token_type_builder_spec.rb16
-rw-r--r--spec/lib/gitlab/asset_proxy_spec.rb4
-rw-r--r--spec/lib/gitlab/auth/auth_finders_spec.rb58
-rw-r--r--spec/lib/gitlab/auth/current_user_mode_spec.rb2
-rw-r--r--spec/lib/gitlab/auth/ldap/config_spec.rb22
-rw-r--r--spec/lib/gitlab/auth/request_authenticator_spec.rb5
-rw-r--r--spec/lib/gitlab/auth_spec.rb5
-rw-r--r--spec/lib/gitlab/background_migration/backfill_artifact_expiry_date_spec.rb82
-rw-r--r--spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb91
-rw-r--r--spec/lib/gitlab/background_migration/populate_finding_uuid_for_vulnerability_feedback_spec.rb113
-rw-r--r--spec/lib/gitlab/background_migration/remove_duplicate_services_spec.rb121
-rw-r--r--spec/lib/gitlab/checks/diff_check_spec.rb97
-rw-r--r--spec/lib/gitlab/ci/config/entry/artifacts_spec.rb17
-rw-r--r--spec/lib/gitlab/ci/config/entry/variables_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/external/file/local_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/config/external/file/project_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/config/external/mapper_spec.rb126
-rw-r--r--spec/lib/gitlab/ci/config/external/processor_spec.rb13
-rw-r--r--spec/lib/gitlab/ci/config_spec.rb24
-rw-r--r--spec/lib/gitlab/ci/lint_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/build_spec.rb114
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/command_spec.rb26
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/seed_block_spec.rb12
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb39
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/template_usage_spec.rb37
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/build_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb3
-rw-r--r--spec/lib/gitlab/ci/reports/test_failure_history_spec.rb13
-rw-r--r--spec/lib/gitlab/ci/status/group/factory_spec.rb5
-rw-r--r--spec/lib/gitlab/ci/syntax_templates_spec.rb25
-rw-r--r--spec/lib/gitlab/ci/templates/5_minute_production_app_ci_yaml_spec.rb49
-rw-r--r--spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/templates/Terraform/base_gitlab_ci_yaml_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/templates/flutter_gitlab_ci_yaml_spec.rb25
-rw-r--r--spec/lib/gitlab/ci/templates/npm_spec.rb3
-rw-r--r--spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb3
-rw-r--r--spec/lib/gitlab/ci/variables/collection/sorted_spec.rb251
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb34
-rw-r--r--spec/lib/gitlab/composer/version_index_spec.rb49
-rw-r--r--spec/lib/gitlab/config/entry/composable_hash_spec.rb2
-rw-r--r--spec/lib/gitlab/conflict/file_spec.rb32
-rw-r--r--spec/lib/gitlab/cycle_analytics/base_event_fetcher_spec.rb49
-rw-r--r--spec/lib/gitlab/cycle_analytics/code_event_fetcher_spec.rb13
-rw-r--r--spec/lib/gitlab/cycle_analytics/code_stage_spec.rb129
-rw-r--r--spec/lib/gitlab/cycle_analytics/events_spec.rb182
-rw-r--r--spec/lib/gitlab/cycle_analytics/issue_event_fetcher_spec.rb9
-rw-r--r--spec/lib/gitlab/cycle_analytics/issue_stage_spec.rb136
-rw-r--r--spec/lib/gitlab/cycle_analytics/plan_event_fetcher_spec.rb17
-rw-r--r--spec/lib/gitlab/cycle_analytics/plan_stage_spec.rb116
-rw-r--r--spec/lib/gitlab/cycle_analytics/production_event_fetcher_spec.rb9
-rw-r--r--spec/lib/gitlab/cycle_analytics/review_event_fetcher_spec.rb9
-rw-r--r--spec/lib/gitlab/cycle_analytics/review_stage_spec.rb90
-rw-r--r--spec/lib/gitlab/cycle_analytics/staging_event_fetcher_spec.rb13
-rw-r--r--spec/lib/gitlab/cycle_analytics/staging_stage_spec.rb99
-rw-r--r--spec/lib/gitlab/cycle_analytics/test_event_fetcher_spec.rb13
-rw-r--r--spec/lib/gitlab/cycle_analytics/test_stage_spec.rb57
-rw-r--r--spec/lib/gitlab/danger/base_linter_spec.rb53
-rw-r--r--spec/lib/gitlab/danger/changelog_spec.rb71
-rw-r--r--spec/lib/gitlab/danger/helper_spec.rb56
-rw-r--r--spec/lib/gitlab/danger/roulette_spec.rb63
-rw-r--r--spec/lib/gitlab/danger/teammate_spec.rb8
-rw-r--r--spec/lib/gitlab/danger/title_linting_spec.rb56
-rw-r--r--spec/lib/gitlab/danger/weightage/maintainers_spec.rb34
-rw-r--r--spec/lib/gitlab/danger/weightage/reviewers_spec.rb63
-rw-r--r--spec/lib/gitlab/data_builder/build_spec.rb1
-rw-r--r--spec/lib/gitlab/data_builder/pipeline_spec.rb1
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb190
-rw-r--r--spec/lib/gitlab/database/partitioning/partition_creator_spec.rb2
-rw-r--r--spec/lib/gitlab/database/partitioning/replace_table_spec.rb2
-rw-r--r--spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb2
-rw-r--r--spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb2
-rw-r--r--spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb139
-rw-r--r--spec/lib/gitlab/database/postgres_hll/batch_distinct_counter_spec.rb83
-rw-r--r--spec/lib/gitlab/database/postgres_hll/buckets_spec.rb33
-rw-r--r--spec/lib/gitlab/database/reindexing/coordinator_spec.rb82
-rw-r--r--spec/lib/gitlab/database/reindexing/grafana_notifier_spec.rb139
-rw-r--r--spec/lib/gitlab/database/reindexing/index_selection_spec.rb2
-rw-r--r--spec/lib/gitlab/database/reindexing/reindex_action_spec.rb112
-rw-r--r--spec/lib/gitlab/database/reindexing_spec.rb9
-rw-r--r--spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb30
-rw-r--r--spec/lib/gitlab/diff/position_spec.rb58
-rw-r--r--spec/lib/gitlab/email/handler/create_note_handler_spec.rb234
-rw-r--r--spec/lib/gitlab/email/handler/create_note_on_issuable_handler_spec.rb61
-rw-r--r--spec/lib/gitlab/email/handler/service_desk_handler_spec.rb10
-rw-r--r--spec/lib/gitlab/email/handler_spec.rb5
-rw-r--r--spec/lib/gitlab/error_tracking_spec.rb21
-rw-r--r--spec/lib/gitlab/experimentation/controller_concern_spec.rb10
-rw-r--r--spec/lib/gitlab/experimentation/experiment_spec.rb6
-rw-r--r--spec/lib/gitlab/experimentation_spec.rb2
-rw-r--r--spec/lib/gitlab/faraday/error_callback_spec.rb59
-rw-r--r--spec/lib/gitlab/git/changed_path_spec.rb29
-rw-r--r--spec/lib/gitlab/git/diff_spec.rb2
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb31
-rw-r--r--spec/lib/gitlab/git/wiki_page_version_spec.rb28
-rw-r--r--spec/lib/gitlab/git_access_snippet_spec.rb89
-rw-r--r--spec/lib/gitlab/git_access_spec.rb60
-rw-r--r--spec/lib/gitlab/gitaly_client/commit_service_spec.rb6
-rw-r--r--spec/lib/gitlab/gitaly_client/repository_service_spec.rb5
-rw-r--r--spec/lib/gitlab/github_import/importer/repository_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/gitpod_spec.rb73
-rw-r--r--spec/lib/gitlab/graphql/batch_key_spec.rb78
-rw-r--r--spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb41
-rw-r--r--spec/lib/gitlab/graphql/queries_spec.rb343
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml4
-rw-r--r--spec/lib/gitlab/kubernetes/cilium_network_policy_spec.rb24
-rw-r--r--spec/lib/gitlab/kubernetes/kubectl_cmd_spec.rb5
-rw-r--r--spec/lib/gitlab/kubernetes/pod_cmd_spec.rb14
-rw-r--r--spec/lib/gitlab/metrics/samplers/action_cable_sampler_spec.rb10
-rw-r--r--spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb10
-rw-r--r--spec/lib/gitlab/metrics/samplers/puma_sampler_spec.rb10
-rw-r--r--spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb12
-rw-r--r--spec/lib/gitlab/metrics/samplers/threads_sampler_spec.rb10
-rw-r--r--spec/lib/gitlab/metrics/samplers/unicorn_sampler_spec.rb2
-rw-r--r--spec/lib/gitlab/metrics/system_spec.rb34
-rw-r--r--spec/lib/gitlab/middleware/multipart/handler_for_jwt_params_spec.rb53
-rw-r--r--spec/lib/gitlab/middleware/multipart_spec.rb (renamed from spec/lib/gitlab/middleware/multipart_with_handler_for_jwt_params_spec.rb)20
-rw-r--r--spec/lib/gitlab/middleware/multipart_with_handler_spec.rb196
-rw-r--r--spec/lib/gitlab/path_regex_spec.rb6
-rw-r--r--spec/lib/gitlab/performance_bar/redis_adapter_when_peek_enabled_spec.rb1
-rw-r--r--spec/lib/gitlab/project_template_spec.rb1
-rw-r--r--spec/lib/gitlab/prometheus/internal_spec.rb48
-rw-r--r--spec/lib/gitlab/rack_attack_spec.rb212
-rw-r--r--spec/lib/gitlab/search_results_spec.rb39
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/admin_mode/client_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/admin_mode/server_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_middleware_spec.rb2
-rw-r--r--spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb9
-rw-r--r--spec/lib/gitlab/template/gitlab_ci_syntax_yml_template_spec.rb17
-rw-r--r--spec/lib/gitlab/throttle_spec.rb28
-rw-r--r--spec/lib/gitlab/tracking/standard_context_spec.rb55
-rw-r--r--spec/lib/gitlab/tracking_spec.rb43
-rw-r--r--spec/lib/gitlab/url_builder_spec.rb2
-rw-r--r--spec/lib/gitlab/usage/metric_definition_spec.rb123
-rw-r--r--spec/lib/gitlab/usage/metric_spec.rb29
-rw-r--r--spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb31
-rw-r--r--spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb156
-rw-r--r--spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb151
-rw-r--r--spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb (renamed from spec/lib/gitlab/usage_data_counters/guest_package_event_counter_spec.rb)8
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb6
-rw-r--r--spec/lib/gitlab/user_access_spec.rb32
-rw-r--r--spec/lib/gitlab/utils/usage_data_spec.rb112
-rw-r--r--spec/lib/gitlab/utils_spec.rb17
-rw-r--r--spec/lib/gitlab/uuid_spec.rb19
-rw-r--r--spec/lib/gitlab/visibility_level_spec.rb48
-rw-r--r--spec/lib/release_highlights/validator/entry_spec.rb87
-rw-r--r--spec/lib/release_highlights/validator_spec.rb85
-rw-r--r--spec/lib/uploaded_file_spec.rb194
-rw-r--r--spec/mailers/emails/merge_requests_spec.rb2
-rw-r--r--spec/mailers/emails/pipelines_spec.rb32
-rw-r--r--spec/mailers/notify_spec.rb86
-rw-r--r--spec/migrations/20210112143418_remove_duplicate_services2_spec.rb52
-rw-r--r--spec/migrations/add_has_external_wiki_trigger_spec.rb128
-rw-r--r--spec/migrations/remove_alerts_service_records_spec.rb30
-rw-r--r--spec/migrations/remove_duplicate_labels_from_groups_spec.rb227
-rw-r--r--spec/migrations/schedule_populate_finding_uuid_for_vulnerability_feedback_spec.rb37
-rw-r--r--spec/models/alert_management/http_integration_spec.rb2
-rw-r--r--spec/models/application_setting_spec.rb39
-rw-r--r--spec/models/audit_event_archived_spec.rb (renamed from spec/models/audit_event_partitioned_spec.rb)20
-rw-r--r--spec/models/ci/bridge_spec.rb12
-rw-r--r--spec/models/ci/build_need_spec.rb2
-rw-r--r--spec/models/ci/build_spec.rb249
-rw-r--r--spec/models/ci/commit_with_pipeline_spec.rb (renamed from spec/models/commit_with_pipeline_spec.rb)2
-rw-r--r--spec/models/ci/group_spec.rb12
-rw-r--r--spec/models/ci/job_artifact_spec.rb33
-rw-r--r--spec/models/ci/ref_spec.rb60
-rw-r--r--spec/models/clusters/applications/knative_spec.rb6
-rw-r--r--spec/models/commit_spec.rb13
-rw-r--r--spec/models/commit_status_spec.rb19
-rw-r--r--spec/models/concerns/ci/artifactable_spec.rb19
-rw-r--r--spec/models/concerns/each_batch_spec.rb16
-rw-r--r--spec/models/concerns/milestoneable_spec.rb4
-rw-r--r--spec/models/concerns/noteable_spec.rb104
-rw-r--r--spec/models/cycle_analytics/code_spec.rb73
-rw-r--r--spec/models/cycle_analytics/issue_spec.rb46
-rw-r--r--spec/models/cycle_analytics/plan_spec.rb52
-rw-r--r--spec/models/cycle_analytics/project_level_spec.rb34
-rw-r--r--spec/models/cycle_analytics/project_level_stage_adapter_spec.rb38
-rw-r--r--spec/models/cycle_analytics/review_spec.rb34
-rw-r--r--spec/models/cycle_analytics/staging_spec.rb56
-rw-r--r--spec/models/cycle_analytics/test_spec.rb73
-rw-r--r--spec/models/deployment_spec.rb50
-rw-r--r--spec/models/experiment_spec.rb58
-rw-r--r--spec/models/group_spec.rb20
-rw-r--r--spec/models/list_spec.rb67
-rw-r--r--spec/models/members/group_member_spec.rb12
-rw-r--r--spec/models/merge_request_spec.rb148
-rw-r--r--spec/models/namespace/package_setting_spec.rb81
-rw-r--r--spec/models/namespace_onboarding_action_spec.rb53
-rw-r--r--spec/models/namespace_spec.rb23
-rw-r--r--spec/models/onboarding_progress_spec.rb107
-rw-r--r--spec/models/packages/debian/file_metadatum_spec.rb91
-rw-r--r--spec/models/packages/debian/group_architecture_spec.rb7
-rw-r--r--spec/models/packages/debian/group_distribution_spec.rb7
-rw-r--r--spec/models/packages/debian/project_architecture_spec.rb7
-rw-r--r--spec/models/packages/debian/project_distribution_spec.rb7
-rw-r--r--spec/models/packages/package_file_spec.rb1
-rw-r--r--spec/models/packages/package_spec.rb24
-rw-r--r--spec/models/project_import_state_spec.rb2
-rw-r--r--spec/models/project_pages_metadatum_spec.rb21
-rw-r--r--spec/models/project_services/alerts_service_spec.rb110
-rw-r--r--spec/models/project_services/jira_service_spec.rb53
-rw-r--r--spec/models/project_services/prometheus_service_spec.rb4
-rw-r--r--spec/models/project_spec.rb310
-rw-r--r--spec/models/project_wiki_spec.rb6
-rw-r--r--spec/models/protectable_dropdown_spec.rb32
-rw-r--r--spec/models/release_highlight_spec.rb2
-rw-r--r--spec/models/release_spec.rb12
-rw-r--r--spec/models/repository_spec.rb18
-rw-r--r--spec/models/snippet_repository_storage_move_spec.rb2
-rw-r--r--spec/models/snippet_spec.rb86
-rw-r--r--spec/models/terraform/state_spec.rb16
-rw-r--r--spec/models/user_spec.rb20
-rw-r--r--spec/policies/group_member_policy_spec.rb65
-rw-r--r--spec/policies/namespace_policy_spec.rb2
-rw-r--r--spec/policies/project_policy_spec.rb42
-rw-r--r--spec/presenters/ci/build_runner_presenter_spec.rb28
-rw-r--r--spec/presenters/packages/nuget/service_index_presenter_spec.rb55
-rw-r--r--spec/rack_servers/puma_spec.rb2
-rw-r--r--spec/rack_servers/unicorn_spec.rb2
-rw-r--r--spec/requests/api/api_spec.rb2
-rw-r--r--spec/requests/api/boards_spec.rb11
-rw-r--r--spec/requests/api/ci/runner/jobs_put_spec.rb27
-rw-r--r--spec/requests/api/ci/runner/jobs_request_post_spec.rb6
-rw-r--r--spec/requests/api/debian_project_packages_spec.rb7
-rw-r--r--spec/requests/api/generic_packages_spec.rb189
-rw-r--r--spec/requests/api/graphql/ci/ci_cd_setting_spec.rb10
-rw-r--r--spec/requests/api/graphql/ci/config_spec.rb304
-rw-r--r--spec/requests/api/graphql/ci/jobs_spec.rb155
-rw-r--r--spec/requests/api/graphql/ci/pipelines_spec.rb112
-rw-r--r--spec/requests/api/graphql/group/group_members_spec.rb16
-rw-r--r--spec/requests/api/graphql/mutations/alert_management/alerts/todo/create_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/alert_management/http_integration/create_spec.rb16
-rw-r--r--spec/requests/api/graphql/mutations/ci/ci_cd_settings_update_spec.rb58
-rw-r--r--spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb142
-rw-r--r--spec/requests/api/graphql/mutations/releases/create_spec.rb22
-rw-r--r--spec/requests/api/graphql/mutations/releases/update_spec.rb10
-rw-r--r--spec/requests/api/graphql/namespace/package_settings_spec.rb33
-rw-r--r--spec/requests/api/graphql/packages/package_composer_details_spec.rb39
-rw-r--r--spec/requests/api/graphql/project/alert_management/alert/assignees_spec.rb34
-rw-r--r--spec/requests/api/graphql/project/alert_management/alert/notes_spec.rb26
-rw-r--r--spec/requests/api/graphql/project/merge_request_spec.rb72
-rw-r--r--spec/requests/api/graphql/project/merge_requests_spec.rb155
-rw-r--r--spec/requests/api/graphql/project/project_members_spec.rb9
-rw-r--r--spec/requests/api/graphql/project/release_spec.rb33
-rw-r--r--spec/requests/api/graphql/project_query_spec.rb66
-rw-r--r--spec/requests/api/graphql/user/starred_projects_query_spec.rb24
-rw-r--r--spec/requests/api/graphql/users_spec.rb46
-rw-r--r--spec/requests/api/group_packages_spec.rb4
-rw-r--r--spec/requests/api/invitations_spec.rb84
-rw-r--r--spec/requests/api/jobs_spec.rb141
-rw-r--r--spec/requests/api/labels_spec.rb27
-rw-r--r--spec/requests/api/lint_spec.rb4
-rw-r--r--spec/requests/api/maven_packages_spec.rb50
-rw-r--r--spec/requests/api/merge_requests_spec.rb139
-rw-r--r--spec/requests/api/nuget_group_packages_spec.rb122
-rw-r--r--spec/requests/api/nuget_project_packages_spec.rb384
-rw-r--r--spec/requests/api/project_packages_spec.rb4
-rw-r--r--spec/requests/api/project_repository_storage_moves_spec.rb220
-rw-r--r--spec/requests/api/project_templates_spec.rb17
-rw-r--r--spec/requests/api/projects_spec.rb38
-rw-r--r--spec/requests/api/releases_spec.rb35
-rw-r--r--spec/requests/api/snippet_repository_storage_moves_spec.rb12
-rw-r--r--spec/requests/api/terraform/state_spec.rb37
-rw-r--r--spec/requests/api/todos_spec.rb2
-rw-r--r--spec/requests/api/users_spec.rb184
-rw-r--r--spec/requests/jwks_controller_spec.rb (renamed from spec/controllers/jwks_controller_spec.rb)6
-rw-r--r--spec/requests/lfs_http_spec.rb16
-rw-r--r--spec/requests/profiles/notifications_controller_spec.rb2
-rw-r--r--spec/requests/projects/cycle_analytics_events_spec.rb134
-rw-r--r--spec/requests/projects/incident_management/pagerduty_incidents_spec.rb2
-rw-r--r--spec/requests/rack_attack_global_spec.rb82
-rw-r--r--spec/requests/runner_setup_controller_spec.rb (renamed from spec/controllers/runner_setup_controller_spec.rb)4
-rw-r--r--spec/requests/users_controller_spec.rb (renamed from spec/controllers/users_controller_spec.rb)474
-rw-r--r--spec/routing/notifications_routing_spec.rb8
-rw-r--r--spec/routing/project_routing_spec.rb24
-rw-r--r--spec/routing/routing_spec.rb92
-rw-r--r--spec/rubocop/cop/active_record_association_reload_spec.rb2
-rw-r--r--spec/rubocop/cop/api/base_spec.rb2
-rw-r--r--spec/rubocop/cop/avoid_becomes_spec.rb2
-rw-r--r--spec/rubocop/cop/avoid_break_from_strong_memoize_spec.rb2
-rw-r--r--spec/rubocop/cop/avoid_keyword_arguments_in_sidekiq_workers_spec.rb2
-rw-r--r--spec/rubocop/cop/avoid_return_from_blocks_spec.rb2
-rw-r--r--spec/rubocop/cop/avoid_route_redirect_leading_slash_spec.rb2
-rw-r--r--spec/rubocop/cop/ban_catch_throw_spec.rb2
-rw-r--r--spec/rubocop/cop/code_reuse/finder_spec.rb2
-rw-r--r--spec/rubocop/cop/code_reuse/presenter_spec.rb2
-rw-r--r--spec/rubocop/cop/code_reuse/serializer_spec.rb2
-rw-r--r--spec/rubocop/cop/code_reuse/service_class_spec.rb2
-rw-r--r--spec/rubocop/cop/code_reuse/worker_spec.rb2
-rw-r--r--spec/rubocop/cop/default_scope_spec.rb2
-rw-r--r--spec/rubocop/cop/destroy_all_spec.rb2
-rw-r--r--spec/rubocop/cop/filename_length_spec.rb2
-rw-r--r--spec/rubocop/cop/gitlab/avoid_uploaded_file_from_params_spec.rb2
-rw-r--r--spec/rubocop/cop/gitlab/bulk_insert_spec.rb2
-rw-r--r--spec/rubocop/cop/gitlab/change_timezone_spec.rb2
-rw-r--r--spec/rubocop/cop/gitlab/const_get_inherit_false_spec.rb2
-rw-r--r--spec/rubocop/cop/gitlab/duplicate_spec_location_spec.rb2
-rw-r--r--spec/rubocop/cop/gitlab/except_spec.rb2
-rw-r--r--spec/rubocop/cop/gitlab/finder_with_find_by_spec.rb2
-rw-r--r--spec/rubocop/cop/gitlab/httparty_spec.rb2
-rw-r--r--spec/rubocop/cop/gitlab/intersect_spec.rb2
-rw-r--r--spec/rubocop/cop/gitlab/json_spec.rb2
-rw-r--r--spec/rubocop/cop/gitlab/module_with_instance_variables_spec.rb2
-rw-r--r--spec/rubocop/cop/gitlab/policy_rule_boolean_spec.rb2
-rw-r--r--spec/rubocop/cop/gitlab/predicate_memoization_spec.rb2
-rw-r--r--spec/rubocop/cop/gitlab/rails_logger_spec.rb2
-rw-r--r--spec/rubocop/cop/gitlab/union_spec.rb2
-rw-r--r--spec/rubocop/cop/graphql/authorize_types_spec.rb2
-rw-r--r--spec/rubocop/cop/graphql/descriptions_spec.rb2
-rw-r--r--spec/rubocop/cop/graphql/gid_expected_type_spec.rb2
-rw-r--r--spec/rubocop/cop/graphql/id_type_spec.rb2
-rw-r--r--spec/rubocop/cop/graphql/json_type_spec.rb2
-rw-r--r--spec/rubocop/cop/graphql/resolver_type_spec.rb2
-rw-r--r--spec/rubocop/cop/group_public_or_visible_to_user_spec.rb2
-rw-r--r--spec/rubocop/cop/ignored_columns_spec.rb2
-rw-r--r--spec/rubocop/cop/include_sidekiq_worker_spec.rb2
-rw-r--r--spec/rubocop/cop/inject_enterprise_edition_module_spec.rb2
-rw-r--r--spec/rubocop/cop/lint/last_keyword_argument_spec.rb29
-rw-r--r--spec/rubocop/cop/migration/add_column_with_default_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/add_columns_to_wide_tables_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/add_concurrent_foreign_key_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/add_concurrent_index_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/add_index_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/add_reference_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/add_timestamps_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/complex_indexes_require_name_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/create_table_with_foreign_keys_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/datetime_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/drop_table_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/hash_index_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/prevent_strings_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/refer_to_index_by_name_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/remove_column_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/remove_concurrent_index_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/remove_index_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/safer_boolean_column_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/timestamps_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/update_column_in_batches_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/with_lock_retries_disallowed_method_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/with_lock_retries_with_change_spec.rb2
-rw-r--r--spec/rubocop/cop/performance/ar_count_each_spec.rb2
-rw-r--r--spec/rubocop/cop/performance/ar_exists_and_present_blank_spec.rb2
-rw-r--r--spec/rubocop/cop/performance/readlines_each_spec.rb2
-rw-r--r--spec/rubocop/cop/prefer_class_methods_over_module_spec.rb2
-rw-r--r--spec/rubocop/cop/project_path_helper_spec.rb2
-rw-r--r--spec/rubocop/cop/put_group_routes_under_scope_spec.rb2
-rw-r--r--spec/rubocop/cop/put_project_routes_under_scope_spec.rb2
-rw-r--r--spec/rubocop/cop/qa/ambiguous_page_object_name_spec.rb2
-rw-r--r--spec/rubocop/cop/qa/element_with_pattern_spec.rb2
-rw-r--r--spec/rubocop/cop/rspec/any_instance_of_spec.rb2
-rw-r--r--spec/rubocop/cop/rspec/be_success_matcher_spec.rb2
-rw-r--r--spec/rubocop/cop/rspec/env_assignment_spec.rb2
-rw-r--r--spec/rubocop/cop/rspec/factories_in_migration_specs_spec.rb2
-rw-r--r--spec/rubocop/cop/rspec/factory_bot/inline_association_spec.rb2
-rw-r--r--spec/rubocop/cop/rspec/htt_party_basic_auth_spec.rb2
-rw-r--r--spec/rubocop/cop/rspec/modify_sidekiq_middleware_spec.rb2
-rw-r--r--spec/rubocop/cop/rspec/timecop_freeze_spec.rb2
-rw-r--r--spec/rubocop/cop/rspec/timecop_travel_spec.rb2
-rw-r--r--spec/rubocop/cop/rspec/top_level_describe_path_spec.rb2
-rw-r--r--spec/rubocop/cop/rspec/web_mock_enable_spec.rb22
-rw-r--r--spec/rubocop/cop/ruby_interpolation_in_translation_spec.rb2
-rw-r--r--spec/rubocop/cop/safe_params_spec.rb2
-rw-r--r--spec/rubocop/cop/scalability/bulk_perform_with_context_spec.rb2
-rw-r--r--spec/rubocop/cop/scalability/cron_worker_context_spec.rb2
-rw-r--r--spec/rubocop/cop/scalability/file_uploads_spec.rb2
-rw-r--r--spec/rubocop/cop/scalability/idempotent_worker_spec.rb2
-rw-r--r--spec/rubocop/cop/sidekiq_options_queue_spec.rb2
-rw-r--r--spec/rubocop/cop/static_translation_definition_spec.rb2
-rw-r--r--spec/rubocop/cop/usage_data/distinct_count_by_large_foreign_key_spec.rb2
-rw-r--r--spec/rubocop/cop/usage_data/large_table_spec.rb2
-rw-r--r--spec/rubocop/qa_helpers_spec.rb2
-rw-r--r--spec/serializers/analytics_stage_serializer_spec.rb56
-rw-r--r--spec/serializers/build_details_entity_spec.rb39
-rw-r--r--spec/serializers/deploy_key_entity_spec.rb8
-rw-r--r--spec/serializers/diffs_metadata_entity_spec.rb1
-rw-r--r--spec/serializers/environment_entity_spec.rb22
-rw-r--r--spec/serializers/member_entity_spec.rb71
-rw-r--r--spec/serializers/member_serializer_spec.rb32
-rw-r--r--spec/serializers/member_user_entity_spec.rb38
-rw-r--r--spec/serializers/merge_request_poll_cached_widget_entity_spec.rb12
-rw-r--r--spec/serializers/merge_request_widget_entity_spec.rb11
-rw-r--r--spec/serializers/merge_requests/pipeline_entity_spec.rb4
-rw-r--r--spec/serializers/pipeline_details_entity_spec.rb2
-rw-r--r--spec/serializers/runner_entity_spec.rb2
-rw-r--r--spec/services/alert_management/alerts/todo/create_service_spec.rb4
-rw-r--r--spec/services/alert_management/sync_alert_service_data_service_spec.rb55
-rw-r--r--spec/services/ci/build_report_result_service_spec.rb6
-rw-r--r--spec/services/ci/create_downstream_pipeline_service_spec.rb47
-rw-r--r--spec/services/ci/create_pipeline_service/dry_run_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb73
-rw-r--r--spec/services/ci/destroy_expired_job_artifacts_service_spec.rb76
-rw-r--r--spec/services/ci/pipeline_artifacts/coverage_report_service_spec.rb (renamed from spec/services/ci/pipelines/create_artifact_service_spec.rb)2
-rw-r--r--spec/services/ci/pipeline_artifacts/destroy_expired_artifacts_service_spec.rb81
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_build_fails_deploy_is_delayed_and_needs_test.yml41
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_build_fails_deploy_is_manual_and_needs_test.yml40
-rw-r--r--spec/services/ci/pipeline_trigger_service_spec.rb14
-rw-r--r--spec/services/ci/play_build_service_spec.rb25
-rw-r--r--spec/services/ci/process_build_service_spec.rb40
-rw-r--r--spec/services/ci/retry_build_service_spec.rb16
-rw-r--r--spec/services/ci/retry_pipeline_service_spec.rb22
-rw-r--r--spec/services/ci/test_failure_history_service_spec.rb21
-rw-r--r--spec/services/ci/update_build_state_service_spec.rb21
-rw-r--r--spec/services/container_expiration_policies/cleanup_service_spec.rb43
-rw-r--r--spec/services/draft_notes/create_service_spec.rb17
-rw-r--r--spec/services/draft_notes/publish_service_spec.rb22
-rw-r--r--spec/services/feature_flags/create_service_spec.rb24
-rw-r--r--spec/services/feature_flags/update_service_spec.rb24
-rw-r--r--spec/services/git/branch_push_service_spec.rb4
-rw-r--r--spec/services/groups/create_service_spec.rb8
-rw-r--r--spec/services/groups/destroy_service_spec.rb111
-rw-r--r--spec/services/incident_management/pager_duty/process_webhook_service_spec.rb2
-rw-r--r--spec/services/issuable/bulk_update_service_spec.rb51
-rw-r--r--spec/services/issues/close_service_spec.rb27
-rw-r--r--spec/services/issues/export_csv_service_spec.rb4
-rw-r--r--spec/services/jira_connect/sync_service_spec.rb4
-rw-r--r--spec/services/jira_connect_subscriptions/create_service_spec.rb12
-rw-r--r--spec/services/members/create_service_spec.rb51
-rw-r--r--spec/services/merge_requests/after_create_service_spec.rb22
-rw-r--r--spec/services/merge_requests/cleanup_refs_service_spec.rb20
-rw-r--r--spec/services/merge_requests/close_service_spec.rb9
-rw-r--r--spec/services/merge_requests/create_from_issue_service_spec.rb3
-rw-r--r--spec/services/merge_requests/create_service_spec.rb6
-rw-r--r--spec/services/merge_requests/export_csv_service_spec.rb4
-rw-r--r--spec/services/merge_requests/merge_service_spec.rb30
-rw-r--r--spec/services/merge_requests/mergeability_check_service_spec.rb49
-rw-r--r--spec/services/merge_requests/post_merge_service_spec.rb9
-rw-r--r--spec/services/merge_requests/reopen_service_spec.rb9
-rw-r--r--spec/services/merge_requests/update_service_spec.rb29
-rw-r--r--spec/services/namespaces/package_settings/update_service_spec.rb102
-rw-r--r--spec/services/notes/create_service_spec.rb30
-rw-r--r--spec/services/notes/destroy_service_spec.rb8
-rw-r--r--spec/services/notes/quick_actions_service_spec.rb8
-rw-r--r--spec/services/notes/update_service_spec.rb14
-rw-r--r--spec/services/notification_service_spec.rb12
-rw-r--r--spec/services/onboarding_progress_service_spec.rb36
-rw-r--r--spec/services/packages/create_event_service_spec.rb25
-rw-r--r--spec/services/packages/debian/create_package_file_service_spec.rb106
-rw-r--r--spec/services/packages/debian/extract_metadata_service_spec.rb59
-rw-r--r--spec/services/packages/debian/get_or_create_incoming_service_spec.rb36
-rw-r--r--spec/services/packages/maven/find_or_create_package_service_spec.rb54
-rw-r--r--spec/services/packages/nuget/search_service_spec.rb150
-rw-r--r--spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb118
-rw-r--r--spec/services/pages/zip_directory_service_spec.rb28
-rw-r--r--spec/services/post_receive_service_spec.rb11
-rw-r--r--spec/services/projects/after_import_service_spec.rb6
-rw-r--r--spec/services/projects/container_repository/cleanup_tags_service_spec.rb116
-rw-r--r--spec/services/projects/container_repository/delete_tags_service_spec.rb4
-rw-r--r--spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb17
-rw-r--r--spec/services/projects/fork_service_spec.rb18
-rw-r--r--spec/services/projects/housekeeping_service_spec.rb134
-rw-r--r--spec/services/projects/schedule_bulk_repository_shard_moves_service_spec.rb43
-rw-r--r--spec/services/projects/update_pages_service_spec.rb13
-rw-r--r--spec/services/projects/update_service_spec.rb43
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb212
-rw-r--r--spec/services/repositories/housekeeping_service_spec.rb14
-rw-r--r--spec/services/resource_events/change_state_service_spec.rb9
-rw-r--r--spec/services/service_desk_settings/update_service_spec.rb13
-rw-r--r--spec/services/snippets/schedule_bulk_repository_shard_moves_service_spec.rb12
-rw-r--r--spec/services/snippets/update_repository_storage_service_spec.rb137
-rw-r--r--spec/services/todo_service_spec.rb258
-rw-r--r--spec/services/users/update_service_spec.rb2
-rw-r--r--spec/services/web_hook_service_spec.rb9
-rw-r--r--spec/spec_helper.rb42
-rw-r--r--spec/support/atlassian/jira_connect/schemata.rb337
-rw-r--r--spec/support/capybara.rb2
-rw-r--r--spec/support/gitlab_stubs/gitlab_ci_for_sast.yml3
-rw-r--r--spec/support/gitlab_stubs/gitlab_ci_for_sast_default_analyzers.yml15
-rw-r--r--spec/support/gitlab_stubs/gitlab_ci_for_sast_excluded_analyzers.yml14
-rw-r--r--spec/support/gitlab_stubs/gitlab_ci_includes.yml26
-rw-r--r--spec/support/helpers/database/database_helpers.rb15
-rw-r--r--spec/support/helpers/database/partitioning_helpers.rb96
-rw-r--r--spec/support/helpers/database/table_schema_helpers.rb149
-rw-r--r--spec/support/helpers/database/trigger_helpers.rb68
-rw-r--r--spec/support/helpers/database_helpers.rb13
-rw-r--r--spec/support/helpers/graphql_helpers.rb42
-rw-r--r--spec/support/helpers/multipart_helpers.rb20
-rw-r--r--spec/support/helpers/partitioning_helpers.rb94
-rw-r--r--spec/support/helpers/rack_attack_spec_helpers.rb21
-rw-r--r--spec/support/helpers/stub_experiments.rb8
-rw-r--r--spec/support/helpers/stub_feature_flags.rb4
-rw-r--r--spec/support/helpers/table_schema_helpers.rb112
-rw-r--r--spec/support/helpers/test_env.rb9
-rw-r--r--spec/support/helpers/trigger_helpers.rb66
-rw-r--r--spec/support/matchers/be_sorted.rb71
-rw-r--r--spec/support/matchers/be_valid_json.rb16
-rw-r--r--spec/support/matchers/schema_matcher.rb10
-rw-r--r--spec/support/rspec.rb4
-rw-r--r--spec/support/shared_contexts/email_shared_context.rb245
-rw-r--r--spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb48
-rw-r--r--spec/support/shared_contexts/finders/users_finder_shared_contexts.rb1
-rw-r--r--spec/support/shared_contexts/navbar_structure_context.rb1
-rw-r--r--spec/support/shared_contexts/policies/group_policy_shared_context.rb25
-rw-r--r--spec/support/shared_contexts/read_ci_configuration_shared_context.rb8
-rw-r--r--spec/support/shared_contexts/requests/api/nuget_packages_shared_context.rb10
-rw-r--r--spec/support/shared_examples/controllers/repositories/git_http_controller_shared_examples.rb24
-rw-r--r--spec/support/shared_examples/controllers/sessionless_auth_controller_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/controllers/unique_hll_events_examples.rb2
-rw-r--r--spec/support/shared_examples/features/file_uploads_shared_examples.rb24
-rw-r--r--spec/support/shared_examples/features/master_manages_access_requests_shared_example.rb10
-rw-r--r--spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb32
-rw-r--r--spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb35
-rw-r--r--spec/support/shared_examples/features/wiki/user_uses_wiki_shortcuts_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/wiki/user_views_wiki_empty_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/features/wiki/user_views_wiki_sidebar_shared_examples.rb52
-rw-r--r--spec/support/shared_examples/finders/packages/debian/distributions_finder_shared_examples.rb79
-rw-r--r--spec/support/shared_examples/finders/packages_shared_examples.rb19
-rw-r--r--spec/support/shared_examples/graphql/mutations/http_integrations_shared_examples.rb19
-rw-r--r--spec/support/shared_examples/graphql/projects/merge_request_n_plus_one_query_examples.rb6
-rw-r--r--spec/support/shared_examples/lib/banzai/filters/sanitization_filter_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/lib/gitlab/cycle_analytics/base_stage_shared_examples.rb74
-rw-r--r--spec/support/shared_examples/lib/gitlab/cycle_analytics/default_query_config_shared_examples.rb16
-rw-r--r--spec/support/shared_examples/lib/gitlab/middleware/multipart_shared_examples.rb24
-rw-r--r--spec/support/shared_examples/lib/gitlab/project_search_results_shared_examples.rb19
-rw-r--r--spec/support/shared_examples/lib/gitlab/usage_data_counters/incident_management_activity_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/metrics/sampler_shared_examples.rb27
-rw-r--r--spec/support/shared_examples/models/boards/listable_shared_examples.rb97
-rw-r--r--spec/support/shared_examples/models/concerns/can_housekeep_repository_shared_examples.rb45
-rw-r--r--spec/support/shared_examples/models/concerns/repositories/can_housekeep_repository_shared_examples.rb45
-rw-r--r--spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/models/packages/debian/architecture_shared_examples.rb48
-rw-r--r--spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb225
-rw-r--r--spec/support/shared_examples/quick_actions/merge_request/rebase_quick_action_shared_examples.rb92
-rw-r--r--spec/support/shared_examples/requests/api/boards_shared_examples.rb33
-rw-r--r--spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb69
-rw-r--r--spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb146
-rw-r--r--spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb58
-rw-r--r--spec/support/shared_examples/requests/api/repository_storage_moves_shared_examples.rb219
-rw-r--r--spec/support/shared_examples/requests/api/resolvable_discussions_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/requests/rack_attack_shared_examples.rb5
-rw-r--r--spec/support/shared_examples/requests/releases_shared_examples.rb61
-rw-r--r--spec/support/shared_examples/requests/sessionless_auth_request_shared_examples.rb84
-rw-r--r--spec/support/shared_examples/routing/legacy_path_redirect_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/serializers/pipeline_artifacts_shared_example.rb21
-rw-r--r--spec/support/shared_examples/services/boards/issues_list_service_shared_examples.rb80
-rw-r--r--spec/support/shared_examples/services/boards/items_list_service_shared_examples.rb65
-rw-r--r--spec/support/shared_examples/services/merge_request_shared_examples.rb15
-rw-r--r--spec/support/shared_examples/services/namespace_package_settings_shared_examples.rb32
-rw-r--r--spec/support/shared_examples/services/onboarding_progress_shared_examples.rb20
-rw-r--r--spec/support/shared_examples/services/packages_shared_examples.rb42
-rw-r--r--spec/support/shared_examples/services/repositories/housekeeping_shared_examples.rb118
-rw-r--r--spec/support/shared_examples/services/schedule_bulk_repository_shard_moves_shared_examples.rb44
-rw-r--r--spec/support/shared_examples/workers/schedule_bulk_repository_shard_moves_shared_examples.rb30
-rw-r--r--spec/support/shared_examples/workers/update_repository_move_shared_examples.rb39
-rw-r--r--spec/support_specs/matchers/be_sorted_spec.rb33
-rw-r--r--spec/tasks/gitlab/git_rake_spec.rb34
-rw-r--r--spec/tasks/gitlab/packages/events_rake_spec.rb4
-rw-r--r--spec/tasks/gitlab/pages_rake_spec.rb67
-rw-r--r--spec/tooling/lib/tooling/kubernetes_client_spec.rb176
-rw-r--r--spec/tooling/lib/tooling/test_file_finder_spec.rb175
-rw-r--r--spec/uploaders/packages/debian/distribution_release_file_uploader_spec.rb52
-rw-r--r--spec/uploaders/packages/package_file_uploader_spec.rb71
-rw-r--r--spec/views/layouts/_head.html.haml_spec.rb2
-rw-r--r--spec/views/layouts/header/_new_dropdown.haml_spec.rb111
-rw-r--r--spec/views/notify/pipeline_failed_email.html.haml_spec.rb2
-rw-r--r--spec/views/notify/pipeline_failed_email.text.erb_spec.rb2
-rw-r--r--spec/views/notify/pipeline_fixed_email.html.haml_spec.rb2
-rw-r--r--spec/views/notify/pipeline_fixed_email.text.erb_spec.rb2
-rw-r--r--spec/views/notify/pipeline_success_email.html.haml_spec.rb2
-rw-r--r--spec/views/notify/pipeline_success_email.text.erb_spec.rb2
-rw-r--r--spec/views/projects/settings/ci_cd/_autodevops_form.html.haml_spec.rb4
-rw-r--r--spec/views/projects/settings/operations/show.html.haml_spec.rb51
-rw-r--r--spec/views/shared/wikis/_sidebar.html.haml_spec.rb24
-rw-r--r--spec/workers/bulk_import_worker_spec.rb71
-rw-r--r--spec/workers/bulk_imports/entity_worker_spec.rb39
-rw-r--r--spec/workers/ci/pipeline_artifacts/coverage_report_worker_spec.rb (renamed from spec/workers/ci/pipelines/create_artifact_worker_spec.rb)6
-rw-r--r--spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb36
-rw-r--r--spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb82
-rw-r--r--spec/workers/experiments/record_conversion_event_worker_spec.rb35
-rw-r--r--spec/workers/expire_build_artifacts_worker_spec.rb4
-rw-r--r--spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb16
-rw-r--r--spec/workers/gitlab_performance_bar_stats_worker_spec.rb2
-rw-r--r--spec/workers/group_destroy_worker_spec.rb6
-rw-r--r--spec/workers/jira_connect/sync_branch_worker_spec.rb89
-rw-r--r--spec/workers/jira_connect/sync_deployments_worker_spec.rb60
-rw-r--r--spec/workers/jira_connect/sync_feature_flags_worker_spec.rb60
-rw-r--r--spec/workers/jira_connect/sync_merge_request_worker_spec.rb47
-rw-r--r--spec/workers/namespaces/onboarding_pipeline_created_worker_spec.rb32
-rw-r--r--spec/workers/namespaces/onboarding_user_added_worker_spec.rb10
-rw-r--r--spec/workers/project_schedule_bulk_repository_shard_moves_worker_spec.rb29
-rw-r--r--spec/workers/project_update_repository_storage_worker_spec.rb42
-rw-r--r--spec/workers/snippet_schedule_bulk_repository_shard_moves_worker_spec.rb12
-rw-r--r--spec/workers/snippet_update_repository_storage_worker_spec.rb15
1875 files changed, 35554 insertions, 18429 deletions
diff --git a/spec/controllers/admin/dev_ops_report_controller_spec.rb b/spec/controllers/admin/dev_ops_report_controller_spec.rb
index 0be30fff0c2..913921b9630 100644
--- a/spec/controllers/admin/dev_ops_report_controller_spec.rb
+++ b/spec/controllers/admin/dev_ops_report_controller_spec.rb
@@ -3,6 +3,12 @@
require 'spec_helper'
RSpec.describe Admin::DevOpsReportController do
+ describe 'show_adoption?' do
+ it 'is always false' do
+ expect(controller.show_adoption?).to be false
+ end
+ end
+
describe 'GET #show' do
context 'as admin' do
let(:user) { create(:admin) }
diff --git a/spec/controllers/admin/projects_controller_spec.rb b/spec/controllers/admin/projects_controller_spec.rb
index b5f411c9121..d81b067ffb6 100644
--- a/spec/controllers/admin/projects_controller_spec.rb
+++ b/spec/controllers/admin/projects_controller_spec.rb
@@ -77,4 +77,34 @@ RSpec.describe Admin::ProjectsController do
expect(response.body).to match(project.name)
end
end
+
+ describe 'PUT /projects/transfer/:id' do
+ let_it_be(:project, reload: true) { create(:project) }
+ let_it_be(:new_namespace) { create(:namespace) }
+
+ it 'updates namespace' do
+ put :transfer, params: { namespace_id: project.namespace.path, new_namespace_id: new_namespace.id, id: project.path }
+
+ project.reload
+
+ expect(project.namespace).to eq(new_namespace)
+ expect(response).to have_gitlab_http_status(:redirect)
+ expect(response).to redirect_to(admin_project_path(project))
+ end
+
+ context 'when project transfer fails' do
+ it 'flashes error' do
+ old_namespace = project.namespace
+
+ put :transfer, params: { namespace_id: old_namespace.path, new_namespace_id: nil, id: project.path }
+
+ project.reload
+
+ expect(project.namespace).to eq(old_namespace)
+ expect(response).to have_gitlab_http_status(:redirect)
+ expect(response).to redirect_to(admin_project_path(project))
+ expect(flash[:alert]).to eq s_('TransferProject|Please select a new namespace for your project.')
+ end
+ end
+ end
end
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb
index 9342513d224..4a729008e67 100644
--- a/spec/controllers/application_controller_spec.rb
+++ b/spec/controllers/application_controller_spec.rb
@@ -720,6 +720,49 @@ RSpec.describe ApplicationController do
end
end
+ describe '#stream_csv_headers' do
+ controller(described_class) do
+ def index
+ respond_to do |format|
+ format.csv do
+ stream_csv_headers('test.csv')
+
+ self.response_body = fixture_file_upload('spec/fixtures/csv_comma.csv')
+ end
+ end
+ end
+ end
+
+ subject { get :index, format: :csv }
+
+ before do
+ sign_in(user)
+ end
+
+ it 'sets no-cache headers', :aggregate_failures do
+ subject
+
+ expect(response.headers['Cache-Control']).to eq 'no-cache, no-store'
+ expect(response.headers['Pragma']).to eq 'no-cache'
+ expect(response.headers['Expires']).to eq 'Fri, 01 Jan 1990 00:00:00 GMT'
+ end
+
+ it 'sets stream headers', :aggregate_failures do
+ subject
+
+ expect(response.headers['Content-Length']).to be nil
+ expect(response.headers['X-Accel-Buffering']).to eq 'no'
+ expect(response.headers['Last-Modified']).to eq '0'
+ end
+
+ it 'sets the csv specific headers', :aggregate_failures do
+ subject
+
+ expect(response.headers['Content-Type']).to eq 'text/csv; charset=utf-8; header=present'
+ expect(response.headers['Content-Disposition']).to eq "attachment; filename=\"test.csv\""
+ end
+ end
+
context 'Gitlab::Session' do
controller(described_class) do
prepend_before_action do
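
The new `#stream_csv_headers` examples above assert response headers only; the helper they exercise is not part of this patch. As a reference, here is a minimal sketch of a controller concern that would satisfy those assertions — reconstructed purely from the expected header values, not taken from the GitLab codebase, with the module name invented for illustration:

# Sketch only: inferred from the header expectations in the spec above.
module StreamsCsv
  # Sets the headers needed to stream a CSV download without caching or
  # proxy buffering. Intended to be included in an ActionController class,
  # where `headers` is the response header hash.
  def stream_csv_headers(filename)
    # Never cache the streamed response.
    headers['Cache-Control'] = 'no-cache, no-store'
    headers['Pragma'] = 'no-cache'
    headers['Expires'] = 'Fri, 01 Jan 1990 00:00:00 GMT'

    # A streamed body has no known length and must not be buffered by the
    # reverse proxy.
    headers.delete('Content-Length')
    headers['X-Accel-Buffering'] = 'no'
    headers['Last-Modified'] = '0'

    # CSV-specific content type and download filename.
    headers['Content-Type'] = 'text/csv; charset=utf-8; header=present'
    headers['Content-Disposition'] = "attachment; filename=\"#{filename}\""
  end
end
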
diff --git a/spec/controllers/concerns/redis_tracking_spec.rb b/spec/controllers/concerns/redis_tracking_spec.rb
index 831f5ad7bb1..ef59adf8c1d 100644
--- a/spec/controllers/concerns/redis_tracking_spec.rb
+++ b/spec/controllers/concerns/redis_tracking_spec.rb
@@ -42,7 +42,7 @@ RSpec.describe RedisTracking do
def expect_tracking
expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
- .with(instance_of(String), 'g_compliance_approval_rules')
+ .with('g_compliance_approval_rules', values: instance_of(String))
end
def expect_no_tracking
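
The one-line change to `expect_tracking` above reorders the stubbed arguments: the updated expectation now requires the event name as the first positional argument, with the tracked value passed under a `values:` keyword. A sketch of the call shape this implies — the event name comes from the spec, the value is illustrative only:

# Call shape implied by the updated expectation above; the `values:` argument
# shown here is an illustrative placeholder.
Gitlab::UsageDataCounters::HLLRedisCounter.track_event(
  'g_compliance_approval_rules',
  values: 'visitor-cookie-id'
)
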
diff --git a/spec/controllers/concerns/spammable_actions_spec.rb b/spec/controllers/concerns/spammable_actions_spec.rb
new file mode 100644
index 00000000000..3b5b4d11a9b
--- /dev/null
+++ b/spec/controllers/concerns/spammable_actions_spec.rb
@@ -0,0 +1,198 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe SpammableActions do
+ controller(ActionController::Base) do
+ include SpammableActions
+
+ # #create is used to test spammable_params
+ # for testing purposes
+ def create
+ spam_params = spammable_params
+
+ # replace the actual request with a string in the JSON response, all we care is that it got set
+ spam_params[:request] = 'this is the request' if spam_params[:request]
+
+ # just return the params in the response so they can be verified in this fake controller spec.
+ # Normally, they are processed further by the controller action
+ render json: spam_params.to_json, status: :ok
+ end
+
+ # #update is used to test recaptcha_check_with_fallback
+ # for testing purposes
+ def update
+ should_redirect = params[:should_redirect] == 'true'
+
+ recaptcha_check_with_fallback(should_redirect) { render json: :ok }
+ end
+
+ private
+
+ def spammable_path
+ '/fake_spammable_path'
+ end
+ end
+
+ before do
+ # Ordinarily we would not stub a method on the class under test, but :ensure_spam_config_loaded!
+ # returns false in the test environment, and is also strong_memoized, so we need to stub it
+ allow(controller).to receive(:ensure_spam_config_loaded!) { true }
+ end
+
+ describe '#spammable_params' do
+ subject { post :create, format: :json, params: params }
+
+ shared_examples 'expects request param only' do
+ it do
+ subject
+
+ expect(response).to be_successful
+ expect(json_response).to eq({ 'request' => 'this is the request' })
+ end
+ end
+
+ shared_examples 'expects all spammable params' do
+ it do
+ subject
+
+ expect(response).to be_successful
+ expect(json_response['request']).to eq('this is the request')
+ expect(json_response['recaptcha_verified']).to eq(true)
+ expect(json_response['spam_log_id']).to eq('1')
+ end
+ end
+
+ let(:recaptcha_response) { nil }
+ let(:spam_log_id) { nil }
+
+ context 'when recaptcha response is not present' do
+ let(:params) do
+ {
+ spam_log_id: spam_log_id
+ }
+ end
+
+ it_behaves_like 'expects request param only'
+ end
+
+ context 'when recaptcha response is present' do
+ let(:recaptcha_response) { 'abd123' }
+ let(:params) do
+ {
+ 'g-recaptcha-response': recaptcha_response,
+ spam_log_id: spam_log_id
+ }
+ end
+
+ context 'when verify_recaptcha returns falsey' do
+ before do
+ expect(controller).to receive(:verify_recaptcha).with(
+ {
+ response: recaptcha_response
+ }) { false }
+ end
+
+ it_behaves_like 'expects request param only'
+ end
+
+ context 'when verify_recaptcha returns truthy' do
+ let(:spam_log_id) { 1 }
+
+ before do
+ expect(controller).to receive(:verify_recaptcha).with(
+ {
+ response: recaptcha_response
+ }) { true }
+ end
+
+ it_behaves_like 'expects all spammable params'
+ end
+ end
+ end
+
+ describe '#recaptcha_check_with_fallback' do
+ shared_examples 'yields to block' do
+ it do
+ subject
+
+ expect(json_response).to eq({ json: 'ok' })
+ end
+ end
+
+ let(:format) { :html }
+
+ subject { post :update, format: format, params: params }
+
+ let(:spammable) { double(:spammable) }
+ let(:should_redirect) { nil }
+ let(:params) do
+ {
+ should_redirect: should_redirect
+ }
+ end
+
+ before do
+ routes.draw { get 'update' => 'anonymous#update' }
+ allow(controller).to receive(:spammable) { spammable }
+ end
+
+ context 'when should_redirect is true and spammable is valid' do
+ let(:should_redirect) { true }
+
+ before do
+ allow(spammable).to receive(:valid?) { true }
+ end
+
+ it 'redirects to spammable_path' do
+ expect(subject).to redirect_to('/fake_spammable_path')
+ end
+ end
+
+ context 'when should_redirect is false or spammable is not valid' do
+ before do
+ allow(spammable).to receive(:valid?) { false }
+ end
+
+ # NOTE: Not adding coverage of details of render_recaptcha?, the plan is to refactor it out
+ # of this module anyway as part of adding support for the GraphQL reCAPTCHA flow.
+
+ context 'when render_recaptcha? is true' do
+ before do
+ expect(controller).to receive(:render_recaptcha?) { true }
+ end
+
+ context 'when format is :html' do
+ it 'renders :verify' do
+ expect(controller).to receive(:render).with(:verify)
+
+ subject
+ end
+ end
+
+ context 'when format is :json' do
+ let(:format) { :json }
+ let(:recaptcha_html) { '<recaptcha-html/>' }
+
+ it 'renders json with recaptcha_html' do
+ expect(controller).to receive(:render_to_string).with(
+ {
+ partial: 'shared/recaptcha_form',
+ formats: :html,
+ locals: {
+ spammable: spammable,
+ script: false,
+ has_submit: false
+ }
+ }
+ ) { recaptcha_html }
+
+ subject
+
+ expect(json_response).to eq({ 'recaptcha_html' => recaptcha_html })
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/controllers/dashboard/projects_controller_spec.rb b/spec/controllers/dashboard/projects_controller_spec.rb
index 2719b7c8a24..dcec8012f03 100644
--- a/spec/controllers/dashboard/projects_controller_spec.rb
+++ b/spec/controllers/dashboard/projects_controller_spec.rb
@@ -13,8 +13,8 @@ RSpec.describe Dashboard::ProjectsController, :aggregate_failures do
end
context 'user logged in' do
- let_it_be(:project) { create(:project) }
- let_it_be(:project2) { create(:project) }
+ let_it_be(:project) { create(:project, name: 'Project 1') }
+ let_it_be(:project2) { create(:project, name: 'Project Two') }
let(:projects) { [project, project2] }
before_all do
@@ -36,10 +36,7 @@ RSpec.describe Dashboard::ProjectsController, :aggregate_failures do
end
end
- it 'orders the projects by last activity by default' do
- project.update!(last_repository_updated_at: 3.days.ago, last_activity_at: 3.days.ago)
- project2.update!(last_repository_updated_at: 10.days.ago, last_activity_at: 10.days.ago)
-
+ it 'orders the projects by name by default' do
get :index
expect(assigns(:projects)).to eq(projects)
diff --git a/spec/controllers/dashboard/snippets_controller_spec.rb b/spec/controllers/dashboard/snippets_controller_spec.rb
index d981f738e70..016a9f53129 100644
--- a/spec/controllers/dashboard/snippets_controller_spec.rb
+++ b/spec/controllers/dashboard/snippets_controller_spec.rb
@@ -28,5 +28,24 @@ RSpec.describe Dashboard::SnippetsController do
end
it_behaves_like 'snippets sort order'
+
+ context 'when views are rendered' do
+ render_views
+
+ it 'avoids N+1 database queries' do
+ # Warming call to load everything non snippet related
+ get(:index)
+
+ project = create(:project, namespace: user.namespace)
+ create(:project_snippet, project: project, author: user)
+
+ control_count = ActiveRecord::QueryRecorder.new { get(:index) }.count
+
+ project = create(:project, namespace: user.namespace)
+ create(:project_snippet, project: project, author: user)
+
+ expect { get(:index) }.not_to exceed_query_limit(control_count)
+ end
+ end
end
end
diff --git a/spec/controllers/explore/projects_controller_spec.rb b/spec/controllers/explore/projects_controller_spec.rb
index 4ec890a528f..cfbd129388d 100644
--- a/spec/controllers/explore/projects_controller_spec.rb
+++ b/spec/controllers/explore/projects_controller_spec.rb
@@ -60,7 +60,7 @@ RSpec.describe Explore::ProjectsController do
end
shared_examples "blocks high page numbers" do
- let(:page_limit) { 200 }
+ let(:page_limit) { described_class::PAGE_LIMIT }
context "page number is too high" do
[:index, :trending, :starred].each do |endpoint|
diff --git a/spec/controllers/groups/labels_controller_spec.rb b/spec/controllers/groups/labels_controller_spec.rb
index 33041f1af9f..b2320615778 100644
--- a/spec/controllers/groups/labels_controller_spec.rb
+++ b/spec/controllers/groups/labels_controller_spec.rb
@@ -9,8 +9,6 @@ RSpec.describe Groups::LabelsController do
before do
group.add_owner(user)
- # by default FFs are enabled in specs so we turn it off
- stub_feature_flags(show_inherited_labels: false)
sign_in(user)
end
@@ -34,41 +32,12 @@ RSpec.describe Groups::LabelsController do
subgroup.add_owner(user)
end
- RSpec.shared_examples 'returns ancestor group labels' do
- it 'returns ancestor group labels' do
- get :index, params: params, format: :json
+ it 'returns ancestor group labels' do
+ params = { group_id: subgroup, only_group_labels: true }
+ get :index, params: params, format: :json
- label_ids = json_response.map {|label| label['title']}
- expect(label_ids).to match_array([group_label_1.title, subgroup_label_1.title])
- end
- end
-
- context 'when include_ancestor_groups true' do
- let(:params) { { group_id: subgroup, include_ancestor_groups: true, only_group_labels: true } }
-
- it_behaves_like 'returns ancestor group labels'
- end
-
- context 'when include_ancestor_groups false' do
- let(:params) { { group_id: subgroup, only_group_labels: true } }
-
- it 'does not return ancestor group labels', :aggregate_failures do
- get :index, params: params, format: :json
-
- label_ids = json_response.map {|label| label['title']}
- expect(label_ids).to match_array([subgroup_label_1.title])
- expect(label_ids).not_to include([group_label_1.title])
- end
- end
-
- context 'when show_inherited_labels enabled' do
- let(:params) { { group_id: subgroup } }
-
- before do
- stub_feature_flags(show_inherited_labels: true)
- end
-
- it_behaves_like 'returns ancestor group labels'
+ label_ids = json_response.map {|label| label['title']}
+ expect(label_ids).to match_array([group_label_1.title, subgroup_label_1.title])
end
end
diff --git a/spec/controllers/import/bulk_imports_controller_spec.rb b/spec/controllers/import/bulk_imports_controller_spec.rb
index 436daed0af6..d1c138617bb 100644
--- a/spec/controllers/import/bulk_imports_controller_spec.rb
+++ b/spec/controllers/import/bulk_imports_controller_spec.rb
@@ -63,9 +63,16 @@ RSpec.describe Import::BulkImportsController do
)
end
+ let(:client_params) do
+ {
+ top_level_only: true,
+ min_access_level: Gitlab::Access::MAINTAINER
+ }
+ end
+
before do
allow(controller).to receive(:client).and_return(client)
- allow(client).to receive(:get).with('groups', top_level_only: true).and_return(client_response)
+ allow(client).to receive(:get).with('groups', client_params).and_return(client_response)
end
it 'returns serialized group data' do
@@ -73,6 +80,17 @@ RSpec.describe Import::BulkImportsController do
expect(json_response).to eq({ importable_data: client_response.parsed_response }.as_json)
end
+
+ context 'when filtering' do
+ it 'returns filtered result' do
+ filter = 'test'
+ search_params = client_params.merge(search: filter)
+
+ expect(client).to receive(:get).with('groups', search_params).and_return(client_response)
+
+ get :status, format: :json, params: { filter: filter }
+ end
+ end
end
context 'when host url is local or not http' do
@@ -131,6 +149,22 @@ RSpec.describe Import::BulkImportsController do
end
end
+ describe 'GET realtime_changes' do
+ let_it_be(:bulk_import) { create(:bulk_import, :created, user: user) }
+
+ it 'returns bulk imports created by current user' do
+ get :realtime_changes
+
+ expect(json_response).to eq([{ 'id' => bulk_import.id, 'status_name' => bulk_import.status_name.to_s }])
+ end
+
+ it 'sets a Poll-Interval header' do
+ get :realtime_changes
+
+ expect(response.headers['Poll-Interval']).to eq(Import::BulkImportsController::POLLING_INTERVAL.to_s)
+ end
+ end
+
describe 'POST create' do
let(:instance_url) { "http://fake-intance" }
let(:pat) { "fake-pat" }
diff --git a/spec/controllers/metrics_controller_spec.rb b/spec/controllers/metrics_controller_spec.rb
index f350d7378dc..9fa90dde997 100644
--- a/spec/controllers/metrics_controller_spec.rb
+++ b/spec/controllers/metrics_controller_spec.rb
@@ -28,8 +28,38 @@ RSpec.describe MetricsController, :request_store do
end
end
+ shared_examples_for 'protected metrics endpoint' do |examples|
+ context 'accessed from whitelisted ip' do
+ before do
+ allow(Gitlab::RequestContext.instance).to receive(:client_ip).and_return(whitelisted_ip)
+ end
+
+ it_behaves_like examples
+ end
+
+ context 'accessed from ip in whitelisted range' do
+ before do
+ allow(Gitlab::RequestContext.instance).to receive(:client_ip).and_return(ip_in_whitelisted_range)
+ end
+
+ it_behaves_like examples
+ end
+
+ context 'accessed from not whitelisted ip' do
+ before do
+ allow(Gitlab::RequestContext.instance).to receive(:client_ip).and_return(not_whitelisted_ip)
+ end
+
+ it 'returns the expected error response' do
+ get :index
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
describe '#index' do
- shared_examples_for 'endpoint providing metrics' do
+ shared_examples_for 'providing metrics' do
it 'returns prometheus metrics' do
get :index
@@ -51,32 +81,35 @@ RSpec.describe MetricsController, :request_store do
end
end
- context 'accessed from whitelisted ip' do
- before do
- allow(Gitlab::RequestContext.instance).to receive(:client_ip).and_return(whitelisted_ip)
- end
-
- it_behaves_like 'endpoint providing metrics'
- end
+ include_examples 'protected metrics endpoint', 'providing metrics'
+ end
- context 'accessed from ip in whitelisted range' do
- before do
- allow(Gitlab::RequestContext.instance).to receive(:client_ip).and_return(ip_in_whitelisted_range)
+ describe '#system' do
+ shared_examples_for 'providing system stats' do
+ let(:summary) do
+ {
+ version: 'ruby-3.0-patch1',
+ memory_rss: 1024
+ }
end
- it_behaves_like 'endpoint providing metrics'
- end
+ it 'renders system stats JSON' do
+ expect(Prometheus::PidProvider).to receive(:worker_id).and_return('worker-0')
+ expect(Gitlab::Metrics::System).to receive(:summary).and_return(summary)
- context 'accessed from not whitelisted ip' do
- before do
- allow(Gitlab::RequestContext.instance).to receive(:client_ip).and_return(not_whitelisted_ip)
- end
-
- it 'returns the expected error response' do
- get :index
+ get :system
- expect(response).to have_gitlab_http_status(:not_found)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response_json['version']).to eq('ruby-3.0-patch1')
+ expect(response_json['worker_id']).to eq('worker-0')
+ expect(response_json['memory_rss']).to eq(1024)
end
end
+
+ include_examples 'protected metrics endpoint', 'providing system stats'
+ end
+
+ def response_json
+ Gitlab::Json.parse(response.body)
end
end
diff --git a/spec/controllers/projects/blob_controller_spec.rb b/spec/controllers/projects/blob_controller_spec.rb
index a56425c2a22..16be7394174 100644
--- a/spec/controllers/projects/blob_controller_spec.rb
+++ b/spec/controllers/projects/blob_controller_spec.rb
@@ -7,6 +7,82 @@ RSpec.describe Projects::BlobController do
let(:project) { create(:project, :public, :repository) }
+ describe "GET new" do
+ context 'with no jobs' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:file_name) { '.gitlab-ci.yml' }
+
+ def request
+ get(:new, params: { namespace_id: project.namespace, project_id: project, id: 'master', file_name: file_name } )
+ end
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+
+ stub_experiment(ci_syntax_templates: experiment_active)
+ stub_experiment_for_subject(ci_syntax_templates: in_experiment_group)
+ end
+
+ context 'when the experiment is not active' do
+ let(:experiment_active) { false }
+ let(:in_experiment_group) { false }
+
+ it 'does not record the experiment user' do
+ expect(Experiment).not_to receive(:add_user)
+
+ request
+ end
+ end
+
+ context 'when the experiment is active and the user is in the control group' do
+ let(:experiment_active) { true }
+ let(:in_experiment_group) { false }
+
+ it 'records the experiment user in the control group' do
+ expect(Experiment).to receive(:add_user)
+ .with(:ci_syntax_templates, :control, user, namespace_id: project.namespace_id)
+
+ request
+ end
+ end
+
+ context 'when the experiment is active and the user is in the experimental group' do
+ let(:experiment_active) { true }
+ let(:in_experiment_group) { true }
+
+ it 'records the experiment user in the experimental group' do
+ expect(Experiment).to receive(:add_user)
+ .with(:ci_syntax_templates, :experimental, user, namespace_id: project.namespace_id)
+
+ request
+ end
+
+ context 'when requesting a non default config file type' do
+ let(:file_name) { '.non_default_ci_config' }
+ let(:project) { create(:project, :public, :repository, ci_config_path: file_name) }
+
+ it 'records the experiment user in the experimental group' do
+ expect(Experiment).to receive(:add_user)
+ .with(:ci_syntax_templates, :experimental, user, namespace_id: project.namespace_id)
+
+ request
+ end
+ end
+
+ context 'when requesting a different file type' do
+ let(:file_name) { '.gitignore' }
+
+ it 'does not record the experiment user' do
+ expect(Experiment).not_to receive(:add_user)
+
+ request
+ end
+ end
+ end
+ end
+ end
+
describe "GET show" do
def request
get(:show, params: { namespace_id: project.namespace, project_id: project, id: id })
diff --git a/spec/controllers/projects/environments_controller_spec.rb b/spec/controllers/projects/environments_controller_spec.rb
index d1142cbd129..83ad36b217f 100644
--- a/spec/controllers/projects/environments_controller_spec.rb
+++ b/spec/controllers/projects/environments_controller_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Projects::EnvironmentsController do
include MetricsDashboardHelpers
+ include KubernetesHelpers
let_it_be(:project) { create(:project) }
let_it_be(:maintainer) { create(:user, name: 'main-dos').tap { |u| project.add_maintainer(u) } }
@@ -34,6 +35,9 @@ RSpec.describe Projects::EnvironmentsController do
context 'when requesting JSON response for folders' do
before do
+ allow_any_instance_of(Environment).to receive(:has_terminals?).and_return(true)
+ allow_any_instance_of(Environment).to receive(:rollout_status).and_return(kube_deployment_rollout_status)
+
create(:environment, project: project,
name: 'staging/review-1',
state: :available)
@@ -91,9 +95,11 @@ RSpec.describe Projects::EnvironmentsController do
it 'responds with a payload describing available environments' do
expect(environments.count).to eq 2
expect(environments.first['name']).to eq 'production'
+ expect(environments.first['latest']['rollout_status']).to be_present
expect(environments.second['name']).to eq 'staging'
expect(environments.second['size']).to eq 2
expect(environments.second['latest']['name']).to eq 'staging/review-2'
+ expect(environments.second['latest']['rollout_status']).to be_present
end
it 'contains values describing environment scopes sizes' do
diff --git a/spec/controllers/projects/feature_flags_controller_spec.rb b/spec/controllers/projects/feature_flags_controller_spec.rb
index d5fc80bd5a7..f69cc0ddfd8 100644
--- a/spec/controllers/projects/feature_flags_controller_spec.rb
+++ b/spec/controllers/projects/feature_flags_controller_spec.rb
@@ -845,413 +845,64 @@ RSpec.describe Projects::FeatureFlagsController do
put(:update, params: params, format: :json, as: :json)
end
- before do
- stub_feature_flags(
- feature_flags_legacy_read_only: false,
- feature_flags_legacy_read_only_override: false
- )
- end
-
- subject { put(:update, params: params, format: :json) }
-
- let!(:feature_flag) do
- create(:operations_feature_flag,
- :legacy_flag,
- name: 'ci_live_trace',
- active: true,
- project: project)
- end
-
- let(:params) do
- {
- namespace_id: project.namespace,
- project_id: project,
- iid: feature_flag.iid,
- operations_feature_flag: {
- name: 'ci_new_live_trace'
- }
- }
- end
-
- it 'returns 200' do
- is_expected.to have_gitlab_http_status(:ok)
- end
-
- it 'updates the name of the feature flag name' do
- subject
-
- expect(json_response['name']).to eq('ci_new_live_trace')
- end
-
- it 'matches json schema' do
- is_expected.to match_response_schema('feature_flag')
- end
-
- context 'when updates active' do
- let(:params) do
- {
- namespace_id: project.namespace,
- project_id: project,
- iid: feature_flag.iid,
- operations_feature_flag: {
- active: false
- }
- }
- end
-
- it 'updates active from true to false' do
- expect { subject }
- .to change { feature_flag.reload.active }.from(true).to(false)
- end
-
- it "does not change default scope's active" do
- expect { subject }
- .not_to change { feature_flag.default_scope.reload.active }.from(true)
- end
-
- it 'updates active from false to true when an inactive feature flag has an active scope' do
- feature_flag = create(:operations_feature_flag, project: project, name: 'my_flag', active: false)
- create(:operations_feature_flag_scope, feature_flag: feature_flag, environment_scope: 'production', active: true)
-
- put_request(feature_flag, { active: true })
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to match_response_schema('feature_flag')
- expect(json_response['active']).to eq(true)
- expect(feature_flag.reload.active).to eq(true)
- expect(feature_flag.default_scope.reload.active).to eq(false)
- end
- end
-
- context 'when user is reporter' do
- let(:user) { reporter }
-
- it 'returns 404' do
- is_expected.to have_gitlab_http_status(:not_found)
- end
- end
-
- context "when creates an additional scope for production environment" do
- let(:params) do
- {
- namespace_id: project.namespace,
- project_id: project,
- iid: feature_flag.iid,
- operations_feature_flag: {
- scopes_attributes: [{ environment_scope: 'production', active: false }]
- }
- }
- end
-
- it 'creates a production scope' do
- expect { subject }.to change { feature_flag.reload.scopes.count }.by(1)
-
- expect(json_response['scopes'].last['environment_scope']).to eq('production')
- expect(json_response['scopes'].last['active']).to be_falsy
- end
- end
-
- context "when creates a default scope" do
- let(:params) do
- {
- namespace_id: project.namespace,
- project_id: project,
- iid: feature_flag.iid,
- operations_feature_flag: {
- scopes_attributes: [{ environment_scope: '*', active: false }]
- }
- }
- end
-
- it 'returns 400' do
- is_expected.to have_gitlab_http_status(:bad_request)
- end
- end
-
- context "when updates a default scope's active value" do
- let(:params) do
- {
- namespace_id: project.namespace,
- project_id: project,
- iid: feature_flag.iid,
- operations_feature_flag: {
- scopes_attributes: [
- {
- id: feature_flag.default_scope.id,
- environment_scope: '*',
- active: false
- }
- ]
- }
- }
- end
-
- it "updates successfully" do
- subject
-
- expect(json_response['scopes'].first['environment_scope']).to eq('*')
- expect(json_response['scopes'].first['active']).to be_falsy
- end
- end
-
- context "when changes default scope's spec" do
- let(:params) do
- {
- namespace_id: project.namespace,
- project_id: project,
- iid: feature_flag.iid,
- operations_feature_flag: {
- scopes_attributes: [
- {
- id: feature_flag.default_scope.id,
- environment_scope: 'review/*'
- }
- ]
- }
- }
- end
-
- it 'returns 400' do
- is_expected.to have_gitlab_http_status(:bad_request)
- end
- end
+ context 'with a legacy feature flag' do
+ subject { put(:update, params: params, format: :json) }
- context "when destroys the default scope" do
- let(:params) do
- {
- namespace_id: project.namespace,
- project_id: project,
- iid: feature_flag.iid,
- operations_feature_flag: {
- scopes_attributes: [
- {
- id: feature_flag.default_scope.id,
- _destroy: 1
- }
- ]
- }
- }
- end
-
- it 'raises an error' do
- expect { subject }.to raise_error(ActiveRecord::ReadOnlyRecord)
+ let!(:feature_flag) do
+ create(:operations_feature_flag,
+ :legacy_flag,
+ name: 'ci_live_trace',
+ active: true,
+ project: project)
end
- end
- context "when destroys a production scope" do
- let!(:production_scope) { create_scope(feature_flag, 'production', true) }
let(:params) do
{
namespace_id: project.namespace,
project_id: project,
iid: feature_flag.iid,
operations_feature_flag: {
- scopes_attributes: [
- {
- id: production_scope.id,
- _destroy: 1
- }
- ]
+ name: 'ci_new_live_trace'
}
}
end
- it 'destroys successfully' do
- subject
-
- scopes = json_response['scopes']
- expect(scopes.any? { |scope| scope['environment_scope'] == 'production' })
- .to be_falsy
- end
- end
-
- describe "updating the strategy" do
- it 'creates a default strategy' do
- scope = create_scope(feature_flag, 'production', true, [])
-
- put_request(feature_flag, scopes_attributes: [{
- id: scope.id,
- strategies: [{ name: 'default', parameters: {} }]
- }])
+ context 'when user is reporter' do
+ let(:user) { reporter }
- expect(response).to have_gitlab_http_status(:ok)
- scope_json = json_response['scopes'].find do |s|
- s['environment_scope'] == 'production'
+ it 'returns 404' do
+ is_expected.to have_gitlab_http_status(:not_found)
end
- expect(scope_json['strategies']).to eq([{
- "name" => "default",
- "parameters" => {}
- }])
end
- it 'creates a gradualRolloutUserId strategy' do
- scope = create_scope(feature_flag, 'production', true, [])
-
- put_request(feature_flag, scopes_attributes: [{
- id: scope.id,
- strategies: [{ name: 'gradualRolloutUserId',
- parameters: { groupId: 'default', percentage: "70" } }]
- }])
-
- expect(response).to have_gitlab_http_status(:ok)
- scope_json = json_response['scopes'].find do |s|
- s['environment_scope'] == 'production'
- end
- expect(scope_json['strategies']).to eq([{
- "name" => "gradualRolloutUserId",
- "parameters" => {
- "groupId" => "default",
- "percentage" => "70"
- }
- }])
- end
-
- it 'creates a userWithId strategy' do
- scope = create_scope(feature_flag, 'production', true, [{ name: 'default', parameters: {} }])
-
- put_request(feature_flag, scopes_attributes: [{
- id: scope.id,
- strategies: [{ name: 'userWithId', parameters: { userIds: 'sam,fred' } }]
- }])
-
- expect(response).to have_gitlab_http_status(:ok)
- scope_json = json_response['scopes'].find do |s|
- s['environment_scope'] == 'production'
- end
- expect(scope_json['strategies']).to eq([{
- "name" => "userWithId",
- "parameters" => { "userIds" => "sam,fred" }
- }])
- end
-
- it 'updates an existing strategy' do
- scope = create_scope(feature_flag, 'production', true, [{ name: 'default', parameters: {} }])
-
- put_request(feature_flag, scopes_attributes: [{
- id: scope.id,
- strategies: [{ name: 'gradualRolloutUserId',
- parameters: { groupId: 'default', percentage: "50" } }]
- }])
-
- expect(response).to have_gitlab_http_status(:ok)
- scope_json = json_response['scopes'].find do |s|
- s['environment_scope'] == 'production'
- end
- expect(scope_json['strategies']).to eq([{
- "name" => "gradualRolloutUserId",
- "parameters" => {
- "groupId" => "default",
- "percentage" => "50"
+ context "when changing default scope's spec" do
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ iid: feature_flag.iid,
+ operations_feature_flag: {
+ scopes_attributes: [
+ {
+ id: feature_flag.default_scope.id,
+ environment_scope: 'review/*'
+ }
+ ]
+ }
}
- }])
- end
-
- it 'clears an existing strategy' do
- scope = create_scope(feature_flag, 'production', true, [{ name: 'default', parameters: {} }])
-
- put_request(feature_flag, scopes_attributes: [{
- id: scope.id,
- strategies: []
- }])
-
- expect(response).to have_gitlab_http_status(:ok)
- scope_json = json_response['scopes'].find do |s|
- s['environment_scope'] == 'production'
- end
- expect(scope_json['strategies']).to eq([])
- end
-
- it 'accepts multiple strategies' do
- scope = create_scope(feature_flag, 'production', true, [{ name: 'default', parameters: {} }])
-
- put_request(feature_flag, scopes_attributes: [{
- id: scope.id,
- strategies: [
- { name: 'gradualRolloutUserId', parameters: { groupId: 'mygroup', percentage: '55' } },
- { name: 'userWithId', parameters: { userIds: 'joe' } }
- ]
- }])
-
- expect(response).to have_gitlab_http_status(:ok)
- scope_json = json_response['scopes'].find do |s|
- s['environment_scope'] == 'production'
- end
- expect(scope_json['strategies'].length).to eq(2)
- expect(scope_json['strategies']).to include({
- "name" => "gradualRolloutUserId",
- "parameters" => { "groupId" => "mygroup", "percentage" => "55" }
- })
- expect(scope_json['strategies']).to include({
- "name" => "userWithId",
- "parameters" => { "userIds" => "joe" }
- })
- end
-
- it 'does not modify strategies when there is no strategies key in the params' do
- scope = create_scope(feature_flag, 'production', true, [{ name: 'default', parameters: {} }])
-
- put_request(feature_flag, scopes_attributes: [{ id: scope.id }])
-
- expect(response).to have_gitlab_http_status(:ok)
- scope_json = json_response['scopes'].find do |s|
- s['environment_scope'] == 'production'
end
- expect(scope_json['strategies']).to eq([{
- "name" => "default",
- "parameters" => {}
- }])
- end
-
- it 'leaves an existing strategy when there are no strategies in the params' do
- scope = create_scope(feature_flag, 'production', true, [{ name: 'gradualRolloutUserId',
- parameters: { groupId: 'default', percentage: '10' } }])
- put_request(feature_flag, scopes_attributes: [{ id: scope.id }])
-
- expect(response).to have_gitlab_http_status(:ok)
- scope_json = json_response['scopes'].find do |s|
- s['environment_scope'] == 'production'
+ it 'returns 400' do
+ is_expected.to have_gitlab_http_status(:bad_request)
end
- expect(scope_json['strategies']).to eq([{
- "name" => "gradualRolloutUserId",
- "parameters" => { "groupId" => "default", "percentage" => "10" }
- }])
end
- it 'does not accept extra parameters in the strategy params' do
- scope = create_scope(feature_flag, 'production', true, [{ name: 'default', parameters: {} }])
-
- put_request(feature_flag, scopes_attributes: [{
- id: scope.id,
- strategies: [{ name: 'userWithId', parameters: { userIds: 'joe', groupId: 'default' } }]
- }])
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']).to eq(["Scopes strategies parameters are invalid"])
- end
- end
-
- context 'when legacy feature flags are set to be read only' do
- it 'does not update the flag' do
- stub_feature_flags(feature_flags_legacy_read_only: true)
-
+ it 'does not update a legacy feature flag' do
put_request(feature_flag, name: 'ci_new_live_trace')
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq(["Legacy feature flags are read-only"])
end
-
- it 'updates the flag if the legacy read-only override is enabled for a particular project' do
- stub_feature_flags(
- feature_flags_legacy_read_only: true,
- feature_flags_legacy_read_only_override: project
- )
-
- put_request(feature_flag, name: 'ci_new_live_trace')
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['name']).to eq('ci_new_live_trace')
- end
end
context 'with a version 2 feature flag' do
@@ -1517,15 +1168,6 @@ RSpec.describe Projects::FeatureFlagsController do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['strategies'].first['scopes']).to eq([])
end
-
- it 'updates the flag when legacy feature flags are set to be read only' do
- stub_feature_flags(feature_flags_legacy_read_only: true)
-
- put_request(new_version_flag, name: 'some-other-name')
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(new_version_flag.reload.name).to eq('some-other-name')
- end
end
end
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index 12c8c84dd77..d3bdf1baaae 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -227,6 +227,22 @@ RSpec.describe Projects::IssuesController do
end
end
+ describe "GET #show" do
+ before do
+ sign_in(user)
+ project.add_developer(user)
+ end
+
+ it "returns issue_email_participants" do
+ participants = create_list(:issue_email_participant, 2, issue: issue)
+
+ get :show, params: { namespace_id: project.namespace, project_id: project, id: issue.iid }, format: :json
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['issue_email_participants']).to contain_exactly({ "email" => participants[0].email }, { "email" => participants[1].email })
+ end
+ end
+
describe 'GET #new' do
it 'redirects to signin if not logged in' do
get :new, params: { namespace_id: project.namespace, project_id: project }
@@ -1003,7 +1019,7 @@ RSpec.describe Projects::IssuesController do
def update_verified_issue
update_issue(
issue_params: { title: spammy_title },
- additional_params: { spam_log_id: spam_logs.last.id, recaptcha_verification: true })
+ additional_params: { spam_log_id: spam_logs.last.id, 'g-recaptcha-response': true })
end
it 'returns 200 status' do
@@ -1021,7 +1037,7 @@ RSpec.describe Projects::IssuesController do
it 'does not mark spam log as recaptcha_verified when it does not belong to current_user' do
spam_log = create(:spam_log)
- expect { update_issue(issue_params: { spam_log_id: spam_log.id, recaptcha_verification: true }) }
+ expect { update_issue(issue_params: { spam_log_id: spam_log.id, 'g-recaptcha-response': true }) }
.not_to change { SpamLog.last.recaptcha_verified }
end
end
@@ -1298,7 +1314,7 @@ RSpec.describe Projects::IssuesController do
let!(:last_spam_log) { spam_logs.last }
def post_verified_issue
- post_new_issue({}, { spam_log_id: last_spam_log.id, recaptcha_verification: true } )
+ post_new_issue({}, { spam_log_id: last_spam_log.id, 'g-recaptcha-response': true } )
end
before do
@@ -1316,7 +1332,7 @@ RSpec.describe Projects::IssuesController do
it 'does not mark spam log as recaptcha_verified when it does not belong to current_user' do
spam_log = create(:spam_log)
- expect { post_new_issue({}, { spam_log_id: spam_log.id, recaptcha_verification: true } ) }
+ expect { post_new_issue({}, { spam_log_id: spam_log.id, 'g-recaptcha-response': true } ) }
.not_to change { last_spam_log.recaptcha_verified }
end
end
diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb
index eb5e62f3d44..430808e1c63 100644
--- a/spec/controllers/projects/jobs_controller_spec.rb
+++ b/spec/controllers/projects/jobs_controller_spec.rb
@@ -675,16 +675,6 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state do
expect(response).to have_gitlab_http_status(:forbidden)
end
-
- context 'with restrict_access_to_build_debug_mode feature disabled' do
- before do
- stub_feature_flags(restrict_access_to_build_debug_mode: false)
- end
-
- it 'returns response forbidden' do
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
end
end
end
@@ -1139,18 +1129,6 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state do
expect(response).to have_gitlab_http_status(:ok)
end
-
- context 'with restrict_access_to_build_debug_mode feature disabled' do
- before do
- stub_feature_flags(restrict_access_to_build_debug_mode: false)
- end
-
- it 'returns response ok' do
- response = subject
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
end
context 'without proper permissions for debug logging on a project' do
@@ -1164,18 +1142,6 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state do
expect(response).to have_gitlab_http_status(:forbidden)
end
-
- context 'with restrict_access_to_build_debug_mode feature disabled' do
- before do
- stub_feature_flags(restrict_access_to_build_debug_mode: false)
- end
-
- it 'returns response ok' do
- response = subject
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
end
end
end
diff --git a/spec/controllers/projects/labels_controller_spec.rb b/spec/controllers/projects/labels_controller_spec.rb
index 8a3c55033cb..f452c22a5ca 100644
--- a/spec/controllers/projects/labels_controller_spec.rb
+++ b/spec/controllers/projects/labels_controller_spec.rb
@@ -84,46 +84,12 @@ RSpec.describe Projects::LabelsController do
create(:label_priority, project: project, label: subgroup_label_2, priority: 1)
end
- RSpec.shared_examples 'returns ancestor group labels' do
- it 'returns ancestor group labels', :aggregate_failures do
- get :index, params: params
+ it 'returns ancestor group labels', :aggregate_failures do
+ params = { namespace_id: project.namespace.to_param, project_id: project }
+ get :index, params: params
- expect(assigns(:labels)).to match_array([subgroup_label_1] + group_labels + project_labels)
- expect(assigns(:prioritized_labels)).to match_array([subgroup_label_2] + group_priority_labels + project_priority_labels)
- end
- end
-
- context 'when show_inherited_labels disabled' do
- before do
- stub_feature_flags(show_inherited_labels: false)
- end
-
- context 'when include_ancestor_groups false' do
- let(:params) { { namespace_id: project.namespace.to_param, project_id: project } }
-
- it 'does not return ancestor group labels', :aggregate_failures do
- get :index, params: params
-
- expect(assigns(:labels)).to match_array([subgroup_label_1] + project_labels)
- expect(assigns(:prioritized_labels)).to match_array([subgroup_label_2] + project_priority_labels)
- end
- end
-
- context 'when include_ancestor_groups true' do
- let(:params) { { namespace_id: project.namespace.to_param, project_id: project, include_ancestor_groups: true } }
-
- it_behaves_like 'returns ancestor group labels'
- end
- end
-
- context 'when show_inherited_labels enabled' do
- let(:params) { { namespace_id: project.namespace.to_param, project_id: project } }
-
- before do
- stub_feature_flags(show_inherited_labels: true)
- end
-
- it_behaves_like 'returns ancestor group labels'
+ expect(assigns(:labels)).to match_array([subgroup_label_1] + group_labels + project_labels)
+ expect(assigns(:prioritized_labels)).to match_array([subgroup_label_2] + group_priority_labels + project_priority_labels)
end
end
diff --git a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
index f4f0a9f8108..f54a07de853 100644
--- a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
@@ -193,6 +193,29 @@ RSpec.describe Projects::MergeRequests::DiffsController do
end
end
+ context "with the :default_merge_ref_for_diffs flag on" do
+ let(:diffable_merge_ref) { true }
+
+ subject do
+ go(diff_head: true,
+ diff_id: merge_request.merge_request_diff.id,
+ start_sha: merge_request.merge_request_diff.start_commit_sha)
+ end
+
+ it "correctly generates the right diff between versions" do
+ MergeRequests::MergeToRefService.new(project, merge_request.author).execute(merge_request)
+
+ expect_next_instance_of(CompareService) do |service|
+ expect(service).to receive(:execute).with(
+ project,
+ merge_request.merge_request_diff.head_commit_sha,
+ straight: true)
+ end
+
+ subject
+ end
+ end
+
context 'with diff_head param passed' do
before do
allow(merge_request).to receive(:diffable_merge_ref?)
@@ -378,6 +401,57 @@ RSpec.describe Projects::MergeRequests::DiffsController do
expect(response).to have_gitlab_http_status(:ok)
end
+
+ it 'tracks mr_diffs event' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_mr_diffs_action)
+ .with(merge_request: merge_request)
+
+ subject
+ end
+
+ context 'when DNT is enabled' do
+ before do
+ request.headers['DNT'] = '1'
+ end
+
+ it 'does not track any mr_diffs event' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .not_to receive(:track_mr_diffs_action)
+
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .not_to receive(:track_mr_diffs_single_file_action)
+
+ subject
+ end
+ end
+
+ context 'when user has view_diffs_file_by_file set to false' do
+ before do
+ user.update!(view_diffs_file_by_file: false)
+ end
+
+ it 'does not track single_file_diffs events' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .not_to receive(:track_mr_diffs_single_file_action)
+
+ subject
+ end
+ end
+
+ context 'when user has view_diffs_file_by_file set to true' do
+ before do
+ user.update!(view_diffs_file_by_file: true)
+ end
+
+ it 'tracks single_file_diffs events' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_mr_diffs_single_file_action)
+ .with(merge_request: merge_request, user: user)
+
+ subject
+ end
+ end
end
def collection_arguments(pagination_data = {})
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index e1405660ccb..be4a1504fc9 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -272,6 +272,72 @@ RSpec.describe Projects::PipelinesController do
end
end
+ describe 'GET #index' do
+ subject(:request) { get :index, params: { namespace_id: project.namespace, project_id: project } }
+
+ context 'when experiment is not active' do
+ it 'does not push tracking_data to gon' do
+ request
+
+ expect(Gon.tracking_data).to be_nil
+ end
+
+ it 'does not record experiment_user' do
+ expect { request }.not_to change(ExperimentUser, :count)
+ end
+ end
+
+ context 'when experiment is active' do
+ before do
+ stub_experiment(pipelines_empty_state: true)
+ stub_experiment_for_subject(pipelines_empty_state: true)
+ end
+
+ it 'pushes tracking_data to Gon' do
+ request
+
+ expect(Gon.experiments["pipelinesEmptyState"]).to eq(true)
+ expect(Gon.tracking_data).to match(
+ {
+ category: 'Growth::Activation::Experiment::PipelinesEmptyState',
+ action: 'view',
+ label: anything,
+ property: 'experimental_group',
+ value: anything
+ }
+ )
+ end
+
+ context 'no pipelines created and no CI set up' do
+ before do
+ stub_application_setting(auto_devops_enabled: false)
+ end
+
+ it 'records experiment_user' do
+ expect { request }.to change(ExperimentUser, :count).by(1)
+ end
+ end
+
+ context 'CI set up' do
+ it 'does not record experiment_user' do
+ expect { request }.not_to change(ExperimentUser, :count)
+ end
+ end
+
+ context 'pipelines created' do
+ let!(:pipeline) { create(:ci_pipeline, project: project) }
+
+ before do
+ stub_application_setting(auto_devops_enabled: false)
+ end
+
+ it 'does not record experiment_user' do
+ expect { request }.not_to change(ExperimentUser, :count)
+ end
+ end
+ end
+ end
+
describe 'GET show.json' do
let(:pipeline) { create(:ci_pipeline, project: project) }
diff --git a/spec/controllers/projects/project_members_controller_spec.rb b/spec/controllers/projects/project_members_controller_spec.rb
index 74311fa89f3..971eb782fa4 100644
--- a/spec/controllers/projects/project_members_controller_spec.rb
+++ b/spec/controllers/projects/project_members_controller_spec.rb
@@ -14,32 +14,137 @@ RSpec.describe Projects::ProjectMembersController do
expect(response).to have_gitlab_http_status(:ok)
end
- context 'when project belongs to group' do
- let(:user_in_group) { create(:user) }
- let(:project_in_group) { create(:project, :public, group: group) }
+ context 'project members' do
+ context 'when project belongs to group' do
+ let(:user_in_group) { create(:user) }
+ let(:project_in_group) { create(:project, :public, group: group) }
+
+ before do
+ group.add_owner(user_in_group)
+ project_in_group.add_maintainer(user)
+ sign_in(user)
+ end
+
+ it 'lists inherited project members by default' do
+ get :index, params: { namespace_id: project_in_group.namespace, project_id: project_in_group }
+
+ expect(assigns(:project_members).map(&:user_id)).to contain_exactly(user.id, user_in_group.id)
+ end
+
+ it 'lists direct project members only' do
+ get :index, params: { namespace_id: project_in_group.namespace, project_id: project_in_group, with_inherited_permissions: 'exclude' }
+
+ expect(assigns(:project_members).map(&:user_id)).to contain_exactly(user.id)
+ end
+
+ it 'lists inherited project members only' do
+ get :index, params: { namespace_id: project_in_group.namespace, project_id: project_in_group, with_inherited_permissions: 'only' }
+
+ expect(assigns(:project_members).map(&:user_id)).to contain_exactly(user_in_group.id)
+ end
+ end
+
+ context 'when invited members are present' do
+ let!(:invited_member) { create(:project_member, :invited, project: project) }
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+ end
+
+ it 'excludes the invited members from project members list' do
+ get :index, params: { namespace_id: project.namespace, project_id: project }
+
+ expect(assigns(:project_members).map(&:invite_email)).not_to contain_exactly(invited_member.invite_email)
+ end
+ end
+ end
+
+ context 'group links' do
+ let!(:project_group_link) { create(:project_group_link, project: project, group: group) }
+
+ it 'lists group links' do
+ get :index, params: { namespace_id: project.namespace, project_id: project }
+
+ expect(assigns(:group_links).map(&:id)).to contain_exactly(project_group_link.id)
+ end
+
+ context 'when `search_groups` param is present' do
+ let(:group_2) { create(:group, :public, name: 'group_2') }
+ let!(:project_group_link_2) { create(:project_group_link, project: project, group: group_2) }
+
+ it 'lists group links that match search' do
+ get :index, params: { namespace_id: project.namespace, project_id: project, search_groups: 'group_2' }
+
+ expect(assigns(:group_links).map(&:id)).to contain_exactly(project_group_link_2.id)
+ end
+ end
+ end
+
+ context 'invited members' do
+ let!(:invited_member) { create(:project_member, :invited, project: project) }
before do
- group.add_owner(user_in_group)
- project_in_group.add_maintainer(user)
+ project.add_maintainer(user)
sign_in(user)
end
- it 'lists inherited project members by default' do
- get :index, params: { namespace_id: project_in_group.namespace, project_id: project_in_group }
+ context 'when user has `admin_project_member` permissions' do
+ before do
+ allow(controller.helpers).to receive(:can_manage_project_members?).with(project).and_return(true)
+ end
+
+ it 'lists invited members' do
+ get :index, params: { namespace_id: project.namespace, project_id: project }
+
+ expect(assigns(:invited_members).map(&:invite_email)).to contain_exactly(invited_member.invite_email)
+ end
+ end
+
+ context 'when user does not have `admin_project_member` permissions' do
+ before do
+ allow(controller.helpers).to receive(:can_manage_project_members?).with(project).and_return(false)
+ end
+
+ it 'does not list invited members' do
+ get :index, params: { namespace_id: project.namespace, project_id: project }
+
+ expect(assigns(:invited_members)).to be_nil
+ end
+ end
+ end
+
+ context 'access requests' do
+ let(:access_requester_user) { create(:user) }
- expect(assigns(:project_members).map(&:user_id)).to contain_exactly(user.id, user_in_group.id)
+ before do
+ project.request_access(access_requester_user)
+ project.add_maintainer(user)
+ sign_in(user)
end
- it 'lists direct project members only' do
- get :index, params: { namespace_id: project_in_group.namespace, project_id: project_in_group, with_inherited_permissions: 'exclude' }
+ context 'when user has `admin_project_member` permissions' do
+ before do
+ allow(controller.helpers).to receive(:can_manage_project_members?).with(project).and_return(true)
+ end
+
+ it 'lists access requests' do
+ get :index, params: { namespace_id: project.namespace, project_id: project }
- expect(assigns(:project_members).map(&:user_id)).to contain_exactly(user.id)
+ expect(assigns(:requesters).map(&:user_id)).to contain_exactly(access_requester_user.id)
+ end
end
- it 'lists inherited project members only' do
- get :index, params: { namespace_id: project_in_group.namespace, project_id: project_in_group, with_inherited_permissions: 'only' }
+ context 'when user does not have `admin_project_member` permissions' do
+ before do
+ allow(controller.helpers).to receive(:can_manage_project_members?).with(project).and_return(false)
+ end
+
+ it 'does not list access requests' do
+ get :index, params: { namespace_id: project.namespace, project_id: project }
- expect(assigns(:project_members).map(&:user_id)).to contain_exactly(user_in_group.id)
+ expect(assigns(:requesters)).to be_nil
+ end
end
end
end
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index bd7ef3db8b6..a611ac16cd9 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -211,28 +211,13 @@ RSpec.describe ProjectsController do
end
end
- context 'when the storage is not available', :broken_storage do
- let_it_be(:project) { create(:project, :broken_storage) }
-
- before do
- project.add_developer(user)
- sign_in(user)
- end
-
- it 'renders a 503' do
- get :show, params: { namespace_id: project.namespace, id: project }
-
- expect(response).to have_gitlab_http_status(:service_unavailable)
- end
- end
-
context "project with empty repo" do
let_it_be(:empty_project) { create(:project_empty_repo, :public) }
before do
sign_in(user)
- allow(controller).to receive(:record_experiment_user).with(:invite_members_empty_project_version_a)
+ allow(controller).to receive(:record_experiment_user)
end
User.project_views.keys.each do |project_view|
@@ -498,14 +483,14 @@ RSpec.describe ProjectsController do
describe '#housekeeping' do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
- let(:housekeeping) { Projects::HousekeepingService.new(project) }
+ let(:housekeeping) { Repositories::HousekeepingService.new(project) }
context 'when authenticated as owner' do
before do
group.add_owner(user)
sign_in(user)
- allow(Projects::HousekeepingService).to receive(:new).with(project, :gc).and_return(housekeeping)
+ allow(Repositories::HousekeepingService).to receive(:new).with(project, :gc).and_return(housekeeping)
end
it 'forces a full garbage collection' do
@@ -616,7 +601,7 @@ RSpec.describe ProjectsController do
expect { update_project path: 'renamed_path' }
.not_to change { project.reload.path }
- expect(controller).to set_flash.now[:alert].to(s_('UpdateProject|Cannot rename project because it contains container registry tags!'))
+ expect(controller).to set_flash[:alert].to(s_('UpdateProject|Cannot rename project because it contains container registry tags!'))
expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -748,7 +733,7 @@ RSpec.describe ProjectsController do
describe '#transfer', :enable_admin_mode do
render_views
- let_it_be(:project, reload: true) { create(:project, :repository) }
+ let_it_be(:project, reload: true) { create(:project) }
let_it_be(:admin) { create(:admin) }
let_it_be(:new_namespace) { create(:namespace) }
diff --git a/spec/controllers/registrations_controller_spec.rb b/spec/controllers/registrations_controller_spec.rb
index 2fb17e56f37..737ec4f95c5 100644
--- a/spec/controllers/registrations_controller_spec.rb
+++ b/spec/controllers/registrations_controller_spec.rb
@@ -6,7 +6,6 @@ RSpec.describe RegistrationsController do
include TermsHelper
before do
- stub_feature_flags(invisible_captcha: false)
stub_application_setting(require_admin_approval_after_user_signup: false)
end
@@ -193,15 +192,10 @@ RSpec.describe RegistrationsController do
context 'when invisible captcha is enabled' do
before do
- stub_feature_flags(invisible_captcha: true)
- InvisibleCaptcha.timestamp_enabled = true
+ stub_application_setting(invisible_captcha_enabled: true)
InvisibleCaptcha.timestamp_threshold = treshold
end
- after do
- InvisibleCaptcha.timestamp_enabled = false
- end
-
let(:treshold) { 4 }
let(:session_params) { { invisible_captcha_timestamp: form_rendered_time.iso8601 } }
let(:form_rendered_time) { Time.current }
diff --git a/spec/controllers/repositories/git_http_controller_spec.rb b/spec/controllers/repositories/git_http_controller_spec.rb
index 551abf9241d..34052496871 100644
--- a/spec/controllers/repositories/git_http_controller_spec.rb
+++ b/spec/controllers/repositories/git_http_controller_spec.rb
@@ -52,7 +52,7 @@ RSpec.describe Repositories::GitHttpController do
}.from(0).to(1)
end
- it 'records a namespace onboarding progress action' do
+ it 'records an onboarding progress action' do
expect_next_instance_of(OnboardingProgressService) do |service|
expect(service).to receive(:execute).with(action: :git_read)
end
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index a15b9624c9d..3087beb1326 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe 'Database schema' do
approver_groups: %w[target_id],
approvers: %w[target_id user_id],
audit_events: %w[author_id entity_id target_id],
- audit_events_part_5fc467ac26: %w[author_id entity_id target_id],
+ audit_events_archived: %w[author_id entity_id target_id],
award_emoji: %w[awardable_id user_id],
aws_roles: %w[role_external_id],
boards: %w[milestone_id iteration_id],
diff --git a/spec/factories/alerts_service_data.rb b/spec/factories/alerts_service_data.rb
deleted file mode 100644
index 2dd1d0a714e..00000000000
--- a/spec/factories/alerts_service_data.rb
+++ /dev/null
@@ -1,8 +0,0 @@
-# frozen_string_literal: true
-
-FactoryBot.define do
- factory :alerts_service_data do
- service { association(:alerts_service) }
- token { SecureRandom.hex }
- end
-end
diff --git a/spec/factories/boards.rb b/spec/factories/boards.rb
index cef7ec37f07..9ea8f3ad06f 100644
--- a/spec/factories/boards.rb
+++ b/spec/factories/boards.rb
@@ -31,4 +31,8 @@ FactoryBot.define do
board.lists.create!(list_type: :closed)
end
end
+
+ factory :group_board, parent: :board do
+ group
+ end
end
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index c3d6e9d7569..24abad66530 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -486,6 +486,14 @@ FactoryBot.define do
end
end
+ trait :non_public_artifacts do
+ options do
+ {
+ artifacts: { public: false }
+ }
+ end
+ end
+
trait :non_playable do
status { 'created' }
self.when { 'manual' }
diff --git a/spec/factories/ci_platform_metrics.rb b/spec/factories/ci_platform_metrics.rb
index 478f9715021..6a517376d65 100644
--- a/spec/factories/ci_platform_metrics.rb
+++ b/spec/factories/ci_platform_metrics.rb
@@ -4,6 +4,6 @@ FactoryBot.define do
factory :ci_platform_metric do
recorded_at { Time.zone.now }
platform_target { generate(:title) }
- count { SecureRandom.random_number(100) }
+ count { SecureRandom.random_number(100) + 1 }
end
end
diff --git a/spec/factories/namespace_onboarding_actions.rb b/spec/factories/namespace_onboarding_actions.rb
deleted file mode 100644
index aca62013b57..00000000000
--- a/spec/factories/namespace_onboarding_actions.rb
+++ /dev/null
@@ -1,8 +0,0 @@
-# frozen_string_literal: true
-
-FactoryBot.define do
- factory :namespace_onboarding_action do
- namespace
- action { :subscription_created }
- end
-end
diff --git a/spec/factories/namespace_package_settings.rb b/spec/factories/namespace_package_settings.rb
new file mode 100644
index 00000000000..875933ce84f
--- /dev/null
+++ b/spec/factories/namespace_package_settings.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :namespace_package_setting, class: 'Namespace::PackageSetting' do
+ namespace
+
+ maven_duplicates_allowed { true }
+ maven_duplicate_exception_regex { 'SNAPSHOT' }
+
+ trait :group do
+ namespace { association(:group) }
+ end
+ end
+end
diff --git a/spec/factories/onboarding_progresses.rb b/spec/factories/onboarding_progresses.rb
new file mode 100644
index 00000000000..e39bad91b19
--- /dev/null
+++ b/spec/factories/onboarding_progresses.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :onboarding_progress do
+ namespace
+ end
+end
diff --git a/spec/factories/packages.rb b/spec/factories/packages.rb
index 5543be4aa82..31f1aabe5dd 100644
--- a/spec/factories/packages.rb
+++ b/spec/factories/packages.rb
@@ -26,9 +26,31 @@ FactoryBot.define do
sequence(:version) { |n| "1.0-#{n}" }
package_type { :debian }
+ transient do
+ without_package_files { false }
+ file_metadatum_trait { :keep }
+ end
+
+ after :create do |package, evaluator|
+ unless evaluator.without_package_files
+ create :debian_package_file, :source, evaluator.file_metadatum_trait, package: package
+ create :debian_package_file, :dsc, evaluator.file_metadatum_trait, package: package
+ create :debian_package_file, :deb, evaluator.file_metadatum_trait, package: package
+ create :debian_package_file, :deb2, evaluator.file_metadatum_trait, package: package
+ create :debian_package_file, :udeb, evaluator.file_metadatum_trait, package: package
+ create :debian_package_file, :buildinfo, evaluator.file_metadatum_trait, package: package
+ create :debian_package_file, :changes, evaluator.file_metadatum_trait, package: package
+ end
+ end
+
factory :debian_incoming do
name { 'incoming' }
version { nil }
+
+ transient do
+ without_package_files { false }
+ file_metadatum_trait { :unknown }
+ end
end
end
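
A minimal usage sketch of the transient attributes added to the Debian package factories above; the factory and attribute names come from the hunk itself, while the surrounding spec context is assumed:

  # By default the after(:create) hook also creates the seven Debian package
  # files, each with a file metadatum built from its trait.
  package = create(:debian_package)

  # Skip package file creation entirely via the transient flag.
  bare = create(:debian_package, without_package_files: true)

  # Override the metadatum trait applied to every generated file
  # (:debian_incoming already defaults to :unknown).
  incoming = create(:debian_incoming, file_metadatum_trait: :unknown)
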
diff --git a/spec/factories/packages/debian/distribution.rb b/spec/factories/packages/debian/distribution.rb
new file mode 100644
index 00000000000..2015f2923b8
--- /dev/null
+++ b/spec/factories/packages/debian/distribution.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :debian_project_distribution, class: 'Packages::Debian::ProjectDistribution' do
+ container { association(:project) }
+
+ sequence(:codename) { |n| "project-dist-#{n}" }
+
+ factory :debian_group_distribution, class: 'Packages::Debian::GroupDistribution' do
+ container { association(:group) }
+
+ sequence(:codename) { |n| "group-dist-#{n}" }
+ end
+
+ trait(:with_file) do
+ after(:build) do |distribution, evaluator|
+ distribution.file = fixture_file_upload('spec/fixtures/packages/debian/README.md')
+ end
+ end
+
+ trait(:object_storage) do
+ file_store { Packages::PackageFileUploader::Store::REMOTE }
+ end
+ end
+end
diff --git a/spec/factories/packages/debian/file_metadatum.rb b/spec/factories/packages/debian/file_metadatum.rb
new file mode 100644
index 00000000000..088d0140afe
--- /dev/null
+++ b/spec/factories/packages/debian/file_metadatum.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :debian_file_metadatum, class: 'Packages::Debian::FileMetadatum' do
+ package_file { association(:debian_package_file, without_loaded_metadatum: true) }
+ file_type { 'deb' }
+ component { 'main' }
+ architecture { 'amd64' }
+ fields { { 'a': 'b' } }
+
+ trait(:unknown) do
+ file_type { 'unknown' }
+ component { nil }
+ architecture { nil }
+ fields { nil }
+ end
+
+ trait(:source) do
+ file_type { 'source' }
+ component { 'main' }
+ architecture { nil }
+ fields { nil }
+ end
+
+ trait(:dsc) do
+ file_type { 'dsc' }
+ component { 'main' }
+ architecture { nil }
+ fields { { 'a': 'b' } }
+ end
+
+ trait(:deb) do
+ file_type { 'deb' }
+ component { 'main' }
+ architecture { 'amd64' }
+ fields { { 'a': 'b' } }
+ end
+
+ trait(:udeb) do
+ file_type { 'udeb' }
+ component { 'main' }
+ architecture { 'amd64' }
+ fields { { 'a': 'b' } }
+ end
+
+ trait(:buildinfo) do
+ file_type { 'buildinfo' }
+ component { 'main' }
+ architecture { nil }
+ fields { { 'Architecture': 'amd64 source' } }
+ end
+
+ trait(:changes) do
+ file_type { 'changes' }
+ component { nil }
+ architecture { nil }
+ fields { { 'Architecture': 'source amd64' } }
+ end
+ end
+end
diff --git a/spec/factories/packages/debian/group_architecture.rb b/spec/factories/packages/debian/group_architecture.rb
new file mode 100644
index 00000000000..2582faae4ed
--- /dev/null
+++ b/spec/factories/packages/debian/group_architecture.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :debian_group_architecture, class: 'Packages::Debian::GroupArchitecture' do
+ distribution { association(:debian_group_distribution) }
+
+ sequence(:name) { |n| "group-arch-#{n}" }
+ end
+end
diff --git a/spec/factories/packages/debian/project_architecture.rb b/spec/factories/packages/debian/project_architecture.rb
new file mode 100644
index 00000000000..d6985da4128
--- /dev/null
+++ b/spec/factories/packages/debian/project_architecture.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :debian_project_architecture, class: 'Packages::Debian::ProjectArchitecture' do
+ distribution { association(:debian_project_distribution) }
+
+ sequence(:name) { |n| "project-arch-#{n}" }
+ end
+end
diff --git a/spec/factories/packages/package_file.rb b/spec/factories/packages/package_file.rb
index bee1b2076df..c328c01ec95 100644
--- a/spec/factories/packages/package_file.rb
+++ b/spec/factories/packages/package_file.rb
@@ -92,6 +92,97 @@ FactoryBot.define do
end
end
+ factory :debian_package_file do
+ package { association(:debian_package, without_package_files: true) }
+ file_name { 'libsample0_1.2.3~alpha2_amd64.deb' }
+ file_fixture { "spec/fixtures/packages/debian/#{file_name}" }
+ file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
+ file_md5 { '12345abcde' }
+ size { 400.kilobytes }
+
+ transient do
+ without_loaded_metadatum { false }
+ file_metadatum_trait { :deb }
+ end
+
+ after :create do |package_file, evaluator|
+ unless evaluator.without_loaded_metadatum
+ create :debian_file_metadatum, evaluator.file_metadatum_trait, package_file: package_file
+ end
+ end
+
+ trait(:unknown) do
+ package { association(:debian_incoming, without_package_files: true) }
+
+ transient do
+ file_metadatum_trait { :unknown }
+ end
+ end
+
+ trait(:invalid) do
+ file_name { 'README.md' }
+ end
+
+ trait(:source) do
+ file_name { 'sample_1.2.3~alpha2.tar.xz' }
+
+ transient do
+ file_metadatum_trait { :source }
+ end
+ end
+
+ trait(:dsc) do
+ file_name { 'sample_1.2.3~alpha2.dsc' }
+
+ transient do
+ file_metadatum_trait { :dsc }
+ end
+ end
+
+ trait(:deb) do
+ file_name { 'libsample0_1.2.3~alpha2_amd64.deb' }
+
+ transient do
+ file_metadatum_trait { :deb }
+ end
+ end
+
+ trait(:deb2) do
+ file_name { 'sample-dev_1.2.3~binary_amd64.deb' }
+
+ transient do
+ file_metadatum_trait { :deb }
+ end
+ end
+
+ trait(:udeb) do
+ file_name { 'sample-udeb_1.2.3~alpha2_amd64.udeb' }
+
+ transient do
+ file_metadatum_trait { :udeb }
+ end
+ end
+
+ trait(:buildinfo) do
+ file_name { 'sample_1.2.3~alpha2_amd64.buildinfo' }
+
+ transient do
+ file_metadatum_trait { :buildinfo }
+ end
+ end
+
+ trait(:changes) do
+ file_name { 'sample_1.2.3~alpha2_amd64.changes' }
+
+ transient do
+ file_metadatum_trait { :changes }
+ end
+ end
+
+ trait(:keep) do
+ end
+ end
+
trait(:jar) do
file_fixture { 'spec/fixtures/packages/maven/my-app-1.0-20180724.124855-1.jar' }
file_name { 'my-app-1.0-20180724.124855-1.jar' }
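
A short sketch of how the new :debian_package_file traits and transients above could be exercised in a spec; the trait and attribute names are taken from the factory definition, everything else is assumed:

  # Each file trait pairs a fixture file name with the matching metadatum trait.
  changes_file = create(:debian_package_file, :changes)

  # Build the file without its metadatum, e.g. to create one manually later.
  raw_file = create(:debian_package_file, :deb, without_loaded_metadatum: true)
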
diff --git a/spec/factories/project_group_links.rb b/spec/factories/project_group_links.rb
index 5e3e83f18c1..b1b0f04d84c 100644
--- a/spec/factories/project_group_links.rb
+++ b/spec/factories/project_group_links.rb
@@ -3,7 +3,7 @@
FactoryBot.define do
factory :project_group_link do
project
- group
+ group { association(:group) }
expires_at { nil }
group_access { Gitlab::Access::DEVELOPER }
diff --git a/spec/factories/project_members.rb b/spec/factories/project_members.rb
index 0c2ffac4112..3e83ab7118c 100644
--- a/spec/factories/project_members.rb
+++ b/spec/factories/project_members.rb
@@ -15,7 +15,9 @@ FactoryBot.define do
trait(:invited) do
user_id { nil }
invite_token { 'xxx' }
- invite_email { 'email@email.com' }
+ sequence :invite_email do |n|
+ "email#{n}@email.com"
+ end
end
trait :blocked do
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 29a25e71095..f5e496080c4 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -39,11 +39,13 @@ FactoryBot.define do
group_runners_enabled { nil }
merge_pipelines_enabled { nil }
merge_trains_enabled { nil }
+ ci_keep_latest_artifact { nil }
import_status { nil }
import_jid { nil }
import_correlation_id { nil }
import_last_error { nil }
forward_deployment_enabled { nil }
+ restrict_user_defined_variables { nil }
end
before(:create) do |project, evaluator|
@@ -82,6 +84,8 @@ FactoryBot.define do
project.group_runners_enabled = evaluator.group_runners_enabled unless evaluator.group_runners_enabled.nil?
project.merge_pipelines_enabled = evaluator.merge_pipelines_enabled unless evaluator.merge_pipelines_enabled.nil?
project.merge_trains_enabled = evaluator.merge_trains_enabled unless evaluator.merge_trains_enabled.nil?
+ project.ci_keep_latest_artifact = evaluator.ci_keep_latest_artifact unless evaluator.ci_keep_latest_artifact.nil?
+ project.restrict_user_defined_variables = evaluator.restrict_user_defined_variables unless evaluator.restrict_user_defined_variables.nil?
if evaluator.import_status
import_state = project.import_state || project.build_import_state
diff --git a/spec/factories/services.rb b/spec/factories/services.rb
index 44b157014a5..18d3b2d99b7 100644
--- a/spec/factories/services.rb
+++ b/spec/factories/services.rb
@@ -38,24 +38,6 @@ FactoryBot.define do
end
end
- factory :alerts_service do
- active
- project
- type { 'AlertsService' }
-
- trait :active do
- active { true }
- end
-
- trait :inactive do
- active { false }
- end
-
- before(:create) do |service|
- service.data = build(:alerts_service_data, service: service)
- end
- end
-
factory :drone_ci_service do
project
active { true }
diff --git a/spec/factories/usage_data.rb b/spec/factories/usage_data.rb
index f933461a07a..0ed4176109a 100644
--- a/spec/factories/usage_data.rb
+++ b/spec/factories/usage_data.rb
@@ -32,8 +32,8 @@ FactoryBot.define do
create(:service, project: projects[2], type: 'CustomIssueTrackerService', active: true)
create(:project_error_tracking_setting, project: projects[0])
create(:project_error_tracking_setting, project: projects[1], enabled: false)
- create(:alerts_service, project: projects[0])
- create(:alerts_service, :inactive, project: projects[1])
+ create(:service, project: projects[0], type: 'AlertsService', active: true)
+ create(:service, project: projects[1], type: 'AlertsService', active: false)
alert_bot_issues = create_list(:incident, 2, project: projects[0], author: User.alert_bot)
create_list(:incident, 2, project: projects[1], author: User.alert_bot)
issues = create_list(:issue, 4, project: projects[0])
diff --git a/spec/factories_spec.rb b/spec/factories_spec.rb
index 1afd5bdb2ca..38ade20de28 100644
--- a/spec/factories_spec.rb
+++ b/spec/factories_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'factories' do
- include DatabaseHelpers
+ include Database::DatabaseHelpers
shared_examples 'factory' do |factory|
describe "#{factory.name} factory" do
diff --git a/spec/features/admin/admin_groups_spec.rb b/spec/features/admin/admin_groups_spec.rb
index 0e350a5e12e..a8e18385bd2 100644
--- a/spec/features/admin/admin_groups_spec.rb
+++ b/spec/features/admin/admin_groups_spec.rb
@@ -175,6 +175,7 @@ RSpec.describe 'Admin Groups' do
describe 'add admin himself to a group' do
before do
+ stub_feature_flags(invite_members_group_modal: false)
group.add_user(:user, Gitlab::Access::OWNER)
end
diff --git a/spec/features/admin/admin_runners_spec.rb b/spec/features/admin/admin_runners_spec.rb
index e16cde3fa1c..4f135b81bdf 100644
--- a/spec/features/admin/admin_runners_spec.rb
+++ b/spec/features/admin/admin_runners_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe "Admin Runners" do
create(:ci_build, pipeline: pipeline, runner_id: runner.id)
visit admin_runners_path
- expect(page).to have_text "Set up a shared Runner manually"
+ expect(page).to have_text "Set up a shared runner manually"
expect(page).to have_text "Runners currently online: 1"
end
@@ -227,7 +227,7 @@ RSpec.describe "Admin Runners" do
end
it 'has all necessary texts including no runner message' do
- expect(page).to have_text "Set up a shared Runner manually"
+ expect(page).to have_text "Set up a shared runner manually"
expect(page).to have_text "Runners currently online: 0"
expect(page).to have_text 'No runners found'
end
@@ -389,7 +389,7 @@ RSpec.describe "Admin Runners" do
let(:page_token) { find('#registration_token').text }
before do
- click_button 'Reset runners registration token'
+ click_button 'Reset registration token'
end
it 'changes registration token' do
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index 06d31b544ea..0c66775c323 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -17,10 +17,7 @@ RSpec.describe 'Admin updates settings' do
end
context 'General page' do
- let(:gitpod_feature_enabled) { true }
-
before do
- stub_feature_flags(gitpod: gitpod_feature_enabled)
visit general_admin_application_settings_path
end
@@ -224,28 +221,16 @@ RSpec.describe 'Admin updates settings' do
end
context 'Configure Gitpod' do
- context 'with feature disabled' do
- let(:gitpod_feature_enabled) { false }
-
- it 'do not show settings' do
- expect(page).not_to have_selector('#js-gitpod-settings')
+ it 'changes gitpod settings' do
+ page.within('#js-gitpod-settings') do
+ check 'Enable Gitpod integration'
+ fill_in 'Gitpod URL', with: 'https://gitpod.test/'
+ click_button 'Save changes'
end
- end
-
- context 'with feature enabled' do
- let(:gitpod_feature_enabled) { true }
- it 'changes gitpod settings' do
- page.within('#js-gitpod-settings') do
- check 'Enable Gitpod integration'
- fill_in 'Gitpod URL', with: 'https://gitpod.test/'
- click_button 'Save changes'
- end
-
- expect(page).to have_content 'Application settings saved successfully'
- expect(current_settings.gitpod_url).to eq('https://gitpod.test/')
- expect(current_settings.gitpod_enabled).to be(true)
- end
+ expect(page).to have_content 'Application settings saved successfully'
+ expect(current_settings.gitpod_url).to eq('https://gitpod.test/')
+ expect(current_settings.gitpod_enabled).to be(true)
end
end
end
diff --git a/spec/features/alert_management/user_filters_alerts_by_status_spec.rb b/spec/features/alert_management/user_filters_alerts_by_status_spec.rb
index ee516418cd6..bebbbcbf5f7 100644
--- a/spec/features/alert_management/user_filters_alerts_by_status_spec.rb
+++ b/spec/features/alert_management/user_filters_alerts_by_status_spec.rb
@@ -5,7 +5,6 @@ require 'spec_helper'
RSpec.describe 'User filters Alert Management table by status', :js do
let_it_be(:project) { create(:project) }
let_it_be(:developer) { create(:user) }
- let_it_be(:alerts_service) { create(:alerts_service, project: project) }
let_it_be(:alert1, reload: true) { create(:alert_management_alert, :triggered, project: project) }
let_it_be(:alert2, reload: true) { create(:alert_management_alert, :acknowledged, project: project) }
let_it_be(:alert3, reload: true) { create(:alert_management_alert, :acknowledged, project: project) }
diff --git a/spec/features/alert_management/user_searches_alerts_spec.rb b/spec/features/alert_management/user_searches_alerts_spec.rb
index 568321de025..3bb1b260f36 100644
--- a/spec/features/alert_management/user_searches_alerts_spec.rb
+++ b/spec/features/alert_management/user_searches_alerts_spec.rb
@@ -5,7 +5,6 @@ require 'spec_helper'
RSpec.describe 'User searches Alert Management alerts', :js do
let_it_be(:project) { create(:project) }
let_it_be(:developer) { create(:user) }
- let_it_be(:alerts_service) { create(:alerts_service, project: project) }
let_it_be(:alert) { create(:alert_management_alert, project: project, status: 'triggered') }
before_all do
diff --git a/spec/features/alert_management/user_updates_alert_status_spec.rb b/spec/features/alert_management/user_updates_alert_status_spec.rb
index 8974796662c..2d7be3a0022 100644
--- a/spec/features/alert_management/user_updates_alert_status_spec.rb
+++ b/spec/features/alert_management/user_updates_alert_status_spec.rb
@@ -5,7 +5,6 @@ require 'spec_helper'
RSpec.describe 'User updates Alert Management status', :js do
let_it_be(:project) { create(:project) }
let_it_be(:developer) { create(:user) }
- let_it_be(:alerts_service) { create(:alerts_service, project: project) }
let_it_be(:alert) { create(:alert_management_alert, project: project, status: 'triggered') }
before_all do
diff --git a/spec/features/dashboard/merge_requests_spec.rb b/spec/features/dashboard/merge_requests_spec.rb
index 0e76b5478a1..26b376be660 100644
--- a/spec/features/dashboard/merge_requests_spec.rb
+++ b/spec/features/dashboard/merge_requests_spec.rb
@@ -110,6 +110,12 @@ RSpec.describe 'Dashboard Merge Requests' do
visit merge_requests_dashboard_path(assignee_username: current_user.username)
end
+ it 'includes assigned and reviewer counts in the badges' do
+ expect(find('.merge-requests-count')).to have_content('3')
+ expect(find('.js-assigned-mr-count')).to have_content('2')
+ expect(find('.js-reviewer-mr-count')).to have_content('1')
+ end
+
it 'shows assigned merge requests' do
expect(page).to have_content(assigned_merge_request.title)
expect(page).to have_content(assigned_merge_request_from_fork.title)
diff --git a/spec/features/dashboard/projects_spec.rb b/spec/features/dashboard/projects_spec.rb
index f870adbbdb6..8705c22c41a 100644
--- a/spec/features/dashboard/projects_spec.rb
+++ b/spec/features/dashboard/projects_spec.rb
@@ -259,7 +259,7 @@ RSpec.describe 'Dashboard Projects' do
# 4. ProjectsHelper#load_pipeline_status
# 5. RendersMemberAccess#preload_max_member_access_for_collection
# 6. User#max_member_access_for_project_ids
- # 7. CommitWithPipeline#last_pipeline
+ # 7. Ci::CommitWithPipeline#last_pipeline
expect { visit dashboard_projects_path }.not_to exceed_query_limit(control_count + 7)
end
diff --git a/spec/features/groups/board_spec.rb b/spec/features/groups/board_spec.rb
index b25aa26d906..aab3f5e68d5 100644
--- a/spec/features/groups/board_spec.rb
+++ b/spec/features/groups/board_spec.rb
@@ -20,14 +20,19 @@ RSpec.describe 'Group Boards' do
page.within(find('.board', match: :first)) do
issue_title = 'New Issue'
find(:css, '.issue-count-badge-add-button').click
+
+ wait_for_requests
+
expect(find('.board-new-issue-form')).to be_visible
fill_in 'issue_title', with: issue_title
- find('.dropdown-menu-toggle').click
- wait_for_requests
+ page.within("[data-testid='project-select-dropdown']") do
+ find('button.gl-dropdown-toggle').click
+
+ find('.gl-new-dropdown-item button').click
+ end
- click_link(project.name)
click_button 'Submit issue'
expect(page).to have_content(issue_title)
diff --git a/spec/features/groups/import_export/connect_instance_spec.rb b/spec/features/groups/import_export/connect_instance_spec.rb
index c0f967fd0b9..2e1bf27ba8b 100644
--- a/spec/features/groups/import_export/connect_instance_spec.rb
+++ b/spec/features/groups/import_export/connect_instance_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe 'Import/Export - Connect to another instance', :js do
pat = 'demo-pat'
stub_path = 'stub-group'
- stub_request(:get, "%{url}/api/v4/groups?page=1&per_page=30&top_level_only=true" % { url: source_url }).to_return(
+ stub_request(:get, "%{url}/api/v4/groups?page=1&per_page=30&top_level_only=true&min_access_level=40" % { url: source_url }).to_return(
body: [{
id: 2595438,
web_url: 'https://gitlab.com/groups/auto-breakfast',
diff --git a/spec/features/groups/members/manage_groups_spec.rb b/spec/features/groups/members/manage_groups_spec.rb
index 31a2c868cac..e9bbe9de3c9 100644
--- a/spec/features/groups/members/manage_groups_spec.rb
+++ b/spec/features/groups/members/manage_groups_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe 'Groups > Members > Manage groups', :js do
let_it_be(:group_to_add) { create(:group) }
before do
+ stub_feature_flags(invite_members_group_modal: false)
group.add_owner(user)
visit group_group_members_path(group)
end
diff --git a/spec/features/groups/members/manage_members_spec.rb b/spec/features/groups/members/manage_members_spec.rb
index e6da05c4873..c27d0afba6f 100644
--- a/spec/features/groups/members/manage_members_spec.rb
+++ b/spec/features/groups/members/manage_members_spec.rb
@@ -11,9 +11,37 @@ RSpec.describe 'Groups > Members > Manage members' do
let(:group) { create(:group) }
before do
+ stub_feature_flags(invite_members_group_modal: false)
sign_in(user1)
end
+ shared_examples 'includes the correct Invite Members link' do |should_include, should_not_include|
+ it 'includes either the form or the modal trigger' do
+ group.add_owner(user1)
+
+ visit group_group_members_path(group)
+
+ expect(page).to have_selector(should_include)
+ expect(page).not_to have_selector(should_not_include)
+ end
+ end
+
+ context 'when Invite Members modal is enabled' do
+ before do
+ stub_feature_flags(invite_members_group_modal: true)
+ end
+
+ it_behaves_like 'includes the correct Invite Members link', '.js-invite-members-trigger', '.invite-users-form'
+ end
+
+ context 'when Invite Members modal is disabled' do
+ before do
+ stub_feature_flags(invite_members_group_modal: false)
+ end
+
+ it_behaves_like 'includes the correct Invite Members link', '.invite-users-form', '.js-invite-members-trigger'
+ end
+
it 'update user to owner level', :js do
group.add_owner(user1)
group.add_developer(user2)
diff --git a/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb b/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb
index de9b32e00aa..38deee547a3 100644
--- a/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb
+++ b/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe 'Groups > Members > Owner adds member with expiration date', :js
let(:new_member) { create(:user, name: 'Mary Jane') }
before do
+ stub_feature_flags(invite_members_group_modal: false)
group.add_owner(user1)
sign_in(user1)
end
diff --git a/spec/features/groups/members/master_manages_access_requests_spec.rb b/spec/features/groups/members/master_manages_access_requests_spec.rb
index 71c9b280ebe..2a17e7d2a5c 100644
--- a/spec/features/groups/members/master_manages_access_requests_spec.rb
+++ b/spec/features/groups/members/master_manages_access_requests_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe 'Groups > Members > Maintainer manages access requests' do
it_behaves_like 'Maintainer manages access requests' do
- let(:has_tabs) { true }
let(:entity) { create(:group, :public) }
let(:members_page_path) { group_group_members_path(entity) }
end
diff --git a/spec/features/groups/members/sort_members_spec.rb b/spec/features/groups/members/sort_members_spec.rb
index 68a748aa76a..03758e0d401 100644
--- a/spec/features/groups/members/sort_members_spec.rb
+++ b/spec/features/groups/members/sort_members_spec.rb
@@ -16,174 +16,92 @@ RSpec.describe 'Groups > Members > Sort members', :js do
sign_in(owner)
end
- context 'when `group_members_filtered_search` feature flag is enabled' do
- def expect_sort_by(text, sort_direction)
- within('[data-testid="members-sort-dropdown"]') do
- expect(page).to have_css('button[aria-haspopup="true"]', text: text)
- expect(page).to have_button("Sorting Direction: #{sort_direction == :asc ? 'Ascending' : 'Descending'}")
- end
- end
-
- it 'sorts by account by default' do
- visit_members_list(sort: nil)
-
- expect(first_row.text).to include(owner.name)
- expect(second_row.text).to include(developer.name)
-
- expect_sort_by('Account', :asc)
- end
-
- it 'sorts by max role ascending' do
- visit_members_list(sort: :access_level_asc)
-
- expect(first_row.text).to include(developer.name)
- expect(second_row.text).to include(owner.name)
-
- expect_sort_by('Max role', :asc)
- end
-
- it 'sorts by max role descending' do
- visit_members_list(sort: :access_level_desc)
-
- expect(first_row.text).to include(owner.name)
- expect(second_row.text).to include(developer.name)
-
- expect_sort_by('Max role', :desc)
- end
-
- it 'sorts by access granted ascending' do
- visit_members_list(sort: :last_joined)
-
- expect(first_row.text).to include(developer.name)
- expect(second_row.text).to include(owner.name)
-
- expect_sort_by('Access granted', :asc)
- end
-
- it 'sorts by access granted descending' do
- visit_members_list(sort: :oldest_joined)
-
- expect(first_row.text).to include(owner.name)
- expect(second_row.text).to include(developer.name)
-
- expect_sort_by('Access granted', :desc)
- end
-
- it 'sorts by account ascending' do
- visit_members_list(sort: :name_asc)
-
- expect(first_row.text).to include(owner.name)
- expect(second_row.text).to include(developer.name)
-
- expect_sort_by('Account', :asc)
+ def expect_sort_by(text, sort_direction)
+ within('[data-testid="members-sort-dropdown"]') do
+ expect(page).to have_css('button[aria-haspopup="true"]', text: text)
+ expect(page).to have_button("Sorting Direction: #{sort_direction == :asc ? 'Ascending' : 'Descending'}")
end
+ end
- it 'sorts by account descending' do
- visit_members_list(sort: :name_desc)
+ it 'sorts by account by default' do
+ visit_members_list(sort: nil)
- expect(first_row.text).to include(developer.name)
- expect(second_row.text).to include(owner.name)
+ expect(first_row.text).to include(owner.name)
+ expect(second_row.text).to include(developer.name)
- expect_sort_by('Account', :desc)
- end
+ expect_sort_by('Account', :asc)
+ end
- it 'sorts by last sign-in ascending', :clean_gitlab_redis_shared_state do
- visit_members_list(sort: :recent_sign_in)
+ it 'sorts by max role ascending' do
+ visit_members_list(sort: :access_level_asc)
- expect(first_row.text).to include(owner.name)
- expect(second_row.text).to include(developer.name)
+ expect(first_row.text).to include(developer.name)
+ expect(second_row.text).to include(owner.name)
- expect_sort_by('Last sign-in', :asc)
- end
+ expect_sort_by('Max role', :asc)
+ end
- it 'sorts by last sign-in descending', :clean_gitlab_redis_shared_state do
- visit_members_list(sort: :oldest_sign_in)
+ it 'sorts by max role descending' do
+ visit_members_list(sort: :access_level_desc)
- expect(first_row.text).to include(developer.name)
- expect(second_row.text).to include(owner.name)
+ expect(first_row.text).to include(owner.name)
+ expect(second_row.text).to include(developer.name)
- expect_sort_by('Last sign-in', :desc)
- end
+ expect_sort_by('Max role', :desc)
end
- context 'when `group_members_filtered_search` feature flag is disabled' do
- dropdown_toggle_selector = '[data-testid="user-sort-dropdown"] [data-testid="dropdown-toggle"]'
+ it 'sorts by access granted ascending' do
+ visit_members_list(sort: :last_joined)
- before do
- stub_feature_flags(group_members_filtered_search: false)
- end
-
- it 'sorts alphabetically by default' do
- visit_members_list(sort: nil)
-
- expect(first_row.text).to include(owner.name)
- expect(second_row.text).to include(developer.name)
- expect(page).to have_css(dropdown_toggle_selector, text: 'Name, ascending')
- end
+ expect(first_row.text).to include(developer.name)
+ expect(second_row.text).to include(owner.name)
- it 'sorts by access level ascending' do
- visit_members_list(sort: :access_level_asc)
+ expect_sort_by('Access granted', :asc)
+ end
- expect(first_row.text).to include(developer.name)
- expect(second_row.text).to include(owner.name)
- expect(page).to have_css(dropdown_toggle_selector, text: 'Access level, ascending')
- end
+ it 'sorts by access granted descending' do
+ visit_members_list(sort: :oldest_joined)
- it 'sorts by access level descending' do
- visit_members_list(sort: :access_level_desc)
+ expect(first_row.text).to include(owner.name)
+ expect(second_row.text).to include(developer.name)
- expect(first_row.text).to include(owner.name)
- expect(second_row.text).to include(developer.name)
- expect(page).to have_css(dropdown_toggle_selector, text: 'Access level, descending')
- end
+ expect_sort_by('Access granted', :desc)
+ end
- it 'sorts by last joined' do
- visit_members_list(sort: :last_joined)
+ it 'sorts by account ascending' do
+ visit_members_list(sort: :name_asc)
- expect(first_row.text).to include(developer.name)
- expect(second_row.text).to include(owner.name)
- expect(page).to have_css(dropdown_toggle_selector, text: 'Last joined')
- end
+ expect(first_row.text).to include(owner.name)
+ expect(second_row.text).to include(developer.name)
- it 'sorts by oldest joined' do
- visit_members_list(sort: :oldest_joined)
+ expect_sort_by('Account', :asc)
+ end
- expect(first_row.text).to include(owner.name)
- expect(second_row.text).to include(developer.name)
- expect(page).to have_css(dropdown_toggle_selector, text: 'Oldest joined')
- end
+ it 'sorts by account descending' do
+ visit_members_list(sort: :name_desc)
- it 'sorts by name ascending' do
- visit_members_list(sort: :name_asc)
+ expect(first_row.text).to include(developer.name)
+ expect(second_row.text).to include(owner.name)
- expect(first_row.text).to include(owner.name)
- expect(second_row.text).to include(developer.name)
- expect(page).to have_css(dropdown_toggle_selector, text: 'Name, ascending')
- end
+ expect_sort_by('Account', :desc)
+ end
- it 'sorts by name descending' do
- visit_members_list(sort: :name_desc)
+ it 'sorts by last sign-in ascending', :clean_gitlab_redis_shared_state do
+ visit_members_list(sort: :recent_sign_in)
- expect(first_row.text).to include(developer.name)
- expect(second_row.text).to include(owner.name)
- expect(page).to have_css(dropdown_toggle_selector, text: 'Name, descending')
- end
+ expect(first_row.text).to include(owner.name)
+ expect(second_row.text).to include(developer.name)
- it 'sorts by recent sign in', :clean_gitlab_redis_shared_state do
- visit_members_list(sort: :recent_sign_in)
+ expect_sort_by('Last sign-in', :asc)
+ end
- expect(first_row.text).to include(owner.name)
- expect(second_row.text).to include(developer.name)
- expect(page).to have_css(dropdown_toggle_selector, text: 'Recent sign in')
- end
+ it 'sorts by last sign-in descending', :clean_gitlab_redis_shared_state do
+ visit_members_list(sort: :oldest_sign_in)
- it 'sorts by oldest sign in', :clean_gitlab_redis_shared_state do
- visit_members_list(sort: :oldest_sign_in)
+ expect(first_row.text).to include(developer.name)
+ expect(second_row.text).to include(owner.name)
- expect(first_row.text).to include(developer.name)
- expect(second_row.text).to include(owner.name)
- expect(page).to have_css(dropdown_toggle_selector, text: 'Oldest sign in')
- end
+ expect_sort_by('Last sign-in', :desc)
end
def visit_members_list(sort:)
diff --git a/spec/features/groups/settings/ci_cd_spec.rb b/spec/features/groups/settings/ci_cd_spec.rb
index 9c2f9512b9d..b059cd8da29 100644
--- a/spec/features/groups/settings/ci_cd_spec.rb
+++ b/spec/features/groups/settings/ci_cd_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe 'Group CI/CD settings' do
let(:page_token) { find('#registration_token').text }
before do
- click_button 'Reset runners registration token'
+ click_button 'Reset registration token'
end
it 'changes registration token' do
diff --git a/spec/features/groups/settings/packages_and_registries_spec.rb b/spec/features/groups/settings/packages_and_registries_spec.rb
new file mode 100644
index 00000000000..b8ffd73335d
--- /dev/null
+++ b/spec/features/groups/settings/packages_and_registries_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Group Packages & Registries settings' do
+ include WaitForRequests
+
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+
+ before do
+ group.add_owner(user)
+ sign_in(user)
+ end
+
+ context 'when the feature flag is off' do
+ before do
+ stub_feature_flags(packages_and_registries_group_settings: false)
+ end
+
+ it 'the menu item is not visible' do
+ visit group_path(group)
+
+ settings_menu = find_settings_menu
+
+ expect(settings_menu).not_to have_content 'Packages & Registries'
+ end
+ end
+
+ context 'when the feature flag is on' do
+ it 'the menu item is visible' do
+ visit group_path(group)
+
+ settings_menu = find_settings_menu
+ expect(settings_menu).to have_content 'Packages & Registries'
+ end
+
+ it 'has a page title set' do
+ visit_settings_page
+
+ expect(page).to have_title _('Packages & Registries')
+ end
+
+ it 'sidebar menu is open' do
+ visit_settings_page
+
+ sidebar = find('.nav-sidebar')
+ expect(sidebar).to have_link _('Packages & Registries')
+ end
+ end
+
+ def find_settings_menu
+ find('ul[data-testid="group-settings-menu"]')
+ end
+
+ def visit_settings_page
+ visit group_settings_packages_and_registries_path(group)
+ end
+end
diff --git a/spec/features/incidents/incident_details_spec.rb b/spec/features/incidents/incident_details_spec.rb
index 3ec7717b649..96f8cf0062c 100644
--- a/spec/features/incidents/incident_details_spec.rb
+++ b/spec/features/incidents/incident_details_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe 'Incident details', :js do
expect(page).to have_selector('.right-sidebar[data-issuable-type="issue"]')
expect(sidebar).to have_selector('.incident-severity')
- expect(sidebar).not_to have_selector('.milestone')
+ expect(sidebar).to have_selector('.milestone')
end
end
end
diff --git a/spec/features/issues/gfm_autocomplete_spec.rb b/spec/features/issues/gfm_autocomplete_spec.rb
index 07bf821a590..9b2a11c4b0e 100644
--- a/spec/features/issues/gfm_autocomplete_spec.rb
+++ b/spec/features/issues/gfm_autocomplete_spec.rb
@@ -294,6 +294,15 @@ RSpec.describe 'GFM autocomplete', :js do
user_item = find('.atwho-view li', text: user.username)
expect(user_item).to have_content(user.username)
end
+
+ it 'does not limit quick actions autocomplete list to 5' do
+ note = find('#note-body')
+ page.within '.timeline-content-form' do
+ note.native.send_keys('/')
+ end
+
+ expect(page).to have_selector('.atwho-view li', minimum: 6, visible: true)
+ end
end
context 'assignees' do
@@ -426,36 +435,16 @@ RSpec.describe 'GFM autocomplete', :js do
visit project_issue_path(project, issue)
note = find('#note-body')
- start_comment_with_emoji(note)
+ start_comment_with_emoji(note, '.atwho-view li')
start_and_cancel_discussion
note.fill_in(with: '')
- start_comment_with_emoji(note)
+ start_comment_with_emoji(note, '.atwho-view li')
note.native.send_keys(:enter)
expect(note.value).to eql('Hello :100: ')
end
-
- def start_comment_with_emoji(note)
- note.native.send_keys('Hello :10')
-
- wait_for_requests
-
- find('.atwho-view li', text: '100')
- end
-
- def start_and_cancel_discussion
- click_button('Reply...')
-
- fill_in('note_note', with: 'Whoops!')
-
- page.accept_alert 'Are you sure you want to cancel creating this comment?' do
- click_button('Cancel')
- end
-
- wait_for_requests
- end
end
shared_examples 'autocomplete suggestions' do
@@ -599,6 +588,33 @@ RSpec.describe 'GFM autocomplete', :js do
expect(page).not_to have_selector('.tribute-container', visible: true)
end
+ it 'does not open autocomplete menu when ":" is prefixed by a number and letters' do
+ note = find('#note-body')
+
+ # Number.
+ page.within '.timeline-content-form' do
+ note.native.send_keys('7:')
+ end
+
+ expect(page).not_to have_selector('.tribute-container', visible: true)
+
+ # ASCII letter.
+ page.within '.timeline-content-form' do
+ note.set('')
+ note.native.send_keys('w:')
+ end
+
+ expect(page).not_to have_selector('.tribute-container', visible: true)
+
+ # Non-ASCII letter.
+ page.within '.timeline-content-form' do
+ note.set('')
+ note.native.send_keys('Ё:')
+ end
+
+ expect(page).not_to have_selector('.tribute-container', visible: true)
+ end
+
it 'selects the first item for assignee dropdowns' do
page.within '.timeline-content-form' do
find('#note-body').native.send_keys('@')
@@ -624,6 +640,16 @@ RSpec.describe 'GFM autocomplete', :js do
expect(find('.tribute-container ul', visible: true)).to have_content(user.name)
end
+ it 'selects the first item for non-assignee dropdowns if a query is entered' do
+ page.within '.timeline-content-form' do
+ find('#note-body').native.send_keys(':1')
+ end
+
+ wait_for_requests
+
+ expect(find('.tribute-container ul', visible: true)).to have_selector('.highlight:first-of-type')
+ end
+
context 'when autocompleting for groups' do
it 'shows the group when searching for the name of the group' do
page.within '.timeline-content-form' do
@@ -687,20 +713,34 @@ RSpec.describe 'GFM autocomplete', :js do
expect_to_wrap(false, user_item, note, user.username)
end
- it 'triggers autocomplete after selecting a quick action' do
+ it 'does not wrap for emoji values' do
note = find('#note-body')
page.within '.timeline-content-form' do
- note.native.send_keys('/as')
+ note.native.send_keys(":cartwheel_")
end
- find('.atwho-view li', text: '/assign')
- note.native.send_keys(:tab)
- note.native.send_keys(:right)
+ emoji_item = first('.tribute-container li', text: 'cartwheel_tone1', visible: true)
- wait_for_requests
+ expect_to_wrap(false, emoji_item, note, 'cartwheel_tone1')
+ end
- user_item = find('.tribute-container ul', text: user.username, visible: true)
- expect(user_item).to have_content(user.username)
+ it 'does not open autocomplete if there is no space before' do
+ page.within '.timeline-content-form' do
+ find('#note-body').native.send_keys("hello:#{user.username[0..2]}")
+ end
+
+ expect(page).not_to have_selector('.tribute-container')
+ end
+
+ it 'autocompletes for quick actions' do
+ note = find('#note-body')
+ page.within '.timeline-content-form' do
+ note.native.send_keys('/as')
+ wait_for_requests
+ note.native.send_keys(:tab)
+ end
+
+ expect(note.value).to have_text('/assign')
end
end
@@ -719,15 +759,14 @@ RSpec.describe 'GFM autocomplete', :js do
note = find('#note-body')
page.within '.timeline-content-form' do
- note.native.send_keys('/as')
+ note.native.send_keys('/assign ')
+ # The `/assign` ajax response might replace the one by `@` below causing a failed test
+ # so we need to wait for the `/assign` ajax request to finish first
+ wait_for_requests
+ note.native.send_keys('@')
+ wait_for_requests
end
- find('.atwho-view li', text: '/assign')
- note.native.send_keys(:tab)
- note.native.send_keys(:right)
-
- wait_for_requests
-
expect(find('.tribute-container ul', visible: true)).not_to have_content(user.username)
expect(find('.tribute-container ul', visible: true)).to have_content(unassigned_user.username)
end
@@ -739,12 +778,14 @@ RSpec.describe 'GFM autocomplete', :js do
page.within '.timeline-content-form' do
note.native.send_keys('/assign @user2')
note.native.send_keys(:enter)
- note.native.send_keys('/assign @')
- note.native.send_keys(:right)
+ note.native.send_keys('/assign ')
+ # The `/assign` ajax response might replace the one by `@` below causing a failed test
+ # so we need to wait for the `/assign` ajax request to finish first
+ wait_for_requests
+ note.native.send_keys('@')
+ wait_for_requests
end
- wait_for_requests
-
expect(find('.tribute-container ul', visible: true)).not_to have_content(user.username)
expect(find('.tribute-container ul', visible: true)).to have_content(unassigned_user.username)
end
@@ -824,6 +865,26 @@ RSpec.describe 'GFM autocomplete', :js do
end
end
+ context 'when other notes are destroyed' do
+ let!(:discussion) { create(:discussion_note_on_issue, noteable: issue, project: issue.project) }
+
+ # This is meant to protect against this issue https://gitlab.com/gitlab-org/gitlab/-/issues/228729
+ it 'keeps autocomplete key listeners' do
+ visit project_issue_path(project, issue)
+ note = find('#note-body')
+
+ start_comment_with_emoji(note, '.tribute-container li')
+
+ start_and_cancel_discussion
+
+ note.fill_in(with: '')
+ start_comment_with_emoji(note, '.tribute-container li')
+ note.native.send_keys(:enter)
+
+ expect(note.value).to eql('Hello :100: ')
+ end
+ end
+
shared_examples 'autocomplete suggestions' do
it 'suggests objects correctly' do
page.within '.timeline-content-form' do
@@ -913,4 +974,24 @@ RSpec.describe 'GFM autocomplete', :js do
note.native.send_keys(text)
end
end
+
+ def start_comment_with_emoji(note, selector)
+ note.native.send_keys('Hello :10')
+
+ wait_for_requests
+
+ find(selector, text: '100')
+ end
+
+ def start_and_cancel_discussion
+ click_button('Reply...')
+
+ fill_in('note_note', with: 'Whoops!')
+
+ page.accept_alert 'Are you sure you want to cancel creating this comment?' do
+ click_button('Cancel')
+ end
+
+ wait_for_requests
+ end
end
diff --git a/spec/features/issues/issue_sidebar_spec.rb b/spec/features/issues/issue_sidebar_spec.rb
index 94a1de06488..59fba5f65e0 100644
--- a/spec/features/issues/issue_sidebar_spec.rb
+++ b/spec/features/issues/issue_sidebar_spec.rb
@@ -13,256 +13,280 @@ RSpec.describe 'Issue Sidebar' do
let!(:xss_label) { create(:label, project: project, title: '<script>alert("xss");</script>') }
before do
- sign_in(user)
+ stub_incoming_email_setting(enabled: true, address: "p+%{key}@gl.ab")
end
- context 'when concerning the assignee', :js do
- let(:user2) { create(:user) }
- let(:issue2) { create(:issue, project: project, author: user2) }
-
- include_examples 'issuable invite members experiments' do
- let(:issuable_path) { project_issue_path(project, issue2) }
+ context 'when signed in' do
+ before do
+ sign_in(user)
end
- context 'when user is a developer' do
- before do
- project.add_developer(user)
- visit_issue(project, issue2)
-
- find('.block.assignee .edit-link').click
+ context 'when concerning the assignee', :js do
+ let(:user2) { create(:user) }
+ let(:issue2) { create(:issue, project: project, author: user2) }
- wait_for_requests
+ include_examples 'issuable invite members experiments' do
+ let(:issuable_path) { project_issue_path(project, issue2) }
end
- it 'shows author in assignee dropdown' do
- page.within '.dropdown-menu-user' do
- expect(page).to have_content(user2.name)
- end
- end
+ context 'when user is a developer' do
+ before do
+ project.add_developer(user)
+ visit_issue(project, issue2)
- it 'shows author when filtering assignee dropdown' do
- page.within '.dropdown-menu-user' do
- find('.dropdown-input-field').set(user2.name)
+ find('.block.assignee .edit-link').click
wait_for_requests
+ end
- expect(page).to have_content(user2.name)
+ it 'shows author in assignee dropdown' do
+ page.within '.dropdown-menu-user' do
+ expect(page).to have_content(user2.name)
+ end
end
- end
- it 'assigns yourself' do
- find('.block.assignee .dropdown-menu-toggle').click
+ it 'shows author when filtering assignee dropdown' do
+ page.within '.dropdown-menu-user' do
+ find('.dropdown-input-field').set(user2.name)
- click_button 'assign yourself'
+ wait_for_requests
- wait_for_requests
+ expect(page).to have_content(user2.name)
+ end
+ end
- find('.block.assignee .edit-link').click
+ it 'assigns yourself' do
+ find('.block.assignee .dropdown-menu-toggle').click
- page.within '.dropdown-menu-user' do
- expect(page.find('.dropdown-header')).to be_visible
- expect(page.find('.dropdown-menu-user-link.is-active')).to have_content(user.name)
- end
- end
+ click_button 'assign yourself'
- it 'keeps your filtered term after filtering and dismissing the dropdown' do
- find('.dropdown-input-field').set(user2.name)
+ wait_for_requests
- wait_for_requests
+ find('.block.assignee .edit-link').click
- page.within '.dropdown-menu-user' do
- expect(page).not_to have_content 'Unassigned'
- click_link user2.name
+ page.within '.dropdown-menu-user' do
+ expect(page.find('.dropdown-header')).to be_visible
+ expect(page.find('.dropdown-menu-user-link.is-active')).to have_content(user.name)
+ end
end
- find('.js-right-sidebar').click
- find('.block.assignee .edit-link').click
-
- expect(page.all('.dropdown-menu-user li').length).to eq(1)
- expect(find('.dropdown-input-field').value).to eq(user2.name)
- end
- end
+ it 'keeps your filtered term after filtering and dismissing the dropdown' do
+ find('.dropdown-input-field').set(user2.name)
- it 'shows label text as "Apply" when assignees are changed' do
- project.add_developer(user)
- visit_issue(project, issue2)
+ wait_for_requests
- find('.block.assignee .edit-link').click
- wait_for_requests
+ page.within '.dropdown-menu-user' do
+ expect(page).not_to have_content 'Unassigned'
+ click_link user2.name
+ end
- click_on 'Unassigned'
+ find('.js-right-sidebar').click
+ find('.block.assignee .edit-link').click
- expect(page).to have_link('Apply')
- end
- end
+ expect(page.all('.dropdown-menu-user li').length).to eq(1)
+ expect(find('.dropdown-input-field').value).to eq(user2.name)
+ end
+ end
- context 'as a allowed user' do
- before do
- project.add_developer(user)
- visit_issue(project, issue)
- end
+ it 'shows label text as "Apply" when assignees are changed' do
+ project.add_developer(user)
+ visit_issue(project, issue2)
- context 'sidebar', :js do
- it 'changes size when the screen size is smaller' do
- sidebar_selector = 'aside.right-sidebar.right-sidebar-collapsed'
- # Resize the window
- resize_screen_sm
- # Make sure the sidebar is collapsed
- find(sidebar_selector)
- expect(page).to have_css(sidebar_selector)
- # Once is collapsed let's open the sidebard and reload
- open_issue_sidebar
- refresh
- find(sidebar_selector)
- expect(page).to have_css(sidebar_selector)
- # Restore the window size as it was including the sidebar
- restore_window_size
- open_issue_sidebar
- end
+ find('.block.assignee .edit-link').click
+ wait_for_requests
- it 'escapes XSS when viewing issue labels' do
- page.within('.block.labels') do
- click_on 'Edit'
+ click_on 'Unassigned'
- expect(page).to have_content '<script>alert("xss");</script>'
- end
+ expect(page).to have_link('Apply')
end
end
- context 'editing issue labels', :js do
+ context 'as an allowed user' do
before do
- issue.update(labels: [label])
- page.within('.block.labels') do
- click_on 'Edit'
- end
+ project.add_developer(user)
+ visit_issue(project, issue)
end
- it 'shows the current set of labels' do
- page.within('.issuable-show-labels') do
- expect(page).to have_content label.title
+ context 'sidebar', :js do
+ it 'changes size when the screen size is smaller' do
+ sidebar_selector = 'aside.right-sidebar.right-sidebar-collapsed'
+ # Resize the window
+ resize_screen_sm
+ # Make sure the sidebar is collapsed
+ find(sidebar_selector)
+ expect(page).to have_css(sidebar_selector)
+ # Once it's collapsed, let's open the sidebar and reload
+ open_issue_sidebar
+ refresh
+ find(sidebar_selector)
+ expect(page).to have_css(sidebar_selector)
+ # Restore the window size as it was including the sidebar
+ restore_window_size
+ open_issue_sidebar
end
- end
- it 'shows option to create a project label' do
- page.within('.block.labels') do
- expect(page).to have_content 'Create project'
+ it 'escapes XSS when viewing issue labels' do
+ page.within('.block.labels') do
+ click_on 'Edit'
+
+ expect(page).to have_content '<script>alert("xss");</script>'
+ end
end
end
- context 'creating a project label', :js, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/27992' do
+ context 'editing issue labels', :js do
before do
+ issue.update(labels: [label])
page.within('.block.labels') do
- click_link 'Create project'
+ click_on 'Edit'
end
end
- it 'shows dropdown switches to "create label" section' do
- page.within('.block.labels') do
- expect(page).to have_content 'Create project label'
+ it 'shows the current set of labels' do
+ page.within('.issuable-show-labels') do
+ expect(page).to have_content label.title
end
end
- it 'adds new label' do
+ it 'shows option to create a project label' do
page.within('.block.labels') do
- fill_in 'new_label_name', with: 'wontfix'
- page.find('.suggest-colors a', match: :first).click
- page.find('button', text: 'Create').click
+ expect(page).to have_content 'Create project'
+ end
+ end
- page.within('.dropdown-page-one') do
- expect(page).to have_content 'wontfix'
+ context 'creating a project label', :js, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/27992' do
+ before do
+ page.within('.block.labels') do
+ click_link 'Create project'
end
end
- end
- it 'shows error message if label title is taken' do
- page.within('.block.labels') do
- fill_in 'new_label_name', with: label.title
- page.find('.suggest-colors a', match: :first).click
- page.find('button', text: 'Create').click
+ it 'shows dropdown switches to "create label" section' do
+ page.within('.block.labels') do
+ expect(page).to have_content 'Create project label'
+ end
+ end
+
+ it 'adds new label' do
+ page.within('.block.labels') do
+ fill_in 'new_label_name', with: 'wontfix'
+ page.find('.suggest-colors a', match: :first).click
+ page.find('button', text: 'Create').click
+
+ page.within('.dropdown-page-one') do
+ expect(page).to have_content 'wontfix'
+ end
+ end
+ end
- page.within('.dropdown-page-two') do
- expect(page).to have_content 'Title has already been taken'
+ it 'shows error message if label title is taken' do
+ page.within('.block.labels') do
+ fill_in 'new_label_name', with: label.title
+ page.find('.suggest-colors a', match: :first).click
+ page.find('button', text: 'Create').click
+
+ page.within('.dropdown-page-two') do
+ expect(page).to have_content 'Title has already been taken'
+ end
end
end
end
end
- end
- context 'interacting with collapsed sidebar', :js do
- collapsed_sidebar_selector = 'aside.right-sidebar.right-sidebar-collapsed'
- expanded_sidebar_selector = 'aside.right-sidebar.right-sidebar-expanded'
- confidentiality_sidebar_block = '.block.confidentiality'
- lock_sidebar_block = '.block.lock'
- collapsed_sidebar_block_icon = '.sidebar-collapsed-icon'
+ context 'interacting with collapsed sidebar', :js do
+ collapsed_sidebar_selector = 'aside.right-sidebar.right-sidebar-collapsed'
+ expanded_sidebar_selector = 'aside.right-sidebar.right-sidebar-expanded'
+ confidentiality_sidebar_block = '.block.confidentiality'
+ lock_sidebar_block = '.block.lock'
+ collapsed_sidebar_block_icon = '.sidebar-collapsed-icon'
- before do
- resize_screen_sm
- end
+ before do
+ resize_screen_sm
+ end
- it 'confidentiality block expands then collapses sidebar' do
- expect(page).to have_css(collapsed_sidebar_selector)
+ it 'confidentiality block expands then collapses sidebar' do
+ expect(page).to have_css(collapsed_sidebar_selector)
- page.within(confidentiality_sidebar_block) do
- find(collapsed_sidebar_block_icon).click
+ page.within(confidentiality_sidebar_block) do
+ find(collapsed_sidebar_block_icon).click
+ end
+
+ expect(page).to have_css(expanded_sidebar_selector)
+
+ page.within(confidentiality_sidebar_block) do
+ page.find('button', text: 'Cancel').click
+ end
+
+ expect(page).to have_css(collapsed_sidebar_selector)
end
- expect(page).to have_css(expanded_sidebar_selector)
+ it 'lock block expands then collapses sidebar' do
+ expect(page).to have_css(collapsed_sidebar_selector)
+
+ page.within(lock_sidebar_block) do
+ find(collapsed_sidebar_block_icon).click
+ end
+
+ expect(page).to have_css(expanded_sidebar_selector)
+
+ page.within(lock_sidebar_block) do
+ page.find('button', text: 'Cancel').click
+ end
- page.within(confidentiality_sidebar_block) do
- page.find('button', text: 'Cancel').click
+ expect(page).to have_css(collapsed_sidebar_selector)
end
+ end
+ end
- expect(page).to have_css(collapsed_sidebar_selector)
+ context 'as a guest' do
+ before do
+ project.add_guest(user)
+ visit_issue(project, issue)
end
- it 'lock block expands then collapses sidebar' do
- expect(page).to have_css(collapsed_sidebar_selector)
+ it 'does not have an option to edit labels' do
+ expect(page).not_to have_selector('.block.labels .js-sidebar-dropdown-toggle')
+ end
- page.within(lock_sidebar_block) do
- find(collapsed_sidebar_block_icon).click
+ context 'sidebar', :js do
+ it 'finds issue copy forwarding email' do
+ expect(find('[data-qa-selector="copy-forward-email"]').text).to eq "Issue email: #{issue.creatable_note_email_address(user)}"
end
+ end
- expect(page).to have_css(expanded_sidebar_selector)
+ context 'interacting with collapsed sidebar', :js do
+ collapsed_sidebar_selector = 'aside.right-sidebar.right-sidebar-collapsed'
+ expanded_sidebar_selector = 'aside.right-sidebar.right-sidebar-expanded'
+ lock_sidebar_block = '.block.lock'
+ lock_button = '.block.lock .btn-close'
+ collapsed_sidebar_block_icon = '.sidebar-collapsed-icon'
- page.within(lock_sidebar_block) do
- page.find('button', text: 'Cancel').click
+ before do
+ resize_screen_sm
end
- expect(page).to have_css(collapsed_sidebar_selector)
- end
- end
- end
+ it 'expands then does not show the lock dialog form' do
+ expect(page).to have_css(collapsed_sidebar_selector)
- context 'as a guest' do
- before do
- project.add_guest(user)
- visit_issue(project, issue)
- end
+ page.within(lock_sidebar_block) do
+ find(collapsed_sidebar_block_icon).click
+ end
- it 'does not have a option to edit labels' do
- expect(page).not_to have_selector('.block.labels .js-sidebar-dropdown-toggle')
+ expect(page).to have_css(expanded_sidebar_selector)
+ expect(page).not_to have_selector(lock_button)
+ end
+ end
end
+ end
- context 'interacting with collapsed sidebar', :js do
- collapsed_sidebar_selector = 'aside.right-sidebar.right-sidebar-collapsed'
- expanded_sidebar_selector = 'aside.right-sidebar.right-sidebar-expanded'
- lock_sidebar_block = '.block.lock'
- lock_button = '.block.lock .btn-close'
- collapsed_sidebar_block_icon = '.sidebar-collapsed-icon'
-
+ context 'when not signed in' do
+ context 'sidebar', :js do
before do
- resize_screen_sm
+ visit_issue(project, issue)
end
- it 'expands then does not show the lock dialog form' do
- expect(page).to have_css(collapsed_sidebar_selector)
-
- page.within(lock_sidebar_block) do
- find(collapsed_sidebar_block_icon).click
- end
-
- expect(page).to have_css(expanded_sidebar_selector)
- expect(page).not_to have_selector(lock_button)
+ it 'does not find issue email' do
+ expect(page).not_to have_selector('[data-qa-selector="copy-forward-email"]')
end
end
end
diff --git a/spec/features/issues/issue_state_spec.rb b/spec/features/issues/issue_state_spec.rb
index 0ef6eb56dff..d5a115433aa 100644
--- a/spec/features/issues/issue_state_spec.rb
+++ b/spec/features/issues/issue_state_spec.rb
@@ -13,10 +13,13 @@ RSpec.describe 'issue state', :js do
shared_examples 'issue closed' do |selector|
it 'can close an issue' do
+ wait_for_requests
+
expect(find('.status-box')).to have_content 'Open'
within selector do
click_button 'Close issue'
+ wait_for_requests
end
expect(find('.status-box')).to have_content 'Closed'
@@ -25,17 +28,20 @@ RSpec.describe 'issue state', :js do
shared_examples 'issue reopened' do |selector|
it 'can reopen an issue' do
+ wait_for_requests
+
expect(find('.status-box')).to have_content 'Closed'
within selector do
click_button 'Reopen issue'
+ wait_for_requests
end
expect(find('.status-box')).to have_content 'Open'
end
end
- describe 'when open' do
+ describe 'when open', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/297348' do
context 'when clicking the top `Close issue` button', :aggregate_failures do
let(:open_issue) { create(:issue, project: project) }
@@ -57,7 +63,7 @@ RSpec.describe 'issue state', :js do
end
end
- describe 'when closed' do
+ describe 'when closed', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/297201' do
context 'when clicking the top `Reopen issue` button', :aggregate_failures do
let(:closed_issue) { create(:issue, project: project, state: 'closed') }
diff --git a/spec/features/issues/user_comments_on_issue_spec.rb b/spec/features/issues/user_comments_on_issue_spec.rb
index 005d45d9c92..24a261f592b 100644
--- a/spec/features/issues/user_comments_on_issue_spec.rb
+++ b/spec/features/issues/user_comments_on_issue_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe "User comments on issue", :js do
let(:user) { create(:user) }
before do
+ stub_feature_flags(tribute_autocomplete: false)
project.add_guest(user)
sign_in(user)
diff --git a/spec/features/issues/user_creates_issue_spec.rb b/spec/features/issues/user_creates_issue_spec.rb
index 668b4265948..98f9ed6c6a2 100644
--- a/spec/features/issues/user_creates_issue_spec.rb
+++ b/spec/features/issues/user_creates_issue_spec.rb
@@ -223,8 +223,8 @@ RSpec.describe "User creates issue" do
expect(page).not_to have_selector('.epic-dropdown-container')
end
- it 'hides the milestone select' do
- expect(page).not_to have_selector('.qa-issuable-milestone-dropdown')
+ it 'shows the milestone select' do
+ expect(page).to have_selector('.qa-issuable-milestone-dropdown')
end
it 'hides the weight input' do
diff --git a/spec/features/merge_request/user_closes_reopens_merge_request_state_spec.rb b/spec/features/merge_request/user_closes_reopens_merge_request_state_spec.rb
index 6376f9ab5fd..2b94c072c8b 100644
--- a/spec/features/merge_request/user_closes_reopens_merge_request_state_spec.rb
+++ b/spec/features/merge_request/user_closes_reopens_merge_request_state_spec.rb
@@ -55,7 +55,7 @@ RSpec.describe 'User closes/reopens a merge request', :js do
end
end
- describe 'when closed' do
+ describe 'when closed', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/297500' do
context 'when clicking the top `Reopen merge request` link', :aggregate_failures do
let(:closed_merge_request) { create(:merge_request, source_project: project, target_project: project, state: 'closed') }
diff --git a/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb b/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb
index 782a7e3bfb6..ac0c66524f0 100644
--- a/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb
+++ b/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb
@@ -69,7 +69,8 @@ RSpec.describe 'Batch diffs', :js do
end
context 'which is in at least page 2 of the batched pages of diffs' do
- it 'scrolls to the correct discussion' do
+ it 'scrolls to the correct discussion',
+ quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/293814' } do
page.within('.diff-file.file-holder:last-of-type') do
click_link('just now')
end
diff --git a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
index cb7c952dfe4..b86586d53e2 100644
--- a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
+++ b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
@@ -228,6 +228,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
page.find('.discussion-next-btn').click
end
+ expect(page).to have_button('Resolve thread', visible: true)
expect(page.evaluate_script("window.pageYOffset")).to be > 0
end
diff --git a/spec/features/merge_request/user_scrolls_to_note_on_load_spec.rb b/spec/features/merge_request/user_scrolls_to_note_on_load_spec.rb
index d9950f5504b..56517a97716 100644
--- a/spec/features/merge_request/user_scrolls_to_note_on_load_spec.rb
+++ b/spec/features/merge_request/user_scrolls_to_note_on_load_spec.rb
@@ -17,15 +17,16 @@ RSpec.describe 'Merge request > User scrolls to note on load', :js do
it 'scrolls note into view' do
visit "#{project_merge_request_path(project, merge_request)}#{fragment_id}"
- wait_for_requests
+ wait_for_all_requests
+
+ expect(page).to have_selector("#{fragment_id}")
- page_height = page.current_window.size[1]
page_scroll_y = page.evaluate_script("window.scrollY")
- fragment_position_top = page.evaluate_script("Math.round($('#{fragment_id}').offset().top)")
+ fragment_position_top = page.evaluate_script("Math.round(document.querySelector('#{fragment_id}').getBoundingClientRect().top + window.pageYOffset)")
expect(find(fragment_id).visible?).to eq true
expect(fragment_position_top).to be >= page_scroll_y
- expect(fragment_position_top).to be < (page_scroll_y + page_height)
+ expect(page.evaluate_script("window.pageYOffset")).to be > 0
end
it 'renders un-collapsed notes with diff' do
diff --git a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
index 6647a4e9291..d9743f6f330 100644
--- a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'sees branch pipelines and detached merge request pipelines in correct order' do
page.within('.ci-table') do
expect(page).to have_selector('.ci-pending', count: 2)
- expect(first('.js-pipeline-url-link')).to have_content("##{detached_merge_request_pipeline.id}")
+ expect(first('[data-testid="pipeline-url-link"]')).to have_content("##{detached_merge_request_pipeline.id}")
end
end
@@ -97,16 +97,16 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
page.within('.ci-table') do
expect(page).to have_selector('.ci-pending', count: 4)
- expect(all('.js-pipeline-url-link')[0])
+ expect(all('[data-testid="pipeline-url-link"]')[0])
.to have_content("##{detached_merge_request_pipeline_2.id}")
- expect(all('.js-pipeline-url-link')[1])
+ expect(all('[data-testid="pipeline-url-link"]')[1])
.to have_content("##{detached_merge_request_pipeline.id}")
- expect(all('.js-pipeline-url-link')[2])
+ expect(all('[data-testid="pipeline-url-link"]')[2])
.to have_content("##{push_pipeline_2.id}")
- expect(all('.js-pipeline-url-link')[3])
+ expect(all('[data-testid="pipeline-url-link"]')[3])
.to have_content("##{push_pipeline.id}")
end
end
@@ -197,7 +197,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'sees a branch pipeline in pipeline tab' do
page.within('.ci-table') do
expect(page).to have_selector('.ci-pending', count: 1)
- expect(first('.js-pipeline-url-link')).to have_content("##{push_pipeline.id}")
+ expect(first('[data-testid="pipeline-url-link"]')).to have_content("##{push_pipeline.id}")
end
end
@@ -246,7 +246,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'sees branch pipelines and detached merge request pipelines in correct order' do
page.within('.ci-table') do
expect(page).to have_selector('.ci-pending', count: 2)
- expect(first('.js-pipeline-url-link')).to have_content("##{detached_merge_request_pipeline.id}")
+ expect(first('[data-testid="pipeline-url-link"]')).to have_content("##{detached_merge_request_pipeline.id}")
end
end
@@ -287,16 +287,16 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
page.within('.ci-table') do
expect(page).to have_selector('.ci-pending', count: 4)
- expect(all('.js-pipeline-url-link')[0])
+ expect(all('[data-testid="pipeline-url-link"]')[0])
.to have_content("##{detached_merge_request_pipeline_2.id}")
- expect(all('.js-pipeline-url-link')[1])
+ expect(all('[data-testid="pipeline-url-link"]')[1])
.to have_content("##{detached_merge_request_pipeline.id}")
- expect(all('.js-pipeline-url-link')[2])
+ expect(all('[data-testid="pipeline-url-link"]')[2])
.to have_content("##{push_pipeline_2.id}")
- expect(all('.js-pipeline-url-link')[3])
+ expect(all('[data-testid="pipeline-url-link"]')[3])
.to have_content("##{push_pipeline.id}")
end
end
diff --git a/spec/features/merge_request/user_sees_merge_widget_spec.rb b/spec/features/merge_request/user_sees_merge_widget_spec.rb
index 0e8012f161f..c2b2ada47be 100644
--- a/spec/features/merge_request/user_sees_merge_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_widget_spec.rb
@@ -605,11 +605,14 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
within(".js-report-section-container") do
click_button 'addTest'
-
- expect(page).to have_content('6.66')
- expect(page).to have_content(sample_java_failed_message.gsub(/\s+/, ' ').strip)
end
end
+
+ within("#modal-mrwidget-reports") do
+ expect(page).to have_content('addTest')
+ expect(page).to have_content('6.66')
+ expect(page).to have_content(sample_java_failed_message.gsub(/\s+/, ' ').strip)
+ end
end
end
end
@@ -650,11 +653,14 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
within(".js-report-section-container") do
click_button 'Test#sum when a is 1 and b is 3 returns summary'
-
- expect(page).to have_content('2.22')
- expect(page).to have_content(sample_rspec_failed_message.gsub(/\s+/, ' ').strip)
end
end
+
+ within("#modal-mrwidget-reports") do
+ expect(page).to have_content('Test#sum when a is 1 and b is 3 returns summary')
+ expect(page).to have_content('2.22')
+ expect(page).to have_content(sample_rspec_failed_message.gsub(/\s+/, ' ').strip)
+ end
end
end
end
@@ -694,10 +700,13 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
within(".js-report-section-container") do
click_button 'addTest'
-
- expect(page).to have_content('5.55')
end
end
+
+ within("#modal-mrwidget-reports") do
+ expect(page).to have_content('addTest')
+ expect(page).to have_content('5.55')
+ end
end
end
end
@@ -738,10 +747,13 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
within(".js-report-section-container") do
click_button 'addTest'
-
- expect(page).to have_content('8.88')
end
end
+
+ within("#modal-mrwidget-reports") do
+ expect(page).to have_content('addTest')
+ expect(page).to have_content('8.88')
+ end
end
end
end
@@ -782,10 +794,13 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
within(".js-report-section-container") do
click_button 'Test#sum when a is 4 and b is 4 returns summary'
-
- expect(page).to have_content('4.44')
end
end
+
+ within("#modal-mrwidget-reports") do
+ expect(page).to have_content('Test#sum when a is 4 and b is 4 returns summary')
+ expect(page).to have_content('4.44')
+ end
end
end
end
@@ -825,10 +840,13 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
within(".js-report-section-container") do
click_button 'addTest'
-
- expect(page).to have_content('5.55')
end
end
+
+ within("#modal-mrwidget-reports") do
+ expect(page).to have_content('addTest')
+ expect(page).to have_content('5.55')
+ end
end
end
end
diff --git a/spec/features/merge_request/user_sees_pipelines_spec.rb b/spec/features/merge_request/user_sees_pipelines_spec.rb
index 107fc002ebd..77d2cb77ae3 100644
--- a/spec/features/merge_request/user_sees_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_pipelines_spec.rb
@@ -184,7 +184,7 @@ RSpec.describe 'Merge request > User sees pipelines', :js do
page.within(first('.commit')) do
page.within('.pipeline-tags') do
- expect(page.find('.js-pipeline-url-link')[:href]).to include(expected_project.full_path)
+ expect(page.find('[data-testid="pipeline-url-link"]')[:href]).to include(expected_project.full_path)
expect(page).to have_content('detached')
end
page.within('.pipeline-triggerer') do
@@ -238,11 +238,15 @@ RSpec.describe 'Merge request > User sees pipelines', :js do
threads = []
threads << Thread.new do
- @merge_request = MergeRequests::CreateService.new(project, user, merge_request_params).execute
+ Sidekiq::Worker.skipping_transaction_check do
+ @merge_request = MergeRequests::CreateService.new(project, user, merge_request_params).execute
+ end
end
threads << Thread.new do
- @pipeline = Ci::CreatePipelineService.new(project, user, build_push_data).execute(:push)
+ Sidekiq::Worker.skipping_transaction_check do
+ @pipeline = Ci::CreatePipelineService.new(project, user, build_push_data).execute(:push)
+ end
end
threads.each { |thr| thr.join }
diff --git a/spec/features/merge_request/user_sees_versions_spec.rb b/spec/features/merge_request/user_sees_versions_spec.rb
index 8930c55a28c..8999c4d6656 100644
--- a/spec/features/merge_request/user_sees_versions_spec.rb
+++ b/spec/features/merge_request/user_sees_versions_spec.rb
@@ -191,7 +191,7 @@ RSpec.describe 'Merge request > User sees versions', :js do
find('.btn-default').click
click_link 'version 1'
end
- expect(page).to have_content '0 files'
+ expect(page).to have_content 'No changes between version 1 and version 1'
end
end
@@ -217,7 +217,7 @@ RSpec.describe 'Merge request > User sees versions', :js do
expect(page).to have_content 'version 1'
end
- expect(page).to have_content '0 files'
+ expect(page).to have_content 'No changes between version 1 and version 1'
end
end
diff --git a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
index 1e1888cd826..a2ec34335ec 100644
--- a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
+++ b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
@@ -87,6 +87,7 @@ RSpec.describe 'User comments on a diff', :js do
expect(page).not_to have_content('Applied')
click_button('Apply suggestion')
+ click_button('Apply')
wait_for_requests
expect(page).to have_content('Applied')
@@ -338,6 +339,7 @@ RSpec.describe 'User comments on a diff', :js do
expect(page).not_to have_content('Applied')
click_button('Apply suggestion')
+ click_button('Apply')
wait_for_requests
expect(page).to have_content('Applied')
@@ -349,6 +351,7 @@ RSpec.describe 'User comments on a diff', :js do
expect(page).not_to have_content('Unresolve thread')
click_button('Apply suggestion')
+ click_button('Apply')
wait_for_requests
expect(page).to have_content('Unresolve thread')
diff --git a/spec/features/merge_request/user_uses_quick_actions_spec.rb b/spec/features/merge_request/user_uses_quick_actions_spec.rb
index 04a2e046f42..b48659353ec 100644
--- a/spec/features/merge_request/user_uses_quick_actions_spec.rb
+++ b/spec/features/merge_request/user_uses_quick_actions_spec.rb
@@ -41,5 +41,6 @@ RSpec.describe 'Merge request > User uses quick actions', :js do
end
it_behaves_like 'merge quick action'
+ it_behaves_like 'rebase quick action'
end
end
diff --git a/spec/features/projects/ci/lint_spec.rb b/spec/features/projects/ci/lint_spec.rb
index 466c7ba215e..ccffe25f45e 100644
--- a/spec/features/projects/ci/lint_spec.rb
+++ b/spec/features/projects/ci/lint_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe 'CI Lint', :js do
end
it 'parses Yaml and displays the jobs' do
- expect(page).to have_content('Status: syntax is correct')
+ expect(page).to have_content('Status: Syntax is correct')
within "table" do
aggregate_failures do
@@ -51,7 +51,7 @@ RSpec.describe 'CI Lint', :js do
let(:yaml_content) { 'value: cannot have :' }
it 'displays information about an error' do
- expect(page).to have_content('Status: syntax is incorrect')
+ expect(page).to have_content('Status: Syntax is incorrect')
expect(page).to have_selector(content_selector, text: yaml_content)
end
end
diff --git a/spec/features/projects/commit/user_reverts_commit_spec.rb b/spec/features/projects/commit/user_reverts_commit_spec.rb
index ca4e070703b..f3c364dab97 100644
--- a/spec/features/projects/commit/user_reverts_commit_spec.rb
+++ b/spec/features/projects/commit/user_reverts_commit_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
RSpec.describe 'User reverts a commit', :js do
include RepoHelpers
+ let_it_be(:user) { create(:user) }
let(:project) { create(:project, :repository, namespace: user.namespace) }
- let(:user) { create(:user) }
before do
sign_in(user)
@@ -14,49 +14,37 @@ RSpec.describe 'User reverts a commit', :js do
visit(project_commit_path(project, sample_commit.id))
end
- def click_revert
+ def revert_commit(create_merge_request: false)
find('.header-action-buttons .dropdown').click
- find('a[href="#modal-revert-commit"]').click
- end
+ find('[data-testid="revert-commit-link"]').click
- context 'without creating a new merge request' do
- before do
- click_revert
- page.within('#modal-revert-commit') do
- uncheck('create_merge_request')
- click_button('Revert')
- end
+ page.within('[data-testid="modal-commit"]') do
+ uncheck('create_merge_request') unless create_merge_request
+ click_button('Revert')
end
+ end
+ context 'without creating a new merge request' do
it 'reverts a commit' do
+ revert_commit
+
expect(page).to have_content('The commit has been successfully reverted.')
end
it 'does not revert a previously reverted commit' do
+ revert_commit
# Visit the commit again once it was reverted.
visit project_commit_path(project, sample_commit.id)
- find('.header-action-buttons .dropdown').click
- find('a[href="#modal-revert-commit"]').click
-
- page.within('#modal-revert-commit') do
- uncheck('create_merge_request')
- click_button('Revert')
- end
+ revert_commit
expect(page).to have_content('Sorry, we cannot revert this commit automatically.')
end
end
context 'with creating a new merge request' do
- before do
- click_revert
- end
-
it 'reverts a commit' do
- page.within('#modal-revert-commit') do
- click_button('Revert')
- end
+ revert_commit(create_merge_request: true)
expect(page).to have_content('The commit has been successfully reverted. You can now submit a merge request to get this change into the original branch.')
expect(page).to have_content("From revert-#{Commit.truncate_sha(sample_commit.id)} into master")
diff --git a/spec/features/projects/environments_pod_logs_spec.rb b/spec/features/projects/environments_pod_logs_spec.rb
index c491cd62d85..eaef3e6ca28 100644
--- a/spec/features/projects/environments_pod_logs_spec.rb
+++ b/spec/features/projects/environments_pod_logs_spec.rb
@@ -17,6 +17,8 @@ RSpec.describe 'Environment > Pod Logs', :js, :kubeclient do
stub_kubeclient_pods(environment.deployment_namespace)
stub_kubeclient_logs(pod_name, environment.deployment_namespace, container: 'container-0')
+ stub_kubeclient_deployments(environment.deployment_namespace)
+ stub_kubeclient_ingresses(environment.deployment_namespace)
stub_kubeclient_nodes_and_nodes_metrics(cluster.platform.api_url)
sign_in(project.owner)
diff --git a/spec/features/projects/feature_flags/user_sees_feature_flag_list_spec.rb b/spec/features/projects/feature_flags/user_sees_feature_flag_list_spec.rb
index 750f4dc5ef4..f5941d0ff15 100644
--- a/spec/features/projects/feature_flags/user_sees_feature_flag_list_spec.rb
+++ b/spec/features/projects/feature_flags/user_sees_feature_flag_list_spec.rb
@@ -25,7 +25,6 @@ RSpec.describe 'User sees feature flag list', :js do
create_flag(project, 'mr_train', true).tap do |feature_flag|
create_scope(feature_flag, 'production', false)
end
- stub_feature_flags(feature_flags_legacy_read_only_override: false)
end
it 'user sees the first flag' do
@@ -79,41 +78,6 @@ RSpec.describe 'User sees feature flag list', :js do
expect_status_toggle_button_to_be_disabled
end
end
-
- context 'when legacy feature flags are not read-only' do
- before do
- stub_feature_flags(feature_flags_legacy_read_only: false)
- end
-
- it 'user updates the status toggle' do
- visit(project_feature_flags_path(project))
-
- within_feature_flag_row(1) do
- status_toggle_button.click
-
- expect_status_toggle_button_to_be_checked
- end
- end
- end
-
- context 'when legacy feature flags are read-only but the override is active for a project' do
- before do
- stub_feature_flags(
- feature_flags_legacy_read_only: true,
- feature_flags_legacy_read_only_override: project
- )
- end
-
- it 'user updates the status toggle' do
- visit(project_feature_flags_path(project))
-
- within_feature_flag_row(1) do
- status_toggle_button.click
-
- expect_status_toggle_button_to_be_checked
- end
- end
- end
end
context 'with new version flags' do
diff --git a/spec/features/projects/feature_flags/user_updates_feature_flag_spec.rb b/spec/features/projects/feature_flags/user_updates_feature_flag_spec.rb
index bc2d63e1953..a435e565ff1 100644
--- a/spec/features/projects/feature_flags/user_updates_feature_flag_spec.rb
+++ b/spec/features/projects/feature_flags/user_updates_feature_flag_spec.rb
@@ -14,8 +14,7 @@ RSpec.describe 'User updates feature flag', :js do
before do
stub_feature_flags(
- feature_flag_permissions: false,
- feature_flags_legacy_read_only_override: false
+ feature_flag_permissions: false
)
sign_in(user)
end
@@ -79,117 +78,11 @@ RSpec.describe 'User updates feature flag', :js do
let!(:scope) { create_scope(feature_flag, 'review/*', true) }
- context 'when legacy flags are editable' do
- before do
- stub_feature_flags(feature_flags_legacy_read_only: false)
-
- visit(edit_project_feature_flag_path(project, feature_flag))
- end
-
- it 'user sees persisted default scope' do
- within_scope_row(1) do
- within_environment_spec do
- expect(page).to have_content('* (All Environments)')
- end
-
- within_status do
- expect(find('.project-feature-toggle')['aria-label'])
- .to eq('Toggle Status: ON')
- end
- end
- end
-
- context 'when user updates the status of a scope' do
- before do
- within_scope_row(2) do
- within_status { find('.project-feature-toggle').click }
- end
-
- click_button 'Save changes'
- expect(page).to have_current_path(project_feature_flags_path(project))
- end
-
- it 'shows the updated feature flag' do
- within_feature_flag_row(1) do
- expect(page.find('.feature-flag-name')).to have_content('ci_live_trace')
- expect_status_toggle_button_to_be_checked
-
- within_feature_flag_scopes do
- expect(page.find('.badge:nth-child(1)')).to have_content('*')
- expect(page.find('.badge:nth-child(1)')['class']).to include('badge-info')
- expect(page.find('.badge:nth-child(2)')).to have_content('review/*')
- expect(page.find('.badge:nth-child(2)')['class']).to include('badge-muted')
- end
- end
- end
- end
-
- context 'when user adds a new scope' do
- before do
- within_scope_row(3) do
- within_environment_spec do
- find('.js-env-search > input').set('production')
- find('.js-create-button').click
- end
- end
-
- click_button 'Save changes'
- expect(page).to have_current_path(project_feature_flags_path(project))
- end
-
- it 'shows the newly created scope' do
- within_feature_flag_row(1) do
- within_feature_flag_scopes do
- expect(page.find('.badge:nth-child(3)')).to have_content('production')
- expect(page.find('.badge:nth-child(3)')['class']).to include('badge-muted')
- end
- end
- end
- end
-
- context 'when user deletes a scope' do
- before do
- within_scope_row(2) do
- within_delete { find('.js-delete-scope').click }
- end
-
- click_button 'Save changes'
- expect(page).to have_current_path(project_feature_flags_path(project))
- end
-
- it 'shows the updated feature flag' do
- within_feature_flag_row(1) do
- within_feature_flag_scopes do
- expect(page).to have_css('.badge:nth-child(1)')
- expect(page).not_to have_css('.badge:nth-child(2)')
- end
- end
- end
- end
- end
-
- context 'when legacy flags are read-only' do
- it 'the user cannot edit the flag' do
- visit(edit_project_feature_flag_path(project, feature_flag))
-
- expect(page).to have_text 'This feature flag is read-only, and it will be removed in 14.0.'
- expect(page).to have_css('button.js-ff-submit.disabled')
- end
- end
-
- context 'when legacy flags are read-only, but the override is active for one project' do
- it 'the user can edit the flag' do
- stub_feature_flags(feature_flags_legacy_read_only_override: project)
-
- visit(edit_project_feature_flag_path(project, feature_flag))
- status_toggle_button.click
- click_button 'Save changes'
+ it 'the user cannot edit the flag' do
+ visit(edit_project_feature_flag_path(project, feature_flag))
- expect(page).to have_current_path(project_feature_flags_path(project))
- within_feature_flag_row(1) do
- expect_status_toggle_button_not_to_be_checked
- end
- end
+ expect(page).to have_text 'This feature flag is read-only, and it will be removed in 14.0.'
+ expect(page).to have_css('button.js-ff-submit.disabled')
end
end
end
diff --git a/spec/features/projects/files/dockerfile_dropdown_spec.rb b/spec/features/projects/files/dockerfile_dropdown_spec.rb
index a99df8a79d8..17258f7042f 100644
--- a/spec/features/projects/files/dockerfile_dropdown_spec.rb
+++ b/spec/features/projects/files/dockerfile_dropdown_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User wants to add a Dockerfile file' do
+RSpec.describe 'Projects > Files > User wants to add a Dockerfile file', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/297400' do
before do
project = create(:project, :repository)
sign_in project.owner
diff --git a/spec/features/projects/files/gitlab_ci_syntax_yml_dropdown_spec.rb b/spec/features/projects/files/gitlab_ci_syntax_yml_dropdown_spec.rb
new file mode 100644
index 00000000000..6308acb41f5
--- /dev/null
+++ b/spec/features/projects/files/gitlab_ci_syntax_yml_dropdown_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Projects > Files > User wants to add a .gitlab-ci.yml file' do
+ before do
+ project = create(:project, :repository)
+ sign_in project.owner
+ stub_experiment(ci_syntax_templates: experiment_active)
+ stub_experiment_for_subject(ci_syntax_templates: in_experiment_group)
+
+ visit project_new_blob_path(project, 'master', file_name: '.gitlab-ci.yml')
+ end
+
+ context 'when experiment is not active' do
+ let(:experiment_active) { false }
+ let(:in_experiment_group) { false }
+
+ it 'does not show the "Learn CI/CD syntax" template dropdown' do
+ expect(page).not_to have_css('.gitlab-ci-syntax-yml-selector')
+ end
+ end
+
+ context 'when experiment is active and the user is in the control group' do
+ let(:experiment_active) { true }
+ let(:in_experiment_group) { false }
+
+ it 'does not show the "Learn CI/CD syntax" template dropdown' do
+ expect(page).not_to have_css('.gitlab-ci-syntax-yml-selector')
+ end
+ end
+
+ context 'when experiment is active and the user is in the experimental group' do
+ let(:experiment_active) { true }
+ let(:in_experiment_group) { true }
+
+ it 'allows the user to pick a "Learn CI/CD syntax" template from the dropdown', :js,
+ { quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/297347' } } do
+ expect(page).to have_css('.gitlab-ci-syntax-yml-selector')
+
+ find('.js-gitlab-ci-syntax-yml-selector').click
+
+ wait_for_requests
+
+ within '.gitlab-ci-syntax-yml-selector' do
+ find('.dropdown-input-field').set('Artifacts example')
+ find('.dropdown-content .is-focused', text: 'Artifacts example').click
+ end
+
+ wait_for_requests
+
+ expect(page).to have_css('.gitlab-ci-syntax-yml-selector .dropdown-toggle-text', text: 'Learn CI/CD syntax')
+ expect(page).to have_content('You can use artifacts to pass data to jobs in later stages.')
+ end
+ end
+end
diff --git a/spec/features/projects/issues/design_management/user_uploads_designs_spec.rb b/spec/features/projects/issues/design_management/user_uploads_designs_spec.rb
index b5d5527bbfe..bfa7be5bb5c 100644
--- a/spec/features/projects/issues/design_management/user_uploads_designs_spec.rb
+++ b/spec/features/projects/issues/design_management/user_uploads_designs_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe 'User uploads new design', :js do
let(:feature_enabled) { false }
it 'shows the message about requirements' do
- expect(page).to have_content("To upload designs, you'll need to enable LFS and have admin enable hashed storage.")
+ expect(page).to have_content("To upload designs, you'll need to enable LFS and have an admin enable hashed storage.")
end
end
diff --git a/spec/features/projects/issues/email_participants_spec.rb b/spec/features/projects/issues/email_participants_spec.rb
new file mode 100644
index 00000000000..3ffe0a5ced8
--- /dev/null
+++ b/spec/features/projects/issues/email_participants_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'viewing an issue', :js do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:note) { create(:note_on_issue, project: project, noteable: issue) }
+ let_it_be(:participants) { create_list(:issue_email_participant, 4, issue: issue) }
+
+ before do
+ sign_in(user)
+ visit project_issue_path(project, issue)
+ end
+
+ shared_examples 'email participants warning' do |selector|
+ it 'shows the correct message' do
+ expect(find(selector)).to have_content(", and 1 more will be notified of your comment")
+ end
+ end
+
+ context 'for a new note' do
+ it_behaves_like 'email participants warning', '.new-note'
+ end
+
+ context 'for a reply form' do
+ before do
+ find('.js-reply-button').click
+ end
+
+ it_behaves_like 'email participants warning', '.note-edit-form'
+ end
+end
diff --git a/spec/features/projects/jobs/permissions_spec.rb b/spec/features/projects/jobs/permissions_spec.rb
index b1e8127c54c..e87880d74b1 100644
--- a/spec/features/projects/jobs/permissions_spec.rb
+++ b/spec/features/projects/jobs/permissions_spec.rb
@@ -179,34 +179,6 @@ RSpec.describe 'Project Jobs Permissions' do
expect(status_code).to eq(expected_status_code)
end
end
-
- context 'when restrict_access_to_build_debug_mode feature not enabled' do
- where(:public_builds, :user_project_role, :ci_debug_trace, :expected_status_code) do
- true | 'developer' | true | 200
- true | 'guest' | true | 200
- true | 'developer' | false | 200
- true | 'guest' | false | 200
- false | 'developer' | true | 200
- false | 'guest' | true | 403
- false | 'developer' | false | 200
- false | 'guest' | false | 403
- end
-
- with_them do
- before do
- stub_feature_flags(restrict_access_to_build_debug_mode: false)
- ci_instance_variable.update!(value: ci_debug_trace)
- project.update!(public_builds: public_builds)
- project.add_role(user, user_project_role)
- end
-
- it 'renders trace to authorized users' do
- visit trace_project_job_path(project, job)
-
- expect(status_code).to eq(expected_status_code)
- end
- end
- end
end
describe 'raw page' do
@@ -237,35 +209,6 @@ RSpec.describe 'Project Jobs Permissions' do
expect(page).to have_content(expected_msg)
end
end
-
- context 'when restrict_access_to_build_debug_mode feature not enabled' do
- where(:public_builds, :user_project_role, :ci_debug_trace, :expected_status_code, :expected_msg) do
- true | 'developer' | true | 200 | nil
- true | 'guest' | true | 200 | nil
- true | 'developer' | false | 200 | nil
- true | 'guest' | false | 200 | nil
- false | 'developer' | true | 200 | nil
- false | 'guest' | true | 403 | 'The current user is not authorized to access the job log'
- false | 'developer' | false | 200 | nil
- false | 'guest' | false | 403 | 'The current user is not authorized to access the job log'
- end
-
- with_them do
- before do
- stub_feature_flags(restrict_access_to_build_debug_mode: false)
- ci_instance_variable.update!(value: ci_debug_trace)
- project.update!(public_builds: public_builds)
- project.add_role(user, user_project_role)
- end
-
- it 'renders raw trace to authorized users' do
- visit raw_project_job_path(project, job)
-
- expect(status_code).to eq(expected_status_code)
- expect(page).to have_content(expected_msg)
- end
- end
- end
end
end
end
diff --git a/spec/features/projects/jobs/user_browses_jobs_spec.rb b/spec/features/projects/jobs/user_browses_jobs_spec.rb
index c768b0e281c..5abebf2320e 100644
--- a/spec/features/projects/jobs/user_browses_jobs_spec.rb
+++ b/spec/features/projects/jobs/user_browses_jobs_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe 'User browses jobs' do
it 'shows the "CI Lint" button' do
page.within('.nav-controls') do
- ci_lint_tool_link = page.find_link('CI lint')
+ ci_lint_tool_link = page.find_link('CI Lint')
expect(ci_lint_tool_link[:href]).to end_with(project_ci_lint_path(project))
end
diff --git a/spec/features/projects/members/group_members_spec.rb b/spec/features/projects/members/group_members_spec.rb
index 3060d2c6a43..aa15f04bf24 100644
--- a/spec/features/projects/members/group_members_spec.rb
+++ b/spec/features/projects/members/group_members_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects members' do
+RSpec.describe 'Projects members', :js do
let(:user) { create(:user) }
let(:developer) { create(:user) }
let(:group) { create(:group, :public) }
@@ -66,62 +66,61 @@ RSpec.describe 'Projects members' do
end
end
- context 'with a group and a project invitee' do
+ context 'with a group, a project invitee, and a project requester' do
before do
+ group.request_access(group_requester)
+ project.request_access(project_requester)
group_invitee
project_invitee
visit project_project_members_path(project)
end
- it 'shows the project invitee, the project developer, and the group owner' do
+ it 'shows the group owner' do
page.within first('.content-list') do
- expect(page).to have_content('test1@abc.com')
- expect(page).not_to have_content('test2@abc.com')
-
- # Project developer
- expect(page).to have_content(developer.name)
-
# Group owner
expect(page).to have_content(user.name)
expect(page).to have_content(group.name)
end
end
- end
- context 'with a group requester' do
- before do
- group.request_access(group_requester)
- visit project_project_members_path(project)
+ it 'shows the project developer' do
+ page.within first('.content-list') do
+ # Project developer
+ expect(page).to have_content(developer.name)
+ end
end
- it 'does not appear in the project members page' do
+ it 'shows the project invitee' do
+ click_link 'Invited'
+
page.within first('.content-list') do
+ expect(page).to have_content('test1@abc.com')
+ expect(page).not_to have_content('test2@abc.com')
+ end
+ end
+
+ it 'shows the project requester' do
+ click_link 'Access requests'
+
+ page.within first('.content-list') do
+ expect(page).to have_content(project_requester.name)
expect(page).not_to have_content(group_requester.name)
end
end
end
- context 'with a group and a project requesters' do
+ context 'with a group requester' do
before do
+ stub_feature_flags(invite_members_group_modal: false)
group.request_access(group_requester)
- project.request_access(project_requester)
visit project_project_members_path(project)
end
- it 'shows the project requester, the project developer, and the group owner' do
+ it 'does not appear in the project members page' do
+ expect(page).not_to have_link('Access requests')
page.within first('.content-list') do
- expect(page).to have_content(project_requester.name)
expect(page).not_to have_content(group_requester.name)
end
-
- page.within all('.content-list').last do
- # Project developer
- expect(page).to have_content(developer.name)
-
- # Group owner
- expect(page).to have_content(user.name)
- expect(page).to have_content(group.name)
- end
end
end
diff --git a/spec/features/projects/members/groups_with_access_list_spec.rb b/spec/features/projects/members/groups_with_access_list_spec.rb
index d59f8eb4b1d..686d86b1783 100644
--- a/spec/features/projects/members/groups_with_access_list_spec.rb
+++ b/spec/features/projects/members/groups_with_access_list_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe 'Projects > Members > Groups with access list', :js do
project.add_maintainer(user)
sign_in(user)
visit project_project_members_path(project)
+ click_groups_tab
end
it 'updates group access level' do
@@ -29,6 +30,8 @@ RSpec.describe 'Projects > Members > Groups with access list', :js do
visit project_project_members_path(project)
+ click_groups_tab
+
expect(first('.group_member')).to have_content('Guest')
end
@@ -71,23 +74,31 @@ RSpec.describe 'Projects > Members > Groups with access list', :js do
expect(page).not_to have_selector('.group_member')
end
- context 'search in existing members (yes, this filters the groups list as well)' do
+ context 'search in existing members' do
it 'finds no results' do
page.within '.user-search-form' do
- fill_in 'search', with: 'testing 123'
+ fill_in 'search_groups', with: 'testing 123'
find('.user-search-btn').click
end
+ click_groups_tab
+
expect(page).not_to have_selector('.group_member')
end
it 'finds results' do
page.within '.user-search-form' do
- fill_in 'search', with: group.name
+ fill_in 'search_groups', with: group.name
find('.user-search-btn').click
end
+ click_groups_tab
+
expect(page).to have_selector('.group_member', count: 1)
end
end
+
+ def click_groups_tab
+ click_link 'Groups'
+ end
end
diff --git a/spec/features/projects/members/invite_group_spec.rb b/spec/features/projects/members/invite_group_spec.rb
index 30e32ad1366..bb56ae348fb 100644
--- a/spec/features/projects/members/invite_group_spec.rb
+++ b/spec/features/projects/members/invite_group_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe 'Project > Members > Invite group', :js do
it 'the project can be shared with another group' do
visit project_project_members_path(project)
- expect(page).not_to have_css('.project-members-groups')
+ expect(page).not_to have_link 'Groups'
click_on 'invite-group-tab'
@@ -47,7 +47,9 @@ RSpec.describe 'Project > Members > Invite group', :js do
page.find('body').click
find('.btn-success').click
- page.within('.project-members-groups') do
+ click_link 'Groups'
+
+ page.within('[data-testid="project-member-groups"]') do
expect(page).to have_content(group_to_share_with.name)
end
end
@@ -132,7 +134,9 @@ RSpec.describe 'Project > Members > Invite group', :js do
end
it 'the group link shows the expiration time with a warning class' do
- page.within('.project-members-groups') do
+ click_link 'Groups'
+
+ page.within('[data-testid="project-member-groups"]') do
# Using distance_of_time_in_words_to_now because it is not the same as
# subtraction, and this way avoids time zone issues as well
expires_in_text = distance_of_time_in_words_to_now(project.project_group_links.first.expires_at)
diff --git a/spec/features/projects/members/list_spec.rb b/spec/features/projects/members/list_spec.rb
index 36ff461aac2..62115f2dce6 100644
--- a/spec/features/projects/members/list_spec.rb
+++ b/spec/features/projects/members/list_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe 'Project members list' do
let(:project) { create(:project, namespace: group) }
before do
+ stub_feature_flags(invite_members_group_modal: false)
sign_in(user1)
group.add_owner(user1)
end
@@ -82,7 +83,9 @@ RSpec.describe 'Project members list' do
add_user('test@example.com', 'Reporter')
- page.within(second_row) do
+ click_link 'Invited'
+
+ page.within(first_row) do
expect(page).to have_content('test@example.com')
expect(page).to have_content('Invited')
expect(page).to have_button('Reporter')
diff --git a/spec/features/projects/members/master_manages_access_requests_spec.rb b/spec/features/projects/members/master_manages_access_requests_spec.rb
index 2fdc75dca91..4c3eaa93352 100644
--- a/spec/features/projects/members/master_manages_access_requests_spec.rb
+++ b/spec/features/projects/members/master_manages_access_requests_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe 'Projects > Members > Maintainer manages access requests' do
it_behaves_like 'Maintainer manages access requests' do
- let(:has_tabs) { false }
let(:entity) { create(:project, :public) }
let(:members_page_path) { project_project_members_path(entity) }
end
diff --git a/spec/features/projects/members/tabs_spec.rb b/spec/features/projects/members/tabs_spec.rb
new file mode 100644
index 00000000000..bdcf02c82a4
--- /dev/null
+++ b/spec/features/projects/members/tabs_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Projects > Members > Tabs' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, creator: user, namespace: user.namespace) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project_members) { create_list(:project_member, 2, project: project) }
+ let_it_be(:access_requests) { create_list(:project_member, 2, :access_request, project: project) }
+ let_it_be(:invites) { create_list(:project_member, 2, :invited, project: project) }
+ let_it_be(:project_group_links) { create_list(:project_group_link, 2, project: project) }
+
+ shared_examples 'active "Members" tab' do
+ it 'displays "Members" tab' do
+ expect(page).to have_selector('.nav-link.active', text: 'Members')
+ end
+ end
+
+ before do
+ allow(Kaminari.config).to receive(:default_per_page).and_return(1)
+
+ sign_in(user)
+ visit project_project_members_path(project)
+ end
+
+ where(:tab, :count) do
+ 'Members' | 3
+ 'Invited' | 2
+ 'Groups' | 2
+ 'Access requests' | 2
+ end
+
+ with_them do
+ it "renders #{params[:tab]} tab" do
+ expect(page).to have_selector('.nav-link', text: "#{tab} #{count}")
+ end
+ end
+
+ context 'displays "Members" tab by default' do
+ it_behaves_like 'active "Members" tab'
+ end
+
+ context 'when searching "Groups"', :js do
+ before do
+ click_link 'Groups'
+
+ page.within '[data-testid="group-link-search-form"]' do
+ fill_in 'search_groups', with: 'group'
+ find('button[type="submit"]').click
+ end
+ end
+
+ it 'displays "Groups" tab' do
+ expect(page).to have_selector('.nav-link.active', text: 'Groups')
+ end
+
+ context 'and then searching "Members"' do
+ before do
+ click_link 'Members 3'
+
+ page.within '[data-testid="user-search-form"]' do
+ fill_in 'search', with: 'user'
+ find('button[type="submit"]').click
+ end
+ end
+
+ it_behaves_like 'active "Members" tab'
+ end
+ end
+end
diff --git a/spec/features/projects/releases/user_creates_release_spec.rb b/spec/features/projects/releases/user_creates_release_spec.rb
index 0a5f7cc7edd..2acdf983cf2 100644
--- a/spec/features/projects/releases/user_creates_release_spec.rb
+++ b/spec/features/projects/releases/user_creates_release_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe 'User creates release', :js do
expect(page.find('.ref-selector button')).to have_content(project.default_branch)
end
- context 'when the "Save release" button is clicked' do
+ context 'when the "Save release" button is clicked', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/297507' do
let(:tag_name) { 'v1.0' }
let(:release_title) { 'A most magnificent release' }
let(:release_notes) { 'Best. Release. **Ever.** :rocket:' }
diff --git a/spec/features/projects/services/user_activates_alerts_spec.rb b/spec/features/projects/services/user_activates_alerts_spec.rb
deleted file mode 100644
index 8b0acdf3618..00000000000
--- a/spec/features/projects/services/user_activates_alerts_spec.rb
+++ /dev/null
@@ -1,74 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'User activates Alerts', :js do
- let_it_be(:project) { create(:project) }
- let_it_be(:user) { create(:user) }
-
- let(:service_name) { 'alerts' }
- let(:service_title) { 'Alerts endpoint' }
-
- before do
- sign_in(user)
- project.add_maintainer(user)
- end
-
- context 'when service is deactivated' do
- it 'user cannot activate service' do
- visit_project_services
-
- expect(page).to have_link(service_title)
- click_link(service_title)
-
- expect(page).to have_callout_message
- expect(page).not_to have_active_service
- expect(page).to have_toggle_active_disabled
- end
- end
-
- context 'when service is activated' do
- let_it_be(:activated_alerts_service) do
- create(:alerts_service, :active, project: project)
- end
-
- before do
- visit_alerts_service
- end
-
- it 'user cannot change settings' do
- expect(page).to have_callout_message
- expect(page).to have_active_service
- expect(page).to have_toggle_active_disabled
- expect(page).to have_button_reset_key_disabled
- end
- end
-
- private
-
- def visit_project_services
- visit(project_settings_integrations_path(project))
- end
-
- def visit_alerts_service
- visit(edit_project_service_path(project, service_name))
- end
-
- def have_callout_message
- within('.gl-alert') do
- have_content('You can now manage alert endpoint configuration in the Alerts section on the Operations settings page.')
- end
- end
-
- def have_active_service
- have_selector('.js-service-active-status[data-value="true"]')
- end
-
- def have_toggle_active_disabled
- have_selector('#activated .project-feature-toggle.is-disabled')
- end
-
- def have_button_reset_key_disabled
- have_button('Reset key', disabled: true)
- end
-end
diff --git a/spec/features/projects/settings/pipelines_settings_spec.rb b/spec/features/projects/settings/pipelines_settings_spec.rb
index ffc0ecc4966..c087237fd7c 100644
--- a/spec/features/projects/settings/pipelines_settings_spec.rb
+++ b/spec/features/projects/settings/pipelines_settings_spec.rb
@@ -183,7 +183,7 @@ RSpec.describe "Projects > Settings > Pipelines settings" do
let(:page_token) { find('#registration_token').text }
before do
- click_button 'Reset runners registration token'
+ click_button 'Reset registration token'
end
it 'changes registration token' do
diff --git a/spec/features/projects/settings/user_manages_project_members_spec.rb b/spec/features/projects/settings/user_manages_project_members_spec.rb
index 3836b95a28a..726b8fb6840 100644
--- a/spec/features/projects/settings/user_manages_project_members_spec.rb
+++ b/spec/features/projects/settings/user_manages_project_members_spec.rb
@@ -57,7 +57,7 @@ RSpec.describe 'Projects > Settings > User manages project members' do
end
end
- it 'shows all members of project shared group' do
+ it 'shows all members of project shared group', :js do
group.add_owner(user)
group.add_developer(user_dmitriy)
@@ -67,7 +67,9 @@ RSpec.describe 'Projects > Settings > User manages project members' do
visit(project_project_members_path(project))
- page.within('.project-members-groups') do
+ click_link 'Groups'
+
+ page.within('[data-testid="project-member-groups"]') do
expect(page).to have_content('OpenSource')
expect(first('.group_member')).to have_content('Maintainer')
end
diff --git a/spec/features/projects/snippets/user_deletes_snippet_spec.rb b/spec/features/projects/snippets/user_deletes_snippet_spec.rb
index 6d526e60512..ca49e6a36b7 100644
--- a/spec/features/projects/snippets/user_deletes_snippet_spec.rb
+++ b/spec/features/projects/snippets/user_deletes_snippet_spec.rb
@@ -21,6 +21,8 @@ RSpec.describe 'Projects > Snippets > User deletes a snippet', :js do
click_button('Delete snippet')
wait_for_requests
- expect(page).not_to have_content(snippet.title)
+ # This assertion also confirms we did not end up on an error page
+ expect(page).to have_selector('#new_snippet_link')
+ expect(project.snippets.length).to eq(0)
end
end
diff --git a/spec/features/promotion_spec.rb b/spec/features/promotion_spec.rb
index 9344f9b56b8..8692930376f 100644
--- a/spec/features/promotion_spec.rb
+++ b/spec/features/promotion_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe 'Promotions', :js do
it 'appears in project edit page' do
visit edit_project_path(project)
- expect(find('#promote_service_desk')).to have_content 'Improve customer support with GitLab Service Desk.'
+ expect(find('#promote_service_desk')).to have_content 'Improve customer support with Service Desk'
end
it 'does not show when cookie is set' do
diff --git a/spec/features/protected_tags_spec.rb b/spec/features/protected_tags_spec.rb
index 25447db3c8d..376f990f054 100644
--- a/spec/features/protected_tags_spec.rb
+++ b/spec/features/protected_tags_spec.rb
@@ -68,7 +68,7 @@ RSpec.describe 'Protected Tags', :js do
click_on "Protect"
within(".protected-tags-list") do
- expect(page).to have_content("Protected tag (2)")
+ expect(page).to have_content("Protected tags (2)")
expect(page).to have_content("2 matching tags")
end
end
diff --git a/spec/features/runners_spec.rb b/spec/features/runners_spec.rb
index 5cddad81927..cc024ab8f35 100644
--- a/spec/features/runners_spec.rb
+++ b/spec/features/runners_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe 'Runners' do
it 'user can see a button to install runners on kubernetes clusters' do
visit project_runners_path(project)
- expect(page).to have_link('Install Runner on Kubernetes', href: project_clusters_path(project))
+ expect(page).to have_link('Install GitLab Runner on Kubernetes', href: project_clusters_path(project))
end
end
@@ -69,7 +69,7 @@ RSpec.describe 'Runners' do
visit project_runners_path(project)
within '.activated-specific-runners' do
- click_on 'Remove Runner'
+ click_on 'Remove runner'
end
expect(page).not_to have_content(specific_runner.display_name)
@@ -226,10 +226,10 @@ RSpec.describe 'Runners' do
it 'group runners are not available' do
visit project_runners_path(project)
- expect(page).to have_content 'This group does not provide any group Runners yet'
+ expect(page).to have_content 'This group does not have any group runners yet.'
- expect(page).to have_content 'Group maintainers can register group runners in the Group CI/CD settings'
- expect(page).not_to have_content 'Ask your group maintainer to set up a group Runner'
+ expect(page).to have_content 'Group maintainers can register group runners in the group\'s CI/CD settings.'
+ expect(page).not_to have_content 'Ask your group maintainer to set up a group runner'
end
end
end
@@ -241,7 +241,7 @@ RSpec.describe 'Runners' do
it 'group runners are not available' do
visit project_runners_path(project)
- expect(page).to have_content 'This project does not belong to a group and can therefore not make use of group Runners.'
+ expect(page).to have_content 'This project does not belong to a group and cannot make use of group runners.'
end
end
@@ -252,10 +252,10 @@ RSpec.describe 'Runners' do
it 'group runners are not available' do
visit project_runners_path(project)
- expect(page).to have_content 'This group does not provide any group Runners yet.'
+ expect(page).to have_content 'This group does not have any group runners yet.'
- expect(page).not_to have_content 'Group maintainers can register group runners in the Group CI/CD settings'
- expect(page).to have_content 'Ask your group maintainer to set up a group Runner.'
+ expect(page).not_to have_content 'Group maintainers can register group runners in the group\'s CI/CD settings.'
+ expect(page).to have_content 'Ask your group maintainer to set up a group runner.'
end
end
@@ -267,21 +267,21 @@ RSpec.describe 'Runners' do
it 'group runners are available' do
visit project_runners_path(project)
- expect(page).to have_content 'Available group Runners: 1'
+ expect(page).to have_content 'Available group runners: 1'
expect(page).to have_content 'group-runner'
end
it 'group runners may be disabled for a project' do
visit project_runners_path(project)
- click_on 'Disable group Runners'
+ click_on 'Disable group runners'
- expect(page).to have_content 'Enable group Runners'
+ expect(page).to have_content 'Enable group runners'
expect(project.reload.group_runners_enabled).to be false
- click_on 'Enable group Runners'
+ click_on 'Enable group runners'
- expect(page).to have_content 'Disable group Runners'
+ expect(page).to have_content 'Disable group runners'
expect(project.reload.group_runners_enabled).to be true
end
end
@@ -305,7 +305,7 @@ RSpec.describe 'Runners' do
it 'user can see a link to install runners on kubernetes clusters' do
visit group_settings_ci_cd_path(group)
- expect(page).to have_link('Install Runner on Kubernetes', href: group_clusters_path(group))
+ expect(page).to have_link('Install GitLab Runner on Kubernetes', href: group_clusters_path(group))
end
end
@@ -316,7 +316,7 @@ RSpec.describe 'Runners' do
visit group_settings_ci_cd_path(group)
expect(page).not_to have_content 'No runners found'
- expect(page).to have_content 'Available Runners: 1'
+ expect(page).to have_content 'Available runners: 1'
expect(page).to have_content 'group-runner'
end
@@ -396,7 +396,7 @@ RSpec.describe 'Runners' do
visit group_settings_ci_cd_path(group)
expect(page).not_to have_content 'No runners found'
- expect(page).to have_content 'Available Runners: 1'
+ expect(page).to have_content 'Available runners: 1'
expect(page).to have_content 'project-runner'
end
diff --git a/spec/features/security/dashboard_access_spec.rb b/spec/features/security/dashboard_access_spec.rb
index 5ac4a5c1840..5430329d47d 100644
--- a/spec/features/security/dashboard_access_spec.rb
+++ b/spec/features/security/dashboard_access_spec.rb
@@ -57,7 +57,7 @@ RSpec.describe "Dashboard access" do
it { expect(new_group_path).to be_denied_for :visitor }
end
- describe "GET /profile/groups" do
+ describe "GET /dashboard/groups" do
subject { dashboard_groups_path }
it { is_expected.to be_allowed_for :admin }
diff --git a/spec/features/security/profile_access_spec.rb b/spec/features/security/profile_access_spec.rb
index 3aa8278866c..301efd2d99b 100644
--- a/spec/features/security/profile_access_spec.rb
+++ b/spec/features/security/profile_access_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe "Profile access" do
include AccessMatchers
- describe "GET /profile/keys" do
+ describe "GET /-/profile/keys" do
subject { profile_keys_path }
it { is_expected.to be_allowed_for :admin }
@@ -13,7 +13,7 @@ RSpec.describe "Profile access" do
it { is_expected.to be_denied_for :visitor }
end
- describe "GET /profile" do
+ describe "GET /-/profile" do
subject { profile_path }
it { is_expected.to be_allowed_for :admin }
@@ -21,7 +21,7 @@ RSpec.describe "Profile access" do
it { is_expected.to be_denied_for :visitor }
end
- describe "GET /profile/account" do
+ describe "GET /-/profile/account" do
subject { profile_account_path }
it { is_expected.to be_allowed_for :admin }
@@ -29,7 +29,7 @@ RSpec.describe "Profile access" do
it { is_expected.to be_denied_for :visitor }
end
- describe "GET /profile/preferences" do
+ describe "GET /-/profile/preferences" do
subject { profile_preferences_path }
it { is_expected.to be_allowed_for :admin }
@@ -37,7 +37,7 @@ RSpec.describe "Profile access" do
it { is_expected.to be_denied_for :visitor }
end
- describe "GET /profile/audit_log" do
+ describe "GET /-/profile/audit_log" do
subject { audit_log_profile_path }
it { is_expected.to be_allowed_for :admin }
@@ -45,7 +45,7 @@ RSpec.describe "Profile access" do
it { is_expected.to be_denied_for :visitor }
end
- describe "GET /profile/notifications" do
+ describe "GET /-/profile/notifications" do
subject { profile_notifications_path }
it { is_expected.to be_allowed_for :admin }
diff --git a/spec/features/users/login_spec.rb b/spec/features/users/login_spec.rb
index e4a8d836413..5a537c1d4df 100644
--- a/spec/features/users/login_spec.rb
+++ b/spec/features/users/login_spec.rb
@@ -742,65 +742,30 @@ RSpec.describe 'Login' do
end
context 'when the user did not enable 2FA' do
- context 'when `vue_2fa_recovery_codes` feature flag is disabled' do
- before do
- stub_feature_flags(vue_2fa_recovery_codes: false)
- end
-
- it 'asks to set 2FA before asking to accept the terms' do
- expect(authentication_metrics)
- .to increment(:user_authenticated_counter)
-
- visit new_user_session_path
-
- fill_in 'user_login', with: user.email
- fill_in 'user_password', with: '12345678'
-
- click_button 'Sign in'
-
- expect_to_be_on_terms_page
- click_button 'Accept terms'
-
- expect(current_path).to eq(profile_two_factor_auth_path)
-
- fill_in 'pin_code', with: user.reload.current_otp
-
- click_button 'Register with two-factor app'
-
- expect(page).to have_content('Congratulations! You have enabled Two-factor Authentication!')
-
- click_link 'Proceed'
-
- expect(current_path).to eq(profile_account_path)
- end
- end
-
- context 'when `vue_2fa_recovery_codes` feature flag is enabled' do
- it 'asks to set 2FA before asking to accept the terms', :js do
- expect(authentication_metrics)
- .to increment(:user_authenticated_counter)
+ it 'asks to set 2FA before asking to accept the terms', :js do
+ expect(authentication_metrics)
+ .to increment(:user_authenticated_counter)
- visit new_user_session_path
+ visit new_user_session_path
- fill_in 'user_login', with: user.email
- fill_in 'user_password', with: '12345678'
+ fill_in 'user_login', with: user.email
+ fill_in 'user_password', with: '12345678'
- click_button 'Sign in'
+ click_button 'Sign in'
- expect_to_be_on_terms_page
- click_button 'Accept terms'
+ expect_to_be_on_terms_page
+ click_button 'Accept terms'
- expect(current_path).to eq(profile_two_factor_auth_path)
+ expect(current_path).to eq(profile_two_factor_auth_path)
- fill_in 'pin_code', with: user.reload.current_otp
+ fill_in 'pin_code', with: user.reload.current_otp
- click_button 'Register with two-factor app'
- click_button 'Copy codes'
- click_link 'Proceed'
+ click_button 'Register with two-factor app'
+ click_button 'Copy codes'
+ click_link 'Proceed'
- expect(current_path).to eq(profile_account_path)
- expect(page).to have_content('Congratulations! You have enabled Two-factor Authentication!')
- end
+ expect(current_path).to eq(profile_account_path)
+ expect(page).to have_content('Congratulations! You have enabled Two-factor Authentication!')
end
end
diff --git a/spec/features/users/signup_spec.rb b/spec/features/users/signup_spec.rb
index bfdd1e1bdb7..5f70517224e 100644
--- a/spec/features/users/signup_spec.rb
+++ b/spec/features/users/signup_spec.rb
@@ -294,17 +294,13 @@ RSpec.describe 'Signup' do
context 'when reCAPTCHA and invisible captcha are enabled' do
before do
- InvisibleCaptcha.timestamp_enabled = true
+ stub_application_setting(invisible_captcha_enabled: true)
stub_application_setting(recaptcha_enabled: true)
allow_next_instance_of(RegistrationsController) do |instance|
allow(instance).to receive(:verify_recaptcha).and_return(true)
end
end
- after do
- InvisibleCaptcha.timestamp_enabled = false
- end
-
context 'when reCAPTCHA detects malicious behaviour' do
before do
allow_next_instance_of(RegistrationsController) do |instance|
diff --git a/spec/finders/alert_management/alerts_finder_spec.rb b/spec/finders/alert_management/alerts_finder_spec.rb
index 87a5da38dd1..3a88db5d854 100644
--- a/spec/finders/alert_management/alerts_finder_spec.rb
+++ b/spec/finders/alert_management/alerts_finder_spec.rb
@@ -42,6 +42,12 @@ RSpec.describe AlertManagement::AlertsFinder, '#execute' do
it { is_expected.to contain_exactly(resolved_alert, ignored_alert) }
end
+
+ context 'skips domain if iid is given' do
+ let(:params) { { iid: resolved_alert.iid, domain: 'threat_monitoring' } }
+
+ it { is_expected.to contain_exactly(resolved_alert) }
+ end
end
context 'empty params' do
diff --git a/spec/finders/ci/daily_build_group_report_results_finder_spec.rb b/spec/finders/ci/daily_build_group_report_results_finder_spec.rb
index 28a732fda82..2a6e44673e3 100644
--- a/spec/finders/ci/daily_build_group_report_results_finder_spec.rb
+++ b/spec/finders/ci/daily_build_group_report_results_finder_spec.rb
@@ -64,13 +64,13 @@ RSpec.describe Ci::DailyBuildGroupReportResultsFinder do
end
end
- context 'when ref_path is not present' do
+ context 'when ref_path query parameter is not present' do
let(:ref_path) { nil }
- context 'when coverages exist for the default branch' do
+ context 'when records with coverage data from the default branch exist' do
let(:default_branch) { true }
- it 'returns coverage for the default branch' do
+ it 'returns records with default_branch:true, irrespective of ref_path' do
rspec_coverage_4 = create_daily_coverage('rspec', 66.0, '2020-03-10')
expect(coverages).to contain_exactly(rspec_coverage_4)
diff --git a/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb b/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb
index 64b3c46e122..65f6dc0ba74 100644
--- a/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb
+++ b/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb
@@ -225,24 +225,6 @@ RSpec.describe Ci::PipelinesForMergeRequestFinder do
branch_pipeline_2,
branch_pipeline])
end
-
- context 'when ci_pipelines_for_merge_request_finder_new_cte feature flag is disabled' do
- before do
- stub_feature_flags(ci_pipelines_for_merge_request_finder_new_cte: false)
- end
-
- it 'returns only related merge request pipelines' do
- expect(subject.all)
- .to eq([detached_merge_request_pipeline,
- branch_pipeline_2,
- branch_pipeline])
-
- expect(described_class.new(merge_request_2, nil).all)
- .to eq([detached_merge_request_pipeline_2,
- branch_pipeline_2,
- branch_pipeline])
- end
- end
end
context 'when detached merge request pipeline is run on head ref of the merge request' do
diff --git a/spec/finders/cluster_ancestors_finder_spec.rb b/spec/finders/cluster_ancestors_finder_spec.rb
index ea1dbea4cfe..a54809801b5 100644
--- a/spec/finders/cluster_ancestors_finder_spec.rb
+++ b/spec/finders/cluster_ancestors_finder_spec.rb
@@ -83,8 +83,16 @@ RSpec.describe ClusterAncestorsFinder, '#execute' do
let(:clusterable) { Clusters::Instance.new }
let(:user) { create(:admin) }
- it 'returns the list of instance clusters' do
- is_expected.to eq([instance_cluster])
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'returns the list of instance clusters' do
+ is_expected.to eq([instance_cluster])
+ end
+ end
+
+ context 'when admin mode is disabled' do
+ it 'returns nothing' do
+ is_expected.to be_empty
+ end
end
end
end
diff --git a/spec/finders/concerns/packages/finder_helper_spec.rb b/spec/finders/concerns/packages/finder_helper_spec.rb
new file mode 100644
index 00000000000..73f77647573
--- /dev/null
+++ b/spec/finders/concerns/packages/finder_helper_spec.rb
@@ -0,0 +1,161 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Packages::FinderHelper do
+ describe '#packages_visible_to_user' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:group) { create(:group) }
+ let_it_be_with_reload(:project1) { create(:project, namespace: group) }
+ let_it_be(:package1) { create(:package, project: project1) }
+ let_it_be_with_reload(:subgroup) { create(:group, parent: group) }
+ let_it_be_with_reload(:project2) { create(:project, namespace: subgroup) }
+ let_it_be(:package2) { create(:package, project: project2) }
+
+ let(:finder_class) do
+ Class.new do
+ include ::Packages::FinderHelper
+
+ def initialize(user)
+ @current_user = user
+ end
+
+ def execute(group)
+ packages_visible_to_user(@current_user, within_group: group)
+ end
+ end
+ end
+
+ let(:finder) { finder_class.new(user) }
+
+ subject { finder.execute(group) }
+
+ shared_examples 'returning both packages' do
+ it { is_expected.to contain_exactly(package1, package2) }
+ end
+
+ shared_examples 'returning package1' do
+ it { is_expected.to eq [package1] }
+ end
+
+ shared_examples 'returning no packages' do
+ it { is_expected.to be_empty }
+ end
+
+ where(:group_visibility, :subgroup_visibility, :project2_visibility, :user_role, :shared_example_name) do
+ 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | :maintainer | 'returning both packages'
+ 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | :developer | 'returning both packages'
+ 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | :guest | 'returning both packages'
+ 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | :anonymous | 'returning both packages'
+ 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | :maintainer | 'returning both packages'
+ 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | :developer | 'returning both packages'
+ 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | :guest | 'returning package1'
+ 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | :anonymous | 'returning package1'
+ 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | :maintainer | 'returning both packages'
+ 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | :developer | 'returning both packages'
+ 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | :guest | 'returning package1'
+ 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | :anonymous | 'returning package1'
+ 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | :maintainer | 'returning both packages'
+ 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | :developer | 'returning both packages'
+ 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | :guest | 'returning no packages'
+ 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | :anonymous | 'returning no packages'
+ end
+
+ with_them do
+ before do
+ unless user_role == :anonymous
+ group.send("add_#{user_role}", user)
+ subgroup.send("add_#{user_role}", user)
+ project1.send("add_#{user_role}", user)
+ project2.send("add_#{user_role}", user)
+ end
+
+ project2.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project2_visibility, false))
+ subgroup.update!(visibility_level: Gitlab::VisibilityLevel.const_get(subgroup_visibility, false))
+ project1.update!(visibility_level: Gitlab::VisibilityLevel.const_get(group_visibility, false))
+ group.update!(visibility_level: Gitlab::VisibilityLevel.const_get(group_visibility, false))
+ end
+
+ it_behaves_like params[:shared_example_name]
+ end
+ end
+
+ describe '#projects_visible_to_user' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:group) { create(:group) }
+ let_it_be_with_reload(:project1) { create(:project, namespace: group) }
+ let_it_be_with_reload(:subgroup) { create(:group, parent: group) }
+ let_it_be_with_reload(:project2) { create(:project, namespace: subgroup) }
+
+ let(:finder_class) do
+ Class.new do
+ include ::Packages::FinderHelper
+
+ def initialize(user)
+ @current_user = user
+ end
+
+ def execute(group)
+ projects_visible_to_user(@current_user, within_group: group)
+ end
+ end
+ end
+
+ let(:finder) { finder_class.new(user) }
+
+ subject { finder.execute(group) }
+
+ shared_examples 'returning both projects' do
+ it { is_expected.to contain_exactly(project1, project2) }
+ end
+
+ shared_examples 'returning project1' do
+ it { is_expected.to eq [project1] }
+ end
+
+ shared_examples 'returning no project' do
+ it { is_expected.to be_empty }
+ end
+
+ where(:group_visibility, :subgroup_visibility, :project2_visibility, :user_role, :shared_example_name) do
+ 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | :maintainer | 'returning both projects'
+ 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | :developer | 'returning both projects'
+ 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | :guest | 'returning both projects'
+ 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | :anonymous | 'returning both projects'
+ 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | :maintainer | 'returning both projects'
+ 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | :developer | 'returning both projects'
+ 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | :guest | 'returning project1'
+ 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | :anonymous | 'returning project1'
+ 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | :maintainer | 'returning both projects'
+ 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | :developer | 'returning both projects'
+ 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | :guest | 'returning project1'
+ 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | :anonymous | 'returning project1'
+ 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | :maintainer | 'returning both projects'
+ 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | :developer | 'returning both projects'
+ 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | :guest | 'returning no project'
+ 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | :anonymous | 'returning no project'
+ end
+
+ with_them do
+ before do
+ unless user_role == :anonymous
+ group.send("add_#{user_role}", user)
+ subgroup.send("add_#{user_role}", user)
+ project1.send("add_#{user_role}", user)
+ project2.send("add_#{user_role}", user)
+ end
+
+ project2.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project2_visibility, false))
+ subgroup.update!(visibility_level: Gitlab::VisibilityLevel.const_get(subgroup_visibility, false))
+ project1.update!(visibility_level: Gitlab::VisibilityLevel.const_get(group_visibility, false))
+ group.update!(visibility_level: Gitlab::VisibilityLevel.const_get(group_visibility, false))
+ end
+
+ it_behaves_like params[:shared_example_name]
+ end
+ end
+end
diff --git a/spec/finders/group_projects_finder_spec.rb b/spec/finders/group_projects_finder_spec.rb
index c66fdb19260..3fc4393df5d 100644
--- a/spec/finders/group_projects_finder_spec.rb
+++ b/spec/finders/group_projects_finder_spec.rb
@@ -142,20 +142,40 @@ RSpec.describe GroupProjectsFinder do
describe 'with an admin current user' do
let(:current_user) { create(:admin) }
- context "only shared" do
- let(:options) { { only_shared: true } }
+ context 'when admin mode is enabled', :enable_admin_mode do
+ context "only shared" do
+ let(:options) { { only_shared: true } }
- it { is_expected.to eq([shared_project_3, shared_project_2, shared_project_1]) }
- end
+ it { is_expected.to contain_exactly(shared_project_3, shared_project_2, shared_project_1) }
+ end
- context "only owned" do
- let(:options) { { only_owned: true } }
+ context "only owned" do
+ let(:options) { { only_owned: true } }
+
+ it { is_expected.to contain_exactly(private_project, public_project) }
+ end
- it { is_expected.to eq([private_project, public_project]) }
+ context "all" do
+ it { is_expected.to contain_exactly(shared_project_3, shared_project_2, shared_project_1, private_project, public_project) }
+ end
end
- context "all" do
- it { is_expected.to eq([shared_project_3, shared_project_2, shared_project_1, private_project, public_project]) }
+ context 'when admin mode is disabled' do
+ context "only shared" do
+ let(:options) { { only_shared: true } }
+
+ it { is_expected.to contain_exactly(shared_project_3, shared_project_1) }
+ end
+
+ context "only owned" do
+ let(:options) { { only_owned: true } }
+
+ it { is_expected.to contain_exactly(public_project) }
+ end
+
+ context "all" do
+ it { is_expected.to contain_exactly(shared_project_3, shared_project_1, public_project) }
+ end
end
end
diff --git a/spec/finders/groups_finder_spec.rb b/spec/finders/groups_finder_spec.rb
index c9e9328794e..d69720ae98e 100644
--- a/spec/finders/groups_finder_spec.rb
+++ b/spec/finders/groups_finder_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe GroupsFinder do
+ include AdminModeHelper
+
describe '#execute' do
let(:user) { create(:user) }
@@ -23,11 +25,16 @@ RSpec.describe GroupsFinder do
:external | { all_available: false } | %i(user_public_group user_internal_group user_private_group)
:external | {} | %i(public_group user_public_group user_internal_group user_private_group)
- :admin | { all_available: true } | %i(public_group internal_group private_group user_public_group
- user_internal_group user_private_group)
- :admin | { all_available: false } | %i(user_public_group user_internal_group user_private_group)
- :admin | {} | %i(public_group internal_group private_group user_public_group user_internal_group
- user_private_group)
+ :admin_without_admin_mode | { all_available: true } | %i(public_group internal_group user_public_group
+ user_internal_group user_private_group)
+ :admin_without_admin_mode | { all_available: false } | %i(user_public_group user_internal_group user_private_group)
+ :admin_without_admin_mode | {} | %i(public_group internal_group user_public_group user_internal_group user_private_group)
+
+ :admin_with_admin_mode | { all_available: true } | %i(public_group internal_group private_group user_public_group
+ user_internal_group user_private_group)
+ :admin_with_admin_mode | { all_available: false } | %i(user_public_group user_internal_group user_private_group)
+ :admin_with_admin_mode | {} | %i(public_group internal_group private_group user_public_group user_internal_group
+ user_private_group)
end
with_them do
@@ -52,8 +59,12 @@ RSpec.describe GroupsFinder do
create(:user)
when :external
create(:user, external: true)
- when :admin
+ when :admin_without_admin_mode
create(:user, :admin)
+ when :admin_with_admin_mode
+ admin = create(:user, :admin)
+ enable_admin_mode!(admin)
+ admin
end
@groups.values_at(:user_private_group, :user_internal_group, :user_public_group).each do |group|
group.add_developer(user)
diff --git a/spec/finders/issues_finder_spec.rb b/spec/finders/issues_finder_spec.rb
index 0def3412aa7..33b8a5954ae 100644
--- a/spec/finders/issues_finder_spec.rb
+++ b/spec/finders/issues_finder_spec.rb
@@ -13,7 +13,27 @@ RSpec.describe IssuesFinder do
let(:scope) { 'all' }
it 'returns all issues' do
- expect(issues).to contain_exactly(issue1, issue2, issue3, issue4)
+ expect(issues).to contain_exactly(issue1, issue2, issue3, issue4, issue5)
+ end
+
+ context 'user does not have read permissions' do
+ let(:search_user) { user2 }
+
+ context 'when filtering by project id' do
+ let(:params) { { project_id: project1.id } }
+
+ it 'returns no issues' do
+ expect(issues).to be_empty
+ end
+ end
+
+ context 'when filtering by group id' do
+ let(:params) { { group_id: group.id } }
+
+ it 'returns no issues' do
+ expect(issues).to be_empty
+ end
+ end
end
context 'assignee filtering' do
@@ -21,7 +41,7 @@ RSpec.describe IssuesFinder do
it_behaves_like 'assignee ID filter' do
let(:params) { { assignee_id: user.id } }
- let(:expected_issuables) { [issue1, issue2] }
+ let(:expected_issuables) { [issue1, issue2, issue5] }
end
it_behaves_like 'assignee NOT ID filter' do
@@ -59,7 +79,25 @@ RSpec.describe IssuesFinder do
end
it_behaves_like 'any assignee filter' do
- let(:expected_issuables) { [issue1, issue2, issue3] }
+ let(:expected_issuables) { [issue1, issue2, issue3, issue5] }
+ end
+ end
+
+ context 'filtering by release' do
+ context 'when the release tag is none' do
+ let(:params) { { release_tag: 'none' } }
+
+ it 'returns issues without releases' do
+ expect(issues).to contain_exactly(issue2, issue3, issue4, issue5)
+ end
+ end
+
+ context 'when the release tag exists' do
+ let(:params) { { project_id: project1.id, release_tag: release.tag } }
+
+ it 'returns the issues associated with that release' do
+ expect(issues).to contain_exactly(issue1)
+ end
end
end
@@ -68,7 +106,7 @@ RSpec.describe IssuesFinder do
let(:params) { { projects: [project1.id] } }
it 'returns the issue belonging to the projects' do
- expect(issues).to contain_exactly(issue1)
+ expect(issues).to contain_exactly(issue1, issue5)
end
end
@@ -76,7 +114,7 @@ RSpec.describe IssuesFinder do
let(:params) { { projects: Project.id_in(project1.id) } }
it 'returns the issue belonging to the projects' do
- expect(issues).to contain_exactly(issue1)
+ expect(issues).to contain_exactly(issue1, issue5)
end
end
end
@@ -86,7 +124,7 @@ RSpec.describe IssuesFinder do
context 'when include_subgroup param not set' do
it 'returns all group issues' do
- expect(issues).to contain_exactly(issue1)
+ expect(issues).to contain_exactly(issue1, issue5)
end
context 'when projects outside the group are passed' do
@@ -101,7 +139,7 @@ RSpec.describe IssuesFinder do
let(:params) { { group_id: group.id, projects: [project1.id] } }
it 'returns the issue within the group and projects' do
- expect(issues).to contain_exactly(issue1)
+ expect(issues).to contain_exactly(issue1, issue5)
end
end
@@ -109,7 +147,15 @@ RSpec.describe IssuesFinder do
let(:params) { { group_id: group.id, projects: Project.id_in(project1.id) } }
it 'returns the issue within the group and projects' do
- expect(issues).to contain_exactly(issue1)
+ expect(issues).to contain_exactly(issue1, issue5)
+ end
+ end
+
+ context 'when release_tag is passed as a parameter' do
+ let(:params) { { group_id: group.id, release_tag: 'dne-release-tag' } }
+
+ it 'ignores the release_tag parameter' do
+ expect(issues).to contain_exactly(issue1, issue5)
end
end
end
@@ -120,7 +166,7 @@ RSpec.describe IssuesFinder do
end
it 'returns all group and subgroup issues' do
- expect(issues).to contain_exactly(issue1, issue4)
+ expect(issues).to contain_exactly(issue1, issue4, issue5)
end
context 'when mixed projects are passed' do
@@ -145,7 +191,7 @@ RSpec.describe IssuesFinder do
let(:params) { { not: { author_id: user2.id } } }
it 'returns issues not created by that user' do
- expect(issues).to contain_exactly(issue1, issue2, issue4)
+ expect(issues).to contain_exactly(issue1, issue2, issue4, issue5)
end
end
@@ -175,7 +221,7 @@ RSpec.describe IssuesFinder do
let(:params) { { not: { milestone_title: milestone.title } } }
it 'returns issues not assigned to that milestone' do
- expect(issues).to contain_exactly(issue2, issue3, issue4)
+ expect(issues).to contain_exactly(issue2, issue3, issue4, issue5)
end
end
@@ -199,7 +245,7 @@ RSpec.describe IssuesFinder do
let(:params) { { not: { milestone_title: group_milestone.title } } }
it 'returns issues not assigned to that group milestone' do
- expect(issues).to contain_exactly(issue1, issue4)
+ expect(issues).to contain_exactly(issue1, issue4, issue5)
end
end
end
@@ -208,13 +254,13 @@ RSpec.describe IssuesFinder do
let(:params) { { milestone_title: 'None' } }
it 'returns issues with no milestone' do
- expect(issues).to contain_exactly(issue2, issue3, issue4)
+ expect(issues).to contain_exactly(issue2, issue3, issue4, issue5)
end
it 'returns issues with no milestone (deprecated)' do
params[:milestone_title] = Milestone::None.title
- expect(issues).to contain_exactly(issue2, issue3, issue4)
+ expect(issues).to contain_exactly(issue2, issue3, issue4, issue5)
end
end
@@ -343,7 +389,7 @@ RSpec.describe IssuesFinder do
let(:params) { { not: { label_name: label.title } } }
it 'returns issues that do not have that label' do
- expect(issues).to contain_exactly(issue1, issue3, issue4)
+ expect(issues).to contain_exactly(issue1, issue3, issue4, issue5)
end
# IssuableFinder first filters using the outer params (the ones not inside the `not` key.)
@@ -383,7 +429,7 @@ RSpec.describe IssuesFinder do
let(:params) { { not: { label_name: [label.title, label2.title].join(',') } } }
it 'returns issues that do not have any of the labels provided' do
- expect(issues).to contain_exactly(issue1, issue4)
+ expect(issues).to contain_exactly(issue1, issue4, issue5)
end
end
end
@@ -405,7 +451,7 @@ RSpec.describe IssuesFinder do
let(:params) { { not: { label_name: [label.title, label2.title].join(',') } } }
it 'returns issues that do not have ANY ONE of the labels provided' do
- expect(issues).to contain_exactly(issue1, issue4)
+ expect(issues).to contain_exactly(issue1, issue4, issue5)
end
end
end
@@ -414,7 +460,7 @@ RSpec.describe IssuesFinder do
let(:params) { { label_name: described_class::Params::FILTER_NONE } }
it 'returns issues with no labels' do
- expect(issues).to contain_exactly(issue1, issue4)
+ expect(issues).to contain_exactly(issue1, issue4, issue5)
end
end
@@ -483,14 +529,14 @@ RSpec.describe IssuesFinder do
end
context 'filtering by issues iids' do
- let(:params) { { iids: issue3.iid } }
+ let(:params) { { iids: [issue3.iid] } }
- it 'returns issues with iids match' do
- expect(issues).to contain_exactly(issue3)
+ it 'returns issues where iids match' do
+ expect(issues).to contain_exactly(issue3, issue5)
end
context 'using NOT' do
- let(:params) { { not: { iids: issue3.iid } } }
+ let(:params) { { not: { iids: [issue3.iid] } } }
it 'returns issues with no iids match' do
expect(issues).to contain_exactly(issue1, issue2, issue4)
@@ -503,7 +549,7 @@ RSpec.describe IssuesFinder do
let(:params) { { state: 'opened' } }
it 'returns only opened issues' do
- expect(issues).to contain_exactly(issue1, issue2, issue3, issue4)
+ expect(issues).to contain_exactly(issue1, issue2, issue3, issue4, issue5)
end
end
@@ -519,7 +565,7 @@ RSpec.describe IssuesFinder do
let(:params) { { state: 'all' } }
it 'returns all issues' do
- expect(issues).to contain_exactly(issue1, issue2, issue3, closed_issue, issue4)
+ expect(issues).to contain_exactly(issue1, issue2, issue3, closed_issue, issue4, issue5)
end
end
@@ -527,7 +573,7 @@ RSpec.describe IssuesFinder do
let(:params) { { state: 'invalid_state' } }
it 'returns all issues' do
- expect(issues).to contain_exactly(issue1, issue2, issue3, closed_issue, issue4)
+ expect(issues).to contain_exactly(issue1, issue2, issue3, closed_issue, issue4, issue5)
end
end
end
@@ -619,7 +665,7 @@ RSpec.describe IssuesFinder do
let(:params) { { my_reaction_emoji: 'None' } }
it 'returns issues that the user did not react to' do
- expect(issues).to contain_exactly(issue2, issue4)
+ expect(issues).to contain_exactly(issue2, issue4, issue5)
end
end
@@ -642,7 +688,7 @@ RSpec.describe IssuesFinder do
let(:params) { { not: { my_reaction_emoji: 'thumbsup' } } }
it 'returns issues that the user did not thumbsup to' do
- expect(issues).to contain_exactly(issue2, issue3, issue4)
+ expect(issues).to contain_exactly(issue2, issue3, issue4, issue5)
end
end
end
@@ -676,7 +722,7 @@ RSpec.describe IssuesFinder do
let(:params) { { not: { my_reaction_emoji: 'thumbsdown' } } }
it 'returns issues that the user thumbsdown to' do
- expect(issues).to contain_exactly(issue1, issue2, issue4)
+ expect(issues).to contain_exactly(issue1, issue2, issue4, issue5)
end
end
end
@@ -687,7 +733,7 @@ RSpec.describe IssuesFinder do
context 'no filtering' do
it 'returns all issues' do
- expect(issues).to contain_exactly(issue1, issue2, issue3, issue4, confidential_issue)
+ expect(issues).to contain_exactly(issue1, issue2, issue3, issue4, issue5, confidential_issue)
end
end
@@ -703,7 +749,7 @@ RSpec.describe IssuesFinder do
let(:params) { { confidential: false } }
it 'returns only non-confidential issues' do
- expect(issues).to contain_exactly(issue1, issue2, issue3, issue4)
+ expect(issues).to contain_exactly(issue1, issue2, issue3, issue4, issue5)
end
end
end
@@ -715,7 +761,7 @@ RSpec.describe IssuesFinder do
let(:params) { { issue_types: [] } }
it 'returns all issues' do
- expect(issues).to contain_exactly(incident_issue, issue1, issue2, issue3, issue4)
+ expect(issues).to contain_exactly(incident_issue, issue1, issue2, issue3, issue4, issue5)
end
end
@@ -731,7 +777,7 @@ RSpec.describe IssuesFinder do
let(:params) { { issue_types: ['issue'] } }
it 'returns all issues with type issue' do
- expect(issues).to contain_exactly(issue1, issue2, issue3, issue4)
+ expect(issues).to contain_exactly(issue1, issue2, issue3, issue4, issue5)
end
end
@@ -739,7 +785,7 @@ RSpec.describe IssuesFinder do
let(:params) { { issue_types: %w(issue incident) } }
it 'returns all issues' do
- expect(issues).to contain_exactly(incident_issue, issue1, issue2, issue3, issue4)
+ expect(issues).to contain_exactly(incident_issue, issue1, issue2, issue3, issue4, issue5)
end
end
@@ -790,14 +836,14 @@ RSpec.describe IssuesFinder do
let(:scope) { 'assigned_to_me' }
it 'returns issue assigned to the user' do
- expect(issues).to contain_exactly(issue1, issue2)
+ expect(issues).to contain_exactly(issue1, issue2, issue5)
end
context 'filtering by project' do
let(:params) { { project_id: project1.id } }
it 'returns issues assigned to the user in that project' do
- expect(issues).to contain_exactly(issue1)
+ expect(issues).to contain_exactly(issue1, issue5)
end
end
end
@@ -839,7 +885,7 @@ RSpec.describe IssuesFinder do
let(:params) { base_params.merge(due_date: Issue::NoDueDate.name) }
it 'returns issues with no due date' do
- expect(issues).to contain_exactly(issue1)
+ expect(issues).to contain_exactly(issue1, issue5)
end
end
@@ -872,16 +918,26 @@ RSpec.describe IssuesFinder do
describe '#row_count', :request_store do
let_it_be(:admin) { create(:admin) }
- it 'returns the number of rows for the default state' do
- finder = described_class.new(admin)
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'returns the number of rows for the default state' do
+ finder = described_class.new(admin)
+
+ expect(finder.row_count).to eq(5)
+ end
+
+ it 'returns the number of rows for a given state' do
+ finder = described_class.new(admin, state: 'closed')
- expect(finder.row_count).to eq(4)
+ expect(finder.row_count).to be_zero
+ end
end
- it 'returns the number of rows for a given state' do
- finder = described_class.new(admin, state: 'closed')
+ context 'when admin mode is disabled' do
+ it 'returns no rows' do
+ finder = described_class.new(admin)
- expect(finder.row_count).to be_zero
+ expect(finder.row_count).to be_zero
+ end
end
it 'returns -1 if the query times out' do
@@ -950,8 +1006,17 @@ RSpec.describe IssuesFinder do
subject { described_class.new(admin_user, params).with_confidentiality_access_check }
- it 'returns all issues' do
- expect(subject).to include(public_issue, confidential_issue)
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'returns all issues' do
+ expect(subject).to include(public_issue, confidential_issue)
+ end
+ end
+
+ context 'when admin mode is disabled' do
+ it 'returns only public issues' do
+ expect(subject).to include(public_issue)
+ expect(subject).not_to include(confidential_issue)
+ end
end
end
end
@@ -1023,14 +1088,27 @@ RSpec.describe IssuesFinder do
subject { described_class.new(admin_user, params).with_confidentiality_access_check }
- it 'returns all issues' do
- expect(subject).to include(public_issue, confidential_issue)
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'returns all issues' do
+ expect(subject).to include(public_issue, confidential_issue)
+ end
+
+ it 'does not filter by confidentiality' do
+ expect(Issue).not_to receive(:where).with(a_string_matching('confidential'), anything)
+
+ subject
+ end
end
- it 'does not filter by confidentiality' do
- expect(Issue).not_to receive(:where).with(a_string_matching('confidential'), anything)
+ context 'when admin mode is disabled' do
+ it 'returns only public issues' do
+ expect(subject).to include(public_issue)
+ expect(subject).not_to include(confidential_issue)
+ end
- subject
+ it 'filters by confidentiality' do
+ expect(subject.to_sql).to match("issues.confidential")
+ end
end
end
end
diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb
index 7b59b581b1c..6fdfe780463 100644
--- a/spec/finders/merge_requests_finder_spec.rb
+++ b/spec/finders/merge_requests_finder_spec.rb
@@ -76,13 +76,40 @@ RSpec.describe MergeRequestsFinder do
expect(merge_requests).to contain_exactly(merge_request3, merge_request4)
end
- it 'filters by commit sha' do
- merge_requests = described_class.new(
- user,
- commit_sha: merge_request5.merge_request_diff.last_commit_sha
- ).execute
+ context 'filters by commit sha' do
+ subject(:merge_requests) { described_class.new(user, commit_sha: commit_sha).execute }
+
+ context 'when commit belongs to the merge request' do
+ let(:commit_sha) { merge_request5.merge_request_diff.last_commit_sha }
+
+ it 'filters by commit sha' do
+ is_expected.to contain_exactly(merge_request5)
+ end
+ end
+
+ context 'when commit is a squash commit' do
+ before do
+ merge_request4.update!(squash_commit_sha: commit_sha)
+ end
+
+ let(:commit_sha) { '1234abcd' }
+
+ it 'filters by commit sha' do
+ is_expected.to contain_exactly(merge_request4)
+ end
+ end
+
+ context 'when commit is a merge commit' do
+ before do
+ merge_request4.update!(merge_commit_sha: commit_sha)
+ end
+
+ let(:commit_sha) { '1234dcba' }
- expect(merge_requests).to contain_exactly(merge_request5)
+ it 'filters by commit sha' do
+ is_expected.to contain_exactly(merge_request4)
+ end
+ end
end
context 'filters by merged_at date' do
@@ -697,10 +724,18 @@ RSpec.describe MergeRequestsFinder do
context 'with admin user' do
let(:user) { create(:user, :admin) }
- it 'returns all merge requests' do
- expect(merge_requests).to eq(
- [mr_internal_private_repo_access, mr_private_repo_access, mr_internal, mr_private, mr_public]
- )
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'returns all merge requests' do
+ expect(merge_requests).to contain_exactly(
+ mr_internal_private_repo_access, mr_private_repo_access, mr_internal, mr_private, mr_public
+ )
+ end
+ end
+
+ context 'when admin mode is disabled' do
+ it 'returns public and internal merge requests' do
+ expect(merge_requests).to contain_exactly(mr_internal, mr_public)
+ end
end
end
diff --git a/spec/finders/packages/debian/distributions_finder_spec.rb b/spec/finders/packages/debian/distributions_finder_spec.rb
new file mode 100644
index 00000000000..f141b13e394
--- /dev/null
+++ b/spec/finders/packages/debian/distributions_finder_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::DistributionsFinder do
+ it_behaves_like 'Debian Distributions Finder', :debian_project_distribution, true
+ it_behaves_like 'Debian Distributions Finder', :debian_group_distribution, false
+
+ context 'with nil container' do
+ let(:service) { described_class.new(nil) }
+
+ subject { service.execute.to_a }
+
+ it 'raises error' do
+ expect { subject }.to raise_error ArgumentError, "Unexpected container type of 'NilClass'"
+ end
+ end
+
+ context 'with unexpected container type' do
+ let(:service) { described_class.new(:invalid) }
+
+ subject { service.execute.to_a }
+
+ it 'raises error' do
+ expect { subject }.to raise_error ArgumentError, "Unexpected container type of 'Symbol'"
+ end
+ end
+end
diff --git a/spec/finders/packages/group_packages_finder_spec.rb b/spec/finders/packages/group_packages_finder_spec.rb
index 0db69de65a5..8dd53b9c3f9 100644
--- a/spec/finders/packages/group_packages_finder_spec.rb
+++ b/spec/finders/packages/group_packages_finder_spec.rb
@@ -127,12 +127,6 @@ RSpec.describe Packages::GroupPackagesFinder do
it { is_expected.to match_array([package1, package2]) }
end
- context 'does not include packages without version number' do
- let_it_be(:package_without_version) { create(:maven_package, project: project, version: nil) }
-
- it { is_expected.not_to include(package_without_version) }
- end
-
context 'with package_name' do
let_it_be(:named_package) { create(:maven_package, project: project, name: 'maven') }
let(:params) { { package_name: package_name } }
@@ -151,6 +145,8 @@ RSpec.describe Packages::GroupPackagesFinder do
end
end
end
+
+ it_behaves_like 'concerning versionless param'
end
context 'group has package of all types' do
diff --git a/spec/finders/packages/maven/package_finder_spec.rb b/spec/finders/packages/maven/package_finder_spec.rb
index 239e8c10f52..b955c331f28 100644
--- a/spec/finders/packages/maven/package_finder_spec.rb
+++ b/spec/finders/packages/maven/package_finder_spec.rb
@@ -1,57 +1,81 @@
# frozen_string_literal: true
+
require 'spec_helper'
RSpec.describe ::Packages::Maven::PackageFinder do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:project) { create(:project, namespace: group) }
- let(:package) { create(:maven_package, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, namespace: group) }
+ let_it_be(:package) { create(:maven_package, project: project) }
+
+ let(:param_path) { nil }
+ let(:param_project) { nil }
+ let(:param_group) { nil }
+ let(:finder) { described_class.new(param_path, user, project: param_project, group: param_group) }
before do
group.add_developer(user)
end
describe '#execute!' do
- context 'within the project' do
- it 'returns a package' do
- finder = described_class.new(package.maven_metadatum.path, user, project: project)
-
- expect(finder.execute!).to eq(package)
- end
+ subject { finder.execute! }
- it 'raises an error' do
- finder = described_class.new('com/example/my-app/1.0-SNAPSHOT', user, project: project)
+ shared_examples 'handling valid and invalid paths' do
+ context 'with a valid path' do
+ let(:param_path) { package.maven_metadatum.path }
- expect { finder.execute! }.to raise_error(ActiveRecord::RecordNotFound)
+ it { is_expected.to eq(package) }
end
- end
- context 'across all projects' do
- it 'returns a package' do
- finder = described_class.new(package.maven_metadatum.path, user)
+ context 'with an invalid path' do
+ let(:param_path) { 'com/example/my-app/1.0-SNAPSHOT' }
- expect(finder.execute!).to eq(package)
+ it 'raises an error' do
+ expect { subject }.to raise_error(ActiveRecord::RecordNotFound)
+ end
end
+ end
- it 'raises an error' do
- finder = described_class.new('com/example/my-app/1.0-SNAPSHOT', user)
+ context 'within the project' do
+ let(:param_project) { project }
- expect { finder.execute! }.to raise_error(ActiveRecord::RecordNotFound)
- end
+ it_behaves_like 'handling valid and invalid paths'
end
context 'within a group' do
- it 'returns a package' do
- finder = described_class.new(package.maven_metadatum.path, user, group: group)
+ let(:param_group) { group }
- expect(finder.execute!).to eq(package)
- end
+ it_behaves_like 'handling valid and invalid paths'
+ end
+ context 'across all projects' do
it 'raises an error' do
- finder = described_class.new('com/example/my-app/1.0-SNAPSHOT', user, group: group)
+ expect { subject }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ context 'versionless maven-metadata.xml package' do
+ let_it_be(:sub_group1) { create(:group, parent: group) }
+ let_it_be(:sub_group2) { create(:group, parent: group) }
+ let_it_be(:project1) { create(:project, group: sub_group1) }
+ let_it_be(:project2) { create(:project, group: sub_group2) }
+ let_it_be(:project3) { create(:project, group: sub_group1) }
+ let_it_be(:package_name) { 'foo' }
+ let_it_be(:package1) { create(:maven_package, project: project1, name: package_name, version: nil) }
+ let_it_be(:package2) { create(:maven_package, project: project2, name: package_name, version: nil) }
+ let_it_be(:package3) { create(:maven_package, project: project3, name: package_name, version: nil) }
- expect { finder.execute! }.to raise_error(ActiveRecord::RecordNotFound)
+ let(:param_group) { group }
+ let(:param_path) { package_name }
+
+ before do
+ sub_group1.add_developer(user)
+ sub_group2.add_developer(user)
+ # the package with the most recently published file should be returned
+ create(:package_file, :xml, package: package2)
end
+
+ it { is_expected.to eq(package2) }
end
end
end
diff --git a/spec/finders/packages/nuget/package_finder_spec.rb b/spec/finders/packages/nuget/package_finder_spec.rb
index 9295d0c7a2f..10b5f6c8ec2 100644
--- a/spec/finders/packages/nuget/package_finder_spec.rb
+++ b/spec/finders/packages/nuget/package_finder_spec.rb
@@ -2,74 +2,117 @@
require 'spec_helper'
RSpec.describe Packages::Nuget::PackageFinder do
- let_it_be(:package1) { create(:nuget_package) }
- let_it_be(:project) { package1.project }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:subgroup) { create(:group, parent: group) }
+ let_it_be(:project) { create(:project, namespace: subgroup) }
+ let_it_be(:package1) { create(:nuget_package, project: project) }
let_it_be(:package2) { create(:nuget_package, name: package1.name, version: '2.0.0', project: project) }
let_it_be(:package3) { create(:nuget_package, name: 'Another.Dummy.Package', project: project) }
+ let_it_be(:other_package_1) { create(:nuget_package, name: package1.name, version: package1.version) }
+ let_it_be(:other_package_2) { create(:nuget_package, name: package1.name, version: package2.version) }
let(:package_name) { package1.name }
let(:package_version) { nil }
let(:limit) { 50 }
describe '#execute!' do
- subject { described_class.new(project, package_name: package_name, package_version: package_version, limit: limit).execute }
+ subject { described_class.new(user, target, package_name: package_name, package_version: package_version, limit: limit).execute }
- it { is_expected.to match_array([package1, package2]) }
+ shared_examples 'handling all the conditions' do
+ it { is_expected.to match_array([package1, package2]) }
- context 'with lower case package name' do
- let(:package_name) { package1.name.downcase }
+ context 'with lower case package name' do
+ let(:package_name) { package1.name.downcase }
- it { is_expected.to match_array([package1, package2]) }
- end
+ it { is_expected.to match_array([package1, package2]) }
+ end
- context 'with unknown package name' do
- let(:package_name) { 'foobar' }
+ context 'with unknown package name' do
+ let(:package_name) { 'foobar' }
- it { is_expected.to be_empty }
- end
+ it { is_expected.to be_empty }
+ end
- context 'with valid version' do
- let(:package_version) { '2.0.0' }
+ context 'with valid version' do
+ let(:package_version) { '2.0.0' }
- it { is_expected.to match_array([package2]) }
- end
+ it { is_expected.to match_array([package2]) }
+ end
- context 'with unknown version' do
- let(:package_version) { 'foobar' }
+ context 'with unknown version' do
+ let(:package_version) { 'foobar' }
- it { is_expected.to be_empty }
- end
+ it { is_expected.to be_empty }
+ end
+
+ context 'with limit hit' do
+ let_it_be(:package4) { create(:nuget_package, name: package1.name, project: project) }
+ let_it_be(:package5) { create(:nuget_package, name: package1.name, project: project) }
+ let_it_be(:package6) { create(:nuget_package, name: package1.name, project: project) }
+ let(:limit) { 2 }
+
+ it { is_expected.to match_array([package5, package6]) }
+ end
+
+ context 'with downcase package name' do
+ let(:package_name) { package1.name.downcase }
+
+ it { is_expected.to match_array([package1, package2]) }
+ end
- context 'with limit hit' do
- let_it_be(:package4) { create(:nuget_package, name: package1.name, project: project) }
- let_it_be(:package5) { create(:nuget_package, name: package1.name, project: project) }
- let_it_be(:package6) { create(:nuget_package, name: package1.name, project: project) }
- let(:limit) { 2 }
+ context 'with prefix wildcard' do
+ let(:package_name) { "%#{package1.name[3..-1]}" }
- it { is_expected.to match_array([package5, package6]) }
+ it { is_expected.to match_array([package1, package2]) }
+ end
+
+ context 'with suffix wildcard' do
+ let(:package_name) { "#{package1.name[0..-3]}%" }
+
+ it { is_expected.to match_array([package1, package2]) }
+ end
+
+ context 'with surrounding wildcards' do
+ let(:package_name) { "%#{package1.name[3..-3]}%" }
+
+ it { is_expected.to match_array([package1, package2]) }
+ end
end
- context 'with downcase package name' do
- let(:package_name) { package1.name.downcase }
+ context 'with a project' do
+ let(:target) { project }
- it { is_expected.to match_array([package1, package2]) }
+ before do
+ project.add_developer(user)
+ end
+
+ it_behaves_like 'handling all the conditions'
end
- context 'with prefix wildcard' do
- let(:package_name) { "%#{package1.name[3..-1]}" }
+ context 'with a subgroup' do
+ let(:target) { subgroup }
- it { is_expected.to match_array([package1, package2]) }
+ before do
+ subgroup.add_developer(user)
+ end
+
+ it_behaves_like 'handling all the conditions'
end
- context 'with suffix wildcard' do
- let(:package_name) { "#{package1.name[0..-3]}%" }
+ context 'with a group' do
+ let(:target) { group }
- it { is_expected.to match_array([package1, package2]) }
+ before do
+ group.add_developer(user)
+ end
+
+ it_behaves_like 'handling all the conditions'
end
- context 'with surrounding wildcards' do
- let(:package_name) { "%#{package1.name[3..-3]}%" }
+ context 'with nil' do
+ let(:target) { nil }
- it { is_expected.to match_array([package1, package2]) }
+ it { is_expected.to be_empty }
end
end
end
diff --git a/spec/finders/packages/packages_finder_spec.rb b/spec/finders/packages/packages_finder_spec.rb
index 925b003bb8e..77a171db144 100644
--- a/spec/finders/packages/packages_finder_spec.rb
+++ b/spec/finders/packages/packages_finder_spec.rb
@@ -81,10 +81,6 @@ RSpec.describe ::Packages::PackagesFinder do
it { is_expected.to match_array([conan_package, maven_package]) }
end
- context 'does not include packages without version number' do
- let_it_be(:package_without_version) { create(:maven_package, project: project, version: nil) }
-
- it { is_expected.not_to include(package_without_version) }
- end
+ it_behaves_like 'concerning versionless param'
end
end
diff --git a/spec/finders/projects_finder_spec.rb b/spec/finders/projects_finder_spec.rb
index 57977fb69b4..4d9ff30daba 100644
--- a/spec/finders/projects_finder_spec.rb
+++ b/spec/finders/projects_finder_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ProjectsFinder, :do_not_mock_admin_mode do
+RSpec.describe ProjectsFinder do
include AdminModeHelper
describe '#execute' do
@@ -353,7 +353,7 @@ RSpec.describe ProjectsFinder, :do_not_mock_admin_mode do
end
before do
- stub_feature_flags(project_finder_similarity_sort: true)
+ stub_feature_flags(project_finder_similarity_sort: current_user)
end
it { is_expected.to eq([internal_project2, internal_project4, internal_project3]) }
diff --git a/spec/finders/snippets_finder_spec.rb b/spec/finders/snippets_finder_spec.rb
index 6fc1cbcee0a..9c9a04a4df5 100644
--- a/spec/finders/snippets_finder_spec.rb
+++ b/spec/finders/snippets_finder_spec.rb
@@ -106,12 +106,18 @@ RSpec.describe SnippetsFinder do
expect(snippets).to contain_exactly(public_personal_snippet)
end
- it 'returns all snippets for an admin' do
+ it 'returns all snippets for an admin in admin mode', :enable_admin_mode do
snippets = described_class.new(admin, author: user).execute
expect(snippets).to contain_exactly(private_personal_snippet, internal_personal_snippet, public_personal_snippet)
end
+ it 'returns all public and internal snippets for an admin without admin mode' do
+ snippets = described_class.new(admin, author: user).execute
+
+ expect(snippets).to contain_exactly(internal_personal_snippet, public_personal_snippet)
+ end
+
context 'when author is not valid' do
it 'returns quickly' do
finder = described_class.new(admin, author: non_existing_record_id)
@@ -180,12 +186,18 @@ RSpec.describe SnippetsFinder do
expect(snippets).to contain_exactly(private_project_snippet)
end
- it 'returns all snippets for an admin' do
+ it 'returns all snippets for an admin in admin mode', :enable_admin_mode do
snippets = described_class.new(admin, project: project).execute
expect(snippets).to contain_exactly(private_project_snippet, internal_project_snippet, public_project_snippet)
end
+ it 'returns public and internal snippets for an admin without admin mode' do
+ snippets = described_class.new(admin, project: project).execute
+
+ expect(snippets).to contain_exactly(internal_project_snippet, public_project_snippet)
+ end
+
context 'filter by author' do
let!(:other_user) { create(:user) }
let!(:other_private_project_snippet) { create(:project_snippet, :private, project: project, author: other_user) }
@@ -218,7 +230,7 @@ RSpec.describe SnippetsFinder do
end
context 'filter by snippet type' do
- context 'when filtering by only_personal snippet' do
+ context 'when filtering by only_personal snippet', :enable_admin_mode do
it 'returns only personal snippet' do
snippets = described_class.new(admin, only_personal: true).execute
@@ -228,7 +240,7 @@ RSpec.describe SnippetsFinder do
end
end
- context 'when filtering by only_project snippet' do
+ context 'when filtering by only_project snippet', :enable_admin_mode do
it 'returns only project snippet' do
snippets = described_class.new(admin, only_project: true).execute
@@ -239,7 +251,7 @@ RSpec.describe SnippetsFinder do
end
end
- context 'filtering by ids' do
+ context 'filtering by ids', :enable_admin_mode do
it 'returns only personal snippet' do
snippets = described_class.new(
admin, ids: [private_personal_snippet.id,
@@ -265,13 +277,21 @@ RSpec.describe SnippetsFinder do
)
end
- it 'returns all personal snippets for admins' do
+ it 'returns all personal snippets for admins when in admin mode', :enable_admin_mode do
snippets = described_class.new(admin, explore: true).execute
expect(snippets).to contain_exactly(
private_personal_snippet, internal_personal_snippet, public_personal_snippet
)
end
+
+ it 'also returns internal personal snippets for admins without admin mode' do
+ snippets = described_class.new(admin, explore: true).execute
+
+ expect(snippets).to contain_exactly(
+ internal_personal_snippet, public_personal_snippet
+ )
+ end
end
context 'when the user cannot read cross project' do
@@ -302,7 +322,7 @@ RSpec.describe SnippetsFinder do
end
end
- context 'no sort param is provided' do
+ context 'no sort param is provided', :enable_admin_mode do
it 'returns snippets sorted by id' do
snippets = described_class.new(admin).execute
@@ -310,7 +330,7 @@ RSpec.describe SnippetsFinder do
end
end
- context 'sort param is provided' do
+ context 'sort param is provided', :enable_admin_mode do
it 'returns snippets sorted by sort param' do
snippets = described_class.new(admin, sort: 'updated_desc').execute
diff --git a/spec/finders/template_finder_spec.rb b/spec/finders/template_finder_spec.rb
index 0fdd6ab402d..2da864b9a46 100644
--- a/spec/finders/template_finder_spec.rb
+++ b/spec/finders/template_finder_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe TemplateFinder do
:gitlab_ci_ymls | described_class
:licenses | ::LicenseTemplateFinder
:metrics_dashboard_ymls | described_class
+ :gitlab_ci_syntax_ymls | described_class
end
with_them do
@@ -30,6 +31,7 @@ RSpec.describe TemplateFinder do
:gitignores | 'Actionscript'
:gitlab_ci_ymls | 'Android'
:metrics_dashboard_ymls | 'Default'
+ :gitlab_ci_syntax_ymls | 'Artifacts example'
end
with_them do
diff --git a/spec/finders/users_finder_spec.rb b/spec/finders/users_finder_spec.rb
index a04f5452fcd..d9cc71106d5 100644
--- a/spec/finders/users_finder_spec.rb
+++ b/spec/finders/users_finder_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe UsersFinder do
it 'returns all users' do
users = described_class.new(user).execute
- expect(users).to contain_exactly(user, normal_user, blocked_user, omniauth_user, internal_user)
+ expect(users).to contain_exactly(user, normal_user, blocked_user, omniauth_user, internal_user, admin_user)
end
it 'filters by username' do
@@ -48,13 +48,13 @@ RSpec.describe UsersFinder do
it 'filters by active users' do
users = described_class.new(user, active: true).execute
- expect(users).to contain_exactly(user, normal_user, omniauth_user)
+ expect(users).to contain_exactly(user, normal_user, omniauth_user, admin_user)
end
it 'returns no external users' do
users = described_class.new(user, external: true).execute
- expect(users).to contain_exactly(user, normal_user, blocked_user, omniauth_user, internal_user)
+ expect(users).to contain_exactly(user, normal_user, blocked_user, omniauth_user, internal_user, admin_user)
end
it 'filters by created_at' do
@@ -71,7 +71,7 @@ RSpec.describe UsersFinder do
it 'filters by non internal users' do
users = described_class.new(user, non_internal: true).execute
- expect(users).to contain_exactly(user, normal_user, blocked_user, omniauth_user)
+ expect(users).to contain_exactly(user, normal_user, blocked_user, omniauth_user, admin_user)
end
it 'does not filter by custom attributes' do
@@ -80,17 +80,22 @@ RSpec.describe UsersFinder do
custom_attributes: { foo: 'bar' }
).execute
- expect(users).to contain_exactly(user, normal_user, blocked_user, omniauth_user, internal_user)
+ expect(users).to contain_exactly(user, normal_user, blocked_user, omniauth_user, internal_user, admin_user)
end
it 'orders returned results' do
users = described_class.new(user, sort: 'id_asc').execute
- expect(users).to eq([normal_user, blocked_user, omniauth_user, internal_user, user])
+ expect(users).to eq([normal_user, admin_user, blocked_user, omniauth_user, internal_user, user])
+ end
+
+ it 'does not filter by admins' do
+ users = described_class.new(user, admins: true).execute
+ expect(users).to contain_exactly(user, normal_user, admin_user, blocked_user, omniauth_user, internal_user)
end
end
- context 'with an admin user' do
+ context 'with an admin user', :enable_admin_mode do
let(:admin) { create(:admin) }
it 'filters by external users' do
@@ -102,7 +107,13 @@ RSpec.describe UsersFinder do
it 'returns all users' do
users = described_class.new(admin).execute
- expect(users).to contain_exactly(admin, normal_user, blocked_user, external_user, omniauth_user, internal_user)
+ expect(users).to contain_exactly(admin, normal_user, blocked_user, external_user, omniauth_user, internal_user, admin_user)
+ end
+
+ it 'returns only admins' do
+ users = described_class.new(admin, admins: true).execute
+
+ expect(users).to contain_exactly(admin, admin_user)
end
it 'filters by custom attributes' do
diff --git a/spec/fixtures/api/schemas/entities/discussion.json b/spec/fixtures/api/schemas/entities/discussion.json
index 2afabcc9195..1a5b8150ed2 100644
--- a/spec/fixtures/api/schemas/entities/discussion.json
+++ b/spec/fixtures/api/schemas/entities/discussion.json
@@ -48,6 +48,7 @@
"resolved": { "type": "boolean" },
"resolvable": { "type": "boolean" },
"resolved_by": { "type": ["string", "null"] },
+ "resolved_at": { "type": ["date", "null"] },
"note": { "type": "string" },
"note_html": { "type": "string" },
"current_user": { "type": "object" },
diff --git a/spec/fixtures/api/schemas/group_member.json b/spec/fixtures/api/schemas/entities/member.json
index 3425108e46e..e8b40745803 100644
--- a/spec/fixtures/api/schemas/group_member.json
+++ b/spec/fixtures/api/schemas/entities/member.json
@@ -24,16 +24,18 @@
"properties": {
"integer_value": { "type": "integer" },
"string_value": { "type": "string" }
- }
+ },
+ "additionalProperties": false
},
"source": {
"type": "object",
- "required": ["id", "name", "web_url"],
+ "required": ["id", "full_name", "web_url"],
"properties": {
"id": { "type": "integer" },
- "name": { "type": "string" },
+ "full_name": { "type": "string" },
"web_url": { "type": "string" }
- }
+ },
+ "additionalProperties": false
},
"valid_roles": { "type": "object" },
"created_by": {
@@ -42,39 +44,13 @@
"properties": {
"name": { "type": "string" },
"web_url": { "type": "string" }
- }
+ },
+ "additionalProperties": false
},
"user": {
- "type": "object",
- "required": [
- "id",
- "name",
- "username",
- "avatar_url",
- "web_url",
- "blocked",
- "two_factor_enabled"
- ],
- "properties": {
- "id": { "type": "integer" },
- "name": { "type": "string" },
- "username": { "type": "string" },
- "avatar_url": { "type": ["string", "null"] },
- "web_url": { "type": "string" },
- "blocked": { "type": "boolean" },
- "two_factor_enabled": { "type": "boolean" },
- "status": {
- "type": "object",
- "required": [
- "emoji",
- "message_html"
- ],
- "properties": {
- "emoji": { "type": "string" },
- "message_html": { "type": "string" }
- }
- }
- }
+ "allOf": [
+ { "$ref": "member_user.json" }
+ ]
},
"invite": {
"type": "object",
@@ -83,7 +59,8 @@
"email": { "type": "string" },
"avatar_url": { "type": "string" },
"can_resend": { "type": "boolean" }
- }
+ },
+ "additionalProperties": false
}
}
}
diff --git a/spec/fixtures/api/schemas/entities/member_user.json b/spec/fixtures/api/schemas/entities/member_user.json
new file mode 100644
index 00000000000..983cdb7b9d9
--- /dev/null
+++ b/spec/fixtures/api/schemas/entities/member_user.json
@@ -0,0 +1,22 @@
+{
+ "type": "object",
+ "required": ["id", "name", "username", "avatar_url", "web_url", "blocked", "two_factor_enabled"],
+ "properties": {
+ "id": { "type": "integer" },
+ "name": { "type": "string" },
+ "username": { "type": "string" },
+ "avatar_url": { "type": ["string", "null"] },
+ "web_url": { "type": "string" },
+ "blocked": { "type": "boolean" },
+ "two_factor_enabled": { "type": "boolean" },
+ "status": {
+ "type": "object",
+ "required": ["emoji"],
+ "properties": {
+ "emoji": { "type": "string" }
+ },
+ "additionalProperties": false
+ }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/environment.json b/spec/fixtures/api/schemas/environment.json
index b8c4253056e..4f54a77e6b2 100644
--- a/spec/fixtures/api/schemas/environment.json
+++ b/spec/fixtures/api/schemas/environment.json
@@ -37,6 +37,12 @@
"has_opened_alert": { "type": "boolean" },
"cluster_type": { "type": "types/nullable_string.json" },
"terminal_path": { "type": "types/nullable_string.json" },
+ "rollout_status": {
+ "oneOf": [
+ { "type": "null" },
+ { "$ref": "rollout_status.json" }
+ ]
+ },
"last_deployment": {
"oneOf": [
{ "type": "null" },
diff --git a/spec/fixtures/api/schemas/graphql/namespace/package_settings.json b/spec/fixtures/api/schemas/graphql/namespace/package_settings.json
new file mode 100644
index 00000000000..c1286ee0944
--- /dev/null
+++ b/spec/fixtures/api/schemas/graphql/namespace/package_settings.json
@@ -0,0 +1,12 @@
+{
+ "type": "object",
+ "required": ["mavenDuplicatesAllowed", "mavenDuplicateExceptionRegex"],
+ "properties": {
+ "mavenDuplicatesAllowed": {
+ "type": "boolean"
+ },
+ "mavenDuplicateExceptionRegex": {
+ "type": "string"
+ }
+ }
+}
diff --git a/spec/fixtures/api/schemas/graphql/packages/package_composer_details.json b/spec/fixtures/api/schemas/graphql/packages/package_composer_details.json
new file mode 100644
index 00000000000..bcf64a6e567
--- /dev/null
+++ b/spec/fixtures/api/schemas/graphql/packages/package_composer_details.json
@@ -0,0 +1,12 @@
+{
+ "type": "object",
+ "allOf": [{ "$ref": "./package_details.json" }],
+ "properties": {
+ "target_sha": {
+ "type": "string"
+ },
+ "composer_json": {
+ "type": "object"
+ }
+ }
+}
diff --git a/spec/fixtures/api/schemas/graphql/packages/package_details.json b/spec/fixtures/api/schemas/graphql/packages/package_details.json
new file mode 100644
index 00000000000..4f90285183c
--- /dev/null
+++ b/spec/fixtures/api/schemas/graphql/packages/package_details.json
@@ -0,0 +1,36 @@
+{
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string"
+ },
+ "name": {
+ "type": "string"
+ },
+ "createdAt": {
+ "type": "string"
+ },
+ "updatedAt": {
+ "type": "string"
+ },
+ "version": {
+ "type": ["string", "null"]
+ },
+ "package_type": {
+ "type": ["string"],
+ "enum": ["MAVEN", "NPM", "CONAN", "NUGET", "PYPI", "COMPOSER", "GENERIC", "GOLANG", "DEBIAN"]
+ },
+ "tags": {
+ "type": "object"
+ },
+ "project": {
+ "type": "object"
+ },
+ "pipelines": {
+ "type": "object"
+ },
+ "versions": {
+ "type": "object"
+ }
+ }
+}
diff --git a/spec/fixtures/api/schemas/group_members.json b/spec/fixtures/api/schemas/members.json
index 6268c7ef4d8..0b0b56da9f8 100644
--- a/spec/fixtures/api/schemas/group_members.json
+++ b/spec/fixtures/api/schemas/members.json
@@ -1,6 +1,6 @@
{
"type": "array",
"items": {
- "$ref": "group_member.json"
+ "$ref": "entities/member.json"
}
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/notes.json b/spec/fixtures/api/schemas/public_api/v4/notes.json
index 683dcb19836..c4510207882 100644
--- a/spec/fixtures/api/schemas/public_api/v4/notes.json
+++ b/spec/fixtures/api/schemas/public_api/v4/notes.json
@@ -31,6 +31,7 @@
"resolved": { "type": "boolean" },
"resolvable": { "type": "boolean" },
"resolved_by": { "type": ["string", "null"] },
+ "resolved_at": { "type": ["date", "null"] },
"confidential": { "type": ["boolean", "null"] }
},
"required": [
diff --git a/spec/fixtures/api/schemas/public_api/v4/snippet_repository_storage_move.json b/spec/fixtures/api/schemas/public_api/v4/snippet_repository_storage_move.json
new file mode 100644
index 00000000000..f51e7e8edc5
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/snippet_repository_storage_move.json
@@ -0,0 +1,20 @@
+{
+ "type": "object",
+ "required": [
+ "id",
+ "created_at",
+ "state",
+ "source_storage_name",
+ "destination_storage_name",
+ "snippet"
+ ],
+ "properties" : {
+ "id": { "type": "integer" },
+ "created_at": { "type": "date" },
+ "state": { "type": "string" },
+ "source_storage_name": { "type": "string" },
+ "destination_storage_name": { "type": "string" },
+ "snippet": { "type": "object" }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/snippet_repository_storage_moves.json b/spec/fixtures/api/schemas/public_api/v4/snippet_repository_storage_moves.json
new file mode 100644
index 00000000000..292bb335539
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/snippet_repository_storage_moves.json
@@ -0,0 +1,6 @@
+{
+ "type": "array",
+ "items": {
+ "$ref": "./snippet_repository_storage_move.json"
+ }
+}
diff --git a/spec/fixtures/api/schemas/rollout_status.json b/spec/fixtures/api/schemas/rollout_status.json
new file mode 100644
index 00000000000..334bed4ab65
--- /dev/null
+++ b/spec/fixtures/api/schemas/rollout_status.json
@@ -0,0 +1,52 @@
+{
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "status"
+ ],
+ "properties": {
+ "status": {
+ "type": "string"
+ },
+ "completion": {
+ "type": "integer"
+ },
+ "is_completed": {
+ "type": "boolean"
+ },
+ "has_legacy_app_label": {
+ "type": "boolean"
+ },
+ "instances": {
+ "type": "array",
+ "items": {
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "status",
+ "pod_name",
+ "tooltip",
+ "track",
+ "stable"
+ ],
+ "properties": {
+ "status": {
+ "type": "string"
+ },
+ "pod_name": {
+ "type": "string"
+ },
+ "tooltip": {
+ "type": "string"
+ },
+ "track": {
+ "type": "string"
+ },
+ "stable": {
+ "type": "boolean"
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/spec/fixtures/emails/valid_note_on_issuable.eml b/spec/fixtures/emails/valid_note_on_issuable.eml
new file mode 100644
index 00000000000..29308c9d969
--- /dev/null
+++ b/spec/fixtures/emails/valid_note_on_issuable.eml
@@ -0,0 +1,24 @@
+Return-Path: <jake@adventuretime.ooo>
+Received: from iceking.adventuretime.ooo ([unix socket]) by iceking (Cyrus v2.2.13-Debian-2.2.13-19+squeeze3) with LMTPA; Thu, 13 Jun 2013 17:03:50 -0400
+Received: from mail-ie0-x234.google.com (mail-ie0-x234.google.com [IPv6:2607:f8b0:4001:c03::234]) by iceking.adventuretime.ooo (8.14.3/8.14.3/Debian-9.4) with ESMTP id r5DL3nFJ016967 (version=TLSv1/SSLv3 cipher=RC4-SHA bits=128 verify=NOT) for <incoming+gitlabhq/gitlabhq@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 17:03:50 -0400
+Received: by mail-ie0-f180.google.com with SMTP id f4so21977375iea.25 for <incoming+gitlabhq-gitlabhq-project_id-auth_token-issue-issue_iid@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 14:03:48 -0700
+Received: by 10.0.0.1 with HTTP; Thu, 13 Jun 2013 14:03:48 -0700
+Date: Thu, 13 Jun 2013 17:03:48 -0400
+From: Jake the Dog <jake@adventuretime.ooo>
+To: incoming+gitlabhq-gitlabhq-project_id-auth_token-issue-issue_iid@appmail.adventuretime.ooo
+Message-ID: <CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com>
+Subject: New Issue comment by email
+Mime-Version: 1.0
+Content-Type: text/plain;
+ charset=ISO-8859-1
+Content-Transfer-Encoding: 7bit
+X-Sieve: CMU Sieve 2.2
+X-Received: by 10.0.0.1 with SMTP id n7mr11234144ipb.85.1371157428600; Thu,
+ 13 Jun 2013 14:03:48 -0700 (PDT)
+X-Scanned-By: MIMEDefang 2.69 on IPv6:2001:470:1d:165::1
+
+This should create a new comment on the issue.
+
+- Jake out
+
+> This quoted content will be included in the comment.
diff --git a/spec/fixtures/whats_new/blank.yml b/spec/fixtures/whats_new/blank.yml
new file mode 100644
index 00000000000..4628cae2ecc
--- /dev/null
+++ b/spec/fixtures/whats_new/blank.yml
@@ -0,0 +1,9 @@
+- title:
+ body:
+ stage:
+ self-managed:
+ gitlab-com:
+ url:
+ image_url:
+ published_at:
+ release:
diff --git a/spec/fixtures/whats_new/invalid.yml b/spec/fixtures/whats_new/invalid.yml
new file mode 100644
index 00000000000..0e588efaf8f
--- /dev/null
+++ b/spec/fixtures/whats_new/invalid.yml
@@ -0,0 +1,20 @@
+- title: Create and view requirements in GitLab
+ body: The first step towards managing requirements from within GitLab is here! This initial release allows users to create and view requirements at a project level. As Requirements Management evolves in GitLab, stay tuned for support for traceability between all artifacts, creating a seamless workflow to visually demonstrate completeness and compliance.
+ stage: Plan
+ self-managed: true
+ gitlab-com: true
+ packages: [ALL]
+ url: https://docs.gitlab.com/ee/user/project/requirements/index.html
+ image_url: https://about.gitlab.com/images/press/logo/png/gitlab-icon-rgb.png
+ published_at: 2020-04-22
+ release: 12.10
+- title: Retrieve CI/CD secrets from HashiCorp Vault
+ body: In this release, GitLab adds support for lightweight JSON Web Token (JWT) authentication to integrate with your existing HashiCorp Vault. Now, you can seamlessly provide secrets to CI/CD jobs by taking advantage of HashiCorp's JWT authentication method rather than manually having to provide secrets as a variable in GitLab.
+ stage: Release
+ self-managed: true
+ gitlab-com: true
+ packages: [Starter]
+ url: https://docs.gitlab.com/ee/ci/examples/authenticating-with-hashicorp-vault/index.html
+ image_url: https://about.gitlab.com/images/12_10/jwt-vault-1.png
+ published_at: 2020-04-22
+ release: 12.10
diff --git a/spec/fixtures/whats_new/valid.yml b/spec/fixtures/whats_new/valid.yml
new file mode 100644
index 00000000000..cbe9d666357
--- /dev/null
+++ b/spec/fixtures/whats_new/valid.yml
@@ -0,0 +1,20 @@
+- title: Create and view requirements in GitLab
+ body: The first step towards managing requirements from within GitLab is here! This initial release allows users to create and view requirements at a project level. As Requirements Management evolves in GitLab, stay tuned for support for traceability between all artifacts, creating a seamless workflow to visually demonstrate completeness and compliance.
+ stage: Plan
+ self-managed: true
+ gitlab-com: true
+ packages: [Ultimate]
+ url: https://docs.gitlab.com/ee/user/project/requirements/index.html
+ image_url: https://about.gitlab.com/images/press/logo/png/gitlab-icon-rgb.png
+ published_at: 2020-04-22
+ release: 12.10
+- title: Retrieve CI/CD secrets from HashiCorp Vault
+ body: In this release, GitLab adds support for lightweight JSON Web Token (JWT) authentication to integrate with your existing HashiCorp Vault. Now, you can seamlessly provide secrets to CI/CD jobs by taking advantage of HashiCorp's JWT authentication method rather than manually having to provide secrets as a variable in GitLab.
+ stage: Release
+ self-managed: true
+ gitlab-com: true
+ packages: [Starter]
+ url: https://docs.gitlab.com/ee/ci/examples/authenticating-with-hashicorp-vault/index.html
+ image_url: https://about.gitlab.com/images/12_10/jwt-vault-1.png
+ published_at: 2020-04-22
+ release: 12.10
diff --git a/spec/frontend/__helpers__/README.md b/spec/frontend/__helpers__/README.md
new file mode 100644
index 00000000000..7b6c488f820
--- /dev/null
+++ b/spec/frontend/__helpers__/README.md
@@ -0,0 +1,5 @@
+This folder contains helpers to be used in specs (and the specs for the helpers themselves).
+There is a convenient alias of `helpers/`, so files in this folder (e.g. `wait_for_promises`) can be imported
+with `helpers/wait_for_promises`.
+
+If you are writing a spec for `~/helpers` application code, please place it in [helpers](../helpers).
diff --git a/spec/frontend/helpers/backoff_helper.js b/spec/frontend/__helpers__/backoff_helper.js
index e5c0308d3fb..a971fcb0945 100644
--- a/spec/frontend/helpers/backoff_helper.js
+++ b/spec/frontend/__helpers__/backoff_helper.js
@@ -16,9 +16,9 @@
*
* @param {Function} callback
*/
-export const backoffMockImplementation = callback => {
+export const backoffMockImplementation = (callback) => {
const q = new Promise((resolve, reject) => {
- const stop = arg => (arg instanceof Error ? reject(arg) : resolve(arg));
+ const stop = (arg) => (arg instanceof Error ? reject(arg) : resolve(arg));
const next = () => callback(next, stop);
// Define a timeout based on a mock timer
setTimeout(() => {
diff --git a/spec/frontend/helpers/class_spec_helper.js b/spec/frontend/__helpers__/class_spec_helper.js
index b26f087f0c5..b26f087f0c5 100644
--- a/spec/frontend/helpers/class_spec_helper.js
+++ b/spec/frontend/__helpers__/class_spec_helper.js
diff --git a/spec/frontend/helpers/class_spec_helper_spec.js b/spec/frontend/__helpers__/class_spec_helper_spec.js
index 533d5687bde..533d5687bde 100644
--- a/spec/frontend/helpers/class_spec_helper_spec.js
+++ b/spec/frontend/__helpers__/class_spec_helper_spec.js
diff --git a/spec/frontend/__helpers__/datetime_helpers.js b/spec/frontend/__helpers__/datetime_helpers.js
new file mode 100644
index 00000000000..25dbd1d477d
--- /dev/null
+++ b/spec/frontend/__helpers__/datetime_helpers.js
@@ -0,0 +1,6 @@
+import dateFormat from 'dateformat';
+
+/**
+ * Returns a date object corresponding to the given date string.
+ */
+export const dateFromString = (dateString) => new Date(dateFormat(dateString));
diff --git a/spec/frontend/helpers/dom_events_helper.js b/spec/frontend/__helpers__/dom_events_helper.js
index 423e5c58bb4..865ea97903f 100644
--- a/spec/frontend/helpers/dom_events_helper.js
+++ b/spec/frontend/__helpers__/dom_events_helper.js
@@ -1,4 +1,4 @@
-export const triggerDOMEvent = type => {
+export const triggerDOMEvent = (type) => {
window.document.dispatchEvent(
new Event(type, {
bubbles: true,
diff --git a/spec/frontend/helpers/dom_shims/README.md b/spec/frontend/__helpers__/dom_shims/README.md
index 1105e4b0c4c..1105e4b0c4c 100644
--- a/spec/frontend/helpers/dom_shims/README.md
+++ b/spec/frontend/__helpers__/dom_shims/README.md
diff --git a/spec/frontend/helpers/dom_shims/create_object_url.js b/spec/frontend/__helpers__/dom_shims/create_object_url.js
index 94d060cab08..94d060cab08 100644
--- a/spec/frontend/helpers/dom_shims/create_object_url.js
+++ b/spec/frontend/__helpers__/dom_shims/create_object_url.js
diff --git a/spec/frontend/helpers/dom_shims/element_scroll_by.js b/spec/frontend/__helpers__/dom_shims/element_scroll_by.js
index 7d91279e4aa..7d91279e4aa 100644
--- a/spec/frontend/helpers/dom_shims/element_scroll_by.js
+++ b/spec/frontend/__helpers__/dom_shims/element_scroll_by.js
diff --git a/spec/frontend/helpers/dom_shims/element_scroll_into_view.js b/spec/frontend/__helpers__/dom_shims/element_scroll_into_view.js
index a7262d04db0..a7262d04db0 100644
--- a/spec/frontend/helpers/dom_shims/element_scroll_into_view.js
+++ b/spec/frontend/__helpers__/dom_shims/element_scroll_into_view.js
diff --git a/spec/frontend/helpers/dom_shims/element_scroll_to.js b/spec/frontend/__helpers__/dom_shims/element_scroll_to.js
index 68f8a115865..68f8a115865 100644
--- a/spec/frontend/helpers/dom_shims/element_scroll_to.js
+++ b/spec/frontend/__helpers__/dom_shims/element_scroll_to.js
diff --git a/spec/frontend/helpers/dom_shims/form_element.js b/spec/frontend/__helpers__/dom_shims/form_element.js
index 46ef0374848..46ef0374848 100644
--- a/spec/frontend/helpers/dom_shims/form_element.js
+++ b/spec/frontend/__helpers__/dom_shims/form_element.js
diff --git a/spec/frontend/helpers/dom_shims/get_client_rects.js b/spec/frontend/__helpers__/dom_shims/get_client_rects.js
index 7ba60dd7936..7ba60dd7936 100644
--- a/spec/frontend/helpers/dom_shims/get_client_rects.js
+++ b/spec/frontend/__helpers__/dom_shims/get_client_rects.js
diff --git a/spec/frontend/helpers/dom_shims/get_client_rects_spec.js b/spec/frontend/__helpers__/dom_shims/get_client_rects_spec.js
index e7b8f1e235b..e7b8f1e235b 100644
--- a/spec/frontend/helpers/dom_shims/get_client_rects_spec.js
+++ b/spec/frontend/__helpers__/dom_shims/get_client_rects_spec.js
diff --git a/spec/frontend/helpers/dom_shims/image_element_properties.js b/spec/frontend/__helpers__/dom_shims/image_element_properties.js
index d94c157e44d..d94c157e44d 100644
--- a/spec/frontend/helpers/dom_shims/image_element_properties.js
+++ b/spec/frontend/__helpers__/dom_shims/image_element_properties.js
diff --git a/spec/frontend/helpers/dom_shims/index.js b/spec/frontend/__helpers__/dom_shims/index.js
index 9b70cb86b8b..9b70cb86b8b 100644
--- a/spec/frontend/helpers/dom_shims/index.js
+++ b/spec/frontend/__helpers__/dom_shims/index.js
diff --git a/spec/frontend/helpers/dom_shims/inner_text.js b/spec/frontend/__helpers__/dom_shims/inner_text.js
index 2b8201eed31..2b8201eed31 100644
--- a/spec/frontend/helpers/dom_shims/inner_text.js
+++ b/spec/frontend/__helpers__/dom_shims/inner_text.js
diff --git a/spec/frontend/helpers/dom_shims/range.js b/spec/frontend/__helpers__/dom_shims/range.js
index 4ffdf3280ad..4ffdf3280ad 100644
--- a/spec/frontend/helpers/dom_shims/range.js
+++ b/spec/frontend/__helpers__/dom_shims/range.js
diff --git a/spec/frontend/helpers/dom_shims/scroll_by.js b/spec/frontend/__helpers__/dom_shims/scroll_by.js
index 90387e51765..90387e51765 100644
--- a/spec/frontend/helpers/dom_shims/scroll_by.js
+++ b/spec/frontend/__helpers__/dom_shims/scroll_by.js
diff --git a/spec/frontend/helpers/dom_shims/size_properties.js b/spec/frontend/__helpers__/dom_shims/size_properties.js
index a2d5940bd1e..fbb919868b1 100644
--- a/spec/frontend/helpers/dom_shims/size_properties.js
+++ b/spec/frontend/__helpers__/dom_shims/size_properties.js
@@ -1,4 +1,4 @@
-const convertFromStyle = style => {
+const convertFromStyle = (style) => {
if (style.match(/[0-9](px|rem)/g)) {
return Number(style.replace(/[^0-9]/g, ''));
}
diff --git a/spec/frontend/helpers/dom_shims/window_scroll_to.js b/spec/frontend/__helpers__/dom_shims/window_scroll_to.js
index 20ae1910bf3..20ae1910bf3 100644
--- a/spec/frontend/helpers/dom_shims/window_scroll_to.js
+++ b/spec/frontend/__helpers__/dom_shims/window_scroll_to.js
diff --git a/spec/frontend/helpers/emoji.js b/spec/frontend/__helpers__/emoji.js
index e8a93e21818..ea6613b53c9 100644
--- a/spec/frontend/helpers/emoji.js
+++ b/spec/frontend/__helpers__/emoji.js
@@ -55,7 +55,7 @@ export const emojiFixtureMap = {
},
};
-Object.keys(emojiFixtureMap).forEach(k => {
+Object.keys(emojiFixtureMap).forEach((k) => {
emojiFixtureMap[k].name = k;
if (!emojiFixtureMap[k].aliases) {
emojiFixtureMap[k].aliases = [];
@@ -64,7 +64,7 @@ Object.keys(emojiFixtureMap).forEach(k => {
export async function initEmojiMock() {
const emojiData = Object.fromEntries(
- Object.values(emojiFixtureMap).map(m => {
+ Object.values(emojiFixtureMap).map((m) => {
const { name: n, moji: e, unicodeVersion: u, category: c, description: d } = m;
return [n, { c, e, d, u }];
}),
@@ -81,8 +81,8 @@ export async function initEmojiMock() {
export function describeEmojiFields(label, tests) {
describe.each`
field | accessor
- ${'name'} | ${e => e.name}
- ${'alias'} | ${e => e.aliases[0]}
- ${'description'} | ${e => e.description}
+ ${'name'} | ${(e) => e.name}
+ ${'alias'} | ${(e) => e.aliases[0]}
+ ${'description'} | ${(e) => e.description}
`(label, tests);
}
diff --git a/spec/frontend/helpers/experimentation_helper.js b/spec/frontend/__helpers__/experimentation_helper.js
index c08c25155e8..c08c25155e8 100644
--- a/spec/frontend/helpers/experimentation_helper.js
+++ b/spec/frontend/__helpers__/experimentation_helper.js
diff --git a/spec/frontend/helpers/fake_date.js b/spec/frontend/__helpers__/fake_date.js
index 387747ab5bd..5391ae04797 100644
--- a/spec/frontend/helpers/fake_date.js
+++ b/spec/frontend/__helpers__/fake_date.js
@@ -3,9 +3,9 @@ export const DEFAULT_ARGS = [2020, 6, 6];
const RealDate = Date;
-const isMocked = val => Boolean(val.mock);
+const isMocked = (val) => Boolean(val.mock);
-export const createFakeDateClass = ctorDefault => {
+export const createFakeDateClass = (ctorDefault) => {
const FakeDate = new Proxy(RealDate, {
construct: (target, argArray) => {
const ctorArgs = argArray.length ? argArray : ctorDefault;
@@ -25,7 +25,7 @@ export const createFakeDateClass = ctorDefault => {
return target[prop];
},
- getPrototypeOf: target => {
+ getPrototypeOf: (target) => {
return target.prototype;
},
// We need to be able to set props so that `jest.spyOn` will work.
diff --git a/spec/frontend/helpers/fake_date_spec.js b/spec/frontend/__helpers__/fake_date_spec.js
index b3ed13e238a..b3ed13e238a 100644
--- a/spec/frontend/helpers/fake_date_spec.js
+++ b/spec/frontend/__helpers__/fake_date_spec.js
diff --git a/spec/frontend/helpers/fake_request_animation_frame.js b/spec/frontend/__helpers__/fake_request_animation_frame.js
index f6fc29df4dc..ca9d82b6c3a 100644
--- a/spec/frontend/helpers/fake_request_animation_frame.js
+++ b/spec/frontend/__helpers__/fake_request_animation_frame.js
@@ -3,7 +3,7 @@ export const useFakeRequestAnimationFrame = () => {
beforeEach(() => {
orig = global.requestAnimationFrame;
- global.requestAnimationFrame = cb => cb();
+ global.requestAnimationFrame = (cb) => cb();
});
afterEach(() => {
diff --git a/spec/frontend/helpers/filtered_search_spec_helper.js b/spec/frontend/__helpers__/filtered_search_spec_helper.js
index ecf10694a16..ecf10694a16 100644
--- a/spec/frontend/helpers/filtered_search_spec_helper.js
+++ b/spec/frontend/__helpers__/filtered_search_spec_helper.js
diff --git a/spec/frontend/helpers/fixtures.js b/spec/frontend/__helpers__/fixtures.js
index a89ceab3f8e..4b86724df93 100644
--- a/spec/frontend/helpers/fixtures.js
+++ b/spec/frontend/__helpers__/fixtures.js
@@ -20,7 +20,7 @@ Did you run bin/rake frontend:fixtures?`,
return fs.readFileSync(absolutePath, 'utf8');
}
-export const getJSONFixture = relativePath => JSON.parse(getFixture(relativePath));
+export const getJSONFixture = (relativePath) => JSON.parse(getFixture(relativePath));
export const resetHTMLFixture = () => {
document.head.innerHTML = '';
diff --git a/spec/frontend/helpers/init_vue_mr_page_helper.js b/spec/frontend/__helpers__/init_vue_mr_page_helper.js
index b9aed63d0f6..b9aed63d0f6 100644
--- a/spec/frontend/helpers/init_vue_mr_page_helper.js
+++ b/spec/frontend/__helpers__/init_vue_mr_page_helper.js
diff --git a/spec/frontend/helpers/jest_helpers.js b/spec/frontend/__helpers__/jest_helpers.js
index 0b623e0a59b..273d2c91966 100644
--- a/spec/frontend/helpers/jest_helpers.js
+++ b/spec/frontend/__helpers__/jest_helpers.js
@@ -15,7 +15,7 @@ Try not to use these in new tests - this module is provided primarily for conven
*/
export function createSpyObj(baseName, methods) {
const obj = {};
- methods.forEach(method => {
+ methods.forEach((method) => {
obj[method] = jest.fn().mockName(`${baseName}#${method}`);
});
return obj;
diff --git a/spec/frontend/helpers/jquery.js b/spec/frontend/__helpers__/jquery.js
index 4af5f904394..4af5f904394 100644
--- a/spec/frontend/helpers/jquery.js
+++ b/spec/frontend/__helpers__/jquery.js
diff --git a/spec/frontend/helpers/keep_alive_component_helper.js b/spec/frontend/__helpers__/keep_alive_component_helper.js
index 54f40bf9093..54f40bf9093 100644
--- a/spec/frontend/helpers/keep_alive_component_helper.js
+++ b/spec/frontend/__helpers__/keep_alive_component_helper.js
diff --git a/spec/frontend/helpers/keep_alive_component_helper_spec.js b/spec/frontend/__helpers__/keep_alive_component_helper_spec.js
index dcccc14f396..dcccc14f396 100644
--- a/spec/frontend/helpers/keep_alive_component_helper_spec.js
+++ b/spec/frontend/__helpers__/keep_alive_component_helper_spec.js
diff --git a/spec/frontend/helpers/local_storage_helper.js b/spec/frontend/__helpers__/local_storage_helper.js
index 0318b80aaef..21749fd8070 100644
--- a/spec/frontend/helpers/local_storage_helper.js
+++ b/spec/frontend/__helpers__/local_storage_helper.js
@@ -8,7 +8,7 @@
*
* @param {() => any} fn Function that returns the object to use for localStorage
*/
-const useLocalStorage = fn => {
+const useLocalStorage = (fn) => {
const origLocalStorage = window.localStorage;
let currentLocalStorage = origLocalStorage;
@@ -35,11 +35,11 @@ export const createLocalStorageSpy = () => {
clear: jest.fn(() => {
storage = {};
}),
- getItem: jest.fn(key => (key in storage ? storage[key] : null)),
+ getItem: jest.fn((key) => (key in storage ? storage[key] : null)),
setItem: jest.fn((key, value) => {
storage[key] = value;
}),
- removeItem: jest.fn(key => delete storage[key]),
+ removeItem: jest.fn((key) => delete storage[key]),
};
};
diff --git a/spec/frontend/helpers/local_storage_helper_spec.js b/spec/frontend/__helpers__/local_storage_helper_spec.js
index 5d9961e7631..5d9961e7631 100644
--- a/spec/frontend/helpers/local_storage_helper_spec.js
+++ b/spec/frontend/__helpers__/local_storage_helper_spec.js
diff --git a/spec/frontend/helpers/locale_helper.js b/spec/frontend/__helpers__/locale_helper.js
index 283d9bc14c9..bb4a2eccf4e 100644
--- a/spec/frontend/helpers/locale_helper.js
+++ b/spec/frontend/__helpers__/locale_helper.js
@@ -1,4 +1,4 @@
-export const setLanguage = languageCode => {
+export const setLanguage = (languageCode) => {
const htmlElement = document.querySelector('html');
if (languageCode) {
diff --git a/spec/frontend/helpers/mock_apollo_helper.js b/spec/frontend/__helpers__/mock_apollo_helper.js
index 914cce1d662..914cce1d662 100644
--- a/spec/frontend/helpers/mock_apollo_helper.js
+++ b/spec/frontend/__helpers__/mock_apollo_helper.js
diff --git a/spec/frontend/helpers/mock_dom_observer.js b/spec/frontend/__helpers__/mock_dom_observer.js
index 1b93b81535d..1b93b81535d 100644
--- a/spec/frontend/helpers/mock_dom_observer.js
+++ b/spec/frontend/__helpers__/mock_dom_observer.js
diff --git a/spec/frontend/helpers/mock_window_location_helper.js b/spec/frontend/__helpers__/mock_window_location_helper.js
index 175044d1fce..08a28fbbbd6 100644
--- a/spec/frontend/helpers/mock_window_location_helper.js
+++ b/spec/frontend/__helpers__/mock_window_location_helper.js
@@ -8,7 +8,7 @@
*
* @param {() => any} fn Function that returns the object to use for window.location
*/
-const useMockLocation = fn => {
+const useMockLocation = (fn) => {
const origWindowLocation = window.location;
let currentWindowLocation;
diff --git a/spec/frontend/helpers/set_timeout_promise_helper.js b/spec/frontend/__helpers__/set_timeout_promise_helper.js
index 47087619187..afd18d92d15 100644
--- a/spec/frontend/helpers/set_timeout_promise_helper.js
+++ b/spec/frontend/__helpers__/set_timeout_promise_helper.js
@@ -1,4 +1,4 @@
export default (time = 0) =>
- new Promise(resolve => {
+ new Promise((resolve) => {
setTimeout(resolve, time);
});
diff --git a/spec/frontend/helpers/set_window_location_helper.js b/spec/frontend/__helpers__/set_window_location_helper.js
index a94e73762c9..a94e73762c9 100644
--- a/spec/frontend/helpers/set_window_location_helper.js
+++ b/spec/frontend/__helpers__/set_window_location_helper.js
diff --git a/spec/frontend/helpers/set_window_location_helper_spec.js b/spec/frontend/__helpers__/set_window_location_helper_spec.js
index da609b6bbf0..98f26854822 100644
--- a/spec/frontend/helpers/set_window_location_helper_spec.js
+++ b/spec/frontend/__helpers__/set_window_location_helper_spec.js
@@ -32,7 +32,7 @@ describe('setWindowLocation', () => {
it.each([null, 1, undefined, false, '', 'gitlab.com'])(
'throws an error when called with an invalid url: "%s"',
- invalidUrl => {
+ (invalidUrl) => {
expect(() => setWindowLocation(invalidUrl)).toThrow(/Invalid URL/);
expect(window.location).toBe(originalLocation);
},
diff --git a/spec/frontend/helpers/stub_children.js b/spec/frontend/__helpers__/stub_children.js
index 91171eb3d8c..0711563699f 100644
--- a/spec/frontend/helpers/stub_children.js
+++ b/spec/frontend/__helpers__/stub_children.js
@@ -1,3 +1,3 @@
export default function stubChildren(Component) {
- return Object.fromEntries(Object.keys(Component.components).map(c => [c, true]));
+ return Object.fromEntries(Object.keys(Component.components).map((c) => [c, true]));
}
diff --git a/spec/frontend/helpers/stub_component.js b/spec/frontend/__helpers__/stub_component.js
index 45550450517..45550450517 100644
--- a/spec/frontend/helpers/stub_component.js
+++ b/spec/frontend/__helpers__/stub_component.js
diff --git a/spec/frontend/__helpers__/stub_transition.js b/spec/frontend/__helpers__/stub_transition.js
new file mode 100644
index 00000000000..9cddb432a63
--- /dev/null
+++ b/spec/frontend/__helpers__/stub_transition.js
@@ -0,0 +1,8 @@
+export function stubTransition() {
+ return {
+ render() {
+ // eslint-disable-next-line no-underscore-dangle
+ return this.$options._renderChildren;
+ },
+ };
+}
diff --git a/spec/frontend/helpers/test_constants.js b/spec/frontend/__helpers__/test_constants.js
index 69b78f556aa..69b78f556aa 100644
--- a/spec/frontend/helpers/test_constants.js
+++ b/spec/frontend/__helpers__/test_constants.js
diff --git a/spec/frontend/helpers/text_helper.js b/spec/frontend/__helpers__/text_helper.js
index e0fe18e5560..164f18faadc 100644
--- a/spec/frontend/helpers/text_helper.js
+++ b/spec/frontend/__helpers__/text_helper.js
@@ -2,17 +2,17 @@
* Replaces line break with an empty space
* @param {*} data
*/
-export const removeBreakLine = data => data.replace(/\r?\n|\r/g, ' ');
+export const removeBreakLine = (data) => data.replace(/\r?\n|\r/g, ' ');
/**
* Removes line breaks, spaces and trims the given text
* @param {String} str
* @returns {String}
*/
-export const trimText = str =>
+export const trimText = (str) =>
str
.replace(/\r?\n|\r/g, '')
.replace(/\s\s+/g, ' ')
.trim();
-export const removeWhitespace = str => str.replace(/\s\s+/g, ' ');
+export const removeWhitespace = (str) => str.replace(/\s\s+/g, ' ');
diff --git a/spec/frontend/helpers/timeout.js b/spec/frontend/__helpers__/timeout.js
index 702ef0be5aa..8688625a95e 100644
--- a/spec/frontend/helpers/timeout.js
+++ b/spec/frontend/__helpers__/timeout.js
@@ -4,7 +4,7 @@ const IS_DEBUGGING = process.execArgv.join(' ').includes('--inspect-brk');
let testTimeoutNS;
-export const setTestTimeout = newTimeoutMS => {
+export const setTestTimeout = (newTimeoutMS) => {
const newTimeoutNS = newTimeoutMS * NS_PER_MS;
// never accept a smaller timeout than the default
if (newTimeoutNS < testTimeoutNS) {
@@ -18,7 +18,7 @@ export const setTestTimeout = newTimeoutMS => {
// Allows slow tests to set their own timeout.
// Useful for tests with jQuery, which is very slow in big DOMs.
let temporaryTimeoutNS = null;
-export const setTestTimeoutOnce = newTimeoutMS => {
+export const setTestTimeoutOnce = (newTimeoutMS) => {
const newTimeoutNS = newTimeoutMS * NS_PER_MS;
// never accept a smaller timeout than the default
if (newTimeoutNS < testTimeoutNS) {
@@ -28,7 +28,7 @@ export const setTestTimeoutOnce = newTimeoutMS => {
temporaryTimeoutNS = newTimeoutNS;
};
-export const initializeTestTimeout = defaultTimeoutMS => {
+export const initializeTestTimeout = (defaultTimeoutMS) => {
setTestTimeout(defaultTimeoutMS);
let testStartTime;
diff --git a/spec/frontend/helpers/tracking_helper.js b/spec/frontend/__helpers__/tracking_helper.js
index bd3bd24028c..08ba5a6628e 100644
--- a/spec/frontend/helpers/tracking_helper.js
+++ b/spec/frontend/__helpers__/tracking_helper.js
@@ -14,7 +14,7 @@ export function mockTracking(category = '_category_', documentOverride, spyMetho
export function unmockTracking() {
window.snowplow = undefined;
- handlers.forEach(event => document.removeEventListener(event.name, event.func));
+ handlers.forEach((event) => document.removeEventListener(event.name, event.func));
}
export function triggerEvent(selectorOrEl, eventName = 'click') {
diff --git a/spec/frontend/helpers/user_mock_data_helper.js b/spec/frontend/__helpers__/user_mock_data_helper.js
index a6adc9dc3a0..db747283d9e 100644
--- a/spec/frontend/helpers/user_mock_data_helper.js
+++ b/spec/frontend/__helpers__/user_mock_data_helper.js
@@ -2,12 +2,7 @@ let id = 1;
// Code taken from: https://gist.github.com/6174/6062387
const getRandomString = () =>
- Math.random()
- .toString(36)
- .substring(2, 15) +
- Math.random()
- .toString(36)
- .substring(2, 15);
+ Math.random().toString(36).substring(2, 15) + Math.random().toString(36).substring(2, 15);
const getRandomUrl = () => `https://${getRandomString()}.com/${getRandomString()}`;
diff --git a/spec/frontend/helpers/vue_mock_directive.js b/spec/frontend/__helpers__/vue_mock_directive.js
index 28d4708835d..e952f258c4d 100644
--- a/spec/frontend/helpers/vue_mock_directive.js
+++ b/spec/frontend/__helpers__/vue_mock_directive.js
@@ -1,4 +1,4 @@
-export const getKey = name => `$_gl_jest_${name}`;
+export const getKey = (name) => `$_gl_jest_${name}`;
export const getBinding = (el, name) => el[getKey(name)];
diff --git a/spec/frontend/helpers/vue_mount_component_helper.js b/spec/frontend/__helpers__/vue_mount_component_helper.js
index 615ff69a01c..615ff69a01c 100644
--- a/spec/frontend/helpers/vue_mount_component_helper.js
+++ b/spec/frontend/__helpers__/vue_mount_component_helper.js
diff --git a/spec/frontend/helpers/vue_test_utils_helper.js b/spec/frontend/__helpers__/vue_test_utils_helper.js
index 0e9127b5c65..ffccfb249c2 100644
--- a/spec/frontend/helpers/vue_test_utils_helper.js
+++ b/spec/frontend/__helpers__/vue_test_utils_helper.js
@@ -2,7 +2,7 @@ import { isArray } from 'lodash';
const vNodeContainsText = (vnode, text) =>
(vnode.text && vnode.text.includes(text)) ||
- (vnode.children && vnode.children.filter(child => vNodeContainsText(child, text)).length);
+ (vnode.children && vnode.children.filter((child) => vNodeContainsText(child, text)).length);
/**
* Determines whether a `shallowMount` Wrapper contains text
@@ -17,7 +17,7 @@ const vNodeContainsText = (vnode, text) =>
*/
export const shallowWrapperContainsSlotText = (shallowWrapper, slotName, text) =>
Boolean(
- shallowWrapper.vm.$slots[slotName].filter(vnode => vNodeContainsText(vnode, text)).length,
+ shallowWrapper.vm.$slots[slotName].filter((vnode) => vNodeContainsText(vnode, text)).length,
);
/**
@@ -27,8 +27,8 @@ export const shallowWrapperContainsSlotText = (shallowWrapper, slotName, text) =
* @param {String} expectedMutationType - The Mutation to wait for
*/
export const waitForMutation = (store, expectedMutationType) =>
- new Promise(resolve => {
- const unsubscribe = store.subscribe(mutation => {
+ new Promise((resolve) => {
+ const unsubscribe = store.subscribe((mutation) => {
if (mutation.type === expectedMutationType) {
unsubscribe();
resolve();
@@ -36,7 +36,7 @@ export const waitForMutation = (store, expectedMutationType) =>
});
});
-export const extendedWrapper = wrapper => {
+export const extendedWrapper = (wrapper) => {
if (isArray(wrapper) || !wrapper?.find) {
// eslint-disable-next-line no-console
console.warn(
diff --git a/spec/frontend/helpers/vue_test_utils_helper_spec.js b/spec/frontend/__helpers__/vue_test_utils_helper_spec.js
index 31c4ccd5dbb..31c4ccd5dbb 100644
--- a/spec/frontend/helpers/vue_test_utils_helper_spec.js
+++ b/spec/frontend/__helpers__/vue_test_utils_helper_spec.js
diff --git a/spec/frontend/helpers/vuex_action_helper.js b/spec/frontend/__helpers__/vuex_action_helper.js
index 64dd3888d47..e482a8fbc71 100644
--- a/spec/frontend/helpers/vuex_action_helper.js
+++ b/spec/frontend/__helpers__/vuex_action_helper.js
@@ -116,12 +116,12 @@ export default (
payload,
);
- return (result || new Promise(resolve => setImmediate(resolve)))
- .catch(error => {
+ return (result || new Promise((resolve) => setImmediate(resolve)))
+ .catch((error) => {
validateResults();
throw error;
})
- .then(data => {
+ .then((data) => {
validateResults();
return data;
});
diff --git a/spec/frontend/helpers/vuex_action_helper_spec.js b/spec/frontend/__helpers__/vuex_action_helper_spec.js
index 4d7bf21820a..b4f5a291774 100644
--- a/spec/frontend/helpers/vuex_action_helper_spec.js
+++ b/spec/frontend/__helpers__/vuex_action_helper_spec.js
@@ -10,7 +10,7 @@ const testActionFnWithOptionsArg = (...args) => {
describe.each([testActionFn, testActionFnWithOptionsArg])(
'VueX test helper (testAction)',
- testAction => {
+ (testAction) => {
let originalExpect;
let assertion;
let mock;
@@ -25,7 +25,7 @@ describe.each([testActionFn, testActionFnWithOptionsArg])(
*/
originalExpect = expect;
assertion = null;
- global.expect = actual => ({
+ global.expect = (actual) => ({
toEqual: () => {
originalExpect(actual).toEqual(assertion);
},
@@ -72,13 +72,13 @@ describe.each([testActionFn, testActionFnWithOptionsArg])(
testAction(action, null, {}, assertion.mutations, assertion.actions, noop);
});
- it('works with done callback once finished', done => {
+ it('works with done callback once finished', (done) => {
assertion = { mutations: [], actions: [] };
testAction(noop, null, {}, assertion.mutations, assertion.actions, done);
});
- it('returns a promise', done => {
+ it('returns a promise', (done) => {
assertion = { mutations: [], actions: [] };
testAction(noop, null, {}, assertion.mutations, assertion.actions)
@@ -96,7 +96,7 @@ describe.each([testActionFn, testActionFnWithOptionsArg])(
return axios
.get(TEST_HOST)
- .catch(error => {
+ .catch((error) => {
commit('ERROR');
lastError = error;
throw error;
@@ -111,7 +111,7 @@ describe.each([testActionFn, testActionFnWithOptionsArg])(
lastError = null;
});
- it('works with done callback once finished', done => {
+ it('works with done callback once finished', (done) => {
mock.onGet(TEST_HOST).replyOnce(200, 42);
assertion = { mutations: [{ type: 'SUCCESS' }], actions: [{ type: 'ACTION' }] };
@@ -119,34 +119,34 @@ describe.each([testActionFn, testActionFnWithOptionsArg])(
testAction(asyncAction, null, {}, assertion.mutations, assertion.actions, done);
});
- it('returns original data of successful promise while checking actions/mutations', done => {
+ it('returns original data of successful promise while checking actions/mutations', (done) => {
mock.onGet(TEST_HOST).replyOnce(200, 42);
assertion = { mutations: [{ type: 'SUCCESS' }], actions: [{ type: 'ACTION' }] };
testAction(asyncAction, null, {}, assertion.mutations, assertion.actions)
- .then(res => {
+ .then((res) => {
originalExpect(res).toEqual(data);
done();
})
.catch(done.fail);
});
- it('returns original error of rejected promise while checking actions/mutations', done => {
+ it('returns original error of rejected promise while checking actions/mutations', (done) => {
mock.onGet(TEST_HOST).replyOnce(500, '');
assertion = { mutations: [{ type: 'ERROR' }], actions: [{ type: 'ACTION' }] };
testAction(asyncAction, null, {}, assertion.mutations, assertion.actions)
.then(done.fail)
- .catch(error => {
+ .catch((error) => {
originalExpect(error).toBe(lastError);
done();
});
});
});
- it('works with async actions not returning promises', done => {
+ it('works with async actions not returning promises', (done) => {
const data = { FOO: 'BAR' };
const asyncAction = ({ commit, dispatch }) => {
@@ -158,7 +158,7 @@ describe.each([testActionFn, testActionFnWithOptionsArg])(
commit('SUCCESS');
return data;
})
- .catch(error => {
+ .catch((error) => {
commit('ERROR');
throw error;
});
diff --git a/spec/frontend/__helpers__/wait_for_promises.js b/spec/frontend/__helpers__/wait_for_promises.js
new file mode 100644
index 00000000000..2fd1cc6ba0d
--- /dev/null
+++ b/spec/frontend/__helpers__/wait_for_promises.js
@@ -0,0 +1 @@
+export default () => new Promise((resolve) => requestAnimationFrame(resolve));
diff --git a/spec/frontend/helpers/wait_for_text.js b/spec/frontend/__helpers__/wait_for_text.js
index 6bed8a90a98..6bed8a90a98 100644
--- a/spec/frontend/helpers/wait_for_text.js
+++ b/spec/frontend/__helpers__/wait_for_text.js
diff --git a/spec/frontend/helpers/wait_using_real_timer.js b/spec/frontend/__helpers__/wait_using_real_timer.js
index ddf23cd97b4..110d5f46c08 100644
--- a/spec/frontend/helpers/wait_using_real_timer.js
+++ b/spec/frontend/__helpers__/wait_using_real_timer.js
@@ -1,6 +1,6 @@
/* useful for timing promises when jest fakeTimers are not reliable enough */
-export default timeout =>
- new Promise(resolve => {
+export default (timeout) =>
+ new Promise((resolve) => {
jest.useRealTimers();
setTimeout(resolve, timeout);
jest.useFakeTimers();
diff --git a/spec/frontend/helpers/web_worker_mock.js b/spec/frontend/__helpers__/web_worker_mock.js
index 2b4a391e1d2..2b4a391e1d2 100644
--- a/spec/frontend/helpers/web_worker_mock.js
+++ b/spec/frontend/__helpers__/web_worker_mock.js
diff --git a/spec/frontend/__mocks__/@gitlab/ui.js b/spec/frontend/__mocks__/@gitlab/ui.js
index 94e3f624c25..7cdecefab05 100644
--- a/spec/frontend/__mocks__/@gitlab/ui.js
+++ b/spec/frontend/__mocks__/@gitlab/ui.js
@@ -2,7 +2,7 @@ export * from '@gitlab/ui';
/**
* The @gitlab/ui tooltip directive requires awkward and distracting set up in tests
- * for components that use it (e.g., `attachToDocument: true` and `sync: true` passed
+ * for components that use it (e.g., `attachTo: document.body` and `sync: true` passed
* to the `mount` helper from `vue-test-utils`).
*
* This mock decouples those tests from the implementation, removing the need to set
@@ -38,7 +38,7 @@ jest.mock('@gitlab/ui/dist/components/base/popover/popover.js', () => ({
required: false,
default: () => [],
},
- ...Object.fromEntries(['target', 'triggers', 'placement'].map(prop => [prop, {}])),
+ ...Object.fromEntries(['target', 'triggers', 'placement'].map((prop) => [prop, {}])),
},
render(h) {
return h(
@@ -47,7 +47,7 @@ jest.mock('@gitlab/ui/dist/components/base/popover/popover.js', () => ({
class: 'gl-popover',
...this.$attrs,
},
- Object.keys(this.$slots).map(s => this.$slots[s]),
+ Object.keys(this.$slots).map((s) => this.$slots[s]),
);
},
}));
diff --git a/spec/frontend/__mocks__/lodash/debounce.js b/spec/frontend/__mocks__/lodash/debounce.js
index e8b61c80147..d4fe2ce5406 100644
--- a/spec/frontend/__mocks__/lodash/debounce.js
+++ b/spec/frontend/__mocks__/lodash/debounce.js
@@ -8,7 +8,7 @@
// [2]: https://gitlab.com/gitlab-org/gitlab/-/issues/213378
// Further reference: https://github.com/facebook/jest/issues/3465
-export default fn => {
+export default (fn) => {
const debouncedFn = jest.fn().mockImplementation(fn);
debouncedFn.cancel = jest.fn();
debouncedFn.flush = jest.fn().mockImplementation(() => {
diff --git a/spec/frontend/__mocks__/lodash/defer.js b/spec/frontend/__mocks__/lodash/defer.js
new file mode 100644
index 00000000000..9325cb28ba6
--- /dev/null
+++ b/spec/frontend/__mocks__/lodash/defer.js
@@ -0,0 +1,5 @@
+/**
+ * Instead of messing around with timers, we execute deferred functions
+ * immediately in our specs
+ */
+export default (fn, ...args) => fn(...args);
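Assuming this `__mocks__` directory is wired up as Jest manual mocks for node modules (as the existing `lodash/debounce` and `lodash/throttle` mocks suggest), deferred callbacks run synchronously in specs, so no timer flushing is needed. An illustrative sketch:

import defer from 'lodash/defer';

it('runs deferred work immediately under the mock', () => {
  const callback = jest.fn();

  defer(callback, 'payload');

  // With real lodash this would only fire after the timer queue is flushed;
  // the mock above invokes it right away.
  expect(callback).toHaveBeenCalledWith('payload');
});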
diff --git a/spec/frontend/__mocks__/lodash/throttle.js b/spec/frontend/__mocks__/lodash/throttle.js
index aef391afd0c..e8a82654c78 100644
--- a/spec/frontend/__mocks__/lodash/throttle.js
+++ b/spec/frontend/__mocks__/lodash/throttle.js
@@ -1,4 +1,4 @@
// Similar to `lodash/debounce`, `lodash/throttle` also causes flaky specs.
// See `./debounce.js` for more details.
-export default fn => fn;
+export default (fn) => fn;
diff --git a/spec/frontend/activities_spec.js b/spec/frontend/activities_spec.js
index d14be3a1f26..00519148b30 100644
--- a/spec/frontend/activities_spec.js
+++ b/spec/frontend/activities_spec.js
@@ -44,23 +44,19 @@ describe('Activities', () => {
});
for (let i = 0; i < filters.length; i += 1) {
- (i => {
+ ((i) => {
describe(`when selecting ${getEventName(i)}`, () => {
beforeEach(() => {
$(getSelector(i)).click();
});
for (let x = 0; x < filters.length; x += 1) {
- (x => {
+ ((x) => {
const shouldHighlight = i === x;
const testName = shouldHighlight ? 'should highlight' : 'should not highlight';
it(`${testName} ${getEventName(x)}`, () => {
- expect(
- $(getSelector(x))
- .parent()
- .hasClass('active'),
- ).toEqual(shouldHighlight);
+ expect($(getSelector(x)).parent().hasClass('active')).toEqual(shouldHighlight);
});
})(x);
}
diff --git a/spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap b/spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap
index 8948a9926bb..1eb9ccc9c6c 100644
--- a/spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap
+++ b/spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap
@@ -4,6 +4,7 @@ exports[`AddContextCommitsModal renders modal with 2 tabs 1`] = `
<gl-modal-stub
body-class="add-review-item pt-0"
cancel-variant="light"
+ dismisslabel="Close"
modalclass=""
modalid="add-review-item"
ok-disabled="true"
diff --git a/spec/frontend/add_context_commits_modal/store/actions_spec.js b/spec/frontend/add_context_commits_modal/store/actions_spec.js
index 24948dd6073..3bb3cb68f56 100644
--- a/spec/frontend/add_context_commits_modal/store/actions_spec.js
+++ b/spec/frontend/add_context_commits_modal/store/actions_spec.js
@@ -1,5 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'helpers/test_constants';
+import testAction from 'helpers/vuex_action_helper';
import axios from '~/lib/utils/axios_utils';
import {
setBaseConfig,
@@ -15,7 +16,6 @@ import {
resetModalState,
} from '~/add_context_commits_modal/store/actions';
import * as types from '~/add_context_commits_modal/store/mutation_types';
-import testAction from '../../helpers/vuex_action_helper';
describe('AddContextCommitsModalStoreActions', () => {
const contextCommitEndpoint =
@@ -42,7 +42,7 @@ describe('AddContextCommitsModalStoreActions', () => {
});
describe('setBaseConfig', () => {
- it('commits SET_BASE_CONFIG', done => {
+ it('commits SET_BASE_CONFIG', (done) => {
const options = { contextCommitsPath, mergeRequestIid, projectId };
testAction(
setBaseConfig,
@@ -65,7 +65,7 @@ describe('AddContextCommitsModalStoreActions', () => {
});
describe('setTabIndex', () => {
- it('commits SET_TABINDEX', done => {
+ it('commits SET_TABINDEX', (done) => {
testAction(
setTabIndex,
{ tabIndex: 1 },
@@ -78,7 +78,7 @@ describe('AddContextCommitsModalStoreActions', () => {
});
describe('setCommits', () => {
- it('commits SET_COMMITS', done => {
+ it('commits SET_COMMITS', (done) => {
testAction(
setCommits,
{ commits: [], silentAddition: false },
@@ -89,7 +89,7 @@ describe('AddContextCommitsModalStoreActions', () => {
);
});
- it('commits SET_COMMITS_SILENT', done => {
+ it('commits SET_COMMITS_SILENT', (done) => {
testAction(
setCommits,
{ commits: [], silentAddition: true },
@@ -102,7 +102,7 @@ describe('AddContextCommitsModalStoreActions', () => {
});
describe('createContextCommits', () => {
- it('calls API to create context commits', done => {
+ it('calls API to create context commits', (done) => {
mock.onPost(contextCommitEndpoint).reply(200, {});
testAction(createContextCommits, { commits: [] }, {}, [], [], done);
@@ -126,7 +126,7 @@ describe('AddContextCommitsModalStoreActions', () => {
)
.reply(200, [dummyCommit]);
});
- it('commits FETCH_CONTEXT_COMMITS', done => {
+ it('commits FETCH_CONTEXT_COMMITS', (done) => {
const contextCommit = { ...dummyCommit, isSelected: true };
testAction(
fetchContextCommits,
@@ -150,7 +150,7 @@ describe('AddContextCommitsModalStoreActions', () => {
});
describe('setContextCommits', () => {
- it('commits SET_CONTEXT_COMMITS', done => {
+ it('commits SET_CONTEXT_COMMITS', (done) => {
testAction(
setContextCommits,
{ data: [] },
@@ -168,7 +168,7 @@ describe('AddContextCommitsModalStoreActions', () => {
.onDelete('/api/v4/projects/gitlab-org%2Fgitlab/merge_requests/1/context_commits')
.reply(204);
});
- it('calls API to remove context commits', done => {
+ it('calls API to remove context commits', (done) => {
testAction(
removeContextCommits,
{ forceReload: false },
@@ -181,7 +181,7 @@ describe('AddContextCommitsModalStoreActions', () => {
});
describe('setSelectedCommits', () => {
- it('commits SET_SELECTED_COMMITS', done => {
+ it('commits SET_SELECTED_COMMITS', (done) => {
testAction(
setSelectedCommits,
[dummyCommit],
@@ -194,7 +194,7 @@ describe('AddContextCommitsModalStoreActions', () => {
});
describe('setSearchText', () => {
- it('commits SET_SEARCH_TEXT', done => {
+ it('commits SET_SEARCH_TEXT', (done) => {
const searchText = 'Dummy Text';
testAction(
setSearchText,
@@ -208,7 +208,7 @@ describe('AddContextCommitsModalStoreActions', () => {
});
describe('setToRemoveCommits', () => {
- it('commits SET_TO_REMOVE_COMMITS', done => {
+ it('commits SET_TO_REMOVE_COMMITS', (done) => {
const commitId = 'abcde';
testAction(
@@ -223,7 +223,7 @@ describe('AddContextCommitsModalStoreActions', () => {
});
describe('resetModalState', () => {
- it('commits RESET_MODAL_STATE', done => {
+ it('commits RESET_MODAL_STATE', (done) => {
const commitId = 'abcde';
testAction(
diff --git a/spec/frontend/admin/statistics_panel/components/app_spec.js b/spec/frontend/admin/statistics_panel/components/app_spec.js
index dda0c2b857c..a4dcfa1a480 100644
--- a/spec/frontend/admin/statistics_panel/components/app_spec.js
+++ b/spec/frontend/admin/statistics_panel/components/app_spec.js
@@ -34,7 +34,7 @@ describe('Admin statistics app', () => {
wrapper.destroy();
});
- const findStats = idx => wrapper.findAll('.js-stats').at(idx);
+ const findStats = (idx) => wrapper.findAll('.js-stats').at(idx);
describe('template', () => {
describe('when app is loading', () => {
diff --git a/spec/frontend/admin/statistics_panel/store/actions_spec.js b/spec/frontend/admin/statistics_panel/store/actions_spec.js
index 9b18b1aebda..ecbc823be12 100644
--- a/spec/frontend/admin/statistics_panel/store/actions_spec.js
+++ b/spec/frontend/admin/statistics_panel/store/actions_spec.js
@@ -22,7 +22,7 @@ describe('Admin statistics panel actions', () => {
mock.onGet(/api\/(.*)\/application\/statistics/).replyOnce(200, mockStatistics);
});
- it('dispatches success with received data', done =>
+ it('dispatches success with received data', (done) =>
testAction(
actions.fetchStatistics,
null,
@@ -46,7 +46,7 @@ describe('Admin statistics panel actions', () => {
mock.onGet(/api\/(.*)\/application\/statistics/).replyOnce(500);
});
- it('dispatches error', done =>
+ it('dispatches error', (done) =>
testAction(
actions.fetchStatistics,
null,
@@ -67,7 +67,7 @@ describe('Admin statistics panel actions', () => {
});
describe('requestStatistic', () => {
- it('should commit the request mutation', done =>
+ it('should commit the request mutation', (done) =>
testAction(
actions.requestStatistics,
null,
@@ -79,7 +79,7 @@ describe('Admin statistics panel actions', () => {
});
describe('receiveStatisticsSuccess', () => {
- it('should commit received data', done =>
+ it('should commit received data', (done) =>
testAction(
actions.receiveStatisticsSuccess,
mockStatistics,
@@ -96,7 +96,7 @@ describe('Admin statistics panel actions', () => {
});
describe('receiveStatisticsError', () => {
- it('should commit error', done => {
+ it('should commit error', (done) => {
testAction(
actions.receiveStatisticsError,
500,
diff --git a/spec/frontend/admin/users/components/user_avatar_spec.js b/spec/frontend/admin/users/components/user_avatar_spec.js
new file mode 100644
index 00000000000..ba4e83690d0
--- /dev/null
+++ b/spec/frontend/admin/users/components/user_avatar_spec.js
@@ -0,0 +1,63 @@
+import { GlAvatarLink, GlAvatarLabeled, GlBadge } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+
+import AdminUserAvatar from '~/admin/users/components/user_avatar.vue';
+import { users, paths } from '../mock_data';
+
+describe('AdminUserAvatar component', () => {
+ let wrapper;
+ const user = users[0];
+ const adminUserPath = paths.adminUser;
+
+ const findAvatar = () => wrapper.find(GlAvatarLabeled);
+ const findAvatarLink = () => wrapper.find(GlAvatarLink);
+ const findAllBadges = () => wrapper.findAll(GlBadge);
+
+ const initComponent = (props = {}) => {
+ wrapper = mount(AdminUserAvatar, {
+ propsData: {
+ user,
+ adminUserPath,
+ ...props,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('when initialized', () => {
+ beforeEach(() => {
+ initComponent();
+ });
+
+ it("links to the user's admin path", () => {
+ expect(findAvatarLink().attributes()).toMatchObject({
+ href: adminUserPath.replace('id', user.username),
+ 'data-user-id': user.id.toString(),
+ 'data-username': user.username,
+ });
+ });
+
+ it("renders the user's name", () => {
+ expect(findAvatar().props('label')).toBe(user.name);
+ });
+
+ it("renders the user's email", () => {
+ expect(findAvatar().props('subLabel')).toBe(user.email);
+ });
+
+ it("renders the user's avatar image", () => {
+ expect(findAvatar().attributes('src')).toBe(user.avatarUrl);
+ });
+
+ it("renders the user's badges", () => {
+ findAllBadges().wrappers.forEach((badge, idx) => {
+ expect(badge.text()).toBe(user.badges[idx].text);
+ expect(badge.props('variant')).toBe(user.badges[idx].variant);
+ });
+ });
+ });
+});
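The href assertion in this new spec implies that `paths.adminUser` from `mock_data` contains a literal `id` placeholder which the component swaps for the username. A hypothetical computed property that would satisfy it (inferred from the spec, not the actual `user_avatar.vue` source):

// Hypothetical computed property inferred from the assertion above.
computed: {
  adminUserHref() {
    // e.g. '/admin/users/id' -> '/admin/users/root'
    return this.adminUserPath.replace('id', this.user.username);
  },
},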
diff --git a/spec/frontend/admin/users/components/users_table_spec.js b/spec/frontend/admin/users/components/users_table_spec.js
index ba36e1e32ef..b79d2d4d39d 100644
--- a/spec/frontend/admin/users/components/users_table_spec.js
+++ b/spec/frontend/admin/users/components/users_table_spec.js
@@ -2,6 +2,7 @@ import { GlTable } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import AdminUsersTable from '~/admin/users/components/users_table.vue';
+import AdminUserAvatar from '~/admin/users/components/user_avatar.vue';
import { users, paths } from '../mock_data';
describe('AdminUsersTable component', () => {
@@ -44,8 +45,12 @@ describe('AdminUsersTable component', () => {
${'projectsCount'} | ${'Projects'}
${'createdAt'} | ${'Created on'}
${'lastActivityOn'} | ${'Last activity'}
- `('renders users.$key for $label', ({ key, label }) => {
- expect(getCellByLabel(0, label).text()).toBe(`${user[key]}`);
+ `('renders users.$key in column $label', ({ key, label }) => {
+ expect(getCellByLabel(0, label).text()).toContain(`${user[key]}`);
+ });
+
+ it('renders an AdminUserAvatar component', () => {
+ expect(getCellByLabel(0, 'Name').find(AdminUserAvatar).exists()).toBe(true);
});
});
diff --git a/spec/frontend/admin/users/mock_data.js b/spec/frontend/admin/users/mock_data.js
index 62fa9469638..860994a9152 100644
--- a/spec/frontend/admin/users/mock_data.js
+++ b/spec/frontend/admin/users/mock_data.js
@@ -8,7 +8,10 @@ export const users = [
lastActivityOn: '2020-12-09',
avatarUrl:
'https://secure.gravatar.com/avatar/054f062d8b1a42b123f17e13a173cda8?s=80\\u0026d=identicon',
- badges: [],
+ badges: [
+ { text: 'Admin', variant: 'success' },
+ { text: "It's you!", variant: null },
+ ],
projectsCount: 0,
actions: [],
},
diff --git a/spec/frontend/alert_handler_spec.js b/spec/frontend/alert_handler_spec.js
index 0cee28112a8..e4cd38a7799 100644
--- a/spec/frontend/alert_handler_spec.js
+++ b/spec/frontend/alert_handler_spec.js
@@ -7,7 +7,7 @@ describe('Alert Handler', () => {
const DISMISS_CLASS = 'gl-alert-dismiss';
const DISMISS_LABEL = 'Dismiss';
- const generateHtml = parentClass =>
+ const generateHtml = (parentClass) =>
`<div class="${parentClass}">
<button aria-label="${DISMISS_LABEL}">Dismiss</button>
</div>`;
diff --git a/spec/frontend/alert_management/components/alert_details_spec.js b/spec/frontend/alert_management/components/alert_details_spec.js
index e2d913398f9..976e50625a6 100644
--- a/spec/frontend/alert_management/components/alert_details_spec.js
+++ b/spec/frontend/alert_management/components/alert_details_spec.js
@@ -3,6 +3,7 @@ import { mount, shallowMount } from '@vue/test-utils';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
import AlertDetails from '~/alert_management/components/alert_details.vue';
import AlertSummaryRow from '~/alert_management/components/alert_summary_row.vue';
import {
@@ -201,6 +202,11 @@ describe('AlertDetails', () => {
it('calls `$apollo.mutate` with `createIssueQuery`', () => {
const issueIid = '10';
+ mountComponent({
+ mountMethod: mount,
+ data: { alert: { ...mockAlert } },
+ });
+
jest
.spyOn(wrapper.vm.$apollo, 'mutate')
.mockResolvedValue({ data: { createAlertIssue: { issue: { iid: issueIid } } } });
@@ -215,7 +221,7 @@ describe('AlertDetails', () => {
});
});
- it('shows error alert when incident creation fails ', () => {
+ it('shows error alert when incident creation fails ', async () => {
const errorMsg = 'Something went wrong';
mountComponent({
mountMethod: mount,
@@ -225,9 +231,8 @@ describe('AlertDetails', () => {
jest.spyOn(wrapper.vm.$apollo, 'mutate').mockRejectedValue(errorMsg);
findCreateIncidentBtn().trigger('click');
- setImmediate(() => {
- expect(findIncidentCreationAlert().text()).toBe(errorMsg);
- });
+ await waitForPromises();
+ expect(findIncidentCreationAlert().text()).toBe(errorMsg);
});
});
diff --git a/spec/frontend/alert_management/components/alert_management_empty_state_spec.js b/spec/frontend/alert_management/components/alert_management_empty_state_spec.js
index ddb102339cc..509c67743c1 100644
--- a/spec/frontend/alert_management/components/alert_management_empty_state_spec.js
+++ b/spec/frontend/alert_management/components/alert_management_empty_state_spec.js
@@ -31,17 +31,5 @@ describe('AlertManagementEmptyState', () => {
it('shows empty state', () => {
expect(EmptyState().exists()).toBe(true);
});
-
- it('show OpsGenie integration state when OpsGenie mcv is true', () => {
- mountComponent({
- provide: {
- alertManagementEnabled: false,
- userCanEnableAlertManagement: false,
- opsgenieMvcEnabled: true,
- opsgenieMvcTargetUrl: 'https://opsgenie-url.com',
- },
- });
- expect(EmptyState().props('title')).toBe('Opsgenie is enabled');
- });
});
});
diff --git a/spec/frontend/alert_management/components/alert_management_table_spec.js b/spec/frontend/alert_management/components/alert_management_table_spec.js
index f7a629142f9..0cc3d565e10 100644
--- a/spec/frontend/alert_management/components/alert_management_table_spec.js
+++ b/spec/frontend/alert_management/components/alert_management_table_spec.js
@@ -85,11 +85,7 @@ describe('AlertManagementTable', () => {
});
expect(findAlertsTable().exists()).toBe(true);
expect(findLoader().exists()).toBe(true);
- expect(
- findAlerts()
- .at(0)
- .classes(),
- ).not.toContain('gl-hover-bg-blue-50');
+ expect(findAlerts().at(0).classes()).not.toContain('gl-hover-bg-blue-50');
});
it('error state', () => {
@@ -101,11 +97,7 @@ describe('AlertManagementTable', () => {
expect(findAlertsTable().text()).toContain('No alerts to display');
expect(findLoader().exists()).toBe(false);
expect(findAlert().props().variant).toBe('danger');
- expect(
- findAlerts()
- .at(0)
- .classes(),
- ).not.toContain('gl-hover-bg-blue-50');
+ expect(findAlerts().at(0).classes()).not.toContain('gl-hover-bg-blue-50');
});
it('empty state', () => {
@@ -125,11 +117,7 @@ describe('AlertManagementTable', () => {
expect(findAlertsTable().text()).toContain('No alerts to display');
expect(findLoader().exists()).toBe(false);
expect(findAlert().props().variant).toBe('info');
- expect(
- findAlerts()
- .at(0)
- .classes(),
- ).not.toContain('gl-hover-bg-blue-50');
+ expect(findAlerts().at(0).classes()).not.toContain('gl-hover-bg-blue-50');
});
it('has data state', () => {
@@ -140,11 +128,7 @@ describe('AlertManagementTable', () => {
expect(findLoader().exists()).toBe(false);
expect(findAlertsTable().exists()).toBe(true);
expect(findAlerts()).toHaveLength(mockAlerts.length);
- expect(
- findAlerts()
- .at(0)
- .classes(),
- ).toContain('gl-hover-bg-blue-50');
+ expect(findAlerts().at(0).classes()).toContain('gl-hover-bg-blue-50');
});
it('displays the alert ID and title formatted correctly', () => {
@@ -170,11 +154,7 @@ describe('AlertManagementTable', () => {
data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
loading: false,
});
- expect(
- findStatusDropdown()
- .find('.dropdown-title')
- .exists(),
- ).toBe(false);
+ expect(findStatusDropdown().find('.dropdown-title').exists()).toBe(false);
});
it('shows correct severity icons', async () => {
@@ -186,11 +166,7 @@ describe('AlertManagementTable', () => {
await wrapper.vm.$nextTick();
expect(wrapper.find(GlTable).exists()).toBe(true);
- expect(
- findAlertsTable()
- .find(GlIcon)
- .classes('icon-critical'),
- ).toBe(true);
+ expect(findAlertsTable().find(GlIcon).classes('icon-critical')).toBe(true);
});
it('renders severity text', () => {
@@ -199,11 +175,7 @@ describe('AlertManagementTable', () => {
loading: false,
});
- expect(
- findSeverityFields()
- .at(0)
- .text(),
- ).toBe('Critical');
+ expect(findSeverityFields().at(0).text()).toBe('Critical');
});
it('renders Unassigned when no assignee(s) present', () => {
@@ -212,11 +184,7 @@ describe('AlertManagementTable', () => {
loading: false,
});
- expect(
- findAssignees()
- .at(0)
- .text(),
- ).toBe('Unassigned');
+ expect(findAssignees().at(0).text()).toBe('Unassigned');
});
it('renders user avatar when assignee present', () => {
@@ -225,9 +193,7 @@ describe('AlertManagementTable', () => {
loading: false,
});
- const avatar = findAssignees()
- .at(1)
- .find(GlAvatar);
+ const avatar = findAssignees().at(1).find(GlAvatar);
const { src, label } = avatar.attributes();
const { name, avatarUrl } = mockAlerts[1].assignees.nodes[0];
@@ -244,9 +210,7 @@ describe('AlertManagementTable', () => {
expect(visitUrl).not.toHaveBeenCalled();
- findAlerts()
- .at(0)
- .trigger('click');
+ findAlerts().at(0).trigger('click');
expect(visitUrl).toHaveBeenCalledWith('/1527542/details', false);
});
@@ -258,11 +222,9 @@ describe('AlertManagementTable', () => {
expect(visitUrl).not.toHaveBeenCalled();
- findAlerts()
- .at(0)
- .trigger('click', {
- metaKey: true,
- });
+ findAlerts().at(0).trigger('click', {
+ metaKey: true,
+ });
expect(visitUrl).toHaveBeenCalledWith('/1527542/details', true);
});
@@ -276,24 +238,12 @@ describe('AlertManagementTable', () => {
});
it('shows "None" when no link exists', () => {
- expect(
- findIssueFields()
- .at(0)
- .text(),
- ).toBe('None');
+ expect(findIssueFields().at(0).text()).toBe('None');
});
it('renders a link when one exists', () => {
- expect(
- findIssueFields()
- .at(1)
- .text(),
- ).toBe('#1');
- expect(
- findIssueFields()
- .at(1)
- .attributes('href'),
- ).toBe('/gitlab-org/gitlab/-/issues/1');
+ expect(findIssueFields().at(1).text()).toBe('#1');
+ expect(findIssueFields().at(1).attributes('href')).toBe('/gitlab-org/gitlab/-/issues/1');
});
});
@@ -350,11 +300,7 @@ describe('AlertManagementTable', () => {
loading: false,
});
- expect(
- findAlerts()
- .at(0)
- .classes(),
- ).toContain('new-alert');
+ expect(findAlerts().at(0).classes()).toContain('new-alert');
});
it('should not highlight the row when alert is not new', () => {
@@ -363,11 +309,7 @@ describe('AlertManagementTable', () => {
loading: false,
});
- expect(
- findAlerts()
- .at(0)
- .classes(),
- ).not.toContain('new-alert');
+ expect(findAlerts().at(0).classes()).not.toContain('new-alert');
});
});
});
diff --git a/spec/frontend/alert_management/components/alert_status_spec.js b/spec/frontend/alert_management/components/alert_status_spec.js
index f5916b8b265..6f2ddb86020 100644
--- a/spec/frontend/alert_management/components/alert_status_spec.js
+++ b/spec/frontend/alert_management/components/alert_status_spec.js
@@ -3,7 +3,7 @@ import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
import waitForPromises from 'helpers/wait_for_promises';
import { trackAlertStatusUpdateOptions } from '~/alert_management/constants';
import AlertManagementStatus from '~/alert_management/components/alert_status.vue';
-import updateAlertStatusMutation from '~/alert_management/graphql/mutations/update_alert_status.mutation.graphql';
+import updateAlertStatusMutation from '~/graphql_shared/mutations/update_alert_status.mutation.graphql';
import Tracking from '~/tracking';
import mockAlerts from '../mocks/alerts.json';
diff --git a/spec/frontend/alert_management/components/sidebar/alert_managment_sidebar_assignees_spec.js b/spec/frontend/alert_management/components/sidebar/alert_managment_sidebar_assignees_spec.js
index 6430273ec59..00c479071fe 100644
--- a/spec/frontend/alert_management/components/sidebar/alert_managment_sidebar_assignees_spec.js
+++ b/spec/frontend/alert_management/components/sidebar/alert_managment_sidebar_assignees_spec.js
@@ -165,21 +165,9 @@ describe('Alert Details Sidebar Assignees', () => {
},
});
- expect(
- findAssigned()
- .find('img')
- .attributes('src'),
- ).toBe('/url');
- expect(
- findAssigned()
- .find('.dropdown-menu-user-full-name')
- .text(),
- ).toBe('root');
- expect(
- findAssigned()
- .find('.dropdown-menu-user-username')
- .text(),
- ).toBe('@root');
+ expect(findAssigned().find('img').attributes('src')).toBe('/url');
+ expect(findAssigned().find('.dropdown-menu-user-full-name').text()).toBe('root');
+ expect(findAssigned().find('.dropdown-menu-user-username').text()).toBe('@root');
});
});
});
diff --git a/spec/frontend/alert_management/components/sidebar/alert_sidebar_status_spec.js b/spec/frontend/alert_management/components/sidebar/alert_sidebar_status_spec.js
index bef4a341985..0b60a36cf54 100644
--- a/spec/frontend/alert_management/components/sidebar/alert_sidebar_status_spec.js
+++ b/spec/frontend/alert_management/components/sidebar/alert_sidebar_status_spec.js
@@ -2,7 +2,7 @@ import { mount } from '@vue/test-utils';
import { GlDropdown, GlDropdownItem, GlLoadingIcon } from '@gitlab/ui';
import { trackAlertStatusUpdateOptions } from '~/alert_management/constants';
import AlertSidebarStatus from '~/alert_management/components/sidebar/sidebar_status.vue';
-import updateAlertStatusMutation from '~/alert_management/graphql/mutations/update_alert_status.mutation.graphql';
+import updateAlertStatusMutation from '~/graphql_shared/mutations/update_alert_status.mutation.graphql';
import Tracking from '~/tracking';
import mockAlerts from '../../mocks/alerts.json';
diff --git a/spec/frontend/alert_management/mocks/alerts_provide_config.json b/spec/frontend/alert_management/mocks/alerts_provide_config.json
index af543e641bc..3313ff8128c 100644
--- a/spec/frontend/alert_management/mocks/alerts_provide_config.json
+++ b/spec/frontend/alert_management/mocks/alerts_provide_config.json
@@ -7,7 +7,5 @@
"populatingAlertsHelpUrl": "/link",
"emptyAlertSvgPath": "/link",
"alertManagementEnabled": false,
- "userCanEnableAlertManagement": false,
- "opsgenieMvcTargetUrl": "/link",
- "opsgenieMvcEnabled": false
+ "userCanEnableAlertManagement": false
}
\ No newline at end of file
diff --git a/spec/frontend/alerts_service_settings/components/alerts_service_form_spec.js b/spec/frontend/alerts_service_settings/components/alerts_service_form_spec.js
index ed78a593944..346059ed7be 100644
--- a/spec/frontend/alerts_service_settings/components/alerts_service_form_spec.js
+++ b/spec/frontend/alerts_service_settings/components/alerts_service_form_spec.js
@@ -1,3 +1,4 @@
+import { nextTick } from 'vue';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { shallowMount } from '@vue/test-utils';
@@ -22,29 +23,21 @@ describe('AlertsServiceForm', () => {
let wrapper;
let mockAxios;
- const createComponent = (props = defaultProps, { methods } = {}) => {
+ const createComponent = (props = defaultProps) => {
wrapper = shallowMount(AlertsServiceForm, {
propsData: {
...defaultProps,
...props,
},
- methods,
});
};
const findUrl = () => wrapper.find('#url');
const findAuthorizationKey = () => wrapper.find('#authorization-key');
const findDescription = () => wrapper.find('[data-testid="description"');
- const findActiveStatusIcon = val =>
- document.querySelector(`.js-service-active-status[data-value=${val.toString()}]`);
beforeEach(() => {
mockAxios = new MockAdapter(axios);
- setFixtures(`
- <div>
- <span class="js-service-active-status" data-value="true"><svg class="s16 cgreen" data-testid="check-icon"><use xlink:href="icons.svg#check" /></svg></span>
- <span class="js-service-active-status" data-value="false"><svg class="s16 clgray" data-testid="power-icon"><use xlink:href="icons.svg#power" /></svg></span>
- </div>`);
});
afterEach(() => {
@@ -75,25 +68,16 @@ describe('AlertsServiceForm', () => {
});
describe('reset key', () => {
- it('triggers resetKey method', () => {
- const resetKey = jest.fn();
- const methods = { resetKey };
- createComponent(defaultProps, { methods });
-
- wrapper.find(GlModal).vm.$emit('ok');
-
- expect(resetKey).toHaveBeenCalled();
- });
-
- it('updates the authorization key on success', () => {
+ it('updates the authorization key on success', async () => {
const formPath = 'some/path';
- mockAxios.onPut(formPath, { service: { token: '' } }).replyOnce(200, { token: 'newToken' });
+ mockAxios.onPut(formPath).replyOnce(200, { token: 'newToken' });
createComponent({ formPath });
- return wrapper.vm.resetKey().then(() => {
- expect(findAuthorizationKey().attributes('value')).toBe('newToken');
- });
+ wrapper.find(GlModal).vm.$emit('ok');
+ await axios.waitForAll();
+
+ expect(findAuthorizationKey().attributes('value')).toBe('newToken');
});
it('shows flash message on error', () => {
@@ -112,16 +96,6 @@ describe('AlertsServiceForm', () => {
});
describe('activate toggle', () => {
- it('triggers toggleActivated method', () => {
- const toggleActivated = jest.fn();
- const methods = { toggleActivated };
- createComponent(defaultProps, { methods });
-
- wrapper.find(ToggleButton).vm.$emit('change', true);
-
- expect(toggleActivated).toHaveBeenCalled();
- });
-
describe('successfully completes', () => {
describe.each`
initialActivated | value
@@ -143,11 +117,6 @@ describe('AlertsServiceForm', () => {
it(`updates toggle button value to ${value}`, () => {
expect(wrapper.find(ToggleButton).props('value')).toBe(value);
});
-
- it('updates visible status icons', () => {
- expect(findActiveStatusIcon(!value)).toHaveClass('d-none');
- expect(findActiveStatusIcon(value)).not.toHaveClass('d-none');
- });
},
);
});
@@ -175,7 +144,7 @@ describe('AlertsServiceForm', () => {
it('cannot be toggled', () => {
wrapper.find(ToggleButton).vm.$emit('change');
- return wrapper.vm.$nextTick().then(() => {
+ return nextTick().then(() => {
expect(wrapper.find(ToggleButton).props('disabledInput')).toBe(true);
});
});
diff --git a/spec/frontend/alerts_settings/__snapshots__/alerts_settings_form_spec.js.snap b/spec/frontend/alerts_settings/__snapshots__/alerts_settings_form_spec.js.snap
index a1ced8910b3..ef68a6a2c32 100644
--- a/spec/frontend/alerts_settings/__snapshots__/alerts_settings_form_spec.js.snap
+++ b/spec/frontend/alerts_settings/__snapshots__/alerts_settings_form_spec.js.snap
@@ -4,11 +4,10 @@ exports[`AlertsSettingsFormNew with default values renders the initial template
"<form class=\\"gl-mt-6\\">
<h5 class=\\"gl-font-lg gl-my-5\\">Add new integrations</h5>
<div id=\\"integration-type\\" role=\\"group\\" class=\\"form-group gl-form-group\\"><label id=\\"integration-type__BV_label_\\" for=\\"integration-type\\" class=\\"d-block col-form-label\\">1. Select integration type</label>
- <div class=\\"bv-no-focus-ring\\"><select class=\\"gl-form-select custom-select\\" id=\\"__BVID__8\\">
+ <div class=\\"bv-no-focus-ring\\"><select class=\\"gl-form-select mw-100 custom-select\\" id=\\"__BVID__8\\">
<option value=\\"\\">Select integration type</option>
<option value=\\"HTTP\\">HTTP Endpoint</option>
<option value=\\"PROMETHEUS\\">External Prometheus</option>
- <option value=\\"OPSGENIE\\">Opsgenie</option>
</select>
<!---->
<!---->
@@ -16,82 +15,84 @@ exports[`AlertsSettingsFormNew with default values renders the initial template
<!---->
</div>
</div>
- <div class=\\"gl-mt-3 collapse\\" style=\\"display: none;\\" id=\\"__BVID__10\\">
- <div>
- <div id=\\"name-integration\\" role=\\"group\\" class=\\"form-group gl-form-group\\"><label id=\\"name-integration__BV_label_\\" for=\\"name-integration\\" class=\\"d-block col-form-label\\">2. Name integration</label>
- <div class=\\"bv-no-focus-ring\\"><input type=\\"text\\" placeholder=\\"Enter integration name\\" class=\\"gl-form-input form-control\\" id=\\"__BVID__15\\">
- <!---->
- <!---->
- <!---->
+ <transition-stub css=\\"true\\" enterclass=\\"\\" leaveclass=\\"collapse show\\" entertoclass=\\"collapse show\\" leavetoclass=\\"collapse\\" enteractiveclass=\\"collapsing\\" leaveactiveclass=\\"collapsing\\" class=\\"gl-mt-3\\">
+ <div class=\\"collapse\\" style=\\"display: none;\\" id=\\"__BVID__10\\">
+ <div>
+ <div id=\\"name-integration\\" role=\\"group\\" class=\\"form-group gl-form-group\\"><label id=\\"name-integration__BV_label_\\" for=\\"name-integration\\" class=\\"d-block col-form-label\\">2. Name integration</label>
+ <div class=\\"bv-no-focus-ring\\"><input type=\\"text\\" placeholder=\\"Enter integration name\\" class=\\"gl-form-input form-control\\" id=\\"__BVID__15\\">
+ <!---->
+ <!---->
+ <!---->
+ </div>
</div>
- </div>
- <div id=\\"integration-webhook\\" role=\\"group\\" class=\\"form-group gl-form-group\\"><label id=\\"integration-webhook__BV_label_\\" for=\\"integration-webhook\\" class=\\"d-block col-form-label\\">3. Set up webhook</label>
- <div class=\\"bv-no-focus-ring\\"><span>Utilize the URL and authorization key below to authorize an external service to send alerts to GitLab. Review your external service's documentation to learn where to add these details, and the <a rel=\\"noopener noreferrer\\" target=\\"_blank\\" href=\\"https://docs.gitlab.com/ee/operations/incident_management/alert_integrations.html\\" class=\\"gl-link gl-display-inline-block\\">GitLab documentation</a> to learn more about configuring your endpoint.</span> <label class=\\"gl-display-flex gl-flex-direction-column gl-mb-0 gl-w-max-content gl-my-4 gl-font-weight-normal\\">
- <div class=\\"gl-toggle-wrapper\\"><span class=\\"gl-toggle-label\\">Active</span>
- <!----> <button aria-label=\\"Active\\" type=\\"button\\" class=\\"gl-toggle\\"><span class=\\"toggle-icon\\"><svg data-testid=\\"close-icon\\" aria-hidden=\\"true\\" class=\\"gl-icon s16\\"><use href=\\"#close\\"></use></svg></span></button></div>
+ <div id=\\"integration-webhook\\" role=\\"group\\" class=\\"form-group gl-form-group\\"><label id=\\"integration-webhook__BV_label_\\" for=\\"integration-webhook\\" class=\\"d-block col-form-label\\">3. Set up webhook</label>
+ <div class=\\"bv-no-focus-ring\\"><span>Utilize the URL and authorization key below to authorize an external service to send alerts to GitLab. Review your external service's documentation to learn where to add these details, and the <a rel=\\"noopener noreferrer\\" target=\\"_blank\\" href=\\"https://docs.gitlab.com/ee/operations/incident_management/alert_integrations.html\\" class=\\"gl-link gl-display-inline-block\\">GitLab documentation</a> to learn more about configuring your endpoint.</span> <label class=\\"gl-display-flex gl-flex-direction-column gl-mb-0 gl-w-max-content gl-my-4 gl-font-weight-normal\\">
+ <div class=\\"gl-toggle-wrapper\\"><span class=\\"gl-toggle-label\\">Active</span>
+ <!----> <button aria-label=\\"Active\\" type=\\"button\\" class=\\"gl-toggle\\"><span class=\\"toggle-icon\\"><svg data-testid=\\"close-icon\\" aria-hidden=\\"true\\" class=\\"gl-icon s16\\"><use href=\\"#close\\"></use></svg></span></button></div>
+ <!---->
+ </label>
<!---->
- </label>
- <!---->
- <div class=\\"gl-my-4\\"><span class=\\"gl-font-weight-bold\\">
+ <div class=\\"gl-my-4\\"><span class=\\"gl-font-weight-bold\\">
Webhook URL
</span>
- <div id=\\"url\\" readonly=\\"readonly\\">
- <div role=\\"group\\" class=\\"input-group\\">
- <!---->
- <!----> <input id=\\"url\\" type=\\"text\\" readonly=\\"readonly\\" class=\\"gl-form-input form-control\\">
- <div class=\\"input-group-append\\"><button title=\\"Copy\\" data-clipboard-text=\\"\\" aria-label=\\"Copy this value\\" type=\\"button\\" class=\\"btn gl-m-0! btn-default btn-md gl-button btn-default-secondary btn-icon\\">
- <!----> <svg data-testid=\\"copy-to-clipboard-icon\\" aria-hidden=\\"true\\" class=\\"gl-button-icon gl-icon s16\\">
- <use href=\\"#copy-to-clipboard\\"></use>
- </svg>
- <!----></button></div>
- <!---->
+ <div id=\\"url\\" readonly=\\"readonly\\">
+ <div role=\\"group\\" class=\\"input-group\\">
+ <!---->
+ <!----> <input id=\\"url\\" type=\\"text\\" readonly=\\"readonly\\" class=\\"gl-form-input form-control\\">
+ <div class=\\"input-group-append\\"><button title=\\"Copy\\" data-clipboard-text=\\"\\" aria-label=\\"Copy this value\\" type=\\"button\\" class=\\"btn gl-m-0! btn-default btn-md gl-button btn-default-secondary btn-icon\\">
+ <!----> <svg data-testid=\\"copy-to-clipboard-icon\\" aria-hidden=\\"true\\" class=\\"gl-button-icon gl-icon s16\\">
+ <use href=\\"#copy-to-clipboard\\"></use>
+ </svg>
+ <!----></button></div>
+ <!---->
+ </div>
</div>
</div>
- </div>
- <div class=\\"gl-my-4\\"><span class=\\"gl-font-weight-bold\\">
+ <div class=\\"gl-my-4\\"><span class=\\"gl-font-weight-bold\\">
Authorization key
</span>
- <div id=\\"authorization-key\\" readonly=\\"readonly\\" class=\\"gl-mb-3\\">
- <div role=\\"group\\" class=\\"input-group\\">
+ <div id=\\"authorization-key\\" readonly=\\"readonly\\" class=\\"gl-mb-3\\">
+ <div role=\\"group\\" class=\\"input-group\\">
+ <!---->
+ <!----> <input id=\\"authorization-key\\" type=\\"text\\" readonly=\\"readonly\\" class=\\"gl-form-input form-control\\">
+ <div class=\\"input-group-append\\"><button title=\\"Copy\\" data-clipboard-text=\\"\\" aria-label=\\"Copy this value\\" type=\\"button\\" class=\\"btn gl-m-0! btn-default btn-md gl-button btn-default-secondary btn-icon\\">
+ <!----> <svg data-testid=\\"copy-to-clipboard-icon\\" aria-hidden=\\"true\\" class=\\"gl-button-icon gl-icon s16\\">
+ <use href=\\"#copy-to-clipboard\\"></use>
+ </svg>
+ <!----></button></div>
+ <!---->
+ </div>
+ </div> <button type=\\"button\\" disabled=\\"disabled\\" class=\\"btn btn-default btn-md disabled gl-button\\">
<!---->
- <!----> <input id=\\"authorization-key\\" type=\\"text\\" readonly=\\"readonly\\" class=\\"gl-form-input form-control\\">
- <div class=\\"input-group-append\\"><button title=\\"Copy\\" data-clipboard-text=\\"\\" aria-label=\\"Copy this value\\" type=\\"button\\" class=\\"btn gl-m-0! btn-default btn-md gl-button btn-default-secondary btn-icon\\">
- <!----> <svg data-testid=\\"copy-to-clipboard-icon\\" aria-hidden=\\"true\\" class=\\"gl-button-icon gl-icon s16\\">
- <use href=\\"#copy-to-clipboard\\"></use>
- </svg>
- <!----></button></div>
- <!---->
- </div>
- </div> <button type=\\"button\\" disabled=\\"disabled\\" class=\\"btn btn-default btn-md disabled gl-button\\">
- <!---->
- <!----> <span class=\\"gl-button-text\\">
+ <!----> <span class=\\"gl-button-text\\">
Reset Key
</span></button>
+ <!---->
+ </div>
+ <!---->
+ <!---->
+ <!---->
+ </div>
+ </div>
+ <div id=\\"test-integration\\" role=\\"group\\" class=\\"form-group gl-form-group\\"><label id=\\"test-integration__BV_label_\\" for=\\"test-integration\\" class=\\"d-block col-form-label\\">4. Sample alert payload (optional)</label>
+ <div class=\\"bv-no-focus-ring\\"><span>Provide an example payload from the monitoring tool you intend to integrate with. This payload can be used to test the integration (optional).</span> <textarea id=\\"test-payload\\" disabled=\\"disabled\\" placeholder=\\"{ &quot;events&quot;: [{ &quot;application&quot;: &quot;Name of application&quot; }] }\\" wrap=\\"soft\\" class=\\"gl-form-input gl-form-textarea gl-my-3 form-control is-valid\\" style=\\"resize: none; overflow-y: scroll;\\"></textarea>
+ <!---->
+ <!---->
<!---->
</div>
- <!---->
- <!---->
- <!---->
</div>
+ <!---->
+ <!---->
</div>
- <div id=\\"test-integration\\" role=\\"group\\" class=\\"form-group gl-form-group\\"><label id=\\"test-integration__BV_label_\\" for=\\"test-integration\\" class=\\"d-block col-form-label\\">4. Sample alert payload (optional)</label>
- <div class=\\"bv-no-focus-ring\\"><span>Provide an example payload from the monitoring tool you intend to integrate with. This payload can be used to test the integration (optional).</span> <textarea id=\\"test-payload\\" disabled=\\"disabled\\" placeholder=\\"{ &quot;events&quot;: [{ &quot;application&quot;: &quot;Name of application&quot; }] }\\" wrap=\\"soft\\" class=\\"gl-form-input gl-form-textarea gl-my-3 form-control is-valid\\" style=\\"resize: none; overflow-y: scroll;\\"></textarea>
+ <div class=\\"gl-display-flex gl-justify-content-start gl-py-3\\"><button data-testid=\\"integration-form-submit\\" type=\\"submit\\" class=\\"btn js-no-auto-disable btn-success btn-md gl-button\\">
<!---->
+ <!----> <span class=\\"gl-button-text\\">Save integration
+ </span></button> <button data-testid=\\"integration-test-and-submit\\" type=\\"button\\" disabled=\\"disabled\\" class=\\"btn gl-mx-3 js-no-auto-disable btn-success btn-md disabled gl-button btn-success-secondary\\">
<!---->
+ <!----> <span class=\\"gl-button-text\\">Save and test payload</span></button> <button type=\\"reset\\" class=\\"btn js-no-auto-disable btn-default btn-md gl-button\\">
<!---->
- </div>
- </div>
- <!---->
- <!---->
+ <!----> <span class=\\"gl-button-text\\">Cancel</span></button></div>
</div>
- <div class=\\"gl-display-flex gl-justify-content-start gl-py-3\\"><button data-testid=\\"integration-form-submit\\" type=\\"submit\\" class=\\"btn js-no-auto-disable btn-success btn-md gl-button\\">
- <!---->
- <!----> <span class=\\"gl-button-text\\">Save integration
- </span></button> <button data-testid=\\"integration-test-and-submit\\" type=\\"button\\" disabled=\\"disabled\\" class=\\"btn gl-mx-3 js-no-auto-disable btn-success btn-md disabled gl-button btn-success-secondary\\">
- <!---->
- <!----> <span class=\\"gl-button-text\\">Save and test payload</span></button> <button type=\\"reset\\" class=\\"btn js-no-auto-disable btn-default btn-md gl-button\\">
- <!---->
- <!----> <span class=\\"gl-button-text\\">Cancel</span></button></div>
- </div>
+ </transition-stub>
</form>"
`;
diff --git a/spec/frontend/alerts_settings/alert_mapping_builder_spec.js b/spec/frontend/alerts_settings/alert_mapping_builder_spec.js
index 12536c27dfe..5d48ff02e35 100644
--- a/spec/frontend/alerts_settings/alert_mapping_builder_spec.js
+++ b/spec/frontend/alerts_settings/alert_mapping_builder_spec.js
@@ -28,11 +28,7 @@ describe('AlertMappingBuilder', () => {
});
const findColumnInRow = (row, column) =>
- wrapper
- .findAll('.gl-display-table-row')
- .at(row)
- .findAll('.gl-display-table-cell ')
- .at(column);
+ wrapper.findAll('.gl-display-table-row').at(row).findAll('.gl-display-table-cell ').at(column);
it('renders column captions', () => {
expect(findColumnInRow(0, 0).text()).toContain(i18n.columns.gitlabKeyTitle);
@@ -67,7 +63,7 @@ describe('AlertMappingBuilder', () => {
const dropdownItems = dropdown.findAll(GlDropdownItem);
const { nodes } = parsedMapping.samplePayload.payloadAlerFields;
const numberOfMappingOptions = nodes.filter(({ type }) =>
- type.some(t => compatibleTypes.includes(t)),
+ type.some((t) => compatibleTypes.includes(t)),
);
expect(dropdown.exists()).toBe(true);
@@ -86,7 +82,7 @@ describe('AlertMappingBuilder', () => {
const dropdownItems = dropdown.findAll(GlDropdownItem);
const { nodes } = parsedMapping.samplePayload.payloadAlerFields;
const numberOfMappingOptions = nodes.filter(({ type }) =>
- type.some(t => compatibleTypes.includes(t)),
+ type.some((t) => compatibleTypes.includes(t)),
);
expect(searchBox.exists()).toBe(Boolean(numberOfFallbacks));
diff --git a/spec/frontend/alerts_settings/alerts_integrations_list_spec.js b/spec/frontend/alerts_settings/alerts_integrations_list_spec.js
index 3a7392f64f7..5a3874d055b 100644
--- a/spec/frontend/alerts_settings/alerts_integrations_list_spec.js
+++ b/spec/frontend/alerts_settings/alerts_integrations_list_spec.js
@@ -72,11 +72,7 @@ describe('AlertIntegrationsList', () => {
it('renders an highlighted row when a current integration is selected to edit', () => {
mountComponent({ data: { currentIntegration: { id: '1' } } });
- expect(
- findTableComponentRows()
- .at(0)
- .classes(),
- ).toContain('gl-bg-blue-50');
+ expect(findTableComponentRows().at(0).classes()).toContain('gl-bg-blue-50');
});
describe('integration status', () => {
diff --git a/spec/frontend/alerts_settings/alerts_settings_form_spec.js b/spec/frontend/alerts_settings/alerts_settings_form_spec.js
index 428c6f93444..21cdec6f94c 100644
--- a/spec/frontend/alerts_settings/alerts_settings_form_spec.js
+++ b/spec/frontend/alerts_settings/alerts_settings_form_spec.js
@@ -28,7 +28,6 @@ describe('AlertsSettingsFormNew', () => {
propsData: {
loading: false,
canAddIntegration: true,
- canManageOpsgenie: true,
...props,
},
provide: {
@@ -86,11 +85,7 @@ describe('AlertsSettingsFormNew', () => {
await wrapper.vm.$nextTick();
- expect(
- findFormFields()
- .at(0)
- .isVisible(),
- ).toBe(true);
+ expect(findFormFields().at(0).isVisible()).toBe(true);
});
it('disables the dropdown and shows help text when multi integrations are not supported', async () => {
@@ -104,11 +99,7 @@ describe('AlertsSettingsFormNew', () => {
const options = findSelect().findAll('option');
await options.at(2).setSelected();
- expect(
- findFormFields()
- .at(0)
- .attributes('disabled'),
- ).toBe('disabled');
+ expect(findFormFields().at(0).attributes('disabled')).toBe('disabled');
});
});
@@ -119,9 +110,7 @@ describe('AlertsSettingsFormNew', () => {
const options = findSelect().findAll('option');
await options.at(1).setSelected();
- await findFormFields()
- .at(0)
- .setValue('Test integration');
+ await findFormFields().at(0).setValue('Test integration');
await findFormToggle().trigger('click');
await wrapper.vm.$nextTick();
@@ -145,12 +134,8 @@ describe('AlertsSettingsFormNew', () => {
const options = findSelect().findAll('option');
await options.at(2).setSelected();
- await findFormFields()
- .at(0)
- .setValue('Test integration');
- await findFormFields()
- .at(1)
- .setValue('https://test.com');
+ await findFormFields().at(0).setValue('Test integration');
+ await findFormFields().at(1).setValue('https://test.com');
await findFormToggle().trigger('click');
await wrapper.vm.$nextTick();
@@ -179,9 +164,7 @@ describe('AlertsSettingsFormNew', () => {
},
});
- await findFormFields()
- .at(0)
- .setValue('Test integration post');
+ await findFormFields().at(0).setValue('Test integration post');
await findFormToggle().trigger('click');
await wrapper.vm.$nextTick();
@@ -210,12 +193,8 @@ describe('AlertsSettingsFormNew', () => {
},
});
- await findFormFields()
- .at(0)
- .setValue('Test integration');
- await findFormFields()
- .at(1)
- .setValue('https://test-post.com');
+ await findFormFields().at(0).setValue('Test integration');
+ await findFormFields().at(1).setValue('https://test-post.com');
await findFormToggle().trigger('click');
await wrapper.vm.$nextTick();
@@ -300,11 +279,7 @@ describe('AlertsSettingsFormNew', () => {
resetSamplePayloadConfirmed,
});
await wrapper.vm.$nextTick();
- expect(
- findTestPayloadSection()
- .find(GlFormTextarea)
- .attributes('disabled'),
- ).toBe(disabled);
+ expect(findTestPayloadSection().find(GlFormTextarea).attributes('disabled')).toBe(disabled);
});
});
diff --git a/spec/frontend/alerts_settings/alerts_settings_wrapper_spec.js b/spec/frontend/alerts_settings/alerts_settings_wrapper_spec.js
index fe187d9e8f9..4d0732ca76c 100644
--- a/spec/frontend/alerts_settings/alerts_settings_wrapper_spec.js
+++ b/spec/frontend/alerts_settings/alerts_settings_wrapper_spec.js
@@ -1,10 +1,10 @@
import VueApollo from 'vue-apollo';
import { mount, createLocalVue } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
-import createMockApollo from 'jest/helpers/mock_apollo_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { useMockIntersectionObserver } from 'helpers/mock_dom_observer';
-import { GlLoadingIcon, GlAlert } from '@gitlab/ui';
+import { GlLoadingIcon } from '@gitlab/ui';
import axios from '~/lib/utils/axios_utils';
import AlertsSettingsWrapper from '~/alerts_settings/components/alerts_settings_wrapper.vue';
import AlertsSettingsForm from '~/alerts_settings/components/alerts_settings_form.vue';
@@ -376,17 +376,4 @@ describe('AlertsSettingsWrapper', () => {
});
});
});
-
- // TODO: Will be removed in 13.7 as part of: https://gitlab.com/gitlab-org/gitlab/-/issues/273657
- describe('Opsgenie integration', () => {
- it.each([true, false])('it shows/hides the alert when opsgenie is %s', active => {
- createComponent({
- data: { integrations: { list: mockIntegrations }, currentIntegration: mockIntegrations[0] },
- provide: { opsgenie: { active } },
- loading: false,
- });
-
- expect(wrapper.find(GlAlert).exists()).toBe(active);
- });
- });
});
diff --git a/spec/frontend/alerts_settings/util.js b/spec/frontend/alerts_settings/util.js
index f9f9b69791e..5c07f22f1c9 100644
--- a/spec/frontend/alerts_settings/util.js
+++ b/spec/frontend/alerts_settings/util.js
@@ -19,12 +19,6 @@ export const defaultAlertSettingsConfig = {
url: PROMETHEUS_URL,
active: ACTIVE,
},
- opsgenie: {
- opsgenieMvcIsAvailable: true,
- formPath: INVALID_URL,
- active: ACTIVE,
- opsgenieMvcTargetUrl: GENERIC_URL,
- },
projectPath: '',
multiIntegrations: true,
};
diff --git a/spec/frontend/analytics/instance_statistics/components/app_spec.js b/spec/frontend/analytics/instance_statistics/components/app_spec.js
index 8ac663b3046..cc676e86e99 100644
--- a/spec/frontend/analytics/instance_statistics/components/app_spec.js
+++ b/spec/frontend/analytics/instance_statistics/components/app_spec.js
@@ -25,11 +25,11 @@ describe('InstanceStatisticsApp', () => {
expect(wrapper.find(InstanceCounts).exists()).toBe(true);
});
- ['Pipelines', 'Issues & Merge Requests'].forEach(instance => {
+ ['Pipelines', 'Issues & Merge Requests'].forEach((instance) => {
it(`displays the ${instance} chart`, () => {
const chartTitles = wrapper
.findAll(InstanceStatisticsCountChart)
- .wrappers.map(chartComponent => chartComponent.props('chartTitle'));
+ .wrappers.map((chartComponent) => chartComponent.props('chartTitle'));
expect(chartTitles).toContain(instance);
});
diff --git a/spec/frontend/analytics/instance_statistics/components/instance_statistics_count_chart_spec.js b/spec/frontend/analytics/instance_statistics/components/instance_statistics_count_chart_spec.js
index 275a84988f8..a69f3388cbb 100644
--- a/spec/frontend/analytics/instance_statistics/components/instance_statistics_count_chart_spec.js
+++ b/spec/frontend/analytics/instance_statistics/components/instance_statistics_count_chart_spec.js
@@ -2,7 +2,7 @@ import { createLocalVue, shallowMount } from '@vue/test-utils';
import { GlLineChart } from '@gitlab/ui/dist/charts';
import { GlAlert } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
-import createMockApollo from 'jest/helpers/mock_apollo_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
import InstanceStatisticsCountChart from '~/analytics/instance_statistics/components/instance_statistics_count_chart.vue';
import statsQuery from '~/analytics/instance_statistics/graphql/queries/instance_count.query.graphql';
import ChartSkeletonLoader from '~/vue_shared/components/resizable_chart/skeleton_loader.vue';
diff --git a/spec/frontend/analytics/instance_statistics/components/projects_and_groups_chart_spec.js b/spec/frontend/analytics/instance_statistics/components/projects_and_groups_chart_spec.js
index d9f42430aa8..bf94e476ea3 100644
--- a/spec/frontend/analytics/instance_statistics/components/projects_and_groups_chart_spec.js
+++ b/spec/frontend/analytics/instance_statistics/components/projects_and_groups_chart_spec.js
@@ -2,7 +2,7 @@ import { createLocalVue, shallowMount } from '@vue/test-utils';
import { GlLineChart } from '@gitlab/ui/dist/charts';
import { GlAlert } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
-import createMockApollo from 'jest/helpers/mock_apollo_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
import { useFakeDate } from 'helpers/fake_date';
import ProjectsAndGroupChart from '~/analytics/instance_statistics/components/projects_and_groups_chart.vue';
import ChartSkeletonLoader from '~/vue_shared/components/resizable_chart/skeleton_loader.vue';
diff --git a/spec/frontend/analytics/instance_statistics/components/users_chart_spec.js b/spec/frontend/analytics/instance_statistics/components/users_chart_spec.js
index 6ed9d203f3d..b9fa30643df 100644
--- a/spec/frontend/analytics/instance_statistics/components/users_chart_spec.js
+++ b/spec/frontend/analytics/instance_statistics/components/users_chart_spec.js
@@ -2,7 +2,7 @@ import { createLocalVue, shallowMount } from '@vue/test-utils';
import { GlAreaChart } from '@gitlab/ui/dist/charts';
import { GlAlert } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
-import createMockApollo from 'jest/helpers/mock_apollo_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
import { useFakeDate } from 'helpers/fake_date';
import UsersChart from '~/analytics/instance_statistics/components/users_chart.vue';
import ChartSkeletonLoader from '~/vue_shared/components/resizable_chart/skeleton_loader.vue';
diff --git a/spec/frontend/api/api_utils_spec.js b/spec/frontend/api/api_utils_spec.js
new file mode 100644
index 00000000000..3fec26f0149
--- /dev/null
+++ b/spec/frontend/api/api_utils_spec.js
@@ -0,0 +1,35 @@
+import * as apiUtils from '~/api/api_utils';
+
+describe('~/api/api_utils.js', () => {
+ describe('buildApiUrl', () => {
+ beforeEach(() => {
+ window.gon = {
+ api_version: 'v7',
+ };
+ });
+
+ it('returns a URL with the correct API version', () => {
+ expect(apiUtils.buildApiUrl('/api/:version/users/:id/status')).toEqual(
+ '/api/v7/users/:id/status',
+ );
+ });
+
+ it('only replaces the first instance of :version in the URL', () => {
+ expect(apiUtils.buildApiUrl('/api/:version/projects/:id/packages/:version')).toEqual(
+ '/api/v7/projects/:id/packages/:version',
+ );
+ });
+
+ describe('when gon includes a relative_url_root property', () => {
+ beforeEach(() => {
+ window.gon.relative_url_root = '/relative/root';
+ });
+
+ it('returns a URL with the correct relative root URL and API version', () => {
+ expect(apiUtils.buildApiUrl('/api/:version/users/:id/status')).toEqual(
+ '/relative/root/api/v7/users/:id/status',
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/api_spec.js b/spec/frontend/api_spec.js
index 37630c15b89..76d67195499 100644
--- a/spec/frontend/api_spec.js
+++ b/spec/frontend/api_spec.js
@@ -34,7 +34,7 @@ describe('Api', () => {
expect(builtUrl).toEqual(expectedOutput);
});
- [null, '', '/'].forEach(root => {
+ [null, '', '/'].forEach((root) => {
it(`works when relative_url_root is ${root}`, () => {
window.gon.relative_url_root = root;
const input = '/api/:version/foo/bar';
@@ -137,14 +137,14 @@ describe('Api', () => {
});
describe('group', () => {
- it('fetches a group', done => {
+ it('fetches a group', (done) => {
const groupId = '123456';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}`;
mock.onGet(expectedUrl).reply(httpStatus.OK, {
name: 'test',
});
- Api.group(groupId, response => {
+ Api.group(groupId, (response) => {
expect(response.name).toBe('test');
done();
});
@@ -152,7 +152,7 @@ describe('Api', () => {
});
describe('groupMembers', () => {
- it('fetches group members', done => {
+ it('fetches group members', (done) => {
const groupId = '54321';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}/members`;
const expectedData = [{ id: 7 }];
@@ -167,8 +167,52 @@ describe('Api', () => {
});
});
+ describe('addGroupMembersByUserId', () => {
+ it('adds an existing User as a new Group Member by User ID', () => {
+ const groupId = 1;
+ const expectedUserId = 2;
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/1/members`;
+ const params = {
+ user_id: expectedUserId,
+ access_level: 10,
+ expires_at: undefined,
+ };
+
+ mock.onPost(expectedUrl).reply(200, {
+ id: expectedUserId,
+ state: 'active',
+ });
+
+ return Api.addGroupMembersByUserId(groupId, params).then(({ data }) => {
+ expect(data.id).toBe(expectedUserId);
+ expect(data.state).toBe('active');
+ });
+ });
+ });
+
+ describe('inviteGroupMembersByEmail', () => {
+ it('invites a new email address to create a new User and become a Group Member', () => {
+ const groupId = 1;
+ const email = 'email@example.com';
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/1/invitations`;
+ const params = {
+ email,
+ access_level: 10,
+ expires_at: undefined,
+ };
+
+ mock.onPost(expectedUrl).reply(200, {
+ status: 'success',
+ });
+
+ return Api.inviteGroupMembersByEmail(groupId, params).then(({ data }) => {
+ expect(data.status).toBe('success');
+ });
+ });
+ });
+
describe('groupMilestones', () => {
- it('fetches group milestones', done => {
+ it('fetches group milestones', (done) => {
const groupId = '16';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}/milestones`;
const expectedData = [
@@ -198,7 +242,7 @@ describe('Api', () => {
});
describe('groups', () => {
- it('fetches groups', done => {
+ it('fetches groups', (done) => {
const query = 'dummy query';
const options = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups.json`;
@@ -208,7 +252,7 @@ describe('Api', () => {
},
]);
- Api.groups(query, options, response => {
+ Api.groups(query, options, (response) => {
expect(response.length).toBe(1);
expect(response[0].name).toBe('test');
done();
@@ -217,7 +261,7 @@ describe('Api', () => {
});
describe('namespaces', () => {
- it('fetches namespaces', done => {
+ it('fetches namespaces', (done) => {
const query = 'dummy query';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/namespaces.json`;
mock.onGet(expectedUrl).reply(httpStatus.OK, [
@@ -226,7 +270,7 @@ describe('Api', () => {
},
]);
- Api.namespaces(query, response => {
+ Api.namespaces(query, (response) => {
expect(response.length).toBe(1);
expect(response[0].name).toBe('test');
done();
@@ -235,7 +279,7 @@ describe('Api', () => {
});
describe('projects', () => {
- it('fetches projects with membership when logged in', done => {
+ it('fetches projects with membership when logged in', (done) => {
const query = 'dummy query';
const options = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects.json`;
@@ -246,14 +290,14 @@ describe('Api', () => {
},
]);
- Api.projects(query, options, response => {
+ Api.projects(query, options, (response) => {
expect(response.length).toBe(1);
expect(response[0].name).toBe('test');
done();
});
});
- it('fetches projects without membership when not logged in', done => {
+ it('fetches projects without membership when not logged in', (done) => {
const query = 'dummy query';
const options = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects.json`;
@@ -263,7 +307,7 @@ describe('Api', () => {
},
]);
- Api.projects(query, options, response => {
+ Api.projects(query, options, (response) => {
expect(response.length).toBe(1);
expect(response[0].name).toBe('test');
done();
@@ -272,7 +316,7 @@ describe('Api', () => {
});
describe('updateProject', () => {
- it('update a project with the given payload', done => {
+ it('update a project with the given payload', (done) => {
const projectPath = 'foo';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}`;
mock.onPut(expectedUrl).reply(httpStatus.OK, { foo: 'bar' });
@@ -287,7 +331,7 @@ describe('Api', () => {
});
describe('projectUsers', () => {
- it('fetches all users of a particular project', done => {
+ it('fetches all users of a particular project', (done) => {
const query = 'dummy query';
const options = { unused: 'option' };
const projectPath = 'gitlab-org%2Fgitlab-ce';
@@ -299,7 +343,7 @@ describe('Api', () => {
]);
Api.projectUsers('gitlab-org/gitlab-ce', query, options)
- .then(response => {
+ .then((response) => {
expect(response.length).toBe(1);
expect(response[0].name).toBe('test');
})
@@ -312,7 +356,7 @@ describe('Api', () => {
const projectPath = 'abc';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/merge_requests`;
- it('fetches all merge requests for a project', done => {
+ it('fetches all merge requests for a project', (done) => {
const mockData = [{ source_branch: 'foo' }, { source_branch: 'bar' }];
mock.onGet(expectedUrl).reply(httpStatus.OK, mockData);
Api.projectMergeRequests(projectPath)
@@ -325,7 +369,7 @@ describe('Api', () => {
.catch(done.fail);
});
- it('fetches merge requests filtered with passed params', done => {
+ it('fetches merge requests filtered with passed params', (done) => {
const params = {
source_branch: 'bar',
};
@@ -343,7 +387,7 @@ describe('Api', () => {
});
describe('projectMergeRequest', () => {
- it('fetches a merge request', done => {
+ it('fetches a merge request', (done) => {
const projectPath = 'abc';
const mergeRequestId = '123456';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/merge_requests/${mergeRequestId}`;
@@ -361,7 +405,7 @@ describe('Api', () => {
});
describe('projectMergeRequestChanges', () => {
- it('fetches the changes of a merge request', done => {
+ it('fetches the changes of a merge request', (done) => {
const projectPath = 'abc';
const mergeRequestId = '123456';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/merge_requests/${mergeRequestId}/changes`;
@@ -379,7 +423,7 @@ describe('Api', () => {
});
describe('projectMergeRequestVersions', () => {
- it('fetches the versions of a merge request', done => {
+ it('fetches the versions of a merge request', (done) => {
const projectPath = 'abc';
const mergeRequestId = '123456';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/merge_requests/${mergeRequestId}/versions`;
@@ -400,7 +444,7 @@ describe('Api', () => {
});
describe('projectRunners', () => {
- it('fetches the runners of a project', done => {
+ it('fetches the runners of a project', (done) => {
const projectPath = 7;
const params = { scope: 'active' };
const mockData = [{ id: 4 }];
@@ -417,7 +461,7 @@ describe('Api', () => {
});
describe('projectMilestones', () => {
- it('fetches project milestones', done => {
+ it('fetches project milestones', (done) => {
const projectId = 1;
const options = { state: 'active' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/1/milestones`;
@@ -458,8 +502,52 @@ describe('Api', () => {
});
});
+ describe('addProjectMembersByUserId', () => {
+ it('adds an existing User as a new Project Member by User ID', () => {
+ const projectId = 1;
+ const expectedUserId = 2;
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/1/members`;
+ const params = {
+ user_id: expectedUserId,
+ access_level: 10,
+ expires_at: undefined,
+ };
+
+ mock.onPost(expectedUrl).reply(200, {
+ id: expectedUserId,
+ state: 'active',
+ });
+
+ return Api.addProjectMembersByUserId(projectId, params).then(({ data }) => {
+ expect(data.id).toBe(expectedUserId);
+ expect(data.state).toBe('active');
+ });
+ });
+ });
+
+ describe('inviteProjectMembersByEmail', () => {
+ it('invites a new email address to create a new User and become a Project Member', () => {
+ const projectId = 1;
+ const expectedEmail = 'email@example.com';
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/1/invitations`;
+ const params = {
+ email: expectedEmail,
+ access_level: 10,
+ expires_at: undefined,
+ };
+
+ mock.onPost(expectedUrl).reply(200, {
+ status: 'success',
+ });
+
+ return Api.inviteProjectMembersByEmail(projectId, params).then(({ data }) => {
+ expect(data.status).toBe('success');
+ });
+ });
+ });
+
describe('newLabel', () => {
- it('creates a new label', done => {
+ it('creates a new label', (done) => {
const namespace = 'some namespace';
const project = 'some project';
const labelData = { some: 'data' };
@@ -467,7 +555,7 @@ describe('Api', () => {
const expectedData = {
label: labelData,
};
- mock.onPost(expectedUrl).reply(config => {
+ mock.onPost(expectedUrl).reply((config) => {
expect(config.data).toBe(JSON.stringify(expectedData));
return [
@@ -478,20 +566,20 @@ describe('Api', () => {
];
});
- Api.newLabel(namespace, project, labelData, response => {
+ Api.newLabel(namespace, project, labelData, (response) => {
expect(response.name).toBe('test');
done();
});
});
- it('creates a group label', done => {
+ it('creates a group label', (done) => {
const namespace = 'group/subgroup';
const labelData = { some: 'data' };
const expectedUrl = Api.buildUrl(Api.groupLabelsPath).replace(':namespace_path', namespace);
const expectedData = {
label: labelData,
};
- mock.onPost(expectedUrl).reply(config => {
+ mock.onPost(expectedUrl).reply((config) => {
expect(config.data).toBe(JSON.stringify(expectedData));
return [
@@ -502,7 +590,7 @@ describe('Api', () => {
];
});
- Api.newLabel(namespace, undefined, labelData, response => {
+ Api.newLabel(namespace, undefined, labelData, (response) => {
expect(response.name).toBe('test');
done();
});
@@ -510,7 +598,7 @@ describe('Api', () => {
});
describe('groupProjects', () => {
- it('fetches group projects', done => {
+ it('fetches group projects', (done) => {
const groupId = '123456';
const query = 'dummy query';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}/projects.json`;
@@ -520,7 +608,7 @@ describe('Api', () => {
},
]);
- Api.groupProjects(groupId, query, {}, response => {
+ Api.groupProjects(groupId, query, {}, (response) => {
expect(response.length).toBe(1);
expect(response[0].name).toBe('test');
done();
@@ -561,7 +649,7 @@ describe('Api', () => {
templateKey,
)}`;
- it('fetches an issue template', done => {
+ it('fetches an issue template', (done) => {
mock.onGet(expectedUrl).reply(httpStatus.OK, 'test');
Api.issueTemplate(namespace, project, templateKey, templateType, (error, response) => {
@@ -587,7 +675,7 @@ describe('Api', () => {
const templateType = 'template type';
const expectedUrl = `${dummyUrlRoot}/${namespace}/${project}/templates/${templateType}`;
- it('fetches all templates by type', done => {
+ it('fetches all templates by type', (done) => {
const expectedData = [
{ key: 'Template1', name: 'Template 1', content: 'This is template 1!' },
];
@@ -615,12 +703,12 @@ describe('Api', () => {
});
describe('projectTemplates', () => {
- it('fetches a list of templates', done => {
+ it('fetches a list of templates', (done) => {
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/gitlab-org%2Fgitlab-ce/templates/licenses`;
mock.onGet(expectedUrl).reply(httpStatus.OK, 'test');
- Api.projectTemplates('gitlab-org/gitlab-ce', 'licenses', {}, response => {
+ Api.projectTemplates('gitlab-org/gitlab-ce', 'licenses', {}, (response) => {
expect(response).toBe('test');
done();
});
@@ -628,13 +716,13 @@ describe('Api', () => {
});
describe('projectTemplate', () => {
- it('fetches a single template', done => {
+ it('fetches a single template', (done) => {
const data = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/gitlab-org%2Fgitlab-ce/templates/licenses/test%20license`;
mock.onGet(expectedUrl).reply(httpStatus.OK, 'test');
- Api.projectTemplate('gitlab-org/gitlab-ce', 'licenses', 'test license', data, response => {
+ Api.projectTemplate('gitlab-org/gitlab-ce', 'licenses', 'test license', data, (response) => {
expect(response).toBe('test');
done();
});
@@ -642,7 +730,7 @@ describe('Api', () => {
});
describe('users', () => {
- it('fetches users', done => {
+ it('fetches users', (done) => {
const query = 'dummy query';
const options = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/users.json`;
@@ -663,7 +751,7 @@ describe('Api', () => {
});
describe('user', () => {
- it('fetches single user', done => {
+ it('fetches single user', (done) => {
const userId = '123456';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/users/${userId}`;
mock.onGet(expectedUrl).reply(httpStatus.OK, {
@@ -680,7 +768,7 @@ describe('Api', () => {
});
describe('user counts', () => {
- it('fetches single user counts', done => {
+ it('fetches single user counts', (done) => {
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/user_counts`;
mock.onGet(expectedUrl).reply(httpStatus.OK, {
merge_requests: 4,
@@ -696,7 +784,7 @@ describe('Api', () => {
});
describe('user status', () => {
- it('fetches single user status', done => {
+ it('fetches single user status', (done) => {
const userId = '123456';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/users/${userId}/status`;
mock.onGet(expectedUrl).reply(httpStatus.OK, {
@@ -713,7 +801,7 @@ describe('Api', () => {
});
describe('user projects', () => {
- it('fetches all projects that belong to a particular user', done => {
+ it('fetches all projects that belong to a particular user', (done) => {
const query = 'dummy query';
const options = { unused: 'option' };
const userId = '123456';
@@ -724,7 +812,7 @@ describe('Api', () => {
},
]);
- Api.userProjects(userId, query, options, response => {
+ Api.userProjects(userId, query, options, (response) => {
expect(response.length).toBe(1);
expect(response[0].name).toBe('test');
done();
@@ -733,7 +821,7 @@ describe('Api', () => {
});
describe('commitPipelines', () => {
- it('fetches pipelines for a given commit', done => {
+ it('fetches pipelines for a given commit', (done) => {
const projectId = 'example/foobar';
const commitSha = 'abc123def';
const expectedUrl = `${dummyUrlRoot}/${projectId}/commit/${commitSha}/pipelines`;
@@ -756,7 +844,7 @@ describe('Api', () => {
describe('pipelineJobs', () => {
it.each([undefined, {}, { foo: true }])(
'fetches the jobs for a given pipeline given %p params',
- async params => {
+ async (params) => {
const projectId = 123;
const pipelineId = 456;
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectId}/pipelines/${pipelineId}/jobs`;
@@ -774,7 +862,7 @@ describe('Api', () => {
});
describe('createBranch', () => {
- it('creates new branch', done => {
+ it('creates new branch', (done) => {
const ref = 'master';
const branch = 'new-branch-name';
const dummyProjectPath = 'gitlab-org/gitlab-ce';
@@ -799,7 +887,7 @@ describe('Api', () => {
});
describe('projectForks', () => {
- it('gets forked projects', done => {
+ it('gets forked projects', (done) => {
const dummyProjectPath = 'gitlab-org/gitlab-ce';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${encodeURIComponent(
dummyProjectPath,
@@ -822,7 +910,7 @@ describe('Api', () => {
});
describe('createContextCommits', () => {
- it('creates a new context commit', done => {
+ it('creates a new context commit', (done) => {
const projectPath = 'abc';
const mergeRequestId = '123456';
const commitsData = ['abcdefg'];
@@ -851,7 +939,7 @@ describe('Api', () => {
});
describe('allContextCommits', () => {
- it('gets all context commits', done => {
+ it('gets all context commits', (done) => {
const projectPath = 'abc';
const mergeRequestId = '123456';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/merge_requests/${mergeRequestId}/context_commits`;
@@ -872,7 +960,7 @@ describe('Api', () => {
});
describe('removeContextCommits', () => {
- it('removes context commits', done => {
+ it('removes context commits', (done) => {
const projectPath = 'abc';
const mergeRequestId = '123456';
const commitsData = ['abcdefg'];
@@ -1120,7 +1208,7 @@ describe('Api', () => {
});
describe('updateIssue', () => {
- it('update an issue with the given payload', done => {
+ it('update an issue with the given payload', (done) => {
const projectId = 8;
const issue = 1;
const expectedArray = [1, 2, 3];
@@ -1137,7 +1225,7 @@ describe('Api', () => {
});
describe('updateMergeRequest', () => {
- it('update an issue with the given payload', done => {
+ it('update an issue with the given payload', (done) => {
const projectId = 8;
const mergeRequest = 1;
const expectedArray = [1, 2, 3];
@@ -1154,7 +1242,7 @@ describe('Api', () => {
});
describe('tags', () => {
- it('fetches all tags of a particular project', done => {
+ it('fetches all tags of a particular project', (done) => {
const query = 'dummy query';
const options = { unused: 'option' };
const projectId = 8;
diff --git a/spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap b/spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap
new file mode 100644
index 00000000000..9d05e6d99f6
--- /dev/null
+++ b/spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap
@@ -0,0 +1,29 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Keep latest artifact checkbox sets correct setting value in checkbox with query result 1`] = `
+<div>
+ <!---->
+
+ <gl-form-checkbox-stub
+ checked="true"
+ >
+ <b
+ class="gl-mr-3"
+ >
+ Keep artifacts from most recent successful jobs
+ </b>
+
+ <gl-link-stub
+ href="/help/ci/pipelines/job_artifacts"
+ >
+ More information
+ </gl-link-stub>
+ </gl-form-checkbox-stub>
+
+ <p>
+
+ The latest artifacts created by jobs in the most recent successful pipeline will be stored.
+
+ </p>
+</div>
+`;
diff --git a/spec/frontend/artifacts_settings/components/keep_latest_artifact_checkbox_spec.js b/spec/frontend/artifacts_settings/components/keep_latest_artifact_checkbox_spec.js
new file mode 100644
index 00000000000..d7f07526b58
--- /dev/null
+++ b/spec/frontend/artifacts_settings/components/keep_latest_artifact_checkbox_spec.js
@@ -0,0 +1,88 @@
+import { GlFormCheckbox, GlLink } from '@gitlab/ui';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import KeepLatestArtifactCheckbox from '~/artifacts_settings/keep_latest_artifact_checkbox.vue';
+import GetKeepLatestArtifactProjectSetting from '~/artifacts_settings/graphql/queries/get_keep_latest_artifact_project_setting.query.graphql';
+import UpdateKeepLatestArtifactProjectSetting from '~/artifacts_settings/graphql/mutations/update_keep_latest_artifact_project_setting.mutation.graphql';
+
+const localVue = createLocalVue();
+localVue.use(VueApollo);
+
+const keepLatestArtifactMock = {
+ data: {
+ project: {
+ ciCdSettings: { keepLatestArtifact: true },
+ },
+ },
+};
+
+const keepLatestArtifactMockResponse = {
+ data: { ciCdSettingsUpdate: { errors: [], __typename: 'CiCdSettingsUpdatePayload' } },
+};
+
+describe('Keep latest artifact checkbox', () => {
+ let wrapper;
+ let apolloProvider;
+ let requestHandlers;
+
+ const fullPath = 'gitlab-org/gitlab';
+ const helpPagePath = '/help/ci/pipelines/job_artifacts';
+
+ const findCheckbox = () => wrapper.find(GlFormCheckbox);
+ const findHelpLink = () => wrapper.find(GlLink);
+
+ const createComponent = (handlers) => {
+ requestHandlers = {
+ keepLatestArtifactQueryHandler: jest.fn().mockResolvedValue(keepLatestArtifactMock),
+ keepLatestArtifactMutationHandler: jest
+ .fn()
+ .mockResolvedValue(keepLatestArtifactMockResponse),
+ ...handlers,
+ };
+
+ apolloProvider = createMockApollo([
+ [GetKeepLatestArtifactProjectSetting, requestHandlers.keepLatestArtifactQueryHandler],
+ [UpdateKeepLatestArtifactProjectSetting, requestHandlers.keepLatestArtifactMutationHandler],
+ ]);
+
+ wrapper = shallowMount(KeepLatestArtifactCheckbox, {
+ provide: {
+ fullPath,
+ helpPagePath,
+ },
+ localVue,
+ apolloProvider,
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ apolloProvider = null;
+ });
+
+ it('displays the checkbox and the help link', () => {
+ expect(findCheckbox().exists()).toBe(true);
+ expect(findHelpLink().exists()).toBe(true);
+ });
+
+ it('sets correct setting value in checkbox with query result', async () => {
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('calls mutation on artifact setting change with correct payload', () => {
+ findCheckbox().vm.$emit('change', false);
+
+ expect(requestHandlers.keepLatestArtifactMutationHandler).toHaveBeenCalledWith({
+ fullPath,
+ keepLatestArtifact: false,
+ });
+ });
+});
diff --git a/spec/frontend/authentication/two_factor_auth/components/recovery_codes_spec.js b/spec/frontend/authentication/two_factor_auth/components/recovery_codes_spec.js
index 025e605b920..98c8ed4b95d 100644
--- a/spec/frontend/authentication/two_factor_auth/components/recovery_codes_spec.js
+++ b/spec/frontend/authentication/two_factor_auth/components/recovery_codes_spec.js
@@ -34,8 +34,8 @@ describe('RecoveryCodes', () => {
const findAlert = () => wrapper.find(GlAlert);
const findRecoveryCodes = () => wrapper.findByTestId('recovery-codes');
const findCopyButton = () => wrapper.find(ClipboardButton);
- const findButtonByText = text =>
- wrapper.findAll(GlButton).wrappers.find(buttonWrapper => buttonWrapper.text() === text);
+ const findButtonByText = (text) =>
+ wrapper.findAll(GlButton).wrappers.find((buttonWrapper) => buttonWrapper.text() === text);
const findDownloadButton = () => findButtonByText('Download codes');
const findPrintButton = () => findButtonByText('Print codes');
const findProceedButton = () => findButtonByText('Proceed');
@@ -59,7 +59,7 @@ describe('RecoveryCodes', () => {
it('renders codes', () => {
const recoveryCodes = findRecoveryCodes().text();
- codes.forEach(code => {
+ codes.forEach((code) => {
expect(recoveryCodes).toContain(code);
});
});
diff --git a/spec/frontend/authentication/u2f/authenticate_spec.js b/spec/frontend/authentication/u2f/authenticate_spec.js
index 7a87b420195..bf50ee88035 100644
--- a/spec/frontend/authentication/u2f/authenticate_spec.js
+++ b/spec/frontend/authentication/u2f/authenticate_spec.js
@@ -38,7 +38,7 @@ describe('U2FAuthenticate', () => {
window.u2f = oldu2f;
});
- it('falls back to normal 2fa', done => {
+ it('falls back to normal 2fa', (done) => {
component
.start()
.then(() => {
@@ -50,15 +50,12 @@ describe('U2FAuthenticate', () => {
});
describe('with u2f available', () => {
- beforeEach(done => {
+ beforeEach((done) => {
// bypass automatic form submission within renderAuthenticated
jest.spyOn(component, 'renderAuthenticated').mockReturnValue(true);
u2fDevice = new MockU2FDevice();
- component
- .start()
- .then(done)
- .catch(done.fail);
+ component.start().then(done).catch(done.fail);
});
it('allows authenticating via a U2F device', () => {
diff --git a/spec/frontend/authentication/u2f/register_spec.js b/spec/frontend/authentication/u2f/register_spec.js
index e89ef773be6..9cbadbc2fef 100644
--- a/spec/frontend/authentication/u2f/register_spec.js
+++ b/spec/frontend/authentication/u2f/register_spec.js
@@ -10,15 +10,12 @@ describe('U2FRegister', () => {
preloadFixtures('u2f/register.html');
- beforeEach(done => {
+ beforeEach((done) => {
loadFixtures('u2f/register.html');
u2fDevice = new MockU2FDevice();
container = $('#js-register-token-2fa');
component = new U2FRegister(container, {});
- component
- .start()
- .then(done)
- .catch(done.fail);
+ component.start().then(done).catch(done.fail);
});
it('allows registering a U2F device', () => {
diff --git a/spec/frontend/awards_handler_spec.js b/spec/frontend/awards_handler_spec.js
index c6a9c911ccf..e9482ffbd3d 100644
--- a/spec/frontend/awards_handler_spec.js
+++ b/spec/frontend/awards_handler_spec.js
@@ -57,15 +57,13 @@ describe('AwardsHandler', () => {
preloadFixtures('snippets/show.html');
const openAndWaitForEmojiMenu = (sel = '.js-add-award') => {
- $(sel)
- .eq(0)
- .click();
+ $(sel).eq(0).click();
jest.runOnlyPendingTimers();
const $menu = $('.emoji-menu');
- return new Promise(resolve => {
+ return new Promise((resolve) => {
$menu.one('build-emoji-menu-finish', () => {
resolve();
});
@@ -337,9 +335,7 @@ describe('AwardsHandler', () => {
it('should remove already selected emoji', async () => {
await openEmojiMenuAndAddEmoji();
- $('.js-add-award')
- .eq(0)
- .click();
+ $('.js-add-award').eq(0).click();
const $block = $('.js-awards-block');
const $emoji = $('.emoji-menu').find(
`.emoji-menu-list:not(.frequent-emojis) ${emojiSelector}`,
@@ -360,7 +356,7 @@ describe('AwardsHandler', () => {
await openAndWaitForEmojiMenu();
const emojiMenu = document.querySelector('.emoji-menu');
- Array.prototype.forEach.call(emojiMenu.querySelectorAll('.emoji-menu-title'), title => {
+ Array.prototype.forEach.call(emojiMenu.querySelectorAll('.emoji-menu-title'), (title) => {
expect(title.textContent.trim().toLowerCase()).not.toBe('frequently used');
});
});
@@ -373,7 +369,7 @@ describe('AwardsHandler', () => {
const emojiMenu = document.querySelector('.emoji-menu');
const hasFrequentlyUsedHeading = Array.prototype.some.call(
emojiMenu.querySelectorAll('.emoji-menu-title'),
- title => title.textContent.trim().toLowerCase() === 'frequently used',
+ (title) => title.textContent.trim().toLowerCase() === 'frequently used',
);
expect(hasFrequentlyUsedHeading).toBe(true);
diff --git a/spec/frontend/badges/components/badge_form_spec.js b/spec/frontend/badges/components/badge_form_spec.js
index 1edc9adbfb2..34c65d51115 100644
--- a/spec/frontend/badges/components/badge_form_spec.js
+++ b/spec/frontend/badges/components/badge_form_spec.js
@@ -50,7 +50,7 @@ describe('BadgeForm component', () => {
});
});
- const sharedSubmitTests = submitAction => {
+ const sharedSubmitTests = (submitAction) => {
const nameSelector = '#badge-name';
const imageUrlSelector = '#badge-image-url';
const findImageUrlElement = () => vm.$el.querySelector(imageUrlSelector);
@@ -65,7 +65,7 @@ describe('BadgeForm component', () => {
const submitButton = vm.$el.querySelector('button[type="submit"]');
submitButton.click();
};
- const expectInvalidInput = inputElementSelector => {
+ const expectInvalidInput = (inputElementSelector) => {
const inputElement = vm.$el.querySelector(inputElementSelector);
expect(inputElement.checkValidity()).toBe(false);
@@ -74,7 +74,7 @@ describe('BadgeForm component', () => {
expect(feedbackElement).toBeVisible();
};
- beforeEach(done => {
+ beforeEach((done) => {
jest.spyOn(vm, submitAction).mockReturnValue(Promise.resolve());
store.replaceState({
...store.state,
diff --git a/spec/frontend/badges/components/badge_list_row_spec.js b/spec/frontend/badges/components/badge_list_row_spec.js
index 31f0d850857..c559c979cb3 100644
--- a/spec/frontend/badges/components/badge_list_row_spec.js
+++ b/spec/frontend/badges/components/badge_list_row_spec.js
@@ -73,7 +73,7 @@ describe('BadgeListRow component', () => {
expect(vm.editBadge).toHaveBeenCalled();
});
- it('calls updateBadgeInModal and shows modal when clicking the delete button', done => {
+ it('calls updateBadgeInModal and shows modal when clicking the delete button', (done) => {
jest.spyOn(vm, 'updateBadgeInModal').mockImplementation(() => {});
const deleteButton = vm.$el.querySelector('.table-button-footer button:last-of-type');
@@ -88,12 +88,10 @@ describe('BadgeListRow component', () => {
});
describe('for a group badge', () => {
- beforeEach(done => {
+ beforeEach((done) => {
badge.kind = GROUP_BADGE;
- Vue.nextTick()
- .then(done)
- .catch(done.fail);
+ Vue.nextTick().then(done).catch(done.fail);
});
it('renders the badge kind', () => {
diff --git a/spec/frontend/badges/components/badge_list_spec.js b/spec/frontend/badges/components/badge_list_spec.js
index 5ffc046eb97..da59e13e1e9 100644
--- a/spec/frontend/badges/components/badge_list_spec.js
+++ b/spec/frontend/badges/components/badge_list_spec.js
@@ -48,7 +48,7 @@ describe('BadgeList component', () => {
expect(rows).toHaveLength(numberOfDummyBadges);
});
- it('renders a message if no badges exist', done => {
+ it('renders a message if no badges exist', (done) => {
store.state.badges = [];
Vue.nextTick()
@@ -59,7 +59,7 @@ describe('BadgeList component', () => {
.catch(done.fail);
});
- it('shows a loading icon when loading', done => {
+ it('shows a loading icon when loading', (done) => {
store.state.isLoading = true;
Vue.nextTick()
@@ -73,15 +73,13 @@ describe('BadgeList component', () => {
});
describe('for group badges', () => {
- beforeEach(done => {
+ beforeEach((done) => {
store.state.kind = GROUP_BADGE;
- Vue.nextTick()
- .then(done)
- .catch(done.fail);
+ Vue.nextTick().then(done).catch(done.fail);
});
- it('renders a message if no badges exist', done => {
+ it('renders a message if no badges exist', (done) => {
store.state.badges = [];
Vue.nextTick()
diff --git a/spec/frontend/badges/components/badge_spec.js b/spec/frontend/badges/components/badge_spec.js
index 43004004fb2..990bc094d59 100644
--- a/spec/frontend/badges/components/badge_spec.js
+++ b/spec/frontend/badges/components/badge_spec.js
@@ -23,7 +23,7 @@ describe('Badge component', () => {
const createComponent = (props, el = null) => {
vm = mountComponent(Component, props, el);
const { badgeImage } = findElements();
- return new Promise(resolve => {
+ return new Promise((resolve) => {
badgeImage.addEventListener('load', resolve);
// Manually dispatch load event as it is not triggered
badgeImage.dispatchEvent(new Event('load'));
@@ -36,7 +36,7 @@ describe('Badge component', () => {
describe('watchers', () => {
describe('imageUrl', () => {
- it('sets isLoading and resets numRetries and hasError', done => {
+ it('sets isLoading and resets numRetries and hasError', (done) => {
const props = { ...dummyProps };
createComponent(props)
.then(() => {
@@ -60,7 +60,7 @@ describe('Badge component', () => {
});
describe('methods', () => {
- beforeEach(done => {
+ beforeEach((done) => {
createComponent({ ...dummyProps })
.then(done)
.catch(done.fail);
@@ -98,7 +98,7 @@ describe('Badge component', () => {
});
describe('behavior', () => {
- beforeEach(done => {
+ beforeEach((done) => {
setFixtures('<div id="dummy-element"></div>');
createComponent({ ...dummyProps }, '#dummy-element')
.then(done)
@@ -116,7 +116,7 @@ describe('Badge component', () => {
expect(vm.$el.querySelector('.btn-group')).toBeHidden();
});
- it('shows a loading icon when loading', done => {
+ it('shows a loading icon when loading', (done) => {
vm.isLoading = true;
Vue.nextTick()
@@ -132,7 +132,7 @@ describe('Badge component', () => {
.catch(done.fail);
});
- it('shows an error and reload button if loading failed', done => {
+ it('shows an error and reload button if loading failed', (done) => {
vm.hasError = true;
Vue.nextTick()
diff --git a/spec/frontend/badges/store/actions_spec.js b/spec/frontend/badges/store/actions_spec.js
index 921c21cb55e..52ae29affac 100644
--- a/spec/frontend/badges/store/actions_spec.js
+++ b/spec/frontend/badges/store/actions_spec.js
@@ -9,7 +9,10 @@ import { createDummyBadge, createDummyBadgeResponse } from '../dummy_badge';
describe('Badges store actions', () => {
const dummyEndpointUrl = `${TEST_HOST}/badges/endpoint`;
- const dummyBadges = [{ ...createDummyBadge(), id: 5 }, { ...createDummyBadge(), id: 6 }];
+ const dummyBadges = [
+ { ...createDummyBadge(), id: 5 },
+ { ...createDummyBadge(), id: 6 },
+ ];
let axiosMock;
let badgeId;
@@ -30,7 +33,7 @@ describe('Badges store actions', () => {
});
describe('requestNewBadge', () => {
- it('commits REQUEST_NEW_BADGE', done => {
+ it('commits REQUEST_NEW_BADGE', (done) => {
testAction(
actions.requestNewBadge,
null,
@@ -43,7 +46,7 @@ describe('Badges store actions', () => {
});
describe('receiveNewBadge', () => {
- it('commits RECEIVE_NEW_BADGE', done => {
+ it('commits RECEIVE_NEW_BADGE', (done) => {
const newBadge = createDummyBadge();
testAction(
actions.receiveNewBadge,
@@ -57,7 +60,7 @@ describe('Badges store actions', () => {
});
describe('receiveNewBadgeError', () => {
- it('commits RECEIVE_NEW_BADGE_ERROR', done => {
+ it('commits RECEIVE_NEW_BADGE_ERROR', (done) => {
testAction(
actions.receiveNewBadgeError,
null,
@@ -84,10 +87,10 @@ describe('Badges store actions', () => {
};
});
- it('dispatches requestNewBadge and receiveNewBadge for successful response', done => {
+ it('dispatches requestNewBadge and receiveNewBadge for successful response', (done) => {
const dummyResponse = createDummyBadgeResponse();
- endpointMock.replyOnce(req => {
+ endpointMock.replyOnce((req) => {
expect(req.data).toBe(
JSON.stringify({
name: 'TestBadge',
@@ -111,8 +114,8 @@ describe('Badges store actions', () => {
.catch(done.fail);
});
- it('dispatches requestNewBadge and receiveNewBadgeError for error response', done => {
- endpointMock.replyOnce(req => {
+ it('dispatches requestNewBadge and receiveNewBadgeError for error response', (done) => {
+ endpointMock.replyOnce((req) => {
expect(req.data).toBe(
JSON.stringify({
name: 'TestBadge',
@@ -138,7 +141,7 @@ describe('Badges store actions', () => {
});
describe('requestDeleteBadge', () => {
- it('commits REQUEST_DELETE_BADGE', done => {
+ it('commits REQUEST_DELETE_BADGE', (done) => {
testAction(
actions.requestDeleteBadge,
badgeId,
@@ -151,7 +154,7 @@ describe('Badges store actions', () => {
});
describe('receiveDeleteBadge', () => {
- it('commits RECEIVE_DELETE_BADGE', done => {
+ it('commits RECEIVE_DELETE_BADGE', (done) => {
testAction(
actions.receiveDeleteBadge,
badgeId,
@@ -164,7 +167,7 @@ describe('Badges store actions', () => {
});
describe('receiveDeleteBadgeError', () => {
- it('commits RECEIVE_DELETE_BADGE_ERROR', done => {
+ it('commits RECEIVE_DELETE_BADGE_ERROR', (done) => {
testAction(
actions.receiveDeleteBadgeError,
badgeId,
@@ -185,7 +188,7 @@ describe('Badges store actions', () => {
dispatch = jest.fn();
});
- it('dispatches requestDeleteBadge and receiveDeleteBadge for successful response', done => {
+ it('dispatches requestDeleteBadge and receiveDeleteBadge for successful response', (done) => {
endpointMock.replyOnce(() => {
expect(dispatch.mock.calls).toEqual([['requestDeleteBadge', badgeId]]);
dispatch.mockClear();
@@ -201,7 +204,7 @@ describe('Badges store actions', () => {
.catch(done.fail);
});
- it('dispatches requestDeleteBadge and receiveDeleteBadgeError for error response', done => {
+ it('dispatches requestDeleteBadge and receiveDeleteBadgeError for error response', (done) => {
endpointMock.replyOnce(() => {
expect(dispatch.mock.calls).toEqual([['requestDeleteBadge', badgeId]]);
dispatch.mockClear();
@@ -220,7 +223,7 @@ describe('Badges store actions', () => {
});
describe('editBadge', () => {
- it('commits START_EDITING', done => {
+ it('commits START_EDITING', (done) => {
const dummyBadge = createDummyBadge();
testAction(
actions.editBadge,
@@ -234,7 +237,7 @@ describe('Badges store actions', () => {
});
describe('requestLoadBadges', () => {
- it('commits REQUEST_LOAD_BADGES', done => {
+ it('commits REQUEST_LOAD_BADGES', (done) => {
const dummyData = 'this is not real data';
testAction(
actions.requestLoadBadges,
@@ -248,7 +251,7 @@ describe('Badges store actions', () => {
});
describe('receiveLoadBadges', () => {
- it('commits RECEIVE_LOAD_BADGES', done => {
+ it('commits RECEIVE_LOAD_BADGES', (done) => {
const badges = dummyBadges;
testAction(
actions.receiveLoadBadges,
@@ -262,7 +265,7 @@ describe('Badges store actions', () => {
});
describe('receiveLoadBadgesError', () => {
- it('commits RECEIVE_LOAD_BADGES_ERROR', done => {
+ it('commits RECEIVE_LOAD_BADGES_ERROR', (done) => {
testAction(
actions.receiveLoadBadgesError,
null,
@@ -283,7 +286,7 @@ describe('Badges store actions', () => {
dispatch = jest.fn();
});
- it('dispatches requestLoadBadges and receiveLoadBadges for successful response', done => {
+ it('dispatches requestLoadBadges and receiveLoadBadges for successful response', (done) => {
const dummyData = 'this is just some data';
const dummyReponse = [
createDummyBadgeResponse(),
@@ -307,7 +310,7 @@ describe('Badges store actions', () => {
.catch(done.fail);
});
- it('dispatches requestLoadBadges and receiveLoadBadgesError for error response', done => {
+ it('dispatches requestLoadBadges and receiveLoadBadgesError for error response', (done) => {
const dummyData = 'this is just some data';
endpointMock.replyOnce(() => {
expect(dispatch.mock.calls).toEqual([['requestLoadBadges', dummyData]]);
@@ -327,7 +330,7 @@ describe('Badges store actions', () => {
});
describe('requestRenderedBadge', () => {
- it('commits REQUEST_RENDERED_BADGE', done => {
+ it('commits REQUEST_RENDERED_BADGE', (done) => {
testAction(
actions.requestRenderedBadge,
null,
@@ -340,7 +343,7 @@ describe('Badges store actions', () => {
});
describe('receiveRenderedBadge', () => {
- it('commits RECEIVE_RENDERED_BADGE', done => {
+ it('commits RECEIVE_RENDERED_BADGE', (done) => {
const dummyBadge = createDummyBadge();
testAction(
actions.receiveRenderedBadge,
@@ -354,7 +357,7 @@ describe('Badges store actions', () => {
});
describe('receiveRenderedBadgeError', () => {
- it('commits RECEIVE_RENDERED_BADGE_ERROR', done => {
+ it('commits RECEIVE_RENDERED_BADGE_ERROR', (done) => {
testAction(
actions.receiveRenderedBadgeError,
null,
@@ -385,7 +388,7 @@ describe('Badges store actions', () => {
dispatch = jest.fn();
});
- it('returns immediately if imageUrl is empty', done => {
+ it('returns immediately if imageUrl is empty', (done) => {
jest.spyOn(axios, 'get').mockImplementation(() => {});
badgeInForm.imageUrl = '';
@@ -398,7 +401,7 @@ describe('Badges store actions', () => {
.catch(done.fail);
});
- it('returns immediately if linkUrl is empty', done => {
+ it('returns immediately if linkUrl is empty', (done) => {
jest.spyOn(axios, 'get').mockImplementation(() => {});
badgeInForm.linkUrl = '';
@@ -411,7 +414,7 @@ describe('Badges store actions', () => {
.catch(done.fail);
});
- it('escapes user input', done => {
+ it('escapes user input', (done) => {
jest
.spyOn(axios, 'get')
.mockImplementation(() => Promise.resolve({ data: createDummyBadgeResponse() }));
@@ -434,7 +437,7 @@ describe('Badges store actions', () => {
.catch(done.fail);
});
- it('dispatches requestRenderedBadge and receiveRenderedBadge for successful response', done => {
+ it('dispatches requestRenderedBadge and receiveRenderedBadge for successful response', (done) => {
const dummyReponse = createDummyBadgeResponse();
endpointMock.replyOnce(() => {
expect(dispatch.mock.calls).toEqual([['requestRenderedBadge']]);
@@ -453,7 +456,7 @@ describe('Badges store actions', () => {
.catch(done.fail);
});
- it('dispatches requestRenderedBadge and receiveRenderedBadgeError for error response', done => {
+ it('dispatches requestRenderedBadge and receiveRenderedBadgeError for error response', (done) => {
endpointMock.replyOnce(() => {
expect(dispatch.mock.calls).toEqual([['requestRenderedBadge']]);
dispatch.mockClear();
@@ -472,7 +475,7 @@ describe('Badges store actions', () => {
});
describe('requestUpdatedBadge', () => {
- it('commits REQUEST_UPDATED_BADGE', done => {
+ it('commits REQUEST_UPDATED_BADGE', (done) => {
testAction(
actions.requestUpdatedBadge,
null,
@@ -485,7 +488,7 @@ describe('Badges store actions', () => {
});
describe('receiveUpdatedBadge', () => {
- it('commits RECEIVE_UPDATED_BADGE', done => {
+ it('commits RECEIVE_UPDATED_BADGE', (done) => {
const updatedBadge = createDummyBadge();
testAction(
actions.receiveUpdatedBadge,
@@ -499,7 +502,7 @@ describe('Badges store actions', () => {
});
describe('receiveUpdatedBadgeError', () => {
- it('commits RECEIVE_UPDATED_BADGE_ERROR', done => {
+ it('commits RECEIVE_UPDATED_BADGE_ERROR', (done) => {
testAction(
actions.receiveUpdatedBadgeError,
null,
@@ -526,10 +529,10 @@ describe('Badges store actions', () => {
dispatch = jest.fn();
});
- it('dispatches requestUpdatedBadge and receiveUpdatedBadge for successful response', done => {
+ it('dispatches requestUpdatedBadge and receiveUpdatedBadge for successful response', (done) => {
const dummyResponse = createDummyBadgeResponse();
- endpointMock.replyOnce(req => {
+ endpointMock.replyOnce((req) => {
expect(req.data).toBe(
JSON.stringify({
name: 'TestBadge',
@@ -553,8 +556,8 @@ describe('Badges store actions', () => {
.catch(done.fail);
});
- it('dispatches requestUpdatedBadge and receiveUpdatedBadgeError for error response', done => {
- endpointMock.replyOnce(req => {
+ it('dispatches requestUpdatedBadge and receiveUpdatedBadgeError for error response', (done) => {
+ endpointMock.replyOnce((req) => {
expect(req.data).toBe(
JSON.stringify({
name: 'TestBadge',
@@ -580,7 +583,7 @@ describe('Badges store actions', () => {
});
describe('stopEditing', () => {
- it('commits STOP_EDITING', done => {
+ it('commits STOP_EDITING', (done) => {
testAction(
actions.stopEditing,
null,
@@ -593,7 +596,7 @@ describe('Badges store actions', () => {
});
describe('updateBadgeInForm', () => {
- it('commits UPDATE_BADGE_IN_FORM', done => {
+ it('commits UPDATE_BADGE_IN_FORM', (done) => {
const dummyBadge = createDummyBadge();
testAction(
actions.updateBadgeInForm,
@@ -606,7 +609,7 @@ describe('Badges store actions', () => {
});
describe('updateBadgeInModal', () => {
- it('commits UPDATE_BADGE_IN_MODAL', done => {
+ it('commits UPDATE_BADGE_IN_MODAL', (done) => {
const dummyBadge = createDummyBadge();
testAction(
actions.updateBadgeInModal,
diff --git a/spec/frontend/batch_comments/components/diff_file_drafts_spec.js b/spec/frontend/batch_comments/components/diff_file_drafts_spec.js
index 6e0b61db9fa..dcb68b1804f 100644
--- a/spec/frontend/batch_comments/components/diff_file_drafts_spec.js
+++ b/spec/frontend/batch_comments/components/diff_file_drafts_spec.js
@@ -44,18 +44,8 @@ describe('Batch comments diff file drafts component', () => {
expect(vm.findAll('.js-diff-notes-index').length).toEqual(2);
- expect(
- vm
- .findAll('.js-diff-notes-index')
- .at(0)
- .text(),
- ).toEqual('1');
-
- expect(
- vm
- .findAll('.js-diff-notes-index')
- .at(1)
- .text(),
- ).toEqual('2');
+ expect(vm.findAll('.js-diff-notes-index').at(0).text()).toEqual('1');
+
+ expect(vm.findAll('.js-diff-notes-index').at(1).text()).toEqual('2');
});
});
diff --git a/spec/frontend/batch_comments/components/draft_note_spec.js b/spec/frontend/batch_comments/components/draft_note_spec.js
index 99980c98f8b..ae7134b63c8 100644
--- a/spec/frontend/batch_comments/components/draft_note_spec.js
+++ b/spec/frontend/batch_comments/components/draft_note_spec.js
@@ -65,7 +65,7 @@ describe('Batch comments draft note component', () => {
);
});
- it('sets as loading when draft is publishing', done => {
+ it('sets as loading when draft is publishing', (done) => {
createComponent();
wrapper.vm.$store.state.batchComments.currentlyPublishingDrafts.push(1);
@@ -80,7 +80,7 @@ describe('Batch comments draft note component', () => {
});
describe('update', () => {
- it('dispatches updateDraft', done => {
+ it('dispatches updateDraft', (done) => {
createComponent();
const note = wrapper.find(NoteableNote);
@@ -121,7 +121,7 @@ describe('Batch comments draft note component', () => {
});
describe('quick actions', () => {
- it('renders referenced commands', done => {
+ it('renders referenced commands', (done) => {
createComponent();
wrapper.setProps({
draft: {
diff --git a/spec/frontend/batch_comments/components/drafts_count_spec.js b/spec/frontend/batch_comments/components/drafts_count_spec.js
index 83d2f9eb639..5f74de9c014 100644
--- a/spec/frontend/batch_comments/components/drafts_count_spec.js
+++ b/spec/frontend/batch_comments/components/drafts_count_spec.js
@@ -27,7 +27,7 @@ describe('Batch comments drafts count component', () => {
expect(vm.$el.textContent).toContain('1');
});
- it('renders screen reader text', done => {
+ it('renders screen reader text', (done) => {
const el = vm.$el.querySelector('.sr-only');
expect(el.textContent).toContain('draft');
diff --git a/spec/frontend/batch_comments/components/preview_item_spec.js b/spec/frontend/batch_comments/components/preview_item_spec.js
index 8ddad3dacfe..173b2710a30 100644
--- a/spec/frontend/batch_comments/components/preview_item_spec.js
+++ b/spec/frontend/batch_comments/components/preview_item_spec.js
@@ -85,7 +85,7 @@ describe('Batch comments draft preview item component', () => {
describe('for thread', () => {
beforeEach(() => {
- createComponent(false, { discussion_id: '1', resolve_discussion: true }, store => {
+ createComponent(false, { discussion_id: '1', resolve_discussion: true }, (store) => {
store.state.notes.discussions.push({
id: '1',
notes: [
diff --git a/spec/frontend/batch_comments/components/publish_button_spec.js b/spec/frontend/batch_comments/components/publish_button_spec.js
index 4032713150c..eca424814b4 100644
--- a/spec/frontend/batch_comments/components/publish_button_spec.js
+++ b/spec/frontend/batch_comments/components/publish_button_spec.js
@@ -29,7 +29,7 @@ describe('Batch comments publish button component', () => {
expect(vm.$store.dispatch).toHaveBeenCalledWith('batchComments/publishReview', undefined);
});
- it('sets loading when isPublishing is true', done => {
+ it('sets loading when isPublishing is true', (done) => {
vm.$store.state.batchComments.isPublishing = true;
vm.$nextTick(() => {
diff --git a/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js b/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js
index e66f36aa3a2..3ad131de24e 100644
--- a/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js
+++ b/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js
@@ -1,6 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
-import { TEST_HOST } from 'jest/helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
import * as actions from '~/batch_comments/stores/modules/batch_comments/actions';
import axios from '~/lib/utils/axios_utils';
@@ -28,7 +28,7 @@ describe('Batch comments store actions', () => {
});
describe('addDraftToDiscussion', () => {
- it('commits ADD_NEW_DRAFT if no errors returned', done => {
+ it('commits ADD_NEW_DRAFT if no errors returned', (done) => {
res = { id: 1 };
mock.onAny().reply(200, res);
@@ -42,7 +42,7 @@ describe('Batch comments store actions', () => {
);
});
- it('does not commit ADD_NEW_DRAFT if errors returned', done => {
+ it('does not commit ADD_NEW_DRAFT if errors returned', (done) => {
mock.onAny().reply(500);
testAction(
@@ -57,7 +57,7 @@ describe('Batch comments store actions', () => {
});
describe('createNewDraft', () => {
- it('commits ADD_NEW_DRAFT if no errors returned', done => {
+ it('commits ADD_NEW_DRAFT if no errors returned', (done) => {
res = { id: 1 };
mock.onAny().reply(200, res);
@@ -71,7 +71,7 @@ describe('Batch comments store actions', () => {
);
});
- it('does not commit ADD_NEW_DRAFT if errors returned', done => {
+ it('does not commit ADD_NEW_DRAFT if errors returned', (done) => {
mock.onAny().reply(500);
testAction(actions.createNewDraft, { endpoint: TEST_HOST, data: 'test' }, null, [], [], done);
@@ -89,7 +89,7 @@ describe('Batch comments store actions', () => {
};
});
- it('commits DELETE_DRAFT if no errors returned', done => {
+ it('commits DELETE_DRAFT if no errors returned', (done) => {
const commit = jest.fn();
const context = {
getters,
@@ -107,7 +107,7 @@ describe('Batch comments store actions', () => {
.catch(done.fail);
});
- it('does not commit DELETE_DRAFT if errors returned', done => {
+ it('does not commit DELETE_DRAFT if errors returned', (done) => {
const commit = jest.fn();
const context = {
getters,
@@ -136,7 +136,7 @@ describe('Batch comments store actions', () => {
};
});
- it('commits SET_BATCH_COMMENTS_DRAFTS with returned data', done => {
+ it('commits SET_BATCH_COMMENTS_DRAFTS with returned data', (done) => {
const commit = jest.fn();
const context = {
getters,
@@ -170,7 +170,7 @@ describe('Batch comments store actions', () => {
rootGetters = { discussionsStructuredByLineCode: 'discussions' };
});
- it('dispatches actions & commits', done => {
+ it('dispatches actions & commits', (done) => {
mock.onAny().reply(200);
actions
@@ -185,7 +185,7 @@ describe('Batch comments store actions', () => {
.catch(done.fail);
});
- it('dispatches error commits', done => {
+ it('dispatches error commits', (done) => {
mock.onAny().reply(500);
actions
@@ -210,7 +210,7 @@ describe('Batch comments store actions', () => {
};
});
- it('commits RECEIVE_DRAFT_UPDATE_SUCCESS with returned data', done => {
+ it('commits RECEIVE_DRAFT_UPDATE_SUCCESS with returned data', (done) => {
const commit = jest.fn();
const context = {
getters,
@@ -228,7 +228,7 @@ describe('Batch comments store actions', () => {
.catch(done.fail);
});
- it('calls passed callback', done => {
+ it('calls passed callback', (done) => {
const commit = jest.fn();
const context = {
getters,
@@ -249,7 +249,7 @@ describe('Batch comments store actions', () => {
});
describe('expandAllDiscussions', () => {
- it('dispatches expandDiscussion for all drafts', done => {
+ it('dispatches expandDiscussion for all drafts', (done) => {
const state = {
drafts: [
{
diff --git a/spec/frontend/behaviors/autosize_spec.js b/spec/frontend/behaviors/autosize_spec.js
index 3444c7b4075..352bd8a0ed0 100644
--- a/spec/frontend/behaviors/autosize_spec.js
+++ b/spec/frontend/behaviors/autosize_spec.js
@@ -6,7 +6,7 @@ function load() {
jest.mock('~/helpers/startup_css_helper', () => {
return {
- waitForCSSLoaded: jest.fn().mockImplementation(cb => cb.apply()),
+ waitForCSSLoaded: jest.fn().mockImplementation((cb) => cb.apply()),
};
});
diff --git a/spec/frontend/behaviors/bind_in_out_spec.js b/spec/frontend/behaviors/bind_in_out_spec.js
index 92a68ddd387..49425a9377e 100644
--- a/spec/frontend/behaviors/bind_in_out_spec.js
+++ b/spec/frontend/behaviors/bind_in_out_spec.js
@@ -1,5 +1,5 @@
+import ClassSpecHelper from 'helpers/class_spec_helper';
import BindInOut from '~/behaviors/bind_in_out';
-import ClassSpecHelper from '../helpers/class_spec_helper';
describe('BindInOut', () => {
let testContext;
diff --git a/spec/frontend/behaviors/copy_as_gfm_spec.js b/spec/frontend/behaviors/copy_as_gfm_spec.js
index 46d4451c941..16ea4ba8624 100644
--- a/spec/frontend/behaviors/copy_as_gfm_spec.js
+++ b/spec/frontend/behaviors/copy_as_gfm_spec.js
@@ -57,7 +57,7 @@ describe('CopyAsGFM', () => {
const fragment = document.createDocumentFragment();
const node = document.createElement('div');
node.innerHTML = html;
- Array.from(node.childNodes).forEach(item => fragment.appendChild(item));
+ Array.from(node.childNodes).forEach((item) => fragment.appendChild(item));
return fragment;
},
}),
@@ -80,7 +80,7 @@ describe('CopyAsGFM', () => {
return clipboardData;
};
- beforeAll(done => {
+ beforeAll((done) => {
initCopyAsGFM();
// Fake call to nodeToGfm so the lazy bundle import happens
@@ -94,7 +94,7 @@ describe('CopyAsGFM', () => {
beforeEach(() => jest.spyOn(clipboardData, 'setData'));
describe('list handling', () => {
- it('uses correct gfm for unordered lists', done => {
+ it('uses correct gfm for unordered lists', (done) => {
const selection = stubSelection('<li>List Item1</li><li>List Item2</li>\n', 'UL');
window.getSelection = jest.fn(() => selection);
@@ -108,7 +108,7 @@ describe('CopyAsGFM', () => {
});
});
- it('uses correct gfm for ordered lists', done => {
+ it('uses correct gfm for ordered lists', (done) => {
const selection = stubSelection('<li>List Item1</li><li>List Item2</li>\n', 'OL');
window.getSelection = jest.fn(() => selection);
@@ -127,7 +127,7 @@ describe('CopyAsGFM', () => {
describe('CopyAsGFM.quoted', () => {
const sampleGFM = '* List 1\n* List 2\n\n`Some code`';
- it('adds quote char `> ` to each line', done => {
+ it('adds quote char `> ` to each line', (done) => {
const expectedQuotedGFM = '> * List 1\n> * List 2\n> \n> `Some code`';
expect(CopyAsGFM.quoted(sampleGFM)).toEqual(expectedQuotedGFM);
done();
diff --git a/spec/frontend/behaviors/gl_emoji_spec.js b/spec/frontend/behaviors/gl_emoji_spec.js
index 46b4e5d3d5c..6e476d84501 100644
--- a/spec/frontend/behaviors/gl_emoji_spec.js
+++ b/spec/frontend/behaviors/gl_emoji_spec.js
@@ -1,5 +1,5 @@
import MockAdapter from 'axios-mock-adapter';
-import waitForPromises from 'jest/helpers/wait_for_promises';
+import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import { initEmojiMap, EMOJI_VERSION } from '~/emoji';
import installGlEmojiElement from '~/behaviors/gl_emoji';
diff --git a/spec/frontend/behaviors/load_startup_css_spec.js b/spec/frontend/behaviors/load_startup_css_spec.js
index 81222ac5aaa..59f49585645 100644
--- a/spec/frontend/behaviors/load_startup_css_spec.js
+++ b/spec/frontend/behaviors/load_startup_css_spec.js
@@ -7,7 +7,7 @@ describe('behaviors/load_startup_css', () => {
const setupListeners = () => {
document
.querySelectorAll('link')
- .forEach(x => x.addEventListener('load', () => loadListener(x)));
+ .forEach((x) => x.addEventListener('load', () => loadListener(x)));
};
beforeEach(() => {
diff --git a/spec/frontend/behaviors/markdown/paste_markdown_table_spec.js b/spec/frontend/behaviors/markdown/paste_markdown_table_spec.js
index eab805382bd..7044618fd9e 100644
--- a/spec/frontend/behaviors/markdown/paste_markdown_table_spec.js
+++ b/spec/frontend/behaviors/markdown/paste_markdown_table_spec.js
@@ -8,7 +8,7 @@ describe('PasteMarkdownTable', () => {
Object.defineProperty(event, 'dataTransfer', {
value: {
- getData: jest.fn().mockImplementation(type => {
+ getData: jest.fn().mockImplementation((type) => {
if (type === 'text/html') {
return '<table><tr><td>First</td><td>Second</td></tr></table>';
}
@@ -48,7 +48,7 @@ describe('PasteMarkdownTable', () => {
it('returns false when the number of rows are not consistent', () => {
data.types = ['text/html', 'text/plain'];
- data.getData = jest.fn().mockImplementation(mimeType => {
+ data.getData = jest.fn().mockImplementation((mimeType) => {
if (mimeType === 'text/html') {
return '<table><tr><td>def test<td></tr></table>';
}
@@ -60,7 +60,7 @@ describe('PasteMarkdownTable', () => {
it('returns false when the table copy comes from a diff', () => {
data.types = ['text/html', 'text/plain'];
- data.getData = jest.fn().mockImplementation(mimeType => {
+ data.getData = jest.fn().mockImplementation((mimeType) => {
if (mimeType === 'text/html') {
return '<table class="diff-wrap-lines"><tr><td>First</td><td>Second</td></tr></table>';
}
@@ -74,7 +74,7 @@ describe('PasteMarkdownTable', () => {
describe('convertToTableMarkdown', () => {
it('returns a Markdown table', () => {
data.types = ['text/html', 'text/plain'];
- data.getData = jest.fn().mockImplementation(type => {
+ data.getData = jest.fn().mockImplementation((type) => {
if (type === 'text/html') {
return '<table><tr><td>First</td><td>Last</td><tr><td>John</td><td>Doe</td><tr><td>Jane</td><td>Doe</td></table>';
} else if (type === 'text/plain') {
@@ -99,7 +99,7 @@ describe('PasteMarkdownTable', () => {
it('returns a Markdown table with rows normalized', () => {
data.types = ['text/html', 'text/plain'];
- data.getData = jest.fn().mockImplementation(type => {
+ data.getData = jest.fn().mockImplementation((type) => {
if (type === 'text/html') {
return '<table><tr><td>First</td><td>Last</td><tr><td>John</td><td>Doe</td><tr><td>Jane</td><td>/td></table>';
} else if (type === 'text/plain') {
diff --git a/spec/frontend/behaviors/quick_submit_spec.js b/spec/frontend/behaviors/quick_submit_spec.js
index 2dc2bb198e8..d3d65892aff 100644
--- a/spec/frontend/behaviors/quick_submit_spec.js
+++ b/spec/frontend/behaviors/quick_submit_spec.js
@@ -17,7 +17,7 @@ describe('Quick Submit behavior', () => {
submit: jest.fn(),
};
- $('form').submit(e => {
+ $('form').submit((e) => {
// Prevent a form submit from moving us off the testing page
e.preventDefault();
// Explicitly call the spy to verify this function does not get called
diff --git a/spec/frontend/behaviors/requires_input_spec.js b/spec/frontend/behaviors/requires_input_spec.js
index 617fe49b059..0f27f89d6dc 100644
--- a/spec/frontend/behaviors/requires_input_spec.js
+++ b/spec/frontend/behaviors/requires_input_spec.js
@@ -32,30 +32,18 @@ describe('requiresInput', () => {
it('enables submit when all required fields receive input', () => {
$('.js-requires-input').requiresInput();
- $('#required1')
- .val('input1')
- .change();
+ $('#required1').val('input1').change();
expect(submitButton).toBeDisabled();
- $('#optional1')
- .val('input1')
- .change();
+ $('#optional1').val('input1').change();
expect(submitButton).toBeDisabled();
- $('#required2')
- .val('input2')
- .change();
- $('#required3')
- .val('input3')
- .change();
- $('#required4')
- .val('input4')
- .change();
- $('#required5')
- .val('1')
- .change();
+ $('#required2').val('input2').change();
+ $('#required3').val('input3').change();
+ $('#required4').val('input4').change();
+ $('#required5').val('1').change();
expect($('.submit')).not.toBeDisabled();
});
diff --git a/spec/frontend/behaviors/secret_values_spec.js b/spec/frontend/behaviors/secret_values_spec.js
index 5aaab093c0c..06155017dd1 100644
--- a/spec/frontend/behaviors/secret_values_spec.js
+++ b/spec/frontend/behaviors/secret_values_spec.js
@@ -18,7 +18,7 @@ function generateValueMarkup(
function generateFixtureMarkup(secrets, isRevealed, valueClass, placeholderClass) {
return `
<div class="js-secret-container">
- ${secrets.map(secret => generateValueMarkup(secret, valueClass, placeholderClass)).join('')}
+ ${secrets.map((secret) => generateValueMarkup(secret, valueClass, placeholderClass)).join('')}
<button
class="js-secret-value-reveal-button"
data-secret-reveal-status="${isRevealed}"
@@ -122,12 +122,12 @@ describe('setupSecretValues', () => {
const placeholders = wrapper.querySelectorAll('.js-secret-value-placeholder');
expect(values.length).toEqual(3);
- values.forEach(value => {
+ values.forEach((value) => {
expect(value.classList.contains('hide')).toEqual(true);
});
expect(placeholders.length).toEqual(3);
- placeholders.forEach(placeholder => {
+ placeholders.forEach((placeholder) => {
expect(placeholder.classList.contains('hide')).toEqual(false);
});
});
@@ -141,24 +141,24 @@ describe('setupSecretValues', () => {
revealButton.click();
expect(values.length).toEqual(3);
- values.forEach(value => {
+ values.forEach((value) => {
expect(value.classList.contains('hide')).toEqual(false);
});
expect(placeholders.length).toEqual(3);
- placeholders.forEach(placeholder => {
+ placeholders.forEach((placeholder) => {
expect(placeholder.classList.contains('hide')).toEqual(true);
});
revealButton.click();
expect(values.length).toEqual(3);
- values.forEach(value => {
+ values.forEach((value) => {
expect(value.classList.contains('hide')).toEqual(true);
});
expect(placeholders.length).toEqual(3);
- placeholders.forEach(placeholder => {
+ placeholders.forEach((placeholder) => {
expect(placeholder.classList.contains('hide')).toEqual(false);
});
});
@@ -181,24 +181,24 @@ describe('setupSecretValues', () => {
revealButton.click();
expect(values.length).toEqual(4);
- values.forEach(value => {
+ values.forEach((value) => {
expect(value.classList.contains('hide')).toEqual(false);
});
expect(placeholders.length).toEqual(4);
- placeholders.forEach(placeholder => {
+ placeholders.forEach((placeholder) => {
expect(placeholder.classList.contains('hide')).toEqual(true);
});
revealButton.click();
expect(values.length).toEqual(4);
- values.forEach(value => {
+ values.forEach((value) => {
expect(value.classList.contains('hide')).toEqual(true);
});
expect(placeholders.length).toEqual(4);
- placeholders.forEach(placeholder => {
+ placeholders.forEach((placeholder) => {
expect(placeholder.classList.contains('hide')).toEqual(false);
});
});
diff --git a/spec/frontend/behaviors/shortcuts/keybindings_spec.js b/spec/frontend/behaviors/shortcuts/keybindings_spec.js
index 23fea79f828..d05b3fbdce2 100644
--- a/spec/frontend/behaviors/shortcuts/keybindings_spec.js
+++ b/spec/frontend/behaviors/shortcuts/keybindings_spec.js
@@ -9,7 +9,7 @@ describe('~/behaviors/shortcuts/keybindings.js', () => {
useLocalStorageSpy();
});
- const setupCustomizations = async customizationsAsString => {
+ const setupCustomizations = async (customizationsAsString) => {
localStorage.clear();
if (customizationsAsString) {
diff --git a/spec/frontend/behaviors/shortcuts/shortcuts_issuable_spec.js b/spec/frontend/behaviors/shortcuts/shortcuts_issuable_spec.js
index 77dcc28dd48..94ba1615c89 100644
--- a/spec/frontend/behaviors/shortcuts/shortcuts_issuable_spec.js
+++ b/spec/frontend/behaviors/shortcuts/shortcuts_issuable_spec.js
@@ -15,7 +15,7 @@ describe('ShortcutsIssuable', () => {
preloadFixtures(snippetShowFixtureName, mrShowFixtureName);
- beforeAll(done => {
+ beforeAll((done) => {
initCopyAsGFM();
// Fake call to nodeToGfm so that the lazy bundle import happens
@@ -81,7 +81,7 @@ describe('ShortcutsIssuable', () => {
stubSelection('<p>Selected text.</p>');
});
- it('leaves existing input intact', done => {
+ it('leaves existing input intact', (done) => {
$(FORM_SELECTOR).val('This text was already here.');
expect($(FORM_SELECTOR).val()).toBe('This text was already here.');
@@ -96,7 +96,7 @@ describe('ShortcutsIssuable', () => {
});
});
- it('triggers `input`', done => {
+ it('triggers `input`', (done) => {
let triggered = false;
$(FORM_SELECTOR).on('input', () => {
triggered = true;
@@ -110,7 +110,7 @@ describe('ShortcutsIssuable', () => {
});
});
- it('triggers `focus`', done => {
+ it('triggers `focus`', (done) => {
const spy = jest.spyOn(document.querySelector(FORM_SELECTOR), 'focus');
ShortcutsIssuable.replyWithSelectedText(true);
@@ -122,7 +122,7 @@ describe('ShortcutsIssuable', () => {
});
describe('with a one-line selection', () => {
- it('quotes the selection', done => {
+ it('quotes the selection', (done) => {
stubSelection('<p>This text has been selected.</p>');
ShortcutsIssuable.replyWithSelectedText(true);
@@ -134,7 +134,7 @@ describe('ShortcutsIssuable', () => {
});
describe('with a multi-line selection', () => {
- it('quotes the selected lines as a group', done => {
+ it('quotes the selected lines as a group', (done) => {
stubSelection(
'<p>Selected line one.</p>\n<p>Selected line two.</p>\n<p>Selected line three.</p>',
);
@@ -154,7 +154,7 @@ describe('ShortcutsIssuable', () => {
stubSelection('<p>Selected text.</p>', true);
});
- it('does not add anything to the input', done => {
+ it('does not add anything to the input', (done) => {
ShortcutsIssuable.replyWithSelectedText(true);
setImmediate(() => {
@@ -163,7 +163,7 @@ describe('ShortcutsIssuable', () => {
});
});
- it('triggers `focus`', done => {
+ it('triggers `focus`', (done) => {
const spy = jest.spyOn(document.querySelector(FORM_SELECTOR), 'focus');
ShortcutsIssuable.replyWithSelectedText(true);
@@ -179,7 +179,7 @@ describe('ShortcutsIssuable', () => {
stubSelection('<div class="md">Selected text.</div><p>Invalid selected text.</p>', true);
});
- it('only adds the valid part to the input', done => {
+ it('only adds the valid part to the input', (done) => {
ShortcutsIssuable.replyWithSelectedText(true);
setImmediate(() => {
@@ -188,7 +188,7 @@ describe('ShortcutsIssuable', () => {
});
});
- it('triggers `focus`', done => {
+ it('triggers `focus`', (done) => {
const spy = jest.spyOn(document.querySelector(FORM_SELECTOR), 'focus');
ShortcutsIssuable.replyWithSelectedText(true);
@@ -198,7 +198,7 @@ describe('ShortcutsIssuable', () => {
});
});
- it('triggers `input`', done => {
+ it('triggers `input`', (done) => {
let triggered = false;
$(FORM_SELECTOR).on('input', () => {
triggered = true;
@@ -233,7 +233,7 @@ describe('ShortcutsIssuable', () => {
});
});
- it('adds the quoted selection to the input', done => {
+ it('adds the quoted selection to the input', (done) => {
ShortcutsIssuable.replyWithSelectedText(true);
setImmediate(() => {
@@ -242,7 +242,7 @@ describe('ShortcutsIssuable', () => {
});
});
- it('triggers `focus`', done => {
+ it('triggers `focus`', (done) => {
const spy = jest.spyOn(document.querySelector(FORM_SELECTOR), 'focus');
ShortcutsIssuable.replyWithSelectedText(true);
@@ -252,7 +252,7 @@ describe('ShortcutsIssuable', () => {
});
});
- it('triggers `input`', done => {
+ it('triggers `input`', (done) => {
let triggered = false;
$(FORM_SELECTOR).on('input', () => {
triggered = true;
@@ -287,7 +287,7 @@ describe('ShortcutsIssuable', () => {
});
});
- it('does not add anything to the input', done => {
+ it('does not add anything to the input', (done) => {
ShortcutsIssuable.replyWithSelectedText(true);
setImmediate(() => {
@@ -296,7 +296,7 @@ describe('ShortcutsIssuable', () => {
});
});
- it('triggers `focus`', done => {
+ it('triggers `focus`', (done) => {
const spy = jest.spyOn(document.querySelector(FORM_SELECTOR), 'focus');
ShortcutsIssuable.replyWithSelectedText(true);
@@ -308,7 +308,7 @@ describe('ShortcutsIssuable', () => {
});
describe('with a valid selection with no text content', () => {
- it('returns the proper markdown', done => {
+ it('returns the proper markdown', (done) => {
stubSelection('<img src="https://gitlab.com/logo.png" alt="logo" />');
ShortcutsIssuable.replyWithSelectedText(true);
@@ -334,7 +334,7 @@ describe('ShortcutsIssuable', () => {
'.sidebar-source-branch button',
);
- [sidebarCollapsedBtn, sidebarExpandedBtn].forEach(btn => jest.spyOn(btn, 'click'));
+ [sidebarCollapsedBtn, sidebarExpandedBtn].forEach((btn) => jest.spyOn(btn, 'click'));
});
afterEach(() => {
diff --git a/spec/frontend/blob/balsamiq/balsamiq_viewer_spec.js b/spec/frontend/blob/balsamiq/balsamiq_viewer_spec.js
index 3b64e4910e2..09c49617bc5 100644
--- a/spec/frontend/blob/balsamiq/balsamiq_viewer_spec.js
+++ b/spec/frontend/blob/balsamiq/balsamiq_viewer_spec.js
@@ -1,7 +1,7 @@
import sqljs from 'sql.js';
+import ClassSpecHelper from 'helpers/class_spec_helper';
import axios from '~/lib/utils/axios_utils';
import BalsamiqViewer from '~/blob/balsamiq/balsamiq_viewer';
-import ClassSpecHelper from '../../helpers/class_spec_helper';
jest.mock('sql.js');
@@ -49,7 +49,7 @@ describe('BalsamiqViewer', () => {
);
});
- it('should call `renderFile` on request success', done => {
+ it('should call `renderFile` on request success', (done) => {
jest.spyOn(axios, 'get').mockReturnValue(requestSuccess);
jest.spyOn(bv, 'renderFile').mockImplementation(() => {});
@@ -61,7 +61,7 @@ describe('BalsamiqViewer', () => {
.catch(done.fail);
});
- it('should not call `renderFile` on request failure', done => {
+ it('should not call `renderFile` on request failure', (done) => {
jest.spyOn(axios, 'get').mockReturnValue(Promise.reject());
jest.spyOn(bv, 'renderFile').mockImplementation(() => {});
@@ -95,8 +95,8 @@ describe('BalsamiqViewer', () => {
balsamiqViewer.viewer = viewer;
balsamiqViewer.getPreviews.mockReturnValue(previews);
- balsamiqViewer.renderPreview.mockImplementation(preview => preview);
- viewer.appendChild.mockImplementation(containerElement => {
+ balsamiqViewer.renderPreview.mockImplementation((preview) => preview);
+ viewer.appendChild.mockImplementation((containerElement) => {
container = containerElement;
});
@@ -177,7 +177,9 @@ describe('BalsamiqViewer', () => {
database,
};
- jest.spyOn(BalsamiqViewer, 'parsePreview').mockImplementation(preview => preview.toString());
+ jest
+ .spyOn(BalsamiqViewer, 'parsePreview')
+ .mockImplementation((preview) => preview.toString());
database.exec.mockReturnValue(thumbnails);
getPreviews = BalsamiqViewer.prototype.getPreviews.call(balsamiqViewer);
diff --git a/spec/frontend/blob/components/blob_content_error_spec.js b/spec/frontend/blob/components/blob_content_error_spec.js
index 0c6d269ad05..95686be8294 100644
--- a/spec/frontend/blob/components/blob_content_error_spec.js
+++ b/spec/frontend/blob/components/blob_content_error_spec.js
@@ -32,7 +32,7 @@ describe('Blob Content Error component', () => {
viewerError: error.id,
});
expect(wrapper.text()).toContain(reason);
- options.forEach(option => {
+ options.forEach((option) => {
expect(wrapper.text()).toContain(option);
});
});
@@ -52,7 +52,7 @@ describe('Blob Content Error component', () => {
},
});
expect(wrapper.text()).toContain(reason);
- options.forEach(option => {
+ options.forEach((option) => {
expect(wrapper.text()).toContain(option);
});
});
diff --git a/spec/frontend/blob/components/blob_edit_content_spec.js b/spec/frontend/blob/components/blob_edit_content_spec.js
index dbed086a552..7de8d9236ed 100644
--- a/spec/frontend/blob/components/blob_edit_content_spec.js
+++ b/spec/frontend/blob/components/blob_edit_content_spec.js
@@ -40,7 +40,7 @@ describe('Blob Header Editing', () => {
wrapper.destroy();
});
- const triggerChangeContent = val => {
+ const triggerChangeContent = (val) => {
getValue.mockReturnValue(val);
const [cb] = onDidChangeModelContent.mock.calls[0];
diff --git a/spec/frontend/blob/components/blob_edit_header_spec.js b/spec/frontend/blob/components/blob_edit_header_spec.js
index 4355f46db7e..22e8e6d986c 100644
--- a/spec/frontend/blob/components/blob_edit_header_spec.js
+++ b/spec/frontend/blob/components/blob_edit_header_spec.js
@@ -15,7 +15,7 @@ describe('Blob Header Editing', () => {
});
};
const findDeleteButton = () =>
- wrapper.findAll(GlButton).wrappers.find(x => x.text() === 'Delete file');
+ wrapper.findAll(GlButton).wrappers.find((x) => x.text() === 'Delete file');
beforeEach(() => {
createComponent();
diff --git a/spec/frontend/blob/components/blob_header_filepath_spec.js b/spec/frontend/blob/components/blob_header_filepath_spec.js
index 067a4ae61a0..7b8b5050486 100644
--- a/spec/frontend/blob/components/blob_header_filepath_spec.js
+++ b/spec/frontend/blob/components/blob_header_filepath_spec.js
@@ -32,12 +32,7 @@ describe('Blob Header Filepath', () => {
it('renders regular name', () => {
createComponent();
- expect(
- wrapper
- .find('.js-blob-header-filepath')
- .text()
- .trim(),
- ).toBe(MockBlob.path);
+ expect(wrapper.find('.js-blob-header-filepath').text().trim()).toBe(MockBlob.path);
});
it('does not fail if the name is empty', () => {
@@ -71,12 +66,7 @@ describe('Blob Header Filepath', () => {
);
expect(wrapper.text()).toContain(slotContent);
- expect(
- wrapper
- .text()
- .trim()
- .substring(0, slotContent.length),
- ).toBe(slotContent);
+ expect(wrapper.text().trim().substring(0, slotContent.length)).toBe(slotContent);
});
});
diff --git a/spec/frontend/blob/components/blob_header_spec.js b/spec/frontend/blob/components/blob_header_spec.js
index 3e84347bee4..b3f80183f6b 100644
--- a/spec/frontend/blob/components/blob_header_spec.js
+++ b/spec/frontend/blob/components/blob_header_spec.js
@@ -75,7 +75,7 @@ describe('Blob Header Default Actions', () => {
expect(wrapper.find(DefaultActions).exists()).toBe(false);
});
- Object.keys(slots).forEach(slot => {
+ Object.keys(slots).forEach((slot) => {
it('renders the slots', () => {
const slotContent = slots[slot];
createComponent(
diff --git a/spec/frontend/blob/pipeline_tour_success_modal_spec.js b/spec/frontend/blob/pipeline_tour_success_modal_spec.js
index e8011558765..d4562019302 100644
--- a/spec/frontend/blob/pipeline_tour_success_modal_spec.js
+++ b/spec/frontend/blob/pipeline_tour_success_modal_spec.js
@@ -2,6 +2,7 @@ import { shallowMount } from '@vue/test-utils';
import Cookies from 'js-cookie';
import { GlSprintf, GlModal, GlLink } from '@gitlab/ui';
import { mockTracking, triggerEvent, unmockTracking } from 'helpers/tracking_helper';
+import { stubComponent } from 'helpers/stub_component';
import pipelineTourSuccess from '~/blob/pipeline_tour_success_modal.vue';
import modalProps from './pipeline_tour_success_mock_data';
@@ -10,13 +11,21 @@ describe('PipelineTourSuccessModal', () => {
let cookieSpy;
let trackingSpy;
+ const GlEmoji = { template: '<img/>' };
const createComponent = () => {
wrapper = shallowMount(pipelineTourSuccess, {
propsData: modalProps,
stubs: {
- GlModal,
+ GlModal: stubComponent(GlModal, {
+ template: `
+ <div>
+ <slot name="modal-title"></slot>
+ <slot></slot>
+ <slot name="modal-footer"></slot>
+ </div>`,
+ }),
GlSprintf,
- 'gl-emoji': '<img/>',
+ GlEmoji,
},
});
};
@@ -67,7 +76,7 @@ describe('PipelineTourSuccessModal', () => {
it('has expected structure', () => {
const modal = wrapper.find(GlModal);
const sprintf = modal.find(GlSprintf);
- const emoji = modal.find('img');
+ const emoji = modal.find(GlEmoji);
expect(wrapper.text()).toContain("That's it, well done!");
expect(sprintf.exists()).toBe(true);
diff --git a/spec/frontend/blob/sketch/index_spec.js b/spec/frontend/blob/sketch/index_spec.js
index cd12d5e17a8..a24e7de9037 100644
--- a/spec/frontend/blob/sketch/index_spec.js
+++ b/spec/frontend/blob/sketch/index_spec.js
@@ -11,7 +11,7 @@ describe('Sketch viewer', () => {
});
describe('with error message', () => {
- beforeEach(done => {
+ beforeEach((done) => {
jest.spyOn(SketchLoader.prototype, 'getZipFile').mockImplementation(
() =>
new Promise((resolve, reject) => {
@@ -37,7 +37,7 @@ describe('Sketch viewer', () => {
});
describe('success', () => {
- beforeEach(done => {
+ beforeEach((done) => {
const loadAsyncMock = {
files: {
'previews/preview.png': {
@@ -48,7 +48,7 @@ describe('Sketch viewer', () => {
loadAsyncMock.files['previews/preview.png'].async.mockImplementation(
() =>
- new Promise(resolve => {
+ new Promise((resolve) => {
resolve('foo');
done();
}),
diff --git a/spec/frontend/blob/utils_spec.js b/spec/frontend/blob/utils_spec.js
index ab9e325e963..fa83690c104 100644
--- a/spec/frontend/blob/utils_spec.js
+++ b/spec/frontend/blob/utils_spec.js
@@ -26,7 +26,7 @@ describe('Blob utilities', () => {
it.each([[{}], [{ blobPath, blobContent, blobGlobalId }]])(
'creates the instance with the passed parameters %s',
- extraParams => {
+ (extraParams) => {
const params = {
el: editorEl,
...extraParams,
diff --git a/spec/frontend/blob/viewer/index_spec.js b/spec/frontend/blob/viewer/index_spec.js
index a4b4044f5f9..4a5eb31602d 100644
--- a/spec/frontend/blob/viewer/index_spec.js
+++ b/spec/frontend/blob/viewer/index_spec.js
@@ -37,7 +37,7 @@ describe('Blob viewer', () => {
window.location.hash = '';
});
- it('loads source file after switching views', done => {
+ it('loads source file after switching views', (done) => {
document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
setImmediate(() => {
@@ -51,7 +51,7 @@ describe('Blob viewer', () => {
});
});
- it('loads source file when line number is in hash', done => {
+ it('loads source file when line number is in hash', (done) => {
window.location.hash = '#L1';
new BlobViewer();
@@ -117,7 +117,7 @@ describe('Blob viewer', () => {
expect(copyButton.blur).not.toHaveBeenCalled();
});
- it('enables after switching to simple view', done => {
+ it('enables after switching to simple view', (done) => {
document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
setImmediate(() => {
@@ -127,7 +127,7 @@ describe('Blob viewer', () => {
});
});
- it('updates tooltip after switching to simple view', done => {
+ it('updates tooltip after switching to simple view', (done) => {
document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
setImmediate(() => {
@@ -171,7 +171,7 @@ describe('Blob viewer', () => {
${['simple', 'rich']}
`('when view switches to $views', ({ views }) => {
beforeEach(async () => {
- views.forEach(view => blob.switchToViewer(view));
+ views.forEach((view) => blob.switchToViewer(view));
await axios.waitForAll();
});
diff --git a/spec/frontend/blob_edit/edit_blob_spec.js b/spec/frontend/blob_edit/edit_blob_spec.js
index 9637ea09a3a..7927c14d2ac 100644
--- a/spec/frontend/blob_edit/edit_blob_spec.js
+++ b/spec/frontend/blob_edit/edit_blob_spec.js
@@ -1,12 +1,12 @@
import waitForPromises from 'helpers/wait_for_promises';
import EditBlob from '~/blob_edit/edit_blob';
import EditorLite from '~/editor/editor_lite';
-import { EditorMarkdownExtension } from '~/editor/editor_markdown_ext';
-import { FileTemplateExtension } from '~/editor/editor_file_template_ext';
+import { EditorMarkdownExtension } from '~/editor/extensions/editor_markdown_ext';
+import { FileTemplateExtension } from '~/editor/extensions/editor_file_template_ext';
jest.mock('~/editor/editor_lite');
-jest.mock('~/editor/editor_markdown_ext');
-jest.mock('~/editor/editor_file_template_ext');
+jest.mock('~/editor/extensions/editor_markdown_ext');
+jest.mock('~/editor/extensions/editor_file_template_ext');
describe('Blob Editing', () => {
const useMock = jest.fn();
@@ -31,7 +31,7 @@ describe('Blob Editing', () => {
FileTemplateExtension.mockClear();
});
- const editorInst = isMarkdown => {
+ const editorInst = (isMarkdown) => {
return new EditBlob({
isMarkdown,
});
diff --git a/spec/frontend/boards/board_list_deprecated_spec.js b/spec/frontend/boards/board_list_deprecated_spec.js
new file mode 100644
index 00000000000..393d7f954b1
--- /dev/null
+++ b/spec/frontend/boards/board_list_deprecated_spec.js
@@ -0,0 +1,275 @@
+/* global List */
+/* global ListIssue */
+
+import Vue from 'vue';
+import MockAdapter from 'axios-mock-adapter';
+import waitForPromises from 'helpers/wait_for_promises';
+import axios from '~/lib/utils/axios_utils';
+import eventHub from '~/boards/eventhub';
+import BoardList from '~/boards/components/board_list_deprecated.vue';
+import '~/boards/models/issue';
+import '~/boards/models/list';
+import { listObj, boardsMockInterceptor } from './mock_data';
+import store from '~/boards/stores';
+import boardsStore from '~/boards/stores/boards_store';
+
+const createComponent = ({ done, listIssueProps = {}, componentProps = {}, listProps = {} }) => {
+ const el = document.createElement('div');
+
+ document.body.appendChild(el);
+ const mock = new MockAdapter(axios);
+ mock.onAny().reply(boardsMockInterceptor);
+ boardsStore.create();
+
+ const BoardListComp = Vue.extend(BoardList);
+ const list = new List({ ...listObj, ...listProps });
+ const issue = new ListIssue({
+ title: 'Testing',
+ id: 1,
+ iid: 1,
+ confidential: false,
+ labels: [],
+ assignees: [],
+ ...listIssueProps,
+ });
+ if (!Object.prototype.hasOwnProperty.call(listProps, 'issuesSize')) {
+ list.issuesSize = 1;
+ }
+ list.issues.push(issue);
+
+ const component = new BoardListComp({
+ el,
+ store,
+ propsData: {
+ disabled: false,
+ list,
+ issues: list.issues,
+ ...componentProps,
+ },
+ provide: {
+ groupId: null,
+ rootPath: '/',
+ },
+ }).$mount();
+
+ Vue.nextTick(() => {
+ done();
+ });
+
+ return { component, mock };
+};
+
+describe('Board list component', () => {
+ let mock;
+ let component;
+ let getIssues;
+ function generateIssues(compWrapper) {
+ for (let i = 1; i < 20; i += 1) {
+ const issue = { ...compWrapper.list.issues[0] };
+ issue.id += i;
+ compWrapper.list.issues.push(issue);
+ }
+ }
+
+ describe('When Expanded', () => {
+ beforeEach((done) => {
+ getIssues = jest.spyOn(List.prototype, 'getIssues').mockReturnValue(new Promise(() => {}));
+ ({ mock, component } = createComponent({ done }));
+ });
+
+ afterEach(() => {
+ mock.restore();
+ component.$destroy();
+ });
+
+ it('loads first page of issues', () => {
+ return waitForPromises().then(() => {
+ expect(getIssues).toHaveBeenCalled();
+ });
+ });
+
+ it('renders component', () => {
+ expect(component.$el.classList.contains('board-list-component')).toBe(true);
+ });
+
+ it('renders loading icon', () => {
+ component.list.loading = true;
+
+ return Vue.nextTick().then(() => {
+ expect(component.$el.querySelector('.board-list-loading')).not.toBeNull();
+ });
+ });
+
+ it('renders issues', () => {
+ expect(component.$el.querySelectorAll('.board-card').length).toBe(1);
+ });
+
+ it('sets data attribute with issue id', () => {
+ expect(component.$el.querySelector('.board-card').getAttribute('data-issue-id')).toBe('1');
+ });
+
+ it('shows new issue form', () => {
+ component.toggleForm();
+
+ return Vue.nextTick().then(() => {
+ expect(component.$el.querySelector('.board-new-issue-form')).not.toBeNull();
+
+ expect(component.$el.querySelector('.is-smaller')).not.toBeNull();
+ });
+ });
+
+ it('shows new issue form after eventhub event', () => {
+ eventHub.$emit(`toggle-issue-form-${component.list.id}`);
+
+ return Vue.nextTick().then(() => {
+ expect(component.$el.querySelector('.board-new-issue-form')).not.toBeNull();
+
+ expect(component.$el.querySelector('.is-smaller')).not.toBeNull();
+ });
+ });
+
+ it('does not show new issue form for closed list', () => {
+ component.list.type = 'closed';
+ component.toggleForm();
+
+ return Vue.nextTick().then(() => {
+ expect(component.$el.querySelector('.board-new-issue-form')).toBeNull();
+ });
+ });
+
+ it('shows count list item', () => {
+ component.showCount = true;
+
+ return Vue.nextTick().then(() => {
+ expect(component.$el.querySelector('.board-list-count')).not.toBeNull();
+
+ expect(component.$el.querySelector('.board-list-count').textContent.trim()).toBe(
+ 'Showing all issues',
+ );
+ });
+ });
+
+ it('sets data attribute with invalid id', () => {
+ component.showCount = true;
+
+ return Vue.nextTick().then(() => {
+ expect(component.$el.querySelector('.board-list-count').getAttribute('data-issue-id')).toBe(
+ '-1',
+ );
+ });
+ });
+
+ it('shows how many more issues to load', () => {
+ component.showCount = true;
+ component.list.issuesSize = 20;
+
+ return Vue.nextTick().then(() => {
+ expect(component.$el.querySelector('.board-list-count').textContent.trim()).toBe(
+ 'Showing 1 of 20 issues',
+ );
+ });
+ });
+
+ it('loads more issues after scrolling', () => {
+ jest.spyOn(component.list, 'nextPage').mockImplementation(() => {});
+ generateIssues(component);
+ component.$refs.list.dispatchEvent(new Event('scroll'));
+
+ return waitForPromises().then(() => {
+ expect(component.list.nextPage).toHaveBeenCalled();
+ });
+ });
+
+ it('does not load issues if already loading', () => {
+ component.list.nextPage = jest
+ .spyOn(component.list, 'nextPage')
+ .mockReturnValue(new Promise(() => {}));
+
+ component.onScroll();
+ component.onScroll();
+
+ return waitForPromises().then(() => {
+ expect(component.list.nextPage).toHaveBeenCalledTimes(1);
+ });
+ });
+
+ it('shows loading more spinner', () => {
+ component.showCount = true;
+ component.list.loadingMore = true;
+
+ return Vue.nextTick().then(() => {
+ expect(component.$el.querySelector('.board-list-count .gl-spinner')).not.toBeNull();
+ });
+ });
+ });
+
+ describe('When Collapsed', () => {
+ beforeEach((done) => {
+ getIssues = jest.spyOn(List.prototype, 'getIssues').mockReturnValue(new Promise(() => {}));
+ ({ mock, component } = createComponent({
+ done,
+ listProps: { type: 'closed', collapsed: true, issuesSize: 50 },
+ }));
+ generateIssues(component);
+ component.scrollHeight = jest.spyOn(component, 'scrollHeight').mockReturnValue(0);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ component.$destroy();
+ });
+
+ it('does not load all issues', () => {
+ return waitForPromises().then(() => {
+ // Initial getIssues from list constructor
+ expect(getIssues).toHaveBeenCalledTimes(1);
+ });
+ });
+ });
+
+ describe('max issue count warning', () => {
+ beforeEach((done) => {
+ ({ mock, component } = createComponent({
+ done,
+ listProps: { type: 'closed', collapsed: true, issuesSize: 50 },
+ }));
+ });
+
+ afterEach(() => {
+ mock.restore();
+ component.$destroy();
+ });
+
+ describe('when issue count exceeds max issue count', () => {
+ it('sets background to bg-danger-100', () => {
+ component.list.issuesSize = 4;
+ component.list.maxIssueCount = 3;
+
+ return Vue.nextTick().then(() => {
+ expect(component.$el.querySelector('.bg-danger-100')).not.toBeNull();
+ });
+ });
+ });
+
+ describe('when list issue count does NOT exceed list max issue count', () => {
+ it('does not set background to bg-danger-100', () => {
+ component.list.issuesSize = 2;
+ component.list.maxIssueCount = 3;
+
+ return Vue.nextTick().then(() => {
+ expect(component.$el.querySelector('.bg-danger-100')).toBeNull();
+ });
+ });
+ });
+
+ describe('when list max issue count is 0', () => {
+ it('does not set background to bg-danger-100', () => {
+ component.list.maxIssueCount = 0;
+
+ return Vue.nextTick().then(() => {
+ expect(component.$el.querySelector('.bg-danger-100')).toBeNull();
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/boards/board_list_helper.js b/spec/frontend/boards/board_list_helper.js
index 80d7a72151d..f82b1f7ed5c 100644
--- a/spec/frontend/boards/board_list_helper.js
+++ b/spec/frontend/boards/board_list_helper.js
@@ -5,7 +5,7 @@ import MockAdapter from 'axios-mock-adapter';
import Vue from 'vue';
import Sortable from 'sortablejs';
import axios from '~/lib/utils/axios_utils';
-import BoardList from '~/boards/components/board_list.vue';
+import BoardList from '~/boards/components/board_list_deprecated.vue';
import '~/boards/models/issue';
import '~/boards/models/list';
diff --git a/spec/frontend/boards/board_list_new_spec.js b/spec/frontend/boards/board_list_new_spec.js
deleted file mode 100644
index 96b03ed927e..00000000000
--- a/spec/frontend/boards/board_list_new_spec.js
+++ /dev/null
@@ -1,268 +0,0 @@
-import Vuex from 'vuex';
-import { useFakeRequestAnimationFrame } from 'helpers/fake_request_animation_frame';
-import { createLocalVue, mount } from '@vue/test-utils';
-import eventHub from '~/boards/eventhub';
-import BoardList from '~/boards/components/board_list_new.vue';
-import BoardCard from '~/boards/components/board_card.vue';
-import '~/boards/models/list';
-import { mockList, mockIssuesByListId, issues, mockIssues } from './mock_data';
-import defaultState from '~/boards/stores/state';
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
-
-const actions = {
- fetchIssuesForList: jest.fn(),
-};
-
-const createStore = (state = defaultState) => {
- return new Vuex.Store({
- state,
- actions,
- });
-};
-
-const createComponent = ({
- listIssueProps = {},
- componentProps = {},
- listProps = {},
- state = {},
-} = {}) => {
- const store = createStore({
- issuesByListId: mockIssuesByListId,
- issues,
- pageInfoByListId: {
- 'gid://gitlab/List/1': { hasNextPage: true },
- 'gid://gitlab/List/2': {},
- },
- listsFlags: {
- 'gid://gitlab/List/1': {},
- 'gid://gitlab/List/2': {},
- },
- ...state,
- });
-
- const list = {
- ...mockList,
- ...listProps,
- };
- const issue = {
- title: 'Testing',
- id: 1,
- iid: 1,
- confidential: false,
- labels: [],
- assignees: [],
- ...listIssueProps,
- };
- if (!Object.prototype.hasOwnProperty.call(listProps, 'issuesCount')) {
- list.issuesCount = 1;
- }
-
- const component = mount(BoardList, {
- localVue,
- propsData: {
- disabled: false,
- list,
- issues: [issue],
- canAdminList: true,
- ...componentProps,
- },
- store,
- provide: {
- groupId: null,
- rootPath: '/',
- weightFeatureAvailable: false,
- boardWeight: null,
- },
- });
-
- return component;
-};
-
-describe('Board list component', () => {
- let wrapper;
- const findByTestId = testId => wrapper.find(`[data-testid="${testId}"]`);
- useFakeRequestAnimationFrame();
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- describe('When Expanded', () => {
- beforeEach(() => {
- wrapper = createComponent();
- });
-
- it('renders component', () => {
- expect(wrapper.find('.board-list-component').exists()).toBe(true);
- });
-
- it('renders loading icon', () => {
- wrapper = createComponent({
- state: { listsFlags: { 'gid://gitlab/List/1': { isLoading: true } } },
- });
-
- expect(findByTestId('board_list_loading').exists()).toBe(true);
- });
-
- it('renders issues', () => {
- expect(wrapper.findAll(BoardCard).length).toBe(1);
- });
-
- it('sets data attribute with issue id', () => {
- expect(wrapper.find('.board-card').attributes('data-issue-id')).toBe('1');
- });
-
- it('shows new issue form', async () => {
- wrapper.vm.toggleForm();
-
- await wrapper.vm.$nextTick();
- expect(wrapper.find('.board-new-issue-form').exists()).toBe(true);
- });
-
- it('shows new issue form after eventhub event', async () => {
- eventHub.$emit(`toggle-issue-form-${wrapper.vm.list.id}`);
-
- await wrapper.vm.$nextTick();
- expect(wrapper.find('.board-new-issue-form').exists()).toBe(true);
- });
-
- it('does not show new issue form for closed list', () => {
- wrapper.setProps({ list: { type: 'closed' } });
- wrapper.vm.toggleForm();
-
- expect(wrapper.find('.board-new-issue-form').exists()).toBe(false);
- });
-
- it('shows count list item', async () => {
- wrapper.vm.showCount = true;
-
- await wrapper.vm.$nextTick();
- expect(wrapper.find('.board-list-count').exists()).toBe(true);
-
- expect(wrapper.find('.board-list-count').text()).toBe('Showing all issues');
- });
-
- it('sets data attribute with invalid id', async () => {
- wrapper.vm.showCount = true;
-
- await wrapper.vm.$nextTick();
- expect(wrapper.find('.board-list-count').attributes('data-issue-id')).toBe('-1');
- });
-
- it('shows how many more issues to load', async () => {
- wrapper.vm.showCount = true;
- wrapper.setProps({ list: { issuesCount: 20 } });
-
- await wrapper.vm.$nextTick();
- expect(wrapper.find('.board-list-count').text()).toBe('Showing 1 of 20 issues');
- });
- });
-
- describe('load more issues', () => {
- beforeEach(() => {
- wrapper = createComponent({
- listProps: { issuesCount: 25 },
- });
- });
-
- it('loads more issues after scrolling', () => {
- wrapper.vm.listRef.dispatchEvent(new Event('scroll'));
-
- expect(actions.fetchIssuesForList).toHaveBeenCalled();
- });
-
- it('does not load issues if already loading', () => {
- wrapper = createComponent({
- state: { listsFlags: { 'gid://gitlab/List/1': { isLoadingMore: true } } },
- });
- wrapper.vm.listRef.dispatchEvent(new Event('scroll'));
-
- expect(actions.fetchIssuesForList).not.toHaveBeenCalled();
- });
-
- it('shows loading more spinner', async () => {
- wrapper = createComponent({
- state: { listsFlags: { 'gid://gitlab/List/1': { isLoadingMore: true } } },
- });
- wrapper.vm.showCount = true;
-
- await wrapper.vm.$nextTick();
- expect(wrapper.find('.board-list-count .gl-spinner').exists()).toBe(true);
- });
- });
-
- describe('max issue count warning', () => {
- beforeEach(() => {
- wrapper = createComponent({
- listProps: { issuesCount: 50 },
- });
- });
-
- describe('when issue count exceeds max issue count', () => {
- it('sets background to bg-danger-100', async () => {
- wrapper.setProps({ list: { issuesCount: 4, maxIssueCount: 3 } });
-
- await wrapper.vm.$nextTick();
- expect(wrapper.find('.bg-danger-100').exists()).toBe(true);
- });
- });
-
- describe('when list issue count does NOT exceed list max issue count', () => {
- it('does not sets background to bg-danger-100', () => {
- wrapper.setProps({ list: { issuesCount: 2, maxIssueCount: 3 } });
-
- expect(wrapper.find('.bg-danger-100').exists()).toBe(false);
- });
- });
-
- describe('when list max issue count is 0', () => {
- it('does not sets background to bg-danger-100', () => {
- wrapper.setProps({ list: { maxIssueCount: 0 } });
-
- expect(wrapper.find('.bg-danger-100').exists()).toBe(false);
- });
- });
- });
-
- describe('drag & drop issue', () => {
- beforeEach(() => {
- wrapper = createComponent();
- });
-
- describe('handleDragOnStart', () => {
- it('adds a class `is-dragging` to document body', () => {
- expect(document.body.classList.contains('is-dragging')).toBe(false);
-
- findByTestId('tree-root-wrapper').vm.$emit('start');
-
- expect(document.body.classList.contains('is-dragging')).toBe(true);
- });
- });
-
- describe('handleDragOnEnd', () => {
- it('removes class `is-dragging` from document body', () => {
- jest.spyOn(wrapper.vm, 'moveIssue').mockImplementation(() => {});
- document.body.classList.add('is-dragging');
-
- findByTestId('tree-root-wrapper').vm.$emit('end', {
- oldIndex: 1,
- newIndex: 0,
- item: {
- dataset: {
- issueId: mockIssues[0].id,
- issueIid: mockIssues[0].iid,
- issuePath: mockIssues[0].referencePath,
- },
- },
- to: { children: [], dataset: { listId: 'gid://gitlab/List/1' } },
- from: { dataset: { listId: 'gid://gitlab/List/2' } },
- });
-
- expect(document.body.classList.contains('is-dragging')).toBe(false);
- });
- });
- });
-});
diff --git a/spec/frontend/boards/board_list_spec.js b/spec/frontend/boards/board_list_spec.js
index 0fe3c88f518..1b62f25044e 100644
--- a/spec/frontend/boards/board_list_spec.js
+++ b/spec/frontend/boards/board_list_spec.js
@@ -1,29 +1,51 @@
-/* global List */
-/* global ListIssue */
-
-import Vue from 'vue';
-import MockAdapter from 'axios-mock-adapter';
-import axios from '~/lib/utils/axios_utils';
+import Vuex from 'vuex';
+import { useFakeRequestAnimationFrame } from 'helpers/fake_request_animation_frame';
+import { createLocalVue, mount } from '@vue/test-utils';
import eventHub from '~/boards/eventhub';
-import waitForPromises from '../helpers/wait_for_promises';
import BoardList from '~/boards/components/board_list.vue';
-import '~/boards/models/issue';
-import '~/boards/models/list';
-import { listObj, boardsMockInterceptor } from './mock_data';
-import store from '~/boards/stores';
-import boardsStore from '~/boards/stores/boards_store';
-
-const createComponent = ({ done, listIssueProps = {}, componentProps = {}, listProps = {} }) => {
- const el = document.createElement('div');
-
- document.body.appendChild(el);
- const mock = new MockAdapter(axios);
- mock.onAny().reply(boardsMockInterceptor);
- boardsStore.create();
-
- const BoardListComp = Vue.extend(BoardList);
- const list = new List({ ...listObj, ...listProps });
- const issue = new ListIssue({
+import BoardCard from '~/boards/components/board_card.vue';
+import { mockList, mockIssuesByListId, issues, mockIssues } from './mock_data';
+import defaultState from '~/boards/stores/state';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+const actions = {
+ fetchIssuesForList: jest.fn(),
+};
+
+const createStore = (state = defaultState) => {
+ return new Vuex.Store({
+ state,
+ actions,
+ });
+};
+
+const createComponent = ({
+ listIssueProps = {},
+ componentProps = {},
+ listProps = {},
+ state = {},
+} = {}) => {
+ const store = createStore({
+ issuesByListId: mockIssuesByListId,
+ issues,
+ pageInfoByListId: {
+ 'gid://gitlab/List/1': { hasNextPage: true },
+ 'gid://gitlab/List/2': {},
+ },
+ listsFlags: {
+ 'gid://gitlab/List/1': {},
+ 'gid://gitlab/List/2': {},
+ },
+ ...state,
+ });
+
+ const list = {
+ ...mockList,
+ ...listProps,
+ };
+ const issue = {
title: 'Testing',
id: 1,
iid: 1,
@@ -31,244 +53,214 @@ const createComponent = ({ done, listIssueProps = {}, componentProps = {}, listP
labels: [],
assignees: [],
...listIssueProps,
- });
- if (!Object.prototype.hasOwnProperty.call(listProps, 'issuesSize')) {
- list.issuesSize = 1;
+ };
+ if (!Object.prototype.hasOwnProperty.call(listProps, 'issuesCount')) {
+ list.issuesCount = 1;
}
- list.issues.push(issue);
- const component = new BoardListComp({
- el,
- store,
+ const component = mount(BoardList, {
+ localVue,
propsData: {
disabled: false,
list,
- issues: list.issues,
+ issues: [issue],
+ canAdminList: true,
...componentProps,
},
+ store,
provide: {
groupId: null,
rootPath: '/',
+ weightFeatureAvailable: false,
+ boardWeight: null,
},
- }).$mount();
-
- Vue.nextTick(() => {
- done();
});
- return { component, mock };
+ return component;
};
describe('Board list component', () => {
- let mock;
- let component;
- let getIssues;
- function generateIssues(compWrapper) {
- for (let i = 1; i < 20; i += 1) {
- const issue = { ...compWrapper.list.issues[0] };
- issue.id += i;
- compWrapper.list.issues.push(issue);
- }
- }
+ let wrapper;
+ const findByTestId = (testId) => wrapper.find(`[data-testid="${testId}"]`);
+ useFakeRequestAnimationFrame();
- describe('When Expanded', () => {
- beforeEach(done => {
- getIssues = jest.spyOn(List.prototype, 'getIssues').mockReturnValue(new Promise(() => {}));
- ({ mock, component } = createComponent({ done }));
- });
-
- afterEach(() => {
- mock.restore();
- component.$destroy();
- });
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
- it('loads first page of issues', () => {
- return waitForPromises().then(() => {
- expect(getIssues).toHaveBeenCalled();
- });
+ describe('When Expanded', () => {
+ beforeEach(() => {
+ wrapper = createComponent();
});
it('renders component', () => {
- expect(component.$el.classList.contains('board-list-component')).toBe(true);
+ expect(wrapper.find('.board-list-component').exists()).toBe(true);
});
it('renders loading icon', () => {
- component.list.loading = true;
-
- return Vue.nextTick().then(() => {
- expect(component.$el.querySelector('.board-list-loading')).not.toBeNull();
+ wrapper = createComponent({
+ state: { listsFlags: { 'gid://gitlab/List/1': { isLoading: true } } },
});
+
+ expect(findByTestId('board_list_loading').exists()).toBe(true);
});
it('renders issues', () => {
- expect(component.$el.querySelectorAll('.board-card').length).toBe(1);
+ expect(wrapper.findAll(BoardCard).length).toBe(1);
});
it('sets data attribute with issue id', () => {
- expect(component.$el.querySelector('.board-card').getAttribute('data-issue-id')).toBe('1');
+ expect(wrapper.find('.board-card').attributes('data-issue-id')).toBe('1');
});
- it('shows new issue form', () => {
- component.toggleForm();
-
- return Vue.nextTick().then(() => {
- expect(component.$el.querySelector('.board-new-issue-form')).not.toBeNull();
+ it('shows new issue form', async () => {
+ wrapper.vm.toggleForm();
- expect(component.$el.querySelector('.is-smaller')).not.toBeNull();
- });
+ await wrapper.vm.$nextTick();
+ expect(wrapper.find('.board-new-issue-form').exists()).toBe(true);
});
- it('shows new issue form after eventhub event', () => {
- eventHub.$emit(`toggle-issue-form-${component.list.id}`);
-
- return Vue.nextTick().then(() => {
- expect(component.$el.querySelector('.board-new-issue-form')).not.toBeNull();
+ it('shows new issue form after eventhub event', async () => {
+ eventHub.$emit(`toggle-issue-form-${wrapper.vm.list.id}`);
- expect(component.$el.querySelector('.is-smaller')).not.toBeNull();
- });
+ await wrapper.vm.$nextTick();
+ expect(wrapper.find('.board-new-issue-form').exists()).toBe(true);
});
it('does not show new issue form for closed list', () => {
- component.list.type = 'closed';
- component.toggleForm();
+ wrapper.setProps({ list: { type: 'closed' } });
+ wrapper.vm.toggleForm();
- return Vue.nextTick().then(() => {
- expect(component.$el.querySelector('.board-new-issue-form')).toBeNull();
- });
+ expect(wrapper.find('.board-new-issue-form').exists()).toBe(false);
});
- it('shows count list item', () => {
- component.showCount = true;
+ it('shows count list item', async () => {
+ wrapper.vm.showCount = true;
- return Vue.nextTick().then(() => {
- expect(component.$el.querySelector('.board-list-count')).not.toBeNull();
+ await wrapper.vm.$nextTick();
+ expect(wrapper.find('.board-list-count').exists()).toBe(true);
- expect(component.$el.querySelector('.board-list-count').textContent.trim()).toBe(
- 'Showing all issues',
- );
- });
+ expect(wrapper.find('.board-list-count').text()).toBe('Showing all issues');
});
- it('sets data attribute with invalid id', () => {
- component.showCount = true;
+ it('sets data attribute with invalid id', async () => {
+ wrapper.vm.showCount = true;
- return Vue.nextTick().then(() => {
- expect(component.$el.querySelector('.board-list-count').getAttribute('data-issue-id')).toBe(
- '-1',
- );
- });
+ await wrapper.vm.$nextTick();
+ expect(wrapper.find('.board-list-count').attributes('data-issue-id')).toBe('-1');
});
- it('shows how many more issues to load', () => {
- component.showCount = true;
- component.list.issuesSize = 20;
+ it('shows how many more issues to load', async () => {
+ wrapper.vm.showCount = true;
+ wrapper.setProps({ list: { issuesCount: 20 } });
- return Vue.nextTick().then(() => {
- expect(component.$el.querySelector('.board-list-count').textContent.trim()).toBe(
- 'Showing 1 of 20 issues',
- );
- });
+ await wrapper.vm.$nextTick();
+ expect(wrapper.find('.board-list-count').text()).toBe('Showing 1 of 20 issues');
});
+ });
- it('loads more issues after scrolling', () => {
- jest.spyOn(component.list, 'nextPage').mockImplementation(() => {});
- generateIssues(component);
- component.$refs.list.dispatchEvent(new Event('scroll'));
-
- return waitForPromises().then(() => {
- expect(component.list.nextPage).toHaveBeenCalled();
+ describe('load more issues', () => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ listProps: { issuesCount: 25 },
});
});
- it('does not load issues if already loading', () => {
- component.list.nextPage = jest
- .spyOn(component.list, 'nextPage')
- .mockReturnValue(new Promise(() => {}));
-
- component.onScroll();
- component.onScroll();
+ it('loads more issues after scrolling', () => {
+ wrapper.vm.listRef.dispatchEvent(new Event('scroll'));
- return waitForPromises().then(() => {
- expect(component.list.nextPage).toHaveBeenCalledTimes(1);
- });
+ expect(actions.fetchIssuesForList).toHaveBeenCalled();
});
- it('shows loading more spinner', () => {
- component.showCount = true;
- component.list.loadingMore = true;
-
- return Vue.nextTick().then(() => {
- expect(component.$el.querySelector('.board-list-count .gl-spinner')).not.toBeNull();
+ it('does not load issues if already loading', () => {
+ wrapper = createComponent({
+ state: { listsFlags: { 'gid://gitlab/List/1': { isLoadingMore: true } } },
});
- });
- });
+ wrapper.vm.listRef.dispatchEvent(new Event('scroll'));
- describe('When Collapsed', () => {
- beforeEach(done => {
- getIssues = jest.spyOn(List.prototype, 'getIssues').mockReturnValue(new Promise(() => {}));
- ({ mock, component } = createComponent({
- done,
- listProps: { type: 'closed', collapsed: true, issuesSize: 50 },
- }));
- generateIssues(component);
- component.scrollHeight = jest.spyOn(component, 'scrollHeight').mockReturnValue(0);
+ expect(actions.fetchIssuesForList).not.toHaveBeenCalled();
});
- afterEach(() => {
- mock.restore();
- component.$destroy();
- });
-
- it('does not load all issues', () => {
- return waitForPromises().then(() => {
- // Initial getIssues from list constructor
- expect(getIssues).toHaveBeenCalledTimes(1);
+ it('shows loading more spinner', async () => {
+ wrapper = createComponent({
+ state: { listsFlags: { 'gid://gitlab/List/1': { isLoadingMore: true } } },
});
+ wrapper.vm.showCount = true;
+
+ await wrapper.vm.$nextTick();
+ expect(wrapper.find('.board-list-count .gl-spinner').exists()).toBe(true);
});
});
describe('max issue count warning', () => {
- beforeEach(done => {
- ({ mock, component } = createComponent({
- done,
- listProps: { type: 'closed', collapsed: true, issuesSize: 50 },
- }));
- });
-
- afterEach(() => {
- mock.restore();
- component.$destroy();
+ beforeEach(() => {
+ wrapper = createComponent({
+ listProps: { issuesCount: 50 },
+ });
});
describe('when issue count exceeds max issue count', () => {
- it('sets background to bg-danger-100', () => {
- component.list.issuesSize = 4;
- component.list.maxIssueCount = 3;
+ it('sets background to bg-danger-100', async () => {
+ wrapper.setProps({ list: { issuesCount: 4, maxIssueCount: 3 } });
- return Vue.nextTick().then(() => {
- expect(component.$el.querySelector('.bg-danger-100')).not.toBeNull();
- });
+ await wrapper.vm.$nextTick();
+ expect(wrapper.find('.bg-danger-100').exists()).toBe(true);
});
});
describe('when list issue count does NOT exceed list max issue count', () => {
it('does not set background to bg-danger-100', () => {
- component.list.issuesSize = 2;
- component.list.maxIssueCount = 3;
+ wrapper.setProps({ list: { issuesCount: 2, maxIssueCount: 3 } });
- return Vue.nextTick().then(() => {
- expect(component.$el.querySelector('.bg-danger-100')).toBeNull();
- });
+ expect(wrapper.find('.bg-danger-100').exists()).toBe(false);
});
});
describe('when list max issue count is 0', () => {
it('does not set background to bg-danger-100', () => {
- component.list.maxIssueCount = 0;
+ wrapper.setProps({ list: { maxIssueCount: 0 } });
+
+ expect(wrapper.find('.bg-danger-100').exists()).toBe(false);
+ });
+ });
+ });
- return Vue.nextTick().then(() => {
- expect(component.$el.querySelector('.bg-danger-100')).toBeNull();
+ describe('drag & drop issue', () => {
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ describe('handleDragOnStart', () => {
+ it('adds a class `is-dragging` to document body', () => {
+ expect(document.body.classList.contains('is-dragging')).toBe(false);
+
+ findByTestId('tree-root-wrapper').vm.$emit('start');
+
+ expect(document.body.classList.contains('is-dragging')).toBe(true);
+ });
+ });
+
+ describe('handleDragOnEnd', () => {
+ it('removes class `is-dragging` from document body', () => {
+ jest.spyOn(wrapper.vm, 'moveIssue').mockImplementation(() => {});
+ document.body.classList.add('is-dragging');
+
+ findByTestId('tree-root-wrapper').vm.$emit('end', {
+ oldIndex: 1,
+ newIndex: 0,
+ item: {
+ dataset: {
+ issueId: mockIssues[0].id,
+ issueIid: mockIssues[0].iid,
+ issuePath: mockIssues[0].referencePath,
+ },
+ },
+ to: { children: [], dataset: { listId: 'gid://gitlab/List/1' } },
+ from: { dataset: { listId: 'gid://gitlab/List/2' } },
});
+
+ expect(document.body.classList.contains('is-dragging')).toBe(false);
});
});
});
diff --git a/spec/frontend/boards/board_new_issue_spec.js b/spec/frontend/boards/board_new_issue_deprecated_spec.js
index 3eebfeca965..8236b468189 100644
--- a/spec/frontend/boards/board_new_issue_spec.js
+++ b/spec/frontend/boards/board_new_issue_deprecated_spec.js
@@ -4,7 +4,7 @@ import Vue from 'vue';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
-import boardNewIssue from '~/boards/components/board_new_issue.vue';
+import boardNewIssue from '~/boards/components/board_new_issue_deprecated.vue';
import boardsStore from '~/boards/stores/boards_store';
import '~/boards/models/list';
diff --git a/spec/frontend/boards/boards_store_spec.js b/spec/frontend/boards/boards_store_spec.js
index c89f6d22ef2..f1d249ff069 100644
--- a/spec/frontend/boards/boards_store_spec.js
+++ b/spec/frontend/boards/boards_store_spec.js
@@ -77,7 +77,7 @@ describe('boardsStore', () => {
beforeEach(() => {
requestSpy = jest.fn();
- axiosMock.onPost(endpoints.listsEndpoint).replyOnce(config => requestSpy(config));
+ axiosMock.onPost(endpoints.listsEndpoint).replyOnce((config) => requestSpy(config));
});
it('makes a request to create a list', () => {
@@ -114,7 +114,7 @@ describe('boardsStore', () => {
beforeEach(() => {
requestSpy = jest.fn();
- axiosMock.onPut(`${endpoints.listsEndpoint}/${id}`).replyOnce(config => requestSpy(config));
+ axiosMock.onPut(`${endpoints.listsEndpoint}/${id}`).replyOnce((config) => requestSpy(config));
});
it('makes a request to update a list position', () => {
@@ -148,7 +148,7 @@ describe('boardsStore', () => {
requestSpy = jest.fn();
axiosMock
.onDelete(`${endpoints.listsEndpoint}/${id}`)
- .replyOnce(config => requestSpy(config));
+ .replyOnce((config) => requestSpy(config));
});
it('makes a request to delete a list', () => {
@@ -269,7 +269,7 @@ describe('boardsStore', () => {
requestSpy = jest.fn();
axiosMock
.onPut(`${urlRoot}/-/boards/${boardId}/issues/${id}`)
- .replyOnce(config => requestSpy(config));
+ .replyOnce((config) => requestSpy(config));
});
it('makes a request to move an issue between lists', () => {
@@ -308,7 +308,7 @@ describe('boardsStore', () => {
beforeEach(() => {
requestSpy = jest.fn();
- axiosMock.onPost(url).replyOnce(config => requestSpy(config));
+ axiosMock.onPost(url).replyOnce((config) => requestSpy(config));
});
it('makes a request to create a new issue', () => {
@@ -378,7 +378,7 @@ describe('boardsStore', () => {
beforeEach(() => {
requestSpy = jest.fn();
- axiosMock.onPost(endpoints.bulkUpdatePath).replyOnce(config => requestSpy(config));
+ axiosMock.onPost(endpoints.bulkUpdatePath).replyOnce((config) => requestSpy(config));
});
it('makes a request to create a list', () => {
@@ -456,24 +456,6 @@ describe('boardsStore', () => {
});
});
- describe('deleteBoard', () => {
- const id = 'capsized';
- const url = `${endpoints.boardsEndpoint}/${id}.json`;
-
- it('makes a request to delete a boards', () => {
- axiosMock.onDelete(url).replyOnce(200, dummyResponse);
- const expectedResponse = expect.objectContaining({ data: dummyResponse });
-
- return expect(boardsStore.deleteBoard({ id })).resolves.toEqual(expectedResponse);
- });
-
- it('fails for error response', () => {
- axiosMock.onDelete(url).replyOnce(500);
-
- return expect(boardsStore.deleteBoard({ id })).rejects.toThrow();
- });
- });
-
describe('when created', () => {
beforeEach(() => {
setupDefaultResponses();
@@ -603,7 +585,7 @@ describe('boardsStore', () => {
expect(boardsStore.state.lists.length).toBe(1);
- boardsStore.removeList(listObj.id, 'label');
+ boardsStore.removeList(listObj.id);
expect(boardsStore.state.lists.length).toBe(0);
});
diff --git a/spec/frontend/boards/components/board_assignee_dropdown_spec.js b/spec/frontend/boards/components/board_assignee_dropdown_spec.js
index bbdcc707f09..e52c14f9783 100644
--- a/spec/frontend/boards/components/board_assignee_dropdown_spec.js
+++ b/spec/frontend/boards/components/board_assignee_dropdown_spec.js
@@ -6,7 +6,7 @@ import {
GlSearchBoxByType,
GlLoadingIcon,
} from '@gitlab/ui';
-import createMockApollo from 'jest/helpers/mock_apollo_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
import VueApollo from 'vue-apollo';
import BoardAssigneeDropdown from '~/boards/components/board_assignee_dropdown.vue';
import IssuableAssignees from '~/sidebar/components/assignees/issuable_assignees.vue';
@@ -93,8 +93,8 @@ describe('BoardCardAssigneeDropdown', () => {
await wrapper.vm.$nextTick();
};
- const findByText = text => {
- return wrapper.findAll(GlDropdownItem).wrappers.find(node => node.text().indexOf(text) === 0);
+ const findByText = (text) => {
+ return wrapper.findAll(GlDropdownItem).wrappers.find((node) => node.text().indexOf(text) === 0);
};
const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
@@ -102,7 +102,7 @@ describe('BoardCardAssigneeDropdown', () => {
beforeEach(() => {
store.state.activeId = '1';
store.state.issues = {
- '1': {
+ 1: {
iid,
assignees: [{ username: activeIssueName, name: activeIssueName, id: activeIssueName }],
},
@@ -145,12 +145,7 @@ describe('BoardCardAssigneeDropdown', () => {
it('renders gl-avatar-labeled in gl-avatar-link', () => {
const item = findByText('hello');
- expect(
- item
- .find(GlAvatarLink)
- .find(GlAvatarLabeled)
- .exists(),
- ).toBe(true);
+ expect(item.find(GlAvatarLink).find(GlAvatarLabeled).exists()).toBe(true);
});
});
diff --git a/spec/frontend/boards/components/board_card_layout_spec.js b/spec/frontend/boards/components/board_card_layout_spec.js
index 80f649a1a96..d8633871e8d 100644
--- a/spec/frontend/boards/components/board_card_layout_spec.js
+++ b/spec/frontend/boards/components/board_card_layout_spec.js
@@ -1,7 +1,8 @@
/* global List */
/* global ListLabel */
-import { shallowMount } from '@vue/test-utils';
+import Vuex from 'vuex';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import waitForPromises from 'helpers/wait_for_promises';
@@ -10,20 +11,35 @@ import axios from '~/lib/utils/axios_utils';
import '~/boards/models/label';
import '~/boards/models/assignee';
import '~/boards/models/list';
-import store from '~/boards/stores';
+import boardsVuexStore from '~/boards/stores';
import boardsStore from '~/boards/stores/boards_store';
import BoardCardLayout from '~/boards/components/board_card_layout.vue';
import issueCardInner from '~/boards/components/issue_card_inner.vue';
import { listObj, boardsMockInterceptor, setMockEndpoints } from '../mock_data';
+import { ISSUABLE } from '~/boards/constants';
+
describe('Board card layout', () => {
let wrapper;
let mock;
let list;
+ let store;
+
+ const localVue = createLocalVue();
+ localVue.use(Vuex);
+
+ const createStore = ({ getters = {}, actions = {} } = {}) => {
+ store = new Vuex.Store({
+ ...boardsVuexStore,
+ actions,
+ getters,
+ });
+ };
// this mountComponent helper must be called after the root beforeEach because it depends on list being initialized
- const mountComponent = propsData => {
+ const mountComponent = ({ propsData = {}, provide = {} } = {}) => {
wrapper = shallowMount(BoardCardLayout, {
+ localVue,
stubs: {
issueCardInner,
},
@@ -38,6 +54,8 @@ describe('Board card layout', () => {
provide: {
groupId: null,
rootPath: '/',
+ scopedLabelsAvailable: false,
+ ...provide,
},
});
};
@@ -74,6 +92,7 @@ describe('Board card layout', () => {
describe('mouse events', () => {
it('sets showDetail to true on mousedown', async () => {
+ createStore();
mountComponent();
wrapper.trigger('mousedown');
@@ -83,6 +102,7 @@ describe('Board card layout', () => {
});
it('sets showDetail to false on mousemove', async () => {
+ createStore();
mountComponent();
wrapper.trigger('mousedown');
await wrapper.vm.$nextTick();
@@ -91,5 +111,49 @@ describe('Board card layout', () => {
await wrapper.vm.$nextTick();
expect(wrapper.vm.showDetail).toBe(false);
});
+
+ it("calls 'setActiveId' when 'graphqlBoardLists' feature flag is turned on", async () => {
+ const setActiveId = jest.fn();
+ createStore({
+ actions: {
+ setActiveId,
+ },
+ });
+ mountComponent({
+ provide: {
+ glFeatures: { graphqlBoardLists: true },
+ },
+ });
+
+ wrapper.trigger('mouseup');
+ await wrapper.vm.$nextTick();
+
+ expect(setActiveId).toHaveBeenCalledTimes(1);
+ expect(setActiveId).toHaveBeenCalledWith(expect.any(Object), {
+ id: list.issues[0].id,
+ sidebarType: ISSUABLE,
+ });
+ });
+
+ it("calls 'setActiveId' when epic swimlanes is active", async () => {
+ const setActiveId = jest.fn();
+ const isSwimlanesOn = () => true;
+ createStore({
+ getters: { isSwimlanesOn },
+ actions: {
+ setActiveId,
+ },
+ });
+ mountComponent();
+
+ wrapper.trigger('mouseup');
+ await wrapper.vm.$nextTick();
+
+ expect(setActiveId).toHaveBeenCalledTimes(1);
+ expect(setActiveId).toHaveBeenCalledWith(expect.any(Object), {
+ id: list.issues[0].id,
+ sidebarType: ISSUABLE,
+ });
+ });
});
});
diff --git a/spec/frontend/boards/components/board_card_spec.js b/spec/frontend/boards/components/board_card_spec.js
index 5e23c781eae..1084009caad 100644
--- a/spec/frontend/boards/components/board_card_spec.js
+++ b/spec/frontend/boards/components/board_card_spec.js
@@ -29,7 +29,7 @@ describe('BoardCard', () => {
const findUserAvatarLink = () => wrapper.find(userAvatarLink);
// this mountComponent helper must be called after the root beforeEach because it depends on list being initialized
- const mountComponent = propsData => {
+ const mountComponent = (propsData) => {
wrapper = mount(BoardCard, {
stubs: {
issueCardInner,
@@ -45,6 +45,7 @@ describe('BoardCard', () => {
provide: {
groupId: null,
rootPath: '/',
+ scopedLabelsAvailable: false,
},
});
};
@@ -133,9 +134,7 @@ describe('BoardCard', () => {
it('does not set detail issue if link is clicked', () => {
mountComponent();
- findIssueCardInner()
- .find('a')
- .trigger('mouseup');
+ findIssueCardInner().find('a').trigger('mouseup');
expect(boardsStore.detail.issue).toEqual({});
});
diff --git a/spec/frontend/boards/components/board_column_new_spec.js b/spec/frontend/boards/components/board_column_deprecated_spec.js
index 81c0e60f931..a703caca4eb 100644
--- a/spec/frontend/boards/components/board_column_new_spec.js
+++ b/spec/frontend/boards/components/board_column_deprecated_spec.js
@@ -1,40 +1,65 @@
+import Vue from 'vue';
import { shallowMount } from '@vue/test-utils';
+import AxiosMockAdapter from 'axios-mock-adapter';
+import { TEST_HOST } from 'helpers/test_constants';
import { listObj } from 'jest/boards/mock_data';
-import BoardColumn from '~/boards/components/board_column_new.vue';
+import Board from '~/boards/components/board_column_deprecated.vue';
+import List from '~/boards/models/list';
import { ListType } from '~/boards/constants';
-import { createStore } from '~/boards/stores';
+import axios from '~/lib/utils/axios_utils';
describe('Board Column Component', () => {
let wrapper;
- let store;
+ let axiosMock;
+
+ beforeEach(() => {
+ window.gon = {};
+ axiosMock = new AxiosMockAdapter(axios);
+ axiosMock.onGet(`${TEST_HOST}/lists/1/issues`).reply(200, { issues: [] });
+ });
afterEach(() => {
+ axiosMock.restore();
+
wrapper.destroy();
- wrapper = null;
+
+ localStorage.clear();
});
- const createComponent = ({ listType = ListType.backlog, collapsed = false } = {}) => {
+ const createComponent = ({
+ listType = ListType.backlog,
+ collapsed = false,
+ withLocalStorage = true,
+ } = {}) => {
const boardId = '1';
const listMock = {
...listObj,
- listType,
+ list_type: listType,
collapsed,
};
if (listType === ListType.assignee) {
delete listMock.label;
- listMock.assignee = {};
+ listMock.user = {};
}
- store = createStore();
+ // Making List reactive
+ const list = Vue.observable(new List(listMock));
- wrapper = shallowMount(BoardColumn, {
- store,
+ if (withLocalStorage) {
+ localStorage.setItem(
+ `boards.${boardId}.${list.type}.${list.id}.expanded`,
+ (!collapsed).toString(),
+ );
+ }
+
+ wrapper = shallowMount(Board, {
propsData: {
+ boardId,
disabled: false,
- list: listMock,
+ list,
},
provide: {
boardId,
@@ -57,7 +82,7 @@ describe('Board Column Component', () => {
it('has class is-collapsed when list is collapsed', () => {
createComponent({ collapsed: false });
- expect(isCollapsed()).toBe(false);
+ expect(wrapper.vm.list.isExpanded).toBe(true);
});
it('does not have class is-collapsed when list is expanded', () => {
diff --git a/spec/frontend/boards/components/board_column_spec.js b/spec/frontend/boards/components/board_column_spec.js
index ba11225676b..1dcdad2b492 100644
--- a/spec/frontend/boards/components/board_column_spec.js
+++ b/spec/frontend/boards/components/board_column_spec.js
@@ -1,65 +1,40 @@
-import Vue from 'vue';
import { shallowMount } from '@vue/test-utils';
-import AxiosMockAdapter from 'axios-mock-adapter';
-import { TEST_HOST } from 'helpers/test_constants';
import { listObj } from 'jest/boards/mock_data';
-import Board from '~/boards/components/board_column.vue';
-import List from '~/boards/models/list';
+import BoardColumn from '~/boards/components/board_column.vue';
import { ListType } from '~/boards/constants';
-import axios from '~/lib/utils/axios_utils';
+import { createStore } from '~/boards/stores';
describe('Board Column Component', () => {
let wrapper;
- let axiosMock;
-
- beforeEach(() => {
- window.gon = {};
- axiosMock = new AxiosMockAdapter(axios);
- axiosMock.onGet(`${TEST_HOST}/lists/1/issues`).reply(200, { issues: [] });
- });
+ let store;
afterEach(() => {
- axiosMock.restore();
-
wrapper.destroy();
-
- localStorage.clear();
+ wrapper = null;
});
- const createComponent = ({
- listType = ListType.backlog,
- collapsed = false,
- withLocalStorage = true,
- } = {}) => {
+ const createComponent = ({ listType = ListType.backlog, collapsed = false } = {}) => {
const boardId = '1';
const listMock = {
...listObj,
- list_type: listType,
+ listType,
collapsed,
};
if (listType === ListType.assignee) {
delete listMock.label;
- listMock.user = {};
+ listMock.assignee = {};
}
- // Making List reactive
- const list = Vue.observable(new List(listMock));
+ store = createStore();
- if (withLocalStorage) {
- localStorage.setItem(
- `boards.${boardId}.${list.type}.${list.id}.expanded`,
- (!collapsed).toString(),
- );
- }
-
- wrapper = shallowMount(Board, {
+ wrapper = shallowMount(BoardColumn, {
+ store,
propsData: {
- boardId,
disabled: false,
- list,
+ list: listMock,
},
provide: {
boardId,
@@ -82,7 +57,7 @@ describe('Board Column Component', () => {
it('has class is-collapsed when list is collapsed', () => {
createComponent({ collapsed: false });
- expect(wrapper.vm.list.isExpanded).toBe(true);
+ expect(isCollapsed()).toBe(false);
});
it('does not have class is-collapsed when list is expanded', () => {
diff --git a/spec/frontend/boards/components/board_configuration_options_spec.js b/spec/frontend/boards/components/board_configuration_options_spec.js
index e9a1cb6a4e8..d9614c254e2 100644
--- a/spec/frontend/boards/components/board_configuration_options_spec.js
+++ b/spec/frontend/boards/components/board_configuration_options_spec.js
@@ -3,38 +3,30 @@ import BoardConfigurationOptions from '~/boards/components/board_configuration_o
describe('BoardConfigurationOptions', () => {
let wrapper;
- const board = { hide_backlog_list: false, hide_closed_list: false };
const defaultProps = {
- currentBoard: board,
- board,
- isNewForm: false,
+ hideBacklogList: false,
+ hideClosedList: false,
};
- const createComponent = () => {
+ const createComponent = (props = {}) => {
wrapper = shallowMount(BoardConfigurationOptions, {
- propsData: { ...defaultProps },
+ propsData: { ...defaultProps, ...props },
});
};
- beforeEach(() => {
- createComponent();
- });
-
afterEach(() => {
wrapper.destroy();
});
- const backlogListCheckbox = el => el.find('[data-testid="backlog-list-checkbox"]');
- const closedListCheckbox = el => el.find('[data-testid="closed-list-checkbox"]');
+ const backlogListCheckbox = () => wrapper.find('[data-testid="backlog-list-checkbox"]');
+ const closedListCheckbox = () => wrapper.find('[data-testid="closed-list-checkbox"]');
const checkboxAssert = (backlogCheckbox, closedCheckbox) => {
- expect(backlogListCheckbox(wrapper).attributes('checked')).toEqual(
+ expect(backlogListCheckbox().attributes('checked')).toEqual(
backlogCheckbox ? undefined : 'true',
);
- expect(closedListCheckbox(wrapper).attributes('checked')).toEqual(
- closedCheckbox ? undefined : 'true',
- );
+ expect(closedListCheckbox().attributes('checked')).toEqual(closedCheckbox ? undefined : 'true');
};
it.each`
@@ -45,15 +37,28 @@ describe('BoardConfigurationOptions', () => {
${false} | ${false}
`(
'renders two checkbox when one is $backlogCheckboxValue and other is $closedCheckboxValue',
- async ({ backlogCheckboxValue, closedCheckboxValue }) => {
- await wrapper.setData({
+ ({ backlogCheckboxValue, closedCheckboxValue }) => {
+ createComponent({
hideBacklogList: backlogCheckboxValue,
hideClosedList: closedCheckboxValue,
});
-
- return wrapper.vm.$nextTick().then(() => {
- checkboxAssert(backlogCheckboxValue, closedCheckboxValue);
- });
+ checkboxAssert(backlogCheckboxValue, closedCheckboxValue);
},
);
+
+ it('emits a correct value on backlog checkbox change', () => {
+ createComponent();
+
+ backlogListCheckbox().vm.$emit('change');
+
+ expect(wrapper.emitted('update:hideBacklogList')).toEqual([[true]]);
+ });
+
+ it('emits a correct value on closed checkbox change', () => {
+ createComponent();
+
+ closedListCheckbox().vm.$emit('change');
+
+ expect(wrapper.emitted('update:hideClosedList')).toEqual([[true]]);
+ });
});
diff --git a/spec/frontend/boards/components/board_content_spec.js b/spec/frontend/boards/components/board_content_spec.js
index 291013c561e..98be02d7dbf 100644
--- a/spec/frontend/boards/components/board_content_spec.js
+++ b/spec/frontend/boards/components/board_content_spec.js
@@ -4,7 +4,7 @@ import { GlAlert } from '@gitlab/ui';
import Draggable from 'vuedraggable';
import EpicsSwimlanes from 'ee_component/boards/components/epics_swimlanes.vue';
import getters from 'ee_else_ce/boards/stores/getters';
-import BoardColumn from '~/boards/components/board_column.vue';
+import BoardColumnDeprecated from '~/boards/components/board_column_deprecated.vue';
import { mockLists, mockListsWithModel } from '../mock_data';
import BoardContent from '~/boards/components/board_content.vue';
@@ -17,6 +17,7 @@ const actions = {
describe('BoardContent', () => {
let wrapper;
+ window.gon = {};
const defaultState = {
isShowingEpicsSwimlanes: false,
@@ -56,10 +57,12 @@ describe('BoardContent', () => {
wrapper.destroy();
});
- it('renders a BoardColumn component per list', () => {
+ it('renders a BoardColumnDeprecated component per list', () => {
createComponent();
- expect(wrapper.findAll(BoardColumn)).toHaveLength(mockLists.length);
+ expect(wrapper.findAllComponents(BoardColumnDeprecated)).toHaveLength(
+ mockListsWithModel.length,
+ );
});
it('does not display EpicsSwimlanes component', () => {
@@ -70,6 +73,13 @@ describe('BoardContent', () => {
});
describe('graphqlBoardLists feature flag enabled', () => {
+ beforeEach(() => {
+ createComponent({ graphqlBoardListsEnabled: true });
+ gon.features = {
+ graphqlBoardLists: true,
+ };
+ });
+
describe('can admin list', () => {
beforeEach(() => {
createComponent({ graphqlBoardListsEnabled: true, props: { canAdminList: true } });
@@ -85,7 +95,7 @@ describe('BoardContent', () => {
createComponent({ graphqlBoardListsEnabled: true, props: { canAdminList: false } });
});
- it('renders draggable component', () => {
+ it('does not render draggable component', () => {
expect(wrapper.find(Draggable).exists()).toBe(false);
});
});
diff --git a/spec/frontend/boards/components/board_form_spec.js b/spec/frontend/boards/components/board_form_spec.js
index 3b15cbb6b7e..c34987a55de 100644
--- a/spec/frontend/boards/components/board_form_spec.js
+++ b/spec/frontend/boards/components/board_form_spec.js
@@ -1,20 +1,22 @@
import { shallowMount } from '@vue/test-utils';
-import AxiosMockAdapter from 'axios-mock-adapter';
-import { TEST_HOST } from 'jest/helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
import { GlModal } from '@gitlab/ui';
import waitForPromises from 'helpers/wait_for_promises';
-import axios from '~/lib/utils/axios_utils';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
import { visitUrl } from '~/lib/utils/url_utility';
import boardsStore from '~/boards/stores/boards_store';
import BoardForm from '~/boards/components/board_form.vue';
-import BoardConfigurationOptions from '~/boards/components/board_configuration_options.vue';
-import createBoardMutation from '~/boards/graphql/board.mutation.graphql';
+import updateBoardMutation from '~/boards/graphql/board_update.mutation.graphql';
+import createBoardMutation from '~/boards/graphql/board_create.mutation.graphql';
+import destroyBoardMutation from '~/boards/graphql/board_destroy.mutation.graphql';
jest.mock('~/lib/utils/url_utility', () => ({
visitUrl: jest.fn().mockName('visitUrlMock'),
+ stripFinalUrlSegment: jest.requireActual('~/lib/utils/url_utility').stripFinalUrlSegment,
}));
+jest.mock('~/flash');
const currentBoard = {
id: 1,
@@ -28,18 +30,6 @@ const currentBoard = {
hide_closed_list: false,
};
-const boardDefaults = {
- id: false,
- name: '',
- labels: [],
- milestone_id: undefined,
- assignee: {},
- assignee_id: undefined,
- weight: null,
- hide_backlog_list: false,
- hide_closed_list: false,
-};
-
const defaultProps = {
canAdminBoard: false,
labelsPath: `${TEST_HOST}/labels/path`,
@@ -47,22 +37,15 @@ const defaultProps = {
currentBoard,
};
-const endpoints = {
- boardsEndpoint: 'test-endpoint',
-};
-
-const mutate = jest.fn().mockResolvedValue({});
-
describe('BoardForm', () => {
let wrapper;
- let axiosMock;
+ let mutate;
const findModal = () => wrapper.find(GlModal);
const findModalActionPrimary = () => findModal().props('actionPrimary');
const findForm = () => wrapper.find('[data-testid="board-form"]');
const findFormWrapper = () => wrapper.find('[data-testid="board-form-wrapper"]');
const findDeleteConfirmation = () => wrapper.find('[data-testid="delete-confirmation-message"]');
- const findConfigurationOptions = () => wrapper.find(BoardConfigurationOptions);
const findInput = () => wrapper.find('#board-new-name');
const createComponent = (props, data) => {
@@ -74,26 +57,26 @@ describe('BoardForm', () => {
};
},
provide: {
- endpoints,
+ rootPath: 'root',
},
mocks: {
$apollo: {
mutate,
},
},
- attachToDocument: true,
+ attachTo: document.body,
});
};
beforeEach(() => {
- axiosMock = new AxiosMockAdapter(axios);
+ delete window.location;
});
afterEach(() => {
wrapper.destroy();
wrapper = null;
- axiosMock.restore();
boardsStore.state.currentPage = null;
+ mutate = null;
});
describe('when user can not admin the board', () => {
@@ -145,7 +128,7 @@ describe('BoardForm', () => {
});
it('clears the form', () => {
- expect(findConfigurationOptions().props('board')).toEqual(boardDefaults);
+ expect(findInput().element.value).toBe('');
});
it('shows a correct title about creating a board', () => {
@@ -164,16 +147,21 @@ describe('BoardForm', () => {
it('renders form wrapper', () => {
expect(findFormWrapper().exists()).toBe(true);
});
-
- it('passes a true isNewForm prop to BoardConfigurationOptions component', () => {
- expect(findConfigurationOptions().props('isNewForm')).toBe(true);
- });
});
describe('when submitting a create event', () => {
+ const fillForm = () => {
+ findInput().value = 'Test name';
+ findInput().trigger('input');
+ findInput().trigger('keyup.enter', { metaKey: true });
+ };
+
beforeEach(() => {
- const url = `${endpoints.boardsEndpoint}.json`;
- axiosMock.onPost(url).reply(200, { id: '2', board_path: 'new path' });
+ mutate = jest.fn().mockResolvedValue({
+ data: {
+ createBoard: { board: { id: 'gid://gitlab/Board/123', webPath: 'test-path' } },
+ },
+ });
});
it('does not call API if board name is empty', async () => {
@@ -185,28 +173,37 @@ describe('BoardForm', () => {
expect(mutate).not.toHaveBeenCalled();
});
- it('calls REST and GraphQL API and redirects to correct page', async () => {
+    it('calls the correct GraphQL mutation and redirects to the correct page from an existing board', async () => {
createComponent({ canAdminBoard: true });
-
- findInput().value = 'Test name';
- findInput().trigger('input');
- findInput().trigger('keyup.enter', { metaKey: true });
+ fillForm();
await waitForPromises();
- expect(axiosMock.history.post[0].data).toBe(
- JSON.stringify({ board: { ...boardDefaults, name: 'test', label_ids: [''] } }),
- );
-
expect(mutate).toHaveBeenCalledWith({
mutation: createBoardMutation,
variables: {
- id: 'gid://gitlab/Board/2',
+ input: expect.objectContaining({
+ name: 'test',
+ }),
},
});
await waitForPromises();
- expect(visitUrl).toHaveBeenCalledWith('new path');
+ expect(visitUrl).toHaveBeenCalledWith('test-path');
+ });
+
+ it('shows an error flash if GraphQL mutation fails', async () => {
+ mutate = jest.fn().mockRejectedValue('Houston, we have a problem');
+ createComponent({ canAdminBoard: true });
+ fillForm();
+
+ await waitForPromises();
+
+ expect(mutate).toHaveBeenCalled();
+
+ await waitForPromises();
+ expect(visitUrl).not.toHaveBeenCalled();
+ expect(createFlash).toHaveBeenCalled();
});
});
});
@@ -222,7 +219,7 @@ describe('BoardForm', () => {
});
it('clears the form', () => {
- expect(findConfigurationOptions().props('board')).toEqual(currentBoard);
+ expect(findInput().element.value).toEqual(currentBoard.name);
});
it('shows a correct title about creating a board', () => {
@@ -241,36 +238,121 @@ describe('BoardForm', () => {
it('renders form wrapper', () => {
expect(findFormWrapper().exists()).toBe(true);
});
+ });
- it('passes a false isNewForm prop to BoardConfigurationOptions component', () => {
- expect(findConfigurationOptions().props('isNewForm')).toBe(false);
+ it('calls GraphQL mutation with correct parameters when issues are not grouped', async () => {
+ mutate = jest.fn().mockResolvedValue({
+ data: {
+ updateBoard: { board: { id: 'gid://gitlab/Board/321', webPath: 'test-path' } },
+ },
});
- });
+ window.location = new URL('https://test/boards/1');
+ createComponent({ canAdminBoard: true });
- describe('when submitting an update event', () => {
- beforeEach(() => {
- const url = endpoints.boardsEndpoint;
- axiosMock.onPut(url).reply(200, { board_path: 'new path' });
+ findInput().trigger('keyup.enter', { metaKey: true });
+
+ await waitForPromises();
+
+ expect(mutate).toHaveBeenCalledWith({
+ mutation: updateBoardMutation,
+ variables: {
+ input: expect.objectContaining({
+ id: `gid://gitlab/Board/${currentBoard.id}`,
+ }),
+ },
});
- it('calls REST and GraphQL API with correct parameters', async () => {
- createComponent({ canAdminBoard: true });
+ await waitForPromises();
+ expect(visitUrl).toHaveBeenCalledWith('test-path');
+ });
- findInput().trigger('keyup.enter', { metaKey: true });
+ it('calls GraphQL mutation with correct parameters when issues are grouped by epic', async () => {
+ mutate = jest.fn().mockResolvedValue({
+ data: {
+ updateBoard: { board: { id: 'gid://gitlab/Board/321', webPath: 'test-path' } },
+ },
+ });
+ window.location = new URL('https://test/boards/1?group_by=epic');
+ createComponent({ canAdminBoard: true });
- await waitForPromises();
+ findInput().trigger('keyup.enter', { metaKey: true });
- expect(axiosMock.history.put[0].data).toBe(
- JSON.stringify({ board: { ...currentBoard, label_ids: [''] } }),
- );
+ await waitForPromises();
- expect(mutate).toHaveBeenCalledWith({
- mutation: createBoardMutation,
- variables: {
+ expect(mutate).toHaveBeenCalledWith({
+ mutation: updateBoardMutation,
+ variables: {
+ input: expect.objectContaining({
id: `gid://gitlab/Board/${currentBoard.id}`,
- },
- });
+ }),
+ },
});
+
+ await waitForPromises();
+ expect(visitUrl).toHaveBeenCalledWith('test-path?group_by=epic');
+ });
+
+ it('shows an error flash if GraphQL mutation fails', async () => {
+ mutate = jest.fn().mockRejectedValue('Houston, we have a problem');
+ createComponent({ canAdminBoard: true });
+ findInput().trigger('keyup.enter', { metaKey: true });
+
+ await waitForPromises();
+
+ expect(mutate).toHaveBeenCalled();
+
+ await waitForPromises();
+ expect(visitUrl).not.toHaveBeenCalled();
+ expect(createFlash).toHaveBeenCalled();
+ });
+ });
+
+ describe('when deleting a board', () => {
+ beforeEach(() => {
+ boardsStore.state.currentPage = 'delete';
+ });
+
+ it('passes correct primary action text and variant', () => {
+ createComponent({ canAdminBoard: true });
+ expect(findModalActionPrimary().text).toBe('Delete');
+ expect(findModalActionPrimary().attributes[0].variant).toBe('danger');
+ });
+
+ it('renders delete confirmation message', () => {
+ createComponent({ canAdminBoard: true });
+ expect(findDeleteConfirmation().exists()).toBe(true);
+ });
+
+    it('calls the correct GraphQL mutation and redirects to the correct page after deleting the board', async () => {
+ mutate = jest.fn().mockResolvedValue({});
+ createComponent({ canAdminBoard: true });
+ findModal().vm.$emit('primary');
+
+ await waitForPromises();
+
+ expect(mutate).toHaveBeenCalledWith({
+ mutation: destroyBoardMutation,
+ variables: {
+ id: 'gid://gitlab/Board/1',
+ },
+ });
+
+ await waitForPromises();
+ expect(visitUrl).toHaveBeenCalledWith('root');
+ });
+
+ it('shows an error flash if GraphQL mutation fails', async () => {
+ mutate = jest.fn().mockRejectedValue('Houston, we have a problem');
+ createComponent({ canAdminBoard: true });
+ findModal().vm.$emit('primary');
+
+ await waitForPromises();
+
+ expect(mutate).toHaveBeenCalled();
+
+ await waitForPromises();
+ expect(visitUrl).not.toHaveBeenCalled();
+ expect(createFlash).toHaveBeenCalled();
});
});
});
diff --git a/spec/frontend/boards/components/board_list_header_new_spec.js b/spec/frontend/boards/components/board_list_header_deprecated_spec.js
index 7428dfae83f..6207724e6a9 100644
--- a/spec/frontend/boards/components/board_list_header_new_spec.js
+++ b/spec/frontend/boards/components/board_list_header_deprecated_spec.js
@@ -1,23 +1,28 @@
-import Vuex from 'vuex';
-import { shallowMount, createLocalVue } from '@vue/test-utils';
-
-import { mockLabelList } from 'jest/boards/mock_data';
-import BoardListHeader from '~/boards/components/board_list_header_new.vue';
+import Vue from 'vue';
+import { shallowMount } from '@vue/test-utils';
+import AxiosMockAdapter from 'axios-mock-adapter';
+
+import { TEST_HOST } from 'helpers/test_constants';
+import { listObj } from 'jest/boards/mock_data';
+import BoardListHeader from '~/boards/components/board_list_header_deprecated.vue';
+import List from '~/boards/models/list';
import { ListType } from '~/boards/constants';
-
-const localVue = createLocalVue();
-
-localVue.use(Vuex);
+import axios from '~/lib/utils/axios_utils';
describe('Board List Header Component', () => {
let wrapper;
- let store;
+ let axiosMock;
- const updateListSpy = jest.fn();
+ beforeEach(() => {
+ window.gon = {};
+ axiosMock = new AxiosMockAdapter(axios);
+ axiosMock.onGet(`${TEST_HOST}/lists/1/issues`).reply(200, { issues: [] });
+ });
afterEach(() => {
+ axiosMock.restore();
+
wrapper.destroy();
- wrapper = null;
localStorage.clear();
});
@@ -26,76 +31,65 @@ describe('Board List Header Component', () => {
listType = ListType.backlog,
collapsed = false,
withLocalStorage = true,
- currentUserId = null,
} = {}) => {
const boardId = '1';
const listMock = {
- ...mockLabelList,
- listType,
+ ...listObj,
+ list_type: listType,
collapsed,
};
if (listType === ListType.assignee) {
delete listMock.label;
- listMock.assignee = {};
+ listMock.user = {};
}
+ // Making List reactive
+ const list = Vue.observable(new List(listMock));
+
if (withLocalStorage) {
localStorage.setItem(
- `boards.${boardId}.${listMock.listType}.${listMock.id}.expanded`,
+ `boards.${boardId}.${list.type}.${list.id}.expanded`,
(!collapsed).toString(),
);
}
- store = new Vuex.Store({
- state: {},
- actions: { updateList: updateListSpy },
- getters: {},
- });
-
wrapper = shallowMount(BoardListHeader, {
- store,
- localVue,
propsData: {
disabled: false,
- list: listMock,
+ list,
},
provide: {
boardId,
- weightFeatureAvailable: false,
- currentUserId,
},
});
};
- const isCollapsed = () => wrapper.vm.list.collapsed;
- const isExpanded = () => !isCollapsed;
+ const isCollapsed = () => !wrapper.props().list.isExpanded;
+ const isExpanded = () => wrapper.vm.list.isExpanded;
const findAddIssueButton = () => wrapper.find({ ref: 'newIssueBtn' });
- const findTitle = () => wrapper.find('.board-title');
const findCaret = () => wrapper.find('.board-title-caret');
describe('Add issue button', () => {
const hasNoAddButton = [ListType.closed];
const hasAddButton = [ListType.backlog, ListType.label, ListType.milestone, ListType.assignee];
- it.each(hasNoAddButton)('does not render when List Type is `%s`', listType => {
+ it.each(hasNoAddButton)('does not render when List Type is `%s`', (listType) => {
createComponent({ listType });
expect(findAddIssueButton().exists()).toBe(false);
});
- it.each(hasAddButton)('does render when List Type is `%s`', listType => {
+ it.each(hasAddButton)('does render when List Type is `%s`', (listType) => {
createComponent({ listType });
expect(findAddIssueButton().exists()).toBe(true);
});
it('has a test for each list type', () => {
- createComponent();
-
- Object.values(ListType).forEach(value => {
+ Object.values(ListType).forEach((value) => {
expect([...hasAddButton, ...hasNoAddButton]).toContain(value);
});
});
@@ -108,80 +102,64 @@ describe('Board List Header Component', () => {
});
describe('expanding / collapsing the column', () => {
- it('does not collapse when clicking the header', async () => {
+ it('does not collapse when clicking the header', () => {
createComponent();
expect(isCollapsed()).toBe(false);
-
wrapper.find('[data-testid="board-list-header"]').trigger('click');
- await wrapper.vm.$nextTick();
-
- expect(isCollapsed()).toBe(false);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(isCollapsed()).toBe(false);
+ });
});
- it('collapses expanded Column when clicking the collapse icon', async () => {
+ it('collapses expanded Column when clicking the collapse icon', () => {
createComponent();
- expect(isCollapsed()).toBe(false);
-
+ expect(isExpanded()).toBe(true);
findCaret().vm.$emit('click');
- await wrapper.vm.$nextTick();
-
- expect(isCollapsed()).toBe(true);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(isCollapsed()).toBe(true);
+ });
});
- it('expands collapsed Column when clicking the expand icon', async () => {
+ it('expands collapsed Column when clicking the expand icon', () => {
createComponent({ collapsed: true });
expect(isCollapsed()).toBe(true);
-
findCaret().vm.$emit('click');
- await wrapper.vm.$nextTick();
-
- expect(isCollapsed()).toBe(false);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(isCollapsed()).toBe(false);
+ });
});
- it("when logged in it calls list update and doesn't set localStorage", async () => {
- createComponent({ withLocalStorage: false, currentUserId: 1 });
-
- findCaret().vm.$emit('click');
- await wrapper.vm.$nextTick();
-
- expect(updateListSpy).toHaveBeenCalledTimes(1);
- expect(localStorage.getItem(`${wrapper.vm.uniqueKey}.expanded`)).toBe(null);
- });
+ it("when logged in it calls list update and doesn't set localStorage", () => {
+ jest.spyOn(List.prototype, 'update');
+ window.gon.current_user_id = 1;
- it("when logged out it doesn't call list update and sets localStorage", async () => {
- createComponent();
+ createComponent({ withLocalStorage: false });
findCaret().vm.$emit('click');
- await wrapper.vm.$nextTick();
- expect(updateListSpy).not.toHaveBeenCalled();
- expect(localStorage.getItem(`${wrapper.vm.uniqueKey}.expanded`)).toBe(String(isExpanded()));
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.vm.list.update).toHaveBeenCalledTimes(1);
+ expect(localStorage.getItem(`${wrapper.vm.uniqueKey}.expanded`)).toBe(null);
+ });
});
- });
- describe('user can drag', () => {
- const cannotDragList = [ListType.backlog, ListType.closed];
- const canDragList = [ListType.label, ListType.milestone, ListType.assignee];
+ it("when logged out it doesn't call list update and sets localStorage", () => {
+ jest.spyOn(List.prototype, 'update');
- it.each(cannotDragList)(
- 'does not have user-can-drag-class so user cannot drag list',
- listType => {
- createComponent({ listType });
-
- expect(findTitle().classes()).not.toContain('user-can-drag');
- },
- );
+ createComponent();
- it.each(canDragList)('has user-can-drag-class so user can drag list', listType => {
- createComponent({ listType });
+ findCaret().vm.$emit('click');
- expect(findTitle().classes()).toContain('user-can-drag');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.vm.list.update).not.toHaveBeenCalled();
+ expect(localStorage.getItem(`${wrapper.vm.uniqueKey}.expanded`)).toBe(String(isExpanded()));
+ });
});
});
});
diff --git a/spec/frontend/boards/components/board_list_header_spec.js b/spec/frontend/boards/components/board_list_header_spec.js
index 656a503bb86..357d05ced02 100644
--- a/spec/frontend/boards/components/board_list_header_spec.js
+++ b/spec/frontend/boards/components/board_list_header_spec.js
@@ -1,28 +1,23 @@
-import Vue from 'vue';
-import { shallowMount } from '@vue/test-utils';
-import AxiosMockAdapter from 'axios-mock-adapter';
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
-import { TEST_HOST } from 'helpers/test_constants';
-import { listObj } from 'jest/boards/mock_data';
+import { mockLabelList } from 'jest/boards/mock_data';
import BoardListHeader from '~/boards/components/board_list_header.vue';
-import List from '~/boards/models/list';
import { ListType } from '~/boards/constants';
-import axios from '~/lib/utils/axios_utils';
+
+const localVue = createLocalVue();
+
+localVue.use(Vuex);
describe('Board List Header Component', () => {
let wrapper;
- let axiosMock;
+ let store;
- beforeEach(() => {
- window.gon = {};
- axiosMock = new AxiosMockAdapter(axios);
- axiosMock.onGet(`${TEST_HOST}/lists/1/issues`).reply(200, { issues: [] });
- });
+ const updateListSpy = jest.fn();
afterEach(() => {
- axiosMock.restore();
-
wrapper.destroy();
+ wrapper = null;
localStorage.clear();
});
@@ -31,65 +26,76 @@ describe('Board List Header Component', () => {
listType = ListType.backlog,
collapsed = false,
withLocalStorage = true,
+ currentUserId = null,
} = {}) => {
const boardId = '1';
const listMock = {
- ...listObj,
- list_type: listType,
+ ...mockLabelList,
+ listType,
collapsed,
};
if (listType === ListType.assignee) {
delete listMock.label;
- listMock.user = {};
+ listMock.assignee = {};
}
- // Making List reactive
- const list = Vue.observable(new List(listMock));
-
if (withLocalStorage) {
localStorage.setItem(
- `boards.${boardId}.${list.type}.${list.id}.expanded`,
+ `boards.${boardId}.${listMock.listType}.${listMock.id}.expanded`,
(!collapsed).toString(),
);
}
+ store = new Vuex.Store({
+ state: {},
+ actions: { updateList: updateListSpy },
+ getters: {},
+ });
+
wrapper = shallowMount(BoardListHeader, {
+ store,
+ localVue,
propsData: {
disabled: false,
- list,
+ list: listMock,
},
provide: {
boardId,
+ weightFeatureAvailable: false,
+ currentUserId,
},
});
};
- const isCollapsed = () => !wrapper.props().list.isExpanded;
- const isExpanded = () => wrapper.vm.list.isExpanded;
+ const isCollapsed = () => wrapper.vm.list.collapsed;
+ const isExpanded = () => !isCollapsed;
const findAddIssueButton = () => wrapper.find({ ref: 'newIssueBtn' });
+ const findTitle = () => wrapper.find('.board-title');
const findCaret = () => wrapper.find('.board-title-caret');
describe('Add issue button', () => {
const hasNoAddButton = [ListType.closed];
const hasAddButton = [ListType.backlog, ListType.label, ListType.milestone, ListType.assignee];
- it.each(hasNoAddButton)('does not render when List Type is `%s`', listType => {
+ it.each(hasNoAddButton)('does not render when List Type is `%s`', (listType) => {
createComponent({ listType });
expect(findAddIssueButton().exists()).toBe(false);
});
- it.each(hasAddButton)('does render when List Type is `%s`', listType => {
+ it.each(hasAddButton)('does render when List Type is `%s`', (listType) => {
createComponent({ listType });
expect(findAddIssueButton().exists()).toBe(true);
});
it('has a test for each list type', () => {
- Object.values(ListType).forEach(value => {
+ createComponent();
+
+ Object.values(ListType).forEach((value) => {
expect([...hasAddButton, ...hasNoAddButton]).toContain(value);
});
});
@@ -102,64 +108,80 @@ describe('Board List Header Component', () => {
});
describe('expanding / collapsing the column', () => {
- it('does not collapse when clicking the header', () => {
+ it('does not collapse when clicking the header', async () => {
createComponent();
expect(isCollapsed()).toBe(false);
+
wrapper.find('[data-testid="board-list-header"]').trigger('click');
- return wrapper.vm.$nextTick().then(() => {
- expect(isCollapsed()).toBe(false);
- });
+ await wrapper.vm.$nextTick();
+
+ expect(isCollapsed()).toBe(false);
});
- it('collapses expanded Column when clicking the collapse icon', () => {
+ it('collapses expanded Column when clicking the collapse icon', async () => {
createComponent();
- expect(isExpanded()).toBe(true);
+ expect(isCollapsed()).toBe(false);
+
findCaret().vm.$emit('click');
- return wrapper.vm.$nextTick().then(() => {
- expect(isCollapsed()).toBe(true);
- });
+ await wrapper.vm.$nextTick();
+
+ expect(isCollapsed()).toBe(true);
});
- it('expands collapsed Column when clicking the expand icon', () => {
+ it('expands collapsed Column when clicking the expand icon', async () => {
createComponent({ collapsed: true });
expect(isCollapsed()).toBe(true);
+
findCaret().vm.$emit('click');
- return wrapper.vm.$nextTick().then(() => {
- expect(isCollapsed()).toBe(false);
- });
- });
+ await wrapper.vm.$nextTick();
- it("when logged in it calls list update and doesn't set localStorage", () => {
- jest.spyOn(List.prototype, 'update');
- window.gon.current_user_id = 1;
+ expect(isCollapsed()).toBe(false);
+ });
- createComponent({ withLocalStorage: false });
+ it("when logged in it calls list update and doesn't set localStorage", async () => {
+ createComponent({ withLocalStorage: false, currentUserId: 1 });
findCaret().vm.$emit('click');
+ await wrapper.vm.$nextTick();
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.list.update).toHaveBeenCalledTimes(1);
- expect(localStorage.getItem(`${wrapper.vm.uniqueKey}.expanded`)).toBe(null);
- });
+ expect(updateListSpy).toHaveBeenCalledTimes(1);
+ expect(localStorage.getItem(`${wrapper.vm.uniqueKey}.expanded`)).toBe(null);
});
- it("when logged out it doesn't call list update and sets localStorage", () => {
- jest.spyOn(List.prototype, 'update');
-
+ it("when logged out it doesn't call list update and sets localStorage", async () => {
createComponent();
findCaret().vm.$emit('click');
+ await wrapper.vm.$nextTick();
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.list.update).not.toHaveBeenCalled();
- expect(localStorage.getItem(`${wrapper.vm.uniqueKey}.expanded`)).toBe(String(isExpanded()));
- });
+ expect(updateListSpy).not.toHaveBeenCalled();
+ expect(localStorage.getItem(`${wrapper.vm.uniqueKey}.expanded`)).toBe(String(isExpanded()));
+ });
+ });
+
+ describe('user can drag', () => {
+ const cannotDragList = [ListType.backlog, ListType.closed];
+ const canDragList = [ListType.label, ListType.milestone, ListType.assignee];
+
+ it.each(cannotDragList)(
+ 'does not have user-can-drag-class so user cannot drag list',
+ (listType) => {
+ createComponent({ listType });
+
+ expect(findTitle().classes()).not.toContain('user-can-drag');
+ },
+ );
+
+ it.each(canDragList)('has user-can-drag-class so user can drag list', (listType) => {
+ createComponent({ listType });
+
+ expect(findTitle().classes()).toContain('user-can-drag');
});
});
});
diff --git a/spec/frontend/boards/components/board_new_issue_new_spec.js b/spec/frontend/boards/components/board_new_issue_spec.js
index ee1c4f31cf0..5a01221a5be 100644
--- a/spec/frontend/boards/components/board_new_issue_new_spec.js
+++ b/spec/frontend/boards/components/board_new_issue_spec.js
@@ -1,9 +1,9 @@
import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
-import BoardNewIssue from '~/boards/components/board_new_issue_new.vue';
+import BoardNewIssue from '~/boards/components/board_new_issue.vue';
import '~/boards/models/list';
-import { mockList } from '../mock_data';
+import { mockList, mockGroupProjects } from '../mock_data';
const localVue = createLocalVue();
@@ -29,7 +29,7 @@ describe('Issue boards new issue form', () => {
beforeEach(() => {
const store = new Vuex.Store({
- state: {},
+ state: { selectedProject: mockGroupProjects[0] },
actions: { addListNewIssue: addListNewIssuesSpy },
getters: {},
});
diff --git a/spec/frontend/boards/components/boards_selector_spec.js b/spec/frontend/boards/components/boards_selector_spec.js
index db3c8c22950..81575bf486a 100644
--- a/spec/frontend/boards/components/boards_selector_spec.js
+++ b/spec/frontend/boards/components/boards_selector_spec.js
@@ -26,7 +26,7 @@ describe('BoardsSelector', () => {
const boards = boardGenerator(20);
const recentBoards = boardGenerator(5);
- const fillSearchBox = filterTerm => {
+ const fillSearchBox = (filterTerm) => {
const searchBox = wrapper.find({ ref: 'searchBox' });
const searchBoxInput = searchBox.find('input');
searchBoxInput.setValue(filterTerm);
@@ -59,7 +59,7 @@ describe('BoardsSelector', () => {
data: {
group: {
boards: {
- edges: boards.map(board => ({ node: board })),
+ edges: boards.map((board) => ({ node: board })),
},
},
},
@@ -94,7 +94,7 @@ describe('BoardsSelector', () => {
weights: [],
},
mocks: { $apollo },
- attachToDocument: true,
+ attachTo: document.body,
});
wrapper.vm.$apollo.addSmartQuery = jest.fn((_, options) => {
@@ -152,7 +152,7 @@ describe('BoardsSelector', () => {
it('shows only matching boards when filtering', () => {
const filterTerm = 'board1';
- const expectedCount = boards.filter(board => board.name.includes(filterTerm)).length;
+ const expectedCount = boards.filter((board) => board.name.includes(filterTerm)).length;
fillSearchBox(filterTerm);
diff --git a/spec/frontend/boards/components/issue_count_spec.js b/spec/frontend/boards/components/issue_count_spec.js
index d1ff0bdbf88..f1870e9cc9e 100644
--- a/spec/frontend/boards/components/issue_count_spec.js
+++ b/spec/frontend/boards/components/issue_count_spec.js
@@ -6,7 +6,7 @@ describe('IssueCount', () => {
let maxIssueCount;
let issuesSize;
- const createComponent = props => {
+ const createComponent = (props) => {
vm = shallowMount(IssueCount, { propsData: props });
};
diff --git a/spec/frontend/boards/components/issue_due_date_spec.js b/spec/frontend/boards/components/issue_due_date_spec.js
index 880859287e1..73340c1b96b 100644
--- a/spec/frontend/boards/components/issue_due_date_spec.js
+++ b/spec/frontend/boards/components/issue_due_date_spec.js
@@ -10,7 +10,7 @@ const createComponent = (dueDate = new Date(), closed = false) =>
},
});
-const findTime = wrapper => wrapper.find('time');
+const findTime = (wrapper) => wrapper.find('time');
describe('Issue Due Date component', () => {
let wrapper;
diff --git a/spec/frontend/boards/components/issue_time_estimate_deprecated_spec.js b/spec/frontend/boards/components/issue_time_estimate_deprecated_spec.js
new file mode 100644
index 00000000000..fafebaf3a4e
--- /dev/null
+++ b/spec/frontend/boards/components/issue_time_estimate_deprecated_spec.js
@@ -0,0 +1,64 @@
+import { shallowMount } from '@vue/test-utils';
+import IssueTimeEstimate from '~/boards/components/issue_time_estimate_deprecated.vue';
+import boardsStore from '~/boards/stores/boards_store';
+
+describe('Issue Time Estimate component', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ boardsStore.create();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when limitToHours is false', () => {
+ beforeEach(() => {
+ boardsStore.timeTracking.limitToHours = false;
+ wrapper = shallowMount(IssueTimeEstimate, {
+ propsData: {
+ estimate: 374460,
+ },
+ });
+ });
+
+ it('renders the correct time estimate', () => {
+ expect(wrapper.find('time').text().trim()).toEqual('2w 3d 1m');
+ });
+
+ it('renders expanded time estimate in tooltip', () => {
+ expect(wrapper.find('.js-issue-time-estimate').text()).toContain('2 weeks 3 days 1 minute');
+ });
+
+ it('prevents tooltip xss', (done) => {
+ const alertSpy = jest.spyOn(window, 'alert');
+ wrapper.setProps({ estimate: 'Foo <script>alert("XSS")</script>' });
+ wrapper.vm.$nextTick(() => {
+ expect(alertSpy).not.toHaveBeenCalled();
+ expect(wrapper.find('time').text().trim()).toEqual('0m');
+ expect(wrapper.find('.js-issue-time-estimate').text()).toContain('0m');
+ done();
+ });
+ });
+ });
+
+ describe('when limitToHours is true', () => {
+ beforeEach(() => {
+ boardsStore.timeTracking.limitToHours = true;
+ wrapper = shallowMount(IssueTimeEstimate, {
+ propsData: {
+ estimate: 374460,
+ },
+ });
+ });
+
+ it('renders the correct time estimate', () => {
+ expect(wrapper.find('time').text().trim()).toEqual('104h 1m');
+ });
+
+ it('renders expanded time estimate in tooltip', () => {
+ expect(wrapper.find('.js-issue-time-estimate').text()).toContain('104 hours 1 minute');
+ });
+ });
+});
diff --git a/spec/frontend/boards/components/issue_time_estimate_spec.js b/spec/frontend/boards/components/issue_time_estimate_spec.js
index 162a6df828b..9ac8fae3fcc 100644
--- a/spec/frontend/boards/components/issue_time_estimate_spec.js
+++ b/spec/frontend/boards/components/issue_time_estimate_spec.js
@@ -1,75 +1,65 @@
+import { config as vueConfig } from 'vue';
import { shallowMount } from '@vue/test-utils';
import IssueTimeEstimate from '~/boards/components/issue_time_estimate.vue';
-import boardsStore from '~/boards/stores/boards_store';
describe('Issue Time Estimate component', () => {
let wrapper;
- beforeEach(() => {
- boardsStore.create();
- });
-
afterEach(() => {
wrapper.destroy();
});
describe('when limitToHours is false', () => {
beforeEach(() => {
- boardsStore.timeTracking.limitToHours = false;
wrapper = shallowMount(IssueTimeEstimate, {
propsData: {
estimate: 374460,
},
+ provide: {
+ timeTrackingLimitToHours: false,
+ },
});
});
it('renders the correct time estimate', () => {
- expect(
- wrapper
- .find('time')
- .text()
- .trim(),
- ).toEqual('2w 3d 1m');
+ expect(wrapper.find('time').text().trim()).toEqual('2w 3d 1m');
});
it('renders expanded time estimate in tooltip', () => {
expect(wrapper.find('.js-issue-time-estimate').text()).toContain('2 weeks 3 days 1 minute');
});
- it('prevents tooltip xss', done => {
+ it('prevents tooltip xss', async () => {
const alertSpy = jest.spyOn(window, 'alert');
- wrapper.setProps({ estimate: 'Foo <script>alert("XSS")</script>' });
- wrapper.vm.$nextTick(() => {
- expect(alertSpy).not.toHaveBeenCalled();
- expect(
- wrapper
- .find('time')
- .text()
- .trim(),
- ).toEqual('0m');
- expect(wrapper.find('.js-issue-time-estimate').text()).toContain('0m');
- done();
- });
+
+ try {
+ // This will raise props validating warning by Vue, silencing it
+ vueConfig.silent = true;
+ await wrapper.setProps({ estimate: 'Foo <script>alert("XSS")</script>' });
+ } finally {
+ vueConfig.silent = false;
+ }
+
+ expect(alertSpy).not.toHaveBeenCalled();
+ expect(wrapper.find('time').text().trim()).toEqual('0m');
+ expect(wrapper.find('.js-issue-time-estimate').text()).toContain('0m');
});
});
describe('when limitToHours is true', () => {
beforeEach(() => {
- boardsStore.timeTracking.limitToHours = true;
wrapper = shallowMount(IssueTimeEstimate, {
propsData: {
estimate: 374460,
},
+ provide: {
+ timeTrackingLimitToHours: true,
+ },
});
});
it('renders the correct time estimate', () => {
- expect(
- wrapper
- .find('time')
- .text()
- .trim(),
- ).toEqual('104h 1m');
+ expect(wrapper.find('time').text().trim()).toEqual('104h 1m');
});
it('renders expanded time estimate in tooltip', () => {
diff --git a/spec/frontend/boards/components/sidebar/board_editable_item_spec.js b/spec/frontend/boards/components/sidebar/board_editable_item_spec.js
index d7df2ff1563..de414bb929e 100644
--- a/spec/frontend/boards/components/sidebar/board_editable_item_spec.js
+++ b/spec/frontend/boards/components/sidebar/board_editable_item_spec.js
@@ -33,6 +33,14 @@ describe('boards sidebar remove issue', () => {
expect(findTitle().text()).toBe(title);
});
+ it('renders provided title slot', () => {
+ const title = 'Sidebar item title on slot';
+ const slots = { title: `<strong>${title}</strong>` };
+ createComponent({ slots });
+
+ expect(wrapper.text()).toContain(title);
+ });
+
it('hides edit button, loader and expanded content by default', () => {
createComponent();
@@ -74,9 +82,19 @@ describe('boards sidebar remove issue', () => {
return wrapper.vm.$nextTick().then(() => {
expect(findCollapsed().isVisible()).toBe(false);
expect(findExpanded().isVisible()).toBe(true);
- expect(findExpanded().text()).toBe('Select item');
});
});
+
+ it('hides the header while editing if `toggleHeader` is true', async () => {
+ createComponent({ canUpdate: true, props: { toggleHeader: true } });
+ findEditButton().vm.$emit('click');
+
+ await wrapper.vm.$nextTick();
+
+ expect(findEditButton().isVisible()).toBe(false);
+ expect(findTitle().isVisible()).toBe(false);
+ expect(findExpanded().isVisible()).toBe(true);
+ });
});
describe('collapsing an item by offclicking', () => {
@@ -96,12 +114,13 @@ describe('boards sidebar remove issue', () => {
expect(findExpanded().isVisible()).toBe(false);
});
- it('emits close event', async () => {
+ it('emits events', async () => {
document.body.click();
await wrapper.vm.$nextTick();
- expect(wrapper.emitted().close.length).toBe(1);
+ expect(wrapper.emitted().close).toHaveLength(1);
+ expect(wrapper.emitted()['off-click']).toHaveLength(1);
});
});
diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_issue_title_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_issue_title_spec.js
new file mode 100644
index 00000000000..86895c648a4
--- /dev/null
+++ b/spec/frontend/boards/components/sidebar/board_sidebar_issue_title_spec.js
@@ -0,0 +1,182 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlAlert, GlFormInput, GlForm } from '@gitlab/ui';
+import BoardSidebarIssueTitle from '~/boards/components/sidebar/board_sidebar_issue_title.vue';
+import BoardEditableItem from '~/boards/components/sidebar/board_editable_item.vue';
+import createFlash from '~/flash';
+import { createStore } from '~/boards/stores';
+
+const TEST_TITLE = 'New issue title';
+const TEST_ISSUE_A = {
+ id: 'gid://gitlab/Issue/1',
+ iid: 8,
+ title: 'Issue 1',
+ referencePath: 'h/b#1',
+};
+const TEST_ISSUE_B = {
+ id: 'gid://gitlab/Issue/2',
+ iid: 9,
+ title: 'Issue 2',
+ referencePath: 'h/b#2',
+};
+
+jest.mock('~/flash');
+
+describe('~/boards/components/sidebar/board_sidebar_issue_title.vue', () => {
+ let wrapper;
+ let store;
+
+ afterEach(() => {
+ localStorage.clear();
+ wrapper.destroy();
+ store = null;
+ wrapper = null;
+ });
+
+ const createWrapper = (issue = TEST_ISSUE_A) => {
+ store = createStore();
+ store.state.issues = { [issue.id]: { ...issue } };
+ store.dispatch('setActiveId', { id: issue.id });
+
+ wrapper = shallowMount(BoardSidebarIssueTitle, {
+ store,
+ provide: {
+ canUpdate: true,
+ },
+ stubs: {
+ 'board-editable-item': BoardEditableItem,
+ },
+ });
+ };
+
+ const findForm = () => wrapper.find(GlForm);
+ const findAlert = () => wrapper.find(GlAlert);
+ const findFormInput = () => wrapper.find(GlFormInput);
+ const findEditableItem = () => wrapper.find(BoardEditableItem);
+ const findCancelButton = () => wrapper.find('[data-testid="cancel-button"]');
+ const findTitle = () => wrapper.find('[data-testid="issue-title"]');
+ const findCollapsed = () => wrapper.find('[data-testid="collapsed-content"]');
+
+ it('renders title and reference', () => {
+ createWrapper();
+
+ expect(findTitle().text()).toContain(TEST_ISSUE_A.title);
+ expect(findCollapsed().text()).toContain(TEST_ISSUE_A.referencePath);
+ });
+
+ it('does not render alert', () => {
+ createWrapper();
+
+ expect(findAlert().exists()).toBe(false);
+ });
+
+ describe('when new title is submitted', () => {
+ beforeEach(async () => {
+ createWrapper();
+
+ jest.spyOn(wrapper.vm, 'setActiveIssueTitle').mockImplementation(() => {
+ store.state.issues[TEST_ISSUE_A.id].title = TEST_TITLE;
+ });
+ findFormInput().vm.$emit('input', TEST_TITLE);
+ findForm().vm.$emit('submit', { preventDefault: () => {} });
+ await wrapper.vm.$nextTick();
+ });
+
+ it('collapses sidebar and renders new title', () => {
+ expect(findCollapsed().isVisible()).toBe(true);
+ expect(findTitle().text()).toContain(TEST_TITLE);
+ });
+
+ it('commits change to the server', () => {
+ expect(wrapper.vm.setActiveIssueTitle).toHaveBeenCalledWith({
+ title: TEST_TITLE,
+ projectPath: 'h/b',
+ });
+ });
+ });
+
+  describe('when submitting an invalid title', () => {
+ beforeEach(async () => {
+ createWrapper();
+
+ jest.spyOn(wrapper.vm, 'setActiveIssueTitle').mockImplementation(() => {});
+ findFormInput().vm.$emit('input', '');
+ findForm().vm.$emit('submit', { preventDefault: () => {} });
+ await wrapper.vm.$nextTick();
+ });
+
+    it('does not commit change to the server', () => {
+ expect(wrapper.vm.setActiveIssueTitle).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('when abandoning the form without saving', () => {
+ beforeEach(async () => {
+ createWrapper();
+
+ wrapper.vm.$refs.sidebarItem.expand();
+ findFormInput().vm.$emit('input', TEST_TITLE);
+ findEditableItem().vm.$emit('off-click');
+ await wrapper.vm.$nextTick();
+ });
+
+    it('does not collapse sidebar and shows alert', () => {
+ expect(findCollapsed().isVisible()).toBe(false);
+ expect(findAlert().exists()).toBe(true);
+ expect(localStorage.getItem(`${TEST_ISSUE_A.id}/issue-title-pending-changes`)).toBe(
+ TEST_TITLE,
+ );
+ });
+ });
+
+ describe('when accessing the form with pending changes', () => {
+ beforeAll(() => {
+ localStorage.setItem(`${TEST_ISSUE_A.id}/issue-title-pending-changes`, TEST_TITLE);
+
+ createWrapper();
+ });
+
+ it('sets title, expands item and shows alert', async () => {
+ expect(wrapper.vm.title).toBe(TEST_TITLE);
+ expect(findCollapsed().isVisible()).toBe(false);
+ expect(findAlert().exists()).toBe(true);
+ });
+ });
+
+ describe('when cancel button is clicked', () => {
+ beforeEach(async () => {
+ createWrapper(TEST_ISSUE_B);
+
+ jest.spyOn(wrapper.vm, 'setActiveIssueTitle').mockImplementation(() => {
+ store.state.issues[TEST_ISSUE_B.id].title = TEST_TITLE;
+ });
+ findFormInput().vm.$emit('input', TEST_TITLE);
+ findCancelButton().vm.$emit('click');
+ await wrapper.vm.$nextTick();
+ });
+
+    it('collapses sidebar and renders former title', () => {
+ expect(wrapper.vm.setActiveIssueTitle).not.toHaveBeenCalled();
+ expect(findCollapsed().isVisible()).toBe(true);
+ expect(findTitle().text()).toBe(TEST_ISSUE_B.title);
+ });
+ });
+
+ describe('when the mutation fails', () => {
+ beforeEach(async () => {
+ createWrapper(TEST_ISSUE_B);
+
+ jest.spyOn(wrapper.vm, 'setActiveIssueTitle').mockImplementation(() => {
+ throw new Error(['failed mutation']);
+ });
+ findFormInput().vm.$emit('input', 'Invalid title');
+ findForm().vm.$emit('submit', { preventDefault: () => {} });
+ await wrapper.vm.$nextTick();
+ });
+
+ it('collapses sidebar and renders former issue title', () => {
+ expect(findCollapsed().isVisible()).toBe(true);
+ expect(findTitle().text()).toContain(TEST_ISSUE_B.title);
+ expect(createFlash).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js
index da000d21f6a..2342caa9dfd 100644
--- a/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js
+++ b/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js
@@ -10,8 +10,8 @@ import createFlash from '~/flash';
jest.mock('~/flash');
-const TEST_LABELS_PAYLOAD = TEST_LABELS.map(label => ({ ...label, set: true }));
-const TEST_LABELS_TITLES = TEST_LABELS.map(label => label.title);
+const TEST_LABELS_PAYLOAD = TEST_LABELS.map((label) => ({ ...label, set: true }));
+const TEST_LABELS_TITLES = TEST_LABELS.map((label) => label.title);
describe('~/boards/components/sidebar/board_sidebar_labels_select.vue', () => {
let wrapper;
@@ -37,14 +37,15 @@ describe('~/boards/components/sidebar/board_sidebar_labels_select.vue', () => {
labelsFilterBasePath: TEST_HOST,
},
stubs: {
- 'board-editable-item': BoardEditableItem,
- 'labels-select': '<div></div>',
+ BoardEditableItem,
+ LabelsSelect: true,
},
});
};
const findLabelsSelect = () => wrapper.find({ ref: 'labelsSelect' });
- const findLabelsTitles = () => wrapper.findAll(GlLabel).wrappers.map(item => item.props('title'));
+ const findLabelsTitles = () =>
+ wrapper.findAll(GlLabel).wrappers.map((item) => item.props('title'));
const findCollapsed = () => wrapper.find('[data-testid="collapsed-content"]');
it('renders "None" when no labels are selected', () => {
@@ -76,7 +77,7 @@ describe('~/boards/components/sidebar/board_sidebar_labels_select.vue', () => {
it('commits change to the server', () => {
expect(wrapper.vm.setActiveIssueLabels).toHaveBeenCalledWith({
- addLabelIds: TEST_LABELS.map(label => label.id),
+ addLabelIds: TEST_LABELS.map((label) => label.id),
projectPath: 'gitlab-org/test-subgroup/gitlab-test',
removeLabelIds: [],
});
@@ -84,7 +85,10 @@ describe('~/boards/components/sidebar/board_sidebar_labels_select.vue', () => {
});
describe('when labels are updated over existing labels', () => {
- const testLabelsPayload = [{ id: 5, set: true }, { id: 7, set: true }];
+ const testLabelsPayload = [
+ { id: 5, set: true },
+ { id: 7, set: true },
+ ];
const expectedLabels = [{ id: 5 }, { id: 7 }];
beforeEach(async () => {
diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_subscription_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_subscription_spec.js
index ee54c662167..b1df0f2d771 100644
--- a/spec/frontend/boards/components/sidebar/board_sidebar_subscription_spec.js
+++ b/spec/frontend/boards/components/sidebar/board_sidebar_subscription_spec.js
@@ -83,7 +83,7 @@ describe('~/boards/components/sidebar/board_sidebar_subscription_spec.vue', () =
});
describe('Board sidebar subscription component `behavior`', () => {
- const mockSetActiveIssueSubscribed = subscribedState => {
+ const mockSetActiveIssueSubscribed = (subscribedState) => {
jest.spyOn(wrapper.vm, 'setActiveIssueSubscribed').mockImplementation(async () => {
store.commit(types.UPDATE_ISSUE_BY_ID, {
issueId: mockActiveIssue.id,
diff --git a/spec/frontend/boards/components/sidebar/remove_issue_spec.js b/spec/frontend/boards/components/sidebar/remove_issue_spec.js
index a33e4046724..1b7a78e6e58 100644
--- a/spec/frontend/boards/components/sidebar/remove_issue_spec.js
+++ b/spec/frontend/boards/components/sidebar/remove_issue_spec.js
@@ -8,7 +8,7 @@ describe('boards sidebar remove issue', () => {
const findButton = () => wrapper.find(GlButton);
- const createComponent = propsData => {
+ const createComponent = (propsData) => {
wrapper = shallowMount(RemoveIssue, {
propsData: {
issue: {},
diff --git a/spec/frontend/boards/issue_card_spec.js b/spec/frontend/boards/issue_card_deprecated_spec.js
index 7e22e9647f0..fd7b0edb97e 100644
--- a/spec/frontend/boards/issue_card_spec.js
+++ b/spec/frontend/boards/issue_card_deprecated_spec.js
@@ -6,7 +6,7 @@ import '~/boards/models/assignee';
import '~/boards/models/issue';
import '~/boards/models/list';
import { GlLabel } from '@gitlab/ui';
-import IssueCardInner from '~/boards/components/issue_card_inner.vue';
+import IssueCardInner from '~/boards/components/issue_card_inner_deprecated.vue';
import { listObj } from './mock_data';
import store from '~/boards/stores';
@@ -79,7 +79,7 @@ describe('Issue card component', () => {
expect(wrapper.find('.issue-blocked-icon').exists()).toBe(false);
});
- it('renders confidential icon', done => {
+ it('renders confidential icon', (done) => {
wrapper.setProps({
issue: {
...wrapper.props('issue'),
@@ -102,7 +102,7 @@ describe('Issue card component', () => {
});
describe('exists', () => {
- beforeEach(done => {
+ beforeEach((done) => {
wrapper.setProps({
issue: {
...wrapper.props('issue'),
@@ -132,7 +132,7 @@ describe('Issue card component', () => {
expect(wrapper.find('.board-card-assignee img').exists()).toBe(true);
});
- it('renders the avatar using avatar_url property', done => {
+ it('renders the avatar using avatar_url property', (done) => {
wrapper.props('issue').updateData({
...wrapper.props('issue'),
assignees: [
@@ -156,7 +156,7 @@ describe('Issue card component', () => {
});
describe('assignee default avatar', () => {
- beforeEach(done => {
+ beforeEach((done) => {
global.gon.default_avatar_url = 'default_avatar';
wrapper.setProps({
@@ -189,7 +189,7 @@ describe('Issue card component', () => {
});
describe('multiple assignees', () => {
- beforeEach(done => {
+ beforeEach((done) => {
wrapper.setProps({
issue: {
...wrapper.props('issue'),
@@ -224,7 +224,7 @@ describe('Issue card component', () => {
});
describe('more than three assignees', () => {
- beforeEach(done => {
+ beforeEach((done) => {
const { assignees } = wrapper.props('issue');
assignees.push(
new ListAssignee({
@@ -245,23 +245,18 @@ describe('Issue card component', () => {
});
it('renders more avatar counter', () => {
- expect(
- wrapper
- .find('.board-card-assignee .avatar-counter')
- .text()
- .trim(),
- ).toEqual('+2');
+ expect(wrapper.find('.board-card-assignee .avatar-counter').text().trim()).toEqual('+2');
});
it('renders two assignees', () => {
expect(wrapper.findAll('.board-card-assignee .avatar').length).toEqual(2);
});
- it('renders 99+ avatar counter', done => {
+ it('renders 99+ avatar counter', (done) => {
const assignees = [
...wrapper.props('issue').assignees,
...range(5, 103).map(
- i =>
+ (i) =>
new ListAssignee({
id: i,
name: 'name',
@@ -278,12 +273,7 @@ describe('Issue card component', () => {
});
wrapper.vm.$nextTick(() => {
- expect(
- wrapper
- .find('.board-card-assignee .avatar-counter')
- .text()
- .trim(),
- ).toEqual('99+');
+ expect(wrapper.find('.board-card-assignee .avatar-counter').text().trim()).toEqual('99+');
done();
});
});
@@ -291,7 +281,7 @@ describe('Issue card component', () => {
});
describe('labels', () => {
- beforeEach(done => {
+ beforeEach((done) => {
issue.addLabel(label1);
wrapper.setProps({ issue: { ...issue } });
@@ -306,7 +296,7 @@ describe('Issue card component', () => {
expect(label.props('backgroundColor')).toEqual(label1.color);
});
- it('does not render label if label does not have an ID', done => {
+ it('does not render label if label does not have an ID', (done) => {
issue.addLabel(
new ListLabel({
title: 'closed',
@@ -325,7 +315,7 @@ describe('Issue card component', () => {
});
describe('blocked', () => {
- beforeEach(done => {
+ beforeEach((done) => {
wrapper.setProps({
issue: {
...wrapper.props('issue'),
diff --git a/spec/frontend/boards/issue_card_inner_spec.js b/spec/frontend/boards/issue_card_inner_spec.js
new file mode 100644
index 00000000000..f9ad78494af
--- /dev/null
+++ b/spec/frontend/boards/issue_card_inner_spec.js
@@ -0,0 +1,372 @@
+import { mount } from '@vue/test-utils';
+import { range } from 'lodash';
+import { GlLabel } from '@gitlab/ui';
+import IssueCardInner from '~/boards/components/issue_card_inner.vue';
+import { mockLabelList } from './mock_data';
+import defaultStore from '~/boards/stores';
+import eventHub from '~/boards/eventhub';
+import { updateHistory } from '~/lib/utils/url_utility';
+
+jest.mock('~/lib/utils/url_utility');
+jest.mock('~/boards/eventhub');
+
+describe('Issue card component', () => {
+ const user = {
+ id: 1,
+ name: 'testing 123',
+ username: 'test',
+ avatarUrl: 'test_image',
+ };
+
+ const label1 = {
+ id: 3,
+ title: 'testing 123',
+ color: '#000CFF',
+ textColor: 'white',
+ description: 'test',
+ };
+
+ let wrapper;
+ let issue;
+ let list;
+
+ const createWrapper = (props = {}, store = defaultStore) => {
+ wrapper = mount(IssueCardInner, {
+ store,
+ propsData: {
+ list,
+ issue,
+ ...props,
+ },
+ stubs: {
+ GlLabel: true,
+ },
+ provide: {
+ groupId: null,
+ rootPath: '/',
+ scopedLabelsAvailable: false,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ list = mockLabelList;
+ issue = {
+ title: 'Testing',
+ id: 1,
+ iid: 1,
+ confidential: false,
+ labels: [list.label],
+ assignees: [],
+ referencePath: '#1',
+ webUrl: '/test/1',
+ weight: 1,
+ };
+
+ createWrapper({ issue, list });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ jest.clearAllMocks();
+ });
+
+ it('renders issue title', () => {
+ expect(wrapper.find('.board-card-title').text()).toContain(issue.title);
+ });
+
+ it('includes issue base in link', () => {
+ expect(wrapper.find('.board-card-title a').attributes('href')).toContain('/test');
+ });
+
+ it('includes issue title on link', () => {
+ expect(wrapper.find('.board-card-title a').attributes('title')).toBe(issue.title);
+ });
+
+ it('does not render confidential icon', () => {
+ expect(wrapper.find('.confidential-icon').exists()).toBe(false);
+ });
+
+ it('does not render blocked icon', () => {
+ expect(wrapper.find('.issue-blocked-icon').exists()).toBe(false);
+ });
+
+ it('renders issue ID with #', () => {
+ expect(wrapper.find('.board-card-number').text()).toContain(`#${issue.id}`);
+ });
+
+ it('does not render assignee', () => {
+ expect(wrapper.find('.board-card-assignee .avatar').exists()).toBe(false);
+ });
+
+ describe('confidential issue', () => {
+ beforeEach(() => {
+ wrapper.setProps({
+ issue: {
+ ...wrapper.props('issue'),
+ confidential: true,
+ },
+ });
+ });
+
+ it('renders confidential icon', () => {
+ expect(wrapper.find('.confidential-icon').exists()).toBe(true);
+ });
+ });
+
+ describe('with assignee', () => {
+ describe('with avatar', () => {
+ beforeEach(() => {
+ wrapper.setProps({
+ issue: {
+ ...wrapper.props('issue'),
+ assignees: [user],
+ updateData(newData) {
+ Object.assign(this, newData);
+ },
+ },
+ });
+ });
+
+ it('renders assignee', () => {
+ expect(wrapper.find('.board-card-assignee .avatar').exists()).toBe(true);
+ });
+
+ it('sets title', () => {
+ expect(wrapper.find('.js-assignee-tooltip').text()).toContain(`${user.name}`);
+ });
+
+ it('sets users path', () => {
+ expect(wrapper.find('.board-card-assignee a').attributes('href')).toBe('/test');
+ });
+
+ it('renders avatar', () => {
+ expect(wrapper.find('.board-card-assignee img').exists()).toBe(true);
+ });
+
+ it('renders the avatar using avatarUrl property', async () => {
+ wrapper.props('issue').updateData({
+ ...wrapper.props('issue'),
+ assignees: [
+ {
+ id: '1',
+ name: 'test',
+ state: 'active',
+ username: 'test_name',
+ avatarUrl: 'test_image_from_avatar_url',
+ },
+ ],
+ });
+
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find('.board-card-assignee img').attributes('src')).toBe(
+ 'test_image_from_avatar_url?width=24',
+ );
+ });
+ });
+
+ describe('with default avatar', () => {
+ beforeEach(() => {
+ global.gon.default_avatar_url = 'default_avatar';
+
+ wrapper.setProps({
+ issue: {
+ ...wrapper.props('issue'),
+ assignees: [
+ {
+ id: 1,
+ name: 'testing 123',
+ username: 'test',
+ },
+ ],
+ },
+ });
+ });
+
+ afterEach(() => {
+ global.gon.default_avatar_url = null;
+ });
+
+ it("displays the default avatar if the user's avatar is null", () => {
+ expect(wrapper.find('.board-card-assignee img').exists()).toBe(true);
+ expect(wrapper.find('.board-card-assignee img').attributes('src')).toBe(
+ 'default_avatar?width=24',
+ );
+ });
+ });
+ });
+
+ describe('multiple assignees', () => {
+ beforeEach(() => {
+ wrapper.setProps({
+ issue: {
+ ...wrapper.props('issue'),
+ assignees: [
+ {
+ id: 2,
+ name: 'user2',
+ username: 'user2',
+ avatarUrl: 'test_image',
+ },
+ {
+ id: 3,
+ name: 'user3',
+ username: 'user3',
+ avatarUrl: 'test_image',
+ },
+ {
+ id: 4,
+ name: 'user4',
+ username: 'user4',
+ avatarUrl: 'test_image',
+ },
+ ],
+ },
+ });
+ });
+
+ it('renders all three assignees', () => {
+ expect(wrapper.findAll('.board-card-assignee .avatar').length).toEqual(3);
+ });
+
+ describe('more than three assignees', () => {
+ beforeEach(() => {
+ const { assignees } = wrapper.props('issue');
+ assignees.push({
+ id: 5,
+ name: 'user5',
+ username: 'user5',
+ avatarUrl: 'test_image',
+ });
+
+ wrapper.setProps({
+ issue: {
+ ...wrapper.props('issue'),
+ assignees,
+ },
+ });
+ });
+
+ it('renders more avatar counter', () => {
+ expect(wrapper.find('.board-card-assignee .avatar-counter').text().trim()).toEqual('+2');
+ });
+
+ it('renders two assignees', () => {
+ expect(wrapper.findAll('.board-card-assignee .avatar').length).toEqual(2);
+ });
+
+ it('renders 99+ avatar counter', async () => {
+ const assignees = [
+ ...wrapper.props('issue').assignees,
+ ...range(5, 103).map((i) => ({
+ id: i,
+ name: 'name',
+ username: 'username',
+ avatarUrl: 'test_image',
+ })),
+ ];
+ wrapper.setProps({
+ issue: {
+ ...wrapper.props('issue'),
+ assignees,
+ },
+ });
+
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find('.board-card-assignee .avatar-counter').text().trim()).toEqual('99+');
+ });
+ });
+ });
+
+ describe('labels', () => {
+ beforeEach(() => {
+ wrapper.setProps({ issue: { ...issue, labels: [list.label, label1] } });
+ });
+
+ it('does not render list label but renders all other labels', () => {
+ expect(wrapper.findAll(GlLabel).length).toBe(1);
+ const label = wrapper.find(GlLabel);
+ expect(label.props('title')).toEqual(label1.title);
+ expect(label.props('description')).toEqual(label1.description);
+ expect(label.props('backgroundColor')).toEqual(label1.color);
+ });
+
+ it('does not render label if label does not have an ID', async () => {
+ wrapper.setProps({ issue: { ...issue, labels: [label1, { title: 'closed' }] } });
+
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.findAll(GlLabel).length).toBe(1);
+ expect(wrapper.text()).not.toContain('closed');
+ });
+ });
+
+ describe('blocked', () => {
+ beforeEach(() => {
+ wrapper.setProps({
+ issue: {
+ ...wrapper.props('issue'),
+ blocked: true,
+ },
+ });
+ });
+
+ it('renders blocked icon if issue is blocked', () => {
+ expect(wrapper.find('.issue-blocked-icon').exists()).toBe(true);
+ });
+ });
+
+ describe('filterByLabel method', () => {
+ beforeEach(() => {
+ delete window.location;
+
+ wrapper.setProps({
+ updateFilters: true,
+ });
+ });
+
+ describe('when selected label is not in the filter', () => {
+ beforeEach(() => {
+ jest.spyOn(wrapper.vm, 'performSearch').mockImplementation(() => {});
+ window.location = { search: '' };
+ wrapper.vm.filterByLabel(label1);
+ });
+
+ it('calls updateHistory', () => {
+ expect(updateHistory).toHaveBeenCalledTimes(1);
+ });
+
+ it('dispatches performSearch vuex action', () => {
+ expect(wrapper.vm.performSearch).toHaveBeenCalledTimes(1);
+ });
+
+ it('emits updateTokens event', () => {
+ expect(eventHub.$emit).toHaveBeenCalledTimes(1);
+ expect(eventHub.$emit).toHaveBeenCalledWith('updateTokens');
+ });
+ });
+
+ describe('when selected label is already in the filter', () => {
+ beforeEach(() => {
+ jest.spyOn(wrapper.vm, 'performSearch').mockImplementation(() => {});
+ window.location = { search: '?label_name[]=testing%20123' };
+ wrapper.vm.filterByLabel(label1);
+ });
+
+ it('does not call updateHistory', () => {
+ expect(updateHistory).not.toHaveBeenCalled();
+ });
+
+ it('does not dispatch performSearch vuex action', () => {
+ expect(wrapper.vm.performSearch).not.toHaveBeenCalled();
+ });
+
+ it('does not emit updateTokens event', () => {
+ expect(eventHub.$emit).not.toHaveBeenCalled();
+ });
+ });
+ });
+});
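
Note on the pattern above: the hunks in this spec and the new issue_card_inner_spec.js replace Jest's done callback with async/await plus wrapper.vm.$nextTick() after setProps, so assertions run only after Vue has re-rendered. A minimal, self-contained sketch of that pattern follows; the component, prop, and rendered '99+' text are hypothetical and not part of this patch.

import { mount } from '@vue/test-utils';
// Hypothetical component: assumed to render '99+' once the count prop exceeds 99.
import CounterBadge from '~/hypothetical/counter_badge.vue';

describe('CounterBadge', () => {
  it('re-renders after props change', async () => {
    const wrapper = mount(CounterBadge, { propsData: { count: 1 } });

    wrapper.setProps({ count: 100 });
    // setProps only queues a re-render; awaiting $nextTick flushes it before asserting.
    await wrapper.vm.$nextTick();

    expect(wrapper.text()).toContain('99+');
    wrapper.destroy();
  });
});
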
diff --git a/spec/frontend/boards/list_spec.js b/spec/frontend/boards/list_spec.js
index b731bb6e474..db01f62c9a6 100644
--- a/spec/frontend/boards/list_spec.js
+++ b/spec/frontend/boards/list_spec.js
@@ -37,7 +37,7 @@ describe('List model', () => {
describe('list type', () => {
const notExpandableList = ['blank'];
- const table = Object.keys(ListType).map(k => {
+ const table = Object.keys(ListType).map((k) => {
const value = ListType[k];
return [value, !notExpandableList.includes(value)];
});
@@ -186,7 +186,7 @@ describe('List model', () => {
list.issues = [];
});
- it('adds new issue to top of list', done => {
+ it('adds new issue to top of list', (done) => {
const user = new ListAssignee({
id: 1,
name: 'testing 123',
diff --git a/spec/frontend/boards/mock_data.js b/spec/frontend/boards/mock_data.js
index ea6c52c6830..d5cfb9b7d07 100644
--- a/spec/frontend/boards/mock_data.js
+++ b/spec/frontend/boards/mock_data.js
@@ -263,7 +263,7 @@ export const BoardsMockData = {
},
};
-export const boardsMockInterceptor = config => {
+export const boardsMockInterceptor = (config) => {
const body = BoardsMockData[config.method.toUpperCase()][config.url];
return [200, body];
};
@@ -285,7 +285,7 @@ export const setMockEndpoints = (opts = {}) => {
export const mockList = {
id: 'gid://gitlab/List/1',
title: 'Backlog',
- position: null,
+ position: -Infinity,
listType: 'backlog',
collapsed: false,
label: null,
@@ -318,7 +318,7 @@ export const mockLists = [mockList, mockLabelList];
export const mockListsById = keyBy(mockLists, 'id');
-export const mockListsWithModel = mockLists.map(listMock =>
+export const mockListsWithModel = mockLists.map((listMock) =>
Vue.observable(new List({ ...listMock, doNotFetchIssues: true })),
);
@@ -350,3 +350,33 @@ export const issues = {
[mockIssue3.id]: mockIssue3,
[mockIssue4.id]: mockIssue4,
};
+
+export const mockRawGroupProjects = [
+ {
+ id: 0,
+ name: 'Example Project',
+ name_with_namespace: 'Awesome Group / Example Project',
+ path_with_namespace: 'awesome-group/example-project',
+ },
+ {
+ id: 1,
+ name: 'Foobar Project',
+ name_with_namespace: 'Awesome Group / Foobar Project',
+ path_with_namespace: 'awesome-group/foobar-project',
+ },
+];
+
+export const mockGroupProjects = [
+ {
+ id: 0,
+ name: 'Example Project',
+ nameWithNamespace: 'Awesome Group / Example Project',
+ fullPath: 'awesome-group/example-project',
+ },
+ {
+ id: 1,
+ name: 'Foobar Project',
+ nameWithNamespace: 'Awesome Group / Foobar Project',
+ fullPath: 'awesome-group/foobar-project',
+ },
+];
diff --git a/spec/frontend/boards/project_select_deprecated_spec.js b/spec/frontend/boards/project_select_deprecated_spec.js
new file mode 100644
index 00000000000..e4f8f96bd33
--- /dev/null
+++ b/spec/frontend/boards/project_select_deprecated_spec.js
@@ -0,0 +1,261 @@
+import { mount } from '@vue/test-utils';
+import axios from 'axios';
+import AxiosMockAdapter from 'axios-mock-adapter';
+import { GlDropdown, GlDropdownItem, GlSearchBoxByType, GlLoadingIcon } from '@gitlab/ui';
+import httpStatus from '~/lib/utils/http_status';
+import { featureAccessLevel } from '~/pages/projects/shared/permissions/constants';
+import { ListType } from '~/boards/constants';
+import eventHub from '~/boards/eventhub';
+import { deprecatedCreateFlash as flash } from '~/flash';
+
+import ProjectSelect from '~/boards/components/project_select_deprecated.vue';
+
+import { listObj, mockRawGroupProjects } from './mock_data';
+
+jest.mock('~/boards/eventhub');
+jest.mock('~/flash');
+
+const dummyGon = {
+ api_version: 'v4',
+ relative_url_root: '/gitlab',
+};
+
+const mockGroupId = 1;
+const mockProjectsList1 = mockRawGroupProjects.slice(0, 1);
+const mockProjectsList2 = mockRawGroupProjects.slice(1);
+const mockDefaultFetchOptions = {
+ with_issues_enabled: true,
+ with_shared: false,
+ include_subgroups: true,
+ order_by: 'similarity',
+};
+
+const itemsPerPage = 20;
+
+describe('ProjectSelect component', () => {
+ let wrapper;
+ let axiosMock;
+
+ const findLabel = () => wrapper.find("[data-testid='header-label']");
+ const findGlDropdown = () => wrapper.find(GlDropdown);
+ const findGlDropdownLoadingIcon = () =>
+ findGlDropdown().find('button:first-child').find(GlLoadingIcon);
+ const findGlSearchBoxByType = () => wrapper.find(GlSearchBoxByType);
+ const findGlDropdownItems = () => wrapper.findAll(GlDropdownItem);
+ const findFirstGlDropdownItem = () => findGlDropdownItems().at(0);
+ const findInMenuLoadingIcon = () => wrapper.find("[data-testid='dropdown-text-loading-icon']");
+ const findEmptySearchMessage = () => wrapper.find("[data-testid='empty-result-message']");
+
+ const mockGetRequest = (data = [], statusCode = httpStatus.OK) => {
+ axiosMock
+ .onGet(`/gitlab/api/v4/groups/${mockGroupId}/projects.json`)
+ .replyOnce(statusCode, data);
+ };
+
+ const searchForProject = async (keyword, waitForAll = true) => {
+ findGlSearchBoxByType().vm.$emit('input', keyword);
+
+ if (waitForAll) {
+ await axios.waitForAll();
+ }
+ };
+
+ const createWrapper = async ({ list = listObj } = {}, waitForAll = true) => {
+ wrapper = mount(ProjectSelect, {
+ propsData: {
+ list,
+ },
+ provide: {
+ groupId: 1,
+ },
+ });
+
+ if (waitForAll) {
+ await axios.waitForAll();
+ }
+ };
+
+ beforeEach(() => {
+ axiosMock = new AxiosMockAdapter(axios);
+ window.gon = dummyGon;
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ axiosMock.restore();
+ jest.clearAllMocks();
+ });
+
+ it('displays a header title', async () => {
+ createWrapper({});
+
+ expect(findLabel().text()).toBe('Projects');
+ });
+
+ it('renders a default dropdown text', async () => {
+ createWrapper({});
+
+ expect(findGlDropdown().exists()).toBe(true);
+ expect(findGlDropdown().text()).toContain('Select a project');
+ });
+
+ describe('when mounted', () => {
+ it('displays a loading icon while projects are being fetched', async () => {
+ mockGetRequest([]);
+
+ createWrapper({}, false);
+
+ expect(findGlDropdownLoadingIcon().exists()).toBe(true);
+
+ await axios.waitForAll();
+
+ expect(axiosMock.history.get[0].params).toMatchObject({ search: '' });
+ expect(axiosMock.history.get[0].url).toBe(
+ `/gitlab/api/v4/groups/${mockGroupId}/projects.json`,
+ );
+
+ expect(findGlDropdownLoadingIcon().exists()).toBe(false);
+ });
+ });
+
+ describe('when dropdown menu is open', () => {
+ describe('by default', () => {
+ beforeEach(async () => {
+ mockGetRequest(mockProjectsList1);
+
+ await createWrapper();
+ });
+
+ it('shows GlSearchBoxByType with default attributes', () => {
+ expect(findGlSearchBoxByType().exists()).toBe(true);
+ expect(findGlSearchBoxByType().vm.$attrs).toMatchObject({
+ placeholder: 'Search projects',
+ debounce: '250',
+ });
+ });
+
+ it("displays the fetched project's name", () => {
+ expect(findFirstGlDropdownItem().exists()).toBe(true);
+ expect(findFirstGlDropdownItem().text()).toContain(mockProjectsList1[0].name);
+ });
+
+ it("doesn't render loading icon in the menu", () => {
+ expect(findInMenuLoadingIcon().isVisible()).toBe(false);
+ });
+
+ it('renders empty search result message', async () => {
+ await createWrapper();
+
+ expect(findEmptySearchMessage().exists()).toBe(true);
+ });
+ });
+
+ describe('when a project is selected', () => {
+ beforeEach(async () => {
+ mockGetRequest(mockProjectsList1);
+
+ await createWrapper();
+
+ await findFirstGlDropdownItem().find('button').trigger('click');
+ });
+
+ it('emits setSelectedProject with correct project metadata', () => {
+ expect(eventHub.$emit).toHaveBeenCalledWith('setSelectedProject', {
+ id: mockProjectsList1[0].id,
+ path: mockProjectsList1[0].path_with_namespace,
+ name: mockProjectsList1[0].name,
+ namespacedName: mockProjectsList1[0].name_with_namespace,
+ });
+ });
+
+ it('renders the name of the selected project', () => {
+ expect(findGlDropdown().find('.gl-new-dropdown-button-text').text()).toBe(
+ mockProjectsList1[0].name,
+ );
+ });
+ });
+
+ describe('when user searches for a project', () => {
+ beforeEach(async () => {
+ mockGetRequest(mockProjectsList1);
+
+ await createWrapper();
+ });
+
+ it('calls API with correct parameters with default fetch options', async () => {
+ await searchForProject('foobar');
+
+ const expectedApiParams = {
+ search: 'foobar',
+ per_page: itemsPerPage,
+ ...mockDefaultFetchOptions,
+ };
+
+ expect(axiosMock.history.get[1].params).toMatchObject(expectedApiParams);
+ expect(axiosMock.history.get[1].url).toBe(
+ `/gitlab/api/v4/groups/${mockGroupId}/projects.json`,
+ );
+ });
+
+ describe("when list type is defined and isn't backlog", () => {
+ it('calls API with an additional fetch option (min_access_level)', async () => {
+ axiosMock.reset();
+
+ await createWrapper({ list: { ...listObj, type: ListType.label } });
+
+ await searchForProject('foobar');
+
+ const expectedApiParams = {
+ search: 'foobar',
+ per_page: itemsPerPage,
+ ...mockDefaultFetchOptions,
+ min_access_level: featureAccessLevel.EVERYONE,
+ };
+
+ expect(axiosMock.history.get[1].params).toMatchObject(expectedApiParams);
+ expect(axiosMock.history.get[1].url).toBe(
+ `/gitlab/api/v4/groups/${mockGroupId}/projects.json`,
+ );
+ });
+ });
+
+ it('displays and hides gl-loading-icon while and after fetching data', async () => {
+ await searchForProject('some keyword', false);
+
+ await wrapper.vm.$nextTick();
+
+ expect(findInMenuLoadingIcon().isVisible()).toBe(true);
+
+ await axios.waitForAll();
+
+ expect(findInMenuLoadingIcon().isVisible()).toBe(false);
+ });
+
+ it('flashes an error message when fetching fails', async () => {
+ mockGetRequest([], httpStatus.INTERNAL_SERVER_ERROR);
+
+ await searchForProject('foobar');
+
+ expect(flash).toHaveBeenCalledTimes(1);
+ expect(flash).toHaveBeenCalledWith('Something went wrong while fetching projects');
+ });
+
+ describe('with non-empty search result', () => {
+ beforeEach(async () => {
+ mockGetRequest(mockProjectsList2);
+
+ await searchForProject('foobar');
+ });
+
+ it('displays the retrieved list of projects', async () => {
+ expect(findFirstGlDropdownItem().text()).toContain(mockProjectsList2[0].name);
+ });
+
+ it('does not render empty search result message', async () => {
+ expect(findEmptySearchMessage().exists()).toBe(false);
+ });
+ });
+ });
+ });
+});
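
Note on the HTTP stubbing above: the deprecated ProjectSelect spec intercepts the group projects endpoint with axios-mock-adapter and relies on axios.waitForAll(), which looks like a suite-level helper rather than a stock axios API. A smaller sketch of the same mocking idea using only plain axios and async/await, with a hypothetical URL and payload:

import axios from 'axios';
import AxiosMockAdapter from 'axios-mock-adapter';

describe('group projects endpoint stub', () => {
  let mock;

  beforeEach(() => {
    mock = new AxiosMockAdapter(axios);
  });

  afterEach(() => {
    mock.restore();
  });

  it('returns the stubbed project list', async () => {
    // replyOnce answers only the first matching GET; later requests fall through.
    mock.onGet('/api/v4/groups/1/projects.json').replyOnce(200, [{ id: 1, name: 'Example' }]);

    const { data } = await axios.get('/api/v4/groups/1/projects.json');

    expect(data).toEqual([{ id: 1, name: 'Example' }]);
  });
});
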
diff --git a/spec/frontend/boards/project_select_spec.js b/spec/frontend/boards/project_select_spec.js
new file mode 100644
index 00000000000..14ddab3542b
--- /dev/null
+++ b/spec/frontend/boards/project_select_spec.js
@@ -0,0 +1,154 @@
+import Vuex from 'vuex';
+import { createLocalVue, mount } from '@vue/test-utils';
+import { GlDropdown, GlDropdownItem, GlSearchBoxByType, GlLoadingIcon } from '@gitlab/ui';
+import defaultState from '~/boards/stores/state';
+
+import ProjectSelect from '~/boards/components/project_select.vue';
+
+import { mockList, mockGroupProjects } from './mock_data';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+const actions = {
+ fetchGroupProjects: jest.fn(),
+ setSelectedProject: jest.fn(),
+};
+
+const createStore = (state = defaultState) => {
+ return new Vuex.Store({
+ state,
+ actions,
+ });
+};
+
+const mockProjectsList1 = mockGroupProjects.slice(0, 1);
+
+describe('ProjectSelect component', () => {
+ let wrapper;
+
+ const findLabel = () => wrapper.find("[data-testid='header-label']");
+ const findGlDropdown = () => wrapper.find(GlDropdown);
+ const findGlDropdownLoadingIcon = () =>
+ findGlDropdown().find('button:first-child').find(GlLoadingIcon);
+ const findGlSearchBoxByType = () => wrapper.find(GlSearchBoxByType);
+ const findGlDropdownItems = () => wrapper.findAll(GlDropdownItem);
+ const findFirstGlDropdownItem = () => findGlDropdownItems().at(0);
+ const findInMenuLoadingIcon = () => wrapper.find("[data-testid='dropdown-text-loading-icon']");
+ const findEmptySearchMessage = () => wrapper.find("[data-testid='empty-result-message']");
+
+ const createWrapper = (state = {}) => {
+ const store = createStore({
+ groupProjects: [],
+ groupProjectsFlags: {
+ isLoading: false,
+ pageInfo: {
+ hasNextPage: false,
+ },
+ },
+ ...state,
+ });
+
+ wrapper = mount(ProjectSelect, {
+ localVue,
+ propsData: {
+ list: mockList,
+ },
+ store,
+ provide: {
+ groupId: 1,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('displays a header title', () => {
+ createWrapper();
+
+ expect(findLabel().text()).toBe('Projects');
+ });
+
+ it('renders a default dropdown text', () => {
+ createWrapper();
+
+ expect(findGlDropdown().exists()).toBe(true);
+ expect(findGlDropdown().text()).toContain('Select a project');
+ });
+
+ describe('when mounted', () => {
+ it('displays a loading icon while projects are being fetched', async () => {
+ createWrapper();
+
+ expect(findGlDropdownLoadingIcon().exists()).toBe(true);
+
+ await wrapper.vm.$nextTick();
+
+ expect(findGlDropdownLoadingIcon().exists()).toBe(false);
+ });
+ });
+
+ describe('when dropdown menu is open', () => {
+ describe('by default', () => {
+ beforeEach(() => {
+ createWrapper({ groupProjects: mockGroupProjects });
+ });
+
+ it('shows GlSearchBoxByType with default attributes', () => {
+ expect(findGlSearchBoxByType().exists()).toBe(true);
+ expect(findGlSearchBoxByType().vm.$attrs).toMatchObject({
+ placeholder: 'Search projects',
+ debounce: '250',
+ });
+ });
+
+ it("displays the fetched project's name", () => {
+ expect(findFirstGlDropdownItem().exists()).toBe(true);
+ expect(findFirstGlDropdownItem().text()).toContain(mockProjectsList1[0].name);
+ });
+
+ it("doesn't render loading icon in the menu", () => {
+ expect(findInMenuLoadingIcon().isVisible()).toBe(false);
+ });
+
+ it('does not render empty search result message', () => {
+ expect(findEmptySearchMessage().exists()).toBe(false);
+ });
+ });
+
+ describe('when no projects are being returned', () => {
+ it('renders empty search result message', () => {
+ createWrapper();
+
+ expect(findEmptySearchMessage().exists()).toBe(true);
+ });
+ });
+
+ describe('when a project is selected', () => {
+ beforeEach(() => {
+ createWrapper({ groupProjects: mockProjectsList1 });
+
+ findFirstGlDropdownItem().find('button').trigger('click');
+ });
+
+ it('renders the name of the selected project', () => {
+ expect(findGlDropdown().find('.gl-new-dropdown-button-text').text()).toBe(
+ mockProjectsList1[0].name,
+ );
+ });
+ });
+
+ describe('when projects are loading', () => {
+ beforeEach(() => {
+ createWrapper({ groupProjectsFlags: { isLoading: true } });
+ });
+
+ it('displays gl-loading-icon in the menu while projects are loading', () => {
+ expect(findInMenuLoadingIcon().isVisible()).toBe(true);
+ });
+ });
+ });
+});
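
Note on the store setup above: project_select_spec.js mounts the component against a Vuex store whose actions are swapped for jest.fn(), so no network traffic is involved. A minimal sketch of that wiring; the ProjectList component and its assumed mounted() dispatch are hypothetical:

import Vuex from 'vuex';
import { createLocalVue, mount } from '@vue/test-utils';
// Hypothetical component assumed to dispatch 'fetchProjects' from its mounted() hook.
import ProjectList from '~/hypothetical/project_list.vue';

const localVue = createLocalVue();
localVue.use(Vuex);

describe('ProjectList', () => {
  it('dispatches fetchProjects on mount', () => {
    const fetchProjects = jest.fn();
    const store = new Vuex.Store({
      state: { projects: [] },
      actions: { fetchProjects },
    });

    const wrapper = mount(ProjectList, { localVue, store });

    expect(fetchProjects).toHaveBeenCalled();
    wrapper.destroy();
  });
});
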
diff --git a/spec/frontend/boards/stores/actions_spec.js b/spec/frontend/boards/stores/actions_spec.js
index 0cae6456887..e4209cd5e55 100644
--- a/spec/frontend/boards/stores/actions_spec.js
+++ b/spec/frontend/boards/stores/actions_spec.js
@@ -9,19 +9,26 @@ import {
mockMilestone,
labels,
mockActiveIssue,
+ mockGroupProjects,
} from '../mock_data';
import actions, { gqlClient } from '~/boards/stores/actions';
import * as types from '~/boards/stores/mutation_types';
import { inactiveId } from '~/boards/constants';
import issueMoveListMutation from '~/boards/graphql/issue_move_list.mutation.graphql';
import destroyBoardListMutation from '~/boards/graphql/board_list_destroy.mutation.graphql';
+import issueCreateMutation from '~/boards/graphql/issue_create.mutation.graphql';
import updateAssignees from '~/vue_shared/components/sidebar/queries/updateAssignees.mutation.graphql';
-import { fullBoardId, formatListIssues, formatBoardLists } from '~/boards/boards_util';
+import {
+ fullBoardId,
+ formatListIssues,
+ formatBoardLists,
+ formatIssueInput,
+} from '~/boards/boards_util';
import createFlash from '~/flash';
jest.mock('~/flash');
-const expectNotImplemented = action => {
+const expectNotImplemented = (action) => {
it('is not implemented', () => {
expect(action).toThrow(new Error('Not implemented!'));
});
@@ -29,7 +36,7 @@ const expectNotImplemented = action => {
// We need this helper to make sure projectPath includes
// subgroups when the moveIssue action is called.
-const getProjectPath = path => path.split('#')[0];
+const getProjectPath = (path) => path.split('#')[0];
beforeEach(() => {
window.gon = { features: {} };
@@ -53,7 +60,7 @@ describe('setInitialBoardData', () => {
});
describe('setFilters', () => {
- it('should commit mutation SET_FILTERS', done => {
+ it('should commit mutation SET_FILTERS', (done) => {
const state = {
filters: {},
};
@@ -72,11 +79,11 @@ describe('setFilters', () => {
});
describe('performSearch', () => {
- it('should dispatch setFilters action', done => {
+ it('should dispatch setFilters action', (done) => {
testAction(actions.performSearch, {}, {}, [], [{ type: 'setFilters', payload: {} }], done);
});
- it('should dispatch setFilters, fetchLists and resetIssues action when graphqlBoardLists FF is on', done => {
+ it('should dispatch setFilters, fetchLists and resetIssues action when graphqlBoardLists FF is on', (done) => {
window.gon = { features: { graphqlBoardLists: true } };
testAction(
actions.performSearch,
@@ -90,7 +97,7 @@ describe('performSearch', () => {
});
describe('setActiveId', () => {
- it('should commit mutation SET_ACTIVE_ID', done => {
+ it('should commit mutation SET_ACTIVE_ID', (done) => {
const state = {
activeId: inactiveId,
};
@@ -108,10 +115,8 @@ describe('setActiveId', () => {
describe('fetchLists', () => {
const state = {
- endpoints: {
- fullPath: 'gitlab-org',
- boardId: 1,
- },
+ fullPath: 'gitlab-org',
+ boardId: '1',
filterParams: {},
boardType: 'group',
};
@@ -131,7 +136,7 @@ describe('fetchLists', () => {
const formattedLists = formatBoardLists(queryResponse.data.group.board.lists);
- it('should commit mutations RECEIVE_BOARD_LISTS_SUCCESS on success', done => {
+ it('should commit mutations RECEIVE_BOARD_LISTS_SUCCESS on success', (done) => {
jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
testAction(
@@ -149,7 +154,7 @@ describe('fetchLists', () => {
);
});
- it('dispatch createList action when backlog list does not exist and is not hidden', done => {
+ it('dispatch createList action when backlog list does not exist and is not hidden', (done) => {
queryResponse = {
data: {
group: {
@@ -181,7 +186,7 @@ describe('fetchLists', () => {
});
describe('createList', () => {
- it('should dispatch addList action when creating backlog list', done => {
+ it('should dispatch addList action when creating backlog list', (done) => {
const backlogList = {
id: 'gid://gitlab/List/1',
listType: 'backlog',
@@ -201,7 +206,8 @@ describe('createList', () => {
);
const state = {
- endpoints: { fullPath: 'gitlab-org', boardId: '1' },
+ fullPath: 'gitlab-org',
+ boardId: '1',
boardType: 'group',
disabled: false,
boardLists: [{ type: 'closed' }],
@@ -217,7 +223,7 @@ describe('createList', () => {
);
});
- it('should commit CREATE_LIST_FAILURE mutation when API returns an error', done => {
+ it('should commit CREATE_LIST_FAILURE mutation when API returns an error', (done) => {
jest.spyOn(gqlClient, 'mutate').mockReturnValue(
Promise.resolve({
data: {
@@ -230,7 +236,8 @@ describe('createList', () => {
);
const state = {
- endpoints: { fullPath: 'gitlab-org', boardId: '1' },
+ fullPath: 'gitlab-org',
+ boardId: '1',
boardType: 'group',
disabled: false,
boardLists: [{ type: 'closed' }],
@@ -248,14 +255,15 @@ describe('createList', () => {
});
describe('moveList', () => {
- it('should commit MOVE_LIST mutation and dispatch updateList action', done => {
+ it('should commit MOVE_LIST mutation and dispatch updateList action', (done) => {
const initialBoardListsState = {
'gid://gitlab/List/1': mockLists[0],
'gid://gitlab/List/2': mockLists[1],
};
const state = {
- endpoints: { fullPath: 'gitlab-org', boardId: '1' },
+ fullPath: 'gitlab-org',
+ boardId: '1',
boardType: 'group',
disabled: false,
boardLists: initialBoardListsState,
@@ -297,7 +305,8 @@ describe('moveList', () => {
};
const state = {
- endpoints: { fullPath: 'gitlab-org', boardId: '1' },
+ fullPath: 'gitlab-org',
+ boardId: '1',
boardType: 'group',
disabled: false,
boardLists: initialBoardListsState,
@@ -319,7 +328,7 @@ describe('moveList', () => {
});
describe('updateList', () => {
- it('should commit UPDATE_LIST_FAILURE mutation when API returns an error', done => {
+ it('should commit UPDATE_LIST_FAILURE mutation when API returns an error', (done) => {
jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
data: {
updateBoardList: {
@@ -330,7 +339,8 @@ describe('updateList', () => {
});
const state = {
- endpoints: { fullPath: 'gitlab-org', boardId: '1' },
+ fullPath: 'gitlab-org',
+ boardId: '1',
boardType: 'group',
disabled: false,
boardLists: [{ type: 'closed' }],
@@ -429,15 +439,13 @@ describe('fetchIssuesForList', () => {
const listId = mockLists[0].id;
const state = {
- endpoints: {
- fullPath: 'gitlab-org',
- boardId: 1,
- },
+ fullPath: 'gitlab-org',
+ boardId: '1',
filterParams: {},
boardType: 'group',
};
- const mockIssuesNodes = mockIssues.map(issue => ({ node: issue }));
+ const mockIssuesNodes = mockIssues.map((issue) => ({ node: issue }));
const pageInfo = {
endCursor: '',
@@ -470,7 +478,7 @@ describe('fetchIssuesForList', () => {
[listId]: pageInfo,
};
- it('should commit mutations REQUEST_ISSUES_FOR_LIST and RECEIVE_ISSUES_FOR_LIST_SUCCESS on success', done => {
+ it('should commit mutations REQUEST_ISSUES_FOR_LIST and RECEIVE_ISSUES_FOR_LIST_SUCCESS on success', (done) => {
jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
testAction(
@@ -492,7 +500,7 @@ describe('fetchIssuesForList', () => {
);
});
- it('should commit mutations REQUEST_ISSUES_FOR_LIST and RECEIVE_ISSUES_FOR_LIST_FAILURE on failure', done => {
+ it('should commit mutations REQUEST_ISSUES_FOR_LIST and RECEIVE_ISSUES_FOR_LIST_FAILURE on failure', (done) => {
jest.spyOn(gqlClient, 'query').mockResolvedValue(Promise.reject());
testAction(
@@ -525,12 +533,13 @@ describe('moveIssue', () => {
};
const issues = {
- '436': mockIssue,
- '437': mockIssue2,
+ 436: mockIssue,
+ 437: mockIssue2,
};
const state = {
- endpoints: { fullPath: 'gitlab-org', boardId: '1' },
+ fullPath: 'gitlab-org',
+ boardId: '1',
boardType: 'group',
disabled: false,
boardLists: mockLists,
@@ -538,7 +547,7 @@ describe('moveIssue', () => {
issues,
};
- it('should commit MOVE_ISSUE mutation and MOVE_ISSUE_SUCCESS mutation when successful', done => {
+ it('should commit MOVE_ISSUE mutation and MOVE_ISSUE_SUCCESS mutation when successful', (done) => {
jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
data: {
issueMoveList: {
@@ -582,7 +591,7 @@ describe('moveIssue', () => {
mutation: issueMoveListMutation,
variables: {
projectPath: getProjectPath(mockIssue.referencePath),
- boardId: fullBoardId(state.endpoints.boardId),
+ boardId: fullBoardId(state.boardId),
iid: mockIssue.iid,
fromListId: 1,
toListId: 2,
@@ -613,7 +622,7 @@ describe('moveIssue', () => {
expect(gqlClient.mutate).toHaveBeenCalledWith(mutationVariables);
});
- it('should commit MOVE_ISSUE mutation and MOVE_ISSUE_FAILURE mutation when unsuccessful', done => {
+ it('should commit MOVE_ISSUE mutation and MOVE_ISSUE_FAILURE mutation when unsuccessful', (done) => {
jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
data: {
issueMoveList: {
@@ -684,7 +693,7 @@ describe('setAssignees', () => {
});
});
- it('calls the correct mutation with the correct values', done => {
+ it('calls the correct mutation with the correct values', (done) => {
testAction(
actions.setAssignees,
{},
@@ -724,8 +733,27 @@ describe('setAssignees', () => {
describe('createNewIssue', () => {
const state = {
boardType: 'group',
- endpoints: {
- fullPath: 'gitlab-org/gitlab',
+ fullPath: 'gitlab-org/gitlab',
+ boardConfig: {
+ labelIds: [],
+ assigneeId: null,
+ milestoneId: -1,
+ },
+ };
+
+ const stateWithBoardConfig = {
+ boardConfig: {
+ labels: [
+ {
+ id: 5,
+ title: 'Test',
+ color: '#ff0000',
+ description: 'testing;',
+ textColor: 'white',
+ },
+ ],
+ assigneeId: 2,
+ milestoneId: 3,
},
};
@@ -743,11 +771,59 @@ describe('createNewIssue', () => {
expect(result).toEqual(mockIssue);
});
- it('should commit CREATE_ISSUE_FAILURE mutation when API returns an error', done => {
+ it('should add board scope to the issue being created', async () => {
jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
data: {
createIssue: {
- issue: {},
+ issue: mockIssue,
+ errors: [],
+ },
+ },
+ });
+
+ await actions.createNewIssue({ state: stateWithBoardConfig }, mockIssue);
+ expect(gqlClient.mutate).toHaveBeenCalledWith({
+ mutation: issueCreateMutation,
+ variables: {
+ input: formatIssueInput(mockIssue, stateWithBoardConfig.boardConfig),
+ },
+ });
+ });
+
+ it('should add board scope by merging attributes to the issue being created', async () => {
+ const issue = {
+ ...mockIssue,
+ assigneeIds: ['gid://gitlab/User/1'],
+ labelIds: ['gid://gitlab/GroupLabel/4'],
+ };
+
+ jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
+ data: {
+ createIssue: {
+ issue,
+ errors: [],
+ },
+ },
+ });
+
+ const payload = formatIssueInput(issue, stateWithBoardConfig.boardConfig);
+
+ await actions.createNewIssue({ state: stateWithBoardConfig }, issue);
+ expect(gqlClient.mutate).toHaveBeenCalledWith({
+ mutation: issueCreateMutation,
+ variables: {
+ input: formatIssueInput(issue, stateWithBoardConfig.boardConfig),
+ },
+ });
+ expect(payload.labelIds).toEqual(['gid://gitlab/GroupLabel/4', 'gid://gitlab/GroupLabel/5']);
+ expect(payload.assigneeIds).toEqual(['gid://gitlab/User/1', 'gid://gitlab/User/2']);
+ });
+
+ it('should commit CREATE_ISSUE_FAILURE mutation when API returns an error', (done) => {
+ jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
+ data: {
+ createIssue: {
+ issue: mockIssue,
errors: [{ foo: 'bar' }],
},
},
@@ -767,7 +843,7 @@ describe('createNewIssue', () => {
});
describe('addListIssue', () => {
- it('should commit ADD_ISSUE_TO_LIST mutation', done => {
+ it('should commit ADD_ISSUE_TO_LIST mutation', (done) => {
const payload = {
list: mockLists[0],
issue: mockIssue,
@@ -788,14 +864,14 @@ describe('addListIssue', () => {
describe('setActiveIssueLabels', () => {
const state = { issues: { [mockIssue.id]: mockIssue } };
const getters = { activeIssue: mockIssue };
- const testLabelIds = labels.map(label => label.id);
+ const testLabelIds = labels.map((label) => label.id);
const input = {
addLabelIds: testLabelIds,
removeLabelIds: [],
projectPath: 'h/b',
};
- it('should assign labels on success', done => {
+ it('should assign labels on success', (done) => {
jest
.spyOn(gqlClient, 'mutate')
.mockResolvedValue({ data: { updateIssue: { issue: { labels: { nodes: labels } } } } });
@@ -839,7 +915,7 @@ describe('setActiveIssueDueDate', () => {
projectPath: 'h/b',
};
- it('should commit due date after setting the issue', done => {
+ it('should commit due date after setting the issue', (done) => {
jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
data: {
updateIssue: {
@@ -890,7 +966,7 @@ describe('setActiveIssueSubscribed', () => {
projectPath: 'gitlab-org/gitlab-test',
};
- it('should commit subscribed status', done => {
+ it('should commit subscribed status', (done) => {
jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
data: {
issueSetSubscription: {
@@ -944,7 +1020,7 @@ describe('setActiveIssueMilestone', () => {
projectPath: 'h/b',
};
- it('should commit milestone after setting the issue', done => {
+ it('should commit milestone after setting the issue', (done) => {
jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
data: {
updateIssue: {
@@ -986,6 +1062,145 @@ describe('setActiveIssueMilestone', () => {
});
});
+describe('setActiveIssueTitle', () => {
+ const state = { issues: { [mockIssue.id]: mockIssue } };
+ const getters = { activeIssue: mockIssue };
+ const testTitle = 'Test Title';
+ const input = {
+ title: testTitle,
+ projectPath: 'h/b',
+ };
+
+ it('should commit title after setting the issue', (done) => {
+ jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
+ data: {
+ updateIssue: {
+ issue: {
+ title: testTitle,
+ },
+ errors: [],
+ },
+ },
+ });
+
+ const payload = {
+ issueId: getters.activeIssue.id,
+ prop: 'title',
+ value: testTitle,
+ };
+
+ testAction(
+ actions.setActiveIssueTitle,
+ input,
+ { ...state, ...getters },
+ [
+ {
+ type: types.UPDATE_ISSUE_BY_ID,
+ payload,
+ },
+ ],
+ [],
+ done,
+ );
+ });
+
+ it('throws error if fails', async () => {
+ jest
+ .spyOn(gqlClient, 'mutate')
+ .mockResolvedValue({ data: { updateIssue: { errors: ['failed mutation'] } } });
+
+ await expect(actions.setActiveIssueTitle({ getters }, input)).rejects.toThrow(Error);
+ });
+});
+
+describe('fetchGroupProjects', () => {
+ const state = {
+ fullPath: 'gitlab-org',
+ };
+
+ const pageInfo = {
+ endCursor: '',
+ hasNextPage: false,
+ };
+
+ const queryResponse = {
+ data: {
+ group: {
+ projects: {
+ nodes: mockGroupProjects,
+ pageInfo: {
+ endCursor: '',
+ hasNextPage: false,
+ },
+ },
+ },
+ },
+ };
+
+ it('should commit mutations REQUEST_GROUP_PROJECTS and RECEIVE_GROUP_PROJECTS_SUCCESS on success', (done) => {
+ jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
+
+ testAction(
+ actions.fetchGroupProjects,
+ {},
+ state,
+ [
+ {
+ type: types.REQUEST_GROUP_PROJECTS,
+ payload: false,
+ },
+ {
+ type: types.RECEIVE_GROUP_PROJECTS_SUCCESS,
+ payload: { projects: mockGroupProjects, pageInfo, fetchNext: false },
+ },
+ ],
+ [],
+ done,
+ );
+ });
+
+ it('should commit mutations REQUEST_GROUP_PROJECTS and RECEIVE_GROUP_PROJECTS_FAILURE on failure', (done) => {
+ jest.spyOn(gqlClient, 'query').mockRejectedValue();
+
+ testAction(
+ actions.fetchGroupProjects,
+ {},
+ state,
+ [
+ {
+ type: types.REQUEST_GROUP_PROJECTS,
+ payload: false,
+ },
+ {
+ type: types.RECEIVE_GROUP_PROJECTS_FAILURE,
+ },
+ ],
+ [],
+ done,
+ );
+ });
+});
+
+describe('setSelectedProject', () => {
+ it('should commit mutation SET_SELECTED_PROJECT', (done) => {
+ const project = mockGroupProjects[0];
+
+ testAction(
+ actions.setSelectedProject,
+ project,
+ {},
+ [
+ {
+ type: types.SET_SELECTED_PROJECT,
+ payload: project,
+ },
+ ],
+ [],
+ done,
+ );
+ });
+});
+
describe('fetchBacklog', () => {
expectNotImplemented(actions.fetchBacklog);
});
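
Note on the action specs above: they rely on GitLab's testAction helper (imported outside these hunks) to compare committed mutations and dispatched actions against expected lists. An equivalent hand-rolled check, shown here as a sketch with a hypothetical action, is simply to pass a jest.fn() in place of commit:

// Hypothetical action under test; the specs above use the testAction helper instead.
const actions = {
  setFilters({ commit }, filters) {
    commit('SET_FILTERS', filters);
  },
};

describe('setFilters (sketch)', () => {
  it('commits SET_FILTERS with the given payload', () => {
    const commit = jest.fn();

    actions.setFilters({ commit }, { search: 'foo' });

    expect(commit).toHaveBeenCalledWith('SET_FILTERS', { search: 'foo' });
  });
});
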
diff --git a/spec/frontend/boards/stores/getters_spec.js b/spec/frontend/boards/stores/getters_spec.js
index 6ceb8867d1f..44b41b5667d 100644
--- a/spec/frontend/boards/stores/getters_spec.js
+++ b/spec/frontend/boards/stores/getters_spec.js
@@ -39,7 +39,7 @@ describe('Boards - Getters', () => {
});
describe('getIssueById', () => {
- const state = { issues: { '1': 'issue' } };
+ const state = { issues: { 1: 'issue' } };
it.each`
id | expected
@@ -56,7 +56,7 @@ describe('Boards - Getters', () => {
${'1'} | ${'issue'}
${''} | ${{}}
`('returns $expected when $id is passed to state', ({ id, expected }) => {
- const state = { issues: { '1': 'issue' }, activeId: id };
+ const state = { issues: { 1: 'issue' }, activeId: id };
expect(getters.activeIssue(state)).toEqual(expected);
});
@@ -84,7 +84,7 @@ describe('Boards - Getters', () => {
issues,
};
it('returns issues for a given listId', () => {
- const getIssueById = issueId => [mockIssue, mockIssue2].find(({ id }) => id === issueId);
+ const getIssueById = (issueId) => [mockIssue, mockIssue2].find(({ id }) => id === issueId);
expect(getters.getIssuesByList(boardsState, { getIssueById })('gid://gitlab/List/2')).toEqual(
mockIssues,
diff --git a/spec/frontend/boards/stores/mutations_spec.js b/spec/frontend/boards/stores/mutations_spec.js
index d93119ede3d..c5fe0e22c3c 100644
--- a/spec/frontend/boards/stores/mutations_spec.js
+++ b/spec/frontend/boards/stores/mutations_spec.js
@@ -1,9 +1,9 @@
import mutations from '~/boards/stores/mutations';
import * as types from '~/boards/stores/mutation_types';
import defaultState from '~/boards/stores/state';
-import { mockLists, rawIssue, mockIssue, mockIssue2 } from '../mock_data';
+import { mockLists, rawIssue, mockIssue, mockIssue2, mockGroupProjects } from '../mock_data';
-const expectNotImplemented = action => {
+const expectNotImplemented = (action) => {
it('is not implemented', () => {
expect(action).toThrow(new Error('Not implemented!'));
});
@@ -23,14 +23,8 @@ describe('Board Store Mutations', () => {
describe('SET_INITIAL_BOARD_DATA', () => {
it('Should set initial Boards data to state', () => {
- const endpoints = {
- boardsEndpoint: '/boards/',
- recentBoardsEndpoint: '/boards/',
- listsEndpoint: '/boards/lists',
- bulkUpdatePath: '/boards/bulkUpdate',
- boardId: 1,
- fullPath: 'gitlab-org',
- };
+ const boardId = 1;
+ const fullPath = 'gitlab-org';
const boardType = 'group';
const disabled = false;
const boardConfig = {
@@ -38,13 +32,15 @@ describe('Board Store Mutations', () => {
};
mutations[types.SET_INITIAL_BOARD_DATA](state, {
- ...endpoints,
+ boardId,
+ fullPath,
boardType,
disabled,
boardConfig,
});
- expect(state.endpoints).toEqual(endpoints);
+ expect(state.boardId).toEqual(boardId);
+ expect(state.fullPath).toEqual(fullPath);
expect(state.boardType).toEqual(boardType);
expect(state.disabled).toEqual(disabled);
expect(state.boardConfig).toEqual(boardConfig);
@@ -240,7 +236,7 @@ describe('Board Store Mutations', () => {
'gid://gitlab/List/1': [mockIssue.id],
};
const issues = {
- '1': mockIssue,
+ 1: mockIssue,
};
state = {
@@ -349,8 +345,8 @@ describe('Board Store Mutations', () => {
};
const issues = {
- '1': mockIssue,
- '2': mockIssue2,
+ 1: mockIssue,
+ 2: mockIssue2,
};
state = {
@@ -378,7 +374,7 @@ describe('Board Store Mutations', () => {
describe('MOVE_ISSUE_SUCCESS', () => {
it('updates issue in issues state', () => {
const issues = {
- '436': { id: rawIssue.id },
+ 436: { id: rawIssue.id },
};
state = {
@@ -390,7 +386,7 @@ describe('Board Store Mutations', () => {
issue: rawIssue,
});
- expect(state.issues).toEqual({ '436': { ...mockIssue, id: 436 } });
+ expect(state.issues).toEqual({ 436: { ...mockIssue, id: 436 } });
});
});
@@ -450,7 +446,7 @@ describe('Board Store Mutations', () => {
'gid://gitlab/List/1': [mockIssue.id],
};
const issues = {
- '1': mockIssue,
+ 1: mockIssue,
};
state = {
@@ -476,8 +472,8 @@ describe('Board Store Mutations', () => {
'gid://gitlab/List/1': [mockIssue.id, mockIssue2.id],
};
const issues = {
- '1': mockIssue,
- '2': mockIssue2,
+ 1: mockIssue,
+ 2: mockIssue2,
};
state = {
@@ -500,8 +496,8 @@ describe('Board Store Mutations', () => {
'gid://gitlab/List/1': [mockIssue.id, mockIssue2.id],
};
const issues = {
- '1': mockIssue,
- '2': mockIssue2,
+ 1: mockIssue,
+ 2: mockIssue2,
};
state = {
@@ -533,4 +529,64 @@ describe('Board Store Mutations', () => {
describe('TOGGLE_EMPTY_STATE', () => {
expectNotImplemented(mutations.TOGGLE_EMPTY_STATE);
});
+
+ describe('REQUEST_GROUP_PROJECTS', () => {
+ it('Should set isLoading in groupProjectsFlags to true in state when fetchNext is false', () => {
+ mutations[types.REQUEST_GROUP_PROJECTS](state, false);
+
+ expect(state.groupProjectsFlags.isLoading).toBe(true);
+ });
+
+ it('Should set isLoadingMore in groupProjectsFlags to true in state when fetchNext is true', () => {
+ mutations[types.REQUEST_GROUP_PROJECTS](state, true);
+
+ expect(state.groupProjectsFlags.isLoadingMore).toBe(true);
+ });
+ });
+
+ describe('RECEIVE_GROUP_PROJECTS_SUCCESS', () => {
+ it('Should set groupProjects and pageInfo to state and isLoading in groupProjectsFlags to false', () => {
+ mutations[types.RECEIVE_GROUP_PROJECTS_SUCCESS](state, {
+ projects: mockGroupProjects,
+ pageInfo: { hasNextPage: false },
+ });
+
+ expect(state.groupProjects).toEqual(mockGroupProjects);
+ expect(state.groupProjectsFlags.isLoading).toBe(false);
+ expect(state.groupProjectsFlags.pageInfo).toEqual({ hasNextPage: false });
+ });
+
+ it('Should merge projects in groupProjects in state when fetchNext is true', () => {
+ state = {
+ ...state,
+ groupProjects: [mockGroupProjects[0]],
+ };
+
+ mutations[types.RECEIVE_GROUP_PROJECTS_SUCCESS](state, {
+ projects: [mockGroupProjects[1]],
+ fetchNext: true,
+ });
+
+ expect(state.groupProjects).toEqual(mockGroupProjects);
+ });
+ });
+
+ describe('RECEIVE_GROUP_PROJECTS_FAILURE', () => {
+ it('Should set error in state and isLoading in groupProjectsFlags to false', () => {
+ mutations[types.RECEIVE_GROUP_PROJECTS_FAILURE](state);
+
+ expect(state.error).toEqual(
+ 'An error occurred while fetching group projects. Please try again.',
+ );
+ expect(state.groupProjectsFlags.isLoading).toBe(false);
+ });
+ });
+
+ describe('SET_SELECTED_PROJECT', () => {
+ it('Should set selectedProject to state', () => {
+ mutations[types.SET_SELECTED_PROJECT](state, mockGroupProjects[0]);
+
+ expect(state.selectedProject).toEqual(mockGroupProjects[0]);
+ });
+ });
});
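
Note on the mutation specs above: mutations are exercised as plain functions applied to a state object, with no store instance involved. A self-contained sketch of the same pattern; the mutation body and the shape of groupProjectsFlags are assumptions standing in for ~/boards/stores/mutations:

const REQUEST_GROUP_PROJECTS = 'REQUEST_GROUP_PROJECTS';

// Hand-written stand-in for the real mutation, kept only for illustration.
const mutations = {
  [REQUEST_GROUP_PROJECTS](state, fetchNext) {
    if (fetchNext) {
      state.groupProjectsFlags.isLoadingMore = true;
    } else {
      state.groupProjectsFlags.isLoading = true;
    }
  },
};

describe('REQUEST_GROUP_PROJECTS (sketch)', () => {
  it('sets isLoadingMore when fetchNext is true', () => {
    const state = { groupProjectsFlags: { isLoading: false, isLoadingMore: false } };

    mutations[REQUEST_GROUP_PROJECTS](state, true);

    expect(state.groupProjectsFlags.isLoadingMore).toBe(true);
  });
});
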
diff --git a/spec/frontend/branches/branches_delete_modal_spec.js b/spec/frontend/branches/branches_delete_modal_spec.js
index 21608feafc8..8b10cca7a11 100644
--- a/spec/frontend/branches/branches_delete_modal_spec.js
+++ b/spec/frontend/branches/branches_delete_modal_spec.js
@@ -15,7 +15,7 @@ describe('branches delete modal', () => {
</div>
`);
$deleteButton = $('.js-delete-branch');
- submitSpy = jest.fn(event => event.preventDefault());
+ submitSpy = jest.fn((event) => event.preventDefault());
$('#modal-delete-branch form').on('submit', submitSpy);
// eslint-disable-next-line no-new
new DeleteModal();
diff --git a/spec/frontend/ci_settings_pipeline_triggers/components/triggers_list_spec.js b/spec/frontend/ci_settings_pipeline_triggers/components/triggers_list_spec.js
index e07afb5d736..30aa634fc81 100644
--- a/spec/frontend/ci_settings_pipeline_triggers/components/triggers_list_spec.js
+++ b/spec/frontend/ci_settings_pipeline_triggers/components/triggers_list_spec.js
@@ -16,17 +16,14 @@ describe('TriggersList', () => {
};
const findTable = () => wrapper.find(GlTable);
- const findHeaderAt = i => wrapper.findAll('thead th').at(i);
+ const findHeaderAt = (i) => wrapper.findAll('thead th').at(i);
const findRows = () => wrapper.findAll('tbody tr');
- const findRowAt = i => findRows().at(i);
- const findCell = (i, col) =>
- findRowAt(i)
- .findAll('td')
- .at(col);
- const findClipboardBtn = i => findCell(i, 0).find(ClipboardButton);
- const findInvalidBadge = i => findCell(i, 0).find(GlBadge);
- const findEditBtn = i => findRowAt(i).find('[data-testid="edit-btn"]');
- const findRevokeBtn = i => findRowAt(i).find('[data-testid="trigger_revoke_button"]');
+ const findRowAt = (i) => findRows().at(i);
+ const findCell = (i, col) => findRowAt(i).findAll('td').at(col);
+ const findClipboardBtn = (i) => findCell(i, 0).find(ClipboardButton);
+ const findInvalidBadge = (i) => findCell(i, 0).find(GlBadge);
+ const findEditBtn = (i) => findRowAt(i).find('[data-testid="edit-btn"]');
+ const findRevokeBtn = (i) => findRowAt(i).find('[data-testid="trigger_revoke_button"]');
beforeEach(() => {
createComponent();
@@ -67,11 +64,7 @@ describe('TriggersList', () => {
it('displays a time ago label when last used', () => {
expect(findCell(0, 3).text()).toBe('Never');
- expect(
- findCell(1, 3)
- .find(TimeAgoTooltip)
- .props('time'),
- ).toBe(triggers[1].lastUsed);
+ expect(findCell(1, 3).find(TimeAgoTooltip).props('time')).toBe(triggers[1].lastUsed);
});
it('displays actions in a row', () => {
diff --git a/spec/frontend/ci_variable_list/ci_variable_list/ci_variable_list_spec.js b/spec/frontend/ci_variable_list/ci_variable_list/ci_variable_list_spec.js
index 4a2e56c570d..ad1bdec1735 100644
--- a/spec/frontend/ci_variable_list/ci_variable_list/ci_variable_list_spec.js
+++ b/spec/frontend/ci_variable_list/ci_variable_list/ci_variable_list_spec.js
@@ -31,10 +31,7 @@ describe('VariableList', () => {
it('should add another row when editing the last rows key input', () => {
const $row = $wrapper.find('.js-row');
- $row
- .find('.js-ci-variable-input-key')
- .val('foo')
- .trigger('input');
+ $row.find('.js-ci-variable-input-key').val('foo').trigger('input');
expect($wrapper.find('.js-row').length).toBe(2);
@@ -46,10 +43,7 @@ describe('VariableList', () => {
it('should add another row when editing the last rows value textarea', () => {
const $row = $wrapper.find('.js-row');
- $row
- .find('.js-ci-variable-input-value')
- .val('foo')
- .trigger('input');
+ $row.find('.js-ci-variable-input-value').val('foo').trigger('input');
expect($wrapper.find('.js-row').length).toBe(2);
@@ -61,18 +55,11 @@ describe('VariableList', () => {
it('should remove empty row after blurring', () => {
const $row = $wrapper.find('.js-row');
- $row
- .find('.js-ci-variable-input-key')
- .val('foo')
- .trigger('input');
+ $row.find('.js-ci-variable-input-key').val('foo').trigger('input');
expect($wrapper.find('.js-row').length).toBe(2);
- $row
- .find('.js-ci-variable-input-key')
- .val('')
- .trigger('input')
- .trigger('blur');
+ $row.find('.js-ci-variable-input-key').val('').trigger('input').trigger('blur');
expect($wrapper.find('.js-row').length).toBe(1);
});
diff --git a/spec/frontend/ci_variable_list/components/ci_environments_dropdown_spec.js b/spec/frontend/ci_variable_list/components/ci_environments_dropdown_spec.js
index 7bcd558c60f..faa88394447 100644
--- a/spec/frontend/ci_variable_list/components/ci_environments_dropdown_spec.js
+++ b/spec/frontend/ci_variable_list/components/ci_environments_dropdown_spec.js
@@ -10,7 +10,7 @@ describe('Ci environments dropdown', () => {
let wrapper;
let store;
- const createComponent = term => {
+ const createComponent = (term) => {
store = new Vuex.Store({
getters: {
joinedEnvironments: () => ['dev', 'prod', 'staging'],
@@ -27,8 +27,8 @@ describe('Ci environments dropdown', () => {
};
const findAllDropdownItems = () => wrapper.findAll(GlDropdownItem);
- const findDropdownItemByIndex = index => wrapper.findAll(GlDropdownItem).at(index);
- const findActiveIconByIndex = index => findDropdownItemByIndex(index).find(GlIcon);
+ const findDropdownItemByIndex = (index) => wrapper.findAll(GlDropdownItem).at(index);
+ const findActiveIconByIndex = (index) => findDropdownItemByIndex(index).find(GlIcon);
afterEach(() => {
wrapper.destroy();
@@ -78,7 +78,7 @@ describe('Ci environments dropdown', () => {
});
it('should not display create button', () => {
- const environments = findAllDropdownItems().filter(env => env.text().startsWith('Create'));
+ const environments = findAllDropdownItems().filter((env) => env.text().startsWith('Create'));
expect(environments).toHaveLength(0);
expect(findAllDropdownItems()).toHaveLength(1);
});
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js b/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
index 5c2d096418d..b2e51029a06 100644
--- a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
+++ b/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
@@ -17,7 +17,7 @@ describe('Ci variable modal', () => {
const createComponent = (method, options = {}) => {
store = createStore();
wrapper = method(CiVariableModal, {
- attachToDocument: true,
+ attachTo: document.body,
stubs: {
GlModal: ModalStub,
},
@@ -31,11 +31,11 @@ describe('Ci variable modal', () => {
const findAddorUpdateButton = () =>
findModal()
.findAll(GlButton)
- .wrappers.find(button => button.props('variant') === 'success');
+ .wrappers.find((button) => button.props('variant') === 'success');
const deleteVariableButton = () =>
findModal()
.findAll(GlButton)
- .wrappers.find(button => button.props('variant') === 'danger');
+ .wrappers.find((button) => button.props('variant') === 'danger');
afterEach(() => {
wrapper.destroy();
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_settings_spec.js b/spec/frontend/ci_variable_list/components/ci_variable_settings_spec.js
index 7dcd82eac5e..12449fc7615 100644
--- a/spec/frontend/ci_variable_list/components/ci_variable_settings_spec.js
+++ b/spec/frontend/ci_variable_list/components/ci_variable_settings_spec.js
@@ -11,7 +11,7 @@ describe('Ci variable table', () => {
let store;
let isGroup;
- const createComponent = groupState => {
+ const createComponent = (groupState) => {
store = createStore();
store.state.isGroup = groupState;
jest.spyOn(store, 'dispatch').mockImplementation();
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_table_spec.js b/spec/frontend/ci_variable_list/components/ci_variable_table_spec.js
index 36aeffe7798..fbc34528d4d 100644
--- a/spec/frontend/ci_variable_list/components/ci_variable_table_spec.js
+++ b/spec/frontend/ci_variable_list/components/ci_variable_table_spec.js
@@ -17,7 +17,7 @@ describe('Ci variable table', () => {
store.state.isGroup = true;
jest.spyOn(store, 'dispatch').mockImplementation();
wrapper = mount(CiVariableTable, {
- attachToDocument: true,
+ attachTo: document.body,
localVue,
store,
});
diff --git a/spec/frontend/ci_variable_list/store/actions_spec.js b/spec/frontend/ci_variable_list/store/actions_spec.js
index 4b89e467df0..075e5829305 100644
--- a/spec/frontend/ci_variable_list/store/actions_spec.js
+++ b/spec/frontend/ci_variable_list/store/actions_spec.js
@@ -86,7 +86,7 @@ describe('CI variable list store actions', () => {
});
describe('deleteVariable', () => {
- it('dispatch correct actions on successful deleted variable', done => {
+ it('dispatch correct actions on successful deleted variable', (done) => {
mock.onPatch(state.endpoint).reply(200);
testAction(
@@ -105,7 +105,7 @@ describe('CI variable list store actions', () => {
);
});
- it('should show flash error and set error in state on delete failure', done => {
+ it('should show flash error and set error in state on delete failure', (done) => {
mock.onPatch(state.endpoint).reply(500, '');
testAction(
@@ -129,7 +129,7 @@ describe('CI variable list store actions', () => {
});
describe('updateVariable', () => {
- it('dispatch correct actions on successful updated variable', done => {
+ it('dispatch correct actions on successful updated variable', (done) => {
mock.onPatch(state.endpoint).reply(200);
testAction(
@@ -148,7 +148,7 @@ describe('CI variable list store actions', () => {
);
});
- it('should show flash error and set error in state on update failure', done => {
+ it('should show flash error and set error in state on update failure', (done) => {
mock.onPatch(state.endpoint).reply(500, '');
testAction(
@@ -172,7 +172,7 @@ describe('CI variable list store actions', () => {
});
describe('addVariable', () => {
- it('dispatch correct actions on successful added variable', done => {
+ it('dispatch correct actions on successful added variable', (done) => {
mock.onPatch(state.endpoint).reply(200);
testAction(
@@ -191,7 +191,7 @@ describe('CI variable list store actions', () => {
);
});
- it('should show flash error and set error in state on add failure', done => {
+ it('should show flash error and set error in state on add failure', (done) => {
mock.onPatch(state.endpoint).reply(500, '');
testAction(
@@ -215,7 +215,7 @@ describe('CI variable list store actions', () => {
});
describe('fetchVariables', () => {
- it('dispatch correct actions on fetchVariables', done => {
+ it('dispatch correct actions on fetchVariables', (done) => {
mock.onGet(state.endpoint).reply(200, { variables: mockData.mockVariables });
testAction(
@@ -236,7 +236,7 @@ describe('CI variable list store actions', () => {
);
});
- it('should show flash error and set error in state on fetch variables failure', done => {
+ it('should show flash error and set error in state on fetch variables failure', (done) => {
mock.onGet(state.endpoint).reply(500);
testAction(actions.fetchVariables, {}, state, [], [{ type: 'requestVariables' }], () => {
@@ -247,7 +247,7 @@ describe('CI variable list store actions', () => {
});
describe('fetchEnvironments', () => {
- it('dispatch correct actions on fetchEnvironments', done => {
+ it('dispatch correct actions on fetchEnvironments', (done) => {
Api.environments = jest.fn().mockResolvedValue({ data: mockData.mockEnvironments });
testAction(
@@ -268,7 +268,7 @@ describe('CI variable list store actions', () => {
);
});
- it('should show flash error and set error in state on fetch environments failure', done => {
+ it('should show flash error and set error in state on fetch environments failure', (done) => {
Api.environments = jest.fn().mockRejectedValue();
testAction(
diff --git a/spec/frontend/clusters/clusters_bundle_spec.js b/spec/frontend/clusters/clusters_bundle_spec.js
index d3277cdb7cc..cd0eda2ab49 100644
--- a/spec/frontend/clusters/clusters_bundle_spec.js
+++ b/spec/frontend/clusters/clusters_bundle_spec.js
@@ -279,7 +279,7 @@ describe('Clusters', () => {
});
describe('uninstallApplication', () => {
- it.each(APPLICATIONS)('tries to uninstall %s', applicationId => {
+ it.each(APPLICATIONS)('tries to uninstall %s', (applicationId) => {
jest.spyOn(cluster.service, 'uninstallApplication').mockResolvedValueOnce();
cluster.store.state.applications[applicationId].status = INSTALLED;
diff --git a/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap b/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
index 6f28573c808..ee4ec4636ea 100644
--- a/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
+++ b/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
@@ -41,109 +41,119 @@ exports[`Remove cluster confirmation modal renders splitbutton with modal includ
role="menu"
tabindex="-1"
>
- <!---->
-
- <li
- class="gl-new-dropdown-item"
- role="presentation"
+ <div
+ class="gl-new-dropdown-inner"
>
- <button
- class="dropdown-item"
- role="menuitem"
- type="button"
+ <!---->
+
+ <div
+ class="gl-new-dropdown-contents"
>
- <svg
- aria-hidden="true"
- class="gl-icon s16 gl-new-dropdown-item-check-icon"
- data-testid="mobile-issue-close-icon"
- >
- <use
- href="#mobile-issue-close"
- />
- </svg>
-
- <!---->
-
- <!---->
-
- <div
- class="gl-new-dropdown-item-text-wrapper"
+ <li
+ class="gl-new-dropdown-item"
+ role="presentation"
>
- <p
- class="gl-new-dropdown-item-text-primary"
+ <button
+ class="dropdown-item"
+ role="menuitem"
+ type="button"
>
- <strong>
- Remove integration and resources
- </strong>
+ <svg
+ aria-hidden="true"
+ class="gl-icon s16 gl-new-dropdown-item-check-icon"
+ data-testid="mobile-issue-close-icon"
+ >
+ <use
+ href="#mobile-issue-close"
+ />
+ </svg>
+
+ <!---->
+
+ <!---->
- <div>
- Deletes all GitLab resources attached to this cluster during removal
+ <div
+ class="gl-new-dropdown-item-text-wrapper"
+ >
+ <p
+ class="gl-new-dropdown-item-text-primary"
+ >
+ <strong>
+ Remove integration and resources
+ </strong>
+
+ <div>
+ Deletes all GitLab resources attached to this cluster during removal
+ </div>
+ </p>
+
+ <!---->
</div>
- </p>
-
- <!---->
- </div>
+
+ <!---->
+ </button>
+ </li>
- <!---->
- </button>
- </li>
-
- <li
- class="gl-new-dropdown-divider"
- role="presentation"
- >
- <hr
- aria-orientation="horizontal"
- class="dropdown-divider"
- role="separator"
- />
- </li>
- <li
- class="gl-new-dropdown-item"
- role="presentation"
- >
- <button
- class="dropdown-item"
- role="menuitem"
- type="button"
- >
- <svg
- aria-hidden="true"
- class="gl-icon s16 gl-new-dropdown-item-check-icon gl-visibility-hidden"
- data-testid="mobile-issue-close-icon"
+ <li
+ class="gl-new-dropdown-divider"
+ role="presentation"
>
- <use
- href="#mobile-issue-close"
+ <hr
+ aria-orientation="horizontal"
+ class="dropdown-divider"
+ role="separator"
/>
- </svg>
-
- <!---->
-
- <!---->
-
- <div
- class="gl-new-dropdown-item-text-wrapper"
+ </li>
+ <li
+ class="gl-new-dropdown-item"
+ role="presentation"
>
- <p
- class="gl-new-dropdown-item-text-primary"
+ <button
+ class="dropdown-item"
+ role="menuitem"
+ type="button"
>
- <strong>
- Remove integration
- </strong>
+ <svg
+ aria-hidden="true"
+ class="gl-icon s16 gl-new-dropdown-item-check-icon gl-visibility-hidden"
+ data-testid="mobile-issue-close-icon"
+ >
+ <use
+ href="#mobile-issue-close"
+ />
+ </svg>
+
+ <!---->
+
+ <!---->
- <div>
- Removes cluster from project but keeps associated resources
+ <div
+ class="gl-new-dropdown-item-text-wrapper"
+ >
+ <p
+ class="gl-new-dropdown-item-text-primary"
+ >
+ <strong>
+ Remove integration
+ </strong>
+
+ <div>
+ Removes cluster from project but keeps associated resources
+ </div>
+ </p>
+
+ <!---->
</div>
- </p>
-
- <!---->
- </div>
+
+ <!---->
+ </button>
+ </li>
<!---->
- </button>
- </li>
-
- <!---->
+ </div>
+
+ <!---->
+ </div>
</ul>
</div>
diff --git a/spec/frontend/clusters/components/application_row_spec.js b/spec/frontend/clusters/components/application_row_spec.js
index 0a964426c95..49a299cfb3c 100644
--- a/spec/frontend/clusters/components/application_row_spec.js
+++ b/spec/frontend/clusters/components/application_row_spec.js
@@ -15,7 +15,7 @@ describe('Application Row', () => {
wrapper.destroy();
});
- const mountComponent = data => {
+ const mountComponent = (data) => {
wrapper = shallowMount(ApplicationRow, {
stubs: { GlSprintf },
propsData: {
diff --git a/spec/frontend/clusters/components/applications_spec.js b/spec/frontend/clusters/components/applications_spec.js
index 5438f3053a8..cf89246c1a5 100644
--- a/spec/frontend/clusters/components/applications_spec.js
+++ b/spec/frontend/clusters/components/applications_spec.js
@@ -16,7 +16,7 @@ describe('Applications', () => {
gon.features = gon.features || {};
});
- const createApp = ({ applications, type, props } = {}, isShallow) => {
+ const createApp = ({ applications, type, propsData } = {}, isShallow) => {
const mountMethod = isShallow ? shallowMount : mount;
wrapper = mountMethod(Applications, {
@@ -24,13 +24,13 @@ describe('Applications', () => {
propsData: {
type,
applications: { ...APPLICATIONS_MOCK_STATE, ...applications },
- ...props,
+ ...propsData,
},
});
};
- const createShallowApp = options => createApp(options, true);
- const findByTestId = id => wrapper.find(`[data-testid="${id}"]`);
+ const createShallowApp = (options) => createApp(options, true);
+ const findByTestId = (id) => wrapper.find(`[data-testid="${id}"]`);
afterEach(() => {
wrapper.destroy();
});
@@ -544,7 +544,7 @@ describe('Applications', () => {
describe('Cilium application', () => {
it('shows the correct description', () => {
- createApp({ props: { ciliumHelpPath: 'cilium-help-path' } });
+ createApp({ propsData: { ciliumHelpPath: 'cilium-help-path' } });
expect(findByTestId('ciliumDescription').element).toMatchSnapshot();
});
});
diff --git a/spec/frontend/clusters/components/fluentd_output_settings_spec.js b/spec/frontend/clusters/components/fluentd_output_settings_spec.js
index 25db8785edc..cd996ae915b 100644
--- a/spec/frontend/clusters/components/fluentd_output_settings_spec.js
+++ b/spec/frontend/clusters/components/fluentd_output_settings_spec.js
@@ -37,11 +37,11 @@ describe('FluentdOutputSettings', () => {
const findSaveButton = () => wrapper.find({ ref: 'saveBtn' });
const findCancelButton = () => wrapper.find({ ref: 'cancelBtn' });
const findProtocolDropdown = () => wrapper.find(GlDropdown);
- const findCheckbox = name =>
- wrapper.findAll(GlFormCheckbox).wrappers.find(x => x.text() === name);
+ const findCheckbox = (name) =>
+ wrapper.findAll(GlFormCheckbox).wrappers.find((x) => x.text() === name);
const findHost = () => wrapper.find('#fluentd-host');
const findPort = () => wrapper.find('#fluentd-port');
- const changeCheckbox = checkbox => {
+ const changeCheckbox = (checkbox) => {
const currentValue = checkbox.attributes('checked')?.toString() === 'true';
checkbox.vm.$emit('input', !currentValue);
};
@@ -49,9 +49,9 @@ describe('FluentdOutputSettings', () => {
element.value = val;
element.dispatchEvent(new Event('input'));
};
- const changePort = val => changeInput(findPort(), val);
- const changeHost = val => changeInput(findHost(), val);
- const changeProtocol = idx => findProtocolDropdown().vm.$children[idx].$emit('click');
+ const changePort = (val) => changeInput(findPort(), val);
+ const changeHost = (val) => changeInput(findHost(), val);
+ const changeProtocol = (idx) => findProtocolDropdown().vm.$children[idx].$emit('click');
const toApplicationSettings = ({ wafLogEnabled, ciliumLogEnabled, ...settings }) => ({
...settings,
waf_log_enabled: wafLogEnabled,
diff --git a/spec/frontend/clusters/components/uninstall_application_button_spec.js b/spec/frontend/clusters/components/uninstall_application_button_spec.js
index 387e2188572..c106292965e 100644
--- a/spec/frontend/clusters/components/uninstall_application_button_spec.js
+++ b/spec/frontend/clusters/components/uninstall_application_button_spec.js
@@ -24,7 +24,7 @@ describe('UninstallApplicationButton', () => {
${UPDATING} | ${false} | ${true} | ${'Uninstall'}
${UNINSTALLING} | ${true} | ${true} | ${'Uninstalling'}
`('when app status is $status', ({ loading, disabled, status, text }) => {
- beforeAll(() => {
+ beforeEach(() => {
createComponent({ status });
});
diff --git a/spec/frontend/clusters/services/application_state_machine_spec.js b/spec/frontend/clusters/services/application_state_machine_spec.js
index 7eee54949fa..a3850a64f3b 100644
--- a/spec/frontend/clusters/services/application_state_machine_spec.js
+++ b/spec/frontend/clusters/services/application_state_machine_spec.js
@@ -25,7 +25,7 @@ const {
const NO_EFFECTS = 'no effects';
describe('applicationStateMachine', () => {
- const noEffectsToEmptyObject = effects => (typeof effects === 'string' ? {} : effects);
+ const noEffectsToEmptyObject = (effects) => (typeof effects === 'string' ? {} : effects);
describe(`current state is ${NO_STATUS}`, () => {
it.each`
@@ -42,7 +42,7 @@ describe('applicationStateMachine', () => {
${UNINSTALLING} | ${UNINSTALLING} | ${NO_EFFECTS}
${INSTALLED} | ${UNINSTALL_ERRORED} | ${{ uninstallFailed: true }}
${UNINSTALLED} | ${UNINSTALLED} | ${NO_EFFECTS}
- `(`transitions to $expectedState on $event event and applies $effects`, data => {
+ `(`transitions to $expectedState on $event event and applies $effects`, (data) => {
const { expectedState, event, effects } = data;
const currentAppState = {
status: NO_STATUS,
@@ -59,7 +59,7 @@ describe('applicationStateMachine', () => {
it.each`
expectedState | event | effects
${INSTALLABLE} | ${INSTALLABLE} | ${NO_EFFECTS}
- `(`transitions to $expectedState on $event event and applies $effects`, data => {
+ `(`transitions to $expectedState on $event event and applies $effects`, (data) => {
const { expectedState, event, effects } = data;
const currentAppState = {
status: NOT_INSTALLABLE,
@@ -79,7 +79,7 @@ describe('applicationStateMachine', () => {
${INSTALLED} | ${INSTALLED} | ${{ installFailed: false }}
${NOT_INSTALLABLE} | ${NOT_INSTALLABLE} | ${NO_EFFECTS}
${UNINSTALLED} | ${UNINSTALLED} | ${{ installFailed: false }}
- `(`transitions to $expectedState on $event event and applies $effects`, data => {
+ `(`transitions to $expectedState on $event event and applies $effects`, (data) => {
const { expectedState, event, effects } = data;
const currentAppState = {
status: INSTALLABLE,
@@ -97,7 +97,7 @@ describe('applicationStateMachine', () => {
expectedState | event | effects
${INSTALLED} | ${INSTALLED} | ${NO_EFFECTS}
${INSTALLABLE} | ${ERROR} | ${{ installFailed: true }}
- `(`transitions to $expectedState on $event event and applies $effects`, data => {
+ `(`transitions to $expectedState on $event event and applies $effects`, (data) => {
const { expectedState, event, effects } = data;
const currentAppState = {
status: INSTALLING,
@@ -118,7 +118,7 @@ describe('applicationStateMachine', () => {
${NOT_INSTALLABLE} | ${NOT_INSTALLABLE} | ${NO_EFFECTS}
${UNINSTALLED} | ${UNINSTALLED} | ${NO_EFFECTS}
${INSTALLABLE} | ${ERROR} | ${{ installFailed: true }}
- `(`transitions to $expectedState on $event event and applies $effects`, data => {
+ `(`transitions to $expectedState on $event event and applies $effects`, (data) => {
const { expectedState, event, effects } = data;
const currentAppState = {
status: INSTALLED,
@@ -136,7 +136,7 @@ describe('applicationStateMachine', () => {
expectedState | event | effects
${INSTALLED} | ${UPDATED} | ${{ updateSuccessful: true }}
${INSTALLED} | ${UPDATE_ERRORED} | ${{ updateFailed: true }}
- `(`transitions to $expectedState on $event event and applies $effects`, data => {
+ `(`transitions to $expectedState on $event event and applies $effects`, (data) => {
const { expectedState, event, effects } = data;
const currentAppState = {
status: UPDATING,
@@ -154,7 +154,7 @@ describe('applicationStateMachine', () => {
expectedState | event | effects
${INSTALLABLE} | ${INSTALLABLE} | ${{ uninstallSuccessful: true }}
${INSTALLED} | ${UNINSTALL_ERRORED} | ${{ uninstallFailed: true }}
- `(`transitions to $expectedState on $event event and applies $effects`, data => {
+ `(`transitions to $expectedState on $event event and applies $effects`, (data) => {
const { expectedState, event, effects } = data;
const currentAppState = {
status: UNINSTALLING,
@@ -172,7 +172,7 @@ describe('applicationStateMachine', () => {
expectedState | event | effects
${INSTALLED} | ${INSTALLED} | ${NO_EFFECTS}
${INSTALLABLE} | ${ERROR} | ${{ installFailed: true }}
- `(`transitions to $expectedState on $event event and applies $effects`, data => {
+ `(`transitions to $expectedState on $event event and applies $effects`, (data) => {
const { expectedState, event, effects } = data;
const currentAppState = {
status: UNINSTALLED,
diff --git a/spec/frontend/clusters/stores/clusters_store_spec.js b/spec/frontend/clusters/stores/clusters_store_spec.js
index 381a4717127..df10d9af273 100644
--- a/spec/frontend/clusters/stores/clusters_store_spec.js
+++ b/spec/frontend/clusters/stores/clusters_store_spec.js
@@ -237,19 +237,22 @@ describe('Clusters Store', () => {
});
});
- describe.each(APPLICATION_INSTALLED_STATUSES)('given the current app status is %s', status => {
- it('marks application as installed', () => {
- const mockResponseData =
- CLUSTERS_MOCK_DATA.GET['/gitlab-org/gitlab-shell/clusters/2/status.json'].data;
- const runnerAppIndex = 2;
+ describe.each(APPLICATION_INSTALLED_STATUSES)(
+ 'given the current app status is %s',
+ (status) => {
+ it('marks application as installed', () => {
+ const mockResponseData =
+ CLUSTERS_MOCK_DATA.GET['/gitlab-org/gitlab-shell/clusters/2/status.json'].data;
+ const runnerAppIndex = 2;
- mockResponseData.applications[runnerAppIndex].status = status;
+ mockResponseData.applications[runnerAppIndex].status = status;
- store.updateStateFromServer(mockResponseData);
+ store.updateStateFromServer(mockResponseData);
- expect(store.state.applications[RUNNER].installed).toBe(true);
- });
- });
+ expect(store.state.applications[RUNNER].installed).toBe(true);
+ });
+ },
+ );
it('sets default hostname for jupyter when ingress has a ip address', () => {
const mockResponseData =
diff --git a/spec/frontend/clusters_list/components/ancestor_notice_spec.js b/spec/frontend/clusters_list/components/ancestor_notice_spec.js
index 436f1e97b04..79f917d4601 100644
--- a/spec/frontend/clusters_list/components/ancestor_notice_spec.js
+++ b/spec/frontend/clusters_list/components/ancestor_notice_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlLink, GlSprintf } from '@gitlab/ui';
+import { GlLink, GlSprintf, GlAlert } from '@gitlab/ui';
import AncestorNotice from '~/clusters_list/components/ancestor_notice.vue';
import ClusterStore from '~/clusters_list/store';
@@ -9,7 +9,7 @@ describe('ClustersAncestorNotice', () => {
const createWrapper = () => {
store = ClusterStore({ ancestorHelperPath: '/some/ancestor/path' });
- wrapper = shallowMount(AncestorNotice, { store, stubs: { GlSprintf } });
+ wrapper = shallowMount(AncestorNotice, { store, stubs: { GlSprintf, GlAlert } });
return wrapper.vm.$nextTick();
};
diff --git a/spec/frontend/clusters_list/components/node_error_help_text_spec.js b/spec/frontend/clusters_list/components/node_error_help_text_spec.js
index 4d157b3a8ab..fa566c2dc57 100644
--- a/spec/frontend/clusters_list/components/node_error_help_text_spec.js
+++ b/spec/frontend/clusters_list/components/node_error_help_text_spec.js
@@ -5,7 +5,7 @@ import NodeErrorHelpText from '~/clusters_list/components/node_error_help_text.v
describe('NodeErrorHelpText', () => {
let wrapper;
- const createWrapper = propsData => {
+ const createWrapper = (propsData) => {
wrapper = shallowMount(NodeErrorHelpText, { propsData, stubs: { GlPopover } });
return wrapper.vm.$nextTick();
};
diff --git a/spec/frontend/clusters_list/store/actions_spec.js b/spec/frontend/clusters_list/store/actions_spec.js
index 3d4e07d00eb..6214cb50e13 100644
--- a/spec/frontend/clusters_list/store/actions_spec.js
+++ b/spec/frontend/clusters_list/store/actions_spec.js
@@ -24,7 +24,7 @@ describe('Clusters store actions', () => {
captureException.mockRestore();
});
- it('should report sentry error', done => {
+ it('should report sentry error', (done) => {
const sentryError = new Error('New Sentry Error');
const tag = 'sentryErrorTag';
@@ -62,7 +62,7 @@ describe('Clusters store actions', () => {
afterEach(() => mock.restore());
- it('should commit SET_CLUSTERS_DATA with received response', done => {
+ it('should commit SET_CLUSTERS_DATA with received response', (done) => {
mock.onGet().reply(200, apiData, headers);
testAction(
@@ -79,7 +79,7 @@ describe('Clusters store actions', () => {
);
});
- it('should show flash on API error', done => {
+ it('should show flash on API error', (done) => {
mock.onGet().reply(400, 'Not Found');
testAction(
@@ -126,7 +126,7 @@ describe('Clusters store actions', () => {
pollStop.mockRestore();
});
- it('should stop polling after MAX Requests', done => {
+ it('should stop polling after MAX Requests', (done) => {
testAction(
actions.fetchClusters,
{ endpoint: apiData.endpoint },
@@ -173,7 +173,7 @@ describe('Clusters store actions', () => {
);
});
- it('should stop polling and report to Sentry when data is invalid', done => {
+ it('should stop polling and report to Sentry when data is invalid', (done) => {
const badApiResponse = { clusters: {} };
mock.onGet().reply(200, badApiResponse, pollHeaders);
diff --git a/spec/frontend/code_navigation/store/actions_spec.js b/spec/frontend/code_navigation/store/actions_spec.js
index fbd93b10a14..39cf4acd5ce 100644
--- a/spec/frontend/code_navigation/store/actions_spec.js
+++ b/spec/frontend/code_navigation/store/actions_spec.js
@@ -8,7 +8,7 @@ jest.mock('~/code_navigation/utils');
describe('Code navigation actions', () => {
describe('setInitialData', () => {
- it('commits SET_INITIAL_DATA', done => {
+ it('commits SET_INITIAL_DATA', (done) => {
testAction(
actions.setInitialData,
{ projectPath: 'test' },
@@ -57,7 +57,7 @@ describe('Code navigation actions', () => {
]);
});
- it('commits REQUEST_DATA_SUCCESS with normalized data', done => {
+ it('commits REQUEST_DATA_SUCCESS with normalized data', (done) => {
testAction(
actions.fetchData,
null,
@@ -84,7 +84,7 @@ describe('Code navigation actions', () => {
);
});
- it('calls addInteractionClass with data', done => {
+ it('calls addInteractionClass with data', (done) => {
testAction(
actions.fetchData,
null,
@@ -125,7 +125,7 @@ describe('Code navigation actions', () => {
mock.onGet(codeNavigationPath).replyOnce(500);
});
- it('dispatches requestDataError', done => {
+ it('dispatches requestDataError', (done) => {
testAction(
actions.fetchData,
null,
@@ -175,15 +175,15 @@ describe('Code navigation actions', () => {
target = document.querySelector('.js-test');
});
- it('returns early when no data exists', done => {
+ it('returns early when no data exists', (done) => {
testAction(actions.showDefinition, { target }, {}, [], [], done);
});
- it('commits SET_CURRENT_DEFINITION when target is not code navigation element', done => {
+ it('commits SET_CURRENT_DEFINITION when target is not code navigation element', (done) => {
testAction(actions.showDefinition, { target }, { data: {} }, [], [], done);
});
- it('commits SET_CURRENT_DEFINITION with LSIF data', done => {
+ it('commits SET_CURRENT_DEFINITION with LSIF data', (done) => {
target.classList.add('js-code-navigation');
target.setAttribute('data-line-index', '0');
target.setAttribute('data-char-index', '0');
diff --git a/spec/frontend/collapsed_sidebar_todo_spec.js b/spec/frontend/collapsed_sidebar_todo_spec.js
index 86f4c450c05..cc89a3c68f0 100644
--- a/spec/frontend/collapsed_sidebar_todo_spec.js
+++ b/spec/frontend/collapsed_sidebar_todo_spec.js
@@ -2,9 +2,9 @@
import { clone } from 'lodash';
import MockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'spec/test_constants';
+import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import Sidebar from '~/right_sidebar';
-import waitForPromises from './helpers/wait_for_promises';
describe('Issuable right sidebar collapsed todo toggle', () => {
const fixtureName = 'issues/open-issue.html';
@@ -62,7 +62,7 @@ describe('Issuable right sidebar collapsed todo toggle', () => {
).toBe('Add a to do');
});
- it('toggle todo state', done => {
+ it('toggle todo state', (done) => {
document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click();
setImmediate(() => {
@@ -80,7 +80,7 @@ describe('Issuable right sidebar collapsed todo toggle', () => {
});
});
- it('toggle todo state of expanded todo toggle', done => {
+ it('toggle todo state of expanded todo toggle', (done) => {
document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click();
setImmediate(() => {
@@ -92,7 +92,7 @@ describe('Issuable right sidebar collapsed todo toggle', () => {
});
});
- it('toggles todo button tooltip', done => {
+ it('toggles todo button tooltip', (done) => {
document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click();
setImmediate(() => {
@@ -106,7 +106,7 @@ describe('Issuable right sidebar collapsed todo toggle', () => {
});
});
- it('marks todo as done', done => {
+ it('marks todo as done', (done) => {
document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click();
waitForPromises()
@@ -131,7 +131,7 @@ describe('Issuable right sidebar collapsed todo toggle', () => {
.catch(done.fail);
});
- it('updates aria-label to Mark as done', done => {
+ it('updates aria-label to Mark as done', (done) => {
document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click();
setImmediate(() => {
@@ -145,7 +145,7 @@ describe('Issuable right sidebar collapsed todo toggle', () => {
});
});
- it('updates aria-label to add todo', done => {
+ it('updates aria-label to add todo', (done) => {
document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click();
waitForPromises()
diff --git a/spec/frontend/commit/commit_pipeline_status_component_spec.js b/spec/frontend/commit/commit_pipeline_status_component_spec.js
index 625024ee61f..6e4368b5de8 100644
--- a/spec/frontend/commit/commit_pipeline_status_component_spec.js
+++ b/spec/frontend/commit/commit_pipeline_status_component_spec.js
@@ -1,11 +1,11 @@
import Visibility from 'visibilityjs';
import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import { getJSONFixture } from 'helpers/fixtures';
import Poll from '~/lib/utils/poll';
import { deprecatedCreateFlash as flash } from '~/flash';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import CommitPipelineStatus from '~/projects/tree/components/commit_pipeline_status_component.vue';
-import { getJSONFixture } from '../helpers/fixtures';
jest.mock('~/lib/utils/poll');
jest.mock('visibilityjs');
@@ -105,7 +105,7 @@ describe('Commit pipeline status component', () => {
describe('when polling', () => {
let pollConfig;
beforeEach(() => {
- Poll.mockImplementation(config => {
+ Poll.mockImplementation((config) => {
pollConfig = config;
return { makeRequest: jest.fn(), restart: jest.fn(), stop: jest.fn() };
});
diff --git a/spec/frontend/commit/pipelines/pipelines_spec.js b/spec/frontend/commit/pipelines/pipelines_spec.js
index a196b66daa0..15b1f224699 100644
--- a/spec/frontend/commit/pipelines/pipelines_spec.js
+++ b/spec/frontend/commit/pipelines/pipelines_spec.js
@@ -31,7 +31,7 @@ describe('Pipelines table in Commits and Merge requests', () => {
const { pipelines } = getJSONFixture(jsonFixtureName);
PipelinesTable = Vue.extend(pipelinesTable);
- pipeline = pipelines.find(p => p.user !== null && p.commit !== null);
+ pipeline = pipelines.find((p) => p.user !== null && p.commit !== null);
});
afterEach(() => {
@@ -47,7 +47,7 @@ describe('Pipelines table in Commits and Merge requests', () => {
vm = mountComponent(PipelinesTable, props);
});
- it('should render the empty state', done => {
+ it('should render the empty state', (done) => {
setImmediate(() => {
expect(vm.$el.querySelector('.empty-state')).toBeDefined();
expect(vm.$el.querySelector('.realtime-loading')).toBe(null);
@@ -63,7 +63,7 @@ describe('Pipelines table in Commits and Merge requests', () => {
vm = mountComponent(PipelinesTable, props);
});
- it('should render a table with the received pipelines', done => {
+ it('should render a table with the received pipelines', (done) => {
setImmediate(() => {
expect(vm.$el.querySelectorAll('.ci-table .commit').length).toEqual(1);
expect(vm.$el.querySelector('.realtime-loading')).toBe(null);
@@ -74,7 +74,7 @@ describe('Pipelines table in Commits and Merge requests', () => {
});
describe('with pagination', () => {
- it('should make an API request when using pagination', done => {
+ it('should make an API request when using pagination', (done) => {
setImmediate(() => {
jest.spyOn(vm, 'updateContent').mockImplementation(() => {});
@@ -102,11 +102,11 @@ describe('Pipelines table in Commits and Merge requests', () => {
mock.onGet('endpoint.json').reply(200, [pipeline]);
});
- it('should receive update-pipelines-count event', done => {
+ it('should receive update-pipelines-count event', (done) => {
const element = document.createElement('div');
document.body.appendChild(element);
- element.addEventListener('update-pipelines-count', event => {
+ element.addEventListener('update-pipelines-count', (event) => {
expect(event.detail.pipelines).toEqual([pipeline]);
done();
});
@@ -126,7 +126,7 @@ describe('Pipelines table in Commits and Merge requests', () => {
});
describe('when latest pipeline has detached flag', () => {
- it('renders the run pipeline button', done => {
+ it('renders the run pipeline button', (done) => {
pipelineCopy.flags.detached_merge_request_pipeline = true;
pipelineCopy.flags.merge_request_pipeline = true;
@@ -143,7 +143,7 @@ describe('Pipelines table in Commits and Merge requests', () => {
});
describe('when latest pipeline does not have detached flag', () => {
- it('does not render the run pipeline button', done => {
+ it('does not render the run pipeline button', (done) => {
pipelineCopy.flags.detached_merge_request_pipeline = false;
pipelineCopy.flags.merge_request_pipeline = false;
@@ -163,7 +163,7 @@ describe('Pipelines table in Commits and Merge requests', () => {
const findModal = () =>
document.querySelector('#create-pipeline-for-fork-merge-request-modal');
- beforeEach(done => {
+ beforeEach((done) => {
pipelineCopy.flags.detached_merge_request_pipeline = true;
mock.onGet('endpoint.json').reply(200, [pipelineCopy]);
@@ -182,7 +182,7 @@ describe('Pipelines table in Commits and Merge requests', () => {
});
});
- it('on desktop, shows a loading button', done => {
+ it('on desktop, shows a loading button', (done) => {
findRunPipelineBtn().click();
vm.$nextTick(() => {
@@ -200,7 +200,7 @@ describe('Pipelines table in Commits and Merge requests', () => {
});
});
- it('on mobile, shows a loading button', done => {
+ it('on mobile, shows a loading button', (done) => {
findRunPipelineBtnMobile().click();
vm.$nextTick(() => {
@@ -223,7 +223,7 @@ describe('Pipelines table in Commits and Merge requests', () => {
const findModal = () =>
document.querySelector('#create-pipeline-for-fork-merge-request-modal');
- beforeEach(done => {
+ beforeEach((done) => {
pipelineCopy.flags.detached_merge_request_pipeline = true;
mock.onGet('endpoint.json').reply(200, [pipelineCopy]);
@@ -244,7 +244,7 @@ describe('Pipelines table in Commits and Merge requests', () => {
});
});
- it('on desktop, shows a security warning modal', done => {
+ it('on desktop, shows a security warning modal', (done) => {
findRunPipelineBtn().click();
vm.$nextTick(() => {
@@ -253,7 +253,7 @@ describe('Pipelines table in Commits and Merge requests', () => {
});
});
- it('on mobile, shows a security warning modal', done => {
+ it('on mobile, shows a security warning modal', (done) => {
findRunPipelineBtnMobile().click();
vm.$nextTick(() => {
@@ -271,7 +271,7 @@ describe('Pipelines table in Commits and Merge requests', () => {
vm = mountComponent(PipelinesTable, props);
});
- it('should render error state', done => {
+ it('should render error state', (done) => {
setImmediate(() => {
expect(vm.$el.querySelector('.js-pipelines-error-state')).toBeDefined();
expect(vm.$el.querySelector('.realtime-loading')).toBe(null);
diff --git a/spec/frontend/commits_spec.js b/spec/frontend/commits_spec.js
index 42bd37570b1..e1b947ddd0d 100644
--- a/spec/frontend/commits_spec.js
+++ b/spec/frontend/commits_spec.js
@@ -70,7 +70,7 @@ describe('Commits List', () => {
mock.restore();
});
- it('should save the last search string', done => {
+ it('should save the last search string', (done) => {
commitsList.searchField.val('GitLab');
commitsList
.filterResults()
@@ -83,7 +83,7 @@ describe('Commits List', () => {
.catch(done.fail);
});
- it('should not make ajax call if the input does not change', done => {
+ it('should not make ajax call if the input does not change', (done) => {
commitsList
.filterResults()
.then(() => {
diff --git a/spec/frontend/commons/nav/user_merge_requests_spec.js b/spec/frontend/commons/nav/user_merge_requests_spec.js
index 4da6d53557a..c441668f7c7 100644
--- a/spec/frontend/commons/nav/user_merge_requests_spec.js
+++ b/spec/frontend/commons/nav/user_merge_requests_spec.js
@@ -3,12 +3,12 @@ import {
closeUserCountsBroadcast,
refreshUserMergeRequestCounts,
} from '~/commons/nav/user_merge_requests';
-import Api from '~/api';
+import * as UserApi from '~/api/user_api';
jest.mock('~/api');
const TEST_COUNT = 1000;
-const MR_COUNT_CLASS = 'merge-requests-count';
+const MR_COUNT_CLASS = 'js-merge-requests-count';
describe('User Merge Requests', () => {
let channelMock;
@@ -24,18 +24,21 @@ describe('User Merge Requests', () => {
newBroadcastChannelMock = jest.fn().mockImplementation(() => channelMock);
global.BroadcastChannel = newBroadcastChannelMock;
- setFixtures(`<div class="${MR_COUNT_CLASS}">0</div>`);
+ setFixtures(
+ `<div><div class="${MR_COUNT_CLASS}">0</div><div class="js-assigned-mr-count"></div><div class="js-reviewer-mr-count"></div></div>`,
+ );
});
const findMRCountText = () => document.body.querySelector(`.${MR_COUNT_CLASS}`).textContent;
describe('refreshUserMergeRequestCounts', () => {
beforeEach(() => {
- Api.userCounts.mockReturnValue(
- Promise.resolve({
- data: { merge_requests: TEST_COUNT },
- }),
- );
+ jest.spyOn(UserApi, 'getUserCounts').mockResolvedValue({
+ data: {
+ assigned_merge_requests: TEST_COUNT,
+ review_requested_merge_requests: TEST_COUNT,
+ },
+ });
});
describe('with open broadcast channel', () => {
@@ -46,15 +49,15 @@ describe('User Merge Requests', () => {
});
it('updates the top count of merge requests', () => {
- expect(findMRCountText()).toEqual(TEST_COUNT.toLocaleString());
+ expect(findMRCountText()).toEqual(Number(TEST_COUNT + TEST_COUNT).toLocaleString());
});
it('calls the API', () => {
- expect(Api.userCounts).toHaveBeenCalled();
+ expect(UserApi.getUserCounts).toHaveBeenCalled();
});
it('posts count to BroadcastChannel', () => {
- expect(channelMock.postMessage).toHaveBeenCalledWith(TEST_COUNT);
+ expect(channelMock.postMessage).toHaveBeenCalledWith(TEST_COUNT + TEST_COUNT);
});
});
diff --git a/spec/frontend/confirm_modal_spec.js b/spec/frontend/confirm_modal_spec.js
index 70076532a94..8a12ff3a01f 100644
--- a/spec/frontend/confirm_modal_spec.js
+++ b/spec/frontend/confirm_modal_spec.js
@@ -29,7 +29,7 @@ describe('ConfirmModal', () => {
beforeEach(() => {
const buttonContainer = document.createElement('div');
- buttons.forEach(x => {
+ buttons.forEach((x) => {
const button = document.createElement('button');
button.setAttribute('class', 'js-confirm-modal-button');
button.setAttribute('data-path', x.path);
@@ -50,7 +50,7 @@ describe('ConfirmModal', () => {
const findModal = () => document.querySelector('.gl-modal');
const findModalOkButton = (modal, variant) =>
modal.querySelector(`.modal-footer .btn-${variant}`);
- const findModalCancelButton = modal => modal.querySelector('.modal-footer .btn-secondary');
+ const findModalCancelButton = (modal) => modal.querySelector('.modal-footer .btn-secondary');
const modalIsHidden = () => findModal() === null;
const serializeModal = (modal, buttonIndex) => {
@@ -63,7 +63,7 @@ describe('ConfirmModal', () => {
title: modal.querySelector('.modal-title').innerHTML,
message: modal.querySelector('.modal-body div').innerHTML,
okVariant: [...findModalOkButton(modal, modalAttributes.okVariant).classList]
- .find(x => x.match('btn-'))
+ .find((x) => x.match('btn-'))
.replace('btn-', ''),
okTitle: findModalOkButton(modal, modalAttributes.okVariant).innerHTML,
},
@@ -78,9 +78,7 @@ describe('ConfirmModal', () => {
describe('when button clicked', () => {
beforeEach(() => {
initConfirmModal();
- findJsHooks()
- .item(0)
- .click();
+ findJsHooks().item(0).click();
});
it('does not replace JsHook with GlModal', () => {
@@ -116,9 +114,7 @@ describe('ConfirmModal', () => {
`(`when multiple buttons exist`, ({ index }) => {
beforeEach(() => {
initConfirmModal();
- findJsHooks()
- .item(index)
- .click();
+ findJsHooks().item(index).click();
});
it('correct props are passed to gl-modal', () => {
diff --git a/spec/frontend/contributors/store/actions_spec.js b/spec/frontend/contributors/store/actions_spec.js
index ad490ea4b67..7d2f93c4940 100644
--- a/spec/frontend/contributors/store/actions_spec.js
+++ b/spec/frontend/contributors/store/actions_spec.js
@@ -17,7 +17,7 @@ describe('Contributors store actions', () => {
mock = new MockAdapter(axios);
});
- it('should commit SET_CHART_DATA with received response', done => {
+ it('should commit SET_CHART_DATA with received response', (done) => {
mock.onGet().reply(200, chartData);
testAction(
@@ -37,7 +37,7 @@ describe('Contributors store actions', () => {
);
});
- it('should show flash on API error', done => {
+ it('should show flash on API error', (done) => {
mock.onGet().reply(400, 'Not Found');
testAction(
diff --git a/spec/frontend/create_cluster/components/cluster_form_dropdown_spec.js b/spec/frontend/create_cluster/components/cluster_form_dropdown_spec.js
index 17abf409717..90c0e2d7827 100644
--- a/spec/frontend/create_cluster/components/cluster_form_dropdown_spec.js
+++ b/spec/frontend/create_cluster/components/cluster_form_dropdown_spec.js
@@ -8,9 +8,9 @@ import DropdownSearchInput from '~/vue_shared/components/dropdown/dropdown_searc
describe('ClusterFormDropdown', () => {
let wrapper;
- const firstItem = { name: 'item 1', value: 1 };
- const secondItem = { name: 'item 2', value: 2 };
- const items = [firstItem, secondItem, { name: 'item 3', value: 3 }];
+ const firstItem = { name: 'item 1', value: '1' };
+ const secondItem = { name: 'item 2', value: '2' };
+ const items = [firstItem, secondItem, { name: 'item 3', value: '3' }];
beforeEach(() => {
wrapper = shallowMount(ClusterFormDropdown);
@@ -44,10 +44,7 @@ describe('ClusterFormDropdown', () => {
wrapper.setProps({ items });
return wrapper.vm.$nextTick().then(() => {
- wrapper
- .findAll('.js-dropdown-item')
- .at(1)
- .trigger('click');
+ wrapper.findAll('.js-dropdown-item').at(1).trigger('click');
return wrapper.vm.$nextTick();
});
});
@@ -58,24 +55,18 @@ describe('ClusterFormDropdown', () => {
});
describe('when multiple items are selected', () => {
- const value = [1];
+ const value = ['1'];
beforeEach(() => {
wrapper.setProps({ items, multiple: true, value });
return wrapper.vm
.$nextTick()
.then(() => {
- wrapper
- .findAll('.js-dropdown-item')
- .at(0)
- .trigger('click');
+ wrapper.findAll('.js-dropdown-item').at(0).trigger('click');
return wrapper.vm.$nextTick();
})
.then(() => {
- wrapper
- .findAll('.js-dropdown-item')
- .at(1)
- .trigger('click');
+ wrapper.findAll('.js-dropdown-item').at(1).trigger('click');
return wrapper.vm.$nextTick();
});
});
@@ -102,10 +93,7 @@ describe('ClusterFormDropdown', () => {
wrapper.setProps({ items, multiple: true, value: null });
return wrapper.vm.$nextTick().then(() => {
- wrapper
- .findAll('.js-dropdown-item')
- .at(0)
- .trigger('click');
+ wrapper.findAll('.js-dropdown-item').at(0).trigger('click');
expect(wrapper.emitted('input')[0]).toEqual([[firstItem.value]]);
});
@@ -116,7 +104,7 @@ describe('ClusterFormDropdown', () => {
it('displays selected item custom label', () => {
const labelProperty = 'customLabel';
const label = 'Name';
- const currentValue = 1;
+ const currentValue = '1';
const customLabelItems = [{ [labelProperty]: label, value: currentValue }];
wrapper.setProps({ labelProperty, items: customLabelItems, value: currentValue });
@@ -128,12 +116,9 @@ describe('ClusterFormDropdown', () => {
});
describe('when loading', () => {
- it('dropdown button isLoading', () => {
- wrapper.setProps({ loading: true });
-
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.find(DropdownButton).props('isLoading')).toBe(true);
- });
+ it('dropdown button isLoading', async () => {
+ await wrapper.setProps({ loading: true });
+ expect(wrapper.find(DropdownButton).props('isLoading')).toBe(true);
});
});
diff --git a/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js b/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js
index f9984091df0..a4835f8c1c1 100644
--- a/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js
+++ b/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js
@@ -128,7 +128,7 @@ describe('EksClusterConfigurationForm', () => {
});
};
- const createValidStateStore = initialState => {
+ const createValidStateStore = (initialState) => {
createStore({
initialState: {
clusterName: 'cluster name',
diff --git a/spec/frontend/create_cluster/eks_cluster/services/aws_services_facade_spec.js b/spec/frontend/create_cluster/eks_cluster/services/aws_services_facade_spec.js
index 03c22c570a8..2853d18e2cb 100644
--- a/spec/frontend/create_cluster/eks_cluster/services/aws_services_facade_spec.js
+++ b/spec/frontend/create_cluster/eks_cluster/services/aws_services_facade_spec.js
@@ -101,7 +101,10 @@ describe('awsServicesFacade', () => {
let vpcsOutput;
beforeEach(() => {
- vpcs = [{ VpcId: 'vpc-1', Tags: [] }, { VpcId: 'vpc-2', Tags: [] }];
+ vpcs = [
+ { VpcId: 'vpc-1', Tags: [] },
+ { VpcId: 'vpc-2', Tags: [] },
+ ];
vpcsOutput = vpcs.map(({ VpcId: vpcId }) => ({ name: vpcId, value: vpcId }));
mockDescribeVpcsPromise.mockResolvedValueOnce({ Vpcs: vpcs });
diff --git a/spec/frontend/create_cluster/eks_cluster/store/actions_spec.js b/spec/frontend/create_cluster/eks_cluster/store/actions_spec.js
index f14a555f357..35348d3a03b 100644
--- a/spec/frontend/create_cluster/eks_cluster/store/actions_spec.js
+++ b/spec/frontend/create_cluster/eks_cluster/store/actions_spec.js
@@ -94,7 +94,7 @@ describe('EKS Cluster Store Actions', () => {
${'setNodeCount'} | ${SET_NODE_COUNT} | ${{ nodeCount }} | ${'node count'}
${'setGitlabManagedCluster'} | ${SET_GITLAB_MANAGED_CLUSTER} | ${gitlabManagedCluster} | ${'gitlab managed cluster'}
${'setNamespacePerEnvironment'} | ${SET_NAMESPACE_PER_ENVIRONMENT} | ${namespacePerEnvironment} | ${'namespace per environment'}
- `(`$action commits $mutation with $payloadDescription payload`, data => {
+ `(`$action commits $mutation with $payloadDescription payload`, (data) => {
const { action, mutation, payload } = data;
testAction(actions[action], payload, state, [{ type: mutation, payload }]);
diff --git a/spec/frontend/create_cluster/eks_cluster/store/getters_spec.js b/spec/frontend/create_cluster/eks_cluster/store/getters_spec.js
index 7c26aeb9b93..46c37961dd3 100644
--- a/spec/frontend/create_cluster/eks_cluster/store/getters_spec.js
+++ b/spec/frontend/create_cluster/eks_cluster/store/getters_spec.js
@@ -6,7 +6,7 @@ describe('EKS Cluster Store Getters', () => {
expect(subnetValid({ selectedSubnet: [1, 2] })).toBe(true);
});
- it.each([[[], [1]]])('returns false if there are 1 or less selected subnets', subnets => {
+ it.each([[[], [1]]])('returns false if there are 1 or less selected subnets', (subnets) => {
expect(subnetValid({ selectedSubnet: subnets })).toBe(false);
});
});
diff --git a/spec/frontend/create_cluster/eks_cluster/store/mutations_spec.js b/spec/frontend/create_cluster/eks_cluster/store/mutations_spec.js
index 8bb014d4758..633cea595d1 100644
--- a/spec/frontend/create_cluster/eks_cluster/store/mutations_spec.js
+++ b/spec/frontend/create_cluster/eks_cluster/store/mutations_spec.js
@@ -66,7 +66,7 @@ describe('Create EKS cluster store mutations', () => {
${SET_INSTANCE_TYPE} | ${'selectedInstanceType'} | ${{ instanceType }} | ${instanceType} | ${'selected instance type payload'}
${SET_NODE_COUNT} | ${'nodeCount'} | ${{ nodeCount }} | ${nodeCount} | ${'node count payload'}
${SET_GITLAB_MANAGED_CLUSTER} | ${'gitlabManagedCluster'} | ${{ gitlabManagedCluster }} | ${gitlabManagedCluster} | ${'gitlab managed cluster'}
- `(`$mutation sets $mutatedProperty to $expectedValueDescription`, data => {
+ `(`$mutation sets $mutatedProperty to $expectedValueDescription`, (data) => {
const { mutation, mutatedProperty, payload, expectedValue } = data;
mutations[mutation](state, payload);
diff --git a/spec/frontend/create_cluster/gke_cluster/gapi_loader_spec.js b/spec/frontend/create_cluster/gke_cluster/gapi_loader_spec.js
new file mode 100644
index 00000000000..9e4d6996340
--- /dev/null
+++ b/spec/frontend/create_cluster/gke_cluster/gapi_loader_spec.js
@@ -0,0 +1,47 @@
+import gapiLoader from '~/create_cluster/gke_cluster/gapi_loader';
+
+describe('gapiLoader', () => {
+ // A mock for document.head.appendChild to intercept the script tag injection.
+ let mockDOMHeadAppendChild;
+
+ beforeEach(() => {
+ mockDOMHeadAppendChild = jest.spyOn(document.head, 'appendChild');
+ });
+
+ afterEach(() => {
+ mockDOMHeadAppendChild.mockRestore();
+ delete window.gapi;
+ delete window.gapiPromise;
+ delete window.onGapiLoad;
+ });
+
+ it('returns a promise', () => {
+ expect(gapiLoader()).toBeInstanceOf(Promise);
+ });
+
+ it('returns the same promise when already loading', () => {
+ const first = gapiLoader();
+ const second = gapiLoader();
+ expect(first).toBe(second);
+ });
+
+ it('resolves the promise when the script loads correctly', async () => {
+ mockDOMHeadAppendChild.mockImplementationOnce((script) => {
+ script.removeAttribute('src');
+ script.appendChild(
+ document.createTextNode(`window.gapi = 'hello gapi'; window.onGapiLoad()`),
+ );
+ document.head.appendChild(script);
+ });
+ await expect(gapiLoader()).resolves.toBe('hello gapi');
+ expect(mockDOMHeadAppendChild).toHaveBeenCalled();
+ });
+
+ it('rejects the promise when the script fails loading', async () => {
+ mockDOMHeadAppendChild.mockImplementationOnce((script) => {
+ script.onerror(new Error('hello error'));
+ });
+ await expect(gapiLoader()).rejects.toThrow('hello error');
+ expect(mockDOMHeadAppendChild).toHaveBeenCalled();
+ });
+});
diff --git a/spec/frontend/create_cluster/gke_cluster/helpers.js b/spec/frontend/create_cluster/gke_cluster/helpers.js
index 52b43b82698..026e99fa8f4 100644
--- a/spec/frontend/create_cluster/gke_cluster/helpers.js
+++ b/spec/frontend/create_cluster/gke_cluster/helpers.js
@@ -8,7 +8,7 @@ const cloudbilling = {
projects: {
getBillingInfo: jest.fn(
() =>
- new Promise(resolve => {
+ new Promise((resolve) => {
resolve({
result: { billingEnabled: true },
});
@@ -21,7 +21,7 @@ const cloudresourcemanager = {
projects: {
list: jest.fn(
() =>
- new Promise(resolve => {
+ new Promise((resolve) => {
resolve({
result: { ...gapiProjectsResponseMock },
});
@@ -34,7 +34,7 @@ const compute = {
zones: {
list: jest.fn(
() =>
- new Promise(resolve => {
+ new Promise((resolve) => {
resolve({
result: { ...gapiZonesResponseMock },
});
@@ -44,7 +44,7 @@ const compute = {
machineTypes: {
list: jest.fn(
() =>
- new Promise(resolve => {
+ new Promise((resolve) => {
resolve({
result: { ...gapiMachineTypesResponseMock },
});
diff --git a/spec/frontend/create_cluster/gke_cluster/stores/actions_spec.js b/spec/frontend/create_cluster/gke_cluster/stores/actions_spec.js
index c1ac3841136..8852baafec7 100644
--- a/spec/frontend/create_cluster/gke_cluster/stores/actions_spec.js
+++ b/spec/frontend/create_cluster/gke_cluster/stores/actions_spec.js
@@ -14,7 +14,7 @@ import {
describe('GCP Cluster Dropdown Store Actions', () => {
describe('setProject', () => {
- it('should set project', done => {
+ it('should set project', (done) => {
testAction(
actions.setProject,
selectedProjectMock,
@@ -27,7 +27,7 @@ describe('GCP Cluster Dropdown Store Actions', () => {
});
describe('setZone', () => {
- it('should set zone', done => {
+ it('should set zone', (done) => {
testAction(
actions.setZone,
selectedZoneMock,
@@ -40,7 +40,7 @@ describe('GCP Cluster Dropdown Store Actions', () => {
});
describe('setMachineType', () => {
- it('should set machine type', done => {
+ it('should set machine type', (done) => {
testAction(
actions.setMachineType,
selectedMachineTypeMock,
@@ -53,7 +53,7 @@ describe('GCP Cluster Dropdown Store Actions', () => {
});
describe('setIsValidatingProjectBilling', () => {
- it('should set machine type', done => {
+ it('should set machine type', (done) => {
testAction(
actions.setIsValidatingProjectBilling,
true,
@@ -71,10 +71,12 @@ describe('GCP Cluster Dropdown Store Actions', () => {
beforeAll(() => {
originalGapi = window.gapi;
window.gapi = gapi;
+ window.gapiPromise = Promise.resolve(gapi);
});
afterAll(() => {
window.gapi = originalGapi;
+ delete window.gapiPromise;
});
describe('fetchProjects', () => {
@@ -92,7 +94,7 @@ describe('GCP Cluster Dropdown Store Actions', () => {
});
describe('validateProjectBilling', () => {
- it('checks project billing status from Google API', done => {
+ it('checks project billing status from Google API', (done) => {
testAction(
actions.validateProjectBilling,
true,
diff --git a/spec/frontend/create_cluster/gke_cluster_namespace/gke_cluster_namespace_spec.js b/spec/frontend/create_cluster/gke_cluster_namespace/gke_cluster_namespace_spec.js
index b1c25d8fff7..c22167a078c 100644
--- a/spec/frontend/create_cluster/gke_cluster_namespace/gke_cluster_namespace_spec.js
+++ b/spec/frontend/create_cluster/gke_cluster_namespace/gke_cluster_namespace_spec.js
@@ -2,8 +2,8 @@ import initGkeNamespace from '~/create_cluster/gke_cluster_namespace';
describe('GKE cluster namespace', () => {
const changeEvent = new Event('change');
- const isHidden = el => el.classList.contains('hidden');
- const hasDisabledInput = el => el.querySelector('input').disabled;
+ const isHidden = (el) => el.classList.contains('hidden');
+ const hasDisabledInput = (el) => el.querySelector('input').disabled;
let glManagedCheckbox;
let selfManaged;
diff --git a/spec/frontend/create_cluster/store/cluster_dropdown/mutations_spec.js b/spec/frontend/create_cluster/store/cluster_dropdown/mutations_spec.js
index 5edd237133d..4b700e31675 100644
--- a/spec/frontend/create_cluster/store/cluster_dropdown/mutations_spec.js
+++ b/spec/frontend/create_cluster/store/cluster_dropdown/mutations_spec.js
@@ -27,7 +27,7 @@ describe('Cluster dropdown store mutations', () => {
${RECEIVE_ITEMS_SUCCESS} | ${'items'} | ${{ items }} | ${items} | ${'items payload'}
${RECEIVE_ITEMS_ERROR} | ${'isLoadingItems'} | ${{ error }} | ${false} | ${false}
${RECEIVE_ITEMS_ERROR} | ${'error'} | ${{ error }} | ${error} | ${'received error object'}
- `(`$mutation sets $mutatedProperty to $expectedValueDescription`, data => {
+ `(`$mutation sets $mutatedProperty to $expectedValueDescription`, (data) => {
const { mutation, mutatedProperty, payload, expectedValue } = data;
mutations[mutation](state, payload);
diff --git a/spec/frontend/create_item_dropdown_spec.js b/spec/frontend/create_item_dropdown_spec.js
index a814952faab..7314eb5eee8 100644
--- a/spec/frontend/create_item_dropdown_spec.js
+++ b/spec/frontend/create_item_dropdown_spec.js
@@ -27,20 +27,14 @@ describe('CreateItemDropdown', () => {
function createItemAndClearInput(text) {
// Filter for the new item
- $wrapperEl
- .find('.dropdown-input-field')
- .val(text)
- .trigger('input');
+ $wrapperEl.find('.dropdown-input-field').val(text).trigger('input');
// Create the new item
const $createButton = $wrapperEl.find('.js-dropdown-create-new-item');
$createButton.click();
// Clear out the filter
- $wrapperEl
- .find('.dropdown-input-field')
- .val('')
- .trigger('input');
+ $wrapperEl.find('.dropdown-input-field').val('').trigger('input');
}
beforeEach(() => {
@@ -91,10 +85,7 @@ describe('CreateItemDropdown', () => {
$('.js-dropdown-menu-toggle').click();
// Filter for the new item
- $wrapperEl
- .find('.dropdown-input-field')
- .val(NEW_ITEM_TEXT)
- .trigger('input');
+ $wrapperEl.find('.dropdown-input-field').val(NEW_ITEM_TEXT).trigger('input');
});
it('create new item button should include the filter text', () => {
@@ -171,7 +162,7 @@ describe('CreateItemDropdown', () => {
getData: (term, callback) => {
callback(DROPDOWN_ITEM_DATA);
},
- createNewItemFromValue: newValue => ({
+ createNewItemFromValue: (newValue) => ({
title: `${newValue}-title`,
id: `${newValue}-id`,
text: `${newValue}-text`,
diff --git a/spec/frontend/create_merge_request_dropdown_spec.js b/spec/frontend/create_merge_request_dropdown_spec.js
index bdf03853597..698725b769d 100644
--- a/spec/frontend/create_merge_request_dropdown_spec.js
+++ b/spec/frontend/create_merge_request_dropdown_spec.js
@@ -1,8 +1,8 @@
import MockAdapter from 'axios-mock-adapter';
+import { TEST_HOST } from 'helpers/test_constants';
import axios from '~/lib/utils/axios_utils';
import CreateMergeRequestDropdown from '~/create_merge_request_dropdown';
import confidentialState from '~/confidential_merge_request/state';
-import { TEST_HOST } from './helpers/test_constants';
describe('CreateMergeRequestDropdown', () => {
let axiosMock;
@@ -36,7 +36,7 @@ describe('CreateMergeRequestDropdown', () => {
});
describe('getRef', () => {
- it('escapes branch names correctly', done => {
+ it('escapes branch names correctly', (done) => {
const endpoint = `${dropdown.refsPath}contains%23hash`;
jest.spyOn(axios, 'get');
axiosMock.onGet(endpoint).replyOnce({});
diff --git a/spec/frontend/custom_metrics/components/custom_metrics_form_fields_spec.js b/spec/frontend/custom_metrics/components/custom_metrics_form_fields_spec.js
index 79c37293fe5..c41adf523f8 100644
--- a/spec/frontend/custom_metrics/components/custom_metrics_form_fields_spec.js
+++ b/spec/frontend/custom_metrics/components/custom_metrics_form_fields_spec.js
@@ -1,22 +1,20 @@
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'helpers/test_constants';
-import waitForPromises from 'helpers/wait_for_promises';
import CustomMetricsFormFields from '~/custom_metrics/components/custom_metrics_form_fields.vue';
import axios from '~/lib/utils/axios_utils';
const { CancelToken } = axios;
describe('custom metrics form fields component', () => {
- let component;
+ let wrapper;
let mockAxios;
- const getNamedInput = name => component.element.querySelector(`input[name="${name}"]`);
+ const getNamedInput = (name) => wrapper.element.querySelector(`input[name="${name}"]`);
const validateQueryPath = `${TEST_HOST}/mock/path`;
- const validQueryResponse = { data: { success: true, query: { valid: true, error: '' } } };
+ const validQueryResponse = { success: true, query: { valid: true, error: '' } };
const csrfToken = 'mockToken';
const formOperation = 'post';
- const debouncedValidateQueryMock = jest.fn();
const makeFormData = (data = {}) => ({
formData: {
title: '',
@@ -28,43 +26,42 @@ describe('custom metrics form fields component', () => {
...data,
},
});
- const mountComponent = (props, methods = {}) => {
- component = mount(CustomMetricsFormFields, {
+ const mountComponent = (props) => {
+ wrapper = mount(CustomMetricsFormFields, {
propsData: {
formOperation,
validateQueryPath,
...props,
},
csrfToken,
- methods,
});
};
beforeEach(() => {
mockAxios = new MockAdapter(axios);
- mockAxios.onPost(validateQueryPath).reply(validQueryResponse);
});
afterEach(() => {
- component.destroy();
+ wrapper.destroy();
mockAxios.restore();
});
- it('checks form validity', done => {
+ it('checks form validity', async () => {
+ mockAxios.onPost(validateQueryPath).reply(200, validQueryResponse);
mountComponent({
metricPersisted: true,
...makeFormData({
- title: 'title',
+ title: 'title-old',
yLabel: 'yLabel',
unit: 'unit',
group: 'group',
}),
});
- component.vm.$nextTick(() => {
- expect(component.vm.formIsValid).toBe(false);
- done();
- });
+ wrapper.find(`input[name="prometheus_metric[query]"]`).setValue('query');
+ await axios.waitForAll();
+
+ expect(wrapper.emitted('formValidation')).toStrictEqual([[true]]);
});
describe('hidden inputs', () => {
@@ -110,9 +107,6 @@ describe('custom metrics form fields component', () => {
describe('query input', () => {
const queryInputName = 'prometheus_metric[query]';
- beforeEach(() => {
- mockAxios.onPost(validateQueryPath).reply(validQueryResponse);
- });
it('is empty by default', () => {
mountComponent();
@@ -135,147 +129,65 @@ describe('custom metrics form fields component', () => {
jest.runAllTimers();
});
- it('checks validity on user input', () => {
+ it('checks validity on user input', async () => {
const query = 'changedQuery';
- mountComponent(
- {},
- {
- debouncedValidateQuery: debouncedValidateQueryMock,
- },
- );
- const queryInput = component.find(`input[name="${queryInputName}"]`);
+ mountComponent();
+
+ expect(mockAxios.history.post).toHaveLength(0);
+ const queryInput = wrapper.find(`input[name="${queryInputName}"]`);
queryInput.setValue(query);
- queryInput.trigger('input');
- expect(debouncedValidateQueryMock).toHaveBeenCalledWith(query);
+ await axios.waitForAll();
+ expect(mockAxios.history.post).toHaveLength(1);
});
describe('when query validation is in flight', () => {
beforeEach(() => {
- mountComponent(
- { metricPersisted: true, ...makeFormData({ query: 'validQuery' }) },
- {
- requestValidation: jest.fn().mockImplementation(
- () =>
- new Promise(resolve =>
- setTimeout(() => {
- resolve(validQueryResponse);
- }, 4000),
- ),
- ),
- },
- );
+ mountComponent({ metricPersisted: true, ...makeFormData({ query: 'validQuery' }) });
+ mockAxios.onPost(validateQueryPath).reply(200, validQueryResponse);
});
- afterEach(() => {
- jest.clearAllTimers();
- });
-
- it('expect queryValidateInFlight is in flight', done => {
- const queryInput = component.find(`input[name="${queryInputName}"]`);
+ it('expect loading message to display', async () => {
+ const queryInput = wrapper.find(`input[name="${queryInputName}"]`);
queryInput.setValue('query');
- queryInput.trigger('input');
-
- component.vm.$nextTick(() => {
- expect(component.vm.queryValidateInFlight).toBe(true);
- jest.runOnlyPendingTimers();
- waitForPromises()
- .then(() => {
- component.vm.$nextTick(() => {
- expect(component.vm.queryValidateInFlight).toBe(false);
- expect(component.vm.queryIsValid).toBe(true);
- done();
- });
- })
- .catch(done.fail);
- });
- });
- it('expect loading message to display', done => {
- const queryInput = component.find(`input[name="${queryInputName}"]`);
- queryInput.setValue('query');
- queryInput.trigger('input');
- component.vm.$nextTick(() => {
- expect(component.text()).toContain('Validating query');
- jest.runOnlyPendingTimers();
- done();
- });
+ expect(wrapper.text()).toContain('Validating query');
});
- it('expect loading message to disappear', done => {
- const queryInput = component.find(`input[name="${queryInputName}"]`);
+ it('expect loading message to disappear', async () => {
+ const queryInput = wrapper.find(`input[name="${queryInputName}"]`);
queryInput.setValue('query');
- queryInput.trigger('input');
- component.vm.$nextTick(() => {
- jest.runOnlyPendingTimers();
- waitForPromises()
- .then(() => {
- component.vm.$nextTick(() => {
- expect(component.vm.queryValidateInFlight).toBe(false);
- expect(component.vm.queryIsValid).toBe(true);
- expect(component.vm.errorMessage).toBe('');
- done();
- });
- })
- .catch(done.fail);
- });
+
+ await axios.waitForAll();
+ expect(wrapper.text()).not.toContain('Validating query');
});
});
describe('when query is invalid', () => {
const errorMessage = 'mockErrorMessage';
- const invalidQueryResponse = {
- data: { success: true, query: { valid: false, error: errorMessage } },
- };
+ const invalidQueryResponse = { success: true, query: { valid: false, error: errorMessage } };
beforeEach(() => {
- mountComponent(
- { metricPersisted: true, ...makeFormData({ query: 'invalidQuery' }) },
- {
- requestValidation: jest
- .fn()
- .mockImplementation(() => Promise.resolve(invalidQueryResponse)),
- },
- );
+ mockAxios.onPost(validateQueryPath).reply(200, invalidQueryResponse);
+ mountComponent({ metricPersisted: true, ...makeFormData({ query: 'invalidQuery' }) });
+ return axios.waitForAll();
});
- it('sets queryIsValid to false', done => {
- component.vm.$nextTick(() => {
- expect(component.vm.queryValidateInFlight).toBe(false);
- expect(component.vm.queryIsValid).toBe(false);
- done();
- });
- });
-
- it('shows invalid query message', done => {
- component.vm.$nextTick(() => {
- expect(component.text()).toContain(errorMessage);
- done();
- });
+ it('shows invalid query message', async () => {
+ expect(wrapper.text()).toContain(errorMessage);
});
});
describe('when query is valid', () => {
beforeEach(() => {
- mountComponent(
- { metricPersisted: true, ...makeFormData({ query: 'validQuery' }) },
- {
- requestValidation: jest
- .fn()
- .mockImplementation(() => Promise.resolve(validQueryResponse)),
- },
- );
+ mockAxios.onPost(validateQueryPath).reply(200, validQueryResponse);
+ mountComponent({ metricPersisted: true, ...makeFormData({ query: 'validQuery' }) });
});
- it('sets queryIsValid to true when query is valid', done => {
- component.vm.$nextTick(() => {
- expect(component.vm.queryIsValid).toBe(true);
- done();
- });
- });
+ it('shows valid query message', async () => {
+ await axios.waitForAll();
- it('shows valid query message', () => {
- expect(component.text()).toContain('PromQL query is valid');
+ expect(wrapper.text()).toContain('PromQL query is valid');
});
});
});
diff --git a/spec/frontend/cycle_analytics/limit_warning_component_spec.js b/spec/frontend/cycle_analytics/limit_warning_component_spec.js
index e712dea67cb..edde3725dd6 100644
--- a/spec/frontend/cycle_analytics/limit_warning_component_spec.js
+++ b/spec/frontend/cycle_analytics/limit_warning_component_spec.js
@@ -5,7 +5,7 @@ import LimitWarningComponent from '~/cycle_analytics/components/limit_warning_co
Vue.use(Translate);
-const createComponent = props =>
+const createComponent = (props) =>
shallowMount(LimitWarningComponent, {
propsData: {
...props,
diff --git a/spec/frontend/deploy_freeze/components/deploy_freeze_modal_spec.js b/spec/frontend/deploy_freeze/components/deploy_freeze_modal_spec.js
index 9f28ddfd230..650825d1cb7 100644
--- a/spec/frontend/deploy_freeze/components/deploy_freeze_modal_spec.js
+++ b/spec/frontend/deploy_freeze/components/deploy_freeze_modal_spec.js
@@ -19,7 +19,7 @@ describe('Deploy freeze modal', () => {
timezoneData: timezoneDataFixture,
});
wrapper = shallowMount(DeployFreezeModal, {
- attachToDocument: true,
+ attachTo: document.body,
stubs: {
GlModal,
},
@@ -29,10 +29,7 @@ describe('Deploy freeze modal', () => {
});
const findModal = () => wrapper.find(GlModal);
- const addDeployFreezeButton = () =>
- findModal()
- .findAll(GlButton)
- .at(1);
+ const addDeployFreezeButton = () => findModal().findAll(GlButton).at(1);
const setInput = (freezeStartCron, freezeEndCron, selectedTimezone) => {
store.state.freezeStartCron = freezeStartCron;
diff --git a/spec/frontend/deploy_freeze/components/deploy_freeze_table_spec.js b/spec/frontend/deploy_freeze/components/deploy_freeze_table_spec.js
index 8480705b5e3..3b8e8f8485e 100644
--- a/spec/frontend/deploy_freeze/components/deploy_freeze_table_spec.js
+++ b/spec/frontend/deploy_freeze/components/deploy_freeze_table_spec.js
@@ -18,7 +18,7 @@ describe('Deploy freeze table', () => {
});
jest.spyOn(store, 'dispatch').mockImplementation();
wrapper = mount(DeployFreezeTable, {
- attachToDocument: true,
+ attachTo: document.body,
localVue,
store,
});
diff --git a/spec/frontend/deploy_freeze/components/timezone_dropdown_spec.js b/spec/frontend/deploy_freeze/components/timezone_dropdown_spec.js
index 2aa977dfa5a..1b03cc03d02 100644
--- a/spec/frontend/deploy_freeze/components/timezone_dropdown_spec.js
+++ b/spec/frontend/deploy_freeze/components/timezone_dropdown_spec.js
@@ -30,7 +30,7 @@ describe('Deploy freeze timezone dropdown', () => {
};
const findAllDropdownItems = () => wrapper.findAll(GlDropdownItem);
- const findDropdownItemByIndex = index => wrapper.findAll(GlDropdownItem).at(index);
+ const findDropdownItemByIndex = (index) => wrapper.findAll(GlDropdownItem).at(index);
afterEach(() => {
wrapper.destroy();
diff --git a/spec/frontend/deploy_keys/components/app_spec.js b/spec/frontend/deploy_keys/components/app_spec.js
index 291502c9ed7..479320f92f2 100644
--- a/spec/frontend/deploy_keys/components/app_spec.js
+++ b/spec/frontend/deploy_keys/components/app_spec.js
@@ -35,7 +35,7 @@ describe('Deploy keys app component', () => {
});
const findLoadingIcon = () => wrapper.find('.gl-spinner');
- const findKeyPanels = () => wrapper.findAll('.deploy-keys .nav-links li');
+ const findKeyPanels = () => wrapper.findAll('.deploy-keys .gl-tabs-nav li');
it('renders loading icon while waiting for request', () => {
mock.onGet(TEST_ENDPOINT).reply(() => new Promise());
@@ -54,22 +54,14 @@ describe('Deploy keys app component', () => {
});
it.each`
- selector | label | count
- ${'.js-deployKeys-tab-enabled_keys'} | ${'Enabled deploy keys'} | ${1}
- ${'.js-deployKeys-tab-available_project_keys'} | ${'Privately accessible deploy keys'} | ${0}
- ${'.js-deployKeys-tab-public_keys'} | ${'Publicly accessible deploy keys'} | ${1}
- `('$selector title is $label with keys count equal to $count', ({ selector, label, count }) => {
+ selector
+ ${'.js-deployKeys-tab-enabled_keys'}
+ ${'.js-deployKeys-tab-available_project_keys'}
+ ${'.js-deployKeys-tab-public_keys'}
+ `('$selector title exists', ({ selector }) => {
return mountComponent().then(() => {
const element = wrapper.find(selector);
expect(element.exists()).toBe(true);
- expect(element.text().trim()).toContain(label);
-
- expect(
- element
- .find('.badge')
- .text()
- .trim(),
- ).toBe(count.toString());
});
});
diff --git a/spec/frontend/deploy_keys/components/key_spec.js b/spec/frontend/deploy_keys/components/key_spec.js
index d990c64c241..fcb4e31dec8 100644
--- a/spec/frontend/deploy_keys/components/key_spec.js
+++ b/spec/frontend/deploy_keys/components/key_spec.js
@@ -9,13 +9,9 @@ describe('Deploy keys key', () => {
const data = getJSONFixture('deploy_keys/keys.json');
- const findTextAndTrim = selector =>
- wrapper
- .find(selector)
- .text()
- .trim();
+ const findTextAndTrim = (selector) => wrapper.find(selector).text().trim();
- const createComponent = propsData => {
+ const createComponent = (propsData) => {
wrapper = mount(key, {
propsData: {
store,
@@ -103,10 +99,7 @@ describe('Deploy keys key', () => {
it('expands all project labels after click', () => {
createComponent({ deployKey });
const { length } = deployKey.deploy_keys_projects;
- wrapper
- .findAll('.deploy-project-label')
- .at(1)
- .trigger('click');
+ wrapper.findAll('.deploy-project-label').at(1).trigger('click');
return wrapper.vm.$nextTick().then(() => {
const labels = wrapper.findAll('.deploy-project-label');
diff --git a/spec/frontend/deploy_keys/components/keys_panel_spec.js b/spec/frontend/deploy_keys/components/keys_panel_spec.js
index 53c8ba073bc..34b592017e9 100644
--- a/spec/frontend/deploy_keys/components/keys_panel_spec.js
+++ b/spec/frontend/deploy_keys/components/keys_panel_spec.js
@@ -8,7 +8,7 @@ describe('Deploy keys panel', () => {
const findTableRowHeader = () => wrapper.find('.table-row-header');
- const mountComponent = props => {
+ const mountComponent = (props) => {
const store = new DeployKeysStore();
store.keys = data;
wrapper = mount(deployKeysPanel, {
@@ -48,12 +48,9 @@ describe('Deploy keys panel', () => {
expect(wrapper.find('.settings-message').exists()).toBe(true);
- expect(
- wrapper
- .find('.settings-message')
- .text()
- .trim(),
- ).toBe('No deploy keys found. Create one with the form above.');
+ expect(wrapper.find('.settings-message').text().trim()).toBe(
+ 'No deploy keys found. Create one with the form above.',
+ );
});
it('renders no table header if keys are empty', () => {
diff --git a/spec/frontend/deprecated_jquery_dropdown_spec.js b/spec/frontend/deprecated_jquery_dropdown_spec.js
index e6323859899..6070532a1bf 100644
--- a/spec/frontend/deprecated_jquery_dropdown_spec.js
+++ b/spec/frontend/deprecated_jquery_dropdown_spec.js
@@ -56,8 +56,8 @@ describe('deprecatedJQueryDropdown', () => {
search: {
fields: ['name'],
},
- text: project => project.name_with_namespace || project.name,
- id: project => project.id,
+ text: (project) => project.name_with_namespace || project.name,
+ id: (project) => project.id,
...extraOpts,
};
test.dropdownButtonElement = $('#js-project-dropdown', test.dropdownContainerElement);
@@ -230,10 +230,7 @@ describe('deprecatedJQueryDropdown', () => {
it('should still have input value on close and restore', () => {
const $searchInput = $(SEARCH_INPUT_SELECTOR);
initDropDown.call(this, false, true);
- $searchInput
- .trigger('focus')
- .val('g')
- .trigger('input');
+ $searchInput.trigger('focus').val('g').trigger('input');
expect($searchInput.val()).toEqual('g');
test.dropdownButtonElement.trigger('hidden.bs.dropdown');
diff --git a/spec/frontend/design_management/components/design_notes/design_discussion_spec.js b/spec/frontend/design_management/components/design_notes/design_discussion_spec.js
index 9fbd9b2c2a3..77fc70e08d1 100644
--- a/spec/frontend/design_management/components/design_notes/design_discussion_spec.js
+++ b/spec/frontend/design_management/components/design_notes/design_discussion_spec.js
@@ -108,7 +108,7 @@ describe('Design discussions component', () => {
it('renders correct amount of discussion notes', () => {
expect(findDesignNotes()).toHaveLength(2);
- expect(findDesignNotes().wrappers.every(w => w.isVisible())).toBe(true);
+ expect(findDesignNotes().wrappers.every((w) => w.isVisible())).toBe(true);
});
it('renders reply placeholder', () => {
@@ -150,16 +150,8 @@ describe('Design discussions component', () => {
});
it('shows only the first note', () => {
- expect(
- findDesignNotes()
- .at(0)
- .isVisible(),
- ).toBe(true);
- expect(
- findDesignNotes()
- .at(1)
- .isVisible(),
- ).toBe(false);
+ expect(findDesignNotes().at(0).isVisible()).toBe(true);
+ expect(findDesignNotes().at(1).isVisible()).toBe(false);
});
it('renders resolved message', () => {
@@ -193,11 +185,7 @@ describe('Design discussions component', () => {
});
it('renders the second note', () => {
- expect(
- findDesignNotes()
- .at(1)
- .isVisible(),
- ).toBe(true);
+ expect(findDesignNotes().at(1).isVisible()).toBe(true);
});
it('renders a reply placeholder', () => {
@@ -263,7 +251,7 @@ describe('Design discussions component', () => {
describe('when any note from a discussion is active', () => {
it.each([notes[0], notes[0].discussion.notes.nodes[1]])(
'applies correct class to all notes in the active discussion',
- note => {
+ (note) => {
createComponent(
{ discussion: mockDiscussion },
{
@@ -277,7 +265,7 @@ describe('Design discussions component', () => {
expect(
wrapper
.findAll(DesignNote)
- .wrappers.every(designNote => designNote.classes('gl-bg-blue-50')),
+ .wrappers.every((designNote) => designNote.classes('gl-bg-blue-50')),
).toBe(true);
},
);
diff --git a/spec/frontend/design_management/components/design_notes/design_reply_form_spec.js b/spec/frontend/design_management/components/design_notes/design_reply_form_spec.js
index 1a80fc4e761..a338a5ef200 100644
--- a/spec/frontend/design_management/components/design_notes/design_reply_form_spec.js
+++ b/spec/frontend/design_management/components/design_notes/design_reply_form_spec.js
@@ -36,7 +36,7 @@ describe('Design reply form component', () => {
it('textarea has focus after component mount', () => {
// We need to attach to document, so that `document.activeElement` is properly set in jsdom
- createComponent({}, { attachToDocument: true });
+ createComponent({}, { attachTo: document.body });
expect(findTextarea().element).toEqual(document.activeElement);
});
diff --git a/spec/frontend/design_management/components/design_overlay_spec.js b/spec/frontend/design_management/components/design_overlay_spec.js
index f4fd4c70dfc..a026cc39c84 100644
--- a/spec/frontend/design_management/components/design_overlay_spec.js
+++ b/spec/frontend/design_management/components/design_overlay_spec.js
@@ -13,7 +13,7 @@ describe('Design overlay component', () => {
const findAllNotes = () => wrapper.findAll('.js-image-badge');
const findCommentBadge = () => wrapper.find('.comment-indicator');
- const findBadgeAtIndex = noteIndex => findAllNotes().at(noteIndex);
+ const findBadgeAtIndex = (noteIndex) => findAllNotes().at(noteIndex);
const findFirstBadge = () => findBadgeAtIndex(0);
const findSecondBadge = () => findBadgeAtIndex(1);
@@ -108,7 +108,7 @@ describe('Design overlay component', () => {
describe('when no discussion is active', () => {
it('should not apply inactive class to any pins', () => {
expect(
- findAllNotes(0).wrappers.every(designNote => designNote.classes('gl-bg-blue-50')),
+ findAllNotes(0).wrappers.every((designNote) => designNote.classes('gl-bg-blue-50')),
).toBe(false);
});
});
@@ -116,7 +116,7 @@ describe('Design overlay component', () => {
describe('when a discussion is active', () => {
it.each([notes[0].discussion.notes.nodes[1], notes[0].discussion.notes.nodes[0]])(
'should not apply inactive class to the pin for the active discussion',
- note => {
+ (note) => {
wrapper.setData({
activeDiscussion: {
id: note.id,
diff --git a/spec/frontend/design_management/components/design_presentation_spec.js b/spec/frontend/design_management/components/design_presentation_spec.js
index d633d00f2ed..31fd154dc4b 100644
--- a/spec/frontend/design_management/components/design_presentation_spec.js
+++ b/spec/frontend/design_management/components/design_presentation_spec.js
@@ -1,3 +1,4 @@
+import { nextTick } from 'vue';
import { shallowMount } from '@vue/test-utils';
import DesignPresentation from '~/design_management/components/design_presentation.vue';
import DesignOverlay from '~/design_management/components/design_overlay.vue';
@@ -100,12 +101,12 @@ describe('Design management design presentation component', () => {
clientY: endCoords.clientY,
});
- return wrapper.vm.$nextTick();
+ return nextTick();
})
.then(() => {
if (mouseup) {
addCommentOverlay.trigger(event.mouseup);
- return wrapper.vm.$nextTick();
+ return nextTick();
}
return undefined;
@@ -125,7 +126,7 @@ describe('Design management design presentation component', () => {
mockOverlayData,
);
- return wrapper.vm.$nextTick().then(() => {
+ return nextTick().then(() => {
expect(wrapper.element).toMatchSnapshot();
});
});
@@ -133,7 +134,7 @@ describe('Design management design presentation component', () => {
it('renders empty state when no image provided', () => {
createComponent();
- return wrapper.vm.$nextTick().then(() => {
+ return nextTick().then(() => {
expect(wrapper.element).toMatchSnapshot();
});
});
@@ -149,7 +150,7 @@ describe('Design management design presentation component', () => {
wrapper.vm.openCommentForm({ x: 1, y: 1 });
- return wrapper.vm.$nextTick().then(() => {
+ return nextTick().then(() => {
expect(wrapper.emitted('openCommentForm')).toEqual([
[{ ...mockOverlayData.overlayDimensions, x: 1, y: 1 }],
]);
@@ -166,7 +167,7 @@ describe('Design management design presentation component', () => {
mockOverlayData,
);
- return wrapper.vm.$nextTick().then(() => {
+ return nextTick().then(() => {
expect(wrapper.vm.currentCommentForm).toBeNull();
expect(wrapper.element).toMatchSnapshot();
});
@@ -182,7 +183,7 @@ describe('Design management design presentation component', () => {
mockOverlayData,
);
- return wrapper.vm.$nextTick().then(() => {
+ return nextTick().then(() => {
expect(wrapper.vm.currentCommentForm).toBeNull();
expect(wrapper.element).toMatchSnapshot();
});
@@ -206,7 +207,7 @@ describe('Design management design presentation component', () => {
},
);
- return wrapper.vm.$nextTick().then(() => {
+ return nextTick().then(() => {
expect(wrapper.vm.currentCommentForm).toEqual({
x: 1,
y: 1,
@@ -379,7 +380,7 @@ describe('Design management design presentation component', () => {
});
describe('onImageResize', () => {
- it('sets zoom focal point on initial load', () => {
+ beforeEach(() => {
createComponent(
{
image: 'test.jpg',
@@ -388,22 +389,21 @@ describe('Design management design presentation component', () => {
mockOverlayData,
);
- wrapper.setMethods({
- shiftZoomFocalPoint: jest.fn(),
- scaleZoomFocalPoint: jest.fn(),
- scrollToFocalPoint: jest.fn(),
- });
-
+ jest.spyOn(wrapper.vm, 'shiftZoomFocalPoint');
+ jest.spyOn(wrapper.vm, 'scaleZoomFocalPoint');
+ jest.spyOn(wrapper.vm, 'scrollToFocalPoint');
wrapper.vm.onImageResize({ width: 10, height: 10 });
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.shiftZoomFocalPoint).toHaveBeenCalled();
- expect(wrapper.vm.initialLoad).toBe(false);
- });
+ return nextTick();
+ });
+
+ it('sets zoom focal point on initial load', () => {
+ expect(wrapper.vm.shiftZoomFocalPoint).toHaveBeenCalled();
+ expect(wrapper.vm.initialLoad).toBe(false);
});
it('calls scaleZoomFocalPoint and scrollToFocalPoint after initial load', () => {
wrapper.vm.onImageResize({ width: 10, height: 10 });
- return wrapper.vm.$nextTick().then(() => {
+ return nextTick().then(() => {
expect(wrapper.vm.scaleZoomFocalPoint).toHaveBeenCalled();
expect(wrapper.vm.scrollToFocalPoint).toHaveBeenCalled();
});
@@ -506,7 +506,7 @@ describe('Design management design presentation component', () => {
.$nextTick()
.then(() => {
addCommentOverlay.trigger('mouseup');
- return wrapper.vm.$nextTick();
+ return nextTick();
})
.then(() => {
expect(wrapper.emitted('openCommentForm')).toBeDefined();
diff --git a/spec/frontend/design_management/components/design_scaler_spec.js b/spec/frontend/design_management/components/design_scaler_spec.js
index 290ec3a18e3..40f53e8d0bf 100644
--- a/spec/frontend/design_management/components/design_scaler_spec.js
+++ b/spec/frontend/design_management/components/design_scaler_spec.js
@@ -10,7 +10,7 @@ describe('Design management design scaler component', () => {
const getResetScaleButton = () => getButtons().at(1);
const getIncreaseScaleButton = () => getButtons().at(2);
- const setScale = scale => wrapper.vm.setScale(scale);
+ const setScale = (scale) => wrapper.vm.setScale(scale);
const createComponent = () => {
wrapper = shallowMount(DesignScaler);
diff --git a/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap b/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
index de276bd300b..36a2ffd19c3 100644
--- a/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
+++ b/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
@@ -10,7 +10,10 @@ exports[`Design management list item component when item appears in view after i
exports[`Design management list item component with notes renders item with multiple comments 1`] = `
<router-link-stub
+ ariacurrentvalue="page"
class="card gl-cursor-pointer text-plain js-design-list-item design-list-item design-list-item-new"
+ event="click"
+ tag="a"
to="[object Object]"
>
<div
@@ -81,7 +84,10 @@ exports[`Design management list item component with notes renders item with mult
exports[`Design management list item component with notes renders item with single comment 1`] = `
<router-link-stub
+ ariacurrentvalue="page"
class="card gl-cursor-pointer text-plain js-design-list-item design-list-item design-list-item-new"
+ event="click"
+ tag="a"
to="[object Object]"
>
<div
diff --git a/spec/frontend/design_management/components/toolbar/index_spec.js b/spec/frontend/design_management/components/toolbar/index_spec.js
index 2914365b0df..6ac088a2c53 100644
--- a/spec/frontend/design_management/components/toolbar/index_spec.js
+++ b/spec/frontend/design_management/components/toolbar/index_spec.js
@@ -116,6 +116,8 @@ describe('Design management toolbar component', () => {
});
it('renders download button with correct link', () => {
+ createComponent();
+
expect(wrapper.find(GlButton).attributes('href')).toBe(
'/-/designs/306/7f747adcd4693afadbe968d7ba7d983349b9012d',
);
diff --git a/spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap b/spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap
index 8c6b446794f..63afc3f000d 100644
--- a/spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap
+++ b/spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap
@@ -4,6 +4,7 @@ exports[`Design management design version dropdown component renders design vers
<gl-dropdown-stub
category="primary"
headertext=""
+ hideheaderborder="true"
issueiid=""
projectpath=""
size="small"
@@ -44,6 +45,7 @@ exports[`Design management design version dropdown component renders design vers
<gl-dropdown-stub
category="primary"
headertext=""
+ hideheaderborder="true"
issueiid=""
projectpath=""
size="small"
diff --git a/spec/frontend/design_management/components/upload/design_version_dropdown_spec.js b/spec/frontend/design_management/components/upload/design_version_dropdown_spec.js
index 4ef787ac754..d841aabf2f3 100644
--- a/spec/frontend/design_management/components/upload/design_version_dropdown_spec.js
+++ b/spec/frontend/design_management/components/upload/design_version_dropdown_spec.js
@@ -6,7 +6,7 @@ import mockAllVersions from './mock_data/all_versions';
const LATEST_VERSION_ID = 3;
const PREVIOUS_VERSION_ID = 2;
-const designRouteFactory = versionId => ({
+const designRouteFactory = (versionId) => ({
path: `/designs?version=${versionId}`,
query: {
version: `${versionId}`,
@@ -42,7 +42,7 @@ describe('Design management design version dropdown component', () => {
wrapper.destroy();
});
- const findVersionLink = index => wrapper.findAll(GlDropdownItem).at(index);
+ const findVersionLink = (index) => wrapper.findAll(GlDropdownItem).at(index);
it('renders design version dropdown button', () => {
createComponent();
diff --git a/spec/frontend/design_management/pages/index_spec.js b/spec/frontend/design_management/pages/index_spec.js
index 147169dd9aa..7d28d6f6d11 100644
--- a/spec/frontend/design_management/pages/index_spec.js
+++ b/spec/frontend/design_management/pages/index_spec.js
@@ -1,14 +1,15 @@
+import { nextTick } from 'vue';
import { createLocalVue, shallowMount } from '@vue/test-utils';
import VueApollo, { ApolloMutation } from 'vue-apollo';
import VueDraggable from 'vuedraggable';
import VueRouter from 'vue-router';
import { GlEmptyState } from '@gitlab/ui';
-import createMockApollo from 'jest/helpers/mock_apollo_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import getDesignListQuery from 'shared_queries/design_management/get_design_list.query.graphql';
import permissionsQuery from 'shared_queries/design_management/design_permissions.query.graphql';
import Index from '~/design_management/pages/index.vue';
-import uploadDesignQuery from '~/design_management/graphql/mutations/upload_design.mutation.graphql';
+import uploadDesignMutation from '~/design_management/graphql/mutations/upload_design.mutation.graphql';
import DesignDestroyer from '~/design_management/components/design_destroyer.vue';
import DesignDropzone from '~/vue_shared/components/upload_dropzone/upload_dropzone.vue';
import DeleteButton from '~/design_management/components/delete_button.vue';
@@ -113,7 +114,7 @@ describe('Design management index page', () => {
async function moveDesigns(localWrapper) {
await jest.runOnlyPendingTimers();
- await localWrapper.vm.$nextTick();
+ await nextTick();
localWrapper.find(VueDraggable).vm.$emit('input', reorderedDesigns);
localWrapper.find(VueDraggable).vm.$emit('change', {
@@ -159,7 +160,7 @@ describe('Design management index page', () => {
localVue,
router,
stubs: { DesignDestroyer, ApolloMutation, VueDraggable, ...stubs },
- attachToDocument: true,
+ attachTo: document.body,
provide: {
projectPath: 'project-path',
issueIid: '1',
@@ -200,14 +201,13 @@ describe('Design management index page', () => {
expect(wrapper.element).toMatchSnapshot();
});
- it('renders error', () => {
+ it('renders error', async () => {
createComponent();
wrapper.setData({ error: true });
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.element).toMatchSnapshot();
- });
+ await nextTick();
+ expect(wrapper.element).toMatchSnapshot();
});
it('renders a toolbar with buttons when there are designs', () => {
@@ -254,10 +254,10 @@ describe('Design management index page', () => {
createComponent({ designCollection: { designs: [], copyState: 'READY' } });
});
- it('renders design dropzone', () =>
- wrapper.vm.$nextTick().then(() => {
- expect(findDropzone().exists()).toBe(true);
- }));
+ it('renders design dropzone', async () => {
+ await nextTick();
+ expect(findDropzone().exists()).toBe(true);
+ });
it('has correct classes applied to design dropzone', () => {
expect(dropzoneClasses()).not.toContain('design-list-item');
@@ -268,10 +268,10 @@ describe('Design management index page', () => {
expect(findDropzoneWrapper().classes()).toEqual(['col-12']);
});
- it('does not render a toolbar with buttons', () =>
- wrapper.vm.$nextTick().then(() => {
- expect(findToolbar().exists()).toBe(false);
- }));
+ it('does not render a toolbar with buttons', async () => {
+ await nextTick();
+ expect(findToolbar().exists()).toBe(false);
+ });
});
describe('handling design collection copy state', () => {
@@ -290,7 +290,7 @@ describe('Design management index page', () => {
});
describe('uploading designs', () => {
- it('calls mutation on upload', () => {
+ it('calls mutation on upload', async () => {
createComponent({ stubs: { GlEmptyState } });
const mutationVariables = {
@@ -298,7 +298,7 @@ describe('Design management index page', () => {
context: {
hasUpload: true,
},
- mutation: uploadDesignQuery,
+ mutation: uploadDesignMutation,
variables: {
files: [{ name: 'test' }],
projectPath: 'project-path',
@@ -348,21 +348,16 @@ describe('Design management index page', () => {
},
};
- return wrapper.vm
- .$nextTick()
- .then(() => {
- findDropzone().vm.$emit('change', [{ name: 'test' }]);
- expect(mutate).toHaveBeenCalledWith(mutationVariables);
- expect(wrapper.vm.filesToBeSaved).toEqual([{ name: 'test' }]);
- expect(wrapper.vm.isSaving).toBeTruthy();
- })
- .then(() => {
- expect(dropzoneClasses()).toContain('design-list-item');
- expect(dropzoneClasses()).toContain('design-list-item-new');
- });
+ await nextTick();
+ findDropzone().vm.$emit('change', [{ name: 'test' }]);
+ expect(mutate).toHaveBeenCalledWith(mutationVariables);
+ expect(wrapper.vm.filesToBeSaved).toEqual([{ name: 'test' }]);
+ expect(wrapper.vm.isSaving).toBeTruthy();
+ expect(dropzoneClasses()).toContain('design-list-item');
+ expect(dropzoneClasses()).toContain('design-list-item-new');
});
- it('sets isSaving', () => {
+ it('sets isSaving', async () => {
createComponent();
const uploadDesign = wrapper.vm.onUploadDesign([
@@ -373,35 +368,31 @@ describe('Design management index page', () => {
expect(wrapper.vm.isSaving).toBe(true);
- return uploadDesign.then(() => {
- expect(wrapper.vm.isSaving).toBe(false);
- });
+ await uploadDesign;
+ expect(wrapper.vm.isSaving).toBe(false);
});
- it('updates state appropriately after upload complete', () => {
+ it('updates state appropriately after upload complete', async () => {
createComponent({ stubs: { GlEmptyState } });
wrapper.setData({ filesToBeSaved: [{ name: 'test' }] });
wrapper.vm.onUploadDesignDone(designUploadMutationCreatedResponse);
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.filesToBeSaved).toEqual([]);
- expect(wrapper.vm.isSaving).toBeFalsy();
- expect(wrapper.vm.isLatestVersion).toBe(true);
- });
+ await nextTick();
+
+ expect(wrapper.vm.filesToBeSaved).toEqual([]);
+ expect(wrapper.vm.isSaving).toBeFalsy();
+ expect(wrapper.vm.isLatestVersion).toBe(true);
});
- it('updates state appropriately after upload error', () => {
+ it('updates state appropriately after upload error', async () => {
createComponent({ stubs: { GlEmptyState } });
wrapper.setData({ filesToBeSaved: [{ name: 'test' }] });
wrapper.vm.onUploadDesignError();
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.filesToBeSaved).toEqual([]);
- expect(wrapper.vm.isSaving).toBeFalsy();
- expect(createFlash).toHaveBeenCalled();
-
- createFlash.mockReset();
- });
+ await nextTick();
+ expect(wrapper.vm.filesToBeSaved).toEqual([]);
+ expect(wrapper.vm.isSaving).toBeFalsy();
+ expect(createFlash).toHaveBeenCalled();
});
it('does not call mutation if createDesign is false', () => {
@@ -415,10 +406,6 @@ describe('Design management index page', () => {
describe('upload count limit', () => {
const MAXIMUM_FILE_UPLOAD_LIMIT = 10;
- afterEach(() => {
- createFlash.mockReset();
- });
-
it('does not warn when the max files are uploaded', () => {
createComponent();
@@ -436,7 +423,7 @@ describe('Design management index page', () => {
});
});
- it('flashes warning if designs are skipped', () => {
+ it('flashes warning if designs are skipped', async () => {
createComponent({
mockMutate: () =>
Promise.resolve({
@@ -450,25 +437,22 @@ describe('Design management index page', () => {
},
]);
- return uploadDesign.then(() => {
- expect(createFlash).toHaveBeenCalledTimes(1);
- expect(createFlash).toHaveBeenCalledWith({
- message: 'Upload skipped. test.jpg did not change.',
- types: 'warning',
- });
+ await uploadDesign;
+ expect(createFlash).toHaveBeenCalledTimes(1);
+ expect(createFlash).toHaveBeenCalledWith({
+ message: 'Upload skipped. test.jpg did not change.',
+ types: 'warning',
});
});
describe('dragging onto an existing design', () => {
+ let mockMutate;
beforeEach(() => {
- createComponent({ designs: mockDesigns, allVersions: [mockVersion] });
+ mockMutate = jest.fn().mockResolvedValue();
+ createComponent({ designs: mockDesigns, allVersions: [mockVersion], mockMutate });
});
- it('calls onUploadDesign with valid upload', () => {
- wrapper.setMethods({
- onUploadDesign: jest.fn(),
- });
-
+ it('uploads designs with valid upload', () => {
const mockUploadPayload = [
{
name: mockDesigns[0].filename,
@@ -478,8 +462,13 @@ describe('Design management index page', () => {
const designDropzone = findFirstDropzoneWithDesign();
designDropzone.vm.$emit('change', mockUploadPayload);
- expect(wrapper.vm.onUploadDesign).toHaveBeenCalledTimes(1);
- expect(wrapper.vm.onUploadDesign).toHaveBeenCalledWith(mockUploadPayload);
+ const [{ mutation, variables }] = mockMutate.mock.calls[0];
+ expect(mutation).toBe(uploadDesignMutation);
+ expect(variables).toStrictEqual({
+ files: mockUploadPayload,
+ iid: '1',
+ projectPath: 'project-path',
+ });
});
it.each`
@@ -544,57 +533,39 @@ describe('Design management index page', () => {
expect(findToolbar().isVisible()).toBe(true);
});
- it('adds two designs to selected designs when their checkboxes are checked', () => {
- findDesignCheckboxes()
- .at(0)
- .trigger('click');
-
- return wrapper.vm
- .$nextTick()
- .then(() => {
- findDesignCheckboxes()
- .at(1)
- .trigger('click');
-
- return wrapper.vm.$nextTick();
- })
- .then(() => {
- expect(findDeleteButton().exists()).toBe(true);
- expect(findSelectAllButton().text()).toBe('Deselect all');
- findDeleteButton().vm.$emit('deleteSelectedDesigns');
- const [{ variables }] = mutate.mock.calls[0];
- expect(variables.filenames).toStrictEqual([
- mockDesigns[0].filename,
- mockDesigns[1].filename,
- ]);
- });
+ it('adds two designs to selected designs when their checkboxes are checked', async () => {
+ findDesignCheckboxes().at(0).trigger('click');
+
+ await nextTick();
+ findDesignCheckboxes().at(1).trigger('click');
+
+ await nextTick();
+ expect(findDeleteButton().exists()).toBe(true);
+ expect(findSelectAllButton().text()).toBe('Deselect all');
+ findDeleteButton().vm.$emit('deleteSelectedDesigns');
+ const [{ variables }] = mutate.mock.calls[0];
+ expect(variables.filenames).toStrictEqual([mockDesigns[0].filename, mockDesigns[1].filename]);
});
- it('adds all designs to selected designs when Select All button is clicked', () => {
+ it('adds all designs to selected designs when Select All button is clicked', async () => {
findSelectAllButton().vm.$emit('click');
- return wrapper.vm.$nextTick().then(() => {
- expect(findDeleteButton().props().hasSelectedDesigns).toBe(true);
- expect(findSelectAllButton().text()).toBe('Deselect all');
- expect(wrapper.vm.selectedDesigns).toEqual(mockDesigns.map(design => design.filename));
- });
+ await nextTick();
+ expect(findDeleteButton().props().hasSelectedDesigns).toBe(true);
+ expect(findSelectAllButton().text()).toBe('Deselect all');
+ expect(wrapper.vm.selectedDesigns).toEqual(mockDesigns.map((design) => design.filename));
});
- it('removes all designs from selected designs when at least one design was selected', () => {
- findDesignCheckboxes()
- .at(0)
- .trigger('click');
-
- return wrapper.vm
- .$nextTick()
- .then(() => {
- findSelectAllButton().vm.$emit('click');
- })
- .then(() => {
- expect(findDeleteButton().props().hasSelectedDesigns).toBe(false);
- expect(findSelectAllButton().text()).toBe('Select all');
- expect(wrapper.vm.selectedDesigns).toEqual([]);
- });
+ it('removes all designs from selected designs when at least one design was selected', async () => {
+ findDesignCheckboxes().at(0).trigger('click');
+ await nextTick();
+
+ findSelectAllButton().vm.$emit('click');
+ await nextTick();
+
+ expect(findDeleteButton().props().hasSelectedDesigns).toBe(false);
+ expect(findSelectAllButton().text()).toBe('Select all');
+ expect(wrapper.vm.selectedDesigns).toEqual([]);
});
});
@@ -632,12 +603,10 @@ describe('Design management index page', () => {
describe('pasting a design', () => {
let event;
+ let mockMutate;
beforeEach(() => {
- createComponent({ designs: mockDesigns, allVersions: [mockVersion] });
-
- wrapper.setMethods({
- onUploadDesign: jest.fn(),
- });
+ mockMutate = jest.fn().mockResolvedValue({});
+ createComponent({ designs: mockDesigns, allVersions: [mockVersion], mockMutate });
event = new Event('paste');
event.clipboardData = {
@@ -646,36 +615,56 @@ describe('Design management index page', () => {
};
});
- it('does not call paste event if designs wrapper is not hovered', () => {
+ it('does not upload designs if designs wrapper is not hovered', () => {
document.dispatchEvent(event);
- expect(wrapper.vm.onUploadDesign).not.toHaveBeenCalled();
+ expect(mockMutate).not.toHaveBeenCalled();
});
describe('when designs wrapper is hovered', () => {
+ let realDateNow;
+ const today = () => new Date('2020-12-25');
+ beforeAll(() => {
+ realDateNow = Date.now;
+ global.Date.now = today;
+ });
+
+ afterAll(() => {
+ global.Date.now = realDateNow;
+ });
+
beforeEach(() => {
findDesignsWrapper().trigger('mouseenter');
});
- it('calls onUploadDesign with valid paste', () => {
+ it('uploads design with valid paste', () => {
document.dispatchEvent(event);
- expect(wrapper.vm.onUploadDesign).toHaveBeenCalledTimes(1);
- expect(wrapper.vm.onUploadDesign).toHaveBeenCalledWith([
- new File([{ name: 'image.png' }], 'test.png'),
- ]);
+ const [{ mutation, variables }] = mockMutate.mock.calls[0];
+ expect(mutation).toBe(uploadDesignMutation);
+ expect(variables).toStrictEqual({
+ files: expect.any(Array),
+ iid: '1',
+ projectPath: 'project-path',
+ });
+ expect(variables.files).toEqual(event.clipboardData.files.map((f) => new File([f], '')));
});
it('renames a design if it has an image.png filename', () => {
+ event.clipboardData.getData = () => 'image.png';
document.dispatchEvent(event);
- expect(wrapper.vm.onUploadDesign).toHaveBeenCalledTimes(1);
- expect(wrapper.vm.onUploadDesign).toHaveBeenCalledWith([
- new File([{ name: 'image.png' }], `design_${Date.now()}.png`),
- ]);
+ const [{ mutation, variables }] = mockMutate.mock.calls[0];
+ expect(mutation).toBe(uploadDesignMutation);
+ expect(variables).toStrictEqual({
+ files: expect.any(Array),
+ iid: '1',
+ projectPath: 'project-path',
+ });
+ expect(variables.files[0].name).toEqual(`design_${Date.now()}.png`);
});
- it('does not call onUploadDesign with invalid paste', () => {
+ it('does not call upload with invalid paste', () => {
event.clipboardData = {
items: [{ type: 'text/plain' }, { type: 'text' }],
files: [],
@@ -683,28 +672,27 @@ describe('Design management index page', () => {
document.dispatchEvent(event);
- expect(wrapper.vm.onUploadDesign).not.toHaveBeenCalled();
+ expect(mockMutate).not.toHaveBeenCalled();
});
it('removes onPaste listener after mouseleave event', async () => {
findDesignsWrapper().trigger('mouseleave');
document.dispatchEvent(event);
- expect(wrapper.vm.onUploadDesign).not.toHaveBeenCalled();
+ expect(mockMutate).not.toHaveBeenCalled();
});
});
});
describe('when navigating', () => {
- it('should trigger a scrollIntoView method if designs route is detected', () => {
+ it('should trigger a scrollIntoView method if designs route is detected', async () => {
router.replace({
path: '/designs',
});
createComponent({ loading: true });
- return wrapper.vm.$nextTick().then(() => {
- expect(scrollIntoViewMock).toHaveBeenCalled();
- });
+ await nextTick();
+ expect(scrollIntoViewMock).toHaveBeenCalled();
});
});
@@ -713,14 +701,10 @@ describe('Design management index page', () => {
createComponentWithApollo({});
await jest.runOnlyPendingTimers();
- await wrapper.vm.$nextTick();
+ await nextTick();
expect(findDesigns()).toHaveLength(3);
- expect(
- findDesigns()
- .at(0)
- .props('id'),
- ).toBe('1');
+ expect(findDesigns().at(0).props('id')).toBe('1');
});
it('calls a mutation with correct parameters and reorders designs', async () => {
@@ -730,13 +714,9 @@ describe('Design management index page', () => {
expect(moveDesignHandler).toHaveBeenCalled();
- await wrapper.vm.$nextTick();
+ await nextTick();
- expect(
- findDesigns()
- .at(0)
- .props('id'),
- ).toBe('2');
+ expect(findDesigns().at(0).props('id')).toBe('2');
});
it('prevents reordering when reorderDesigns mutation is in progress', async () => {
@@ -747,8 +727,8 @@ describe('Design management index page', () => {
expect(draggableAttributes().disabled).toBe(true);
await jest.runOnlyPendingTimers(); // kick off the mocked GQL stuff (promises)
- await wrapper.vm.$nextTick(); // kick off the DOM update
- await wrapper.vm.$nextTick(); // kick off the DOM update for finally block
+ await nextTick(); // kick off the DOM update
+ await nextTick(); // kick off the DOM update for finally block
expect(draggableAttributes().disabled).toBe(false);
});
@@ -760,7 +740,7 @@ describe('Design management index page', () => {
await moveDesigns(wrapper);
- await wrapper.vm.$nextTick();
+ await nextTick();
expect(createFlash).toHaveBeenCalledWith({ message: 'Houston, we have a problem' });
});
@@ -772,9 +752,9 @@ describe('Design management index page', () => {
await moveDesigns(wrapper);
- await wrapper.vm.$nextTick(); // kick off the DOM update
+ await nextTick(); // kick off the DOM update
await jest.runOnlyPendingTimers(); // kick off the mocked GQL stuff (promises)
- await wrapper.vm.$nextTick(); // kick off the DOM update for flash
+ await nextTick(); // kick off the DOM update for flash
expect(createFlash).toHaveBeenCalledWith({
message: 'Something went wrong when reordering designs. Please try again',
diff --git a/spec/frontend/design_management/router_spec.js b/spec/frontend/design_management/router_spec.js
index fac4f7d368d..0b4e68eea78 100644
--- a/spec/frontend/design_management/router_spec.js
+++ b/spec/frontend/design_management/router_spec.js
@@ -40,7 +40,7 @@ describe('Design management router', () => {
window.location.hash = '';
});
- describe.each([['/'], [{ name: DESIGNS_ROUTE_NAME }]])('root route', routeArg => {
+ describe.each([['/'], [{ name: DESIGNS_ROUTE_NAME }]])('root route', (routeArg) => {
it('pushes home component', () => {
const wrapper = factory(routeArg);
@@ -50,7 +50,7 @@ describe('Design management router', () => {
describe.each([['/designs/1'], [{ name: DESIGN_ROUTE_NAME, params: { id: '1' } }]])(
'designs detail route',
- routeArg => {
+ (routeArg) => {
it('pushes designs detail component', () => {
const wrapper = factory(routeArg);
diff --git a/spec/frontend/design_management/utils/error_messages_spec.js b/spec/frontend/design_management/utils/error_messages_spec.js
index f5072c3b6b7..b80dcd9abde 100644
--- a/spec/frontend/design_management/utils/error_messages_spec.js
+++ b/spec/frontend/design_management/utils/error_messages_spec.js
@@ -3,7 +3,7 @@ import {
designUploadSkippedWarning,
} from '~/design_management/utils/error_messages';
-const mockFilenames = n =>
+const mockFilenames = (n) =>
Array(n)
.fill(0)
.map((_, i) => ({ filename: `${i + 1}.jpg` }));
@@ -14,7 +14,7 @@ describe('Error message', () => {
const pluralMsg = 'Could not archive designs. Please try again.';
describe('when [singular=true]', () => {
- it.each([[undefined], [true]])('uses singular grammar', singularOption => {
+ it.each([[undefined], [true]])('uses singular grammar', (singularOption) => {
expect(designDeletionError({ singular: singularOption })).toEqual(singularMsg);
});
});
diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js
index 416564b72c3..7fbeb33dd93 100644
--- a/spec/frontend/diffs/components/app_spec.js
+++ b/spec/frontend/diffs/components/app_spec.js
@@ -1,5 +1,6 @@
+import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import { GlLoadingIcon, GlPagination } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'spec/test_constants';
@@ -26,6 +27,8 @@ const TEST_ENDPOINT = `${TEST_HOST}/diff/endpoint`;
const COMMIT_URL = `${TEST_HOST}/COMMIT/OLD`;
const UPDATED_COMMIT_URL = `${TEST_HOST}/COMMIT/NEW`;
+Vue.use(Vuex);
+
function getCollapsedFilesWarning(wrapper) {
return wrapper.find(CollapsedFilesWarning);
}
@@ -37,7 +40,6 @@ describe('diffs/components/app', () => {
let mock;
function createComponent(props = {}, extendStore = () => {}, provisions = {}) {
- const localVue = createLocalVue();
const provide = {
...provisions,
glFeatures: {
@@ -45,16 +47,13 @@ describe('diffs/components/app', () => {
},
};
- localVue.use(Vuex);
-
store = createDiffsStore();
store.state.diffs.isLoading = false;
store.state.diffs.isTreeLoaded = true;
extendStore(store);
- wrapper = shallowMount(localVue.extend(App), {
- localVue,
+ wrapper = shallowMount(App, {
propsData: {
endpoint: TEST_ENDPOINT,
endpointMetadata: `${TEST_HOST}/diff/endpointMetadata`,
@@ -70,11 +69,6 @@ describe('diffs/components/app', () => {
},
provide,
store,
- methods: {
- isLatestVersion() {
- return true;
- },
- },
});
}
@@ -102,13 +96,13 @@ describe('diffs/components/app', () => {
});
describe('fetch diff methods', () => {
- beforeEach(done => {
+ beforeEach(() => {
const fetchResolver = () => {
store.state.diffs.retrievingBatches = false;
store.state.notes.discussions = 'test';
return Promise.resolve({ real_size: 100 });
};
- jest.spyOn(window, 'requestIdleCallback').mockImplementation(fn => fn());
+ jest.spyOn(window, 'requestIdleCallback').mockImplementation((fn) => fn());
createComponent();
jest.spyOn(wrapper.vm, 'fetchDiffFilesMeta').mockImplementation(fetchResolver);
jest.spyOn(wrapper.vm, 'fetchDiffFilesBatch').mockImplementation(fetchResolver);
@@ -119,68 +113,55 @@ describe('diffs/components/app', () => {
jest.spyOn(wrapper.vm, 'unwatchRetrievingBatches').mockImplementation(() => {});
store.state.diffs.retrievingBatches = true;
store.state.diffs.diffFiles = [];
- wrapper.vm.$nextTick(done);
+ return nextTick();
});
- it('calls batch methods if diffsBatchLoad is enabled, and not latest version', done => {
+ it('calls batch methods if diffsBatchLoad is enabled, and not latest version', async () => {
expect(wrapper.vm.diffFilesLength).toEqual(0);
- wrapper.vm.isLatestVersion = () => false;
wrapper.vm.fetchData(false);
- setImmediate(() => {
- expect(wrapper.vm.startRenderDiffsQueue).toHaveBeenCalled();
- expect(wrapper.vm.fetchDiffFilesMeta).toHaveBeenCalled();
- expect(wrapper.vm.fetchDiffFilesBatch).toHaveBeenCalled();
- expect(wrapper.vm.fetchCoverageFiles).toHaveBeenCalled();
- expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled();
- expect(wrapper.vm.diffFilesLength).toEqual(100);
- expect(wrapper.vm.unwatchRetrievingBatches).toHaveBeenCalled();
- done();
- });
+ await nextTick();
+
+ expect(wrapper.vm.startRenderDiffsQueue).toHaveBeenCalled();
+ expect(wrapper.vm.fetchDiffFilesMeta).toHaveBeenCalled();
+ expect(wrapper.vm.fetchDiffFilesBatch).toHaveBeenCalled();
+ expect(wrapper.vm.fetchCoverageFiles).toHaveBeenCalled();
+ expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled();
+ expect(wrapper.vm.diffFilesLength).toBe(100);
+ expect(wrapper.vm.unwatchRetrievingBatches).toHaveBeenCalled();
});
- it('calls batch methods if diffsBatchLoad is enabled, and latest version', done => {
+ it('calls batch methods if diffsBatchLoad is enabled, and latest version', async () => {
expect(wrapper.vm.diffFilesLength).toEqual(0);
wrapper.vm.fetchData(false);
- setImmediate(() => {
- expect(wrapper.vm.startRenderDiffsQueue).toHaveBeenCalled();
- expect(wrapper.vm.fetchDiffFilesMeta).toHaveBeenCalled();
- expect(wrapper.vm.fetchDiffFilesBatch).toHaveBeenCalled();
- expect(wrapper.vm.fetchCoverageFiles).toHaveBeenCalled();
- expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled();
- expect(wrapper.vm.diffFilesLength).toEqual(100);
- expect(wrapper.vm.unwatchRetrievingBatches).toHaveBeenCalled();
- done();
- });
- });
- });
-
- it('adds container-limiting classes when showFileTree is false with inline diffs', () => {
- createComponent({}, ({ state }) => {
- state.diffs.showTreeList = false;
- state.diffs.isParallelView = false;
- });
-
- expect(wrapper.find('.container-limited.limit-container-width').exists()).toBe(true);
- });
+ await nextTick();
- it('does not add container-limiting classes when showFileTree is false with inline diffs', () => {
- createComponent({}, ({ state }) => {
- state.diffs.showTreeList = true;
- state.diffs.isParallelView = false;
+ expect(wrapper.vm.startRenderDiffsQueue).toHaveBeenCalled();
+ expect(wrapper.vm.fetchDiffFilesMeta).toHaveBeenCalled();
+ expect(wrapper.vm.fetchDiffFilesBatch).toHaveBeenCalled();
+ expect(wrapper.vm.fetchCoverageFiles).toHaveBeenCalled();
+ expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled();
+ expect(wrapper.vm.diffFilesLength).toBe(100);
+ expect(wrapper.vm.unwatchRetrievingBatches).toHaveBeenCalled();
});
-
- expect(wrapper.find('.container-limited.limit-container-width').exists()).toBe(false);
});
- it('does not add container-limiting classes when isFluidLayout', () => {
- createComponent({ isFluidLayout: true }, ({ state }) => {
- state.diffs.isParallelView = false;
- });
-
- expect(wrapper.find('.container-limited.limit-container-width').exists()).toBe(false);
- });
+ it.each`
+ props | state | expected
+ ${{ isFluidLayout: true }} | ${{ isParallelView: false }} | ${false}
+ ${{}} | ${{ isParallelView: false }} | ${true}
+ ${{}} | ${{ showTreeList: true, diffFiles: [{}], isParallelView: false }} | ${false}
+ ${{}} | ${{ showTreeList: false, diffFiles: [{}], isParallelView: false }} | ${true}
+ ${{}} | ${{ showTreeList: false, diffFiles: [], isParallelView: false }} | ${true}
+ `(
+ 'uses container-limiting classes ($expected) with state ($state) and props ($props)',
+ ({ props, state, expected }) => {
+ createComponent(props, ({ state: origState }) => Object.assign(origState.diffs, state));
+
+ expect(wrapper.find('.container-limited.limit-container-width').exists()).toBe(expected);
+ },
+ );
it('displays loading icon on loading', () => {
createComponent({}, ({ state }) => {
@@ -216,28 +197,25 @@ describe('diffs/components/app', () => {
window.location.hash = 'ABC_123';
});
- it('sets highlighted row if hash exists in location object', done => {
+ it('sets highlighted row if hash exists in location object', async () => {
createComponent({
shouldShow: true,
});
// Component uses $nextTick so we wait until that has finished
- setImmediate(() => {
- expect(store.state.diffs.highlightedRow).toBe('ABC_123');
+ await nextTick();
- done();
- });
+ expect(store.state.diffs.highlightedRow).toBe('ABC_123');
});
- it('marks current diff file based on currently highlighted row', () => {
+ it('marks current diff file based on currently highlighted row', async () => {
createComponent({
shouldShow: true,
});
// Component uses $nextTick so we wait until that has finished
- return wrapper.vm.$nextTick().then(() => {
- expect(store.state.diffs.currentDiffFileId).toBe('ABC');
- });
+ await nextTick();
+ expect(store.state.diffs.currentDiffFileId).toBe('ABC');
});
});
@@ -261,23 +239,23 @@ describe('diffs/components/app', () => {
});
it('sets width of tree list', () => {
- createComponent();
+ createComponent({}, ({ state }) => {
+ state.diffs.diffFiles = [{ file_hash: '111', file_path: '111.js' }];
+ });
expect(wrapper.find('.js-diff-tree-list').element.style.width).toEqual('320px');
});
});
- it('marks current diff file based on currently highlighted row', done => {
+ it('marks current diff file based on currently highlighted row', async () => {
createComponent({
shouldShow: true,
});
- // Component uses $nextTick so we wait until that has finished
- setImmediate(() => {
- expect(store.state.diffs.currentDiffFileId).toBe('ABC');
+ // Component uses nextTick so we wait until that has finished
+ await nextTick();
- done();
- });
+ expect(store.state.diffs.currentDiffFileId).toBe('ABC');
});
describe('empty state', () => {
@@ -297,79 +275,43 @@ describe('diffs/components/app', () => {
expect(wrapper.find(NoChanges).exists()).toBe(false);
expect(wrapper.findAll(DiffFile).length).toBe(1);
});
-
- it('does not render empty state when versions match', () => {
- createComponent({}, ({ state }) => {
- state.diffs.startVersion = mergeRequestDiff;
- state.diffs.mergeRequestDiff = mergeRequestDiff;
- });
-
- expect(wrapper.find(NoChanges).exists()).toBe(false);
- });
});
describe('keyboard shortcut navigation', () => {
let spies = [];
- let jumpSpy;
let moveSpy;
+ let jumpSpy;
- function setup(componentProps, featureFlags) {
- createComponent(
- componentProps,
- ({ state }) => {
- state.diffs.commit = { id: 'SHA123' };
- },
- { glFeatures: { mrCommitNeighborNav: true, ...featureFlags } },
- );
+ function setup(componentProps) {
+ createComponent(componentProps, ({ state }) => {
+ state.diffs.commit = { id: 'SHA123' };
+ });
moveSpy = jest.spyOn(wrapper.vm, 'moveToNeighboringCommit').mockImplementation(() => {});
- jumpSpy = jest.fn();
+ jumpSpy = jest.spyOn(wrapper.vm, 'jumpToFile').mockImplementation(() => {});
spies = [jumpSpy, moveSpy];
- wrapper.setMethods({
- jumpToFile: jumpSpy,
- });
}
describe('visible app', () => {
it.each`
- key | name | spy | args | featureFlags
- ${'['} | ${'jumpToFile'} | ${0} | ${[-1]} | ${{}}
- ${'k'} | ${'jumpToFile'} | ${0} | ${[-1]} | ${{}}
- ${']'} | ${'jumpToFile'} | ${0} | ${[+1]} | ${{}}
- ${'j'} | ${'jumpToFile'} | ${0} | ${[+1]} | ${{}}
- ${'x'} | ${'moveToNeighboringCommit'} | ${1} | ${[{ direction: 'previous' }]} | ${{ mrCommitNeighborNav: true }}
- ${'c'} | ${'moveToNeighboringCommit'} | ${1} | ${[{ direction: 'next' }]} | ${{ mrCommitNeighborNav: true }}
+ key | name | spy | args
+ ${'['} | ${'jumpToFile'} | ${0} | ${[-1]}
+ ${'k'} | ${'jumpToFile'} | ${0} | ${[-1]}
+ ${']'} | ${'jumpToFile'} | ${0} | ${[+1]}
+ ${'j'} | ${'jumpToFile'} | ${0} | ${[+1]}
+ ${'x'} | ${'moveToNeighboringCommit'} | ${1} | ${[{ direction: 'previous' }]}
+ ${'c'} | ${'moveToNeighboringCommit'} | ${1} | ${[{ direction: 'next' }]}
`(
'calls `$name()` with correct parameters whenever the "$key" key is pressed',
- ({ key, spy, args, featureFlags }) => {
- setup({ shouldShow: true }, featureFlags);
-
- return wrapper.vm.$nextTick().then(() => {
- expect(spies[spy]).not.toHaveBeenCalled();
-
- Mousetrap.trigger(key);
-
- expect(spies[spy]).toHaveBeenCalledWith(...args);
- });
- },
- );
-
- it.each`
- key | name | spy | featureFlags
- ${'x'} | ${'moveToNeighboringCommit'} | ${1} | ${{ mrCommitNeighborNav: false }}
- ${'c'} | ${'moveToNeighboringCommit'} | ${1} | ${{ mrCommitNeighborNav: false }}
- `(
- 'does not call `$name()` even when the correct key is pressed if the feature flag is disabled',
- ({ key, spy, featureFlags }) => {
- setup({ shouldShow: true }, featureFlags);
+ async ({ key, spy, args }) => {
+ setup({ shouldShow: true });
- return wrapper.vm.$nextTick().then(() => {
- expect(spies[spy]).not.toHaveBeenCalled();
+ await nextTick();
+ expect(spies[spy]).not.toHaveBeenCalled();
- Mousetrap.trigger(key);
+ Mousetrap.trigger(key);
- expect(spies[spy]).not.toHaveBeenCalled();
- });
+ expect(spies[spy]).toHaveBeenCalledWith(...args);
},
);
@@ -379,25 +321,23 @@ describe('diffs/components/app', () => {
${'r'} | ${'moveToNeighboringCommit'} | ${1} | ${['x', 'c']}
`(
`does not call \`$name()\` when a key that is not one of \`$allowed\` is pressed`,
- ({ key, spy }) => {
- setup({ shouldShow: true }, { mrCommitNeighborNav: true });
+ async ({ key, spy }) => {
+ setup({ shouldShow: true });
- return wrapper.vm.$nextTick().then(() => {
- Mousetrap.trigger(key);
+ await nextTick();
+ Mousetrap.trigger(key);
- expect(spies[spy]).not.toHaveBeenCalled();
- });
+ expect(spies[spy]).not.toHaveBeenCalled();
},
);
});
describe('hidden app', () => {
- beforeEach(() => {
- setup({ shouldShow: false }, { mrCommitNeighborNav: true });
+ beforeEach(async () => {
+ setup({ shouldShow: false });
- return wrapper.vm.$nextTick().then(() => {
- Mousetrap.reset();
- });
+ await nextTick();
+ Mousetrap.reset();
});
it.each`
@@ -420,8 +360,6 @@ describe('diffs/components/app', () => {
let spy;
beforeEach(() => {
- spy = jest.fn();
-
createComponent({}, () => {
store.state.diffs.diffFiles = [
{ file_hash: '111', file_path: '111.js' },
@@ -429,66 +367,49 @@ describe('diffs/components/app', () => {
{ file_hash: '333', file_path: '333.js' },
];
});
-
- wrapper.setMethods({
- scrollToFile: spy,
- });
+ spy = jest.spyOn(store, 'dispatch');
});
afterEach(() => {
wrapper.destroy();
});
- it('jumps to next and previous files in the list', done => {
- wrapper.vm
- .$nextTick()
- .then(() => {
- wrapper.vm.jumpToFile(+1);
+ it('jumps to next and previous files in the list', async () => {
+ await nextTick();
+
+ wrapper.vm.jumpToFile(+1);
- expect(spy.mock.calls[spy.mock.calls.length - 1]).toEqual(['222.js']);
- store.state.diffs.currentDiffFileId = '222';
- wrapper.vm.jumpToFile(+1);
+ expect(spy.mock.calls[spy.mock.calls.length - 1]).toEqual(['diffs/scrollToFile', '222.js']);
+ store.state.diffs.currentDiffFileId = '222';
+ wrapper.vm.jumpToFile(+1);
- expect(spy.mock.calls[spy.mock.calls.length - 1]).toEqual(['333.js']);
- store.state.diffs.currentDiffFileId = '333';
- wrapper.vm.jumpToFile(-1);
+ expect(spy.mock.calls[spy.mock.calls.length - 1]).toEqual(['diffs/scrollToFile', '333.js']);
+ store.state.diffs.currentDiffFileId = '333';
+ wrapper.vm.jumpToFile(-1);
- expect(spy.mock.calls[spy.mock.calls.length - 1]).toEqual(['222.js']);
- })
- .then(done)
- .catch(done.fail);
+ expect(spy.mock.calls[spy.mock.calls.length - 1]).toEqual(['diffs/scrollToFile', '222.js']);
});
- it('does not jump to previous file from the first one', done => {
- wrapper.vm
- .$nextTick()
- .then(() => {
- store.state.diffs.currentDiffFileId = '333';
+ it('does not jump to previous file from the first one', async () => {
+ await nextTick();
+ store.state.diffs.currentDiffFileId = '333';
- expect(wrapper.vm.currentDiffIndex).toEqual(2);
+ expect(wrapper.vm.currentDiffIndex).toBe(2);
- wrapper.vm.jumpToFile(+1);
+ wrapper.vm.jumpToFile(+1);
- expect(wrapper.vm.currentDiffIndex).toEqual(2);
- expect(spy).not.toHaveBeenCalled();
- })
- .then(done)
- .catch(done.fail);
+ expect(wrapper.vm.currentDiffIndex).toBe(2);
+ expect(spy).not.toHaveBeenCalled();
});
- it('does not jump to next file from the last one', done => {
- wrapper.vm
- .$nextTick()
- .then(() => {
- expect(wrapper.vm.currentDiffIndex).toEqual(0);
+ it('does not jump to next file from the last one', async () => {
+ await nextTick();
+ expect(wrapper.vm.currentDiffIndex).toBe(0);
- wrapper.vm.jumpToFile(-1);
+ wrapper.vm.jumpToFile(-1);
- expect(wrapper.vm.currentDiffIndex).toEqual(0);
- expect(spy).not.toHaveBeenCalled();
- })
- .then(done)
- .catch(done.fail);
+ expect(wrapper.vm.currentDiffIndex).toBe(0);
+ expect(spy).not.toHaveBeenCalled();
});
});
@@ -514,7 +435,7 @@ describe('diffs/components/app', () => {
window.location = location;
});
- it('when the commit changes and the app is not loading it should update the history, refetch the diff data, and update the view', () => {
+ it('when the commit changes and the app is not loading it should update the history, refetch the diff data, and update the view', async () => {
createComponent({}, ({ state }) => {
state.diffs.commit = { ...state.diffs.commit, id: 'OLD' };
});
@@ -522,14 +443,13 @@ describe('diffs/components/app', () => {
store.state.diffs.commit = { id: 'NEW' };
- return wrapper.vm.$nextTick().then(() => {
- expect(urlUtils.updateHistory).toHaveBeenCalledWith({
- title: document.title,
- url: UPDATED_COMMIT_URL,
- });
- expect(wrapper.vm.refetchDiffData).toHaveBeenCalled();
- expect(wrapper.vm.adjustView).toHaveBeenCalled();
+ await nextTick();
+ expect(urlUtils.updateHistory).toHaveBeenCalledWith({
+ title: document.title,
+ url: UPDATED_COMMIT_URL,
});
+ expect(wrapper.vm.refetchDiffData).toHaveBeenCalled();
+ expect(wrapper.vm.adjustView).toHaveBeenCalled();
});
it.each`
@@ -538,7 +458,7 @@ describe('diffs/components/app', () => {
${false} | ${'NEW'} | ${'NEW'}
`(
'given `{ "isLoading": $isLoading, "oldSha": "$oldSha", "newSha": "$newSha" }`, nothing should happen',
- ({ isLoading, oldSha, newSha }) => {
+ async ({ isLoading, oldSha, newSha }) => {
createComponent({}, ({ state }) => {
state.diffs.isLoading = isLoading;
state.diffs.commit = { ...state.diffs.commit, id: oldSha };
@@ -547,11 +467,10 @@ describe('diffs/components/app', () => {
store.state.diffs.commit = { id: newSha };
- return wrapper.vm.$nextTick().then(() => {
- expect(urlUtils.updateHistory).not.toHaveBeenCalled();
- expect(wrapper.vm.refetchDiffData).not.toHaveBeenCalled();
- expect(wrapper.vm.adjustView).not.toHaveBeenCalled();
- });
+ await nextTick();
+ expect(urlUtils.updateHistory).not.toHaveBeenCalled();
+ expect(wrapper.vm.refetchDiffData).not.toHaveBeenCalled();
+ expect(wrapper.vm.adjustView).not.toHaveBeenCalled();
},
);
});
@@ -559,6 +478,7 @@ describe('diffs/components/app', () => {
describe('diffs', () => {
it('should render compare versions component', () => {
createComponent({}, ({ state }) => {
+ state.diffs.diffFiles = [{ file_hash: '111', file_path: '111.js' }];
state.diffs.mergeRequestDiffs = diffsMockData;
state.diffs.targetBranchName = 'target-branch';
state.diffs.mergeRequestDiff = mergeRequestDiff;
@@ -567,7 +487,8 @@ describe('diffs/components/app', () => {
expect(wrapper.find(CompareVersions).exists()).toBe(true);
expect(wrapper.find(CompareVersions).props()).toEqual(
expect.objectContaining({
- mergeRequestDiffs: diffsMockData,
+ isLimitedContainer: false,
+ diffFilesCountText: null,
}),
);
});
@@ -635,20 +556,22 @@ describe('diffs/components/app', () => {
expect(wrapper.find(DiffFile).exists()).toBe(true);
});
- it('should render tree list', () => {
+ it("doesn't render tree list when no changes exist", () => {
createComponent();
- expect(wrapper.find(TreeList).exists()).toBe(true);
+ expect(wrapper.find(TreeList).exists()).toBe(false);
});
- });
- describe('setTreeDisplay', () => {
- let setShowTreeList;
+ it('should render tree list', () => {
+ createComponent({}, ({ state }) => {
+ state.diffs.diffFiles = [{ file_hash: '111', file_path: '111.js' }];
+ });
- beforeEach(() => {
- setShowTreeList = jest.fn();
+ expect(wrapper.find(TreeList).exists()).toBe(true);
});
+ });
+ describe('setTreeDisplay', () => {
afterEach(() => {
localStorage.removeItem('mr_tree_show');
});
@@ -657,14 +580,13 @@ describe('diffs/components/app', () => {
createComponent({}, ({ state }) => {
state.diffs.diffFiles.push({ sha: '123' });
});
-
- wrapper.setMethods({
- setShowTreeList,
- });
-
+ jest.spyOn(store, 'dispatch');
wrapper.vm.setTreeDisplay();
- expect(setShowTreeList).toHaveBeenCalledWith({ showTreeList: false, saving: false });
+ expect(store.dispatch).toHaveBeenCalledWith('diffs/setShowTreeList', {
+ showTreeList: false,
+ saving: false,
+ });
});
it('calls setShowTreeList with true when more than 1 file is in diffs array', () => {
@@ -672,14 +594,14 @@ describe('diffs/components/app', () => {
state.diffs.diffFiles.push({ sha: '123' });
state.diffs.diffFiles.push({ sha: '124' });
});
-
- wrapper.setMethods({
- setShowTreeList,
- });
+ jest.spyOn(store, 'dispatch');
wrapper.vm.setTreeDisplay();
- expect(setShowTreeList).toHaveBeenCalledWith({ showTreeList: true, saving: false });
+ expect(store.dispatch).toHaveBeenCalledWith('diffs/setShowTreeList', {
+ showTreeList: true,
+ saving: false,
+ });
});
it.each`
@@ -692,14 +614,14 @@ describe('diffs/components/app', () => {
createComponent({}, ({ state }) => {
state.diffs.diffFiles.push({ sha: '123' });
});
-
- wrapper.setMethods({
- setShowTreeList,
- });
+ jest.spyOn(store, 'dispatch');
wrapper.vm.setTreeDisplay();
- expect(setShowTreeList).toHaveBeenCalledWith({ showTreeList, saving: false });
+ expect(store.dispatch).toHaveBeenCalledWith('diffs/setShowTreeList', {
+ showTreeList,
+ saving: false,
+ });
});
});
@@ -710,7 +632,7 @@ describe('diffs/components/app', () => {
state.diffs.diffFiles.push({ file_hash: '312' });
});
- await wrapper.vm.$nextTick();
+ await nextTick();
expect(wrapper.findAll(DiffFile).length).toBe(1);
});
@@ -724,7 +646,7 @@ describe('diffs/components/app', () => {
state.diffs.diffFiles.push({ file_hash: '123' }, { file_hash: '312' });
});
- await wrapper.vm.$nextTick();
+ await nextTick();
expect(paginator().attributes('prevpage')).toBe(undefined);
expect(paginator().attributes('nextpage')).toBe('2');
@@ -736,7 +658,7 @@ describe('diffs/components/app', () => {
state.diffs.currentDiffFileId = '312';
});
- await wrapper.vm.$nextTick();
+ await nextTick();
expect(paginator().attributes('prevpage')).toBe('1');
expect(paginator().attributes('nextpage')).toBe(undefined);
@@ -748,7 +670,7 @@ describe('diffs/components/app', () => {
state.diffs.currentDiffFileId = '123';
});
- await wrapper.vm.$nextTick();
+ await nextTick();
expect(fileByFileNav().exists()).toBe(false);
});
@@ -765,13 +687,13 @@ describe('diffs/components/app', () => {
state.diffs.currentDiffFileId = currentDiffFileId;
});
- await wrapper.vm.$nextTick();
+ await nextTick();
jest.spyOn(wrapper.vm, 'navigateToDiffFileIndex');
paginator().vm.$emit('input', targetFile);
- await wrapper.vm.$nextTick();
+ await nextTick();
expect(wrapper.vm.navigateToDiffFileIndex).toHaveBeenCalledWith(targetFile - 1);
},
@@ -787,10 +709,10 @@ describe('diffs/components/app', () => {
'triggers the action with the new fileByFile setting - $setting - when the event with that setting is received',
async ({ setting }) => {
createComponent();
- await wrapper.vm.$nextTick();
+ await nextTick();
eventHub.$emit(EVT_VIEW_FILE_BY_FILE, { setting });
- await wrapper.vm.$nextTick();
+ await nextTick();
expect(store.state.diffs.viewDiffsFileByFile).toBe(setting);
},
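The recurring change in the app_spec hunks above replaces `done`-callback tests built on `wrapper.vm.$nextTick()` promise chains with `async` test bodies that `await nextTick()`. A minimal sketch of that conversion, assuming `nextTick` is imported from 'vue' and `wrapper` is a mounted component wrapper; the test name and assertion are illustrative, not taken from this diff:
import { nextTick } from 'vue';

// Before: a promise chain wired to Jest's done/done.fail callbacks.
it('reacts to a store change', (done) => {
  wrapper.vm
    .$nextTick()
    .then(() => {
      expect(wrapper.text()).toContain('updated');
    })
    .then(done)
    .catch(done.fail);
});

// After: awaiting nextTick keeps the assertions flat, and a rejected
// promise fails the test without an explicit done.fail handler.
it('reacts to a store change', async () => {
  await nextTick();
  expect(wrapper.text()).toContain('updated');
});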
diff --git a/spec/frontend/diffs/components/commit_item_spec.js b/spec/frontend/diffs/components/commit_item_spec.js
index 8a7eb6aaca6..f588f65dafd 100644
--- a/spec/frontend/diffs/components/commit_item_spec.js
+++ b/spec/frontend/diffs/components/commit_item_spec.js
@@ -37,18 +37,12 @@ describe('diffs/components/commit_item', () => {
const getPrevCommitNavElement = () =>
getCommitNavButtonsElement().find('.btn-group > *:first-child');
- const mountComponent = (propsData, featureFlags = {}) => {
+ const mountComponent = (propsData) => {
wrapper = mount(Component, {
propsData: {
commit,
...propsData,
},
- provide: {
- glFeatures: {
- mrCommitNeighborNav: true,
- ...featureFlags,
- },
- },
stubs: {
CommitPipelineStatus: true,
},
@@ -224,12 +218,6 @@ describe('diffs/components/commit_item', () => {
expect(getCommitNavButtonsElement().exists()).toEqual(true);
});
- it('does not render the commit navigation buttons if the `mrCommitNeighborNav` feature flag is disabled', () => {
- mountComponent({ commit: mrCommit }, { mrCommitNeighborNav: false });
-
- expect(getCommitNavButtonsElement().exists()).toEqual(false);
- });
-
describe('prev commit', () => {
const { location } = window;
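The commit_item_spec hunks above drop the `glFeatures` injection because the `mrCommitNeighborNav` flag no longer gates the component. For reference, a hedged sketch of how a flag is typically provided to a mounted component in these specs while it still exists; the component path and flag name below are placeholders:
import { mount } from '@vue/test-utils';
import Component from '~/some/component.vue'; // placeholder path

// While the flag is still read by the component, the spec injects it
// through `provide` so the component's glFeatures lookup can see it.
const wrapper = mount(Component, {
  propsData: { commit: {} },
  provide: {
    glFeatures: { someFeatureFlag: true }, // placeholder flag name
  },
});

// Once the flag is enabled by default and removed from the component,
// both this provide block and the disabled-flag test can be deleted,
// which is what the hunk above does.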
diff --git a/spec/frontend/diffs/components/compare_dropdown_layout_spec.js b/spec/frontend/diffs/components/compare_dropdown_layout_spec.js
index 92e4a2d9c62..d99933a1ee9 100644
--- a/spec/frontend/diffs/components/compare_dropdown_layout_spec.js
+++ b/spec/frontend/diffs/components/compare_dropdown_layout_spec.js
@@ -31,7 +31,7 @@ describe('CompareDropdownLayout', () => {
const findListItems = () => wrapper.findAll('li');
const findListItemsData = () =>
- findListItems().wrappers.map(listItem => ({
+ findListItems().wrappers.map((listItem) => ({
href: listItem.find('a').attributes('href'),
text: trimText(listItem.text()),
createdAt: listItem.findAll(TimeAgo).wrappers[0]?.props('time'),
diff --git a/spec/frontend/diffs/components/compare_versions_spec.js b/spec/frontend/diffs/components/compare_versions_spec.js
index 09e9669c474..949cc855200 100644
--- a/spec/frontend/diffs/components/compare_versions_spec.js
+++ b/spec/frontend/diffs/components/compare_versions_spec.js
@@ -11,32 +11,34 @@ localVue.use(Vuex);
describe('CompareVersions', () => {
let wrapper;
+ let store;
const targetBranchName = 'tmp-wine-dev';
- const createWrapper = props => {
- const store = createStore();
- const mergeRequestDiff = diffsMockData[0];
-
- store.state.diffs.addedLines = 10;
- store.state.diffs.removedLines = 20;
- store.state.diffs.diffFiles.push('test');
- store.state.diffs.targetBranchName = targetBranchName;
- store.state.diffs.mergeRequestDiff = mergeRequestDiff;
- store.state.diffs.mergeRequestDiffs = diffsMockData;
-
+ const createWrapper = (props) => {
wrapper = mount(CompareVersionsComponent, {
localVue,
store,
propsData: {
mergeRequestDiffs: diffsMockData,
- diffFilesCountText: null,
+ diffFilesCountText: '1',
...props,
},
});
};
+ const findLimitedContainer = () => wrapper.find('.container-limited.limit-container-width');
+ const findCompareSourceDropdown = () => wrapper.find('.mr-version-dropdown');
+ const findCompareTargetDropdown = () => wrapper.find('.mr-version-compare-dropdown');
beforeEach(() => {
- createWrapper();
+ store = createStore();
+ const mergeRequestDiff = diffsMockData[0];
+
+ store.state.diffs.addedLines = 10;
+ store.state.diffs.removedLines = 20;
+ store.state.diffs.diffFiles.push('test');
+ store.state.diffs.targetBranchName = targetBranchName;
+ store.state.diffs.mergeRequestDiff = mergeRequestDiff;
+ store.state.diffs.mergeRequestDiffs = diffsMockData;
});
afterEach(() => {
@@ -45,6 +47,10 @@ describe('CompareVersions', () => {
});
describe('template', () => {
+ beforeEach(() => {
+ createWrapper();
+ });
+
it('should render Tree List toggle button with correct attribute values', () => {
const treeListBtn = wrapper.find('.js-toggle-tree-list');
@@ -54,8 +60,8 @@ describe('CompareVersions', () => {
});
it('should render comparison dropdowns with correct values', () => {
- const sourceDropdown = wrapper.find('.mr-version-dropdown');
- const targetDropdown = wrapper.find('.mr-version-compare-dropdown');
+ const sourceDropdown = findCompareSourceDropdown();
+ const targetDropdown = findCompareTargetDropdown();
expect(sourceDropdown.exists()).toBe(true);
expect(targetDropdown.exists()).toBe(true);
@@ -63,16 +69,6 @@ describe('CompareVersions', () => {
expect(targetDropdown.find('button').html()).toContain(targetBranchName);
});
- it('should not render comparison dropdowns if no mergeRequestDiffs are specified', () => {
- createWrapper({ mergeRequestDiffs: [] });
-
- const sourceDropdown = wrapper.find('.mr-version-dropdown');
- const targetDropdown = wrapper.find('.mr-version-compare-dropdown');
-
- expect(sourceDropdown.exists()).toBe(false);
- expect(targetDropdown.exists()).toBe(false);
- });
-
it('should render view types buttons with correct values', () => {
const inlineBtn = wrapper.find('#inline-diff-btn');
const parallelBtn = wrapper.find('#parallel-diff-btn');
@@ -88,22 +84,34 @@ describe('CompareVersions', () => {
it('adds container-limiting classes when showFileTree is false with inline diffs', () => {
createWrapper({ isLimitedContainer: true });
- const limitedContainer = wrapper.find('.container-limited.limit-container-width');
-
- expect(limitedContainer.exists()).toBe(true);
+ expect(findLimitedContainer().exists()).toBe(true);
});
it('does not add container-limiting classes when showFileTree is false with inline diffs', () => {
createWrapper({ isLimitedContainer: false });
- const limitedContainer = wrapper.find('.container-limited.limit-container-width');
+ expect(findLimitedContainer().exists()).toBe(false);
+ });
+ });
- expect(limitedContainer.exists()).toBe(false);
+ describe('noChangedFiles', () => {
+ beforeEach(() => {
+ store.state.diffs.diffFiles = [];
+ });
+
+ it('should not render Tree List toggle button when there are no changes', () => {
+ createWrapper();
+
+ const treeListBtn = wrapper.find('.js-toggle-tree-list');
+
+ expect(treeListBtn.exists()).toBe(false);
});
});
describe('setInlineDiffViewType', () => {
it('should persist the view type in the url', () => {
+ createWrapper();
+
const viewTypeBtn = wrapper.find('#inline-diff-btn');
viewTypeBtn.trigger('click');
@@ -113,6 +121,7 @@ describe('CompareVersions', () => {
describe('setParallelDiffViewType', () => {
it('should persist the view type in the url', () => {
+ createWrapper();
const viewTypeBtn = wrapper.find('#parallel-diff-btn');
viewTypeBtn.trigger('click');
@@ -121,11 +130,14 @@ describe('CompareVersions', () => {
});
describe('commit', () => {
- beforeEach(done => {
- wrapper.vm.$store.state.diffs.commit = getDiffWithCommit().commit;
- wrapper.mergeRequestDiffs = [];
+ beforeEach(() => {
+ store.state.diffs.commit = getDiffWithCommit().commit;
+ createWrapper();
+ });
- wrapper.vm.$nextTick(done);
+ it('does not render compare dropdowns', () => {
+ expect(findCompareSourceDropdown().exists()).toBe(false);
+ expect(findCompareTargetDropdown().exists()).toBe(false);
});
it('renders latest version button', () => {
@@ -137,4 +149,16 @@ describe('CompareVersions', () => {
expect(wrapper.text()).toContain(wrapper.vm.commit.short_id);
});
});
+
+ describe('with no versions', () => {
+ beforeEach(() => {
+ store.state.diffs.mergeRequestDiffs = [];
+ createWrapper();
+ });
+
+ it('does not render compare dropdowns', () => {
+ expect(findCompareSourceDropdown().exists()).toBe(false);
+ expect(findCompareTargetDropdown().exists()).toBe(false);
+ });
+ });
});
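The compare_versions_spec refactor above creates the store in `beforeEach` and defers mounting to each test, so a nested describe can adjust state (for example emptying `diffFiles`) before calling `createWrapper()`. A condensed sketch of that shape; the component import is a placeholder and the selector is illustrative:
import Vuex from 'vuex';
import { mount, createLocalVue } from '@vue/test-utils';
import { createStore } from '~/mr_notes/stores';
import SomeComponent from '~/some/component.vue'; // placeholder path

const localVue = createLocalVue();
localVue.use(Vuex);

let store;
let wrapper;

const createWrapper = (props) => {
  wrapper = mount(SomeComponent, { localVue, store, propsData: { ...props } });
};

beforeEach(() => {
  store = createStore(); // fresh store per test
  store.state.diffs.diffFiles = ['test'];
});

describe('with no changed files', () => {
  beforeEach(() => {
    store.state.diffs.diffFiles = []; // tweak state before mounting
    createWrapper();
  });

  it('hides the tree list toggle', () => {
    expect(wrapper.find('.js-toggle-tree-list').exists()).toBe(false);
  });
});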
diff --git a/spec/frontend/diffs/components/diff_content_spec.js b/spec/frontend/diffs/components/diff_content_spec.js
index 43d295ff1b3..c1cf4793c88 100644
--- a/spec/frontend/diffs/components/diff_content_spec.js
+++ b/spec/frontend/diffs/components/diff_content_spec.js
@@ -102,7 +102,7 @@ describe('DiffContent', () => {
describe('with text based files', () => {
afterEach(() => {
- [isParallelViewGetterMock, isInlineViewGetterMock].forEach(m => m.mockRestore());
+ [isParallelViewGetterMock, isInlineViewGetterMock].forEach((m) => m.mockRestore());
});
const textDiffFile = { ...defaultProps.diffFile, viewer: { name: diffViewerModes.text } };
diff --git a/spec/frontend/diffs/components/diff_discussions_spec.js b/spec/frontend/diffs/components/diff_discussions_spec.js
index 96b76183cee..5c390054247 100644
--- a/spec/frontend/diffs/components/diff_discussions_spec.js
+++ b/spec/frontend/diffs/components/diff_discussions_spec.js
@@ -15,7 +15,7 @@ describe('DiffDiscussions', () => {
let wrapper;
const getDiscussionsMockData = () => [{ ...discussionsMockData }];
- const createComponent = props => {
+ const createComponent = (props) => {
store = createStore();
wrapper = mount(localVue.extend(DiffDiscussions), {
store,
@@ -91,12 +91,7 @@ describe('DiffDiscussions', () => {
const noteableDiscussion = wrapper.find(NoteableDiscussion);
expect(noteableDiscussion.find('.badge-pill').exists()).toBe(true);
- expect(
- noteableDiscussion
- .find('.badge-pill')
- .text()
- .trim(),
- ).toBe('1');
+ expect(noteableDiscussion.find('.badge-pill').text().trim()).toBe('1');
});
});
});
diff --git a/spec/frontend/diffs/components/diff_expansion_cell_spec.js b/spec/frontend/diffs/components/diff_expansion_cell_spec.js
index a3b4b5c3abb..62e85b31f76 100644
--- a/spec/frontend/diffs/components/diff_expansion_cell_spec.js
+++ b/spec/frontend/diffs/components/diff_expansion_cell_spec.js
@@ -1,6 +1,5 @@
-import Vue from 'vue';
import { cloneDeep } from 'lodash';
-import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
+import { mount } from '@vue/test-utils';
import { getByText } from '@testing-library/dom';
import { createStore } from '~/mr_notes/stores';
import DiffExpansionCell from '~/diffs/components/diff_expansion_cell.vue';
@@ -14,7 +13,7 @@ const lineSources = {
[INLINE_DIFF_VIEW_TYPE]: 'highlighted_diff_lines',
};
const lineHandlers = {
- [INLINE_DIFF_VIEW_TYPE]: line => line,
+ [INLINE_DIFF_VIEW_TYPE]: (line) => line,
};
function makeLoadMoreLinesPayload({
@@ -59,7 +58,6 @@ describe('DiffExpansionCell', () => {
let mockFile;
let mockLine;
let store;
- let vm;
beforeEach(() => {
mockFile = cloneDeep(diffFileMockData);
@@ -70,7 +68,6 @@ describe('DiffExpansionCell', () => {
});
const createComponent = (options = {}) => {
- const cmp = Vue.extend(DiffExpansionCell);
const defaults = {
fileHash: mockFile.file_hash,
contextLinesPath: 'contextLinesPath',
@@ -78,46 +75,46 @@ describe('DiffExpansionCell', () => {
isTop: false,
isBottom: false,
};
- const props = { ...defaults, ...options };
+ const propsData = { ...defaults, ...options };
- vm = createComponentWithStore(cmp, store, props).$mount();
+ return mount(DiffExpansionCell, { store, propsData });
};
- const findExpandUp = () => vm.$el.querySelector(EXPAND_UP_CLASS);
- const findExpandDown = () => vm.$el.querySelector(EXPAND_DOWN_CLASS);
- const findExpandAll = () => getByText(vm.$el, 'Show all unchanged lines');
+ const findExpandUp = (wrapper) => wrapper.find(EXPAND_UP_CLASS);
+ const findExpandDown = (wrapper) => wrapper.find(EXPAND_DOWN_CLASS);
+ const findExpandAll = ({ element }) => getByText(element, 'Show all unchanged lines');
describe('top row', () => {
it('should have "expand up" and "show all" option', () => {
- createComponent({
+ const wrapper = createComponent({
isTop: true,
});
- expect(findExpandUp()).not.toBe(null);
- expect(findExpandDown()).toBe(null);
- expect(findExpandAll()).not.toBe(null);
+ expect(findExpandUp(wrapper).exists()).toBe(true);
+ expect(findExpandDown(wrapper).exists()).toBe(false);
+ expect(findExpandAll(wrapper)).not.toBe(null);
});
});
describe('middle row', () => {
it('should have "expand down", "show all", "expand up" option', () => {
- createComponent();
+ const wrapper = createComponent();
- expect(findExpandUp()).not.toBe(null);
- expect(findExpandDown()).not.toBe(null);
- expect(findExpandAll()).not.toBe(null);
+ expect(findExpandUp(wrapper).exists()).toBe(true);
+ expect(findExpandDown(wrapper).exists()).toBe(true);
+ expect(findExpandAll(wrapper)).not.toBe(null);
});
});
describe('bottom row', () => {
it('should have "expand down" and "show all" option', () => {
- createComponent({
+ const wrapper = createComponent({
isBottom: true,
});
- expect(findExpandUp()).toBe(null);
- expect(findExpandDown()).not.toBe(null);
- expect(findExpandAll()).not.toBe(null);
+ expect(findExpandUp(wrapper).exists()).toBe(false);
+ expect(findExpandDown(wrapper).exists()).toBe(true);
+ expect(findExpandAll(wrapper)).not.toBe(null);
});
});
@@ -144,9 +141,9 @@ describe('DiffExpansionCell', () => {
newLineNumber,
});
- createComponent();
+ const wrapper = createComponent();
- findExpandAll().click();
+ findExpandAll(wrapper).click();
expect(store.dispatch).toHaveBeenCalledWith(
'diffs/loadMoreLines',
@@ -167,9 +164,9 @@ describe('DiffExpansionCell', () => {
const oldLineNumber = mockLine.meta_data.old_pos;
const newLineNumber = mockLine.meta_data.new_pos;
- createComponent();
+ const wrapper = createComponent();
- findExpandUp().click();
+ findExpandUp(wrapper).trigger('click');
expect(store.dispatch).toHaveBeenCalledWith(
'diffs/loadMoreLines',
@@ -195,9 +192,9 @@ describe('DiffExpansionCell', () => {
mockLine.meta_data.old_pos = 200;
mockLine.meta_data.new_pos = 200;
- createComponent();
+ const wrapper = createComponent();
- findExpandDown().click();
+ findExpandDown(wrapper).trigger('click');
expect(store.dispatch).toHaveBeenCalledWith('diffs/loadMoreLines', {
endpoint: 'contextLinesPath',
diff --git a/spec/frontend/diffs/components/diff_file_header_spec.js b/spec/frontend/diffs/components/diff_file_header_spec.js
index 1b41456f2f5..e9a63e861ed 100644
--- a/spec/frontend/diffs/components/diff_file_header_spec.js
+++ b/spec/frontend/diffs/components/diff_file_header_spec.js
@@ -62,7 +62,7 @@ describe('DiffFileHeader component', () => {
diffHasDiscussionsResultMock,
diffHasExpandedDiscussionsResultMock,
...Object.values(mockStoreConfig.modules.diffs.actions),
- ].forEach(mock => mock.mockReset());
+ ].forEach((mock) => mock.mockReset());
wrapper.destroy();
});
@@ -80,7 +80,7 @@ describe('DiffFileHeader component', () => {
const findCollapseIcon = () => wrapper.find({ ref: 'collapseIcon' });
const findEditButton = () => wrapper.find({ ref: 'editButton' });
- const createComponent = props => {
+ const createComponent = (props) => {
mockStoreConfig = cloneDeep(defaultMockStoreConfig);
const store = new Vuex.Store(mockStoreConfig);
@@ -219,7 +219,7 @@ describe('DiffFileHeader component', () => {
});
describe('for any file', () => {
- const otherModes = Object.keys(diffViewerModes).filter(m => m !== 'mode_changed');
+ const otherModes = Object.keys(diffViewerModes).filter((m) => m !== 'mode_changed');
it('for mode_changed file mode displays mode changes', () => {
createComponent({
@@ -236,20 +236,23 @@ describe('DiffFileHeader component', () => {
expect(findModeChangedLine().text()).toMatch(/old-mode.+new-mode/);
});
- it.each(otherModes.map(m => [m]))('for %s file mode does not display mode changes', mode => {
- createComponent({
- diffFile: {
- ...diffFile,
- a_mode: 'old-mode',
- b_mode: 'new-mode',
- viewer: {
- ...diffFile.viewer,
- name: diffViewerModes[mode],
+ it.each(otherModes.map((m) => [m]))(
+ 'for %s file mode does not display mode changes',
+ (mode) => {
+ createComponent({
+ diffFile: {
+ ...diffFile,
+ a_mode: 'old-mode',
+ b_mode: 'new-mode',
+ viewer: {
+ ...diffFile.viewer,
+ name: diffViewerModes[mode],
+ },
},
- },
- });
- expect(findModeChangedLine().exists()).toBeFalsy();
- });
+ });
+ expect(findModeChangedLine().exists()).toBeFalsy();
+ },
+ );
it('displays the LFS label for files stored in LFS', () => {
createComponent({
diff --git a/spec/frontend/diffs/components/diff_file_spec.js b/spec/frontend/diffs/components/diff_file_spec.js
index 71e0ffd176f..c715d779986 100644
--- a/spec/frontend/diffs/components/diff_file_spec.js
+++ b/spec/frontend/diffs/components/diff_file_spec.js
@@ -1,6 +1,9 @@
import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import httpStatus from '~/lib/utils/http_status';
import createDiffsStore from '~/diffs/store/modules';
import createNotesStore from '~/notes/stores/modules';
import diffFileMockDataReadable from '../mock_data/diff_file';
@@ -96,13 +99,13 @@ function createComponent({ file, first = false, last = false }) {
};
}
-const findDiffHeader = wrapper => wrapper.find(DiffFileHeaderComponent);
-const findDiffContentArea = wrapper => wrapper.find('[data-testid="content-area"]');
-const findLoader = wrapper => wrapper.find('[data-testid="loader-icon"]');
-const findToggleButton = wrapper => wrapper.find('[data-testid="expand-button"]');
+const findDiffHeader = (wrapper) => wrapper.find(DiffFileHeaderComponent);
+const findDiffContentArea = (wrapper) => wrapper.find('[data-testid="content-area"]');
+const findLoader = (wrapper) => wrapper.find('[data-testid="loader-icon"]');
+const findToggleButton = (wrapper) => wrapper.find('[data-testid="expand-button"]');
-const toggleFile = wrapper => findDiffHeader(wrapper).vm.$emit('toggleFile');
-const isDisplayNone = element => element.style.display === 'none';
+const toggleFile = (wrapper) => findDiffHeader(wrapper).vm.$emit('toggleFile');
+const isDisplayNone = (element) => element.style.display === 'none';
const getReadableFile = () => JSON.parse(JSON.stringify(diffFileMockDataReadable));
const getUnreadableFile = () => JSON.parse(JSON.stringify(diffFileMockDataUnreadable));
@@ -118,14 +121,17 @@ const changeViewerType = (store, newType, index = 0) =>
describe('DiffFile', () => {
let wrapper;
let store;
+ let axiosMock;
beforeEach(() => {
+ axiosMock = new MockAdapter(axios);
({ wrapper, store } = createComponent({ file: getReadableFile() }));
});
afterEach(() => {
wrapper.destroy();
wrapper = null;
+ axiosMock.restore();
});
describe('bus events', () => {
@@ -157,7 +163,7 @@ describe('DiffFile', () => {
await wrapper.vm.$nextTick();
expect(eventHub.$emit).toHaveBeenCalledTimes(events.length);
- events.forEach(event => {
+ events.forEach((event) => {
expect(eventHub.$emit).toHaveBeenCalledWith(event);
});
},
@@ -174,7 +180,7 @@ describe('DiffFile', () => {
}));
jest.spyOn(wrapper.vm, 'loadCollapsedDiff').mockResolvedValue(getReadableFile());
- jest.spyOn(window, 'requestIdleCallback').mockImplementation(fn => fn());
+ jest.spyOn(window, 'requestIdleCallback').mockImplementation((fn) => fn());
makeFileAutomaticallyCollapsed(store);
@@ -247,7 +253,7 @@ describe('DiffFile', () => {
it('should not have any content at all', async () => {
await wrapper.vm.$nextTick();
- Array.from(findDiffContentArea(wrapper).element.children).forEach(child => {
+ Array.from(findDiffContentArea(wrapper).element.children).forEach((child) => {
expect(isDisplayNone(child)).toBe(true);
});
});
@@ -353,8 +359,10 @@ describe('DiffFile', () => {
describe('loading', () => {
it('should have loading icon while loading a collapsed diffs', async () => {
+ const { load_collapsed_diff_url } = store.state.diffs.diffFiles[0];
+ axiosMock.onGet(load_collapsed_diff_url).reply(httpStatus.OK, getReadableFile());
makeFileAutomaticallyCollapsed(store);
- wrapper.vm.isLoadingCollapsedDiff = true;
+ wrapper.vm.requestDiff();
await wrapper.vm.$nextTick();
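The diff_file_spec hunks above stop flipping `isLoadingCollapsedDiff` by hand and instead drive a real request through axios-mock-adapter, stubbing the file's `load_collapsed_diff_url`. A small sketch of that mocking pattern; the endpoint and payload below are illustrative:
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import httpStatus from '~/lib/utils/http_status';

let axiosMock;

beforeEach(() => {
  axiosMock = new MockAdapter(axios);
  // Any GET to this (illustrative) endpoint resolves with a canned diff file.
  axiosMock
    .onGet('/namespace/project/-/merge_requests/1/diff_for_path')
    .reply(httpStatus.OK, { file_hash: '123' });
});

afterEach(() => {
  // Remove the interceptors so later specs hit an unmocked axios instance.
  axiosMock.restore();
});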
diff --git a/spec/frontend/diffs/components/diff_gutter_avatars_spec.js b/spec/frontend/diffs/components/diff_gutter_avatars_spec.js
index 61e110b345a..5884a9ebd3a 100644
--- a/spec/frontend/diffs/components/diff_gutter_avatars_spec.js
+++ b/spec/frontend/diffs/components/diff_gutter_avatars_spec.js
@@ -66,9 +66,7 @@ describe('DiffGutterAvatars', () => {
});
it('should emit toggleDiscussions event on avatars click', () => {
- findUserAvatars()
- .at(0)
- .trigger('click');
+ findUserAvatars().at(0).trigger('click');
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.emitted().toggleLineDiscussions).toBeTruthy();
diff --git a/spec/frontend/diffs/components/diff_line_note_form_spec.js b/spec/frontend/diffs/components/diff_line_note_form_spec.js
index 75ec5c202af..faa68159c58 100644
--- a/spec/frontend/diffs/components/diff_line_note_form_spec.js
+++ b/spec/frontend/diffs/components/diff_line_note_form_spec.js
@@ -11,14 +11,16 @@ describe('DiffLineNoteForm', () => {
let diffLines;
const getDiffFileMock = () => ({ ...diffFileMockData });
- beforeEach(() => {
+ const createComponent = (args = {}) => {
diffFile = getDiffFileMock();
diffLines = diffFile.highlighted_diff_lines;
const store = createStore();
store.state.notes.userData.id = 1;
store.state.notes.noteableData = noteableDataMock;
- wrapper = shallowMount(DiffLineNoteForm, {
+ store.replaceState({ ...store.state, ...args.state });
+
+ return shallowMount(DiffLineNoteForm, {
store,
propsData: {
diffFileHash: diffFile.file_hash,
@@ -27,9 +29,13 @@ describe('DiffLineNoteForm', () => {
noteTargetLine: diffLines[0],
},
});
- });
+ };
describe('methods', () => {
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
describe('handleCancelCommentForm', () => {
it('should ask for confirmation when shouldConfirm and isDirty passed as truthy', () => {
jest.spyOn(window, 'confirm').mockReturnValue(false);
@@ -51,7 +57,7 @@ describe('DiffLineNoteForm', () => {
expect(window.confirm).not.toHaveBeenCalled();
});
- it('should call cancelCommentForm with lineCode', done => {
+ it('should call cancelCommentForm with lineCode', (done) => {
jest.spyOn(window, 'confirm').mockImplementation(() => {});
jest.spyOn(wrapper.vm, 'cancelCommentForm').mockImplementation(() => {});
jest.spyOn(wrapper.vm, 'resetAutoSave').mockImplementation(() => {});
@@ -72,7 +78,7 @@ describe('DiffLineNoteForm', () => {
});
describe('saveNoteForm', () => {
- it('should call saveNote action with proper params', done => {
+ it('should call saveNote action with proper params', (done) => {
const saveDiffDiscussionSpy = jest
.spyOn(wrapper.vm, 'saveDiffDiscussion')
.mockReturnValue(Promise.resolve());
@@ -114,14 +120,39 @@ describe('DiffLineNoteForm', () => {
describe('mounted', () => {
it('should init autosave', () => {
const key = 'autosave/Note/Issue/98//DiffNote//1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_1';
+ wrapper = createComponent();
expect(wrapper.vm.autosave).toBeDefined();
expect(wrapper.vm.autosave.key).toEqual(key);
});
+
+ it('should set selectedCommentPosition', () => {
+ wrapper = createComponent();
+ let startLineCode = wrapper.vm.commentLineStart.line_code;
+ let lineCode = wrapper.vm.line.line_code;
+
+ expect(startLineCode).toEqual(lineCode);
+ wrapper.destroy();
+
+ const state = {
+ notes: {
+ selectedCommentPosition: {
+ start: {
+ line_code: 'test',
+ },
+ },
+ },
+ };
+ wrapper = createComponent({ state });
+ startLineCode = wrapper.vm.commentLineStart.line_code;
+ lineCode = state.notes.selectedCommentPosition.start.line_code;
+ expect(startLineCode).toEqual(lineCode);
+ });
});
describe('template', () => {
it('should have note form', () => {
+ wrapper = createComponent();
expect(wrapper.find(NoteForm).exists()).toBe(true);
});
});
diff --git a/spec/frontend/diffs/components/diff_row_spec.js b/spec/frontend/diffs/components/diff_row_spec.js
index 0ec075c8ad8..c06d8e78316 100644
--- a/spec/frontend/diffs/components/diff_row_spec.js
+++ b/spec/frontend/diffs/components/diff_row_spec.js
@@ -1,7 +1,10 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { getByTestId, fireEvent } from '@testing-library/dom';
import Vuex from 'vuex';
import diffsModule from '~/diffs/store/modules';
import DiffRow from '~/diffs/components/diff_row.vue';
+import diffFileMockData from '../mock_data/diff_file';
+import { mapParallel } from '~/diffs/components/diff_row_utils';
describe('DiffRow', () => {
const testLines = [
@@ -42,16 +45,16 @@ describe('DiffRow', () => {
fileHash: 'abc',
filePath: 'abc',
line: {},
+ index: 0,
...props,
};
- return shallowMount(DiffRow, { propsData, localVue, store });
- };
- it('isHighlighted returns true if isCommented is true', () => {
- const props = { isCommented: true };
- const wrapper = createWrapper({ props });
- expect(wrapper.vm.isHighlighted).toBe(true);
- });
+ const provide = {
+ glFeatures: { dragCommentSelection: true },
+ };
+
+ return shallowMount(DiffRow, { propsData, localVue, store, provide });
+ };
it('isHighlighted returns true given line.left', () => {
const props = {
@@ -124,4 +127,88 @@ describe('DiffRow', () => {
const lineNumber = testLines[0].right.new_line;
expect(wrapper.find(`[data-linenumber="${lineNumber}"]`).exists()).toBe(true);
});
+
+ describe('drag operations', () => {
+ let line;
+
+ beforeEach(() => {
+ line = { ...testLines[0] };
+ });
+
+ it.each`
+ side
+ ${'left'}
+ ${'right'}
+ `('emits `enterdragging` onDragEnter $side side', ({ side }) => {
+ const expectation = { ...line[side], index: 0 };
+ const wrapper = createWrapper({ props: { line } });
+ fireEvent.dragEnter(getByTestId(wrapper.element, `${side}-side`));
+
+ expect(wrapper.emitted().enterdragging).toBeTruthy();
+ expect(wrapper.emitted().enterdragging[0]).toEqual([expectation]);
+ });
+
+ it.each`
+ side
+ ${'left'}
+ ${'right'}
+ `('emits `stopdragging` onDrop $side side', ({ side }) => {
+ const wrapper = createWrapper({ props: { line } });
+ fireEvent.dragEnd(getByTestId(wrapper.element, `${side}-side`));
+
+ expect(wrapper.emitted().stopdragging).toBeTruthy();
+ });
+ });
+
+ describe('sets coverage title and class', () => {
+ const thisLine = diffFileMockData.parallel_diff_lines[2];
+ const rightLine = diffFileMockData.parallel_diff_lines[2].right;
+
+ const mockDiffContent = {
+ diffFile: diffFileMockData,
+ shouldRenderDraftRow: jest.fn(),
+ hasParallelDraftLeft: jest.fn(),
+ hasParallelDraftRight: jest.fn(),
+ draftForLine: jest.fn(),
+ };
+
+ const applyMap = mapParallel(mockDiffContent);
+ const props = {
+ line: applyMap(thisLine),
+ fileHash: diffFileMockData.file_hash,
+ filePath: diffFileMockData.file_path,
+ contextLinesPath: 'contextLinesPath',
+ isHighlighted: false,
+ };
+ const name = diffFileMockData.file_path;
+ const line = rightLine.new_line;
+
+ it('for lines with coverage', () => {
+ const coverageFiles = { files: { [name]: { [line]: 5 } } };
+ const wrapper = createWrapper({ props, state: { coverageFiles } });
+ const coverage = wrapper.find('.line-coverage.right-side');
+
+ expect(coverage.attributes('title')).toContain('Test coverage: 5 hits');
+ expect(coverage.classes('coverage')).toBeTruthy();
+ });
+
+ it('for lines without coverage', () => {
+ const coverageFiles = { files: { [name]: { [line]: 0 } } };
+ const wrapper = createWrapper({ props, state: { coverageFiles } });
+ const coverage = wrapper.find('.line-coverage.right-side');
+
+ expect(coverage.attributes('title')).toContain('No test coverage');
+ expect(coverage.classes('no-coverage')).toBeTruthy();
+ });
+
+ it('for unknown lines', () => {
+ const coverageFiles = {};
+ const wrapper = createWrapper({ props, state: { coverageFiles } });
+ const coverage = wrapper.find('.line-coverage.right-side');
+
+ expect(coverage.attributes('title')).toBeFalsy();
+ expect(coverage.classes('coverage')).toBeFalsy();
+ expect(coverage.classes('no-coverage')).toBeFalsy();
+ });
+ });
});
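The new drag-and-drop tests above mix Vue Test Utils with @testing-library/dom: queries run against `wrapper.element`, and `fireEvent` dispatches real DOM events so the component's own listeners fire. A self-contained sketch of just that mechanism, using a plain element in place of a mounted component:
import { getByTestId, fireEvent } from '@testing-library/dom';

it('dispatches a native dragenter event on the queried element', () => {
  // Stand-in for wrapper.element from a mounted component.
  const root = document.createElement('div');
  root.innerHTML = '<div data-testid="left-side"></div>';

  const handler = jest.fn();
  root.querySelector('[data-testid="left-side"]').addEventListener('dragenter', handler);

  // getByTestId scopes the query to the given container, and fireEvent
  // dispatches a bubbling DOM event rather than calling a method directly.
  fireEvent.dragEnter(getByTestId(root, 'left-side'));

  expect(handler).toHaveBeenCalled();
});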
diff --git a/spec/frontend/diffs/components/diff_row_utils_spec.js b/spec/frontend/diffs/components/diff_row_utils_spec.js
index c001857fa49..d70d6b609ac 100644
--- a/spec/frontend/diffs/components/diff_row_utils_spec.js
+++ b/spec/frontend/diffs/components/diff_row_utils_spec.js
@@ -126,14 +126,14 @@ describe('lineCode', () => {
describe('classNameMapCell', () => {
it.each`
- line | hll | loggedIn | hovered | expectation
- ${undefined} | ${true} | ${true} | ${true} | ${[]}
- ${{ type: 'new' }} | ${false} | ${false} | ${false} | ${['new', { hll: false, 'is-over': false }]}
- ${{ type: 'new' }} | ${true} | ${true} | ${false} | ${['new', { hll: true, 'is-over': false }]}
- ${{ type: 'new' }} | ${true} | ${false} | ${true} | ${['new', { hll: true, 'is-over': false }]}
- ${{ type: 'new' }} | ${true} | ${true} | ${true} | ${['new', { hll: true, 'is-over': true }]}
- `('should return $expectation', ({ line, hll, loggedIn, hovered, expectation }) => {
- const classes = utils.classNameMapCell(line, hll, loggedIn, hovered);
+ line | hll | isLoggedIn | isHover | expectation
+ ${undefined} | ${true} | ${true} | ${true} | ${[]}
+ ${{ type: 'new' }} | ${false} | ${false} | ${false} | ${['new', { hll: false, 'is-over': false, new_line: true, old_line: false }]}
+ ${{ type: 'new' }} | ${true} | ${true} | ${false} | ${['new', { hll: true, 'is-over': false, new_line: true, old_line: false }]}
+ ${{ type: 'new' }} | ${true} | ${false} | ${true} | ${['new', { hll: true, 'is-over': false, new_line: true, old_line: false }]}
+ ${{ type: 'new' }} | ${true} | ${true} | ${true} | ${['new', { hll: true, 'is-over': true, new_line: true, old_line: false }]}
+ `('should return $expectation', ({ line, hll, isLoggedIn, isHover, expectation }) => {
+ const classes = utils.classNameMapCell({ line, hll, isLoggedIn, isHover });
expect(classes).toEqual(expectation);
});
});
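The diff_row_utils change above tracks `classNameMapCell` moving from positional arguments to a single destructured options object, with two extra class flags in the result. A sketch of the call-site difference, with the expected value taken from the table above; the import path matches the module this spec exercises:
import * as utils from '~/diffs/components/diff_row_utils';

// Old call style: order-dependent positional arguments.
// const classes = utils.classNameMapCell(line, hll, isLoggedIn, isHover);

// New call style: one options object, so call sites stay readable and
// individual flags can be omitted without counting argument positions.
const classes = utils.classNameMapCell({
  line: { type: 'new' },
  hll: true,
  isLoggedIn: true,
  isHover: false,
});

// Per the expectation table above, classes is:
// ['new', { hll: true, 'is-over': false, new_line: true, old_line: false }]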
diff --git a/spec/frontend/diffs/components/diff_stats_spec.js b/spec/frontend/diffs/components/diff_stats_spec.js
index 4dcbb3ec332..0aaec027c0a 100644
--- a/spec/frontend/diffs/components/diff_stats_spec.js
+++ b/spec/frontend/diffs/components/diff_stats_spec.js
@@ -39,7 +39,7 @@ describe('diff_stats', () => {
});
describe('line changes', () => {
- const findFileLine = name => wrapper.find(name);
+ const findFileLine = (name) => wrapper.find(name);
it('shows the amount of lines added', () => {
expect(findFileLine('.js-file-addition-line').text()).toBe(TEST_ADDED_LINES.toString());
@@ -51,10 +51,10 @@ describe('diff_stats', () => {
});
describe('files changes', () => {
- const findIcon = name =>
+ const findIcon = (name) =>
wrapper
.findAll(GlIcon)
- .filter(c => c.attributes('name') === name)
+ .filter((c) => c.attributes('name') === name)
.at(0).element.parentNode;
it('shows amount of file changed with plural "files" when 0 files has changed', () => {
diff --git a/spec/frontend/diffs/components/diff_view_spec.js b/spec/frontend/diffs/components/diff_view_spec.js
index 4d90112d8f6..3d36ebf14a3 100644
--- a/spec/frontend/diffs/components/diff_view_spec.js
+++ b/spec/frontend/diffs/components/diff_view_spec.js
@@ -1,19 +1,19 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import Vue from 'vue';
+import { shallowMount } from '@vue/test-utils';
import Vuex from 'vuex';
import DiffView from '~/diffs/components/diff_view.vue';
-// import DraftNote from '~/batch_comments/components/draft_note.vue';
-// import DiffRow from '~/diffs/components/diff_row.vue';
-// import DiffCommentCell from '~/diffs/components/diff_comment_cell.vue';
-// import DiffExpansionCell from '~/diffs/components/diff_expansion_cell.vue';
describe('DiffView', () => {
const DiffExpansionCell = { template: `<div/>` };
const DiffRow = { template: `<div/>` };
const DiffCommentCell = { template: `<div/>` };
const DraftNote = { template: `<div/>` };
- const createWrapper = props => {
- const localVue = createLocalVue();
- localVue.use(Vuex);
+ const showCommentForm = jest.fn();
+ const setSelectedCommentPosition = jest.fn();
+ const getDiffRow = (wrapper) => wrapper.findComponent(DiffRow).vm;
+
+ const createWrapper = (props) => {
+ Vue.use(Vuex);
const batchComments = {
getters: {
@@ -26,8 +26,13 @@ describe('DiffView', () => {
},
namespaced: true,
};
- const diffs = { getters: { commitId: () => 'abc123' }, namespaced: true };
+ const diffs = {
+ actions: { showCommentForm },
+ getters: { commitId: () => 'abc123' },
+ namespaced: true,
+ };
const notes = {
+ actions: { setSelectedCommentPosition },
state: { selectedCommentPosition: null, selectedCommentPositionHover: null },
};
@@ -41,7 +46,7 @@ describe('DiffView', () => {
...props,
};
const stubs = { DiffExpansionCell, DiffRow, DiffCommentCell, DraftNote };
- return shallowMount(DiffView, { propsData, store, localVue, stubs });
+ return shallowMount(DiffView, { propsData, store, stubs });
};
it('renders a match line', () => {
@@ -64,12 +69,7 @@ describe('DiffView', () => {
inline: type === 'inline',
});
expect(wrapper.findAll(DiffCommentCell).length).toBe(total);
- expect(
- wrapper
- .find(container)
- .find(DiffCommentCell)
- .exists(),
- ).toBe(true);
+ expect(wrapper.find(container).find(DiffCommentCell).exists()).toBe(true);
},
);
@@ -79,4 +79,55 @@ describe('DiffView', () => {
});
expect(wrapper.find(DraftNote).exists()).toBe(true);
});
+
+ describe('drag operations', () => {
+ it('sets `dragStart` onStartDragging', () => {
+ const wrapper = createWrapper({ diffLines: [{}] });
+
+ wrapper.findComponent(DiffRow).vm.$emit('startdragging', { test: true });
+ expect(wrapper.vm.dragStart).toEqual({ test: true });
+ });
+
+ it('does not call `setSelectedCommentPosition` on different chunks onDragOver', () => {
+ const wrapper = createWrapper({ diffLines: [{}] });
+ const diffRow = getDiffRow(wrapper);
+
+ diffRow.$emit('startdragging', { chunk: 0 });
+ diffRow.$emit('enterdragging', { chunk: 1 });
+
+ expect(setSelectedCommentPosition).not.toHaveBeenCalled();
+ });
+
+ it.each`
+ start | end | expectation
+ ${1} | ${2} | ${{ start: { index: 1 }, end: { index: 2 } }}
+ ${2} | ${1} | ${{ start: { index: 1 }, end: { index: 2 } }}
+ ${1} | ${1} | ${{ start: { index: 1 }, end: { index: 1 } }}
+ `(
+ 'calls `setSelectedCommentPosition` with correct `updatedLineRange`',
+ ({ start, end, expectation }) => {
+ const wrapper = createWrapper({ diffLines: [{}] });
+ const diffRow = getDiffRow(wrapper);
+
+ diffRow.$emit('startdragging', { chunk: 1, index: start });
+ diffRow.$emit('enterdragging', { chunk: 1, index: end });
+
+ const arg = setSelectedCommentPosition.mock.calls[0][1];
+
+ expect(arg).toMatchObject(expectation);
+ },
+ );
+
+ it('sets `dragStart` to null onStopDragging', () => {
+ const wrapper = createWrapper({ diffLines: [{}] });
+ const diffRow = getDiffRow(wrapper);
+
+ diffRow.$emit('startdragging', { test: true });
+ expect(wrapper.vm.dragStart).toEqual({ test: true });
+
+ diffRow.$emit('stopdragging');
+ expect(wrapper.vm.dragStart).toBeNull();
+ expect(showCommentForm).toHaveBeenCalled();
+ });
+ });
});
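The diff_view_spec changes above register `jest.fn()` handlers as actions on the namespaced store modules and then assert on them. One detail the `mock.calls[0][1]` access relies on: Vuex invokes an action handler as `handler(context, payload)`, so the dispatched payload is the second argument. A self-contained sketch with placeholder module and action names:
import Vue from 'vue';
import Vuex from 'vuex';

Vue.use(Vuex);

it('exposes the dispatched payload as the second mock argument', () => {
  const doSomething = jest.fn();

  const store = new Vuex.Store({
    modules: {
      demo: {
        namespaced: true,
        actions: { doSomething },
      },
    },
  });

  store.dispatch('demo/doSomething', { value: 1 });

  // calls[0][0] is the action context; calls[0][1] is the payload.
  expect(doSomething).toHaveBeenCalled();
  expect(doSomething.mock.calls[0][1]).toEqual({ value: 1 });
});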
diff --git a/spec/frontend/diffs/components/file_row_stats_spec.js b/spec/frontend/diffs/components/file_row_stats_spec.js
index 34d85ba10b0..3f5a63c19e5 100644
--- a/spec/frontend/diffs/components/file_row_stats_spec.js
+++ b/spec/frontend/diffs/components/file_row_stats_spec.js
@@ -1,33 +1,21 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
+import { mount } from '@vue/test-utils';
import FileRowStats from '~/diffs/components/file_row_stats.vue';
describe('Diff file row stats', () => {
- let Component;
- let vm;
-
- beforeAll(() => {
- Component = Vue.extend(FileRowStats);
- });
-
- beforeEach(() => {
- vm = mountComponent(Component, {
+ const wrapper = mount(FileRowStats, {
+ propsData: {
file: {
addedLines: 20,
removedLines: 10,
},
- });
- });
-
- afterEach(() => {
- vm.$destroy();
+ },
});
it('renders added lines count', () => {
- expect(vm.$el.querySelector('.cgreen').textContent).toContain('+20');
+ expect(wrapper.find('.cgreen').text()).toContain('+20');
});
it('renders removed lines count', () => {
- expect(vm.$el.querySelector('.cred').textContent).toContain('-10');
+ expect(wrapper.find('.cred').text()).toContain('-10');
});
});
diff --git a/spec/frontend/diffs/components/hidden_files_warning_spec.js b/spec/frontend/diffs/components/hidden_files_warning_spec.js
index 6fb4e4645f8..3f1f23a40f5 100644
--- a/spec/frontend/diffs/components/hidden_files_warning_spec.js
+++ b/spec/frontend/diffs/components/hidden_files_warning_spec.js
@@ -26,13 +26,15 @@ describe('HiddenFilesWarning', () => {
});
it('has a correct plain diff URL', () => {
- const plainDiffLink = wrapper.findAll('a').wrappers.filter(x => x.text() === 'Plain diff')[0];
+ const plainDiffLink = wrapper.findAll('a').wrappers.filter((x) => x.text() === 'Plain diff')[0];
expect(plainDiffLink.attributes('href')).toBe(propsData.plainDiffPath);
});
it('has a correct email patch URL', () => {
- const emailPatchLink = wrapper.findAll('a').wrappers.filter(x => x.text() === 'Email patch')[0];
+ const emailPatchLink = wrapper
+ .findAll('a')
+ .wrappers.filter((x) => x.text() === 'Email patch')[0];
expect(emailPatchLink.attributes('href')).toBe(propsData.emailPatchPath);
});
diff --git a/spec/frontend/diffs/components/image_diff_overlay_spec.js b/spec/frontend/diffs/components/image_diff_overlay_spec.js
index 087715111b4..93c9b922fdd 100644
--- a/spec/frontend/diffs/components/image_diff_overlay_spec.js
+++ b/spec/frontend/diffs/components/image_diff_overlay_spec.js
@@ -21,6 +21,11 @@ describe('Diffs image diff overlay component', () => {
wrapper = shallowMount(ImageDiffOverlay, {
store,
+ parentComponent: {
+ data() {
+ return dimensions;
+ },
+ },
propsData: {
discussions: [...imageDiffDiscussions],
fileHash: 'ABC',
@@ -28,9 +33,6 @@ describe('Diffs image diff overlay component', () => {
renderedHeight: 200,
...props,
},
- methods: {
- getImageDimensions: jest.fn().mockReturnValue(dimensions),
- },
});
}
@@ -49,18 +51,8 @@ describe('Diffs image diff overlay component', () => {
createComponent();
const imageBadges = getAllImageBadges();
- expect(
- imageBadges
- .at(0)
- .text()
- .trim(),
- ).toBe('1');
- expect(
- imageBadges
- .at(1)
- .text()
- .trim(),
- ).toBe('2');
+ expect(imageBadges.at(0).text().trim()).toBe('1');
+ expect(imageBadges.at(1).text().trim()).toBe('2');
});
it('renders icon when showCommentIcon is true', () => {
@@ -124,7 +116,7 @@ describe('Diffs image diff overlay component', () => {
describe('comment form', () => {
const getCommentIndicator = () => wrapper.find('.comment-indicator');
beforeEach(() => {
- createComponent({ canComment: true }, store => {
+ createComponent({ canComment: true }, (store) => {
store.state.diffs.commentForms.push({
fileHash: 'ABC',
x: 20,
diff --git a/spec/frontend/diffs/components/no_changes_spec.js b/spec/frontend/diffs/components/no_changes_spec.js
index 78805a1cddc..df9af51f9cf 100644
--- a/spec/frontend/diffs/components/no_changes_spec.js
+++ b/spec/frontend/diffs/components/no_changes_spec.js
@@ -1,20 +1,22 @@
-import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { createLocalVue, shallowMount, mount } from '@vue/test-utils';
import Vuex from 'vuex';
import { GlButton } from '@gitlab/ui';
import { createStore } from '~/mr_notes/stores';
import NoChanges from '~/diffs/components/no_changes.vue';
+import diffsMockData from '../mock_data/merge_request_diffs';
-describe('Diff no changes empty state', () => {
- let vm;
+const localVue = createLocalVue();
+localVue.use(Vuex);
- function createComponent(extendStore = () => {}) {
- const localVue = createLocalVue();
- localVue.use(Vuex);
+const TEST_TARGET_BRANCH = 'foo';
+const TEST_SOURCE_BRANCH = 'dev/update';
- const store = createStore();
- extendStore(store);
+describe('Diff no changes empty state', () => {
+ let wrapper;
+ let store;
- vm = shallowMount(NoChanges, {
+ function createComponent(mountFn = shallowMount) {
+ wrapper = mountFn(NoChanges, {
localVue,
store,
propsData: {
@@ -23,26 +25,61 @@ describe('Diff no changes empty state', () => {
});
}
+ beforeEach(() => {
+ store = createStore();
+ store.state.diffs.mergeRequestDiff = {};
+ store.state.notes.noteableData = {
+ target_branch: TEST_TARGET_BRANCH,
+ source_branch: TEST_SOURCE_BRANCH,
+ };
+ store.state.diffs.mergeRequestDiffs = diffsMockData;
+ });
+
afterEach(() => {
- vm.destroy();
+ wrapper.destroy();
+ wrapper = null;
});
+ const findMessage = () => wrapper.find('[data-testid="no-changes-message"]');
+
it('prevents XSS', () => {
- createComponent(store => {
- // eslint-disable-next-line no-param-reassign
- store.state.notes.noteableData = {
- source_branch: '<script>alert("test");</script>',
- target_branch: '<script>alert("test");</script>',
- };
- });
+ store.state.notes.noteableData = {
+ source_branch: '<script>alert("test");</script>',
+ target_branch: '<script>alert("test");</script>',
+ };
- expect(vm.find('script').exists()).toBe(false);
+ createComponent();
+
+ expect(wrapper.find('script').exists()).toBe(false);
});
describe('Renders', () => {
it('Show create commit button', () => {
createComponent();
- expect(vm.find(GlButton).exists()).toBe(true);
+
+ expect(wrapper.find(GlButton).exists()).toBe(true);
});
+
+ it.each`
+ expectedText | sourceIndex | targetIndex
+ ${`No changes between ${TEST_SOURCE_BRANCH} and ${TEST_TARGET_BRANCH}`} | ${null} | ${null}
+ ${`No changes between ${TEST_SOURCE_BRANCH} and version 1`} | ${diffsMockData[0].version_index} | ${1}
+ ${`No changes between version 3 and version 2`} | ${3} | ${2}
+ ${`No changes between version 3 and ${TEST_TARGET_BRANCH}`} | ${3} | ${-1}
+ `(
+ 'renders text "$expectedText" (sourceIndex=$sourceIndex and targetIndex=$targetIndex)',
+ ({ expectedText, targetIndex, sourceIndex }) => {
+ if (targetIndex !== null) {
+ store.state.diffs.startVersion = { version_index: targetIndex };
+ }
+ if (sourceIndex !== null) {
+ store.state.diffs.mergeRequestDiff.version_index = sourceIndex;
+ }
+
+ createComponent(mount);
+
+ expect(findMessage().text()).toBe(expectedText);
+ },
+ );
});
});
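The no_changes_spec additions use Jest's tagged-template form of `it.each`, where the first template row names the columns and every following row becomes one destructured test case, referenced in the title via `$column`. A trimmed, self-contained sketch of that form with made-up branch names:
describe('branch comparison message', () => {
  it.each`
    sourceName | targetName | expectedText
    ${'dev'}   | ${'main'}  | ${'No changes between dev and main'}
    ${'v2'}    | ${'v1'}    | ${'No changes between v2 and v1'}
  `('renders "$expectedText"', ({ sourceName, targetName, expectedText }) => {
    expect(`No changes between ${sourceName} and ${targetName}`).toBe(expectedText);
  });
});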
diff --git a/spec/frontend/diffs/components/parallel_diff_table_row_spec.js b/spec/frontend/diffs/components/parallel_diff_table_row_spec.js
index 57eff177261..445553706b7 100644
--- a/spec/frontend/diffs/components/parallel_diff_table_row_spec.js
+++ b/spec/frontend/diffs/components/parallel_diff_table_row_spec.js
@@ -40,7 +40,7 @@ describe('ParallelDiffTableRow', () => {
vm = wrapper.vm;
});
- it('does not highlight non empty line content when line does not match highlighted row', done => {
+ it('does not highlight non empty line content when line does not match highlighted row', (done) => {
vm.$nextTick()
.then(() => {
expect(vm.$el.querySelector('.line_content.right-side').classList).not.toContain('hll');
@@ -49,7 +49,7 @@ describe('ParallelDiffTableRow', () => {
.catch(done.fail);
});
- it('highlights nonempty line content when line is the highlighted row', done => {
+ it('highlights nonempty line content when line is the highlighted row', (done) => {
vm.$nextTick()
.then(() => {
vm.$store.state.diffs.highlightedRow = rightLine.line_code;
@@ -86,7 +86,7 @@ describe('ParallelDiffTableRow', () => {
}).$mount();
});
- it('does not highlight either line when line does not match highlighted row', done => {
+ it('does not highlight either line when line does not match highlighted row', (done) => {
vm.$nextTick()
.then(() => {
expect(vm.$el.querySelector('.line_content.right-side').classList).not.toContain('hll');
@@ -96,7 +96,7 @@ describe('ParallelDiffTableRow', () => {
.catch(done.fail);
});
- it('adds hll class to lineContent when line is the highlighted row', done => {
+ it('adds hll class to lineContent when line is the highlighted row', (done) => {
vm.$nextTick()
.then(() => {
vm.$store.state.diffs.highlightedRow = rightLine.line_code;
@@ -112,7 +112,7 @@ describe('ParallelDiffTableRow', () => {
});
describe('sets coverage title and class', () => {
- it('for lines with coverage', done => {
+ it('for lines with coverage', (done) => {
vm.$nextTick()
.then(() => {
const name = diffFileMockData.file_path;
@@ -132,7 +132,7 @@ describe('ParallelDiffTableRow', () => {
.catch(done.fail);
});
- it('for lines without coverage', done => {
+ it('for lines without coverage', (done) => {
vm.$nextTick()
.then(() => {
const name = diffFileMockData.file_path;
@@ -152,7 +152,7 @@ describe('ParallelDiffTableRow', () => {
.catch(done.fail);
});
- it('for unknown lines', done => {
+ it('for unknown lines', (done) => {
vm.$nextTick()
.then(() => {
vm.$store.state.diffs.coverageFiles = {};
diff --git a/spec/frontend/diffs/components/settings_dropdown_spec.js b/spec/frontend/diffs/components/settings_dropdown_spec.js
index eb9f9b4db73..fcb627c570a 100644
--- a/spec/frontend/diffs/components/settings_dropdown_spec.js
+++ b/spec/frontend/diffs/components/settings_dropdown_spec.js
@@ -73,7 +73,7 @@ describe('Diff settings dropdown component', () => {
});
it('sets list button as selected when renderTreeList is false', () => {
- createComponent(store => {
+ createComponent((store) => {
Object.assign(store.state.diffs, {
renderTreeList: false,
});
@@ -84,7 +84,7 @@ describe('Diff settings dropdown component', () => {
});
it('sets tree button as selected when renderTreeList is true', () => {
- createComponent(store => {
+ createComponent((store) => {
Object.assign(store.state.diffs, {
renderTreeList: true,
});
@@ -97,7 +97,7 @@ describe('Diff settings dropdown component', () => {
describe('compare changes', () => {
it('sets inline button as selected', () => {
- createComponent(store => {
+ createComponent((store) => {
Object.assign(store.state.diffs, {
diffViewType: INLINE_DIFF_VIEW_TYPE,
});
@@ -108,7 +108,7 @@ describe('Diff settings dropdown component', () => {
});
it('sets parallel button as selected', () => {
- createComponent(store => {
+ createComponent((store) => {
Object.assign(store.state.diffs, {
diffViewType: PARALLEL_DIFF_VIEW_TYPE,
});
@@ -137,7 +137,7 @@ describe('Diff settings dropdown component', () => {
describe('whitespace toggle', () => {
it('does not set as checked when showWhitespace is false', () => {
- createComponent(store => {
+ createComponent((store) => {
Object.assign(store.state.diffs, {
showWhitespace: false,
});
@@ -147,7 +147,7 @@ describe('Diff settings dropdown component', () => {
});
it('sets as checked when showWhitespace is true', () => {
- createComponent(store => {
+ createComponent((store) => {
Object.assign(store.state.diffs, {
showWhitespace: true,
});
@@ -183,7 +183,7 @@ describe('Diff settings dropdown component', () => {
`(
'sets the checkbox to { checked: $checked } if the fileByFile setting is $fileByFile',
async ({ fileByFile, checked }) => {
- createComponent(store => {
+ createComponent((store) => {
Object.assign(store.state.diffs, {
viewDiffsFileByFile: fileByFile,
});
@@ -202,7 +202,7 @@ describe('Diff settings dropdown component', () => {
`(
'when the file by file setting starts as $start, toggling the checkbox should emit an event set to $emit',
async ({ start, emit }) => {
- createComponent(store => {
+ createComponent((store) => {
Object.assign(store.state.diffs, {
viewDiffsFileByFile: start,
});
diff --git a/spec/frontend/diffs/components/tree_list_spec.js b/spec/frontend/diffs/components/tree_list_spec.js
index c89403e4869..4666321e0c2 100644
--- a/spec/frontend/diffs/components/tree_list_spec.js
+++ b/spec/frontend/diffs/components/tree_list_spec.js
@@ -88,16 +88,8 @@ describe('Diffs tree list component', () => {
it('renders tree', () => {
expect(getFileRows()).toHaveLength(2);
- expect(
- getFileRows()
- .at(0)
- .html(),
- ).toContain('index.js');
- expect(
- getFileRows()
- .at(1)
- .html(),
- ).toContain('app');
+ expect(getFileRows().at(0).html()).toContain('index.js');
+ expect(getFileRows().at(1).html()).toContain('app');
});
it('hides file stats', () => {
@@ -111,9 +103,7 @@ describe('Diffs tree list component', () => {
it('calls toggleTreeOpen when clicking folder', () => {
jest.spyOn(wrapper.vm.$store, 'dispatch').mockReturnValue(undefined);
- getFileRows()
- .at(1)
- .trigger('click');
+ getFileRows().at(1).trigger('click');
expect(wrapper.vm.$store.dispatch).toHaveBeenCalledWith('diffs/toggleTreeOpen', 'app');
});
diff --git a/spec/frontend/diffs/store/actions_spec.js b/spec/frontend/diffs/store/actions_spec.js
index fef7676e795..056ac23fcf7 100644
--- a/spec/frontend/diffs/store/actions_spec.js
+++ b/spec/frontend/diffs/store/actions_spec.js
@@ -2,7 +2,8 @@ import MockAdapter from 'axios-mock-adapter';
import Cookies from 'js-cookie';
import mockDiffFile from 'jest/diffs/mock_data/diff_file';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
-import { TEST_HOST } from 'jest/helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
+import testAction from 'helpers/vuex_action_helper';
import {
DIFF_VIEW_COOKIE_NAME,
INLINE_DIFF_VIEW_TYPE,
@@ -49,11 +50,11 @@ import {
setCurrentDiffFileIdFromNote,
navigateToDiffFileIndex,
setFileByFile,
+ reviewFile,
} from '~/diffs/store/actions';
import eventHub from '~/notes/event_hub';
import * as types from '~/diffs/store/mutation_types';
import axios from '~/lib/utils/axios_utils';
-import testAction from '../../helpers/vuex_action_helper';
import * as utils from '~/diffs/store/utils';
import * as commonUtils from '~/lib/utils/common_utils';
import { mergeUrlParams } from '~/lib/utils/url_utility';
@@ -77,22 +78,22 @@ describe('DiffsStoreActions', () => {
jest.spyOn(commonUtils, 'scrollToElement').mockImplementation(() => null);
jest.spyOn(utils, 'convertExpandLines').mockImplementation(() => null);
jest.spyOn(utils, 'idleCallback').mockImplementation(() => null);
- ['requestAnimationFrame', 'requestIdleCallback'].forEach(method => {
- global[method] = cb => {
+ ['requestAnimationFrame', 'requestIdleCallback'].forEach((method) => {
+ global[method] = (cb) => {
cb();
};
});
});
afterEach(() => {
- ['requestAnimationFrame', 'requestIdleCallback'].forEach(method => {
+ ['requestAnimationFrame', 'requestIdleCallback'].forEach((method) => {
global[method] = originalMethods[method];
});
createFlash.mockClear();
});
describe('setBaseConfig', () => {
- it('should set given endpoint and project path', done => {
+ it('should set given endpoint and project path', (done) => {
const endpoint = '/diffs/set/endpoint';
const endpointMetadata = '/diffs/set/endpoint/metadata';
const endpointBatch = '/diffs/set/endpoint/batch';
@@ -152,7 +153,7 @@ describe('DiffsStoreActions', () => {
mock.restore();
});
- it('should fetch batch diff files', done => {
+ it('should fetch batch diff files', (done) => {
const endpointBatch = '/fetch/diffs_batch';
const res1 = { diff_files: [{ file_hash: 'test' }], pagination: { next_page: 2 } };
const res2 = { diff_files: [{ file_hash: 'test2' }], pagination: {} };
@@ -240,7 +241,7 @@ describe('DiffsStoreActions', () => {
mock.onGet(endpointMetadata).reply(200, diffMetadata);
});
- it('should fetch diff meta information', done => {
+ it('should fetch diff meta information', (done) => {
testAction(
fetchDiffFilesMeta,
{},
@@ -270,8 +271,8 @@ describe('DiffsStoreActions', () => {
afterEach(() => mock.restore());
- it('should commit SET_COVERAGE_DATA with received response', done => {
- const data = { files: { 'app.js': { '1': 0, '2': 1 } } };
+ it('should commit SET_COVERAGE_DATA with received response', (done) => {
+ const data = { files: { 'app.js': { 1: 0, 2: 1 } } };
mock.onGet(endpointCoverage).reply(200, { data });
@@ -285,7 +286,7 @@ describe('DiffsStoreActions', () => {
);
});
- it('should show flash on API error', done => {
+ it('should show flash on API error', (done) => {
mock.onGet(endpointCoverage).reply(400);
testAction(fetchCoverageFiles, {}, { endpointCoverage }, [], [], () => {
@@ -310,7 +311,7 @@ describe('DiffsStoreActions', () => {
window.location.hash = '';
});
- it('should merge discussions into diffs', done => {
+ it('should merge discussions into diffs', (done) => {
window.location.hash = 'ABC_123';
const state = {
@@ -404,7 +405,7 @@ describe('DiffsStoreActions', () => {
);
});
- it('dispatches setCurrentDiffFileIdFromNote with note ID', done => {
+ it('dispatches setCurrentDiffFileIdFromNote with note ID', (done) => {
window.location.hash = 'note_123';
testAction(
@@ -419,7 +420,7 @@ describe('DiffsStoreActions', () => {
});
describe('removeDiscussionsFromDiff', () => {
- it('should remove discussions from diffs', done => {
+ it('should remove discussions from diffs', (done) => {
const state = {
diffFiles: [
{
@@ -511,7 +512,7 @@ describe('DiffsStoreActions', () => {
});
describe('setInlineDiffViewType', () => {
- it('should set diff view type to inline and also set the cookie properly', done => {
+ it('should set diff view type to inline and also set the cookie properly', (done) => {
testAction(
setInlineDiffViewType,
null,
@@ -529,7 +530,7 @@ describe('DiffsStoreActions', () => {
});
describe('setParallelDiffViewType', () => {
- it('should set diff view type to parallel and also set the cookie properly', done => {
+ it('should set diff view type to parallel and also set the cookie properly', (done) => {
testAction(
setParallelDiffViewType,
null,
@@ -547,7 +548,7 @@ describe('DiffsStoreActions', () => {
});
describe('showCommentForm', () => {
- it('should call mutation to show comment form', done => {
+ it('should call mutation to show comment form', (done) => {
const payload = { lineCode: 'lineCode', fileHash: 'hash' };
testAction(
@@ -562,7 +563,7 @@ describe('DiffsStoreActions', () => {
});
describe('cancelCommentForm', () => {
- it('should call mutation to cancel comment form', done => {
+ it('should call mutation to cancel comment form', (done) => {
const payload = { lineCode: 'lineCode', fileHash: 'hash' };
testAction(
@@ -577,7 +578,7 @@ describe('DiffsStoreActions', () => {
});
describe('loadMoreLines', () => {
- it('should call mutation to show comment form', done => {
+ it('should call mutation to show comment form', (done) => {
const endpoint = '/diffs/load/more/lines';
const params = { since: 6, to: 26 };
const lineNumbers = { oldLineNumber: 3, newLineNumber: 5 };
@@ -610,7 +611,7 @@ describe('DiffsStoreActions', () => {
describe('loadCollapsedDiff', () => {
const state = { showWhitespace: true };
- it('should fetch data and call mutation with response and the give parameter', done => {
+ it('should fetch data and call mutation with response and the given parameter', (done) => {
const file = { hash: 123, load_collapsed_diff_url: '/load/collapsed/diff/url' };
const data = { hash: 123, parallelDiffLines: [{ lineCode: 1 }] };
const mock = new MockAdapter(axios);
@@ -810,7 +811,7 @@ describe('DiffsStoreActions', () => {
});
describe('saveDiffDiscussion', () => {
- it('dispatches actions', done => {
+ it('dispatches actions', (done) => {
const commitId = 'something';
const formData = {
diffFile: { ...mockDiffFile },
@@ -822,7 +823,7 @@ describe('DiffsStoreActions', () => {
id: commitId,
},
};
- const dispatch = jest.fn(name => {
+ const dispatch = jest.fn((name) => {
switch (name) {
case 'saveNote':
return Promise.resolve({
@@ -854,7 +855,7 @@ describe('DiffsStoreActions', () => {
});
describe('toggleTreeOpen', () => {
- it('commits TOGGLE_FOLDER_OPEN', done => {
+ it('commits TOGGLE_FOLDER_OPEN', (done) => {
testAction(
toggleTreeOpen,
'path',
@@ -903,7 +904,7 @@ describe('DiffsStoreActions', () => {
});
describe('setShowTreeList', () => {
- it('commits toggle', done => {
+ it('commits toggle', (done) => {
testAction(
setShowTreeList,
{ showTreeList: true },
@@ -991,7 +992,7 @@ describe('DiffsStoreActions', () => {
});
describe('setRenderTreeList', () => {
- it('commits SET_RENDER_TREE_LIST', done => {
+ it('commits SET_RENDER_TREE_LIST', (done) => {
testAction(
setRenderTreeList,
true,
@@ -1014,7 +1015,7 @@ describe('DiffsStoreActions', () => {
jest.spyOn(eventHub, '$emit').mockImplementation();
});
- it('commits SET_SHOW_WHITESPACE', done => {
+ it('commits SET_SHOW_WHITESPACE', (done) => {
testAction(
setShowWhitespace,
{ showWhitespace: true },
@@ -1057,13 +1058,13 @@ describe('DiffsStoreActions', () => {
});
describe('setRenderIt', () => {
- it('commits RENDER_FILE', done => {
+ it('commits RENDER_FILE', (done) => {
testAction(setRenderIt, 'file', {}, [{ type: types.RENDER_FILE, payload: 'file' }], [], done);
});
});
describe('receiveFullDiffError', () => {
- it('updates state with the file that did not load', done => {
+ it('updates state with the file that did not load', (done) => {
testAction(
receiveFullDiffError,
'file',
@@ -1091,7 +1092,7 @@ describe('DiffsStoreActions', () => {
mock.onGet(`${TEST_HOST}/context`).replyOnce(200, ['test']);
});
- it('commits the success and dispatches an action to expand the new lines', done => {
+ it('commits the success and dispatches an action to expand the new lines', (done) => {
const file = {
context_lines_path: `${TEST_HOST}/context`,
file_path: 'test',
@@ -1113,7 +1114,7 @@ describe('DiffsStoreActions', () => {
mock.onGet(`${TEST_HOST}/context`).replyOnce(500);
});
- it('dispatches receiveFullDiffError', done => {
+ it('dispatches receiveFullDiffError', (done) => {
testAction(
fetchFullDiff,
{ context_lines_path: `${TEST_HOST}/context`, file_path: 'test', file_hash: 'test' },
@@ -1135,7 +1136,7 @@ describe('DiffsStoreActions', () => {
};
});
- it('dispatches fetchFullDiff when file is not expanded', done => {
+ it('dispatches fetchFullDiff when file is not expanded', (done) => {
testAction(
toggleFullDiff,
'test',
@@ -1211,7 +1212,7 @@ describe('DiffsStoreActions', () => {
});
describe('setFileUserCollapsed', () => {
- it('commits SET_FILE_COLLAPSED', done => {
+ it('commits SET_FILE_COLLAPSED', (done) => {
testAction(
setFileCollapsedByUser,
{ filePath: 'test', collapsed: true },
@@ -1230,12 +1231,12 @@ describe('DiffsStoreActions', () => {
describe('setExpandedDiffLines', () => {
beforeEach(() => {
- utils.idleCallback.mockImplementation(cb => {
+ utils.idleCallback.mockImplementation((cb) => {
cb({ timeRemaining: () => 50 });
});
});
- it('commits SET_CURRENT_VIEW_DIFF_FILE_LINES when lines less than MAX_RENDERING_DIFF_LINES', done => {
+ it('commits SET_CURRENT_VIEW_DIFF_FILE_LINES when lines less than MAX_RENDERING_DIFF_LINES', (done) => {
utils.convertExpandLines.mockImplementation(() => ['test']);
testAction(
@@ -1253,7 +1254,7 @@ describe('DiffsStoreActions', () => {
);
});
- it('commits ADD_CURRENT_VIEW_DIFF_FILE_LINES when lines more than MAX_RENDERING_DIFF_LINES', done => {
+ it('commits ADD_CURRENT_VIEW_DIFF_FILE_LINES when lines more than MAX_RENDERING_DIFF_LINES', (done) => {
const lines = new Array(501).fill().map((_, i) => `line-${i}`);
utils.convertExpandLines.mockReturnValue(lines);
@@ -1280,7 +1281,7 @@ describe('DiffsStoreActions', () => {
});
describe('setSuggestPopoverDismissed', () => {
- it('commits SET_SHOW_SUGGEST_POPOVER', done => {
+ it('commits SET_SHOW_SUGGEST_POPOVER', (done) => {
const state = { dismissEndpoint: `${TEST_HOST}/-/user_callouts` };
const mock = new MockAdapter(axios);
mock.onPost(state.dismissEndpoint).reply(200, {});
@@ -1409,7 +1410,7 @@ describe('DiffsStoreActions', () => {
const state = { diffFiles: [{ file_hash: '123' }] };
const rootGetters = {
getDiscussion: () => ({ diff_file: { file_hash: '123' } }),
- notesById: { '1': { discussion_id: '2' } },
+ notesById: { 1: { discussion_id: '2' } },
};
setCurrentDiffFileIdFromNote({ commit, state, rootGetters }, '1');
@@ -1422,7 +1423,7 @@ describe('DiffsStoreActions', () => {
const state = { diffFiles: [{ file_hash: '123' }] };
const rootGetters = {
getDiscussion: () => ({ id: '1' }),
- notesById: { '1': { discussion_id: '2' } },
+ notesById: { 1: { discussion_id: '2' } },
};
setCurrentDiffFileIdFromNote({ commit, state, rootGetters }, '1');
@@ -1435,7 +1436,7 @@ describe('DiffsStoreActions', () => {
const state = { diffFiles: [{ file_hash: '123' }] };
const rootGetters = {
getDiscussion: () => ({ diff_file: { file_hash: '124' } }),
- notesById: { '1': { discussion_id: '2' } },
+ notesById: { 1: { discussion_id: '2' } },
};
setCurrentDiffFileIdFromNote({ commit, state, rootGetters }, '1');
@@ -1445,7 +1446,7 @@ describe('DiffsStoreActions', () => {
});
describe('navigateToDiffFileIndex', () => {
- it('commits VIEW_DIFF_FILE', done => {
+ it('commits VIEW_DIFF_FILE', (done) => {
testAction(
navigateToDiffFileIndex,
0,
@@ -1472,4 +1473,46 @@ describe('DiffsStoreActions', () => {
);
});
});
+
+ describe('reviewFile', () => {
+ const file = {
+ id: '123',
+ file_identifier_hash: 'abc',
+ load_collapsed_diff_url: 'gitlab-org/gitlab-test/-/merge_requests/1/diffs',
+ };
+ it.each`
+ reviews | diffFile | reviewed
+ ${{ abc: ['123'] }} | ${file} | ${true}
+ ${{}} | ${file} | ${false}
+ `(
+ 'sets reviews ($reviews) to localStorage and state for file $file if it is marked reviewed=$reviewed',
+ ({ reviews, diffFile, reviewed }) => {
+ const commitSpy = jest.fn();
+ const getterSpy = jest.fn().mockReturnValue([]);
+
+ reviewFile(
+ {
+ commit: commitSpy,
+ getters: {
+ fileReviews: getterSpy,
+ },
+ state: {
+ mrReviews: { abc: ['123'] },
+ },
+ },
+ {
+ file: diffFile,
+ reviewed,
+ },
+ );
+
+ expect(localStorage.setItem).toHaveBeenCalledTimes(1);
+ expect(localStorage.setItem).toHaveBeenCalledWith(
+ 'gitlab-org/gitlab-test/-/merge_requests/1-file-reviews',
+ JSON.stringify(reviews),
+ );
+ expect(commitSpy).toHaveBeenCalledWith(types.SET_MR_FILE_REVIEWS, reviews);
+ },
+ );
+ });
});
diff --git a/spec/frontend/diffs/store/getters_spec.js b/spec/frontend/diffs/store/getters_spec.js
index 7e936c561fc..4d7f861ac22 100644
--- a/spec/frontend/diffs/store/getters_spec.js
+++ b/spec/frontend/diffs/store/getters_spec.js
@@ -251,9 +251,12 @@ describe('Diffs Module Getters', () => {
discussionMock.diff_file.file_hash = diffFileMock.file_hash;
expect(
- getters.getDiffFileDiscussions(localState, {}, {}, { discussions: [discussionMock] })(
- diffFileMock,
- ).length,
+ getters.getDiffFileDiscussions(
+ localState,
+ {},
+ {},
+ { discussions: [discussionMock] },
+ )(diffFileMock).length,
).toEqual(1);
});
@@ -345,7 +348,7 @@ describe('Diffs Module Getters', () => {
describe('fileLineCoverage', () => {
beforeEach(() => {
- Object.assign(localState.coverageFiles, { files: { 'app.js': { '1': 0, '2': 5 } } });
+ Object.assign(localState.coverageFiles, { files: { 'app.js': { 1: 0, 2: 5 } } });
});
it('returns empty object when no coverage data is available', () => {
@@ -372,4 +375,26 @@ describe('Diffs Module Getters', () => {
});
});
});
+
+ describe('fileReviews', () => {
+ const file1 = { id: '123', file_identifier_hash: 'abc' };
+ const file2 = { id: '098', file_identifier_hash: 'abc' };
+
+ it.each`
+ reviews | files | fileReviews
+ ${{}} | ${[file1, file2]} | ${[false, false]}
+ ${{ abc: ['123'] }} | ${[file1, file2]} | ${[true, false]}
+ ${{ abc: ['098'] }} | ${[file1, file2]} | ${[false, true]}
+ ${{ def: ['123'] }} | ${[file1, file2]} | ${[false, false]}
+ ${{ abc: ['123'], def: ['098'] }} | ${[]} | ${[]}
+ `(
+ 'returns $fileReviews based on the diff files in state and the existing reviews $reviews',
+ ({ reviews, files, fileReviews }) => {
+ localState.diffFiles = files;
+ localState.mrReviews = reviews;
+
+ expect(getters.fileReviews(localState)).toStrictEqual(fileReviews);
+ },
+ );
+ });
});
diff --git a/spec/frontend/diffs/store/getters_versions_dropdowns_spec.js b/spec/frontend/diffs/store/getters_versions_dropdowns_spec.js
index 0343ef75732..f7954515422 100644
--- a/spec/frontend/diffs/store/getters_versions_dropdowns_spec.js
+++ b/spec/frontend/diffs/store/getters_versions_dropdowns_spec.js
@@ -49,7 +49,7 @@ describe('Compare diff version dropdowns', () => {
let expectedHeadVersion;
const originalLocation = window.location;
- const setupTest = includeDiffHeadParam => {
+ const setupTest = (includeDiffHeadParam) => {
const diffHeadParam = includeDiffHeadParam ? '?diff_head=true' : '';
Object.defineProperty(window, 'location', {
@@ -81,7 +81,7 @@ describe('Compare diff version dropdowns', () => {
};
};
- const assertVersions = targetVersions => {
+ const assertVersions = (targetVersions) => {
// base and head should be the last two versions in that order
const targetBaseVersion = targetVersions[targetVersions.length - 2];
const targetHeadVersion = targetVersions[targetVersions.length - 1];
@@ -136,6 +136,7 @@ describe('Compare diff version dropdowns', () => {
...firstDiff,
href: firstDiff.version_path,
commitsText: `${firstDiff.commits_count} commits,`,
+ isLatestVersion: true,
versionName: 'latest version',
selected: true,
};
@@ -144,6 +145,9 @@ describe('Compare diff version dropdowns', () => {
selectedSourceIndex: expectedShape.version_index,
});
expect(sourceVersions[0]).toEqual(expectedShape);
- expect(sourceVersions[1].selected).toBe(false);
+ expect(sourceVersions[1]).toMatchObject({
+ selected: false,
+ isLatestVersion: false,
+ });
});
});
diff --git a/spec/frontend/diffs/store/mutations_spec.js b/spec/frontend/diffs/store/mutations_spec.js
index 13e7cad835d..2c342d8e2a5 100644
--- a/spec/frontend/diffs/store/mutations_spec.js
+++ b/spec/frontend/diffs/store/mutations_spec.js
@@ -105,7 +105,7 @@ describe('DiffsStoreMutations', () => {
describe('SET_COVERAGE_DATA', () => {
it('should set coverage data properly', () => {
const state = { coverageFiles: {} };
- const coverage = { 'app.js': { '1': 0, '2': 1 } };
+ const coverage = { 'app.js': { 1: 0, 2: 1 } };
mutations[types.SET_COVERAGE_DATA](state, coverage);
@@ -906,4 +906,19 @@ describe('DiffsStoreMutations', () => {
expect(state.viewDiffsFileByFile).toBe(value);
});
});
+
+ describe('SET_MR_FILE_REVIEWS', () => {
+ it.each`
+ newReviews | oldReviews
+ ${{ abc: ['123'] }} | ${{}}
+ ${{ abc: [] }} | ${{ abc: ['123'] }}
+ ${{}} | ${{ abc: ['123'] }}
+ `('sets mrReviews to $newReviews', ({ newReviews, oldReviews }) => {
+ const state = { mrReviews: oldReviews };
+
+ mutations[types.SET_MR_FILE_REVIEWS](state, newReviews);
+
+ expect(state.mrReviews).toStrictEqual(newReviews);
+ });
+ });
});
diff --git a/spec/frontend/diffs/store/utils_spec.js b/spec/frontend/diffs/store/utils_spec.js
index 7ee97224707..a19e5e91677 100644
--- a/spec/frontend/diffs/store/utils_spec.js
+++ b/spec/frontend/diffs/store/utils_spec.js
@@ -481,7 +481,7 @@ describe('DiffsStoreUtils', () => {
});
it('adds the `.brokenSymlink` property to each diff file', () => {
- preparedDiff.diff_files.forEach(file => {
+ preparedDiff.diff_files.forEach((file) => {
expect(file).toEqual(expect.objectContaining({ brokenSymlink: false }));
});
});
@@ -492,9 +492,9 @@ describe('DiffsStoreUtils', () => {
...splitInlineDiff.diff_files,
...splitParallelDiff.diff_files,
...completedDiff.diff_files,
- ].flatMap(file => [...file[INLINE_DIFF_LINES_KEY]]);
+ ].flatMap((file) => [...file[INLINE_DIFF_LINES_KEY]]);
- lines.forEach(line => {
+ lines.forEach((line) => {
expect(line.commentsDisabled).toBe(false);
});
});
@@ -560,7 +560,7 @@ describe('DiffsStoreUtils', () => {
});
it('adds the `.brokenSymlink` property to each meta diff file', () => {
- preparedDiffFiles.forEach(file => {
+ preparedDiffFiles.forEach((file) => {
expect(file).toMatchObject({ brokenSymlink: false });
});
});
@@ -1119,22 +1119,87 @@ describe('DiffsStoreUtils', () => {
});
});
+ describe('isConflictMarker', () => {
+ it.each`
+ type | expected
+ ${'conflict_marker_our'} | ${true}
+ ${'conflict_marker_their'} | ${true}
+ ${'conflict_their'} | ${false}
+ ${'conflict_our'} | ${false}
+ `('returns $expected when type is $type', ({ type, expected }) => {
+ expect(utils.isConflictMarker({ type })).toBe(expected);
+ });
+ });
+
+ describe('isConflictOur', () => {
+ it.each`
+ type | expected
+ ${'conflict_marker_our'} | ${false}
+ ${'conflict_marker_their'} | ${false}
+ ${'conflict_their'} | ${false}
+ ${'conflict_our'} | ${true}
+ `('returns $expected when type is $type', ({ type, expected }) => {
+ expect(utils.isConflictOur({ type })).toBe(expected);
+ });
+ });
+
+ describe('isConflictTheir', () => {
+ it.each`
+ type | expected
+ ${'conflict_marker_our'} | ${false}
+ ${'conflict_marker_their'} | ${false}
+ ${'conflict_their'} | ${true}
+ ${'conflict_our'} | ${false}
+ `('returns $expected when type is $type', ({ type, expected }) => {
+ expect(utils.isConflictTheir({ type })).toBe(expected);
+ });
+ });
+
describe('parallelizeDiffLines', () => {
it('converts inline diff lines to parallel diff lines', () => {
const file = getDiffFileMock();
- expect(utils.parallelizeDiffLines(file[INLINE_DIFF_LINES_KEY])).toEqual(
+ expect(utils.parallelizeDiffLines(file[INLINE_DIFF_LINES_KEY])).toMatchObject(
file.parallel_diff_lines,
);
});
+ it('converts conflicted diffs line', () => {
+ const lines = [
+ { type: 'new' },
+ { type: 'conflict_marker_our' },
+ { type: 'conflict_our' },
+ { type: 'conflict_marker' },
+ { type: 'conflict_their' },
+ { type: 'conflict_marker_their' },
+ ];
+
+ expect(utils.parallelizeDiffLines(lines)).toEqual([
+ {
+ left: null,
+ right: {
+ chunk: 0,
+ type: 'new',
+ },
+ },
+ {
+ left: { chunk: 0, type: 'conflict_marker_our' },
+ right: { chunk: 0, type: 'conflict_marker_their' },
+ },
+ {
+ left: { chunk: 0, type: 'conflict_our' },
+ right: { chunk: 0, type: 'conflict_their' },
+ },
+ ]);
+ });
+
it('converts inline diff lines', () => {
const file = getDiffFileMock();
const files = utils.parallelizeDiffLines(file.highlighted_diff_lines, true);
- expect(files[5].left).toEqual(file.parallel_diff_lines[5].left);
+ expect(files[5].left).toMatchObject(file.parallel_diff_lines[5].left);
expect(files[5].right).toBeNull();
- expect(files[6].left).toEqual(file.parallel_diff_lines[5].right);
+ expect(files[6].left).toMatchObject(file.parallel_diff_lines[5].right);
expect(files[6].right).toBeNull();
});
});
diff --git a/spec/frontend/diffs/utils/diff_file_spec.js b/spec/frontend/diffs/utils/diff_file_spec.js
index 2e6247b8c07..2de8db28e71 100644
--- a/spec/frontend/diffs/utils/diff_file_spec.js
+++ b/spec/frontend/diffs/utils/diff_file_spec.js
@@ -1,6 +1,8 @@
import { prepareRawDiffFile } from '~/diffs/utils/diff_file';
function getDiffFiles() {
+ const loadFull = 'namespace/project/-/merge_requests/12345/diff_for_path?file_identifier=abc';
+
return [
{
blob: {
@@ -8,6 +10,7 @@ function getDiffFiles() {
},
file_hash: 'ABC', // This file is just a normal file
file_identifier_hash: 'ABC1',
+ load_collapsed_diff_url: loadFull,
},
{
blob: {
@@ -15,6 +18,7 @@ function getDiffFiles() {
},
file_hash: 'DEF', // This file replaces a symlink
file_identifier_hash: 'DEF1',
+ load_collapsed_diff_url: loadFull,
a_mode: '0',
b_mode: '0755',
},
@@ -24,6 +28,7 @@ function getDiffFiles() {
},
file_hash: 'DEF', // This symlink is replaced by a file
file_identifier_hash: 'DEF2',
+ load_collapsed_diff_url: loadFull,
a_mode: '120000',
b_mode: '0',
},
@@ -33,6 +38,7 @@ function getDiffFiles() {
},
file_hash: 'GHI', // This symlink replaces a file
file_identifier_hash: 'GHI1',
+ load_collapsed_diff_url: loadFull,
a_mode: '0',
b_mode: '120000',
},
@@ -42,6 +48,7 @@ function getDiffFiles() {
},
file_hash: 'GHI', // This file is replaced by a symlink
file_identifier_hash: 'GHI2',
+ load_collapsed_diff_url: loadFull,
a_mode: '0755',
b_mode: '0',
},
@@ -86,11 +93,11 @@ describe('diff_file utilities', () => {
it.each`
fileIndex | id
- ${0} | ${'8dcd585e-a421-4dab-a04e-6f88c81b7b4c'}
- ${1} | ${'3f178b78-392b-44a4-bd7d-5d6192208a97'}
- ${2} | ${'3d9e1354-cddf-4a11-8234-f0413521b2e5'}
- ${3} | ${'460f005b-d29d-43c1-9a08-099a7c7f08de'}
- ${4} | ${'d8c89733-6ce1-4455-ae3d-f8aad6ee99f9'}
+ ${0} | ${'68296a4f-f1c7-445a-bd0e-6e3b02c4eec0'}
+ ${1} | ${'051c9bb8-cdba-4eb7-b8d1-508906e6d8ba'}
+ ${2} | ${'ed3d53d5-5da0-412d-a3c6-7213f84e88d3'}
+ ${3} | ${'39d998dc-bc69-4b19-a6af-41e4369c2bd5'}
+ ${4} | ${'7072d115-ce39-423c-8346-9fcad58cd68e'}
`('sets the file id properly { id: $id } on normal diff files', ({ fileIndex, id }) => {
const preppedFile = prepareRawDiffFile({
file: files[fileIndex],
@@ -122,5 +129,18 @@ describe('diff_file utilities', () => {
expect(preppedFile).not.toHaveProp('id');
});
+
+ it('does not set the id property if the file is missing a `load_collapsed_diff_url` property', () => {
+ const fileMissingContentSha = { ...files[0] };
+
+ delete fileMissingContentSha.load_collapsed_diff_url;
+
+ const preppedFile = prepareRawDiffFile({
+ file: fileMissingContentSha,
+ allFiles: files,
+ });
+
+ expect(preppedFile).not.toHaveProp('id');
+ });
});
});
diff --git a/spec/frontend/diffs/utils/file_reviews_spec.js b/spec/frontend/diffs/utils/file_reviews_spec.js
new file mode 100644
index 00000000000..819426ee75f
--- /dev/null
+++ b/spec/frontend/diffs/utils/file_reviews_spec.js
@@ -0,0 +1,146 @@
+import { useLocalStorageSpy } from 'helpers/local_storage_helper';
+
+import {
+ getReviewsForMergeRequest,
+ setReviewsForMergeRequest,
+ isFileReviewed,
+ markFileReview,
+ reviewable,
+} from '~/diffs/utils/file_reviews';
+
+function getDefaultReviews() {
+ return {
+ abc: ['123', '098'],
+ };
+}
+
+describe('File Review(s) utilities', () => {
+ const mrPath = 'my/fake/mr/42';
+ const storageKey = `${mrPath}-file-reviews`;
+ const file = { id: '123', file_identifier_hash: 'abc' };
+ const storedValue = JSON.stringify(getDefaultReviews());
+ let reviews;
+
+ useLocalStorageSpy();
+
+ beforeEach(() => {
+ reviews = getDefaultReviews();
+ localStorage.clear();
+ });
+
+ describe('getReviewsForMergeRequest', () => {
+ it('fetches the appropriate stored reviews from localStorage', () => {
+ getReviewsForMergeRequest(mrPath);
+
+ expect(localStorage.getItem).toHaveBeenCalledTimes(1);
+ expect(localStorage.getItem).toHaveBeenCalledWith(storageKey);
+ });
+
+ it('returns an empty object if there have never been stored reviews for this MR', () => {
+ expect(getReviewsForMergeRequest(mrPath)).toStrictEqual({});
+ });
+
+ it.each`
+ data
+ ${'+++'}
+ ${'{ lookinGood: "yeah!", missingClosingBrace: "yeah :(" '}
+ `(
+ "returns an empty object if the stored reviews are corrupted/aren't parseable as JSON (like: $data)",
+ ({ data }) => {
+ localStorage.getItem.mockReturnValueOnce(data);
+
+ expect(getReviewsForMergeRequest(mrPath)).toStrictEqual({});
+ },
+ );
+
+ it('fetches the reviews for the MR if they exist', () => {
+ localStorage.setItem(storageKey, storedValue);
+
+ expect(getReviewsForMergeRequest(mrPath)).toStrictEqual(reviews);
+ });
+ });
+
+ describe('setReviewsForMergeRequest', () => {
+ it('sets the new value to localStorage', () => {
+ setReviewsForMergeRequest(mrPath, reviews);
+
+ expect(localStorage.setItem).toHaveBeenCalledTimes(1);
+ expect(localStorage.setItem).toHaveBeenCalledWith(storageKey, storedValue);
+ });
+
+ it('returns the new value for chainability', () => {
+ expect(setReviewsForMergeRequest(mrPath, reviews)).toStrictEqual(reviews);
+ });
+ });
+
+ describe('isFileReviewed', () => {
+ it.each`
+ description | diffFile | fileReviews
+ ${'the file does not have an `id`'} | ${{ ...file, id: undefined }} | ${getDefaultReviews()}
+ ${'there are no reviews for the file'} | ${file} | ${{ ...getDefaultReviews(), abc: undefined }}
+ `('returns `false` if $description', ({ diffFile, fileReviews }) => {
+ expect(isFileReviewed(fileReviews, diffFile)).toBe(false);
+ });
+
+ it("returns `true` for a file if it's available in the provided reviews", () => {
+ expect(isFileReviewed(reviews, file)).toBe(true);
+ });
+ });
+
+ describe('reviewable', () => {
+ it.each`
+ response | diffFile | description
+ ${true} | ${file} | ${'has an `.id` and a `.file_identifier_hash`'}
+ ${false} | ${{ file_identifier_hash: 'abc' }} | ${'does not have an `.id`'}
+ ${false} | ${{ ...file, id: undefined }} | ${'has an undefined `.id`'}
+ ${false} | ${{ ...file, id: null }} | ${'has a null `.id`'}
+ ${false} | ${{ ...file, id: 0 }} | ${'has an `.id` set to 0'}
+ ${false} | ${{ ...file, id: false }} | ${'has an `.id` set to false'}
+ ${false} | ${{ id: '123' }} | ${'does not have a `.file_identifier_hash`'}
+ ${false} | ${{ ...file, file_identifier_hash: undefined }} | ${'has an undefined `.file_identifier_hash`'}
+ ${false} | ${{ ...file, file_identifier_hash: null }} | ${'has a null `.file_identifier_hash`'}
+ ${false} | ${{ ...file, file_identifier_hash: 0 }} | ${'has a `.file_identifier_hash` set to 0'}
+ ${false} | ${{ ...file, file_identifier_hash: false }} | ${'has a `.file_identifier_hash` set to false'}
+ `('returns `$response` when the file $description', ({ response, diffFile }) => {
+ expect(reviewable(diffFile)).toBe(response);
+ });
+ });
+
+ describe('markFileReview', () => {
+ it("adds a review when there's nothing that already exists", () => {
+ expect(markFileReview(null, file)).toStrictEqual({ abc: ['123'] });
+ });
+
+ it("overwrites an existing review if it's for the same file (identifier hash)", () => {
+ expect(markFileReview(reviews, file)).toStrictEqual(getDefaultReviews());
+ });
+
+ it('removes a review from the list when `reviewed` is `false`', () => {
+ expect(markFileReview(reviews, file, false)).toStrictEqual({ abc: ['098'] });
+ });
+
+ it('adds a new review if the file ID is new', () => {
+ const updatedFile = { ...file, id: '098' };
+ const allReviews = markFileReview({ abc: ['123'] }, updatedFile);
+
+ expect(allReviews).toStrictEqual(getDefaultReviews());
+ expect(allReviews.abc).toStrictEqual(['123', '098']);
+ });
+
+ it.each`
+ description | diffFile
+ ${'missing an `.id`'} | ${{ file_identifier_hash: 'abc' }}
+ ${'missing a `.file_identifier_hash`'} | ${{ id: '123' }}
+ `("doesn't modify the reviews if the file is $description", ({ diffFile }) => {
+ expect(markFileReview(reviews, diffFile)).toStrictEqual(getDefaultReviews());
+ });
+
+ it('removes the file key if there are no more reviews for it', () => {
+ let updated = markFileReview(reviews, file, false);
+
+ updated = markFileReview(updated, { ...file, id: '098' }, false);
+
+ expect(updated).toStrictEqual({});
+ });
+ });
+});
diff --git a/spec/frontend/diffs/utils/merge_request_spec.js b/spec/frontend/diffs/utils/merge_request_spec.js
new file mode 100644
index 00000000000..8c7b1e1f2a5
--- /dev/null
+++ b/spec/frontend/diffs/utils/merge_request_spec.js
@@ -0,0 +1,31 @@
+import { getDerivedMergeRequestInformation } from '~/diffs/utils/merge_request';
+import { diffMetadata } from '../mock_data/diff_metadata';
+
+describe('Merge Request utilities', () => {
+ const derivedMrInfo = {
+ mrPath: '/gitlab-org/gitlab-test/-/merge_requests/4',
+ userOrGroup: 'gitlab-org',
+ project: 'gitlab-test',
+ id: '4',
+ };
+ const unparseableEndpoint = {
+ mrPath: undefined,
+ userOrGroup: undefined,
+ project: undefined,
+ id: undefined,
+ };
+
+ describe('getDerivedMergeRequestInformation', () => {
+ const endpoint = `${diffMetadata.latest_version_path}.json?searchParam=irrelevant`;
+
+ it.each`
+ argument | response
+ ${{ endpoint }} | ${derivedMrInfo}
+ ${{}} | ${unparseableEndpoint}
+ ${{ endpoint: undefined }} | ${unparseableEndpoint}
+ ${{ endpoint: null }} | ${unparseableEndpoint}
+ `('generates the correct derived results based on $argument', ({ argument, response }) => {
+ expect(getDerivedMergeRequestInformation(argument)).toStrictEqual(response);
+ });
+ });
+});
diff --git a/spec/frontend/diffs/utils/uuids_spec.js b/spec/frontend/diffs/utils/uuids_spec.js
index 79d3ebadd4f..8d0a01e8cbd 100644
--- a/spec/frontend/diffs/utils/uuids_spec.js
+++ b/spec/frontend/diffs/utils/uuids_spec.js
@@ -32,7 +32,7 @@ describe('UUIDs Util', () => {
const ids = uuids({ count: 11 });
expect(ids.length).toEqual(11);
- expect(ids.every(id => UUIDV4.test(id))).toEqual(true);
+ expect(ids.every((id) => UUIDV4.test(id))).toEqual(true);
});
it.each`
diff --git a/spec/frontend/dirty_submit/dirty_submit_collection_spec.js b/spec/frontend/dirty_submit/dirty_submit_collection_spec.js
index 170d581be23..465391633e6 100644
--- a/spec/frontend/dirty_submit/dirty_submit_collection_spec.js
+++ b/spec/frontend/dirty_submit/dirty_submit_collection_spec.js
@@ -1,15 +1,15 @@
import DirtySubmitCollection from '~/dirty_submit/dirty_submit_collection';
import { setInputValue, createForm } from './helper';
-jest.mock('lodash/throttle', () => jest.fn(fn => fn));
+jest.mock('lodash/throttle', () => jest.fn((fn) => fn));
describe('DirtySubmitCollection', () => {
const testElementsCollection = [createForm(), createForm()];
- const forms = testElementsCollection.map(testElements => testElements.form);
+ const forms = testElementsCollection.map((testElements) => testElements.form);
new DirtySubmitCollection(forms); // eslint-disable-line no-new
- it.each(testElementsCollection)('disables submits until there are changes', testElements => {
+ it.each(testElementsCollection)('disables submits until there are changes', (testElements) => {
const { input, submit } = testElements;
const originalValue = input.value;
diff --git a/spec/frontend/dirty_submit/dirty_submit_form_spec.js b/spec/frontend/dirty_submit/dirty_submit_form_spec.js
index d7f690df1f3..cfcf1be609e 100644
--- a/spec/frontend/dirty_submit/dirty_submit_form_spec.js
+++ b/spec/frontend/dirty_submit/dirty_submit_form_spec.js
@@ -2,7 +2,7 @@ import { range as rge, throttle } from 'lodash';
import DirtySubmitForm from '~/dirty_submit/dirty_submit_form';
import { getInputValue, setInputValue, createForm } from './helper';
-jest.mock('lodash/throttle', () => jest.fn(fn => fn));
+jest.mock('lodash/throttle', () => jest.fn((fn) => fn));
const lodash = jest.requireActual('lodash');
function expectToToggleDisableOnDirtyUpdate(submit, input) {
@@ -66,7 +66,7 @@ describe('DirtySubmitForm', () => {
const { form, input } = createForm();
const updateDirtyInputSpy = jest.spyOn(new DirtySubmitForm(form), 'updateDirtyInput');
- rge(10).forEach(i => {
+ rge(10).forEach((i) => {
setInputValue(input, `change ${i}`, false);
});
@@ -78,13 +78,13 @@ describe('DirtySubmitForm', () => {
it('does not throttle updates when rapid changes are made to different form elements', () => {
const form = document.createElement('form');
const range = rge(10);
- range.forEach(i => {
+ range.forEach((i) => {
form.innerHTML += `<input type="text" name="input-${i}" class="js-input-${i}"/>`;
});
const updateDirtyInputSpy = jest.spyOn(new DirtySubmitForm(form), 'updateDirtyInput');
- range.forEach(i => {
+ range.forEach((i) => {
const input = form.querySelector(`.js-input-${i}`);
setInputValue(input, `change`, false);
});
diff --git a/spec/frontend/droplab/drop_down_spec.js b/spec/frontend/droplab/drop_down_spec.js
index d33d6bb70f1..c1fbda6f211 100644
--- a/spec/frontend/droplab/drop_down_spec.js
+++ b/spec/frontend/droplab/drop_down_spec.js
@@ -151,7 +151,7 @@ describe('DropLab DropDown', () => {
};
testContext.dummyListItem = document.createElement('li');
- jest.spyOn(testContext.event.target, 'closest').mockImplementation(selector => {
+ jest.spyOn(testContext.event.target, 'closest').mockImplementation((selector) => {
if (selector === 'li') {
return testContext.dummyListItem;
}
@@ -402,7 +402,7 @@ describe('DropLab DropDown', () => {
beforeEach(() => {
testContext.renderableList = {};
testContext.list = {
- querySelector: q => {
+ querySelector: (q) => {
if (q === '.filter-dropdown-loading') {
return false;
}
@@ -414,7 +414,7 @@ describe('DropLab DropDown', () => {
testContext.data = [0, 1];
testContext.customEvent = {};
- jest.spyOn(testContext.dropdown, 'renderChildren').mockImplementation(data => data);
+ jest.spyOn(testContext.dropdown, 'renderChildren').mockImplementation((data) => data);
jest.spyOn(testContext.list, 'dispatchEvent').mockImplementation(() => {});
jest.spyOn(testContext.data, 'map');
jest.spyOn(window, 'CustomEvent').mockReturnValue(testContext.customEvent);
@@ -465,7 +465,7 @@ describe('DropLab DropDown', () => {
testContext.dropdown = { renderChildren: () => {}, list: testContext.list };
testContext.data = [0, 1];
- jest.spyOn(testContext.dropdown, 'renderChildren').mockImplementation(data => data);
+ jest.spyOn(testContext.dropdown, 'renderChildren').mockImplementation((data) => data);
jest.spyOn(testContext.list, 'querySelector').mockImplementation(() => {});
jest.spyOn(testContext.data, 'map');
diff --git a/spec/frontend/droplab/plugins/ajax_filter_spec.js b/spec/frontend/droplab/plugins/ajax_filter_spec.js
index 5ec0400cbc5..d91884a60e5 100644
--- a/spec/frontend/droplab/plugins/ajax_filter_spec.js
+++ b/spec/frontend/droplab/plugins/ajax_filter_spec.js
@@ -28,7 +28,7 @@ describe('AjaxFilter', () => {
let ajaxSpy;
beforeEach(() => {
- jest.spyOn(AjaxCache, 'retrieve').mockImplementation(url => ajaxSpy(url));
+ jest.spyOn(AjaxCache, 'retrieve').mockImplementation((url) => ajaxSpy(url));
jest.spyOn(AjaxFilter, '_loadData').mockImplementation(() => {});
dummyConfig.onLoadingFinished = jest.fn();
@@ -38,8 +38,8 @@ describe('AjaxFilter', () => {
dummyList.list.appendChild(dynamicList);
});
- it('calls onLoadingFinished after loading data', done => {
- ajaxSpy = url => {
+ it('calls onLoadingFinished after loading data', (done) => {
+ ajaxSpy = (url) => {
expect(url).toBe('dummy endpoint?dummy search key=');
return Promise.resolve(dummyData);
};
@@ -52,16 +52,16 @@ describe('AjaxFilter', () => {
.catch(done.fail);
});
- it('does not call onLoadingFinished if Ajax call fails', done => {
+ it('does not call onLoadingFinished if Ajax call fails', (done) => {
const dummyError = new Error('My dummy is sick! :-(');
- ajaxSpy = url => {
+ ajaxSpy = (url) => {
expect(url).toBe('dummy endpoint?dummy search key=');
return Promise.reject(dummyError);
};
AjaxFilter.trigger()
.then(done.fail)
- .catch(error => {
+ .catch((error) => {
expect(error).toBe(dummyError);
expect(dummyConfig.onLoadingFinished.mock.calls.length).toBe(0);
})
diff --git a/spec/frontend/editor/editor_ci_schema_ext_spec.js b/spec/frontend/editor/editor_ci_schema_ext_spec.js
new file mode 100644
index 00000000000..9dd88aad7e6
--- /dev/null
+++ b/spec/frontend/editor/editor_ci_schema_ext_spec.js
@@ -0,0 +1,107 @@
+import { languages } from 'monaco-editor';
+import { TEST_HOST } from 'helpers/test_constants';
+import EditorLite from '~/editor/editor_lite';
+import { CiSchemaExtension } from '~/editor/extensions/editor_ci_schema_ext';
+import { EXTENSION_CI_SCHEMA_FILE_NAME_MATCH } from '~/editor/constants';
+
+describe('~/editor/extensions/editor_ci_schema_ext', () => {
+ const defaultBlobPath = '.gitlab-ci.yml';
+
+ let editor;
+ let instance;
+ let editorEl;
+ let originalGitlabUrl;
+
+ const createMockEditor = ({ blobPath = defaultBlobPath } = {}) => {
+ setFixtures('<div id="editor"></div>');
+ editorEl = document.getElementById('editor');
+ editor = new EditorLite();
+ instance = editor.createInstance({
+ el: editorEl,
+ blobPath,
+ blobContent: '',
+ });
+ instance.use(new CiSchemaExtension());
+ };
+
+ beforeAll(() => {
+ originalGitlabUrl = gon.gitlab_url;
+ gon.gitlab_url = TEST_HOST;
+ });
+
+ afterAll(() => {
+ gon.gitlab_url = originalGitlabUrl;
+ });
+
+ beforeEach(() => {
+ createMockEditor();
+ });
+
+ afterEach(() => {
+ instance.dispose();
+ editorEl.remove();
+ });
+
+ describe('registerCiSchema', () => {
+ beforeEach(() => {
+ jest.spyOn(languages.yaml.yamlDefaults, 'setDiagnosticsOptions');
+ });
+
+ describe('register validations options with monaco for yaml language', () => {
+ const mockProjectNamespace = 'namespace1';
+ const mockProjectPath = 'project1';
+
+ const getConfiguredYmlSchema = () => {
+ return languages.yaml.yamlDefaults.setDiagnosticsOptions.mock.calls[0][0].schemas[0];
+ };
+
+ it('with expected basic validation configuration', () => {
+ instance.registerCiSchema({
+ projectNamespace: mockProjectNamespace,
+ projectPath: mockProjectPath,
+ });
+
+ const expectedOptions = {
+ validate: true,
+ enableSchemaRequest: true,
+ hover: true,
+ completion: true,
+ };
+
+ expect(languages.yaml.yamlDefaults.setDiagnosticsOptions).toHaveBeenCalledTimes(1);
+ expect(languages.yaml.yamlDefaults.setDiagnosticsOptions).toHaveBeenCalledWith(
+ expect.objectContaining(expectedOptions),
+ );
+ });
+
+ it('with a schema uri that contains project and ref', () => {
+ const mockRef = 'AABBCCDD';
+
+ instance.registerCiSchema({
+ projectNamespace: mockProjectNamespace,
+ projectPath: mockProjectPath,
+ ref: mockRef,
+ });
+
+ expect(getConfiguredYmlSchema()).toEqual({
+ uri: `${TEST_HOST}/${mockProjectNamespace}/${mockProjectPath}/-/schema/${mockRef}/${EXTENSION_CI_SCHEMA_FILE_NAME_MATCH}`,
+ fileMatch: [defaultBlobPath],
+ });
+ });
+
+ it('with an alternative file name match', () => {
+ createMockEditor({ blobPath: 'dir1/dir2/another-ci-filename.yml' });
+
+ instance.registerCiSchema({
+ projectNamespace: mockProjectNamespace,
+ projectPath: mockProjectPath,
+ });
+
+ expect(getConfiguredYmlSchema()).toEqual({
+ uri: `${TEST_HOST}/${mockProjectNamespace}/${mockProjectPath}/-/schema/master/${EXTENSION_CI_SCHEMA_FILE_NAME_MATCH}`,
+ fileMatch: ['another-ci-filename.yml'],
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/editor/editor_lite_extension_base_spec.js b/spec/frontend/editor/editor_lite_extension_base_spec.js
index ff53640b096..5490e9dc7b5 100644
--- a/spec/frontend/editor/editor_lite_extension_base_spec.js
+++ b/spec/frontend/editor/editor_lite_extension_base_spec.js
@@ -1,5 +1,5 @@
import { ERROR_INSTANCE_REQUIRED_FOR_EXTENSION } from '~/editor/constants';
-import { EditorLiteExtension } from '~/editor/editor_lite_extension_base';
+import { EditorLiteExtension } from '~/editor/extensions/editor_lite_extension_base';
describe('The basis for an Editor Lite extension', () => {
let ext;
@@ -16,7 +16,7 @@ describe('The basis for an Editor Lite extension', () => {
if (instance) {
if (options) {
- Object.entries(options).forEach(prop => {
+ Object.entries(options).forEach((prop) => {
expect(instance[prop]).toBeUndefined();
});
// Both instance and options are passed
diff --git a/spec/frontend/editor/editor_lite_spec.js b/spec/frontend/editor/editor_lite_spec.js
index 3a7680f6d17..c3099997287 100644
--- a/spec/frontend/editor/editor_lite_spec.js
+++ b/spec/frontend/editor/editor_lite_spec.js
@@ -2,7 +2,7 @@
import { editor as monacoEditor, languages as monacoLanguages, Uri } from 'monaco-editor';
import waitForPromises from 'helpers/wait_for_promises';
import Editor from '~/editor/editor_lite';
-import { EditorLiteExtension } from '~/editor/editor_lite_extension_base';
+import { EditorLiteExtension } from '~/editor/extensions/editor_lite_extension_base';
import { DEFAULT_THEME, themes } from '~/ide/lib/themes';
import { EDITOR_LITE_INSTANCE_ERROR_NO_EL, URI_PREFIX } from '~/editor/constants';
@@ -324,13 +324,13 @@ describe('Base editor', () => {
${'multiple simple objects'} | ${[FooObjExt, BarObjExt]} | ${['foo', 'bar']} | ${[fooRes, barRes]}
${'combination of ES6 classes and objects'} | ${[AlphaExt, BarObjExt]} | ${['alpha', 'bar']} | ${[alphaRes, barRes]}
`('is extensible with $type', ({ extensions, methods, expectations } = {}) => {
- methods.forEach(method => {
+ methods.forEach((method) => {
expect(instance[method]).toBeUndefined();
});
instance.use(extensions);
- methods.forEach(method => {
+ methods.forEach((method) => {
expect(instance[method]).toBeDefined();
});
@@ -359,7 +359,7 @@ describe('Base editor', () => {
it.each([WithStaticMethod, WithStaticMethodExtended])(
'properly resolves data for an extension with private data',
- ExtClass => {
+ (ExtClass) => {
const base = 1;
expect(instance.base).toBeUndefined();
expect(instance.boo).toBeUndefined();
@@ -408,7 +408,7 @@ describe('Base editor', () => {
};
beforeEach(() => {
- editorExtensionSpy = jest.spyOn(Editor, 'pushToImportsArray').mockImplementation(arr => {
+ editorExtensionSpy = jest.spyOn(Editor, 'pushToImportsArray').mockImplementation((arr) => {
arr.push(
Promise.resolve({
default: {},
@@ -524,7 +524,7 @@ describe('Base editor', () => {
});
it('sets default syntax highlighting theme', () => {
- const expectedTheme = themes.find(t => t.name === DEFAULT_THEME);
+ const expectedTheme = themes.find((t) => t.name === DEFAULT_THEME);
editor = new Editor();
@@ -533,7 +533,7 @@ describe('Base editor', () => {
});
it('sets correct theme if it is set in users preferences', () => {
- const expectedTheme = themes.find(t => t.name !== DEFAULT_THEME);
+ const expectedTheme = themes.find((t) => t.name !== DEFAULT_THEME);
expect(expectedTheme.name).not.toBe(DEFAULT_THEME);
diff --git a/spec/frontend/editor/editor_markdown_ext_spec.js b/spec/frontend/editor/editor_markdown_ext_spec.js
index b432d4d66ad..3f64dcfd7a0 100644
--- a/spec/frontend/editor/editor_markdown_ext_spec.js
+++ b/spec/frontend/editor/editor_markdown_ext_spec.js
@@ -1,6 +1,6 @@
import { Range, Position } from 'monaco-editor';
import EditorLite from '~/editor/editor_lite';
-import { EditorMarkdownExtension } from '~/editor/editor_markdown_ext';
+import { EditorMarkdownExtension } from '~/editor/extensions/editor_markdown_ext';
describe('Markdown Extension for Editor Lite', () => {
let editor;
@@ -114,7 +114,7 @@ describe('Markdown Extension for Editor Lite', () => {
});
describe('moveCursor', () => {
- const setPosition = endCol => {
+ const setPosition = (endCol) => {
const currentPos = new Position(2, endCol);
instance.setPosition(currentPos);
};
diff --git a/spec/frontend/emoji/emoji_spec.js b/spec/frontend/emoji/emoji_spec.js
index f528313ef02..feec445bc8d 100644
--- a/spec/frontend/emoji/emoji_spec.js
+++ b/spec/frontend/emoji/emoji_spec.js
@@ -369,11 +369,11 @@ describe('gl_emoji', () => {
describe('searchEmoji', () => {
const { atom, grey_question } = emojiFixtureMap;
const search = (query, opts) => searchEmoji(query, opts).map(({ name }) => name);
- const mangle = str => str.slice(0, 1) + str.slice(-1);
- const partial = str => str.slice(0, 2);
+ const mangle = (str) => str.slice(0, 1) + str.slice(-1);
+ const partial = (str) => str.slice(0, 2);
describe('with default options', () => {
- const subject = query => search(query);
+ const subject = (query) => search(query);
describeEmojiFields('with $field', ({ accessor }) => {
it(`should match by lower case: ${accessor(atom)}`, () => {
@@ -403,7 +403,7 @@ describe('gl_emoji', () => {
});
describe('with fuzzy match', () => {
- const subject = query => search(query, { match: 'fuzzy' });
+ const subject = (query) => search(query, { match: 'fuzzy' });
describeEmojiFields('with $field', ({ accessor }) => {
it(`should match by lower case: ${accessor(atom)}`, () => {
@@ -421,7 +421,7 @@ describe('gl_emoji', () => {
});
describe('with contains match', () => {
- const subject = query => search(query, { match: 'contains' });
+ const subject = (query) => search(query, { match: 'contains' });
describeEmojiFields('with $field', ({ accessor }) => {
it(`should match by lower case: ${accessor(atom)}`, () => {
@@ -443,7 +443,7 @@ describe('gl_emoji', () => {
});
describe('with fallback', () => {
- const subject = query => search(query, { fallback: true });
+ const subject = (query) => search(query, { fallback: true });
it.each`
query
@@ -454,7 +454,7 @@ describe('gl_emoji', () => {
});
describe('with name and alias fields', () => {
- const subject = query => search(query, { fields: ['name', 'alias'] });
+ const subject = (query) => search(query, { fields: ['name', 'alias'] });
it(`should match by name: ${atom.name}`, () => {
expect(subject(atom.name)).toContain(atom.name);
diff --git a/spec/frontend/environment.js b/spec/frontend/environment.js
index 733bf4378eb..c055702d832 100644
--- a/spec/frontend/environment.js
+++ b/spec/frontend/environment.js
@@ -3,7 +3,7 @@
const path = require('path');
const { ErrorWithStack } = require('jest-util');
const JSDOMEnvironment = require('jest-environment-jsdom');
-const { TEST_HOST } = require('./helpers/test_constants');
+const { TEST_HOST } = require('./__helpers__/test_constants');
const ROOT_PATH = path.resolve(__dirname, '../..');
@@ -37,7 +37,7 @@ class CustomEnvironment extends JSDOMEnvironment {
this.rejectedPromises = [];
- this.global.promiseRejectionHandler = error => {
+ this.global.promiseRejectionHandler = (error) => {
this.rejectedPromises.push(error);
};
diff --git a/spec/frontend/environments/canary_ingress_spec.js b/spec/frontend/environments/canary_ingress_spec.js
new file mode 100644
index 00000000000..3dd67de1369
--- /dev/null
+++ b/spec/frontend/environments/canary_ingress_spec.js
@@ -0,0 +1,102 @@
+import { mount } from '@vue/test-utils';
+import { GlDropdownItem } from '@gitlab/ui';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import { CANARY_UPDATE_MODAL } from '~/environments/constants';
+import CanaryIngress from '~/environments/components/canary_ingress.vue';
+
+describe('/environments/components/canary_ingress.vue', () => {
+ let wrapper;
+
+ const setWeightTo = (weightWrapper, x) =>
+ weightWrapper
+ .findAll(GlDropdownItem)
+ .at(x / 5)
+ .vm.$emit('click');
+
+ const createComponent = () => {
+ wrapper = mount(CanaryIngress, {
+ propsData: {
+ canaryIngress: {
+ canary_weight: 60,
+ },
+ },
+ directives: {
+ GlModal: createMockDirective(),
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ }
+
+ wrapper = null;
+ });
+
+ describe('stable weight', () => {
+ let stableWeightDropdown;
+
+ beforeEach(() => {
+ stableWeightDropdown = wrapper.find('[data-testid="stable-weight"]');
+ });
+
+ it('displays the current stable weight', () => {
+ expect(stableWeightDropdown.props('text')).toBe('40');
+ });
+
+ it('emits a change with the new canary weight', () => {
+ setWeightTo(stableWeightDropdown, 15);
+
+ expect(wrapper.emitted('change')).toContainEqual([85]);
+ });
+
+ it('lists options from 0 to 100 in increments of 5', () => {
+ const options = stableWeightDropdown.findAll(GlDropdownItem);
+ expect(options).toHaveLength(21);
+ options.wrappers.forEach((w, i) => expect(w.text()).toBe((i * 5).toString()));
+ });
+
+ it('is set to open the change modal', () => {
+ stableWeightDropdown
+ .findAll(GlDropdownItem)
+ .wrappers.forEach((w) =>
+ expect(getBinding(w.element, 'gl-modal')).toMatchObject({ value: CANARY_UPDATE_MODAL }),
+ );
+ });
+ });
+
+ describe('canary weight', () => {
+ let canaryWeightDropdown;
+
+ beforeEach(() => {
+ canaryWeightDropdown = wrapper.find('[data-testid="canary-weight"]');
+ });
+
+ it('displays the current canary weight', () => {
+ expect(canaryWeightDropdown.props('text')).toBe('60');
+ });
+
+ it('emits a change with the new canary weight', () => {
+ setWeightTo(canaryWeightDropdown, 15);
+
+ expect(wrapper.emitted('change')).toContainEqual([15]);
+ });
+
+ it('lists options from 0 to 100 in increments of 5', () => {
+ canaryWeightDropdown
+ .findAll(GlDropdownItem)
+ .wrappers.forEach((w, i) => expect(w.text()).toBe((i * 5).toString()));
+ });
+
+ it('is set to open the change modal', () => {
+ canaryWeightDropdown
+ .findAll(GlDropdownItem)
+ .wrappers.forEach((w) =>
+ expect(getBinding(w.element, 'gl-modal')).toMatchObject({ value: CANARY_UPDATE_MODAL }),
+ );
+ });
+ });
+});
diff --git a/spec/frontend/environments/canary_update_modal_spec.js b/spec/frontend/environments/canary_update_modal_spec.js
new file mode 100644
index 00000000000..d0b97cf2eda
--- /dev/null
+++ b/spec/frontend/environments/canary_update_modal_spec.js
@@ -0,0 +1,126 @@
+import { mount } from '@vue/test-utils';
+import { GlAlert, GlModal } from '@gitlab/ui';
+import waitForPromises from 'helpers/wait_for_promises';
+import CanaryUpdateModal from '~/environments/components/canary_update_modal.vue';
+import updateCanaryIngress from '~/environments/graphql/mutations/update_canary_ingress.mutation.graphql';
+
+describe('/environments/components/canary_update_modal.vue', () => {
+ let wrapper;
+ let modal;
+ let mutate;
+
+ const findAlert = () => wrapper.find(GlAlert);
+
+ const createComponent = () => {
+ mutate = jest.fn().mockResolvedValue();
+ wrapper = mount(CanaryUpdateModal, {
+ propsData: {
+ environment: {
+ name: 'staging',
+ global_id: 'gid://environments/staging',
+ },
+ weight: 60,
+ visible: true,
+ },
+ mocks: {
+ $apollo: { mutate },
+ },
+ });
+ modal = wrapper.find(GlModal);
+ };
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ }
+
+ wrapper = null;
+ });
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('should bind the modal props', () => {
+ expect(modal.props()).toMatchObject({
+ modalId: 'confirm-canary-change',
+ actionPrimary: {
+ text: 'Change ratio',
+ attributes: [{ variant: 'info' }],
+ },
+ actionCancel: { text: 'Cancel' },
+ });
+ });
+
+ it('should display the new weights', () => {
+ expect(modal.text()).toContain('Stable: 40');
+ expect(modal.text()).toContain('Canary: 60');
+ });
+
+ it('should display the affected environment', () => {
+ expect(modal.text()).toContain(
+ 'You are changing the ratio of the canary rollout for staging compared to the stable deployment to:',
+ );
+ });
+
+ it('should update the weight on primary action', () => {
+ modal.vm.$emit('primary');
+
+ expect(mutate).toHaveBeenCalledWith({
+ mutation: updateCanaryIngress,
+ variables: {
+ input: {
+ id: 'gid://environments/staging',
+ weight: 60,
+ },
+ },
+ });
+ });
+
+ it('should do nothing on cancel', () => {
+ modal.vm.$emit('secondary');
+ expect(mutate).not.toHaveBeenCalled();
+ });
+
+ it('should not display an error if there was not one', async () => {
+ mutate.mockResolvedValue({ data: { environmentsCanaryIngressUpdate: { errors: [] } } });
+ modal.vm.$emit('primary');
+
+ await wrapper.vm.$nextTick();
+
+ expect(findAlert().exists()).toBe(false);
+ });
+
+ it('should display an error if there was one', async () => {
+ mutate.mockResolvedValue({ data: { environmentsCanaryIngressUpdate: { errors: ['error'] } } });
+ modal.vm.$emit('primary');
+
+ await wrapper.vm.$nextTick();
+
+ expect(findAlert().text()).toBe('error');
+ });
+
+ it('should display a generic error if there was a top-level one', async () => {
+ mutate.mockRejectedValue();
+ modal.vm.$emit('primary');
+
+ await waitForPromises();
+ await wrapper.vm.$nextTick();
+
+ expect(findAlert().text()).toBe('Something went wrong. Please try again later');
+ });
+
+ it('hides the alert on dismiss', async () => {
+ mutate.mockResolvedValue({ data: { environmentsCanaryIngressUpdate: { errors: ['error'] } } });
+ modal.vm.$emit('primary');
+
+ await wrapper.vm.$nextTick();
+
+ const alert = findAlert();
+ alert.vm.$emit('dismiss');
+
+ await wrapper.vm.$nextTick();
+
+ expect(alert.exists()).toBe(false);
+ });
+});
diff --git a/spec/frontend/environments/deploy_board_component_spec.js b/spec/frontend/environments/deploy_board_component_spec.js
new file mode 100644
index 00000000000..dbef03f99d8
--- /dev/null
+++ b/spec/frontend/environments/deploy_board_component_spec.js
@@ -0,0 +1,139 @@
+import { GlTooltip, GlIcon, GlLoadingIcon } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import Vue from 'vue';
+import DeployBoard from '~/environments/components/deploy_board.vue';
+import CanaryIngress from '~/environments/components/canary_ingress.vue';
+import { deployBoardMockData, environment } from './mock_data';
+
+const logsPath = `gitlab-org/gitlab-test/-/logs?environment_name=${environment.name}`;
+
+describe('Deploy Board', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) =>
+ mount(Vue.extend(DeployBoard), {
+ provide: { glFeatures: { canaryIngressWeightControl: true } },
+ propsData: {
+ deployBoardData: deployBoardMockData,
+ isLoading: false,
+ isEmpty: false,
+ logsPath,
+ ...props,
+ },
+ });
+
+ describe('with valid data', () => {
+ beforeEach((done) => {
+ wrapper = createComponent();
+ wrapper.vm.$nextTick(done);
+ });
+
+ it('should render percentage with completion value provided', () => {
+ expect(wrapper.vm.$refs.percentage.innerText).toEqual(`${deployBoardMockData.completion}%`);
+ });
+
+ it('should render total instance count', () => {
+ const renderedTotal = wrapper.find('.deploy-board-instances-text');
+ const actualTotal = deployBoardMockData.instances.length;
+ const output = `${actualTotal > 1 ? 'Instances' : 'Instance'} (${actualTotal})`;
+
+ expect(renderedTotal.text()).toEqual(output);
+ });
+
+ it('should render all instances', () => {
+ const instances = wrapper.findAll('.deploy-board-instances-container a');
+
+ expect(instances).toHaveLength(deployBoardMockData.instances.length);
+ expect(
+ instances.at(1).classes(`deployment-instance-${deployBoardMockData.instances[2].status}`),
+ ).toBe(true);
+ });
+
+ it('should render an abort and a rollback button with the provided url', () => {
+ const buttons = wrapper.findAll('.deploy-board-actions a');
+
+ expect(buttons.at(0).attributes('href')).toEqual(deployBoardMockData.rollback_url);
+ expect(buttons.at(1).attributes('href')).toEqual(deployBoardMockData.abort_url);
+ });
+
+ it('sets up a tooltip for the legend', () => {
+ const iconSpan = wrapper.find('[data-testid="legend-tooltip-target"]');
+ const tooltip = wrapper.find(GlTooltip);
+ const icon = iconSpan.find(GlIcon);
+
+ expect(tooltip.props('target')()).toBe(iconSpan.element);
+ expect(icon.props('name')).toBe('question');
+ });
+
+ it('renders the canary weight selector', () => {
+ const canary = wrapper.find(CanaryIngress);
+ expect(canary.exists()).toBe(true);
+ expect(canary.props('canaryIngress')).toEqual({ canary_weight: 50 });
+ });
+ });
+
+ describe('with empty state', () => {
+ beforeEach((done) => {
+ wrapper = createComponent({
+ deployBoardData: {},
+ isLoading: false,
+ isEmpty: true,
+ logsPath,
+ });
+ wrapper.vm.$nextTick(done);
+ });
+
+ it('should render the empty state', () => {
+ expect(wrapper.find('.deploy-board-empty-state-svg svg')).toBeDefined();
+ expect(
+ wrapper.find('.deploy-board-empty-state-text .deploy-board-empty-state-title').text(),
+ ).toContain('Kubernetes deployment not found');
+ });
+ });
+
+ describe('with loading state', () => {
+ beforeEach((done) => {
+ wrapper = createComponent({
+ deployBoardData: {},
+ isLoading: true,
+ isEmpty: false,
+ logsPath,
+ });
+ wrapper.vm.$nextTick(done);
+ });
+
+ it('should render loading spinner', () => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ });
+ });
+
+ describe('has legend component', () => {
+ let statuses = [];
+ beforeEach((done) => {
+ wrapper = createComponent({
+ isLoading: false,
+ isEmpty: false,
+ logsPath: environment.log_path,
+ deployBoardData: deployBoardMockData,
+ });
+ ({ statuses } = wrapper.vm);
+ wrapper.vm.$nextTick(done);
+ });
+
+ it('with all the possible statuses', () => {
+ const deployBoardLegend = wrapper.find('.deploy-board-legend');
+
+ expect(deployBoardLegend).toBeDefined();
+ expect(deployBoardLegend.findAll('a')).toHaveLength(Object.keys(statuses).length);
+ });
+
+ Object.keys(statuses).forEach((item) => {
+ it(`with ${item} text next to deployment instance icon`, () => {
+ expect(wrapper.find(`.deployment-instance-${item}`)).toBeDefined();
+ expect(wrapper.find(`.deployment-instance-${item} + .legend-text`).text()).toBe(
+ statuses[item].text,
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/environments/environment_actions_spec.js b/spec/frontend/environments/environment_actions_spec.js
index cc5153d6eba..875a01c07ea 100644
--- a/spec/frontend/environments/environment_actions_spec.js
+++ b/spec/frontend/environments/environment_actions_spec.js
@@ -38,9 +38,9 @@ describe('EnvironmentActions Component', () => {
return createComponent({ actions: [scheduledJobAction, expiredJobAction] }, opts);
}
- const findDropdownItem = action => {
+ const findDropdownItem = (action) => {
const buttons = wrapper.findAll(GlDropdownItem);
- return buttons.filter(button => button.text().startsWith(action.name)).at(0);
+ return buttons.filter((button) => button.text().startsWith(action.name)).at(0);
};
afterEach(() => {
diff --git a/spec/frontend/environments/environment_monitoring_spec.js b/spec/frontend/environments/environment_monitoring_spec.js
index a73f49f1047..bbd49c04fb6 100644
--- a/spec/frontend/environments/environment_monitoring_spec.js
+++ b/spec/frontend/environments/environment_monitoring_spec.js
@@ -16,7 +16,8 @@ describe('Monitoring Component', () => {
};
const findButtons = () => wrapper.findAll(GlButton);
- const findButtonsByIcon = icon => findButtons().filter(button => button.props('icon') === icon);
+ const findButtonsByIcon = (icon) =>
+ findButtons().filter((button) => button.props('icon') === icon);
beforeEach(() => {
createWrapper();
diff --git a/spec/frontend/environments/environment_table_spec.js b/spec/frontend/environments/environment_table_spec.js
index b8ef40e2568..daef35bcf99 100644
--- a/spec/frontend/environments/environment_table_spec.js
+++ b/spec/frontend/environments/environment_table_spec.js
@@ -1,10 +1,11 @@
import { mount } from '@vue/test-utils';
import EnvironmentTable from '~/environments/components/environments_table.vue';
-import { folder } from './mock_data';
+import eventHub from '~/environments/event_hub';
+import DeployBoard from '~/environments/components/deploy_board.vue';
+import CanaryUpdateModal from '~/environments/components/canary_update_modal.vue';
+import { folder, deployBoardMockData } from './mock_data';
const eeOnlyProps = {
- canaryDeploymentFeatureId: 'canary_deployment',
- showCanaryDeploymentCallout: true,
userCalloutsPath: '/callouts',
lockPromotionSvgPath: '/assets/illustrations/lock-promotion.svg',
helpCanaryDeploymentsPath: 'help/canary-deployments',
@@ -37,10 +38,127 @@ describe('Environment table', () => {
wrapper.destroy();
});
- it('Should render a table', () => {
+ it('Should render a table', async () => {
+ const mockItem = {
+ name: 'review',
+ folderName: 'review',
+ size: 3,
+ isFolder: true,
+ environment_path: 'url',
+ };
+
+ await factory({
+ propsData: {
+ environments: [mockItem],
+ canReadEnvironment: true,
+ userCalloutsPath: '/callouts',
+ lockPromotionSvgPath: '/assets/illustrations/lock-promotion.svg',
+ helpCanaryDeploymentsPath: 'help/canary-deployments',
+ },
+ });
+
expect(wrapper.classes()).toContain('ci-table');
});
+ it('should render deploy board container when data is provided', async () => {
+ const mockItem = {
+ name: 'review',
+ size: 1,
+ environment_path: 'url',
+ logs_path: 'url',
+ id: 1,
+ hasDeployBoard: true,
+ deployBoardData: deployBoardMockData,
+ isDeployBoardVisible: true,
+ isLoadingDeployBoard: false,
+ isEmptyDeployBoard: false,
+ };
+
+ await factory({
+ propsData: {
+ environments: [mockItem],
+ canCreateDeployment: false,
+ canReadEnvironment: true,
+ userCalloutsPath: '/callouts',
+ lockPromotionSvgPath: '/assets/illustrations/lock-promotion.svg',
+ helpCanaryDeploymentsPath: 'help/canary-deployments',
+ },
+ });
+
+ expect(wrapper.find('.js-deploy-board-row').exists()).toBe(true);
+ expect(wrapper.find('.deploy-board-icon').exists()).toBe(true);
+ });
+
+ it('should toggle deploy board visibility when arrow is clicked', (done) => {
+ const mockItem = {
+ name: 'review',
+ size: 1,
+ environment_path: 'url',
+ id: 1,
+ hasDeployBoard: true,
+ deployBoardData: {
+ instances: [{ status: 'ready', tooltip: 'foo' }],
+ abort_url: 'url',
+ rollback_url: 'url',
+ completion: 100,
+ is_completed: true,
+ canary_ingress: { canary_weight: 60 },
+ },
+ isDeployBoardVisible: false,
+ };
+
+ eventHub.$on('toggleDeployBoard', (env) => {
+ expect(env.id).toEqual(mockItem.id);
+ done();
+ });
+
+ factory({
+ propsData: {
+ environments: [mockItem],
+ canReadEnvironment: true,
+ userCalloutsPath: '/callouts',
+ lockPromotionSvgPath: '/assets/illustrations/lock-promotion.svg',
+ helpCanaryDeploymentsPath: 'help/canary-deployments',
+ },
+ });
+
+ wrapper.find('.deploy-board-icon').trigger('click');
+ });
+
+ it('should set the environment to change and the weight when a change canary weight event is received', async () => {
+ const mockItem = {
+ name: 'review',
+ size: 1,
+ environment_path: 'url',
+ logs_path: 'url',
+ id: 1,
+ hasDeployBoard: true,
+ deployBoardData: deployBoardMockData,
+ isDeployBoardVisible: true,
+ isLoadingDeployBoard: false,
+ isEmptyDeployBoard: false,
+ };
+
+ await factory({
+ propsData: {
+ environments: [mockItem],
+ canCreateDeployment: false,
+ canReadEnvironment: true,
+ userCalloutsPath: '/callouts',
+ lockPromotionSvgPath: '/assets/illustrations/lock-promotion.svg',
+ helpCanaryDeploymentsPath: 'help/canary-deployments',
+ },
+ });
+
+ wrapper.find(DeployBoard).vm.$emit('changeCanaryWeight', 40);
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find(CanaryUpdateModal).props()).toMatchObject({
+ weight: 40,
+ environment: mockItem,
+ });
+ });
+
describe('sortEnvironments', () => {
it('should sort environments by last updated', () => {
const mockItems = [
@@ -266,7 +384,7 @@ describe('Environment table', () => {
},
});
- expect(wrapper.vm.sortedEnvironments.map(env => env.name)).toEqual([
+ expect(wrapper.vm.sortedEnvironments.map((env) => env.name)).toEqual([
review.name,
staging.name,
production.name,
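A note on the visibility-toggle test above: it asserts through a shared event bus rather than through emitted component events, registering the listener before triggering the click so the `done` callback ends the test. A condensed sketch of that pattern, assuming `~/environments/event_hub` is a plain Vue instance used as an event bus (treat the details as illustrative):

import Vue from 'vue';

// Stand-in for ~/environments/event_hub; assumed to be a bare Vue instance.
const eventHub = new Vue();

it('notifies listeners when the deploy board arrow is clicked', (done) => {
  // Register the listener first so the async test resolves once the event arrives.
  eventHub.$on('toggleDeployBoard', (environment) => {
    expect(environment.id).toEqual(1);
    done();
  });

  // In the spec above this is triggered by clicking '.deploy-board-icon'.
  eventHub.$emit('toggleDeployBoard', { id: 1 });
});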
diff --git a/spec/frontend/environments/environment_terminal_button_spec.js b/spec/frontend/environments/environment_terminal_button_spec.js
index 274186fbbd6..2475785a927 100644
--- a/spec/frontend/environments/environment_terminal_button_spec.js
+++ b/spec/frontend/environments/environment_terminal_button_spec.js
@@ -5,7 +5,7 @@ describe('Stop Component', () => {
let wrapper;
const terminalPath = '/path';
- const mountWithProps = props => {
+ const mountWithProps = (props) => {
wrapper = shallowMount(TerminalComponent, {
propsData: props,
});
diff --git a/spec/frontend/environments/environments_app_spec.js b/spec/frontend/environments/environments_app_spec.js
index bb114e31063..d6614e2fd2b 100644
--- a/spec/frontend/environments/environments_app_spec.js
+++ b/spec/frontend/environments/environments_app_spec.js
@@ -5,6 +5,7 @@ import EnableReviewAppModal from '~/environments/components/enable_review_app_mo
import Container from '~/environments/components/container.vue';
import EmptyState from '~/environments/components/empty_state.vue';
import EnvironmentsApp from '~/environments/components/environments_app.vue';
+import DeployBoard from '~/environments/components/deploy_board.vue';
import axios from '~/lib/utils/axios_utils';
import { environment, folder } from './mock_data';
@@ -18,8 +19,6 @@ describe('Environment', () => {
canReadEnvironment: true,
newEnvironmentPath: 'environments/new',
helpPagePath: 'help',
- canaryDeploymentFeatureId: 'canary_deployment',
- showCanaryDeploymentCallout: true,
userCalloutsPath: '/callouts',
lockPromotionSvgPath: '/assets/illustrations/lock-promotion.svg',
helpCanaryDeploymentsPath: 'help/canary-deployments',
@@ -114,6 +113,35 @@ describe('Environment', () => {
expect(wrapper.vm.updateContent).toHaveBeenCalledTimes(0);
});
});
+
+ describe('deploy boards', () => {
+ beforeEach(() => {
+ const deployEnvironment = {
+ ...environment,
+ rollout_status: {
+ status: 'found',
+ },
+ };
+
+ mockRequest(200, {
+ environments: [deployEnvironment],
+ stopped_count: 1,
+ available_count: 0,
+ });
+
+ return createWrapper();
+ });
+
+ it('should render deploy boards', () => {
+ expect(wrapper.find(DeployBoard).exists()).toBe(true);
+ });
+
+ it('should render arrow to open deploy boards', () => {
+ expect(
+ wrapper.find('.deploy-board-icon [data-testid="chevron-down-icon"]').exists(),
+ ).toBe(true);
+ });
+ });
});
});
diff --git a/spec/frontend/environments/environments_folder_view_spec.js b/spec/frontend/environments/environments_folder_view_spec.js
new file mode 100644
index 00000000000..e4661d27872
--- /dev/null
+++ b/spec/frontend/environments/environments_folder_view_spec.js
@@ -0,0 +1,59 @@
+import { mount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import EnvironmentsFolderViewComponent from '~/environments/folder/environments_folder_view.vue';
+import axios from '~/lib/utils/axios_utils';
+import { environmentsList } from './mock_data';
+
+describe('Environments Folder View', () => {
+ let mock;
+ let wrapper;
+
+ const mockData = {
+ endpoint: 'environments.json',
+ folderName: 'review',
+ canReadEnvironment: true,
+ cssContainerClass: 'container',
+ userCalloutsPath: '/callouts',
+ lockPromotionSvgPath: '/assets/illustrations/lock-promotion.svg',
+ helpCanaryDeploymentsPath: 'help/canary-deployments',
+ };
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ wrapper.destroy();
+ });
+
+ describe('successful request', () => {
+ beforeEach(() => {
+ mock.onGet(mockData.endpoint).reply(
+ 200,
+ {
+ environments: environmentsList,
+ stopped_count: 1,
+ available_count: 0,
+ },
+ {
+ 'X-nExt-pAge': '2',
+ 'x-page': '1',
+ 'X-Per-Page': '2',
+ 'X-Prev-Page': '',
+ 'X-TOTAL': '20',
+ 'X-Total-Pages': '10',
+ },
+ );
+
+ wrapper = mount(EnvironmentsFolderViewComponent, { propsData: mockData });
+ return axios.waitForAll();
+ });
+
+ describe('deploy boards', () => {
+ it('should render arrow to open deploy boards', () => {
+ expect(wrapper.find('.folder-icon[data-testid="chevron-right-icon"]').exists()).toBe(true);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/environments/environments_store_spec.js b/spec/frontend/environments/environments_store_spec.js
index 8abdbcbbe54..4a07281353f 100644
--- a/spec/frontend/environments/environments_store_spec.js
+++ b/spec/frontend/environments/environments_store_spec.js
@@ -1,5 +1,5 @@
import Store from '~/environments/stores/environments_store';
-import { environmentsList, serverData } from './mock_data';
+import { environmentsList, serverData, deployBoardMockData } from './mock_data';
describe('Store', () => {
let store;
@@ -160,4 +160,58 @@ describe('Store', () => {
expect(store.getOpenFolders()[0]).toEqual(store.state.environments[1]);
});
});
+
+ it('should store a non-folder environment with deploy board if rollout_status key is provided', () => {
+ const environment = {
+ name: 'foo',
+ size: 1,
+ latest: {
+ id: 1,
+ rollout_status: deployBoardMockData,
+ },
+ };
+
+ store.storeEnvironments([environment]);
+
+ expect(store.state.environments[0].hasDeployBoard).toEqual(true);
+ expect(store.state.environments[0].isDeployBoardVisible).toEqual(true);
+ expect(store.state.environments[0].deployBoardData).toEqual(deployBoardMockData);
+ });
+
+ describe('deploy boards', () => {
+ beforeEach(() => {
+ const environment = {
+ name: 'foo',
+ size: 1,
+ latest: {
+ id: 1,
+ },
+ rollout_status: deployBoardMockData,
+ };
+
+ store.storeEnvironments([environment]);
+ });
+
+ it('should toggle deploy board property for given environment id', () => {
+ store.toggleDeployBoard(1);
+
+ expect(store.state.environments[0].isDeployBoardVisible).toEqual(false);
+ });
+
+ it('should keep deploy board data when updating environments', () => {
+ expect(store.state.environments[0].deployBoardData).toEqual(deployBoardMockData);
+
+ const environment = {
+ name: 'foo',
+ size: 1,
+ latest: {
+ id: 1,
+ },
+ rollout_status: deployBoardMockData,
+ };
+ store.storeEnvironments([environment]);
+
+ expect(store.state.environments[0].deployBoardData).toEqual(deployBoardMockData);
+ });
+ });
});
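The store assertions above imply that `storeEnvironments` folds a `rollout_status` (whether nested under `latest` or at the top level) into deploy-board view-model flags. A rough sketch of that mapping, written only to make the expectations easier to read; the real environments_store implementation is not part of this diff and may differ:

// Illustrative only: derive the deploy-board flags the spec above asserts on.
const withDeployBoard = (environment) => {
  const rolloutStatus =
    (environment.latest && environment.latest.rollout_status) || environment.rollout_status;

  return {
    ...environment,
    hasDeployBoard: Boolean(rolloutStatus),
    isDeployBoardVisible: Boolean(rolloutStatus),
    deployBoardData: rolloutStatus || {},
  };
};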
diff --git a/spec/frontend/environments/folder/environments_folder_view_spec.js b/spec/frontend/environments/folder/environments_folder_view_spec.js
index f55cb851dde..3943e89c6cf 100644
--- a/spec/frontend/environments/folder/environments_folder_view_spec.js
+++ b/spec/frontend/environments/folder/environments_folder_view_spec.js
@@ -16,14 +16,12 @@ describe('Environments Folder View', () => {
folderName: 'review',
canReadEnvironment: true,
cssContainerClass: 'container',
- canaryDeploymentFeatureId: 'canary_deployment',
- showCanaryDeploymentCallout: true,
userCalloutsPath: '/callouts',
lockPromotionSvgPath: '/assets/illustrations/lock-promotion.svg',
helpCanaryDeploymentsPath: 'help/canary-deployments',
};
- const mockEnvironments = environmentList => {
+ const mockEnvironments = (environmentList) => {
mock.onGet(mockData.endpoint).reply(
200,
{
diff --git a/spec/frontend/environments/mock_data.js b/spec/frontend/environments/mock_data.js
index e7b99c8688c..4ad005f55c3 100644
--- a/spec/frontend/environments/mock_data.js
+++ b/spec/frontend/environments/mock_data.js
@@ -26,6 +26,45 @@ const buildProps = {
updated_at: '2017-02-01T19:42:18.400Z',
};
+const deployBoardMockData = {
+ instances: [
+ { status: 'finished', tooltip: 'tanuki-2334 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2335 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2336 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2337 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2338 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2339 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2340 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2334 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2335 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2336 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2337 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2338 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2339 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2340 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'deploying', tooltip: 'tanuki-2341 Deploying', pod_name: 'production-tanuki-1' },
+ { status: 'deploying', tooltip: 'tanuki-2342 Deploying', pod_name: 'production-tanuki-1' },
+ { status: 'deploying', tooltip: 'tanuki-2343 Deploying', pod_name: 'production-tanuki-1' },
+ { status: 'failed', tooltip: 'tanuki-2344 Failed', pod_name: 'production-tanuki-1' },
+ { status: 'ready', tooltip: 'tanuki-2345 Ready', pod_name: 'production-tanuki-1' },
+ { status: 'ready', tooltip: 'tanuki-2346 Ready', pod_name: 'production-tanuki-1' },
+ { status: 'preparing', tooltip: 'tanuki-2348 Preparing', pod_name: 'production-tanuki-1' },
+ { status: 'preparing', tooltip: 'tanuki-2349 Preparing', pod_name: 'production-tanuki-1' },
+ { status: 'preparing', tooltip: 'tanuki-2350 Preparing', pod_name: 'production-tanuki-1' },
+ { status: 'preparing', tooltip: 'tanuki-2353 Preparing', pod_name: 'production-tanuki-1' },
+ { status: 'waiting', tooltip: 'tanuki-2354 Waiting', pod_name: 'production-tanuki-1' },
+ { status: 'waiting', tooltip: 'tanuki-2355 Waiting', pod_name: 'production-tanuki-1' },
+ { status: 'waiting', tooltip: 'tanuki-2356 Waiting', pod_name: 'production-tanuki-1' },
+ ],
+ abort_url: 'url',
+ rollback_url: 'url',
+ completion: 100,
+ status: 'found',
+ canary_ingress: {
+ canary_weight: 50,
+ },
+};
+
const environment = {
name: 'production',
size: 1,
@@ -262,4 +301,4 @@ const tableData = {
},
};
-export { environment, environmentsList, folder, serverData, tableData };
+export { environment, environmentsList, folder, serverData, tableData, deployBoardMockData };
diff --git a/spec/frontend/error_tracking/components/error_details_spec.js b/spec/frontend/error_tracking/components/error_details_spec.js
index ef3eeb8c7e4..0b9e0008ed7 100644
--- a/spec/frontend/error_tracking/components/error_details_spec.js
+++ b/spec/frontend/error_tracking/components/error_details_spec.js
@@ -38,8 +38,8 @@ describe('ErrorDetails', () => {
let mocks;
const externalUrl = 'https://sentry.io/organizations/test-sentry-nk/issues/1/?project=1';
- const findInput = name => {
- const inputs = wrapper.findAll(GlFormInput).filter(c => c.attributes('name') === name);
+ const findInput = (name) => {
+ const inputs = wrapper.findAll(GlFormInput).filter((c) => c.attributes('name') === name);
return inputs.length ? inputs.at(0) : inputs;
};
@@ -245,7 +245,7 @@ describe('ErrorDetails', () => {
it.each(Object.keys(severityLevel))(
'should set correct severity level variant for %s badge',
- level => {
+ (level) => {
wrapper.setData({
error: {
tags: { level: severityLevel[level] },
diff --git a/spec/frontend/error_tracking/components/error_tracking_actions_spec.js b/spec/frontend/error_tracking/components/error_tracking_actions_spec.js
index b22805f5227..05b3d2f1dec 100644
--- a/spec/frontend/error_tracking/components/error_tracking_actions_spec.js
+++ b/spec/frontend/error_tracking/components/error_tracking_actions_spec.js
@@ -41,16 +41,8 @@ describe('Error Tracking Actions', () => {
expect(findButtons().exists()).toBe(true);
return wrapper.vm.$nextTick().then(() => {
- expect(
- findButtons()
- .at(0)
- .attributes('title'),
- ).toBe('Ignore');
- expect(
- findButtons()
- .at(1)
- .attributes('title'),
- ).toBe('Resolve');
+ expect(findButtons().at(0).attributes('title')).toBe('Ignore');
+ expect(findButtons().at(1).attributes('title')).toBe('Resolve');
});
});
});
@@ -64,11 +56,7 @@ describe('Error Tracking Actions', () => {
expect(findButtons().exists()).toBe(true);
return wrapper.vm.$nextTick().then(() => {
- expect(
- findButtons()
- .at(0)
- .attributes('title'),
- ).toBe('Undo Ignore');
+ expect(findButtons().at(0).attributes('title')).toBe('Undo Ignore');
});
});
});
@@ -82,11 +70,7 @@ describe('Error Tracking Actions', () => {
expect(findButtons().exists()).toBe(true);
return wrapper.vm.$nextTick().then(() => {
- expect(
- findButtons()
- .at(1)
- .attributes('title'),
- ).toBe('Unresolve');
+ expect(findButtons().at(1).attributes('title')).toBe('Unresolve');
});
});
});
diff --git a/spec/frontend/error_tracking/components/error_tracking_list_spec.js b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
index a77bf39cb54..da12237b1d9 100644
--- a/spec/frontend/error_tracking/components/error_tracking_list_spec.js
+++ b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
@@ -19,18 +19,9 @@ describe('ErrorTrackingList', () => {
const findErrorListTable = () => wrapper.find('table');
const findErrorListRows = () => wrapper.findAll('tbody tr');
const dropdownsArray = () => wrapper.findAll(GlDropdown);
- const findRecentSearchesDropdown = () =>
- dropdownsArray()
- .at(0)
- .find(GlDropdown);
- const findStatusFilterDropdown = () =>
- dropdownsArray()
- .at(1)
- .find(GlDropdown);
- const findSortDropdown = () =>
- dropdownsArray()
- .at(2)
- .find(GlDropdown);
+ const findRecentSearchesDropdown = () => dropdownsArray().at(0).find(GlDropdown);
+ const findStatusFilterDropdown = () => dropdownsArray().at(1).find(GlDropdown);
+ const findSortDropdown = () => dropdownsArray().at(2).find(GlDropdown);
const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
const findPagination = () => wrapper.find(GlPagination);
const findErrorActions = () => wrapper.find(ErrorTrackingActions);
@@ -156,7 +147,7 @@ describe('ErrorTrackingList', () => {
});
it('each error in the list should have an action button set', () => {
- findErrorListRows().wrappers.forEach(row => {
+ findErrorListRows().wrappers.forEach((row) => {
expect(row.find(ErrorTrackingActions).exists()).toBe(true);
});
});
@@ -243,7 +234,6 @@ describe('ErrorTrackingList', () => {
stubs: {
GlTable: false,
GlLink: false,
- GlDeprecatedButton: false,
},
});
});
@@ -274,7 +264,6 @@ describe('ErrorTrackingList', () => {
stubs: {
GlTable: false,
GlLink: false,
- GlDeprecatedButton: false,
},
});
});
@@ -454,7 +443,6 @@ describe('ErrorTrackingList', () => {
stubs: {
GlTable: false,
GlLink: false,
- GlDeprecatedButton: false,
},
});
});
diff --git a/spec/frontend/error_tracking/store/actions_spec.js b/spec/frontend/error_tracking/store/actions_spec.js
index 43037473a61..34ad600af05 100644
--- a/spec/frontend/error_tracking/store/actions_spec.js
+++ b/spec/frontend/error_tracking/store/actions_spec.js
@@ -28,7 +28,7 @@ describe('Sentry common store actions', () => {
const params = { endpoint, redirectUrl, status };
describe('updateStatus', () => {
- it('should handle successful status update', done => {
+ it('should handle successful status update', (done) => {
mock.onPut().reply(200, {});
testAction(
actions.updateStatus,
@@ -48,7 +48,7 @@ describe('Sentry common store actions', () => {
);
});
- it('should handle unsuccessful status update', done => {
+ it('should handle unsuccessful status update', (done) => {
mock.onPut().reply(400, {});
testAction(actions.updateStatus, params, {}, [], [], () => {
expect(visitUrl).not.toHaveBeenCalled();
diff --git a/spec/frontend/error_tracking/store/details/actions_spec.js b/spec/frontend/error_tracking/store/details/actions_spec.js
index 58e77c46e02..152ecde6985 100644
--- a/spec/frontend/error_tracking/store/details/actions_spec.js
+++ b/spec/frontend/error_tracking/store/details/actions_spec.js
@@ -28,7 +28,7 @@ describe('Sentry error details store actions', () => {
describe('startPollingStacktrace', () => {
const endpoint = '123/stacktrace';
- it('should commit SET_ERROR with received response', done => {
+ it('should commit SET_ERROR with received response', (done) => {
const payload = { error: [1, 2, 3] };
mockedAdapter.onGet().reply(200, payload);
testAction(
@@ -46,7 +46,7 @@ describe('Sentry error details store actions', () => {
);
});
- it('should show flash on API error', done => {
+ it('should show flash on API error', (done) => {
mockedAdapter.onGet().reply(400);
testAction(
@@ -62,7 +62,7 @@ describe('Sentry error details store actions', () => {
);
});
- it('should not restart polling when receiving an empty 204 response', done => {
+ it('should not restart polling when receiving an empty 204 response', (done) => {
mockedRestart = jest.spyOn(Poll.prototype, 'restart');
mockedAdapter.onGet().reply(204);
diff --git a/spec/frontend/error_tracking/store/list/actions_spec.js b/spec/frontend/error_tracking/store/list/actions_spec.js
index 7326472e1dd..a93608fe70d 100644
--- a/spec/frontend/error_tracking/store/list/actions_spec.js
+++ b/spec/frontend/error_tracking/store/list/actions_spec.js
@@ -20,7 +20,7 @@ describe('error tracking actions', () => {
});
describe('startPolling', () => {
- it('should start polling for data', done => {
+ it('should start polling for data', (done) => {
const payload = { errors: [{ id: 1 }, { id: 2 }] };
mock.onGet().reply(httpStatusCodes.OK, payload);
@@ -41,14 +41,17 @@ describe('error tracking actions', () => {
);
});
- it('should show flash on API error', done => {
+ it('should show flash on API error', (done) => {
mock.onGet().reply(httpStatusCodes.BAD_REQUEST);
testAction(
actions.startPolling,
{},
{},
- [{ type: types.SET_LOADING, payload: true }, { type: types.SET_LOADING, payload: false }],
+ [
+ { type: types.SET_LOADING, payload: true },
+ { type: types.SET_LOADING, payload: false },
+ ],
[],
() => {
expect(createFlash).toHaveBeenCalledTimes(1);
@@ -64,7 +67,10 @@ describe('error tracking actions', () => {
actions.restartPolling,
{},
{},
- [{ type: types.SET_ERRORS, payload: [] }, { type: types.SET_LOADING, payload: true }],
+ [
+ { type: types.SET_ERRORS, payload: [] },
+ { type: types.SET_LOADING, payload: true },
+ ],
[],
);
});
@@ -110,7 +116,10 @@ describe('error tracking actions', () => {
actions.sortByField,
field,
{},
- [{ type: types.SET_CURSOR, payload: null }, { type: types.SET_SORT_FIELD, payload: field }],
+ [
+ { type: types.SET_CURSOR, payload: null },
+ { type: types.SET_SORT_FIELD, payload: field },
+ ],
[{ type: 'stopPolling' }, { type: 'startPolling' }],
);
});
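The reformatted expectations above all flow through GitLab's Vuex action test helper. For readers unfamiliar with it, a sketch of one call, assuming the usual `testAction(action, payload, state, expectedMutations, expectedActions, done)` signature exported by `helpers/vuex_action_helper`; the module paths and the axios MockAdapter setup are taken from the surrounding spec on the same assumption:

import testAction from 'helpers/vuex_action_helper';
import * as actions from '~/error_tracking/store/list/actions';
import * as types from '~/error_tracking/store/list/mutation_types';

// Mirrors the flash-on-error case above: the action should commit two SET_LOADING
// mutations and dispatch nothing else. Assumes mock.onGet().reply(400) from the spec.
it('commits loading mutations around a failed poll', (done) => {
  testAction(
    actions.startPolling,
    {},
    {},
    [
      { type: types.SET_LOADING, payload: true },
      { type: types.SET_LOADING, payload: false },
    ],
    [],
    done,
  );
});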
diff --git a/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js b/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js
index f4a765a3d73..e2a8b57f555 100644
--- a/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js
+++ b/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js
@@ -35,12 +35,7 @@ describe('error tracking settings form', () => {
it('is rendered', () => {
expect(wrapper.findAll(GlFormInput).length).toBe(2);
expect(wrapper.find(GlFormInput).attributes('id')).toBe('error-tracking-api-host');
- expect(
- wrapper
- .findAll(GlFormInput)
- .at(1)
- .attributes('id'),
- ).toBe('error-tracking-token');
+ expect(wrapper.findAll(GlFormInput).at(1).attributes('id')).toBe('error-tracking-token');
expect(wrapper.findAll(GlButton).exists()).toBe(true);
});
@@ -56,12 +51,9 @@ describe('error tracking settings form', () => {
);
expect(pageText).not.toContain('Connection has failed. Re-check Auth Token and try again');
- expect(
- wrapper
- .findAll(GlFormInput)
- .at(0)
- .attributes('placeholder'),
- ).toContain('https://mysentryserver.com');
+ expect(wrapper.findAll(GlFormInput).at(0).attributes('placeholder')).toContain(
+ 'https://mysentryserver.com',
+ );
});
});
diff --git a/spec/frontend/error_tracking_settings/store/actions_spec.js b/spec/frontend/error_tracking_settings/store/actions_spec.js
index b076e6ecd31..216bf8011e8 100644
--- a/spec/frontend/error_tracking_settings/store/actions_spec.js
+++ b/spec/frontend/error_tracking_settings/store/actions_spec.js
@@ -27,7 +27,7 @@ describe('error tracking settings actions', () => {
refreshCurrentPage.mockClear();
});
- it('should request and transform the project list', done => {
+ it('should request and transform the project list', (done) => {
mock.onGet(TEST_HOST).reply(() => [200, { projects: projectList }]);
testAction(
actions.fetchProjects,
@@ -48,7 +48,7 @@ describe('error tracking settings actions', () => {
);
});
- it('should handle a server error', done => {
+ it('should handle a server error', (done) => {
mock.onGet(`${TEST_HOST}.json`).reply(() => [400]);
testAction(
actions.fetchProjects,
@@ -68,7 +68,7 @@ describe('error tracking settings actions', () => {
);
});
- it('should request projects correctly', done => {
+ it('should request projects correctly', (done) => {
testAction(
actions.requestProjects,
null,
@@ -79,7 +79,7 @@ describe('error tracking settings actions', () => {
);
});
- it('should receive projects correctly', done => {
+ it('should receive projects correctly', (done) => {
const testPayload = [];
testAction(
actions.receiveProjectsSuccess,
@@ -95,7 +95,7 @@ describe('error tracking settings actions', () => {
);
});
- it('should handle errors when receiving projects', done => {
+ it('should handle errors when receiving projects', (done) => {
const testPayload = [];
testAction(
actions.receiveProjectsError,
@@ -126,7 +126,7 @@ describe('error tracking settings actions', () => {
mock.restore();
});
- it('should save the page', done => {
+ it('should save the page', (done) => {
mock.onPatch(TEST_HOST).reply(200);
testAction(actions.updateSettings, null, state, [], [{ type: 'requestSettings' }], () => {
expect(mock.history.patch.length).toBe(1);
@@ -135,7 +135,7 @@ describe('error tracking settings actions', () => {
});
});
- it('should handle a server error', done => {
+ it('should handle a server error', (done) => {
mock.onPatch(TEST_HOST).reply(400);
testAction(
actions.updateSettings,
@@ -156,7 +156,7 @@ describe('error tracking settings actions', () => {
);
});
- it('should request to save the page', done => {
+ it('should request to save the page', (done) => {
testAction(
actions.requestSettings,
null,
@@ -167,7 +167,7 @@ describe('error tracking settings actions', () => {
);
});
- it('should handle errors when requesting to save the page', done => {
+ it('should handle errors when requesting to save the page', (done) => {
testAction(
actions.receiveSettingsError,
{},
@@ -181,7 +181,7 @@ describe('error tracking settings actions', () => {
describe('generic actions to update the store', () => {
const testData = 'test';
- it('should reset the `connect success` flag when updating the api host', done => {
+ it('should reset the `connect success` flag when updating the api host', (done) => {
testAction(
actions.updateApiHost,
testData,
@@ -192,7 +192,7 @@ describe('error tracking settings actions', () => {
);
});
- it('should reset the `connect success` flag when updating the token', done => {
+ it('should reset the `connect success` flag when updating the token', (done) => {
testAction(
actions.updateToken,
testData,
diff --git a/spec/frontend/feature_flags/components/configure_feature_flags_modal_spec.js b/spec/frontend/feature_flags/components/configure_feature_flags_modal_spec.js
index 06b9385b112..21b894ccbef 100644
--- a/spec/frontend/feature_flags/components/configure_feature_flags_modal_spec.js
+++ b/spec/frontend/feature_flags/components/configure_feature_flags_modal_spec.js
@@ -36,7 +36,7 @@ describe('Configure Feature Flags Modal', () => {
const findPrimaryAction = () => findGlModal().props('actionPrimary');
const findProjectNameInput = () => wrapper.find('#project_name_verification');
const findDangerGlAlert = () =>
- wrapper.findAll(GlAlert).filter(c => c.props('variant') === 'danger');
+ wrapper.findAll(GlAlert).filter((c) => c.props('variant') === 'danger');
describe('idle', () => {
afterEach(() => wrapper.destroy());
diff --git a/spec/frontend/feature_flags/components/edit_feature_flag_spec.js b/spec/frontend/feature_flags/components/edit_feature_flag_spec.js
index f8e25925774..a754c682356 100644
--- a/spec/frontend/feature_flags/components/edit_feature_flag_spec.js
+++ b/spec/frontend/feature_flags/components/edit_feature_flag_spec.js
@@ -42,7 +42,7 @@ describe('Edit feature flag form', () => {
});
};
- beforeEach(done => {
+ beforeEach((done) => {
mock = new MockAdapter(axios);
mock.onGet(`${TEST_HOST}/feature_flags.json`).replyOnce(200, {
id: 21,
diff --git a/spec/frontend/feature_flags/components/environments_dropdown_spec.js b/spec/frontend/feature_flags/components/environments_dropdown_spec.js
index 917f5f5ccd3..042fccaa369 100644
--- a/spec/frontend/feature_flags/components/environments_dropdown_spec.js
+++ b/spec/frontend/feature_flags/components/environments_dropdown_spec.js
@@ -1,6 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import { shallowMount } from '@vue/test-utils';
-import { GlLoadingIcon, GlDeprecatedButton, GlSearchBoxByType } from '@gitlab/ui';
+import { GlLoadingIcon, GlButton, GlSearchBoxByType } from '@gitlab/ui';
import { TEST_HOST } from 'spec/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
import EnvironmentsDropdown from '~/feature_flags/components/environments_dropdown.vue';
@@ -11,7 +11,7 @@ describe('Feature flags > Environments dropdown ', () => {
let wrapper;
let mock;
const results = ['production', 'staging'];
- const factory = props => {
+ const factory = (props) => {
wrapper = shallowMount(EnvironmentsDropdown, {
propsData: {
...props,
@@ -99,8 +99,8 @@ describe('Feature flags > Environments dropdown ', () => {
it('emits event when a suggestion is clicked', async () => {
const button = wrapper
- .findAll(GlDeprecatedButton)
- .filter(b => b.text() === 'production')
+ .findAll(GlButton)
+ .filter((b) => b.text() === 'production')
.at(0);
button.vm.$emit('click');
await wrapper.vm.$nextTick();
@@ -110,7 +110,7 @@ describe('Feature flags > Environments dropdown ', () => {
describe('on click clear button', () => {
beforeEach(async () => {
- wrapper.find(GlDeprecatedButton).vm.$emit('click');
+ wrapper.find(GlButton).vm.$emit('click');
await wrapper.vm.$nextTick();
});
@@ -136,10 +136,7 @@ describe('Feature flags > Environments dropdown ', () => {
});
it('emits create event', async () => {
- wrapper
- .findAll(GlDeprecatedButton)
- .at(0)
- .vm.$emit('click');
+ wrapper.findAll(GlButton).at(0).vm.$emit('click');
await wrapper.vm.$nextTick();
expect(wrapper.emitted('createClicked')).toEqual([['production']]);
});
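The hunks above migrate this spec from GlDeprecatedButton to GlButton while keeping the lookup-by-text pattern. Condensed into a helper for reference (component import from @gitlab/ui as used above; the helper name is made up for illustration):

import { GlButton } from '@gitlab/ui';

// Find the suggestion button whose rendered text matches, then emit its click event.
const clickSuggestion = (wrapper, text) =>
  wrapper
    .findAll(GlButton)
    .filter((button) => button.text() === text)
    .at(0)
    .vm.$emit('click');

// Usage, matching the spec above: clickSuggestion(wrapper, 'production');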
diff --git a/spec/frontend/feature_flags/components/feature_flags_spec.js b/spec/frontend/feature_flags/components/feature_flags_spec.js
index 3c1234fea94..8242d667d2e 100644
--- a/spec/frontend/feature_flags/components/feature_flags_spec.js
+++ b/spec/frontend/feature_flags/components/feature_flags_spec.js
@@ -85,7 +85,7 @@ describe('Feature flags', () => {
describe('when limit exceeded', () => {
const provideData = { ...mockData, featureFlagsLimitExceeded: true };
- beforeEach(done => {
+ beforeEach((done) => {
mock
.onGet(`${TEST_HOST}/endpoint.json`, { params: { scope: FEATURE_FLAG_SCOPE, page: '1' } })
.reply(200, getRequestData, {});
@@ -101,11 +101,9 @@ describe('Feature flags', () => {
it('shows a feature flags limit reached alert', () => {
expect(limitAlert().exists()).toBe(true);
- expect(
- limitAlert()
- .find(GlSprintf)
- .attributes('message'),
- ).toContain('Feature flags limit reached');
+ expect(limitAlert().find(GlSprintf).attributes('message')).toContain(
+ 'Feature flags limit reached',
+ );
});
describe('when the alert is dismissed', () => {
@@ -134,7 +132,7 @@ describe('Feature flags', () => {
newUserListPath: null,
};
- beforeEach(done => {
+ beforeEach((done) => {
mock
.onGet(`${TEST_HOST}/endpoint.json`, { params: { scope: FEATURE_FLAG_SCOPE, page: '1' } })
.reply(200, getRequestData, {});
@@ -219,7 +217,7 @@ describe('Feature flags', () => {
});
describe('with paginated feature flags', () => {
- beforeEach(done => {
+ beforeEach((done) => {
mock
.onGet(mockState.endpoint, { params: { scope: FEATURE_FLAG_SCOPE, page: '1' } })
.replyOnce(200, getRequestData, {
@@ -299,7 +297,7 @@ describe('Feature flags', () => {
});
describe('in user lists tab', () => {
- beforeEach(done => {
+ beforeEach((done) => {
factory();
setImmediate(done);
});
@@ -319,7 +317,7 @@ describe('Feature flags', () => {
});
describe('unsuccessful request', () => {
- beforeEach(done => {
+ beforeEach((done) => {
mock
.onGet(mockState.endpoint, { params: { scope: FEATURE_FLAG_SCOPE, page: '1' } })
.replyOnce(500, {});
@@ -352,7 +350,7 @@ describe('Feature flags', () => {
});
describe('rotate instance id', () => {
- beforeEach(done => {
+ beforeEach((done) => {
mock
.onGet(`${TEST_HOST}/endpoint.json`, { params: { scope: FEATURE_FLAG_SCOPE, page: '1' } })
.reply(200, getRequestData, {});
diff --git a/spec/frontend/feature_flags/components/feature_flags_table_spec.js b/spec/frontend/feature_flags/components/feature_flags_table_spec.js
index a488662470e..8881cfae88d 100644
--- a/spec/frontend/feature_flags/components/feature_flags_table_spec.js
+++ b/spec/frontend/feature_flags/components/feature_flags_table_spec.js
@@ -120,13 +120,11 @@ describe('Feature flag table', () => {
describe('when active and with an update toggle', () => {
let toggle;
- let spy;
beforeEach(() => {
props.featureFlags[0].update_path = props.featureFlags[0].destroy_path;
createWrapper(props);
toggle = wrapper.find(GlToggle);
- spy = mockTracking('_category_', toggle.element, jest.spyOn);
});
it('should have a toggle', () => {
@@ -142,14 +140,6 @@ describe('Feature flag table', () => {
expect(wrapper.emitted('toggle-flag')).toEqual([[flag]]);
});
});
-
- it('should track a click', () => {
- toggle.trigger('click');
-
- expect(spy).toHaveBeenCalledWith('_category_', 'click_button', {
- label: 'feature_flag_toggle',
- });
- });
});
describe('with an active scope and a percentage rollout strategy', () => {
@@ -180,6 +170,8 @@ describe('Feature flag table', () => {
});
describe('with a new version flag', () => {
+ let toggle;
+ let spy;
let badges;
beforeEach(() => {
@@ -194,6 +186,7 @@ describe('Feature flag table', () => {
description: 'flag description',
destroy_path: 'destroy/path',
edit_path: 'edit/path',
+ update_path: 'update/path',
version: NEW_VERSION_FLAG,
scopes: [],
strategies: [
@@ -226,6 +219,8 @@ describe('Feature flag table', () => {
provide: { csrfToken: 'fakeToken', glFeatures: { featureFlagsNewVersion: true } },
});
+ toggle = wrapper.find(GlToggle);
+ spy = mockTracking('_category_', toggle.element, jest.spyOn);
badges = wrapper.findAll('[data-testid="strategy-badge"]');
});
@@ -254,6 +249,14 @@ describe('Feature flag table', () => {
it('shows the name of a user list for user list', () => {
expect(badges.at(3).text()).toContain('User List - test list');
});
+
+ it('tracks a click', () => {
+ toggle.trigger('click');
+
+ expect(spy).toHaveBeenCalledWith('_category_', 'click_button', {
+ label: 'feature_flag_toggle',
+ });
+ });
});
it('renders a feature flag without an iid', () => {
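The tracking assertion above moves into the new-version-flag block, presumably because only flags given an `update_path` render the toggle the spy is attached to. The spy wiring, for reference, assuming GitLab's `helpers/tracking_helper` export used elsewhere in these specs:

import { mockTracking } from 'helpers/tracking_helper';

// Attach a tracking spy to the toggle element, click it, and assert the event payload.
const spy = mockTracking('_category_', toggle.element, jest.spyOn);
toggle.trigger('click');

expect(spy).toHaveBeenCalledWith('_category_', 'click_button', {
  label: 'feature_flag_toggle',
});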
diff --git a/spec/frontend/feature_flags/components/form_spec.js b/spec/frontend/feature_flags/components/form_spec.js
index 2c2a726d26f..3a057aedde9 100644
--- a/spec/frontend/feature_flags/components/form_spec.js
+++ b/spec/frontend/feature_flags/components/form_spec.js
@@ -118,7 +118,7 @@ describe('feature flag form', () => {
});
});
- it('should be disabled if the feature flag is not active', done => {
+ it('should be disabled if the feature flag is not active', (done) => {
wrapper.setProps({ active: false });
wrapper.vm.$nextTick(() => {
expect(wrapper.find(ToggleButton).props('disabledInput')).toBe(true);
@@ -191,7 +191,7 @@ describe('feature flag form', () => {
expect(wrapper.vm.formScopes[0].active).toBe(false);
});
- it('should be disabled if the feature flag is not active', done => {
+ it('should be disabled if the feature flag is not active', (done) => {
wrapper.setProps({ active: false });
wrapper.vm.$nextTick(() => {
@@ -301,7 +301,7 @@ describe('feature flag form', () => {
});
describe('on submit', () => {
- const selectFirstRolloutStrategyOption = dropdownIndex => {
+ const selectFirstRolloutStrategyOption = (dropdownIndex) => {
wrapper
.findAll('select.js-rollout-strategy')
.at(dropdownIndex)
@@ -347,10 +347,7 @@ describe('feature flag form', () => {
return wrapper.vm.$nextTick();
})
.then(() => {
- wrapper
- .find('.js-add-new-scope')
- .find(ToggleButton)
- .vm.$emit('change', true);
+ wrapper.find('.js-add-new-scope').find(ToggleButton).vm.$emit('change', true);
})
.then(() => {
wrapper.find(ToggleButton).vm.$emit('change', true);
diff --git a/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js b/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js
index 12dc98fbde8..ad58ceaf5f8 100644
--- a/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js
+++ b/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js
@@ -94,7 +94,7 @@ describe('New Environments Dropdown', () => {
});
it('should not display a create label', () => {
- items = items.filter(i => i.text().startsWith('Create'));
+ items = items.filter((i) => i.text().startsWith('Create'));
expect(items).toHaveLength(0);
});
diff --git a/spec/frontend/feature_flags/components/strategies/gitlab_user_list_spec.js b/spec/frontend/feature_flags/components/strategies/gitlab_user_list_spec.js
index b34fe7779e3..1c85eadc678 100644
--- a/spec/frontend/feature_flags/components/strategies/gitlab_user_list_spec.js
+++ b/spec/frontend/feature_flags/components/strategies/gitlab_user_list_spec.js
@@ -66,7 +66,7 @@ describe('~/feature_flags/components/strategies/gitlab_user_list.vue', () => {
it('should search when the filter changes', async () => {
let r;
Api.searchFeatureFlagUserLists.mockReturnValue(
- new Promise(resolve => {
+ new Promise((resolve) => {
r = resolve;
}),
);
diff --git a/spec/frontend/feature_flags/components/strategy_parameters_spec.js b/spec/frontend/feature_flags/components/strategy_parameters_spec.js
index a024384e623..82811c05195 100644
--- a/spec/frontend/feature_flags/components/strategy_parameters_spec.js
+++ b/spec/frontend/feature_flags/components/strategy_parameters_spec.js
@@ -51,7 +51,7 @@ describe('~/feature_flags/components/strategy_parameters.vue', () => {
});
it('should show the correct component', () => {
- expect(wrapper.contains(component)).toBe(true);
+ expect(wrapper.find(component).exists()).toBe(true);
});
it('should emit changes from the lower component', () => {
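The one-line change above replaces `wrapper.contains`, which was deprecated in @vue/test-utils, with an equivalent existence check. Before and after, shown side by side for comparison only:

// Before: deprecated API.
expect(wrapper.contains(component)).toBe(true);

// After: the same assertion expressed through find + exists.
expect(wrapper.find(component).exists()).toBe(true);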
diff --git a/spec/frontend/feature_flags/components/user_lists_table_spec.js b/spec/frontend/feature_flags/components/user_lists_table_spec.js
index d6ced3be168..974f63ba934 100644
--- a/spec/frontend/feature_flags/components/user_lists_table_spec.js
+++ b/spec/frontend/feature_flags/components/user_lists_table_spec.js
@@ -42,7 +42,7 @@ describe('User Lists Table', () => {
it('should display a user list entry per user list', () => {
const lists = wrapper.findAll('[data-testid="ffUserList"]');
expect(lists).toHaveLength(5);
- lists.wrappers.forEach(list => {
+ lists.wrappers.forEach((list) => {
expect(list.find('[data-testid="ffUserListName"]').exists()).toBe(true);
expect(list.find('[data-testid="ffUserListIds"]').exists()).toBe(true);
expect(list.find('[data-testid="ffUserListTimestamp"]').exists()).toBe(true);
diff --git a/spec/frontend/feature_flags/store/edit/actions_spec.js b/spec/frontend/feature_flags/store/edit/actions_spec.js
index 9d764799d09..20cec5daac0 100644
--- a/spec/frontend/feature_flags/store/edit/actions_spec.js
+++ b/spec/frontend/feature_flags/store/edit/actions_spec.js
@@ -44,7 +44,7 @@ describe('Feature flags Edit Module actions', () => {
});
describe('success', () => {
- it('dispatches requestUpdateFeatureFlag and receiveUpdateFeatureFlagSuccess ', done => {
+ it('dispatches requestUpdateFeatureFlag and receiveUpdateFeatureFlagSuccess ', (done) => {
const featureFlag = {
name: 'feature_flag',
description: 'feature flag',
@@ -80,7 +80,7 @@ describe('Feature flags Edit Module actions', () => {
done,
);
});
- it('handles new version flags as well', done => {
+ it('handles new version flags as well', (done) => {
const featureFlag = {
name: 'name',
description: 'description',
@@ -117,7 +117,7 @@ describe('Feature flags Edit Module actions', () => {
});
describe('error', () => {
- it('dispatches requestUpdateFeatureFlag and receiveUpdateFeatureFlagError ', done => {
+ it('dispatches requestUpdateFeatureFlag and receiveUpdateFeatureFlagError ', (done) => {
mock.onPut(`${TEST_HOST}/endpoint.json`).replyOnce(500, { message: [] });
testAction(
@@ -145,7 +145,7 @@ describe('Feature flags Edit Module actions', () => {
});
describe('requestUpdateFeatureFlag', () => {
- it('should commit REQUEST_UPDATE_FEATURE_FLAG mutation', done => {
+ it('should commit REQUEST_UPDATE_FEATURE_FLAG mutation', (done) => {
testAction(
requestUpdateFeatureFlag,
null,
@@ -158,7 +158,7 @@ describe('Feature flags Edit Module actions', () => {
});
describe('receiveUpdateFeatureFlagSuccess', () => {
- it('should commit RECEIVE_UPDATE_FEATURE_FLAG_SUCCESS mutation', done => {
+ it('should commit RECEIVE_UPDATE_FEATURE_FLAG_SUCCESS mutation', (done) => {
testAction(
receiveUpdateFeatureFlagSuccess,
null,
@@ -175,7 +175,7 @@ describe('Feature flags Edit Module actions', () => {
});
describe('receiveUpdateFeatureFlagError', () => {
- it('should commit RECEIVE_UPDATE_FEATURE_FLAG_ERROR mutation', done => {
+ it('should commit RECEIVE_UPDATE_FEATURE_FLAG_ERROR mutation', (done) => {
testAction(
receiveUpdateFeatureFlagError,
'There was an error',
@@ -200,7 +200,7 @@ describe('Feature flags Edit Module actions', () => {
});
describe('success', () => {
- it('dispatches requestFeatureFlag and receiveFeatureFlagSuccess ', done => {
+ it('dispatches requestFeatureFlag and receiveFeatureFlagSuccess ', (done) => {
mock.onGet(`${TEST_HOST}/endpoint.json`).replyOnce(200, { id: 1 });
testAction(
@@ -223,7 +223,7 @@ describe('Feature flags Edit Module actions', () => {
});
describe('error', () => {
- it('dispatches requestFeatureFlag and receiveUpdateFeatureFlagError ', done => {
+ it('dispatches requestFeatureFlag and receiveUpdateFeatureFlagError ', (done) => {
mock.onGet(`${TEST_HOST}/endpoint.json`, {}).replyOnce(500, {});
testAction(
@@ -246,7 +246,7 @@ describe('Feature flags Edit Module actions', () => {
});
describe('requestFeatureFlag', () => {
- it('should commit REQUEST_FEATURE_FLAG mutation', done => {
+ it('should commit REQUEST_FEATURE_FLAG mutation', (done) => {
testAction(
requestFeatureFlag,
null,
@@ -259,7 +259,7 @@ describe('Feature flags Edit Module actions', () => {
});
describe('receiveFeatureFlagSuccess', () => {
- it('should commit RECEIVE_FEATURE_FLAG_SUCCESS mutation', done => {
+ it('should commit RECEIVE_FEATURE_FLAG_SUCCESS mutation', (done) => {
testAction(
receiveFeatureFlagSuccess,
{ id: 1 },
@@ -272,7 +272,7 @@ describe('Feature flags Edit Module actions', () => {
});
describe('receiveFeatureFlagError', () => {
- it('should commit RECEIVE_FEATURE_FLAG_ERROR mutation', done => {
+ it('should commit RECEIVE_FEATURE_FLAG_ERROR mutation', (done) => {
testAction(
receiveFeatureFlagError,
null,
@@ -289,7 +289,7 @@ describe('Feature flags Edit Module actions', () => {
});
describe('toggelActive', () => {
- it('should commit TOGGLE_ACTIVE mutation', done => {
+ it('should commit TOGGLE_ACTIVE mutation', (done) => {
testAction(
toggleActive,
true,
diff --git a/spec/frontend/feature_flags/store/index/actions_spec.js b/spec/frontend/feature_flags/store/index/actions_spec.js
index d223bb2c292..82e16958d33 100644
--- a/spec/frontend/feature_flags/store/index/actions_spec.js
+++ b/spec/frontend/feature_flags/store/index/actions_spec.js
@@ -40,7 +40,7 @@ describe('Feature flags actions', () => {
});
describe('setFeatureFlagsOptions', () => {
- it('should commit SET_FEATURE_FLAGS_OPTIONS mutation', done => {
+ it('should commit SET_FEATURE_FLAGS_OPTIONS mutation', (done) => {
testAction(
setFeatureFlagsOptions,
{ page: '1', scope: 'all' },
@@ -65,7 +65,7 @@ describe('Feature flags actions', () => {
});
describe('success', () => {
- it('dispatches requestFeatureFlags and receiveFeatureFlagsSuccess ', done => {
+ it('dispatches requestFeatureFlags and receiveFeatureFlagsSuccess ', (done) => {
mock.onGet(`${TEST_HOST}/endpoint.json`).replyOnce(200, getRequestData, {});
testAction(
@@ -88,7 +88,7 @@ describe('Feature flags actions', () => {
});
describe('error', () => {
- it('dispatches requestFeatureFlags and receiveFeatureFlagsError ', done => {
+ it('dispatches requestFeatureFlags and receiveFeatureFlagsError ', (done) => {
mock.onGet(`${TEST_HOST}/endpoint.json`, {}).replyOnce(500, {});
testAction(
@@ -111,7 +111,7 @@ describe('Feature flags actions', () => {
});
describe('requestFeatureFlags', () => {
- it('should commit RECEIVE_FEATURE_FLAGS_SUCCESS mutation', done => {
+ it('should commit RECEIVE_FEATURE_FLAGS_SUCCESS mutation', (done) => {
testAction(
requestFeatureFlags,
null,
@@ -124,7 +124,7 @@ describe('Feature flags actions', () => {
});
describe('receiveFeatureFlagsSuccess', () => {
- it('should commit RECEIVE_FEATURE_FLAGS_SUCCESS mutation', done => {
+ it('should commit RECEIVE_FEATURE_FLAGS_SUCCESS mutation', (done) => {
testAction(
receiveFeatureFlagsSuccess,
{ data: getRequestData, headers: {} },
@@ -142,7 +142,7 @@ describe('Feature flags actions', () => {
});
describe('receiveFeatureFlagsError', () => {
- it('should commit RECEIVE_FEATURE_FLAGS_ERROR mutation', done => {
+ it('should commit RECEIVE_FEATURE_FLAGS_ERROR mutation', (done) => {
testAction(
receiveFeatureFlagsError,
null,
@@ -160,7 +160,7 @@ describe('Feature flags actions', () => {
});
describe('success', () => {
- it('dispatches requestUserLists and receiveUserListsSuccess ', done => {
+ it('dispatches requestUserLists and receiveUserListsSuccess ', (done) => {
testAction(
fetchUserLists,
null,
@@ -181,7 +181,7 @@ describe('Feature flags actions', () => {
});
describe('error', () => {
- it('dispatches requestUserLists and receiveUserListsError ', done => {
+ it('dispatches requestUserLists and receiveUserListsError ', (done) => {
Api.fetchFeatureFlagUserLists.mockRejectedValue();
testAction(
@@ -204,7 +204,7 @@ describe('Feature flags actions', () => {
});
describe('requestUserLists', () => {
- it('should commit RECEIVE_USER_LISTS_SUCCESS mutation', done => {
+ it('should commit RECEIVE_USER_LISTS_SUCCESS mutation', (done) => {
testAction(
requestUserLists,
null,
@@ -217,7 +217,7 @@ describe('Feature flags actions', () => {
});
describe('receiveUserListsSuccess', () => {
- it('should commit RECEIVE_USER_LISTS_SUCCESS mutation', done => {
+ it('should commit RECEIVE_USER_LISTS_SUCCESS mutation', (done) => {
testAction(
receiveUserListsSuccess,
{ data: [userList], headers: {} },
@@ -235,7 +235,7 @@ describe('Feature flags actions', () => {
});
describe('receiveUserListsError', () => {
- it('should commit RECEIVE_USER_LISTS_ERROR mutation', done => {
+ it('should commit RECEIVE_USER_LISTS_ERROR mutation', (done) => {
testAction(
receiveUserListsError,
null,
@@ -260,7 +260,7 @@ describe('Feature flags actions', () => {
});
describe('success', () => {
- it('dispatches requestRotateInstanceId and receiveRotateInstanceIdSuccess ', done => {
+ it('dispatches requestRotateInstanceId and receiveRotateInstanceIdSuccess ', (done) => {
mock.onPost(`${TEST_HOST}/endpoint.json`).replyOnce(200, rotateData, {});
testAction(
@@ -283,7 +283,7 @@ describe('Feature flags actions', () => {
});
describe('error', () => {
- it('dispatches requestRotateInstanceId and receiveRotateInstanceIdError ', done => {
+ it('dispatches requestRotateInstanceId and receiveRotateInstanceIdError ', (done) => {
mock.onGet(`${TEST_HOST}/endpoint.json`, {}).replyOnce(500, {});
testAction(
@@ -306,7 +306,7 @@ describe('Feature flags actions', () => {
});
describe('requestRotateInstanceId', () => {
- it('should commit REQUEST_ROTATE_INSTANCE_ID mutation', done => {
+ it('should commit REQUEST_ROTATE_INSTANCE_ID mutation', (done) => {
testAction(
requestRotateInstanceId,
null,
@@ -319,7 +319,7 @@ describe('Feature flags actions', () => {
});
describe('receiveRotateInstanceIdSuccess', () => {
- it('should commit RECEIVE_ROTATE_INSTANCE_ID_SUCCESS mutation', done => {
+ it('should commit RECEIVE_ROTATE_INSTANCE_ID_SUCCESS mutation', (done) => {
testAction(
receiveRotateInstanceIdSuccess,
{ data: rotateData, headers: {} },
@@ -337,7 +337,7 @@ describe('Feature flags actions', () => {
});
describe('receiveRotateInstanceIdError', () => {
- it('should commit RECEIVE_ROTATE_INSTANCE_ID_ERROR mutation', done => {
+ it('should commit RECEIVE_ROTATE_INSTANCE_ID_ERROR mutation', (done) => {
testAction(
receiveRotateInstanceIdError,
null,
@@ -353,7 +353,7 @@ describe('Feature flags actions', () => {
let mock;
beforeEach(() => {
- mockedState.featureFlags = getRequestData.feature_flags.map(flag => ({
+ mockedState.featureFlags = getRequestData.feature_flags.map((flag) => ({
...flag,
scopes: mapToScopesViewModel(flag.scopes || []),
}));
@@ -364,7 +364,7 @@ describe('Feature flags actions', () => {
mock.restore();
});
describe('success', () => {
- it('dispatches updateFeatureFlag and receiveUpdateFeatureFlagSuccess', done => {
+ it('dispatches updateFeatureFlag and receiveUpdateFeatureFlagSuccess', (done) => {
mock.onPut(featureFlag.update_path).replyOnce(200, featureFlag, {});
testAction(
@@ -387,7 +387,7 @@ describe('Feature flags actions', () => {
});
});
describe('error', () => {
- it('dispatches updateFeatureFlag and receiveUpdateFeatureFlagSuccess', done => {
+ it('dispatches updateFeatureFlag and receiveUpdateFeatureFlagSuccess', (done) => {
mock.onPut(featureFlag.update_path).replyOnce(500);
testAction(
@@ -412,13 +412,13 @@ describe('Feature flags actions', () => {
});
describe('updateFeatureFlag', () => {
beforeEach(() => {
- mockedState.featureFlags = getRequestData.feature_flags.map(f => ({
+ mockedState.featureFlags = getRequestData.feature_flags.map((f) => ({
...f,
scopes: mapToScopesViewModel(f.scopes || []),
}));
});
- it('commits UPDATE_FEATURE_FLAG with the given flag', done => {
+ it('commits UPDATE_FEATURE_FLAG with the given flag', (done) => {
testAction(
updateFeatureFlag,
featureFlag,
@@ -436,13 +436,13 @@ describe('Feature flags actions', () => {
});
describe('receiveUpdateFeatureFlagSuccess', () => {
beforeEach(() => {
- mockedState.featureFlags = getRequestData.feature_flags.map(f => ({
+ mockedState.featureFlags = getRequestData.feature_flags.map((f) => ({
...f,
scopes: mapToScopesViewModel(f.scopes || []),
}));
});
- it('commits RECEIVE_UPDATE_FEATURE_FLAG_SUCCESS with the given flag', done => {
+ it('commits RECEIVE_UPDATE_FEATURE_FLAG_SUCCESS with the given flag', (done) => {
testAction(
receiveUpdateFeatureFlagSuccess,
featureFlag,
@@ -460,13 +460,13 @@ describe('Feature flags actions', () => {
});
describe('receiveUpdateFeatureFlagError', () => {
beforeEach(() => {
- mockedState.featureFlags = getRequestData.feature_flags.map(f => ({
+ mockedState.featureFlags = getRequestData.feature_flags.map((f) => ({
...f,
scopes: mapToScopesViewModel(f.scopes || []),
}));
});
- it('commits RECEIVE_UPDATE_FEATURE_FLAG_ERROR with the given flag id', done => {
+ it('commits RECEIVE_UPDATE_FEATURE_FLAG_ERROR with the given flag id', (done) => {
testAction(
receiveUpdateFeatureFlagError,
featureFlag.id,
@@ -492,7 +492,7 @@ describe('Feature flags actions', () => {
Api.deleteFeatureFlagUserList.mockResolvedValue();
});
- it('should refresh the user lists', done => {
+ it('should refresh the user lists', (done) => {
testAction(
deleteUserList,
userList,
@@ -509,7 +509,7 @@ describe('Feature flags actions', () => {
Api.deleteFeatureFlagUserList.mockRejectedValue({ response: { data: 'some error' } });
});
- it('should dispatch receiveDeleteUserListError', done => {
+ it('should dispatch receiveDeleteUserListError', (done) => {
testAction(
deleteUserList,
userList,
@@ -529,7 +529,7 @@ describe('Feature flags actions', () => {
});
describe('receiveDeleteUserListError', () => {
- it('should commit RECEIVE_DELETE_USER_LIST_ERROR with the given list', done => {
+ it('should commit RECEIVE_DELETE_USER_LIST_ERROR with the given list', (done) => {
testAction(
receiveDeleteUserListError,
{ list: userList, error: 'mock error' },
@@ -547,7 +547,7 @@ describe('Feature flags actions', () => {
});
describe('clearAlert', () => {
- it('should commit RECEIVE_CLEAR_ALERT', done => {
+ it('should commit RECEIVE_CLEAR_ALERT', (done) => {
const alertIndex = 3;
testAction(
diff --git a/spec/frontend/feature_flags/store/index/mutations_spec.js b/spec/frontend/feature_flags/store/index/mutations_spec.js
index 376c7b069fa..eec56800a8d 100644
--- a/spec/frontend/feature_flags/store/index/mutations_spec.js
+++ b/spec/frontend/feature_flags/store/index/mutations_spec.js
@@ -50,7 +50,7 @@ describe('Feature flags store Mutations', () => {
});
it('should set featureFlags with the transformed data', () => {
- const expected = getRequestData.feature_flags.map(flag => ({
+ const expected = getRequestData.feature_flags.map((flag) => ({
...flag,
scopes: mapToScopesViewModel(flag.scopes || []),
}));
@@ -183,7 +183,7 @@ describe('Feature flags store Mutations', () => {
describe('UPDATE_FEATURE_FLAG', () => {
beforeEach(() => {
- stateCopy.featureFlags = getRequestData.feature_flags.map(flag => ({
+ stateCopy.featureFlags = getRequestData.feature_flags.map((flag) => ({
...flag,
scopes: mapToScopesViewModel(flag.scopes || []),
}));
@@ -209,7 +209,7 @@ describe('Feature flags store Mutations', () => {
describe('RECEIVE_UPDATE_FEATURE_FLAG_SUCCESS', () => {
const runUpdate = (stateCount, flagState, featureFlagUpdateParams) => {
- stateCopy.featureFlags = getRequestData.feature_flags.map(flag => ({
+ stateCopy.featureFlags = getRequestData.feature_flags.map((flag) => ({
...flag,
...flagState,
scopes: mapToScopesViewModel(flag.scopes || []),
@@ -237,7 +237,7 @@ describe('Feature flags store Mutations', () => {
describe('RECEIVE_UPDATE_FEATURE_FLAG_ERROR', () => {
beforeEach(() => {
- stateCopy.featureFlags = getRequestData.feature_flags.map(flag => ({
+ stateCopy.featureFlags = getRequestData.feature_flags.map((flag) => ({
...flag,
scopes: mapToScopesViewModel(flag.scopes || []),
}));
diff --git a/spec/frontend/feature_flags/store/new/actions_spec.js b/spec/frontend/feature_flags/store/new/actions_spec.js
index 130c5235aa0..9168f11fdfb 100644
--- a/spec/frontend/feature_flags/store/new/actions_spec.js
+++ b/spec/frontend/feature_flags/store/new/actions_spec.js
@@ -59,7 +59,7 @@ describe('Feature flags New Module Actions', () => {
});
describe('success', () => {
- it('dispatches requestCreateFeatureFlag and receiveCreateFeatureFlagSuccess ', done => {
+ it('dispatches requestCreateFeatureFlag and receiveCreateFeatureFlagSuccess ', (done) => {
const convertedActionParams = mapFromScopesViewModel(actionParams);
mock.onPost(`${TEST_HOST}/endpoint.json`, convertedActionParams).replyOnce(200);
@@ -81,7 +81,7 @@ describe('Feature flags New Module Actions', () => {
);
});
- it('sends strategies for new style feature flags', done => {
+ it('sends strategies for new style feature flags', (done) => {
const newVersionFlagParams = {
name: 'name',
description: 'description',
@@ -120,7 +120,7 @@ describe('Feature flags New Module Actions', () => {
});
describe('error', () => {
- it('dispatches requestCreateFeatureFlag and receiveCreateFeatureFlagError ', done => {
+ it('dispatches requestCreateFeatureFlag and receiveCreateFeatureFlagError ', (done) => {
const convertedActionParams = mapFromScopesViewModel(actionParams);
mock
@@ -148,7 +148,7 @@ describe('Feature flags New Module Actions', () => {
});
describe('requestCreateFeatureFlag', () => {
- it('should commit REQUEST_CREATE_FEATURE_FLAG mutation', done => {
+ it('should commit REQUEST_CREATE_FEATURE_FLAG mutation', (done) => {
testAction(
requestCreateFeatureFlag,
null,
@@ -161,7 +161,7 @@ describe('Feature flags New Module Actions', () => {
});
describe('receiveCreateFeatureFlagSuccess', () => {
- it('should commit RECEIVE_CREATE_FEATURE_FLAG_SUCCESS mutation', done => {
+ it('should commit RECEIVE_CREATE_FEATURE_FLAG_SUCCESS mutation', (done) => {
testAction(
receiveCreateFeatureFlagSuccess,
null,
@@ -178,7 +178,7 @@ describe('Feature flags New Module Actions', () => {
});
describe('receiveCreateFeatureFlagError', () => {
- it('should commit RECEIVE_CREATE_FEATURE_FLAG_ERROR mutation', done => {
+ it('should commit RECEIVE_CREATE_FEATURE_FLAG_ERROR mutation', (done) => {
testAction(
receiveCreateFeatureFlagError,
'There was an error',
diff --git a/spec/frontend/feature_highlight/feature_highlight_helper_spec.js b/spec/frontend/feature_highlight/feature_highlight_helper_spec.js
index 2c3c3e3267a..beae5041156 100644
--- a/spec/frontend/feature_highlight/feature_highlight_helper_spec.js
+++ b/spec/frontend/feature_highlight/feature_highlight_helper_spec.js
@@ -44,7 +44,7 @@ describe('feature highlight helper', () => {
});
describe('inserted', () => {
- it('registers click event callback', done => {
+ it('registers click event callback', (done) => {
const context = {
getAttribute: () => 'popoverId',
dataset: {
@@ -52,7 +52,7 @@ describe('feature highlight helper', () => {
},
};
- jest.spyOn($.fn, 'on').mockImplementation(event => {
+ jest.spyOn($.fn, 'on').mockImplementation((event) => {
expect(event).toEqual('click');
done();
});
diff --git a/spec/frontend/filterable_list_spec.js b/spec/frontend/filterable_list_spec.js
index 67d18611661..8c6a71abad7 100644
--- a/spec/frontend/filterable_list_spec.js
+++ b/spec/frontend/filterable_list_spec.js
@@ -1,5 +1,5 @@
+import { getJSONFixture, setHTMLFixture } from 'helpers/fixtures';
import FilterableList from '~/filterable_list';
-import { getJSONFixture, setHTMLFixture } from './helpers/fixtures';
describe('FilterableList', () => {
let List;
diff --git a/spec/frontend/filtered_search/components/recent_searches_dropdown_content_spec.js b/spec/frontend/filtered_search/components/recent_searches_dropdown_content_spec.js
index 158f70f7d47..f1cff02261d 100644
--- a/spec/frontend/filtered_search/components/recent_searches_dropdown_content_spec.js
+++ b/spec/frontend/filtered_search/components/recent_searches_dropdown_content_spec.js
@@ -10,7 +10,7 @@ describe('Recent Searches Dropdown Content', () => {
const findDropdownItems = () => wrapper.findAll({ ref: 'dropdownItem' });
const findDropdownNote = () => wrapper.find({ ref: 'dropdownNote' });
- const createComponent = props => {
+ const createComponent = (props) => {
wrapper = shallowMount(RecentSearchesDropdownContent, {
propsData: {
allowedKeys: IssuableFilteredSearchTokenKeys.getKeys(),
@@ -84,18 +84,11 @@ describe('Recent Searches Dropdown Content', () => {
});
it('expect second dropdown to have 2 tokens', () => {
- expect(
- findDropdownItems()
- .at(1)
- .findAll('.js-dropdown-token'),
- ).toHaveLength(2);
+ expect(findDropdownItems().at(1).findAll('.js-dropdown-token')).toHaveLength(2);
});
it('emits recentSearchesItemSelected on dropdown item click', () => {
- findDropdownItems()
- .at(0)
- .find('.js-dropdown-button')
- .trigger('click');
+ findDropdownItems().at(0).find('.js-dropdown-button').trigger('click');
expect(onRecentSearchesItemSelectedSpy).toHaveBeenCalledWith('foo');
});
diff --git a/spec/frontend/filtered_search/dropdown_utils_spec.js b/spec/frontend/filtered_search/dropdown_utils_spec.js
index 3320b6b0942..32d1f909d0b 100644
--- a/spec/frontend/filtered_search/dropdown_utils_spec.js
+++ b/spec/frontend/filtered_search/dropdown_utils_spec.js
@@ -1,7 +1,7 @@
+import FilteredSearchSpecHelper from 'helpers/filtered_search_spec_helper';
import DropdownUtils from '~/filtered_search/dropdown_utils';
import FilteredSearchDropdownManager from '~/filtered_search/filtered_search_dropdown_manager';
import IssuableFilteredSearchTokenKeys from '~/filtered_search/issuable_filtered_search_token_keys';
-import FilteredSearchSpecHelper from '../helpers/filtered_search_spec_helper';
describe('Dropdown Utils', () => {
const issueListFixture = 'issues/issue_list.html';
diff --git a/spec/frontend/filtered_search/filtered_search_manager_spec.js b/spec/frontend/filtered_search/filtered_search_manager_spec.js
index b1c299ba91f..def0aa14349 100644
--- a/spec/frontend/filtered_search/filtered_search_manager_spec.js
+++ b/spec/frontend/filtered_search/filtered_search_manager_spec.js
@@ -1,5 +1,6 @@
import FilteredSearchManager from 'ee_else_ce/filtered_search/filtered_search_manager';
+import FilteredSearchSpecHelper from 'helpers/filtered_search_spec_helper';
import RecentSearchesService from '~/filtered_search/services/recent_searches_service';
import RecentSearchesServiceError from '~/filtered_search/services/recent_searches_service_error';
import RecentSearchesRoot from '~/filtered_search/recent_searches_root';
@@ -7,7 +8,6 @@ import IssuableFilteredSearchTokenKeys from '~/filtered_search/issuable_filtered
import DropdownUtils from '~/filtered_search/dropdown_utils';
import FilteredSearchVisualTokens from '~/filtered_search/filtered_search_visual_tokens';
import FilteredSearchDropdownManager from '~/filtered_search/filtered_search_dropdown_manager';
-import FilteredSearchSpecHelper from '../helpers/filtered_search_spec_helper';
import { BACKSPACE_KEY_CODE, DELETE_KEY_CODE } from '~/lib/utils/keycodes';
import { visitUrl } from '~/lib/utils/url_utility';
import * as commonUtils from '~/lib/utils/common_utils';
@@ -188,11 +188,11 @@ describe('Filtered Search Manager', () => {
const defaultParams = '?scope=all&utf8=%E2%9C%93';
const defaultState = '&state=opened';
- it('should search with a single word', done => {
+ it('should search with a single word', (done) => {
initializeManager();
input.value = 'searchTerm';
- visitUrl.mockImplementation(url => {
+ visitUrl.mockImplementation((url) => {
expect(url).toEqual(`${defaultParams}&search=searchTerm`);
done();
});
@@ -200,11 +200,11 @@ describe('Filtered Search Manager', () => {
manager.search();
});
- it('sets default state', done => {
+ it('sets default state', (done) => {
initializeManager({ useDefaultState: true });
input.value = 'searchTerm';
- visitUrl.mockImplementation(url => {
+ visitUrl.mockImplementation((url) => {
expect(url).toEqual(`${defaultParams}${defaultState}&search=searchTerm`);
done();
});
@@ -212,11 +212,11 @@ describe('Filtered Search Manager', () => {
manager.search();
});
- it('should search with multiple words', done => {
+ it('should search with multiple words', (done) => {
initializeManager();
input.value = 'awesome search terms';
- visitUrl.mockImplementation(url => {
+ visitUrl.mockImplementation((url) => {
expect(url).toEqual(`${defaultParams}&search=awesome+search+terms`);
done();
});
@@ -224,11 +224,11 @@ describe('Filtered Search Manager', () => {
manager.search();
});
- it('should search with special characters', done => {
+ it('should search with special characters', (done) => {
initializeManager();
input.value = '~!@#$%^&*()_+{}:<>,.?/';
- visitUrl.mockImplementation(url => {
+ visitUrl.mockImplementation((url) => {
expect(url).toEqual(
`${defaultParams}&search=~!%40%23%24%25%5E%26*()_%2B%7B%7D%3A%3C%3E%2C.%3F%2F`,
);
@@ -238,13 +238,13 @@ describe('Filtered Search Manager', () => {
manager.search();
});
- it('should use replacement URL for condition', done => {
+ it('should use replacement URL for condition', (done) => {
initializeManager();
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(
FilteredSearchSpecHelper.createFilterVisualTokenHTML('milestone', '=', '13', true),
);
- visitUrl.mockImplementation(url => {
+ visitUrl.mockImplementation((url) => {
expect(url).toEqual(`${defaultParams}&milestone_title=replaced`);
done();
});
@@ -259,14 +259,14 @@ describe('Filtered Search Manager', () => {
manager.search();
});
- it('removes duplicated tokens', done => {
+ it('removes duplicated tokens', (done) => {
initializeManager();
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(`
${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '=', '~bug')}
${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '=', '~bug')}
`);
- visitUrl.mockImplementation(url => {
+ visitUrl.mockImplementation((url) => {
expect(url).toEqual(`${defaultParams}&label_name[]=bug`);
done();
});
@@ -605,7 +605,7 @@ describe('Filtered Search Manager', () => {
it('correctly modifies params when custom modifier is passed', () => {
const modifedParams = manager.getAllParams.call(
{
- modifyUrlParams: params => params.reverse(),
+ modifyUrlParams: (params) => params.reverse(),
},
[].concat(paramsArr),
);
diff --git a/spec/frontend/filtered_search/filtered_search_token_keys_spec.js b/spec/frontend/filtered_search/filtered_search_token_keys_spec.js
index f24d2b118c2..da6d9ac3ff2 100644
--- a/spec/frontend/filtered_search/filtered_search_token_keys_spec.js
+++ b/spec/frontend/filtered_search/filtered_search_token_keys_spec.js
@@ -33,7 +33,7 @@ describe('Filtered Search Token Keys', () => {
describe('getKeys', () => {
it('should return keys', () => {
const getKeys = new FilteredSearchTokenKeys(tokenKeys).getKeys();
- const keys = new FilteredSearchTokenKeys(tokenKeys).get().map(i => i.key);
+ const keys = new FilteredSearchTokenKeys(tokenKeys).get().map((i) => i.key);
keys.forEach((key, i) => {
expect(key).toEqual(getKeys[i]);
diff --git a/spec/frontend/filtered_search/filtered_search_visual_tokens_spec.js b/spec/frontend/filtered_search/filtered_search_visual_tokens_spec.js
index 6a00065c9fe..44f67f269a2 100644
--- a/spec/frontend/filtered_search/filtered_search_visual_tokens_spec.js
+++ b/spec/frontend/filtered_search/filtered_search_visual_tokens_spec.js
@@ -1,13 +1,13 @@
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
+import FilteredSearchSpecHelper from 'helpers/filtered_search_spec_helper';
import FilteredSearchVisualTokens from '~/filtered_search/filtered_search_visual_tokens';
-import FilteredSearchSpecHelper from '../helpers/filtered_search_spec_helper';
describe('Filtered Search Visual Tokens', () => {
let mock;
const subject = FilteredSearchVisualTokens;
- const findElements = tokenElement => {
+ const findElements = (tokenElement) => {
const tokenNameElement = tokenElement.querySelector('.name');
const tokenOperatorElement = tokenElement.querySelector('.operator');
const tokenValueContainer = tokenElement.querySelector('.value-container');
diff --git a/spec/frontend/filtered_search/issues_filtered_search_token_keys_spec.js b/spec/frontend/filtered_search/issues_filtered_search_token_keys_spec.js
index c7be900ba2c..2041bc3d959 100644
--- a/spec/frontend/filtered_search/issues_filtered_search_token_keys_spec.js
+++ b/spec/frontend/filtered_search/issues_filtered_search_token_keys_spec.js
@@ -23,7 +23,7 @@ describe('Issues Filtered Search Token Keys', () => {
});
it('should return assignee as a string', () => {
- const assignee = tokenKeys.find(tokenKey => tokenKey.key === 'assignee');
+ const assignee = tokenKeys.find((tokenKey) => tokenKey.key === 'assignee');
expect(assignee.type).toEqual('string');
});
@@ -32,7 +32,7 @@ describe('Issues Filtered Search Token Keys', () => {
describe('getKeys', () => {
it('should return keys', () => {
const getKeys = IssuableFilteredSearchTokenKeys.getKeys();
- const keys = IssuableFilteredSearchTokenKeys.get().map(i => i.key);
+ const keys = IssuableFilteredSearchTokenKeys.get().map((i) => i.key);
keys.forEach((key, i) => {
expect(key).toEqual(getKeys[i]);
@@ -145,4 +145,25 @@ describe('Issues Filtered Search Token Keys', () => {
expect(result).toEqual(conditions[0]);
});
});
+
+ describe('removeTokensForKeys', () => {
+ let initTokenKeys;
+
+ beforeEach(() => {
+ initTokenKeys = [...IssuableFilteredSearchTokenKeys.get()];
+ });
+
+ it('should remove the tokenKeys corresponding to the given keys', () => {
+ const [firstTokenKey, secondTokenKey, ...restTokens] = initTokenKeys;
+ IssuableFilteredSearchTokenKeys.removeTokensForKeys(firstTokenKey.key, secondTokenKey.key);
+
+ expect(IssuableFilteredSearchTokenKeys.get()).toEqual(restTokens);
+ });
+
+ it('should do nothing when key is not found', () => {
+ IssuableFilteredSearchTokenKeys.removeTokensForKeys('bogus');
+
+ expect(IssuableFilteredSearchTokenKeys.get()).toEqual(initTokenKeys);
+ });
+ });
});
diff --git a/spec/frontend/filtered_search/recent_searches_root_spec.js b/spec/frontend/filtered_search/recent_searches_root_spec.js
index 281d406e013..6bb9e68d591 100644
--- a/spec/frontend/filtered_search/recent_searches_root_spec.js
+++ b/spec/frontend/filtered_search/recent_searches_root_spec.js
@@ -16,7 +16,7 @@ describe('RecentSearchesRoot', () => {
},
};
- Vue.mockImplementation(options => {
+ Vue.mockImplementation((options) => {
({ data, template } = options);
});
diff --git a/spec/frontend/filtered_search/services/recent_searches_service_spec.js b/spec/frontend/filtered_search/services/recent_searches_service_spec.js
index afeca54b949..6711ce03d40 100644
--- a/spec/frontend/filtered_search/services/recent_searches_service_spec.js
+++ b/spec/frontend/filtered_search/services/recent_searches_service_spec.js
@@ -18,49 +18,49 @@ describe('RecentSearchesService', () => {
jest.spyOn(RecentSearchesService, 'isAvailable').mockReturnValue(true);
});
- it('should default to empty array', done => {
+ it('should default to empty array', (done) => {
const fetchItemsPromise = service.fetch();
fetchItemsPromise
- .then(items => {
+ .then((items) => {
expect(items).toEqual([]);
})
.then(done)
.catch(done.fail);
});
- it('should reject when unable to parse', done => {
+ it('should reject when unable to parse', (done) => {
jest.spyOn(localStorage, 'getItem').mockReturnValue('fail');
const fetchItemsPromise = service.fetch();
fetchItemsPromise
.then(done.fail)
- .catch(error => {
+ .catch((error) => {
expect(error).toEqual(expect.any(SyntaxError));
})
.then(done)
.catch(done.fail);
});
- it('should reject when service is unavailable', done => {
+ it('should reject when service is unavailable', (done) => {
RecentSearchesService.isAvailable.mockReturnValue(false);
service
.fetch()
.then(done.fail)
- .catch(error => {
+ .catch((error) => {
expect(error).toEqual(expect.any(Error));
})
.then(done)
.catch(done.fail);
});
- it('should return items from localStorage', done => {
+ it('should return items from localStorage', (done) => {
jest.spyOn(localStorage, 'getItem').mockReturnValue('["foo", "bar"]');
const fetchItemsPromise = service.fetch();
fetchItemsPromise
- .then(items => {
+ .then((items) => {
expect(items).toEqual(['foo', 'bar']);
})
.then(done)
@@ -74,11 +74,11 @@ describe('RecentSearchesService', () => {
jest.spyOn(Storage.prototype, 'getItem').mockImplementation(() => {});
});
- it('should not call .getItem', done => {
+ it('should not call .getItem', (done) => {
RecentSearchesService.prototype
.fetch()
.then(done.fail)
- .catch(err => {
+ .catch((err) => {
expect(err).toEqual(new RecentSearchesServiceError());
expect(localStorage.getItem).not.toHaveBeenCalled();
})
diff --git a/spec/frontend/filtered_search/visual_token_value_spec.js b/spec/frontend/filtered_search/visual_token_value_spec.js
index e2855b29b70..2a76c4a27df 100644
--- a/spec/frontend/filtered_search/visual_token_value_spec.js
+++ b/spec/frontend/filtered_search/visual_token_value_spec.js
@@ -1,13 +1,13 @@
import { escape } from 'lodash';
-import { TEST_HOST } from 'jest/helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
+import FilteredSearchSpecHelper from 'helpers/filtered_search_spec_helper';
import VisualTokenValue from '~/filtered_search/visual_token_value';
import AjaxCache from '~/lib/utils/ajax_cache';
import UsersCache from '~/lib/utils/users_cache';
import DropdownUtils from '~/filtered_search//dropdown_utils';
-import FilteredSearchSpecHelper from '../helpers/filtered_search_spec_helper';
describe('Filtered Search Visual Tokens', () => {
- const findElements = tokenElement => {
+ const findElements = (tokenElement) => {
const tokenNameElement = tokenElement.querySelector('.name');
const tokenValueContainer = tokenElement.querySelector('.value-container');
const tokenValueElement = tokenValueContainer.querySelector('.value');
@@ -39,15 +39,15 @@ describe('Filtered Search Visual Tokens', () => {
let usersCacheSpy;
beforeEach(() => {
- jest.spyOn(UsersCache, 'retrieve').mockImplementation(username => usersCacheSpy(username));
+ jest.spyOn(UsersCache, 'retrieve').mockImplementation((username) => usersCacheSpy(username));
});
- it('ignores error if UsersCache throws', done => {
+ it('ignores error if UsersCache throws', (done) => {
jest.spyOn(window, 'Flash').mockImplementation(() => {});
const dummyError = new Error('Earth rotated backwards');
const { subject, tokenValueContainer, tokenValueElement } = findElements(authorToken);
const tokenValue = tokenValueElement.innerText;
- usersCacheSpy = username => {
+ usersCacheSpy = (username) => {
expect(`@${username}`).toBe(tokenValue);
return Promise.reject(dummyError);
};
@@ -61,10 +61,10 @@ describe('Filtered Search Visual Tokens', () => {
.catch(done.fail);
});
- it('does nothing if user cannot be found', done => {
+ it('does nothing if user cannot be found', (done) => {
const { subject, tokenValueContainer, tokenValueElement } = findElements(authorToken);
const tokenValue = tokenValueElement.innerText;
- usersCacheSpy = username => {
+ usersCacheSpy = (username) => {
expect(`@${username}`).toBe(tokenValue);
return Promise.resolve(undefined);
};
@@ -78,14 +78,14 @@ describe('Filtered Search Visual Tokens', () => {
.catch(done.fail);
});
- it('replaces author token with avatar and display name', done => {
+ it('replaces author token with avatar and display name', (done) => {
const dummyUser = {
name: 'Important Person',
avatar_url: 'https://host.invalid/mypics/avatar.png',
};
const { subject, tokenValueContainer, tokenValueElement } = findElements(authorToken);
const tokenValue = tokenValueElement.innerText;
- usersCacheSpy = username => {
+ usersCacheSpy = (username) => {
expect(`@${username}`).toBe(tokenValue);
return Promise.resolve(dummyUser);
};
@@ -104,14 +104,14 @@ describe('Filtered Search Visual Tokens', () => {
.catch(done.fail);
});
- it('escapes user name when creating token', done => {
+ it('escapes user name when creating token', (done) => {
const dummyUser = {
name: '<script>',
avatar_url: `${TEST_HOST}/mypics/avatar.png`,
};
const { subject, tokenValueContainer, tokenValueElement } = findElements(authorToken);
const tokenValue = tokenValueElement.innerText;
- usersCacheSpy = username => {
+ usersCacheSpy = (username) => {
expect(`@${username}`).toBe(tokenValue);
return Promise.resolve(dummyUser);
};
@@ -168,7 +168,7 @@ describe('Filtered Search Visual Tokens', () => {
AjaxCache.internalStorage[`${filteredSearchInput.dataset.labelsEndpoint}.json`] = labelData;
});
- const parseColor = color => {
+ const parseColor = (color) => {
const dummyElement = document.createElement('div');
dummyElement.style.color = color;
return dummyElement.style.color;
@@ -180,10 +180,10 @@ describe('Filtered Search Visual Tokens', () => {
expect(tokenValueContainer.style.color).toBe(parseColor(label.text_color));
};
- const findLabel = tokenValue =>
- labelData.find(label => tokenValue === `~${DropdownUtils.getEscapedText(label.title)}`);
+ const findLabel = (tokenValue) =>
+ labelData.find((label) => tokenValue === `~${DropdownUtils.getEscapedText(label.title)}`);
- it('updates the color of a label token', done => {
+ it('updates the color of a label token', (done) => {
const { subject, tokenValueContainer, tokenValueElement } = findElements(bugLabelToken);
const tokenValue = tokenValueElement.innerText;
const matchingLabel = findLabel(tokenValue);
@@ -197,7 +197,7 @@ describe('Filtered Search Visual Tokens', () => {
.catch(done.fail);
});
- it('updates the color of a label token with spaces', done => {
+ it('updates the color of a label token with spaces', (done) => {
const { subject, tokenValueContainer, tokenValueElement } = findElements(spaceLabelToken);
const tokenValue = tokenValueElement.innerText;
const matchingLabel = findLabel(tokenValue);
@@ -211,7 +211,7 @@ describe('Filtered Search Visual Tokens', () => {
.catch(done.fail);
});
- it('does not change color of a missing label', done => {
+ it('does not change color of a missing label', (done) => {
const { subject, tokenValueContainer, tokenValueElement } = findElements(missingLabelToken);
const tokenValue = tokenValueElement.innerText;
const matchingLabel = findLabel(tokenValue);
@@ -260,7 +260,7 @@ describe('Filtered Search Visual Tokens', () => {
});
describe('render', () => {
- const setupSpies = subject => {
+ const setupSpies = (subject) => {
jest.spyOn(subject, 'updateLabelTokenColor').mockImplementation(() => {});
const updateLabelTokenColorSpy = subject.updateLabelTokenColor;
diff --git a/spec/frontend/fixtures/static/create_item_dropdown.html b/spec/frontend/fixtures/static/create_item_dropdown.html
index aac7d3397ce..63e342d5d0c 100644
--- a/spec/frontend/fixtures/static/create_item_dropdown.html
+++ b/spec/frontend/fixtures/static/create_item_dropdown.html
@@ -7,21 +7,26 @@
data-toggle="dropdown"
>
<span class="dropdown-toggle-text ">some label</span
- ><i aria-hidden="true" data-hidden="true" class="fa fa-chevron-down"></i>
+ >
+ <svg data-testid="chevron-down-icon" class="gl-icon s16">
+ <use
+ href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#chevron-down"
+ ></use>
+ </svg>
</button>
<div class="dropdown-menu dropdown-select dropdown-menu-selectable">
<div class="dropdown-input">
- <input type="search" id="" class="dropdown-input-field" autocomplete="off" /><i
- aria-hidden="true"
- data-hidden="true"
- class="fa fa-search dropdown-input-search"
- ></i
- ><i
- aria-hidden="true"
- data-hidden="true"
- role="button"
- class="fa fa-times dropdown-input-clear js-dropdown-input-clear"
- ></i>
+ <input type="search" id="" class="dropdown-input-field" autocomplete="off" />
+ <svg data-testid="search-icon" class="gl-icon s16 dropdown-input-search">
+ <use
+ href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#search"
+ ></use>
+ </svg>
+ <svg data-testid="close-icon" class="gl-icon s16 dropdown-input-clear js-dropdown-input-clear">
+ <use
+ href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#close"
+ ></use>
+ </svg>
</div>
<div class="dropdown-content js-dropdown-content"></div>
<div class="dropdown-footer">
diff --git a/spec/frontend/fixtures/static/deprecated_jquery_dropdown.html b/spec/frontend/fixtures/static/deprecated_jquery_dropdown.html
index 41e7170b5c6..0f243653d46 100644
--- a/spec/frontend/fixtures/static/deprecated_jquery_dropdown.html
+++ b/spec/frontend/fixtures/static/deprecated_jquery_dropdown.html
@@ -9,7 +9,11 @@
<div class="dropdown-toggle-text">
Projects
</div>
- <i class="fa fa-chevron-down dropdown-toggle-caret js-projects-dropdown-toggle"></i>
+ <svg data-testid="chevron-down-icon" class="gl-icon s16 dropdown-toggle-caret js-projects-dropdown-toggle">
+ <use
+ href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#chevron-down"
+ ></use>
+ </svg>
</button>
<div class="dropdown-menu dropdown-select dropdown-menu-selectable">
<div class="dropdown-title gl-display-flex gl-align-items-center">
@@ -28,7 +32,11 @@
</div>
<div class="dropdown-input">
<input class="dropdown-input-field" placeholder="Filter results" type="search" />
- <i class="fa fa-search dropdown-input-search"></i>
+ <svg data-testid="search-icon" class="gl-icon s16 dropdown-input-search">
+ <use
+ href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#search"
+ ></use>
+ </svg>
</div>
<div class="dropdown-content"></div>
<div class="dropdown-loading">
diff --git a/spec/frontend/fixtures/static/line_highlighter.html b/spec/frontend/fixtures/static/line_highlighter.html
index 897a25d6760..1667097bc3b 100644
--- a/spec/frontend/fixtures/static/line_highlighter.html
+++ b/spec/frontend/fixtures/static/line_highlighter.html
@@ -2,103 +2,153 @@
<div class="file-content">
<div class="line-numbers">
<a data-line-number="1" href="#L1" id="L1">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
1
</a>
<a data-line-number="2" href="#L2" id="L2">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
2
</a>
<a data-line-number="3" href="#L3" id="L3">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
3
</a>
<a data-line-number="4" href="#L4" id="L4">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
4
</a>
<a data-line-number="5" href="#L5" id="L5">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
5
</a>
<a data-line-number="6" href="#L6" id="L6">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
6
</a>
<a data-line-number="7" href="#L7" id="L7">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
7
</a>
<a data-line-number="8" href="#L8" id="L8">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
8
</a>
<a data-line-number="9" href="#L9" id="L9">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
9
</a>
<a data-line-number="10" href="#L10" id="L10">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
10
</a>
<a data-line-number="11" href="#L11" id="L11">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
11
</a>
<a data-line-number="12" href="#L12" id="L12">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
12
</a>
<a data-line-number="13" href="#L13" id="L13">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
13
</a>
<a data-line-number="14" href="#L14" id="L14">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
14
</a>
<a data-line-number="15" href="#L15" id="L15">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
15
</a>
<a data-line-number="16" href="#L16" id="L16">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
16
</a>
<a data-line-number="17" href="#L17" id="L17">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
17
</a>
<a data-line-number="18" href="#L18" id="L18">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
18
</a>
<a data-line-number="19" href="#L19" id="L19">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
19
</a>
<a data-line-number="20" href="#L20" id="L20">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
20
</a>
<a data-line-number="21" href="#L21" id="L21">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
21
</a>
<a data-line-number="22" href="#L22" id="L22">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
22
</a>
<a data-line-number="23" href="#L23" id="L23">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
23
</a>
<a data-line-number="24" href="#L24" id="L24">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
24
</a>
<a data-line-number="25" href="#L25" id="L25">
-<i class="fa fa-link"></i>
+<svg data-testid="link-icon" class="s12">
+<use href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#link"></use>
+</svg>
25
</a>
</div>
diff --git a/spec/frontend/fixtures/static/project_select_combo_button.html b/spec/frontend/fixtures/static/project_select_combo_button.html
index f13f9075706..444e0bc84a2 100644
--- a/spec/frontend/fixtures/static/project_select_combo_button.html
+++ b/spec/frontend/fixtures/static/project_select_combo_button.html
@@ -4,6 +4,10 @@
<span class="gl-spinner"></span>
</a>
<a class="new-project-item-select-button">
- <i class="fa fa-caret-down"></i>
+ <svg data-testid="chevron-down-icon" class="gl-icon s16">
+ <use
+ href="/assets/icons-795a2ef2fd636a0538bbef3b8d2787dd90927b42d7617fdda8620930016b333d.svg#chevron-down"
+ ></use>
+ </svg>
</a>
</div>
diff --git a/spec/frontend/flash_spec.js b/spec/frontend/flash_spec.js
index a37d57b03fd..228c897ab00 100644
--- a/spec/frontend/flash_spec.js
+++ b/spec/frontend/flash_spec.js
@@ -325,7 +325,7 @@ describe('Flash', () => {
`;
});
- it('removes global flash on click', done => {
+ it('removes global flash on click', (done) => {
const flashEl = document.querySelector('.flash');
removeFlashClickListener(flashEl, false);
diff --git a/spec/frontend/frequent_items/components/app_spec.js b/spec/frontend/frequent_items/components/app_spec.js
index 439a410eaa1..b74e4ac45cf 100644
--- a/spec/frontend/frequent_items/components/app_spec.js
+++ b/spec/frontend/frequent_items/components/app_spec.js
@@ -65,7 +65,7 @@ describe('Frequent Items App Component', () => {
storage[storageKey] = value;
});
- localStorage.getItem.mockImplementation(storageKey => {
+ localStorage.getItem.mockImplementation((storageKey) => {
if (storage[storageKey]) {
return storage[storageKey];
}
@@ -160,7 +160,7 @@ describe('Frequent Items App Component', () => {
});
describe('created', () => {
- it('should bind event listeners on eventHub', done => {
+ it('should bind event listeners on eventHub', (done) => {
jest.spyOn(eventHub, '$on').mockImplementation(() => {});
createComponentWithStore().$mount();
@@ -173,7 +173,7 @@ describe('Frequent Items App Component', () => {
});
describe('beforeDestroy', () => {
- it('should unbind event listeners on eventHub', done => {
+ it('should unbind event listeners on eventHub', (done) => {
jest.spyOn(eventHub, '$off').mockImplementation(() => {});
vm.$mount();
@@ -191,7 +191,7 @@ describe('Frequent Items App Component', () => {
expect(vm.$el.querySelector('.search-input-container')).toBeDefined();
});
- it('should render loading animation', done => {
+ it('should render loading animation', (done) => {
vm.$store.dispatch('fetchSearchedItems');
Vue.nextTick(() => {
@@ -204,7 +204,7 @@ describe('Frequent Items App Component', () => {
});
});
- it('should render frequent projects list header', done => {
+ it('should render frequent projects list header', (done) => {
Vue.nextTick(() => {
const sectionHeaderEl = vm.$el.querySelector('.section-header');
@@ -214,7 +214,7 @@ describe('Frequent Items App Component', () => {
});
});
- it('should render frequent projects list', done => {
+ it('should render frequent projects list', (done) => {
const expectedResult = getTopFrequentItems(mockFrequentProjects);
localStorage.getItem.mockImplementation(() => JSON.stringify(mockFrequentProjects));
@@ -229,7 +229,7 @@ describe('Frequent Items App Component', () => {
});
});
- it('should render searched projects list', done => {
+ it('should render searched projects list', (done) => {
mock.onGet(/\/api\/v4\/projects.json(.*)$/).replyOnce(200, mockSearchedProjects);
expect(vm.$el.querySelectorAll('.frequent-items-list-container li').length).toBe(1);
diff --git a/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js b/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js
index 1160ed5c84b..19095c4474e 100644
--- a/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js
+++ b/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js
@@ -114,7 +114,7 @@ describe('FrequentItemsListItemComponent', () => {
const link = wrapper.find('a');
// NOTE: this listener is required to prevent the click from going through and causing:
// `Error: Not implemented: navigation ...`
- link.element.addEventListener('click', e => {
+ link.element.addEventListener('click', (e) => {
e.preventDefault();
});
link.trigger('click');
diff --git a/spec/frontend/frequent_items/components/frequent_items_search_input_spec.js b/spec/frontend/frequent_items/components/frequent_items_search_input_spec.js
index f5e654e6bcb..cdd8b127676 100644
--- a/spec/frontend/frequent_items/components/frequent_items_search_input_spec.js
+++ b/spec/frontend/frequent_items/components/frequent_items_search_input_spec.js
@@ -46,7 +46,7 @@ describe('FrequentItemsSearchInputComponent', () => {
});
describe('mounted', () => {
- it('should listen `dropdownOpen` event', done => {
+ it('should listen `dropdownOpen` event', (done) => {
jest.spyOn(eventHub, '$on').mockImplementation(() => {});
const vmX = createComponent().vm;
@@ -61,7 +61,7 @@ describe('FrequentItemsSearchInputComponent', () => {
});
describe('beforeDestroy', () => {
- it('should unbind event listeners on eventHub', done => {
+ it('should unbind event listeners on eventHub', (done) => {
const vmX = createComponent().vm;
jest.spyOn(eventHub, '$off').mockImplementation(() => {});
diff --git a/spec/frontend/frequent_items/store/actions_spec.js b/spec/frontend/frequent_items/store/actions_spec.js
index 304098e85f1..351fde25f49 100644
--- a/spec/frontend/frequent_items/store/actions_spec.js
+++ b/spec/frontend/frequent_items/store/actions_spec.js
@@ -29,7 +29,7 @@ describe('Frequent Items Dropdown Store Actions', () => {
});
describe('setNamespace', () => {
- it('should set namespace', done => {
+ it('should set namespace', (done) => {
testAction(
actions.setNamespace,
mockNamespace,
@@ -42,7 +42,7 @@ describe('Frequent Items Dropdown Store Actions', () => {
});
describe('setStorageKey', () => {
- it('should set storage key', done => {
+ it('should set storage key', (done) => {
testAction(
actions.setStorageKey,
mockStorageKey,
@@ -55,7 +55,7 @@ describe('Frequent Items Dropdown Store Actions', () => {
});
describe('requestFrequentItems', () => {
- it('should request frequent items', done => {
+ it('should request frequent items', (done) => {
testAction(
actions.requestFrequentItems,
null,
@@ -68,7 +68,7 @@ describe('Frequent Items Dropdown Store Actions', () => {
});
describe('receiveFrequentItemsSuccess', () => {
- it('should set frequent items', done => {
+ it('should set frequent items', (done) => {
testAction(
actions.receiveFrequentItemsSuccess,
mockFrequentProjects,
@@ -81,7 +81,7 @@ describe('Frequent Items Dropdown Store Actions', () => {
});
describe('receiveFrequentItemsError', () => {
- it('should set frequent items error state', done => {
+ it('should set frequent items error state', (done) => {
testAction(
actions.receiveFrequentItemsError,
null,
@@ -94,7 +94,7 @@ describe('Frequent Items Dropdown Store Actions', () => {
});
describe('fetchFrequentItems', () => {
- it('should dispatch `receiveFrequentItemsSuccess`', done => {
+ it('should dispatch `receiveFrequentItemsSuccess`', (done) => {
mockedState.namespace = mockNamespace;
mockedState.storageKey = mockStorageKey;
@@ -108,7 +108,7 @@ describe('Frequent Items Dropdown Store Actions', () => {
);
});
- it('should dispatch `receiveFrequentItemsError`', done => {
+ it('should dispatch `receiveFrequentItemsError`', (done) => {
jest.spyOn(AccessorUtilities, 'isLocalStorageAccessSafe').mockReturnValue(false);
mockedState.namespace = mockNamespace;
mockedState.storageKey = mockStorageKey;
@@ -125,7 +125,7 @@ describe('Frequent Items Dropdown Store Actions', () => {
});
describe('requestSearchedItems', () => {
- it('should request searched items', done => {
+ it('should request searched items', (done) => {
testAction(
actions.requestSearchedItems,
null,
@@ -138,7 +138,7 @@ describe('Frequent Items Dropdown Store Actions', () => {
});
describe('receiveSearchedItemsSuccess', () => {
- it('should set searched items', done => {
+ it('should set searched items', (done) => {
testAction(
actions.receiveSearchedItemsSuccess,
mockSearchedProjects,
@@ -151,7 +151,7 @@ describe('Frequent Items Dropdown Store Actions', () => {
});
describe('receiveSearchedItemsError', () => {
- it('should set searched items error state', done => {
+ it('should set searched items error state', (done) => {
testAction(
actions.receiveSearchedItemsError,
null,
@@ -168,7 +168,7 @@ describe('Frequent Items Dropdown Store Actions', () => {
gon.api_version = 'v4';
});
- it('should dispatch `receiveSearchedItemsSuccess`', done => {
+ it('should dispatch `receiveSearchedItemsSuccess`', (done) => {
mock.onGet(/\/api\/v4\/projects.json(.*)$/).replyOnce(200, mockSearchedProjects, {});
testAction(
@@ -187,7 +187,7 @@ describe('Frequent Items Dropdown Store Actions', () => {
);
});
- it('should dispatch `receiveSearchedItemsError`', done => {
+ it('should dispatch `receiveSearchedItemsError`', (done) => {
gon.api_version = 'v4';
mock.onGet(/\/api\/v4\/projects.json(.*)$/).replyOnce(500);
@@ -203,7 +203,7 @@ describe('Frequent Items Dropdown Store Actions', () => {
});
describe('setSearchQuery', () => {
- it('should commit query and dispatch `fetchSearchedItems` when query is present', done => {
+ it('should commit query and dispatch `fetchSearchedItems` when query is present', (done) => {
testAction(
actions.setSearchQuery,
{ query: 'test' },
@@ -214,7 +214,7 @@ describe('Frequent Items Dropdown Store Actions', () => {
);
});
- it('should commit query and dispatch `fetchFrequentItems` when query is empty', done => {
+ it('should commit query and dispatch `fetchFrequentItems` when query is empty', (done) => {
testAction(
actions.setSearchQuery,
null,
diff --git a/spec/frontend/gfm_auto_complete_spec.js b/spec/frontend/gfm_auto_complete_spec.js
index eb9343847f1..c2ff66f6afc 100644
--- a/spec/frontend/gfm_auto_complete_spec.js
+++ b/spec/frontend/gfm_auto_complete_spec.js
@@ -8,7 +8,7 @@ import GfmAutoComplete, { membersBeforeSave } from 'ee_else_ce/gfm_auto_complete
import { TEST_HOST } from 'helpers/test_constants';
import { getJSONFixture } from 'helpers/fixtures';
-import waitForPromises from 'jest/helpers/wait_for_promises';
+import waitForPromises from 'helpers/wait_for_promises';
import MockAdapter from 'axios-mock-adapter';
import AjaxCache from '~/lib/utils/ajax_cache';
@@ -164,7 +164,7 @@ describe('GfmAutoComplete', () => {
});
});
- it.each([200, 500])('should set the loading state', async responseStatus => {
+ it.each([200, 500])('should set the loading state', async (responseStatus) => {
mock.onGet('vulnerabilities_autocomplete_url').replyOnce(responseStatus);
fetchData.call(context, {}, '[vulnerability:', 'query');
@@ -371,9 +371,9 @@ describe('GfmAutoComplete', () => {
const jointAllowedSymbols = allowedSymbols.join('');
describe('should match regular symbols', () => {
- flagsUseDefaultMatcher.forEach(flag => {
- allowedSymbols.forEach(symbol => {
- argumentSize.forEach(size => {
+ flagsUseDefaultMatcher.forEach((flag) => {
+ allowedSymbols.forEach((symbol) => {
+ argumentSize.forEach((size) => {
const query = new Array(size + 1).join(symbol);
const subtext = flag + query;
@@ -395,8 +395,8 @@ describe('GfmAutoComplete', () => {
const shouldNotBeFollowedBy = flags.concat(['\x00', '\x10', '\x3f', '\n', ' ']);
const shouldNotBePrependedBy = ['`'];
- flagsUseDefaultMatcher.forEach(atSign => {
- shouldNotBeFollowedBy.forEach(followedSymbol => {
+ flagsUseDefaultMatcher.forEach((atSign) => {
+ shouldNotBeFollowedBy.forEach((followedSymbol) => {
const seq = atSign + followedSymbol;
it(`should not match ${JSON.stringify(seq)}`, () => {
@@ -404,7 +404,7 @@ describe('GfmAutoComplete', () => {
});
});
- shouldNotBePrependedBy.forEach(prependedSymbol => {
+ shouldNotBePrependedBy.forEach((prependedSymbol) => {
const seq = prependedSymbol + atSign;
it(`should not match "${seq}"`, () => {
@@ -638,8 +638,8 @@ describe('GfmAutoComplete', () => {
};
const allLabels = labelsFixture;
- const assignedLabels = allLabels.filter(label => label.set);
- const unassignedLabels = allLabels.filter(label => !label.set);
+ const assignedLabels = allLabels.filter((label) => label.set);
+ const unassignedLabels = allLabels.filter((label) => !label.set);
let autocomplete;
let $textarea;
@@ -655,11 +655,8 @@ describe('GfmAutoComplete', () => {
autocomplete.destroy();
});
- const triggerDropdown = text => {
- $textarea
- .trigger('focus')
- .val(text)
- .caret('pos', -1);
+ const triggerDropdown = (text) => {
+ $textarea.trigger('focus').val(text).caret('pos', -1);
$textarea.trigger('keyup');
return new Promise(window.requestAnimationFrame);
@@ -668,12 +665,12 @@ describe('GfmAutoComplete', () => {
const getDropdownItems = () => {
const dropdown = document.getElementById('at-view-labels');
const items = dropdown.getElementsByTagName('li');
- return [].map.call(items, item => item.textContent.trim());
+ return [].map.call(items, (item) => item.textContent.trim());
};
const expectLabels = ({ input, output }) =>
triggerDropdown(input).then(() => {
- expect(getDropdownItems()).toEqual(output.map(label => label.title));
+ expect(getDropdownItems()).toEqual(output.map((label) => label.title));
});
describe('with no labels assigned', () => {
@@ -742,9 +739,9 @@ describe('GfmAutoComplete', () => {
});
describe.each`
- name | inputFormat | assert
- ${'insertTemplateFunction'} | ${name => ({ name })} | ${assertInserted}
- ${'templateFunction'} | ${name => name} | ${assertTemplated}
+ name | inputFormat | assert
+ ${'insertTemplateFunction'} | ${(name) => ({ name })} | ${assertInserted}
+ ${'templateFunction'} | ${(name) => name} | ${assertTemplated}
`('Emoji.$name', ({ name, inputFormat, assert }) => {
const execute = (accessor, input, emoji) =>
assert({
@@ -811,7 +808,7 @@ describe('GfmAutoComplete', () => {
const item = GfmAutoComplete.Emoji.templateFunction('heart')
.replace(/(<gl-emoji)\s+(data-name)/, '$1 $2')
- .replace(/>\s+|\s+</g, s => s.trim());
+ .replace(/>\s+|\s+</g, (s) => s.trim());
expect(item).toEqual(
`<li>${heart.name}<gl-emoji data-name="${heart.name}"></gl-emoji></li>`,
);
@@ -823,7 +820,7 @@ describe('GfmAutoComplete', () => {
const item = GfmAutoComplete.Emoji.templateFunction('star')
.replace(/(<gl-emoji)\s+(data-name)/, '$1 $2')
- .replace(/>\s+|\s+</g, s => s.trim());
+ .replace(/>\s+|\s+</g, (s) => s.trim());
expect(item).toEqual(`<li>${star.name}<gl-emoji data-name="${star.name}"></gl-emoji></li>`);
});
});
diff --git a/spec/frontend/gl_field_errors_spec.js b/spec/frontend/gl_field_errors_spec.js
index 078659bf429..a1737211252 100644
--- a/spec/frontend/gl_field_errors_spec.js
+++ b/spec/frontend/gl_field_errors_spec.js
@@ -33,7 +33,7 @@ describe('GL Style Field Errors', () => {
expect(customErrorElem.length).toBe(1);
- const customErrors = testContext.fieldErrors.state.inputs.filter(input => {
+ const customErrors = testContext.fieldErrors.state.inputs.filter((input) => {
return input.inputElement.hasClass(customErrorFlag);
});
@@ -41,18 +41,9 @@ describe('GL Style Field Errors', () => {
});
it('should not show any errors before submit attempt', () => {
- testContext.$form
- .find('.email')
- .val('not-a-valid-email')
- .keyup();
- testContext.$form
- .find('.text-required')
- .val('')
- .keyup();
- testContext.$form
- .find('.alphanumberic')
- .val('?---*')
- .keyup();
+ testContext.$form.find('.email').val('not-a-valid-email').keyup();
+ testContext.$form.find('.text-required').val('').keyup();
+ testContext.$form.find('.alphanumberic').val('?---*').keyup();
const errorsShown = testContext.$form.find('.gl-field-error-outline');
@@ -60,18 +51,9 @@ describe('GL Style Field Errors', () => {
});
it('should show errors when input valid is submitted', () => {
- testContext.$form
- .find('.email')
- .val('not-a-valid-email')
- .keyup();
- testContext.$form
- .find('.text-required')
- .val('')
- .keyup();
- testContext.$form
- .find('.alphanumberic')
- .val('?---*')
- .keyup();
+ testContext.$form.find('.email').val('not-a-valid-email').keyup();
+ testContext.$form.find('.text-required').val('').keyup();
+ testContext.$form.find('.alphanumberic').val('?---*').keyup();
testContext.$form.submit();
diff --git a/spec/frontend/gl_form_spec.js b/spec/frontend/gl_form_spec.js
index 52e1693f8a6..d9a01f7bcc1 100644
--- a/spec/frontend/gl_form_spec.js
+++ b/spec/frontend/gl_form_spec.js
@@ -8,7 +8,7 @@ describe('GLForm', () => {
const testContext = {};
describe('when instantiated', () => {
- beforeEach(done => {
+ beforeEach((done) => {
window.gl = window.gl || {};
testContext.form = $('<form class="gfm-form"><textarea class="js-gfm-input"></form>');
@@ -28,7 +28,7 @@ describe('GLForm', () => {
});
describe('setupAutosize', () => {
- beforeEach(done => {
+ beforeEach((done) => {
testContext.glForm.setupAutosize();
setImmediate(() => {
@@ -114,6 +114,26 @@ describe('GLForm', () => {
});
});
+ describe('autofocus', () => {
+ it('focus the textarea when autofocus is true', () => {
+ testContext.textarea.data('autofocus', true);
+ jest.spyOn($.prototype, 'focus');
+
+ testContext.glForm = new GLForm(testContext.form, false);
+
+ expect($.prototype.focus).toHaveBeenCalled();
+ });
+
+ it("doesn't focus the textarea when autofocus is false", () => {
+ testContext.textarea.data('autofocus', false);
+ jest.spyOn($.prototype, 'focus');
+
+ testContext.glForm = new GLForm(testContext.form, false);
+
+ expect($.prototype.focus).not.toHaveBeenCalled();
+ });
+ });
+
describe('supportsQuickActions', () => {
it('should return false if textarea does not support quick actions', () => {
const glForm = new GLForm(testContext.form, false);
diff --git a/spec/frontend/gpg_badges_spec.js b/spec/frontend/gpg_badges_spec.js
index 644b687aa19..7c1f83e577c 100644
--- a/spec/frontend/gpg_badges_spec.js
+++ b/spec/frontend/gpg_badges_spec.js
@@ -36,7 +36,7 @@ describe('GpgBadges', () => {
mock.restore();
});
- it('does not make a request if there is no container element', done => {
+ it('does not make a request if there is no container element', (done) => {
setFixtures('');
jest.spyOn(axios, 'get').mockImplementation(() => {});
@@ -48,13 +48,13 @@ describe('GpgBadges', () => {
.catch(done.fail);
});
- it('throws an error if the endpoint is missing', done => {
+ it('throws an error if the endpoint is missing', (done) => {
setFixtures('<div class="js-signature-container"></div>');
jest.spyOn(axios, 'get').mockImplementation(() => {});
GpgBadges.fetch()
.then(() => done.fail('Expected error to be thrown'))
- .catch(error => {
+ .catch((error) => {
expect(error.message).toBe('Missing commit signatures endpoint!');
expect(axios.get).not.toHaveBeenCalled();
})
@@ -62,7 +62,7 @@ describe('GpgBadges', () => {
.catch(done.fail);
});
- it('displays a loading spinner', done => {
+ it('displays a loading spinner', (done) => {
mock.onGet(dummyUrl).replyOnce(200);
GpgBadges.fetch()
@@ -76,7 +76,7 @@ describe('GpgBadges', () => {
.catch(done.fail);
});
- it('replaces the loading spinner', done => {
+ it('replaces the loading spinner', (done) => {
mock.onGet(dummyUrl).replyOnce(200, dummyResponse);
GpgBadges.fetch()
diff --git a/spec/frontend/group_settings/components/shared_runners_form_spec.js b/spec/frontend/group_settings/components/shared_runners_form_spec.js
index 9e3ee8a2cb1..4ec739122c8 100644
--- a/spec/frontend/group_settings/components/shared_runners_form_spec.js
+++ b/spec/frontend/group_settings/components/shared_runners_form_spec.js
@@ -32,7 +32,7 @@ describe('group_settings/components/shared_runners_form', () => {
const findErrorAlert = () => wrapper.find(GlAlert);
const findEnabledToggle = () => wrapper.find('[data-testid="enable-runners-toggle"]');
const findOverrideToggle = () => wrapper.find('[data-testid="override-runners-toggle"]');
- const changeToggle = toggle => toggle.vm.$emit('change', !toggle.props('value'));
+ const changeToggle = (toggle) => toggle.vm.$emit('change', !toggle.props('value'));
const getRequestPayload = () => JSON.parse(mock.history.put[0].data);
const isLoadingIconVisible = () => findLoadingIcon().exists();
diff --git a/spec/frontend/groups/components/app_spec.js b/spec/frontend/groups/components/app_spec.js
index 72d8e23f28b..9244e4f331e 100644
--- a/spec/frontend/groups/components/app_spec.js
+++ b/spec/frontend/groups/components/app_spec.js
@@ -1,5 +1,4 @@
import '~/flash';
-import $ from 'jquery';
import Vue from 'vue';
import AxiosMockAdapter from 'axios-mock-adapter';
import { GlModal, GlLoadingIcon } from '@gitlab/ui';
@@ -123,12 +122,12 @@ describe('AppComponent', () => {
it('should show flash error when request fails', () => {
mock.onGet('/dashboard/groups.json').reply(400);
- jest.spyOn($, 'scrollTo').mockImplementation(() => {});
+ jest.spyOn(window, 'scrollTo').mockImplementation(() => {});
jest.spyOn(window, 'Flash').mockImplementation(() => {});
return vm.fetchGroups({}).then(() => {
expect(vm.isLoading).toBe(false);
- expect($.scrollTo).toHaveBeenCalledWith(0);
+ expect(window.scrollTo).toHaveBeenCalledWith({ behavior: 'smooth', top: 0 });
expect(window.Flash).toHaveBeenCalledWith('An error occurred. Please try again.');
});
});
@@ -180,7 +179,7 @@ describe('AppComponent', () => {
it('should fetch groups for provided page details and update window state', () => {
jest.spyOn(urlUtilities, 'mergeUrlParams');
jest.spyOn(window.history, 'replaceState').mockImplementation(() => {});
- jest.spyOn($, 'scrollTo').mockImplementation(() => {});
+ jest.spyOn(window, 'scrollTo').mockImplementation(() => {});
const fetchPagePromise = vm.fetchPage(2, null, null, true);
@@ -195,7 +194,7 @@ describe('AppComponent', () => {
return fetchPagePromise.then(() => {
expect(vm.isLoading).toBe(false);
- expect($.scrollTo).toHaveBeenCalledWith(0);
+ expect(window.scrollTo).toHaveBeenCalledWith({ behavior: 'smooth', top: 0 });
expect(urlUtilities.mergeUrlParams).toHaveBeenCalledWith({ page: 2 }, expect.any(String));
expect(window.history.replaceState).toHaveBeenCalledWith(
{
@@ -308,14 +307,14 @@ describe('AppComponent', () => {
const notice = `You left the "${childGroupItem.fullName}" group.`;
jest.spyOn(vm.service, 'leaveGroup').mockResolvedValue({ data: { notice } });
jest.spyOn(vm.store, 'removeGroup');
- jest.spyOn($, 'scrollTo').mockImplementation(() => {});
+ jest.spyOn(window, 'scrollTo').mockImplementation(() => {});
vm.leaveGroup();
expect(vm.targetGroup.isBeingRemoved).toBe(true);
expect(vm.service.leaveGroup).toHaveBeenCalledWith(vm.targetGroup.leavePath);
return waitForPromises().then(() => {
- expect($.scrollTo).toHaveBeenCalledWith(0);
+ expect(window.scrollTo).toHaveBeenCalledWith({ behavior: 'smooth', top: 0 });
expect(vm.store.removeGroup).toHaveBeenCalledWith(vm.targetGroup, vm.targetParentGroup);
expect($toast.show).toHaveBeenCalledWith(notice);
});
diff --git a/spec/frontend/groups/components/group_item_spec.js b/spec/frontend/groups/components/group_item_spec.js
index 32bae812c86..d70ea709dee 100644
--- a/spec/frontend/groups/components/group_item_spec.js
+++ b/spec/frontend/groups/components/group_item_spec.js
@@ -31,7 +31,7 @@ describe('GroupItemComponent', () => {
vm.$destroy();
});
- const withMicrodata = group => ({
+ const withMicrodata = (group) => ({
...group,
microdata: getGroupItemMicrodata(group),
});
@@ -49,7 +49,7 @@ describe('GroupItemComponent', () => {
const { rowClass } = vm;
expect(Object.keys(rowClass).length).toBe(classes.length);
- Object.keys(rowClass).forEach(className => {
+ Object.keys(rowClass).forEach((className) => {
expect(classes.indexOf(className)).toBeGreaterThan(-1);
});
});
@@ -220,13 +220,13 @@ describe('GroupItemComponent', () => {
});
describe('schema.org props', () => {
describe('when showSchemaMarkup is disabled on the group', () => {
- it.each(['itemprop', 'itemtype', 'itemscope'], 'it does not set %s', attr => {
+ it.each(['itemprop', 'itemtype', 'itemscope'], 'it does not set %s', (attr) => {
expect(vm.$el.getAttribute(attr)).toBeNull();
});
it.each(
['.js-group-avatar', '.js-group-name', '.js-group-description'],
'it does not set `itemprop` on sub-nodes',
- selector => {
+ (selector) => {
expect(vm.$el.querySelector(selector).getAttribute('itemprop')).toBeNull();
},
);
diff --git a/spec/frontend/groups/components/invite_members_banner_spec.js b/spec/frontend/groups/components/invite_members_banner_spec.js
index 95003b211fd..4e69f3cd433 100644
--- a/spec/frontend/groups/components/invite_members_banner_spec.js
+++ b/spec/frontend/groups/components/invite_members_banner_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlBanner } from '@gitlab/ui';
+import { GlBanner, GlButton } from '@gitlab/ui';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import { setCookie, parseBoolean } from '~/lib/utils/common_utils';
import InviteMembersBanner from '~/groups/components/invite_members_banner.vue';
@@ -107,14 +107,12 @@ describe('InviteMembersBanner', () => {
});
describe('dismissing', () => {
- const findButton = () => {
- return wrapper.find('button');
- };
+ const findButton = () => wrapper.findAll(GlButton).at(1);
beforeEach(() => {
wrapper = createComponent({ GlBanner });
- findButton().trigger('click');
+ findButton().vm.$emit('click');
});
it('sets iDismissed to true', () => {
@@ -138,7 +136,7 @@ describe('InviteMembersBanner', () => {
});
it('does not render the banner', () => {
- expect(wrapper.contains(GlBanner)).toBe(false);
+ expect(wrapper.find(GlBanner).exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/groups/components/item_stats_spec.js b/spec/frontend/groups/components/item_stats_spec.js
index d8c88a608ac..f350012ebed 100644
--- a/spec/frontend/groups/components/item_stats_spec.js
+++ b/spec/frontend/groups/components/item_stats_spec.js
@@ -44,7 +44,7 @@ describe('ItemStats', () => {
expect(findItemStatsValue().exists()).toBe(true);
expect(findItemStatsValue().props('cssClass')).toBe('project-stars');
- expect(wrapper.contains('.last-updated')).toBe(true);
+ expect(wrapper.find('.last-updated').exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/groups/components/visibility_level_dropdown_spec.js b/spec/frontend/groups/components/visibility_level_dropdown_spec.js
index bf9508dc768..61b7bbb0833 100644
--- a/spec/frontend/groups/components/visibility_level_dropdown_spec.js
+++ b/spec/frontend/groups/components/visibility_level_dropdown_spec.js
@@ -11,7 +11,7 @@ describe('Visibility Level Dropdown', () => {
];
const defaultLevel = 0;
- const createComponent = propsData => {
+ const createComponent = (propsData) => {
wrapper = shallowMount(Component, {
propsData,
});
@@ -33,7 +33,7 @@ describe('Visibility Level Dropdown', () => {
wrapper.find("input[name='group[visibility_level]']").attributes('value');
const dropdownText = () => wrapper.find(GlDropdown).props('text');
const findDropdownItems = () =>
- wrapper.findAll(GlDropdownItem).wrappers.map(option => ({
+ wrapper.findAll(GlDropdownItem).wrappers.map((option) => ({
text: option.text(),
secondaryText: option.props('secondaryText'),
}));
@@ -56,10 +56,7 @@ describe('Visibility Level Dropdown', () => {
describe('Selecting an option', () => {
beforeEach(() => {
- wrapper
- .findAll(GlDropdownItem)
- .at(1)
- .vm.$emit('click');
+ wrapper.findAll(GlDropdownItem).at(1).vm.$emit('click');
});
it('sets the value of the hidden input to the selected value', () => {
diff --git a/spec/frontend/groups/members/components/app_spec.js b/spec/frontend/groups/members/components/app_spec.js
index 208e2fc35b6..9847dacbec8 100644
--- a/spec/frontend/groups/members/components/app_spec.js
+++ b/spec/frontend/groups/members/components/app_spec.js
@@ -87,21 +87,9 @@ describe('GroupMembersApp', () => {
});
});
- describe.each`
- featureFlagValue | exists
- ${true} | ${true}
- ${false} | ${false}
- `(
- 'when `group_members_filtered_search` feature flag is $featureFlagValue',
- ({ featureFlagValue, exists }) => {
- it(`${exists ? 'renders' : 'does not render'} FilterSortContainer`, () => {
- createComponent(
- {},
- { provide: { glFeatures: { groupMembersFilteredSearch: featureFlagValue } } },
- );
-
- expect(findFilterSortContainer().exists()).toBe(exists);
- });
- },
- );
+ it('renders `FilterSortContainer`', () => {
+ createComponent();
+
+ expect(findFilterSortContainer().exists()).toBe(true);
+ });
});
diff --git a/spec/frontend/helpers/README.md b/spec/frontend/helpers/README.md
new file mode 100644
index 00000000000..22f9ea3442d
--- /dev/null
+++ b/spec/frontend/helpers/README.md
@@ -0,0 +1,3 @@
+This folder should only contain specs for '~/helpers' application code.
+
+If you want to create a helper to be used in tests, please place it under [__helpers__](../__helpers__).
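The note above pairs with the import rewrites made throughout this diff: specs stop reaching for test helpers through relative '../../helpers/...' paths and use the 'helpers/...' alias instead, which is assumed here to resolve to spec/frontend/__helpers__. A minimal illustration of the resulting split, using only imports that appear elsewhere in this diff:

  // Shared test helpers come from the aliased __helpers__ folder...
  import { trimText } from 'helpers/text_helper';
  import waitForPromises from 'helpers/wait_for_promises';
  // ...while specs in spec/frontend/helpers target application code under ~/helpers.
  import { helpPagePath } from '~/helpers/help_page_helper';
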
diff --git a/spec/frontend/helpers/event_hub_factory_spec.js b/spec/frontend/helpers/event_hub_factory_spec.js
index c4f63ff6049..2491e8d5dda 100644
--- a/spec/frontend/helpers/event_hub_factory_spec.js
+++ b/spec/frontend/helpers/event_hub_factory_spec.js
@@ -93,7 +93,7 @@ describe('event bus factory', () => {
describe('$off', () => {
beforeEach(() => {
- otherHandlers.forEach(x => eventBus.$on(TEST_EVENT, x));
+ otherHandlers.forEach((x) => eventBus.$on(TEST_EVENT, x));
eventBus.$on(TEST_EVENT, handler);
});
@@ -115,7 +115,7 @@ describe('event bus factory', () => {
eventBus.$emit(TEST_EVENT);
expect(handler).not.toHaveBeenCalled();
- expect(otherHandlers.map(x => x.mock.calls.length)).toEqual(otherHandlers.map(() => 1));
+ expect(otherHandlers.map((x) => x.mock.calls.length)).toEqual(otherHandlers.map(() => 1));
});
it('without a handler, will no longer call any handlers', () => {
@@ -124,13 +124,13 @@ describe('event bus factory', () => {
eventBus.$emit(TEST_EVENT);
expect(handler).not.toHaveBeenCalled();
- expect(otherHandlers.map(x => x.mock.calls.length)).toEqual(otherHandlers.map(() => 0));
+ expect(otherHandlers.map((x) => x.mock.calls.length)).toEqual(otherHandlers.map(() => 0));
});
});
describe('$emit', () => {
beforeEach(() => {
- otherHandlers.forEach(x => eventBus.$on(TEST_EVENT_2, x));
+ otherHandlers.forEach((x) => eventBus.$on(TEST_EVENT_2, x));
eventBus.$on(TEST_EVENT, handler);
});
@@ -138,7 +138,7 @@ describe('event bus factory', () => {
eventBus.$emit(TEST_EVENT, 'arg1');
expect(handler).toHaveBeenCalledWith('arg1');
- expect(otherHandlers.map(x => x.mock.calls.length)).toEqual(otherHandlers.map(() => 0));
+ expect(otherHandlers.map((x) => x.mock.calls.length)).toEqual(otherHandlers.map(() => 0));
});
});
});
diff --git a/spec/frontend/helpers/help_page_helper_spec.js b/spec/frontend/helpers/help_page_helper_spec.js
new file mode 100644
index 00000000000..09c1a113a96
--- /dev/null
+++ b/spec/frontend/helpers/help_page_helper_spec.js
@@ -0,0 +1,29 @@
+import { helpPagePath } from '~/helpers/help_page_helper';
+
+describe('help page helper', () => {
+ it.each`
+ relative_url_root | path | anchor | expected
+ ${undefined} | ${'administration/index'} | ${undefined} | ${'/help/administration/index'}
+ ${''} | ${'administration/index'} | ${undefined} | ${'/help/administration/index'}
+ ${'/'} | ${'administration/index'} | ${undefined} | ${'/help/administration/index'}
+ ${'/gitlab'} | ${'administration/index'} | ${undefined} | ${'/gitlab/help/administration/index'}
+ ${'/gitlab/'} | ${'administration/index'} | ${undefined} | ${'/gitlab/help/administration/index'}
+ ${undefined} | ${'administration/index'} | ${undefined} | ${'/help/administration/index'}
+ ${'/'} | ${'administration/index'} | ${undefined} | ${'/help/administration/index'}
+ ${''} | ${'administration/index.md'} | ${undefined} | ${'/help/administration/index.md'}
+ ${''} | ${'administration/index.md'} | ${'installing-gitlab'} | ${'/help/administration/index.md#installing-gitlab'}
+ ${''} | ${'administration/index'} | ${'installing-gitlab'} | ${'/help/administration/index#installing-gitlab'}
+ ${''} | ${'administration/index'} | ${'#installing-gitlab'} | ${'/help/administration/index#installing-gitlab'}
+ ${''} | ${'/administration/index'} | ${undefined} | ${'/help/administration/index'}
+ ${''} | ${'administration/index/'} | ${undefined} | ${'/help/administration/index/'}
+ ${''} | ${'/administration/index/'} | ${undefined} | ${'/help/administration/index/'}
+ ${'/'} | ${'/administration/index'} | ${undefined} | ${'/help/administration/index'}
+ `(
+ 'generates correct URL when path is `$path`, relative url is `$relative_url_root` and anchor is `$anchor`',
+ ({ relative_url_root, anchor, path, expected }) => {
+ window.gon = { relative_url_root };
+
+ expect(helpPagePath(path, { anchor })).toBe(expected);
+ },
+ );
+});
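The table above pins the joining rules down fairly tightly: the relative URL root is optional and may carry stray slashes, the path may start or end with a slash, and the anchor may or may not already include '#'. A minimal sketch consistent with those cases, assuming nothing beyond what the table shows (an illustration, not the actual ~/helpers/help_page_helper implementation):

  // Sketch only: reproduces the behaviour encoded in the it.each table above.
  export const helpPagePath = (path, { anchor = '' } = {}) => {
    const root = (window.gon && window.gon.relative_url_root) || '';
    // Join the pieces, then collapse duplicate slashes ('//gitlab//help' -> '/gitlab/help').
    const base = ['/', root, 'help', path].join('/').replace(/\/{2,}/g, '/');
    // Accept anchors given either as 'section' or '#section'.
    return anchor ? `${base}#${anchor.replace(/^#/, '')}` : base;
  };
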
diff --git a/spec/frontend/helpers/monitor_helper_spec.js b/spec/frontend/helpers/monitor_helper_spec.js
index 219b05e312b..ef2a9fb0665 100644
--- a/spec/frontend/helpers/monitor_helper_spec.js
+++ b/spec/frontend/helpers/monitor_helper_spec.js
@@ -3,7 +3,11 @@ import { getSeriesLabel, makeDataSeries } from '~/helpers/monitor_helper';
describe('monitor helper', () => {
const defaultConfig = { default: true, name: 'default name' };
const name = 'data name';
- const series = [[1, 1], [2, 2], [3, 3]];
+ const series = [
+ [1, 1],
+ [2, 2],
+ [3, 3],
+ ];
describe('getSeriesLabel', () => {
const metricAttributes = { __name__: 'up', app: 'prometheus' };
diff --git a/spec/frontend/helpers/startup_css_helper_spec.js b/spec/frontend/helpers/startup_css_helper_spec.js
index 2d560c43fa5..703bdbd342f 100644
--- a/spec/frontend/helpers/startup_css_helper_spec.js
+++ b/spec/frontend/helpers/startup_css_helper_spec.js
@@ -39,24 +39,7 @@ describe('waitForCSSLoaded', () => {
});
});
- describe('with startup css disabled', () => {
- gon.features = {
- startupCss: false,
- };
-
- it('should invoke the action right away', async () => {
- const events = waitForCSSLoaded(mockedCallback);
- await events;
-
- expect(mockedCallback).toHaveBeenCalledTimes(1);
- });
- });
-
describe('with startup css enabled', () => {
- gon.features = {
- startupCss: true,
- };
-
it('should dispatch CSSLoaded when the assets are cached or already loaded', async () => {
setFixtures(`
<link href="one.css" data-startupcss="loaded">
@@ -75,7 +58,7 @@ describe('waitForCSSLoaded', () => {
const events = waitForCSSLoaded(mockedCallback);
document
.querySelectorAll('[data-startupcss="loading"]')
- .forEach(elem => elem.setAttribute('data-startupcss', 'loaded'));
+ .forEach((elem) => elem.setAttribute('data-startupcss', 'loaded'));
document.dispatchEvent(new CustomEvent('CSSStartupLinkLoaded'));
await events;
diff --git a/spec/frontend/helpers/wait_for_promises.js b/spec/frontend/helpers/wait_for_promises.js
deleted file mode 100644
index 1d2b53fc770..00000000000
--- a/spec/frontend/helpers/wait_for_promises.js
+++ /dev/null
@@ -1 +0,0 @@
-export default () => new Promise(resolve => requestAnimationFrame(resolve));
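Only this copy of the helper is deleted; specs in this diff keep importing waitForPromises through the 'helpers/' alias (see repo_editor_spec.js below), so the usual flushing pattern still applies. A small self-contained sketch of that pattern, assuming real (not fake) timers:

  import waitForPromises from 'helpers/wait_for_promises';

  it('resolves only after already-queued promise callbacks have run', async () => {
    const spy = jest.fn();
    Promise.resolve().then(spy); // queue a microtask, as a mocked request would

    expect(spy).not.toHaveBeenCalled(); // still synchronous, nothing flushed yet
    await waitForPromises(); // flush pending promise callbacks
    expect(spy).toHaveBeenCalled();
  });
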
diff --git a/spec/frontend/ide/components/activity_bar_spec.js b/spec/frontend/ide/components/activity_bar_spec.js
index 762f3c5dad1..1a4b6ca0b71 100644
--- a/spec/frontend/ide/components/activity_bar_spec.js
+++ b/spec/frontend/ide/components/activity_bar_spec.js
@@ -1,8 +1,8 @@
import Vue from 'vue';
+import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
import { createStore } from '~/ide/stores';
import { leftSidebarViews } from '~/ide/constants';
import ActivityBar from '~/ide/components/activity_bar.vue';
-import { createComponentWithStore } from '../../helpers/vue_mount_component_helper';
describe('IDE activity bar', () => {
const Component = Vue.extend(ActivityBar);
@@ -59,7 +59,7 @@ describe('IDE activity bar', () => {
expect(vm.$el.querySelector('.js-ide-edit-mode').classList).toContain('active');
});
- it('sets commit item active', done => {
+ it('sets commit item active', (done) => {
vm.$store.state.currentActivityView = leftSidebarViews.commit.name;
vm.$nextTick(() => {
diff --git a/spec/frontend/ide/components/branches/search_list_spec.js b/spec/frontend/ide/components/branches/search_list_spec.js
index fe142d70698..85776f8cc0e 100644
--- a/spec/frontend/ide/components/branches/search_list_spec.js
+++ b/spec/frontend/ide/components/branches/search_list_spec.js
@@ -71,7 +71,7 @@ describe('IDE branches search list', () => {
it('renders check next to active branch', () => {
const activeBranch = 'regular';
createComponent({ branches }, activeBranch);
- const items = wrapper.findAll(Item).filter(w => w.props('isActive'));
+ const items = wrapper.findAll(Item).filter((w) => w.props('isActive'));
expect(items.length).toBe(1);
expect(items.at(0).props('item').name).toBe(activeBranch);
diff --git a/spec/frontend/ide/components/commit_sidebar/actions_spec.js b/spec/frontend/ide/components/commit_sidebar/actions_spec.js
index 0003e13c92f..91751bd34ea 100644
--- a/spec/frontend/ide/components/commit_sidebar/actions_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/actions_spec.js
@@ -30,7 +30,7 @@ describe('IDE commit sidebar actions', () => {
vm.$store.state.currentProjectId = 'abcproject';
const proj = { ...projectData };
- proj.branches[currentBranchId] = branches.find(branch => branch.name === currentBranchId);
+ proj.branches[currentBranchId] = branches.find((branch) => branch.name === currentBranchId);
proj.empty_repo = emptyRepo;
Vue.set(vm.$store.state.projects, 'abcproject', proj);
@@ -72,7 +72,7 @@ describe('IDE commit sidebar actions', () => {
expect(findText()).toContain('Commit to master branch');
});
- it('hides merge request option when project merge requests are disabled', done => {
+ it('hides merge request option when project merge requests are disabled', (done) => {
createComponent({ hasMR: false });
vm.$nextTick(() => {
@@ -106,7 +106,7 @@ describe('IDE commit sidebar actions', () => {
expect(vm.$store.dispatch).not.toHaveBeenCalled();
});
- it('calls again after staged changes', done => {
+ it('calls again after staged changes', (done) => {
createComponent({ currentBranchId: null });
vm.$store.state.currentBranchId = 'master';
diff --git a/spec/frontend/ide/components/commit_sidebar/empty_state_spec.js b/spec/frontend/ide/components/commit_sidebar/empty_state_spec.js
index dbb43e43c19..c1dab4a04b6 100644
--- a/spec/frontend/ide/components/commit_sidebar/empty_state_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/empty_state_spec.js
@@ -1,7 +1,7 @@
import Vue from 'vue';
+import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
import { createStore } from '~/ide/stores';
import emptyState from '~/ide/components/commit_sidebar/empty_state.vue';
-import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper';
describe('IDE commit panel empty state', () => {
let vm;
diff --git a/spec/frontend/ide/components/commit_sidebar/list_item_spec.js b/spec/frontend/ide/components/commit_sidebar/list_item_spec.js
index 7ce628d4da7..baa25a11c2a 100644
--- a/spec/frontend/ide/components/commit_sidebar/list_item_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/list_item_spec.js
@@ -41,7 +41,7 @@ describe('Multi-file editor commit sidebar list item', () => {
expect(findPathText()).toContain(f.path);
});
- it('correctly renders renamed entries', done => {
+ it('correctly renders renamed entries', (done) => {
Vue.set(vm.file, 'prevName', 'Old name');
vm.$nextTick()
@@ -52,7 +52,7 @@ describe('Multi-file editor commit sidebar list item', () => {
.catch(done.fail);
});
- it('correctly renders entry, the name of which did not change after rename (as within a folder)', done => {
+ it('correctly renders entry, the name of which did not change after rename (as within a folder)', (done) => {
Vue.set(vm.file, 'prevName', f.name);
vm.$nextTick()
@@ -63,7 +63,7 @@ describe('Multi-file editor commit sidebar list item', () => {
.catch(done.fail);
});
- it('opens a closed file in the editor when clicking the file path', done => {
+ it('opens a closed file in the editor when clicking the file path', (done) => {
jest.spyOn(vm, 'openPendingTab');
jest.spyOn(router, 'push').mockImplementation(() => {});
@@ -77,7 +77,7 @@ describe('Multi-file editor commit sidebar list item', () => {
});
});
- it('calls updateViewer with diff when clicking file', done => {
+ it('calls updateViewer with diff when clicking file', (done) => {
jest.spyOn(vm, 'openFileInEditor');
jest.spyOn(vm, 'updateViewer');
jest.spyOn(router, 'push').mockImplementation(() => {});
@@ -134,7 +134,7 @@ describe('Multi-file editor commit sidebar list item', () => {
expect(vm.$el.querySelector('.is-active')).toBe(null);
});
- it('adds active class when keys match', done => {
+ it('adds active class when keys match', (done) => {
vm.keyPrefix = 'staged';
vm.$nextTick(() => {
diff --git a/spec/frontend/ide/components/commit_sidebar/list_spec.js b/spec/frontend/ide/components/commit_sidebar/list_spec.js
index 636dfbf0b2a..898ec4bebce 100644
--- a/spec/frontend/ide/components/commit_sidebar/list_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/list_spec.js
@@ -31,7 +31,7 @@ describe('Multi-file editor commit sidebar list', () => {
});
describe('with a list of files', () => {
- beforeEach(done => {
+ beforeEach((done) => {
const f = file('file name');
f.changed = true;
vm.fileList.push(f);
diff --git a/spec/frontend/ide/components/commit_sidebar/message_field_spec.js b/spec/frontend/ide/components/commit_sidebar/message_field_spec.js
index d6ea8b9a4bd..1514fbc2c3b 100644
--- a/spec/frontend/ide/components/commit_sidebar/message_field_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/message_field_spec.js
@@ -23,7 +23,7 @@ describe('IDE commit message field', () => {
vm.$destroy();
});
- it('adds is-focused class on focus', done => {
+ it('adds is-focused class on focus', (done) => {
vm.$el.querySelector('textarea').focus();
vm.$nextTick(() => {
@@ -33,7 +33,7 @@ describe('IDE commit message field', () => {
});
});
- it('removes is-focused class on blur', done => {
+ it('removes is-focused class on blur', (done) => {
vm.$el.querySelector('textarea').focus();
vm.$nextTick()
@@ -66,7 +66,7 @@ describe('IDE commit message field', () => {
describe('highlights', () => {
describe('subject line', () => {
- it('does not highlight less than 50 characters', done => {
+ it('does not highlight less than 50 characters', (done) => {
vm.text = 'text less than 50 chars';
vm.$nextTick()
@@ -81,7 +81,7 @@ describe('IDE commit message field', () => {
.catch(done.fail);
});
- it('highlights characters past the 50-character limit', done => {
+ it('highlights characters past the 50-character limit', (done) => {
vm.text =
'text less than 50 chars that should not highlighted. text more than 50 should be highlighted';
@@ -102,7 +102,7 @@ describe('IDE commit message field', () => {
});
describe('body text', () => {
- it('does not highlight body text less than 72 characters', done => {
+ it('does not highlight body text less than 72 characters', (done) => {
vm.text = 'subject line\nbody content';
vm.$nextTick()
@@ -114,7 +114,7 @@ describe('IDE commit message field', () => {
.catch(done.fail);
});
- it('highlights body text more than 72 characters', done => {
+ it('highlights body text more than 72 characters', (done) => {
vm.text =
'subject line\nbody content that will be highlighted when it is more than 72 characters in length';
@@ -128,7 +128,7 @@ describe('IDE commit message field', () => {
.catch(done.fail);
});
- it('highlights body text & subject line', done => {
+ it('highlights body text & subject line', (done) => {
vm.text =
'text less than 50 chars that should not highlighted\nbody content that will be highlighted when it is more than 72 characters in length';
@@ -147,7 +147,7 @@ describe('IDE commit message field', () => {
});
describe('scrolling textarea', () => {
- it('updates transform of highlights', done => {
+ it('updates transform of highlights', (done) => {
vm.text = 'subject line\n\n\n\n\n\n\n\n\n\n\nbody content';
vm.$nextTick()
diff --git a/spec/frontend/ide/components/commit_sidebar/new_merge_request_option_spec.js b/spec/frontend/ide/components/commit_sidebar/new_merge_request_option_spec.js
index 7cbf5ebc61a..50da64abbbe 100644
--- a/spec/frontend/ide/components/commit_sidebar/new_merge_request_option_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/new_merge_request_option_spec.js
@@ -17,7 +17,7 @@ describe('create new MR checkbox', () => {
] = { foo: 'bar' };
};
- const setPermissions = permissions => {
+ const setPermissions = (permissions) => {
store.state.projects[store.state.currentProjectId].userPermissions = permissions;
};
@@ -33,7 +33,7 @@ describe('create new MR checkbox', () => {
vm.$store.state.currentBranchId = currentBranchId;
store.state.projects.abcproject.branches[currentBranchId] = branches.find(
- branch => branch.name === currentBranchId,
+ (branch) => branch.name === currentBranchId,
);
return vm.$mount();
@@ -69,7 +69,7 @@ describe('create new MR checkbox', () => {
expect(vm.$el.textContent).not.toBe('');
});
- it('has new MR', done => {
+ it('has new MR', (done) => {
setMR();
vm.$nextTick()
@@ -93,7 +93,7 @@ describe('create new MR checkbox', () => {
expect(vm.$el.textContent).toBe('');
});
- it('has new MR', done => {
+ it('has new MR', (done) => {
setMR();
vm.$nextTick()
@@ -118,7 +118,7 @@ describe('create new MR checkbox', () => {
expect(vm.$el.textContent).not.toBe('');
});
- it('is rendered if MR exists', done => {
+ it('is rendered if MR exists', (done) => {
setMR();
vm.$nextTick()
@@ -141,7 +141,7 @@ describe('create new MR checkbox', () => {
expect(vm.$el.textContent).not.toBe('');
});
- it('is hidden if MR exists', done => {
+ it('is hidden if MR exists', (done) => {
setMR();
vm.$nextTick()
@@ -165,7 +165,7 @@ describe('create new MR checkbox', () => {
expect(vm.$el.textContent).not.toBe('');
});
- it('is hidden if MR exists', done => {
+ it('is hidden if MR exists', (done) => {
setMR();
vm.$nextTick()
diff --git a/spec/frontend/ide/components/commit_sidebar/radio_group_spec.js b/spec/frontend/ide/components/commit_sidebar/radio_group_spec.js
index bf61f4bbe77..73d811f99b8 100644
--- a/spec/frontend/ide/components/commit_sidebar/radio_group_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/radio_group_spec.js
@@ -7,7 +7,7 @@ describe('IDE commit sidebar radio group', () => {
let vm;
let store;
- beforeEach(done => {
+ beforeEach((done) => {
store = createStore();
const Component = Vue.extend(radioGroup);
@@ -33,7 +33,7 @@ describe('IDE commit sidebar radio group', () => {
expect(vm.$el.textContent).toContain('test');
});
- it('uses slot if label is not present', done => {
+ it('uses slot if label is not present', (done) => {
vm.$destroy();
vm = new Vue({
@@ -41,7 +41,7 @@ describe('IDE commit sidebar radio group', () => {
radioGroup,
},
store,
- render: createElement =>
+ render: (createElement) =>
createElement('radio-group', { props: { value: '1' } }, 'Testing slot'),
});
@@ -54,7 +54,7 @@ describe('IDE commit sidebar radio group', () => {
});
});
- it('updates store when changing radio button', done => {
+ it('updates store when changing radio button', (done) => {
vm.$el.querySelector('input').dispatchEvent(new Event('change'));
Vue.nextTick(() => {
@@ -65,7 +65,7 @@ describe('IDE commit sidebar radio group', () => {
});
describe('with input', () => {
- beforeEach(done => {
+ beforeEach((done) => {
vm.$destroy();
const Component = Vue.extend(radioGroup);
@@ -89,7 +89,7 @@ describe('IDE commit sidebar radio group', () => {
expect(vm.$el.querySelector('.form-control')).not.toBeNull();
});
- it('hides input when commitAction doesnt match value', done => {
+ it('hides input when commitAction doesnt match value', (done) => {
store.state.commit.commitAction = '2';
Vue.nextTick(() => {
@@ -98,7 +98,7 @@ describe('IDE commit sidebar radio group', () => {
});
});
- it('updates branch name in store on input', done => {
+ it('updates branch name in store on input', (done) => {
const input = vm.$el.querySelector('.form-control');
input.value = 'testing-123';
input.dispatchEvent(new Event('input'));
diff --git a/spec/frontend/ide/components/commit_sidebar/success_message_spec.js b/spec/frontend/ide/components/commit_sidebar/success_message_spec.js
index db13c90fbb9..b116321efb3 100644
--- a/spec/frontend/ide/components/commit_sidebar/success_message_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/success_message_spec.js
@@ -1,7 +1,7 @@
import Vue from 'vue';
+import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
import { createStore } from '~/ide/stores';
import successMessage from '~/ide/components/commit_sidebar/success_message.vue';
-import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper';
describe('IDE commit panel successful commit state', () => {
let vm;
@@ -23,7 +23,7 @@ describe('IDE commit panel successful commit state', () => {
vm.$destroy();
});
- it('renders last commit message when it exists', done => {
+ it('renders last commit message when it exists', (done) => {
vm.$store.state.lastCommitMsg = 'testing commit message';
Vue.nextTick(() => {
diff --git a/spec/frontend/ide/components/error_message_spec.js b/spec/frontend/ide/components/error_message_spec.js
index 8b7e7da3b51..fa6816d3546 100644
--- a/spec/frontend/ide/components/error_message_spec.js
+++ b/spec/frontend/ide/components/error_message_spec.js
@@ -10,7 +10,7 @@ describe('IDE error message component', () => {
let wrapper;
const setErrorMessageMock = jest.fn();
- const createComponent = messageProps => {
+ const createComponent = (messageProps) => {
const fakeStore = new Vuex.Store({
actions: { setErrorMessage: setErrorMessageMock },
});
@@ -103,7 +103,7 @@ describe('IDE error message component', () => {
let resolveAction;
actionMock.mockImplementation(
() =>
- new Promise(resolve => {
+ new Promise((resolve) => {
resolveAction = resolve;
}),
);
diff --git a/spec/frontend/ide/components/file_row_extra_spec.js b/spec/frontend/ide/components/file_row_extra_spec.js
index 2a106ad37c0..275b98bc154 100644
--- a/spec/frontend/ide/components/file_row_extra_spec.js
+++ b/spec/frontend/ide/components/file_row_extra_spec.js
@@ -45,15 +45,16 @@ describe('IDE extra file row component', () => {
expect(vm.folderChangesTooltip).toBe(undefined);
});
- [{ input: 1, output: '1 changed file' }, { input: 2, output: '2 changed files' }].forEach(
- ({ input, output }) => {
- it('returns changed files count if changes count is not 0', () => {
- changesCount = input;
-
- expect(vm.folderChangesTooltip).toBe(output);
- });
- },
- );
+ [
+ { input: 1, output: '1 changed file' },
+ { input: 2, output: '2 changed files' },
+ ].forEach(({ input, output }) => {
+ it('returns changed files count if changes count is not 0', () => {
+ changesCount = input;
+
+ expect(vm.folderChangesTooltip).toBe(output);
+ });
+ });
});
describe('show tree changes count', () => {
@@ -69,7 +70,7 @@ describe('IDE extra file row component', () => {
expect(vm.$el.querySelector('.ide-tree-changes')).toBe(null);
});
- it('does not show when tree is open', done => {
+ it('does not show when tree is open', (done) => {
vm.file.type = 'tree';
vm.file.opened = true;
changesCount = 1;
@@ -81,7 +82,7 @@ describe('IDE extra file row component', () => {
});
});
- it('shows for trees with changes', done => {
+ it('shows for trees with changes', (done) => {
vm.file.type = 'tree';
vm.file.opened = false;
changesCount = 1;
@@ -99,7 +100,7 @@ describe('IDE extra file row component', () => {
expect(vm.$el.querySelector('.file-changed-icon')).toBe(null);
});
- it('shows when file is changed', done => {
+ it('shows when file is changed', (done) => {
vm.file.changed = true;
vm.$nextTick(() => {
@@ -109,7 +110,7 @@ describe('IDE extra file row component', () => {
});
});
- it('shows when file is staged', done => {
+ it('shows when file is staged', (done) => {
vm.file.staged = true;
vm.$nextTick(() => {
@@ -119,7 +120,7 @@ describe('IDE extra file row component', () => {
});
});
- it('shows when file is a tempFile', done => {
+ it('shows when file is a tempFile', (done) => {
vm.file.tempFile = true;
vm.$nextTick(() => {
@@ -129,7 +130,7 @@ describe('IDE extra file row component', () => {
});
});
- it('shows when file is renamed', done => {
+ it('shows when file is renamed', (done) => {
vm.file.prevPath = 'original-file';
vm.$nextTick(() => {
@@ -139,7 +140,7 @@ describe('IDE extra file row component', () => {
});
});
- it('hides when file is renamed', done => {
+ it('hides when file is renamed', (done) => {
vm.file.prevPath = 'original-file';
vm.file.type = 'tree';
@@ -156,7 +157,7 @@ describe('IDE extra file row component', () => {
expect(vm.$el.querySelector('[data-testid="git-merge-icon"]')).toBe(null);
});
- it('shows when there is a merge request change', done => {
+ it('shows when there is a merge request change', (done) => {
vm.file.mrChange = true;
vm.$nextTick(() => {
diff --git a/spec/frontend/ide/components/file_templates/bar_spec.js b/spec/frontend/ide/components/file_templates/bar_spec.js
index 5a33837fb14..158995c9c9c 100644
--- a/spec/frontend/ide/components/file_templates/bar_spec.js
+++ b/spec/frontend/ide/components/file_templates/bar_spec.js
@@ -46,7 +46,7 @@ describe('IDE file templates bar component', () => {
});
describe('template dropdown', () => {
- beforeEach(done => {
+ beforeEach((done) => {
vm.$store.state.fileTemplates.templates = [
{
name: 'test',
@@ -67,10 +67,7 @@ describe('IDE file templates bar component', () => {
it('calls fetchTemplate on click', () => {
jest.spyOn(vm, 'fetchTemplate').mockImplementation();
- vm.$el
- .querySelectorAll('.dropdown-content')[1]
- .querySelector('button')
- .click();
+ vm.$el.querySelectorAll('.dropdown-content')[1].querySelector('button').click();
expect(vm.fetchTemplate).toHaveBeenCalledWith({
name: 'test',
@@ -78,7 +75,7 @@ describe('IDE file templates bar component', () => {
});
});
- it('shows undo button if updateSuccess is true', done => {
+ it('shows undo button if updateSuccess is true', (done) => {
vm.$store.state.fileTemplates.updateSuccess = true;
vm.$nextTick(() => {
@@ -96,7 +93,7 @@ describe('IDE file templates bar component', () => {
expect(vm.undoFileTemplate).toHaveBeenCalled();
});
- it('calls setSelectedTemplateType if activeFile name matches a template', done => {
+ it('calls setSelectedTemplateType if activeFile name matches a template', (done) => {
const fileName = '.gitlab-ci.yml';
jest.spyOn(vm, 'setSelectedTemplateType').mockImplementation(() => {});
diff --git a/spec/frontend/ide/components/file_templates/dropdown_spec.js b/spec/frontend/ide/components/file_templates/dropdown_spec.js
index 3cffbc3362f..628580103a4 100644
--- a/spec/frontend/ide/components/file_templates/dropdown_spec.js
+++ b/spec/frontend/ide/components/file_templates/dropdown_spec.js
@@ -109,7 +109,7 @@ describe('IDE file templates dropdown component', () => {
});
const items = findItemButtons();
- expect(items.wrappers.map(x => x.text())).toEqual(templates.map(x => x.name));
+ expect(items.wrappers.map((x) => x.text())).toEqual(templates.map((x) => x.name));
});
it('searches template data', () => {
@@ -124,7 +124,7 @@ describe('IDE file templates dropdown component', () => {
const items = findItemButtons();
expect(items.length).toBe(matches.length);
- expect(items.wrappers.map(x => x.text())).toEqual(matches);
+ expect(items.wrappers.map((x) => x.text())).toEqual(matches);
});
});
@@ -151,7 +151,7 @@ describe('IDE file templates dropdown component', () => {
const items = findItemButtons();
expect(items.length).toBe(data.length);
- expect(items.wrappers.map(x => x.text())).toEqual(data.map(x => x.name));
+ expect(items.wrappers.map((x) => x.text())).toEqual(data.map((x) => x.name));
});
it('renders input when `searchable` is true', () => {
@@ -169,7 +169,7 @@ describe('IDE file templates dropdown component', () => {
const items = findItemButtons();
expect(items.length).toBe(matches.length);
- expect(items.wrappers.map(x => x.text())).toEqual(matches);
+ expect(items.wrappers.map((x) => x.text())).toEqual(matches);
});
});
});
diff --git a/spec/frontend/ide/components/ide_review_spec.js b/spec/frontend/ide/components/ide_review_spec.js
index bcc98669427..37f34a96495 100644
--- a/spec/frontend/ide/components/ide_review_spec.js
+++ b/spec/frontend/ide/components/ide_review_spec.js
@@ -1,11 +1,11 @@
import Vue from 'vue';
import Vuex from 'vuex';
import { createLocalVue, mount } from '@vue/test-utils';
+import { trimText } from 'helpers/text_helper';
+import { keepAlive } from 'helpers/keep_alive_component_helper';
import IdeReview from '~/ide/components/ide_review.vue';
import EditorModeDropdown from '~/ide/components/editor_mode_dropdown.vue';
import { createStore } from '~/ide/stores';
-import { trimText } from '../../helpers/text_helper';
-import { keepAlive } from '../../helpers/keep_alive_component_helper';
import { file } from '../helpers';
import { projectData } from '../mock_data';
diff --git a/spec/frontend/ide/components/ide_sidebar_nav_spec.js b/spec/frontend/ide/components/ide_sidebar_nav_spec.js
index 49d476b56e4..6b4cb9bd03d 100644
--- a/spec/frontend/ide/components/ide_sidebar_nav_spec.js
+++ b/spec/frontend/ide/components/ide_sidebar_nav_spec.js
@@ -48,7 +48,7 @@ describe('ide/components/ide_sidebar_nav', () => {
const findButtons = () => wrapper.findAll('li button');
const findButtonsData = () =>
- findButtons().wrappers.map(button => {
+ findButtons().wrappers.map((button) => {
return {
title: button.attributes('title'),
ariaLabel: button.attributes('aria-label'),
@@ -58,10 +58,7 @@ describe('ide/components/ide_sidebar_nav', () => {
tooltip: getBinding(button.element, 'tooltip').value,
};
});
- const clickTab = () =>
- findButtons()
- .at(TEST_CURRENT_INDEX)
- .trigger('click');
+ const clickTab = () => findButtons().at(TEST_CURRENT_INDEX).trigger('click');
describe.each`
isOpen | side | otherSide | classes | classesObj | emitEvent | emitArg
diff --git a/spec/frontend/ide/components/ide_spec.js b/spec/frontend/ide/components/ide_spec.js
index 315298eaf26..805fa898611 100644
--- a/spec/frontend/ide/components/ide_spec.js
+++ b/spec/frontend/ide/components/ide_spec.js
@@ -25,7 +25,7 @@ describe('WebIDE', () => {
tree: [],
loading: false,
};
- Object.keys(state).forEach(key => {
+ Object.keys(state).forEach((key) => {
store.state[key] = state[key];
});
diff --git a/spec/frontend/ide/components/ide_status_bar_spec.js b/spec/frontend/ide/components/ide_status_bar_spec.js
index bc8144f544c..47506bb87e8 100644
--- a/spec/frontend/ide/components/ide_status_bar_spec.js
+++ b/spec/frontend/ide/components/ide_status_bar_spec.js
@@ -1,7 +1,7 @@
import Vue from 'vue';
import _ from 'lodash';
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
-import { TEST_HOST } from '../../helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
import { createStore } from '~/ide/stores';
import IdeStatusBar from '~/ide/components/ide_status_bar.vue';
import { rightSidebarViews } from '~/ide/constants';
@@ -73,7 +73,7 @@ describe('ideStatusBar', () => {
});
describe('pipeline status', () => {
- it('opens right sidebar on clicking icon', done => {
+ it('opens right sidebar on clicking icon', (done) => {
jest.spyOn(vm, 'openRightPane').mockImplementation(() => {});
Vue.set(vm.$store.state.pipelines, 'latestPipeline', {
details: {
diff --git a/spec/frontend/ide/components/ide_status_mr_spec.js b/spec/frontend/ide/components/ide_status_mr_spec.js
index 4d9ad61201d..ac1be4b21c0 100644
--- a/spec/frontend/ide/components/ide_status_mr_spec.js
+++ b/spec/frontend/ide/components/ide_status_mr_spec.js
@@ -9,7 +9,7 @@ const TEST_URL = `${TEST_HOST}merge-requests/9001`;
describe('ide/components/ide_status_mr', () => {
let wrapper;
- const createComponent = props => {
+ const createComponent = (props) => {
wrapper = shallowMount(IdeStatusMr, {
propsData: props,
});
diff --git a/spec/frontend/ide/components/ide_tree_list_spec.js b/spec/frontend/ide/components/ide_tree_list_spec.js
index dd57a5c5f4d..c8153ea339e 100644
--- a/spec/frontend/ide/components/ide_tree_list_spec.js
+++ b/spec/frontend/ide/components/ide_tree_list_spec.js
@@ -1,7 +1,7 @@
import Vue from 'vue';
+import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
import IdeTreeList from '~/ide/components/ide_tree_list.vue';
import { createStore } from '~/ide/stores';
-import { createComponentWithStore } from '../../helpers/vue_mount_component_helper';
import { file } from '../helpers';
import { projectData } from '../mock_data';
@@ -41,7 +41,7 @@ describe('IDE tree list', () => {
vm.$mount();
});
- it('renders loading indicator', done => {
+ it('renders loading indicator', (done) => {
store.state.trees['abcproject/master'].loading = true;
vm.$nextTick(() => {
diff --git a/spec/frontend/ide/components/ide_tree_spec.js b/spec/frontend/ide/components/ide_tree_spec.js
index ad00dec2e48..c5934b032c4 100644
--- a/spec/frontend/ide/components/ide_tree_spec.js
+++ b/spec/frontend/ide/components/ide_tree_spec.js
@@ -1,9 +1,9 @@
import Vue from 'vue';
import Vuex from 'vuex';
import { mount, createLocalVue } from '@vue/test-utils';
+import { keepAlive } from 'helpers/keep_alive_component_helper';
import IdeTree from '~/ide/components/ide_tree.vue';
import { createStore } from '~/ide/stores';
-import { keepAlive } from '../../helpers/keep_alive_component_helper';
import { file } from '../helpers';
import { projectData } from '../mock_data';
diff --git a/spec/frontend/ide/components/jobs/detail/description_spec.js b/spec/frontend/ide/components/jobs/detail/description_spec.js
index 5554738336a..786a7661d97 100644
--- a/spec/frontend/ide/components/jobs/detail/description_spec.js
+++ b/spec/frontend/ide/components/jobs/detail/description_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
+import mountComponent from 'helpers/vue_mount_component_helper';
import Description from '~/ide/components/jobs/detail/description.vue';
-import mountComponent from '../../../../helpers/vue_mount_component_helper';
import { jobs } from '../../../mock_data';
describe('IDE job description', () => {
diff --git a/spec/frontend/ide/components/jobs/detail/scroll_button_spec.js b/spec/frontend/ide/components/jobs/detail/scroll_button_spec.js
index 57174181a3d..b323ad8320c 100644
--- a/spec/frontend/ide/components/jobs/detail/scroll_button_spec.js
+++ b/spec/frontend/ide/components/jobs/detail/scroll_button_spec.js
@@ -5,7 +5,7 @@ import ScrollButton from '~/ide/components/jobs/detail/scroll_button.vue';
describe('IDE job log scroll button', () => {
let wrapper;
- const createComponent = props => {
+ const createComponent = (props) => {
wrapper = shallowMount(ScrollButton, {
propsData: {
direction: 'up',
diff --git a/spec/frontend/ide/components/jobs/detail_spec.js b/spec/frontend/ide/components/jobs/detail_spec.js
index 496d8284fdd..79ac0a8122a 100644
--- a/spec/frontend/ide/components/jobs/detail_spec.js
+++ b/spec/frontend/ide/components/jobs/detail_spec.js
@@ -1,8 +1,8 @@
import Vue from 'vue';
import { TEST_HOST } from 'helpers/test_constants';
+import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
import JobDetail from '~/ide/components/jobs/detail.vue';
import { createStore } from '~/ide/stores';
-import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper';
import { jobs } from '../../mock_data';
describe('IDE jobs detail view', () => {
@@ -48,7 +48,7 @@ describe('IDE jobs detail view', () => {
expect(vm.$el.querySelector('.bash').textContent).toContain('testing');
});
- it('renders empty message output', done => {
+ it('renders empty message output', (done) => {
vm.$store.state.pipelines.detailJob.output = '';
vm.$nextTick(() => {
@@ -68,7 +68,7 @@ describe('IDE jobs detail view', () => {
expect(vm.$el.querySelector('.bash').style.display).toBe('none');
});
- it('hides loading icon when isLoading is false', done => {
+ it('hides loading icon when isLoading is false', (done) => {
vm.$store.state.pipelines.detailJob.isLoading = false;
vm.$nextTick(() => {
diff --git a/spec/frontend/ide/components/jobs/item_spec.js b/spec/frontend/ide/components/jobs/item_spec.js
index 93c01640b54..7343fc80a03 100644
--- a/spec/frontend/ide/components/jobs/item_spec.js
+++ b/spec/frontend/ide/components/jobs/item_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
+import mountComponent from 'helpers/vue_mount_component_helper';
import JobItem from '~/ide/components/jobs/item.vue';
-import mountComponent from '../../../helpers/vue_mount_component_helper';
import { jobs } from '../../mock_data';
describe('IDE jobs item', () => {
@@ -27,7 +27,7 @@ describe('IDE jobs item', () => {
expect(vm.$el.querySelector('[data-testid="status_success_borderless-icon"]')).not.toBe(null);
});
- it('does not render view logs button if not started', done => {
+ it('does not render view logs button if not started', (done) => {
vm.job.started = false;
vm.$nextTick(() => {
diff --git a/spec/frontend/ide/components/jobs/list_spec.js b/spec/frontend/ide/components/jobs/list_spec.js
index e821a585e18..5c5c6602374 100644
--- a/spec/frontend/ide/components/jobs/list_spec.js
+++ b/spec/frontend/ide/components/jobs/list_spec.js
@@ -36,7 +36,7 @@ describe('IDE stages list', () => {
status: { icon: 'status_success' },
}));
- const createComponent = props => {
+ const createComponent = (props) => {
wrapper = shallowMount(StageList, {
propsData: {
...defaultProps,
@@ -48,7 +48,7 @@ describe('IDE stages list', () => {
};
afterEach(() => {
- Object.values(storeActions).forEach(actionMock => actionMock.mockClear());
+ Object.values(storeActions).forEach((actionMock) => actionMock.mockClear());
});
afterAll(() => {
diff --git a/spec/frontend/ide/components/jobs/stage_spec.js b/spec/frontend/ide/components/jobs/stage_spec.js
index 3a47571ee13..e80215b9d13 100644
--- a/spec/frontend/ide/components/jobs/stage_spec.js
+++ b/spec/frontend/ide/components/jobs/stage_spec.js
@@ -20,7 +20,7 @@ describe('IDE pipeline stage', () => {
const findHeader = () => wrapper.find({ ref: 'cardHeader' });
const findJobList = () => wrapper.find({ ref: 'jobList' });
- const createComponent = props => {
+ const createComponent = (props) => {
wrapper = shallowMount(Stage, {
propsData: {
...defaultProps,
@@ -60,10 +60,7 @@ describe('IDE pipeline stage', () => {
it('emits clickViewLog entity with job', () => {
const [job] = defaultProps.stage.jobs;
createComponent();
- wrapper
- .findAll(Item)
- .at(0)
- .vm.$emit('clickViewLog', job);
+ wrapper.findAll(Item).at(0).vm.$emit('clickViewLog', job);
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.emitted().clickViewLog[0][0]).toBe(job);
});
diff --git a/spec/frontend/ide/components/merge_requests/list_spec.js b/spec/frontend/ide/components/merge_requests/list_spec.js
index 80dcd861451..f0ac852fa67 100644
--- a/spec/frontend/ide/components/merge_requests/list_spec.js
+++ b/spec/frontend/ide/components/merge_requests/list_spec.js
@@ -82,9 +82,7 @@ describe('IDE merge requests list', () => {
return wrapper.vm
.$nextTick()
.then(() => {
- findSearchTypeButtons()
- .at(0)
- .trigger('click');
+ findSearchTypeButtons().at(0).trigger('click');
return wrapper.vm.$nextTick();
})
.then(() => {
@@ -153,8 +151,8 @@ describe('IDE merge requests list', () => {
it('shows search types', () => {
const buttons = findSearchTypeButtons();
- expect(buttons.wrappers.map(x => x.text().trim())).toEqual(
- wrapper.vm.$options.searchTypes.map(x => x.label),
+ expect(buttons.wrappers.map((x) => x.text().trim())).toEqual(
+ wrapper.vm.$options.searchTypes.map((x) => x.label),
);
});
@@ -168,9 +166,7 @@ describe('IDE merge requests list', () => {
describe('with search type', () => {
beforeEach(() => {
- findSearchTypeButtons()
- .at(0)
- .trigger('click');
+ findSearchTypeButtons().at(0).trigger('click');
return wrapper.vm
.$nextTick()
diff --git a/spec/frontend/ide/components/nav_dropdown_button_spec.js b/spec/frontend/ide/components/nav_dropdown_button_spec.js
index c98aa313f40..a02bfa5c391 100644
--- a/spec/frontend/ide/components/nav_dropdown_button_spec.js
+++ b/spec/frontend/ide/components/nav_dropdown_button_spec.js
@@ -23,7 +23,7 @@ describe('NavDropdown', () => {
vm.$mount();
};
- const findIcon = name => vm.$el.querySelector(`[data-testid="${name}-icon"]`);
+ const findIcon = (name) => vm.$el.querySelector(`[data-testid="${name}-icon"]`);
const findMRIcon = () => findIcon('merge-request');
const findBranchIcon = () => findIcon('branch');
@@ -36,7 +36,7 @@ describe('NavDropdown', () => {
expect(trimText(vm.$el.textContent)).toEqual('- -');
});
- it('renders branch name, if state has currentBranchId', done => {
+ it('renders branch name, if state has currentBranchId', (done) => {
vm.$store.state.currentBranchId = TEST_BRANCH_ID;
vm.$nextTick()
@@ -47,7 +47,7 @@ describe('NavDropdown', () => {
.catch(done.fail);
});
- it('renders mr id, if state has currentMergeRequestId', done => {
+ it('renders mr id, if state has currentMergeRequestId', (done) => {
vm.$store.state.currentMergeRequestId = TEST_MR_ID;
vm.$nextTick()
@@ -58,7 +58,7 @@ describe('NavDropdown', () => {
.catch(done.fail);
});
- it('renders branch and mr, if state has both', done => {
+ it('renders branch and mr, if state has both', (done) => {
vm.$store.state.currentBranchId = TEST_BRANCH_ID;
vm.$store.state.currentMergeRequestId = TEST_MR_ID;
diff --git a/spec/frontend/ide/components/nav_dropdown_spec.js b/spec/frontend/ide/components/nav_dropdown_spec.js
index 2f91ab7af0a..12a1a4c8013 100644
--- a/spec/frontend/ide/components/nav_dropdown_spec.js
+++ b/spec/frontend/ide/components/nav_dropdown_spec.js
@@ -39,7 +39,7 @@ describe('IDE NavDropdown', () => {
});
};
- const findIcon = name => wrapper.find(`[data-testid="${name}-icon"]`);
+ const findIcon = (name) => wrapper.find(`[data-testid="${name}-icon"]`);
const findMRIcon = () => findIcon('merge-request');
const findNavForm = () => wrapper.find('.ide-nav-form');
const showDropdown = () => {
@@ -58,7 +58,7 @@ describe('IDE NavDropdown', () => {
expect(findNavForm().exists()).toBe(false);
});
- it('renders nav form when show.bs.dropdown', done => {
+ it('renders nav form when show.bs.dropdown', (done) => {
showDropdown();
wrapper.vm
@@ -70,7 +70,7 @@ describe('IDE NavDropdown', () => {
.catch(done.fail);
});
- it('destroys nav form when closed', done => {
+ it('destroys nav form when closed', (done) => {
showDropdown();
hideDropdown();
diff --git a/spec/frontend/ide/components/new_dropdown/button_spec.js b/spec/frontend/ide/components/new_dropdown/button_spec.js
index 66317296ee9..32fa2babcdb 100644
--- a/spec/frontend/ide/components/new_dropdown/button_spec.js
+++ b/spec/frontend/ide/components/new_dropdown/button_spec.js
@@ -37,7 +37,7 @@ describe('IDE new entry dropdown button component', () => {
expect(vm.$emit).toHaveBeenCalledWith('click');
});
- it('hides label if showLabel is false', done => {
+ it('hides label if showLabel is false', (done) => {
vm.showLabel = false;
vm.$nextTick(() => {
@@ -52,7 +52,7 @@ describe('IDE new entry dropdown button component', () => {
expect(vm.tooltipTitle).toBe('');
});
- it('returns label', done => {
+ it('returns label', (done) => {
vm.showLabel = false;
vm.$nextTick(() => {
diff --git a/spec/frontend/ide/components/new_dropdown/index_spec.js b/spec/frontend/ide/components/new_dropdown/index_spec.js
index c6cebf36de3..793d950b3e0 100644
--- a/spec/frontend/ide/components/new_dropdown/index_spec.js
+++ b/spec/frontend/ide/components/new_dropdown/index_spec.js
@@ -57,7 +57,7 @@ describe('new dropdown component', () => {
});
describe('isOpen', () => {
- it('scrolls dropdown into view', done => {
+ it('scrolls dropdown into view', (done) => {
jest.spyOn(vm.$refs.dropdownMenu, 'scrollIntoView').mockImplementation(() => {});
vm.isOpen = true;
diff --git a/spec/frontend/ide/components/new_dropdown/modal_spec.js b/spec/frontend/ide/components/new_dropdown/modal_spec.js
index ea8ba24c9d0..06434081e39 100644
--- a/spec/frontend/ide/components/new_dropdown/modal_spec.js
+++ b/spec/frontend/ide/components/new_dropdown/modal_spec.js
@@ -19,7 +19,7 @@ describe('new file modal component', () => {
${'tree'} | ${'Create new directory'} | ${'Create directory'} | ${false}
${'blob'} | ${'Create new file'} | ${'Create file'} | ${true}
`('$entryType', ({ entryType, modalTitle, btnTitle, showsFileTemplates }) => {
- beforeEach(done => {
+ beforeEach((done) => {
const store = createStore();
vm = createComponentWithStore(Component, store).$mount();
diff --git a/spec/frontend/ide/components/new_dropdown/upload_spec.js b/spec/frontend/ide/components/new_dropdown/upload_spec.js
index 3f3784dbb3a..7303f81aad0 100644
--- a/spec/frontend/ide/components/new_dropdown/upload_spec.js
+++ b/spec/frontend/ide/components/new_dropdown/upload_spec.js
@@ -62,15 +62,15 @@ describe('new dropdown upload', () => {
result: 'base64,8PDw8A==', // ðððð
};
- const textFile = new File(['plain text'], 'textFile');
- const binaryFile = new File(['😺'], 'binaryFile');
+ const textFile = new File(['plain text'], 'textFile', { type: 'test/mime-text' });
+ const binaryFile = new File(['😺'], 'binaryFile', { type: 'test/mime-binary' });
beforeEach(() => {
jest.spyOn(FileReader.prototype, 'readAsText');
});
- it('calls readAsText and creates file in plain text (without encoding) if the file content is plain text', done => {
- const waitForCreate = new Promise(resolve => vm.$on('create', resolve));
+ it('calls readAsText and creates file in plain text (without encoding) if the file content is plain text', (done) => {
+ const waitForCreate = new Promise((resolve) => vm.$on('create', resolve));
vm.createFile(textTarget, textFile);
@@ -83,6 +83,7 @@ describe('new dropdown upload', () => {
type: 'blob',
content: 'plain text',
rawPath: '',
+ mimeType: 'test/mime-text',
});
})
.then(done)
@@ -99,6 +100,7 @@ describe('new dropdown upload', () => {
type: 'blob',
content: 'ðððð',
rawPath: 'blob:https://gitlab.com/048c7ac1-98de-4a37-ab1b-0206d0ea7e1b',
+ mimeType: 'test/mime-binary',
});
});
});
diff --git a/spec/frontend/ide/components/panes/collapsible_sidebar_spec.js b/spec/frontend/ide/components/panes/collapsible_sidebar_spec.js
index bb9ba32a699..c3da2a46858 100644
--- a/spec/frontend/ide/components/panes/collapsible_sidebar_spec.js
+++ b/spec/frontend/ide/components/panes/collapsible_sidebar_spec.js
@@ -15,7 +15,7 @@ describe('ide/components/panes/collapsible_sidebar.vue', () => {
const width = 350;
const fakeComponentName = 'fake-component';
- const createComponent = props => {
+ const createComponent = (props) => {
wrapper = shallowMount(CollapsibleSidebar, {
localVue,
store,
@@ -47,7 +47,7 @@ describe('ide/components/panes/collapsible_sidebar.vue', () => {
beforeEach(() => {
const FakeComponent = localVue.component(fakeComponentName, {
- render: () => {},
+ render: () => null,
});
fakeView = {
diff --git a/spec/frontend/ide/components/panes/right_spec.js b/spec/frontend/ide/components/panes/right_spec.js
index 203d35ed335..57fd0e49e94 100644
--- a/spec/frontend/ide/components/panes/right_spec.js
+++ b/spec/frontend/ide/components/panes/right_spec.js
@@ -14,7 +14,7 @@ describe('ide/components/panes/right.vue', () => {
let wrapper;
let store;
- const createComponent = props => {
+ const createComponent = (props) => {
extendStore(store, document.createElement('div'));
wrapper = shallowMount(RightPane, {
diff --git a/spec/frontend/ide/components/pipelines/list_spec.js b/spec/frontend/ide/components/pipelines/list_spec.js
index c1744fefe20..a1fbfd96c31 100644
--- a/spec/frontend/ide/components/pipelines/list_spec.js
+++ b/spec/frontend/ide/components/pipelines/list_spec.js
@@ -1,4 +1,5 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
import Vuex from 'vuex';
import { GlLoadingIcon, GlTab } from '@gitlab/ui';
import { TEST_HOST } from 'helpers/test_constants';
@@ -8,8 +9,7 @@ import JobsList from '~/ide/components/jobs/list.vue';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import IDEServices from '~/ide/services';
-const localVue = createLocalVue();
-localVue.use(Vuex);
+Vue.use(Vuex);
jest.mock('~/ide/services', () => ({
pingUsage: jest.fn(),
@@ -59,9 +59,6 @@ describe('IDE pipelines list', () => {
failedStages: failedStagesGetterMock,
pipelineFailed: () => false,
},
- methods: {
- fetchLatestPipeline: jest.fn(),
- },
},
},
});
@@ -69,7 +66,6 @@ describe('IDE pipelines list', () => {
const createComponent = (state = {}, pipelinesState = {}) => {
wrapper = shallowMount(List, {
- localVue,
store: createStore(state, pipelinesState),
});
};
@@ -165,11 +161,7 @@ describe('IDE pipelines list', () => {
const isLoadingJobs = true;
createComponent({}, { ...withLatestPipelineState, stages, isLoadingJobs });
- const jobProps = wrapper
- .findAll(GlTab)
- .at(0)
- .find(JobsList)
- .props();
+ const jobProps = wrapper.findAll(GlTab).at(0).find(JobsList).props();
expect(jobProps.stages).toBe(stages);
expect(jobProps.loading).toBe(isLoadingJobs);
});
@@ -180,11 +172,7 @@ describe('IDE pipelines list', () => {
const isLoadingJobs = true;
createComponent({}, { ...withLatestPipelineState, isLoadingJobs });
- const jobProps = wrapper
- .findAll(GlTab)
- .at(1)
- .find(JobsList)
- .props();
+ const jobProps = wrapper.findAll(GlTab).at(1).find(JobsList).props();
expect(jobProps.stages).toBe(failedStages);
expect(jobProps.loading).toBe(isLoadingJobs);
});
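This hunk swaps the createLocalVue/localVue pair for installing Vuex on the global Vue. A compact sketch of mounting under that setup; DummyComponent and the store shape are placeholders, not the real pipelines list component:

  import Vue from 'vue';
  import Vuex from 'vuex';
  import { shallowMount } from '@vue/test-utils';

  Vue.use(Vuex); // registered once on the global Vue instead of a createLocalVue copy

  const DummyComponent = {
    render(h) {
      return h('div', this.$store.state.msg); // renders straight from store state
    },
  };

  const createWrapper = (msg) =>
    shallowMount(DummyComponent, { store: new Vuex.Store({ state: { msg } }) });

  it('reads state from the injected store', () => {
    expect(createWrapper('pipelines').text()).toBe('pipelines');
  });
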
diff --git a/spec/frontend/ide/components/preview/clientside_spec.js b/spec/frontend/ide/components/preview/clientside_spec.js
index 7b22f75cee4..220982e1fd9 100644
--- a/spec/frontend/ide/components/preview/clientside_spec.js
+++ b/spec/frontend/ide/components/preview/clientside_spec.js
@@ -3,6 +3,7 @@ import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import smooshpack from 'smooshpack';
import Clientside from '~/ide/components/preview/clientside.vue';
+import eventHub from '~/ide/eventhub';
jest.mock('smooshpack', () => ({
Manager: jest.fn(),
@@ -70,6 +71,17 @@ describe('IDE clientside preview', () => {
});
};
+ const createInitializedComponent = () => {
+ createComponent();
+ wrapper.setData({
+ sandpackReady: true,
+ manager: {
+ listener: jest.fn(),
+ updatePreview: jest.fn(),
+ },
+ });
+ };
+
afterEach(() => {
wrapper.destroy();
});
@@ -293,33 +305,33 @@ describe('IDE clientside preview', () => {
});
describe('update', () => {
- beforeEach(() => {
- createComponent();
- wrapper.setData({ sandpackReady: true });
- });
-
it('initializes manager if manager is empty', () => {
createComponent({ getters: { packageJson: dummyPackageJson } });
wrapper.setData({ sandpackReady: true });
wrapper.vm.update();
- jest.advanceTimersByTime(250);
-
return waitForCalls().then(() => {
expect(smooshpack.Manager).toHaveBeenCalled();
});
});
it('calls updatePreview', () => {
- wrapper.setData({
- manager: {
- listener: jest.fn(),
- updatePreview: jest.fn(),
- },
- });
+ createInitializedComponent();
+
wrapper.vm.update();
- jest.advanceTimersByTime(250);
+ expect(wrapper.vm.manager.updatePreview).toHaveBeenCalledWith(wrapper.vm.sandboxOpts);
+ });
+ });
+
+ describe('on ide.files.change event', () => {
+ beforeEach(() => {
+ createInitializedComponent();
+
+ eventHub.$emit('ide.files.change');
+ });
+
+ it('calls updatePreview', () => {
expect(wrapper.vm.manager.updatePreview).toHaveBeenCalledWith(wrapper.vm.sandboxOpts);
});
});
@@ -355,4 +367,18 @@ describe('IDE clientside preview', () => {
});
});
});
+
+ describe('when destroyed', () => {
+ let spy;
+
+ beforeEach(() => {
+ createInitializedComponent();
+ spy = wrapper.vm.manager.updatePreview;
+ wrapper.destroy();
+ });
+
+ it('does not call updatePreview', () => {
+ expect(spy).not.toHaveBeenCalled();
+ });
+ });
});
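The new cases assert two things about the component's event-hub wiring: the preview updates when 'ide.files.change' is emitted, and the listener no longer fires once the component is destroyed. A sketch of component-side wiring that would satisfy both expectations (illustrative only, not the actual clientside.vue source):

  import eventHub from '~/ide/eventhub';

  export default {
    created() {
      eventHub.$on('ide.files.change', this.update);
    },
    beforeDestroy() {
      // Detach the listener; without this the "when destroyed" case above would fail.
      eventHub.$off('ide.files.change', this.update);
    },
    methods: {
      update() {
        // Guard mirrors the spec's sandpackReady/manager preconditions.
        if (!this.sandpackReady || !this.manager) return;
        this.manager.updatePreview(this.sandboxOpts);
      },
    },
  };
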
diff --git a/spec/frontend/ide/components/repo_commit_section_spec.js b/spec/frontend/ide/components/repo_commit_section_spec.js
index 096079308cd..e91debee4ca 100644
--- a/spec/frontend/ide/components/repo_commit_section_spec.js
+++ b/spec/frontend/ide/components/repo_commit_section_spec.js
@@ -1,7 +1,7 @@
import { mount } from '@vue/test-utils';
+import { keepAlive } from 'helpers/keep_alive_component_helper';
import { createStore } from '~/ide/stores';
import { createRouter } from '~/ide/ide_router';
-import { keepAlive } from '../../helpers/keep_alive_component_helper';
import RepoCommitSection from '~/ide/components/repo_commit_section.vue';
import EmptyState from '~/ide/components/commit_sidebar/empty_state.vue';
import { stageKeys } from '~/ide/constants';
@@ -32,7 +32,7 @@ describe('RepoCommitSection', () => {
},
};
- const files = [file('file1'), file('file2')].map(f =>
+ const files = [file('file1'), file('file2')].map((f) =>
Object.assign(f, {
type: 'blob',
content: 'orginal content',
@@ -42,7 +42,7 @@ describe('RepoCommitSection', () => {
store.state.currentBranch = 'master';
store.state.changedFiles = [];
store.state.stagedFiles = [{ ...files[0] }, { ...files[1] }];
- store.state.stagedFiles.forEach(f =>
+ store.state.stagedFiles.forEach((f) =>
Object.assign(f, {
changed: true,
staged: true,
@@ -50,7 +50,7 @@ describe('RepoCommitSection', () => {
}),
);
- files.forEach(f => {
+ files.forEach((f) => {
store.state.entries[f.path] = f;
});
}
@@ -77,18 +77,8 @@ describe('RepoCommitSection', () => {
});
it('renders no changes text', () => {
- expect(
- wrapper
- .find(EmptyState)
- .text()
- .trim(),
- ).toContain('No changes');
- expect(
- wrapper
- .find(EmptyState)
- .find('img')
- .attributes('src'),
- ).toBe(TEST_NO_CHANGES_SVG);
+ expect(wrapper.find(EmptyState).text().trim()).toContain('No changes');
+ expect(wrapper.find(EmptyState).find('img').attributes('src')).toBe(TEST_NO_CHANGES_SVG);
});
});
@@ -115,9 +105,9 @@ describe('RepoCommitSection', () => {
const allFiles = store.state.changedFiles.concat(store.state.stagedFiles);
const changedFileNames = wrapper
.findAll('.multi-file-commit-list > li')
- .wrappers.map(x => x.text().trim());
+ .wrappers.map((x) => x.text().trim());
- expect(changedFileNames).toEqual(allFiles.map(x => x.path));
+ expect(changedFileNames).toEqual(allFiles.map((x) => x.path));
});
it('does not show empty state', () => {
@@ -151,7 +141,7 @@ describe('RepoCommitSection', () => {
beforeEach(() => {
setupDefaultState();
- store.state.changedFiles = store.state.stagedFiles.map(x =>
+ store.state.changedFiles = store.state.stagedFiles.map((x) =>
Object.assign(x, { staged: false }),
);
store.state.stagedFiles = [];
diff --git a/spec/frontend/ide/components/repo_editor_spec.js b/spec/frontend/ide/components/repo_editor_spec.js
index 71a4f08cfb4..89a7f423e34 100644
--- a/spec/frontend/ide/components/repo_editor_spec.js
+++ b/spec/frontend/ide/components/repo_editor_spec.js
@@ -5,6 +5,7 @@ import '~/behaviors/markdown/render_gfm';
import { Range } from 'monaco-editor';
import waitForPromises from 'helpers/wait_for_promises';
import waitUsingRealTimer from 'helpers/wait_using_real_timer';
+import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
import axios from '~/lib/utils/axios_utils';
import service from '~/ide/services';
import { createStoreOptions } from '~/ide/stores';
@@ -16,7 +17,6 @@ import {
FILE_VIEW_MODE_PREVIEW,
viewerTypes,
} from '~/ide/constants';
-import { createComponentWithStore } from '../../helpers/vue_mount_component_helper';
import { file } from '../helpers';
import { exampleConfigs, exampleFiles } from '../lib/editorconfig/mock_data';
@@ -25,7 +25,7 @@ describe('RepoEditor', () => {
let store;
const waitForEditorSetup = () =>
- new Promise(resolve => {
+ new Promise((resolve) => {
vm.$once('editorSetup', resolve);
});
@@ -43,7 +43,7 @@ describe('RepoEditor', () => {
vm.$mount();
};
- const createOpenFile = path => {
+ const createOpenFile = (path) => {
const origFile = store.state.openFiles[0];
const newFile = { ...origFile, path, key: path, name: 'myfile.txt', content: 'hello world' };
@@ -91,7 +91,7 @@ describe('RepoEditor', () => {
});
const findEditor = () => vm.$el.querySelector('.multi-file-editor-holder');
- const changeViewMode = viewMode =>
+ const changeViewMode = (viewMode) =>
store.dispatch('editor/updateFileEditor', { path: vm.file.path, data: { viewMode } });
describe('default', () => {
@@ -119,7 +119,7 @@ describe('RepoEditor', () => {
expect(findEditor()).not.toHaveCss({ display: 'none' });
});
- it('renders only an edit tab', done => {
+ it('renders only an edit tab', (done) => {
Vue.nextTick(() => {
const tabs = vm.$el.querySelectorAll('.ide-mode-tabs .nav-links li');
@@ -157,7 +157,7 @@ describe('RepoEditor', () => {
mock.restore();
});
- it('renders an Edit and a Preview Tab', done => {
+ it('renders an Edit and a Preview Tab', (done) => {
Vue.nextTick(() => {
const tabs = vm.$el.querySelectorAll('.ide-mode-tabs .nav-links li');
@@ -169,7 +169,7 @@ describe('RepoEditor', () => {
});
});
- it('renders markdown for tempFile', done => {
+ it('renders markdown for tempFile', (done) => {
vm.file.tempFile = true;
vm.$nextTick()
@@ -202,9 +202,11 @@ describe('RepoEditor', () => {
});
describe('when open file is binary and not raw', () => {
- beforeEach(done => {
+ beforeEach((done) => {
vm.file.name = 'file.dat';
vm.file.content = '🐱'; // non-ascii binary content
+ jest.spyOn(vm.editor, 'createInstance').mockImplementation();
+ jest.spyOn(vm.editor, 'createDiffInstance').mockImplementation();
vm.$nextTick(done);
});
@@ -212,10 +214,20 @@ describe('RepoEditor', () => {
it('does not render the IDE', () => {
expect(vm.shouldHideEditor).toBeTruthy();
});
+
+ it('does not call createInstance', async () => {
+ // Mirror the "act" step of the `createEditorInstance` tests
+ vm.createEditorInstance();
+
+ await vm.$nextTick();
+
+ expect(vm.editor.createInstance).not.toHaveBeenCalled();
+ expect(vm.editor.createDiffInstance).not.toHaveBeenCalled();
+ });
});
describe('createEditorInstance', () => {
- it('calls createInstance when viewer is editor', done => {
+ it('calls createInstance when viewer is editor', (done) => {
jest.spyOn(vm.editor, 'createInstance').mockImplementation();
vm.createEditorInstance();
@@ -227,7 +239,7 @@ describe('RepoEditor', () => {
});
});
- it('calls createDiffInstance when viewer is diff', done => {
+ it('calls createDiffInstance when viewer is diff', (done) => {
vm.$store.state.viewer = 'diff';
jest.spyOn(vm.editor, 'createDiffInstance').mockImplementation();
@@ -241,7 +253,7 @@ describe('RepoEditor', () => {
});
});
- it('calls createDiffInstance when viewer is a merge request diff', done => {
+ it('calls createDiffInstance when viewer is a merge request diff', (done) => {
vm.$store.state.viewer = 'mrdiff';
jest.spyOn(vm.editor, 'createDiffInstance').mockImplementation();
@@ -342,7 +354,7 @@ describe('RepoEditor', () => {
jest.spyOn(vm.editor, 'updateDiffView').mockImplementation();
});
- it('calls updateDimensions when panelResizing is false', done => {
+ it('calls updateDimensions when panelResizing is false', (done) => {
vm.$store.state.panelResizing = true;
vm.$nextTick()
@@ -358,7 +370,7 @@ describe('RepoEditor', () => {
.catch(done.fail);
});
- it('does not call updateDimensions when panelResizing is true', done => {
+ it('does not call updateDimensions when panelResizing is true', (done) => {
vm.$store.state.panelResizing = true;
vm.$nextTick(() => {
@@ -369,7 +381,7 @@ describe('RepoEditor', () => {
});
});
- it('calls updateDimensions when rightPane is opened', done => {
+ it('calls updateDimensions when rightPane is opened', (done) => {
vm.$store.state.rightPane.isOpen = true;
vm.$nextTick(() => {
@@ -386,7 +398,7 @@ describe('RepoEditor', () => {
expect(vm.$el.querySelector('.nav-links')).not.toBe(null);
});
- it('hides tabs in review mode', done => {
+ it('hides tabs in review mode', (done) => {
vm.$store.state.currentActivityView = leftSidebarViews.review.name;
vm.$nextTick(() => {
@@ -396,7 +408,7 @@ describe('RepoEditor', () => {
});
});
- it('hides tabs in commit mode', done => {
+ it('hides tabs in commit mode', (done) => {
vm.$store.state.currentActivityView = leftSidebarViews.commit.name;
vm.$nextTick(() => {
@@ -408,7 +420,7 @@ describe('RepoEditor', () => {
});
describe('when files view mode is preview', () => {
- beforeEach(done => {
+ beforeEach((done) => {
jest.spyOn(vm.editor, 'updateDimensions').mockImplementation();
changeViewMode(FILE_VIEW_MODE_PREVIEW);
vm.file.name = 'myfile.md';
@@ -440,7 +452,7 @@ describe('RepoEditor', () => {
jest.spyOn(vm, 'shouldHideEditor', 'get').mockReturnValue(true);
});
- it('does not fetch file information for temp entries', done => {
+ it('does not fetch file information for temp entries', (done) => {
vm.file.tempFile = true;
vm.initEditor();
@@ -452,7 +464,7 @@ describe('RepoEditor', () => {
.catch(done.fail);
});
- it('is being initialised for files without content even if shouldHideEditor is `true`', done => {
+ it('is being initialised for files without content even if shouldHideEditor is `true`', (done) => {
vm.file.content = '';
vm.file.raw = '';
@@ -467,7 +479,7 @@ describe('RepoEditor', () => {
.catch(done.fail);
});
- it('does not initialize editor for files already with content', done => {
+ it('does not initialize editor for files already with content', (done) => {
vm.file.content = 'foo';
vm.initEditor();
@@ -487,7 +499,7 @@ describe('RepoEditor', () => {
jest.spyOn(vm, 'initEditor').mockImplementation();
});
- it('calls removePendingTab when old file is pending', done => {
+ it('calls removePendingTab when old file is pending', (done) => {
jest.spyOn(vm, 'shouldHideEditor', 'get').mockReturnValue(true);
jest.spyOn(vm, 'removePendingTab').mockImplementation();
@@ -507,7 +519,7 @@ describe('RepoEditor', () => {
.catch(done.fail);
});
- it('does not call initEditor if the file did not change', done => {
+ it('does not call initEditor if the file did not change', (done) => {
Vue.set(vm, 'file', vm.file);
vm.$nextTick()
@@ -518,7 +530,7 @@ describe('RepoEditor', () => {
.catch(done.fail);
});
- it('calls initEditor when file key is changed', done => {
+ it('calls initEditor when file key is changed', (done) => {
expect(vm.initEditor).not.toHaveBeenCalled();
Vue.set(vm, 'file', {
@@ -540,7 +552,7 @@ describe('RepoEditor', () => {
vm.getRawFileData.mockRestore();
});
- const createRemoteFile = name => ({
+ const createRemoteFile = (name) => ({
...file(name),
tmpFile: false,
});
@@ -593,7 +605,7 @@ describe('RepoEditor', () => {
});
describe('onPaste', () => {
- const setFileName = name => {
+ const setFileName = (name) => {
Vue.set(vm, 'file', {
...vm.file,
content: 'hello world\n',
@@ -615,8 +627,8 @@ describe('RepoEditor', () => {
);
};
- const watchState = watched =>
- new Promise(resolve => {
+ const watchState = (watched) =>
+ new Promise((resolve) => {
const unwatch = vm.$store.watch(watched, () => {
unwatch();
resolve();
@@ -626,7 +638,7 @@ describe('RepoEditor', () => {
// Pasting an image does a lot of things like using the FileReader API,
// so, waitForPromises isn't very reliable (and causes a flaky spec)
// Read more about state.watch: https://vuex.vuejs.org/api/#watch
- const waitForFileContentChange = () => watchState(s => s.entries['foo/bar.md'].content);
+ const waitForFileContentChange = () => watchState((s) => s.entries['foo/bar.md'].content);
beforeEach(() => {
setFileName('bar.md');
diff --git a/spec/frontend/ide/components/repo_tabs_spec.js b/spec/frontend/ide/components/repo_tabs_spec.js
index b251f207853..45a17c37667 100644
--- a/spec/frontend/ide/components/repo_tabs_spec.js
+++ b/spec/frontend/ide/components/repo_tabs_spec.js
@@ -30,7 +30,7 @@ describe('RepoTabs', () => {
wrapper.destroy();
});
- it('renders a list of tabs', done => {
+ it('renders a list of tabs', (done) => {
store.state.openFiles[0].active = true;
wrapper.vm.$nextTick(() => {
diff --git a/spec/frontend/ide/components/resizable_panel_spec.js b/spec/frontend/ide/components/resizable_panel_spec.js
index 7368de0cee7..b1a1212371e 100644
--- a/spec/frontend/ide/components/resizable_panel_spec.js
+++ b/spec/frontend/ide/components/resizable_panel_spec.js
@@ -38,7 +38,7 @@ describe('~/ide/components/resizable_panel', () => {
};
const findResizer = () => wrapper.find(PanelResizer);
const findInlineStyle = () => wrapper.element.style.cssText;
- const createInlineStyle = width => `width: ${width}px;`;
+ const createInlineStyle = (width) => `width: ${width}px;`;
describe.each`
props | showResizer | resizerSide | expectedStyle
diff --git a/spec/frontend/ide/components/shared/tokened_input_spec.js b/spec/frontend/ide/components/shared/tokened_input_spec.js
index e687216bd06..837bfe6b574 100644
--- a/spec/frontend/ide/components/shared/tokened_input_spec.js
+++ b/spec/frontend/ide/components/shared/tokened_input_spec.js
@@ -44,9 +44,9 @@ describe('IDE shared/TokenedInput', () => {
});
it('renders tokens', () => {
- const renderedTokens = getTokenElements(vm).map(x => x.textContent.trim());
+ const renderedTokens = getTokenElements(vm).map((x) => x.textContent.trim());
- expect(renderedTokens).toEqual(TEST_TOKENS.map(x => x.label));
+ expect(renderedTokens).toEqual(TEST_TOKENS.map((x) => x.label));
});
it('renders input', () => {
@@ -54,7 +54,7 @@ describe('IDE shared/TokenedInput', () => {
expect(vm.$refs.input).toHaveValue(TEST_VALUE);
});
- it('renders placeholder, when tokens are empty', done => {
+ it('renders placeholder, when tokens are empty', (done) => {
vm.tokens = [];
vm.$nextTick()
diff --git a/spec/frontend/ide/components/terminal/session_spec.js b/spec/frontend/ide/components/terminal/session_spec.js
index 3ca37166ac4..5653c8bf14d 100644
--- a/spec/frontend/ide/components/terminal/session_spec.js
+++ b/spec/frontend/ide/components/terminal/session_spec.js
@@ -67,7 +67,7 @@ describe('IDE TerminalSession', () => {
});
});
- [STARTING, PENDING, RUNNING].forEach(status => {
+ [STARTING, PENDING, RUNNING].forEach((status) => {
it(`show stop button when status is ${status}`, () => {
state.session = { status };
factory();
@@ -82,7 +82,7 @@ describe('IDE TerminalSession', () => {
});
});
- [STOPPING, STOPPED].forEach(status => {
+ [STOPPING, STOPPED].forEach((status) => {
it(`show stop button when status is ${status}`, () => {
state.session = { status };
factory();
diff --git a/spec/frontend/ide/components/terminal/terminal_controls_spec.js b/spec/frontend/ide/components/terminal/terminal_controls_spec.js
index c22063e1d72..99182710218 100644
--- a/spec/frontend/ide/components/terminal/terminal_controls_spec.js
+++ b/spec/frontend/ide/components/terminal/terminal_controls_spec.js
@@ -17,7 +17,7 @@ describe('IDE TerminalControls', () => {
it('shows an up and down scroll button', () => {
factory();
- expect(buttons.wrappers.map(x => x.props())).toEqual([
+ expect(buttons.wrappers.map((x) => x.props())).toEqual([
expect.objectContaining({ direction: 'up', disabled: true }),
expect.objectContaining({ direction: 'down', disabled: true }),
]);
diff --git a/spec/frontend/ide/components/terminal/terminal_spec.js b/spec/frontend/ide/components/terminal/terminal_spec.js
index 3095288bb28..9cfe167d2f6 100644
--- a/spec/frontend/ide/components/terminal/terminal_spec.js
+++ b/spec/frontend/ide/components/terminal/terminal_spec.js
@@ -1,4 +1,5 @@
-import { createLocalVue, shallowMount } from '@vue/test-utils';
+import Vue, { nextTick } from 'vue';
+import { shallowMount } from '@vue/test-utils';
import Vuex from 'vuex';
import { GlLoadingIcon } from '@gitlab/ui';
import Terminal from '~/ide/components/terminal/terminal.vue';
@@ -14,24 +15,25 @@ import GLTerminal from '~/terminal/terminal';
const TEST_TERMINAL_PATH = 'terminal/path';
-const localVue = createLocalVue();
-localVue.use(Vuex);
+Vue.use(Vuex);
jest.mock('~/terminal/terminal', () =>
- jest.fn().mockImplementation(() => ({
- dispose: jest.fn(),
- disable: jest.fn(),
- addScrollListener: jest.fn(),
- scrollToTop: jest.fn(),
- scrollToBottom: jest.fn(),
- })),
+ jest.fn().mockImplementation(function FakeTerminal() {
+ Object.assign(this, {
+ dispose: jest.fn(),
+ disable: jest.fn(),
+ addScrollListener: jest.fn(),
+ scrollToTop: jest.fn(),
+ scrollToBottom: jest.fn(),
+ });
+ }),
);
describe('IDE Terminal', () => {
let wrapper;
let state;
- const factory = propsData => {
+ const factory = (propsData) => {
const store = new Vuex.Store({
state,
mutations: {
@@ -41,13 +43,12 @@ describe('IDE Terminal', () => {
},
});
- wrapper = shallowMount(localVue.extend(Terminal), {
+ wrapper = shallowMount(Terminal, {
propsData: {
status: RUNNING,
terminalPath: TEST_TERMINAL_PATH,
...propsData,
},
- localVue,
store,
});
};
@@ -63,7 +64,7 @@ describe('IDE Terminal', () => {
});
describe('loading text', () => {
- [STARTING, PENDING].forEach(status => {
+ [STARTING, PENDING].forEach((status) => {
it(`shows when starting (${status})`, () => {
factory({ status });
@@ -79,7 +80,7 @@ describe('IDE Terminal', () => {
expect(wrapper.find('.top-bar').text()).toBe('Stopping...');
});
- [RUNNING, STOPPED].forEach(status => {
+ [RUNNING, STOPPED].forEach((status) => {
it('hides when not loading', () => {
factory({ status });
@@ -102,7 +103,7 @@ describe('IDE Terminal', () => {
factory();
wrapper.vm.createTerminal();
- return localVue.nextTick();
+ return nextTick();
});
it('is visible if terminal is created', () => {
@@ -129,7 +130,7 @@ describe('IDE Terminal', () => {
wrapper.setData({ canScrollUp: true, canScrollDown: true });
- return localVue.nextTick().then(() => {
+ return nextTick().then(() => {
expect(wrapper.find(TerminalControls).props()).toEqual({
canScrollUp: true,
canScrollDown: true,
@@ -139,30 +140,24 @@ describe('IDE Terminal', () => {
});
describe('refresh', () => {
- let createTerminal;
- let stopTerminal;
-
- beforeEach(() => {
- createTerminal = jest.fn().mockName('createTerminal');
- stopTerminal = jest.fn().mockName('stopTerminal');
- });
-
it('creates the terminal if running', () => {
factory({ status: RUNNING, terminalPath: TEST_TERMINAL_PATH });
- wrapper.setMethods({ createTerminal });
wrapper.vm.refresh();
- expect(createTerminal).toHaveBeenCalled();
+ expect(GLTerminal.mock.instances).toHaveLength(1);
});
- it('stops the terminal if stopping', () => {
- factory({ status: STOPPING });
+ it('stops the terminal if stopping', async () => {
+ factory({ status: RUNNING, terminalPath: TEST_TERMINAL_PATH });
- wrapper.setMethods({ stopTerminal });
wrapper.vm.refresh();
- expect(stopTerminal).toHaveBeenCalled();
+ const terminal = GLTerminal.mock.instances[0];
+ wrapper.setProps({ status: STOPPING });
+ await nextTick();
+
+ expect(terminal.disable).toHaveBeenCalled();
});
});
diff --git a/spec/frontend/ide/components/terminal_sync/terminal_sync_status_spec.js b/spec/frontend/ide/components/terminal_sync/terminal_sync_status_spec.js
index 9adf5848f9d..d15583f81e4 100644
--- a/spec/frontend/ide/components/terminal_sync/terminal_sync_status_spec.js
+++ b/spec/frontend/ide/components/terminal_sync/terminal_sync_status_spec.js
@@ -26,7 +26,7 @@ describe('ide/components/terminal_sync/terminal_sync_status', () => {
namespaced: true,
state: moduleState,
mutations: {
- [START_LOADING]: state => {
+ [START_LOADING]: (state) => {
state.isLoading = true;
},
},
diff --git a/spec/frontend/ide/file_helpers.js b/spec/frontend/ide/file_helpers.js
index 326f8b9716d..19604b9d570 100644
--- a/spec/frontend/ide/file_helpers.js
+++ b/spec/frontend/ide/file_helpers.js
@@ -26,7 +26,7 @@ export const createMovedFile = (path, prevPath, content) =>
prevPath,
});
-export const createEntries = path =>
+export const createEntries = (path) =>
path.split('/').reduce((acc, part, idx, parts) => {
const parentPath = parts.slice(0, idx).join('/');
const fullPath = parentPath ? `${parentPath}/${part}` : part;
diff --git a/spec/frontend/ide/helpers.js b/spec/frontend/ide/helpers.js
index 6b65dd96ef4..f815c3d090e 100644
--- a/spec/frontend/ide/helpers.js
+++ b/spec/frontend/ide/helpers.js
@@ -12,9 +12,9 @@ export const file = (name = 'name', id = name, type = '', parent = null) =>
parentPath: parent ? parent.path : '',
});
-export const createEntriesFromPaths = paths =>
+export const createEntriesFromPaths = (paths) =>
paths
- .map(path => ({
+ .map((path) => ({
name: pathUtils.basename(path),
dir: pathUtils.dirname(path),
ext: pathUtils.extname(path),
@@ -30,7 +30,7 @@ export const createEntriesFromPaths = paths =>
};
}, {});
-export const createTriggerChangeAction = payload => ({
+export const createTriggerChangeAction = (payload) => ({
type: 'triggerFilesChange',
...(payload ? { payload } : {}),
});
@@ -41,5 +41,10 @@ export const createTriggerRenamePayload = (path, newPath) => ({
newPath,
});
+export const createTriggerUpdatePayload = (path) => ({
+ type: commitActionTypes.update,
+ path,
+});
+
export const createTriggerRenameAction = (path, newPath) =>
createTriggerChangeAction(createTriggerRenamePayload(path, newPath));
diff --git a/spec/frontend/ide/ide_router_extension_spec.js b/spec/frontend/ide/ide_router_extension_spec.js
index 3e29ecc4a90..976b127ac83 100644
--- a/spec/frontend/ide/ide_router_extension_spec.js
+++ b/spec/frontend/ide/ide_router_extension_spec.js
@@ -4,7 +4,7 @@ import IdeRouter from '~/ide/ide_router_extension';
jest.mock('vue-router');
describe('IDE overrides of VueRouter', () => {
- const paths = branch => [
+ const paths = (branch) => [
`${branch}`,
`/${branch}`,
`/${branch}/-/`,
@@ -35,7 +35,7 @@ describe('IDE overrides of VueRouter', () => {
${'test-#-hash'} | ${'test-%23-hash'}
${'test/hash#123'} | ${'test/hash%23123'}
`('finds project path when route is $path', ({ path, expected }) => {
- paths(path).forEach(route => {
+ paths(path).forEach((route) => {
const expectedPath = route.replace(path, expected);
router.push(route);
diff --git a/spec/frontend/ide/ide_router_spec.js b/spec/frontend/ide/ide_router_spec.js
index a4fe00883cf..acab2c6aeef 100644
--- a/spec/frontend/ide/ide_router_spec.js
+++ b/spec/frontend/ide/ide_router_spec.js
@@ -32,7 +32,7 @@ describe('IDE router', () => {
`/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/tree/blob`,
`/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/edit`,
`/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}`,
- ].forEach(route => {
+ ].forEach((route) => {
it(`finds project path when route is "${route}"`, () => {
router.push(route);
diff --git a/spec/frontend/ide/lib/common/model_spec.js b/spec/frontend/ide/lib/common/model_spec.js
index df46b7774b0..51df1e2e42f 100644
--- a/spec/frontend/ide/lib/common/model_spec.js
+++ b/spec/frontend/ide/lib/common/model_spec.js
@@ -81,7 +81,7 @@ describe('Multi-file editor library model', () => {
});
describe('onChange', () => {
- it('calls callback on change', done => {
+ it('calls callback on change', (done) => {
const spy = jest.fn();
model.onChange(spy);
diff --git a/spec/frontend/ide/lib/create_diff_spec.js b/spec/frontend/ide/lib/create_diff_spec.js
index 273f9ee27bd..76494f9af1b 100644
--- a/spec/frontend/ide/lib/create_diff_spec.js
+++ b/spec/frontend/ide/lib/create_diff_spec.js
@@ -145,12 +145,12 @@ new file mode 100644
--- /dev/null
+++ b/${PATH_LOREM}
@@ -0,0 +1,${LINES.length} @@
-${LINES.map(line => `+${line}`).join('\n')}
+${LINES.map((line) => `+${line}`).join('\n')}
diff --git "a/${PATH_IPSUM}" "b/${PATH_IPSUM}"
--- a/${PATH_IPSUM}
+++ b/${PATH_IPSUM}
@@ -1,${LINES.length} +1,1 @@
-${LINES.map(line => `-${line}`).join('\n')}
+${LINES.map((line) => `-${line}`).join('\n')}
+That's all folks!
\\ No newline at end of file
`;
@@ -167,10 +167,10 @@ ${LINES.map(line => `-${line}`).join('\n')}
const deletedFiles = ['foo/bar/zed/test.md', 'foo/bar/zed/test2.md'];
const entries = deletedFiles.reduce((acc, path) => Object.assign(acc, createEntries(path)), {});
const allDeleted = [...deletedFiles, 'foo/bar/zed', 'foo/bar'];
- allDeleted.forEach(path => {
+ allDeleted.forEach((path) => {
entries[path].deleted = true;
});
- const changedFiles = deletedFiles.map(x => entries[x]);
+ const changedFiles = deletedFiles.map((x) => entries[x]);
const result = createDiff({ changedFiles, entries });
diff --git a/spec/frontend/ide/lib/create_file_diff_spec.js b/spec/frontend/ide/lib/create_file_diff_spec.js
index 4b428468a6d..294f0a926aa 100644
--- a/spec/frontend/ide/lib/create_file_diff_spec.js
+++ b/spec/frontend/ide/lib/create_file_diff_spec.js
@@ -21,11 +21,7 @@ const spliceLines = (content, lineNumber, deleteCount = 0, newLines = []) => {
return lines.join('\n');
};
-const mapLines = (content, mapFn) =>
- content
- .split('\n')
- .map(mapFn)
- .join('\n');
+const mapLines = (content, mapFn) => content.split('\n').map(mapFn).join('\n');
describe('IDE lib/create_file_diff', () => {
it('returns empty string with "garbage" action', () => {
@@ -55,7 +51,7 @@ describe('IDE lib/create_file_diff', () => {
const expectedHead = `diff --git "a/${PATH}" "b/${PATH}"
new file mode 100644`;
- const expectedChunkHead = lineCount => `--- /dev/null
+ const expectedChunkHead = (lineCount) => `--- /dev/null
+++ b/${PATH}
@@ -0,0 +1,${lineCount} @@`;
@@ -90,7 +86,7 @@ ${expectedChunkHead(1)}
expect(result).toBe(`${expectedHead}
${expectedChunkHead(TEXT_LINE_COUNT)}
-${mapLines(TEXT, line => `+${line}`)}
+${mapLines(TEXT, (line) => `+${line}`)}
`);
});
});
@@ -99,7 +95,7 @@ ${mapLines(TEXT, line => `+${line}`)}
const expectedHead = `diff --git "a/${PATH}" "b/${PATH}"
deleted file mode 100644`;
- const expectedChunkHead = lineCount => `--- a/${PATH}
+ const expectedChunkHead = (lineCount) => `--- a/${PATH}
+++ /dev/null
@@ -1,${lineCount} +0,0 @@`;
@@ -115,7 +111,7 @@ deleted file mode 100644`;
expect(result).toBe(`${expectedHead}
${expectedChunkHead(TEXT_LINE_COUNT)}
-${mapLines(TEXT, line => `-${line}`)}
+${mapLines(TEXT, (line) => `-${line}`)}
`);
});
});
diff --git a/spec/frontend/ide/lib/diff/controller_spec.js b/spec/frontend/ide/lib/diff/controller_spec.js
index 8ee6388a760..57c134620c0 100644
--- a/spec/frontend/ide/lib/diff/controller_spec.js
+++ b/spec/frontend/ide/lib/diff/controller_spec.js
@@ -37,7 +37,7 @@ describe('Multi-file editor library dirty diff controller', () => {
});
describe('getDiffChangeType', () => {
- ['added', 'removed', 'modified'].forEach(type => {
+ ['added', 'removed', 'modified'].forEach((type) => {
it(`returns ${type}`, () => {
const change = {
[type]: true,
@@ -49,7 +49,7 @@ describe('Multi-file editor library dirty diff controller', () => {
});
describe('getDecorator', () => {
- ['added', 'removed', 'modified'].forEach(type => {
+ ['added', 'removed', 'modified'].forEach((type) => {
it(`returns with linesDecorationsClassName for ${type}`, () => {
const change = {
[type]: true,
diff --git a/spec/frontend/ide/lib/editor_spec.js b/spec/frontend/ide/lib/editor_spec.js
index 01c2eab33a5..12779c61dc3 100644
--- a/spec/frontend/ide/lib/editor_spec.js
+++ b/spec/frontend/ide/lib/editor_spec.js
@@ -15,7 +15,7 @@ describe('Multi-file editor library', () => {
let holder;
let store;
- const setNodeOffsetWidth = val => {
+ const setNodeOffsetWidth = (val) => {
Object.defineProperty(instance.instance.getDomNode(), 'offsetWidth', {
get() {
return val;
diff --git a/spec/frontend/ide/lib/editorconfig/parser_spec.js b/spec/frontend/ide/lib/editorconfig/parser_spec.js
index f99410236e1..c2b4a8e6c49 100644
--- a/spec/frontend/ide/lib/editorconfig/parser_spec.js
+++ b/spec/frontend/ide/lib/editorconfig/parser_spec.js
@@ -2,14 +2,14 @@ import { getRulesWithTraversal } from '~/ide/lib/editorconfig/parser';
import { exampleConfigs, exampleFiles } from './mock_data';
describe('~/ide/lib/editorconfig/parser', () => {
- const getExampleConfigContent = path =>
- Promise.resolve(exampleConfigs.find(x => x.path === path)?.content);
+ const getExampleConfigContent = (path) =>
+ Promise.resolve(exampleConfigs.find((x) => x.path === path)?.content);
describe('getRulesWithTraversal', () => {
it.each(exampleFiles)(
'traverses through all editorconfig files in parent directories (until root=true is hit) and finds rules for this file (case %#)',
({ path, rules }) => {
- return getRulesWithTraversal(path, getExampleConfigContent).then(result => {
+ return getRulesWithTraversal(path, getExampleConfigContent).then((result) => {
expect(result).toEqual(rules);
});
},
diff --git a/spec/frontend/ide/lib/errors_spec.js b/spec/frontend/ide/lib/errors_spec.js
index 733d5a5da3c..2e4acdb8a63 100644
--- a/spec/frontend/ide/lib/errors_spec.js
+++ b/spec/frontend/ide/lib/errors_spec.js
@@ -14,7 +14,7 @@ const CODEOWNERS_MESSAGE =
const CHANGED_MESSAGE = 'Things changed since you started editing';
describe('~/ide/lib/errors', () => {
- const createResponseError = message => ({
+ const createResponseError = (message) => ({
response: {
data: {
message,
diff --git a/spec/frontend/ide/lib/files_spec.js b/spec/frontend/ide/lib/files_spec.js
index 8ca6f01d9a6..50738af0e33 100644
--- a/spec/frontend/ide/lib/files_spec.js
+++ b/spec/frontend/ide/lib/files_spec.js
@@ -1,7 +1,9 @@
import { decorateFiles, splitParent } from '~/ide/lib/files';
import { decorateData } from '~/ide/stores/utils';
-const createEntries = paths => {
+const TEST_BLOB_DATA = { mimeType: 'test/mime' };
+
+const createEntries = (paths) => {
const createEntry = (acc, { path, type, children }) => {
const { name, parent } = splitParent(path);
@@ -13,7 +15,8 @@ const createEntries = paths => {
type,
parentPath: parent,
}),
- tree: children.map(childName => expect.objectContaining({ name: childName })),
+ tree: children.map((childName) => expect.objectContaining({ name: childName })),
+ ...(type === 'blob' ? TEST_BLOB_DATA : {}),
};
return acc;
@@ -43,14 +46,14 @@ describe('IDE lib decorate files', () => {
{ path: 'README.md', type: 'blob', children: [] },
]);
- const { entries, treeList } = decorateFiles({ data });
+ const { entries, treeList } = decorateFiles({ data, blobData: TEST_BLOB_DATA });
// Here we test the keys and then each key/value individually because `expect(entries).toEqual(expectedEntries)`
// was taking a very long time for some reason. Probably due to large objects and nested `expect.objectContaining`.
const entryKeys = Object.keys(entries);
expect(entryKeys).toEqual(Object.keys(expectedEntries));
- entryKeys.forEach(key => {
+ entryKeys.forEach((key) => {
expect(entries[key]).toEqual(expectedEntries[key]);
});
diff --git a/spec/frontend/ide/lib/languages/hcl_spec.js b/spec/frontend/ide/lib/languages/hcl_spec.js
index a39673a3225..c6ebad6a4f4 100644
--- a/spec/frontend/ide/lib/languages/hcl_spec.js
+++ b/spec/frontend/ide/lib/languages/hcl_spec.js
@@ -284,6 +284,42 @@ describe('tokenization for .tf files', () => {
],
],
],
+ [
+ ` foo = <<-EOF
+ bar
+ EOF`,
+ [
+ [
+ { language: 'hcl', offset: 0, type: '' },
+ { language: 'hcl', offset: 2, type: 'variable.hcl' },
+ { language: 'hcl', offset: 5, type: '' },
+ { language: 'hcl', offset: 6, type: 'operator.hcl' },
+ { language: 'hcl', offset: 7, type: '' },
+ { language: 'hcl', offset: 8, type: 'string.heredoc.delimiter.hcl' },
+ ],
+ [{ language: 'hcl', offset: 0, type: 'string.heredoc.hcl' }],
+ [
+ { language: 'hcl', offset: 0, type: 'string.heredoc.hcl' },
+ { language: 'hcl', offset: 2, type: 'string.heredoc.delimiter.hcl' },
+ ],
+ ],
+ ],
+ [
+ `foo = <<-EOF
+bar
+EOF`,
+ [
+ [
+ { language: 'hcl', offset: 0, type: 'variable.hcl' },
+ { language: 'hcl', offset: 3, type: '' },
+ { language: 'hcl', offset: 4, type: 'operator.hcl' },
+ { language: 'hcl', offset: 5, type: '' },
+ { language: 'hcl', offset: 6, type: 'string.heredoc.delimiter.hcl' },
+ ],
+ [{ language: 'hcl', offset: 0, type: 'string.heredoc.hcl' }],
+ [{ language: 'hcl', offset: 0, type: 'string.heredoc.delimiter.hcl' }],
+ ],
+ ],
])('%s', (string, tokens) => {
expect(editor.tokenize(string, 'hcl')).toEqual(tokens);
});
diff --git a/spec/frontend/ide/lib/mirror_spec.js b/spec/frontend/ide/lib/mirror_spec.js
index 21bed5948f3..8f417ea54dc 100644
--- a/spec/frontend/ide/lib/mirror_spec.js
+++ b/spec/frontend/ide/lib/mirror_spec.js
@@ -72,7 +72,7 @@ describe('ide/lib/mirror', () => {
});
const waitForConnection = (delay = SERVICE_DELAY) => {
- const wait = new Promise(resolve => {
+ const wait = new Promise((resolve) => {
setTimeout(resolve, 10);
});
@@ -82,7 +82,7 @@ describe('ide/lib/mirror', () => {
};
const connectPass = () => waitForConnection().then(() => mockWebSocket.onopen());
const connectFail = () => waitForConnection().then(() => mockWebSocket.onerror());
- const sendResponse = msg => {
+ const sendResponse = (msg) => {
mockWebSocket.onmessage(msg);
};
diff --git a/spec/frontend/ide/services/index_spec.js b/spec/frontend/ide/services/index_spec.js
index d2c32a81811..c3d6182bd78 100644
--- a/spec/frontend/ide/services/index_spec.js
+++ b/spec/frontend/ide/services/index_spec.js
@@ -51,7 +51,7 @@ describe('IDE services', () => {
raw: 'raw content',
};
- return services.getRawFileData(file).then(raw => {
+ return services.getRawFileData(file).then((raw) => {
expect(raw).toBe('content');
});
});
@@ -65,7 +65,7 @@ describe('IDE services', () => {
raw: 'raw content',
};
- return services.getRawFileData(file).then(raw => {
+ return services.getRawFileData(file).then((raw) => {
expect(raw).toBe('raw content');
});
});
@@ -77,7 +77,7 @@ describe('IDE services', () => {
raw: 'raw content',
};
- return services.getRawFileData(file).then(raw => {
+ return services.getRawFileData(file).then((raw) => {
expect(raw).toBe('raw content');
});
});
@@ -89,7 +89,7 @@ describe('IDE services', () => {
raw: '',
};
- return services.getRawFileData(file).then(raw => {
+ return services.getRawFileData(file).then((raw) => {
expect(raw).toBe('');
});
});
@@ -116,7 +116,22 @@ describe('IDE services', () => {
});
it('sends a request to file.rawPath', () => {
- return services.getRawFileData(file).then(raw => {
+ return services.getRawFileData(file).then((raw) => {
+ expect(axios.get).toHaveBeenCalledWith(file.rawPath, {
+ transformResponse: [expect.any(Function)],
+ });
+ expect(raw).toEqual('raw content');
+ });
+ });
+
+ it('returns arraybuffer for binary files', () => {
+ file.binary = true;
+
+ return services.getRawFileData(file).then((raw) => {
+ expect(axios.get).toHaveBeenCalledWith(file.rawPath, {
+ transformResponse: [expect.any(Function)],
+ responseType: 'arraybuffer',
+ });
expect(raw).toEqual('raw content');
});
});
@@ -146,7 +161,7 @@ describe('IDE services', () => {
it('gives back file.baseRaw for files with that property present', () => {
file.baseRaw = TEST_FILE_CONTENTS;
- return services.getBaseRawFileData(file, TEST_PROJECT_ID, TEST_COMMIT_SHA).then(content => {
+ return services.getBaseRawFileData(file, TEST_PROJECT_ID, TEST_COMMIT_SHA).then((content) => {
expect(content).toEqual(TEST_FILE_CONTENTS);
});
});
@@ -155,7 +170,7 @@ describe('IDE services', () => {
file.tempFile = true;
file.baseRaw = TEST_FILE_CONTENTS;
- return services.getBaseRawFileData(file, TEST_PROJECT_ID, TEST_COMMIT_SHA).then(content => {
+ return services.getBaseRawFileData(file, TEST_PROJECT_ID, TEST_COMMIT_SHA).then((content) => {
expect(content).toEqual(TEST_FILE_CONTENTS);
});
});
@@ -192,7 +207,7 @@ describe('IDE services', () => {
});
it('fetches file content', () =>
- services.getBaseRawFileData(file, TEST_PROJECT_ID, TEST_COMMIT_SHA).then(content => {
+ services.getBaseRawFileData(file, TEST_PROJECT_ID, TEST_COMMIT_SHA).then((content) => {
expect(content).toEqual(TEST_FILE_CONTENTS);
}));
},
@@ -209,7 +224,7 @@ describe('IDE services', () => {
Api.project.mockReturnValue(Promise.resolve({ data: { ...projectData } }));
query.mockReturnValue(Promise.resolve({ data: { project: gqlProjectData } }));
- return services.getProjectData(TEST_NAMESPACE, TEST_PROJECT).then(response => {
+ return services.getProjectData(TEST_NAMESPACE, TEST_PROJECT).then((response) => {
expect(response).toEqual({ data: { ...projectData, ...gqlProjectData } });
expect(Api.project).toHaveBeenCalledWith(TEST_PROJECT_ID);
expect(query).toHaveBeenCalledWith({
diff --git a/spec/frontend/ide/stores/actions/file_spec.js b/spec/frontend/ide/stores/actions/file_spec.js
index 744ac086b5f..9d367714bbe 100644
--- a/spec/frontend/ide/stores/actions/file_spec.js
+++ b/spec/frontend/ide/stores/actions/file_spec.js
@@ -7,7 +7,7 @@ import * as types from '~/ide/stores/mutation_types';
import service from '~/ide/services';
import { createRouter } from '~/ide/ide_router';
import eventHub from '~/ide/eventhub';
-import { file, createTriggerRenameAction } from '../../helpers';
+import { file, createTriggerRenameAction, createTriggerUpdatePayload } from '../../helpers';
const ORIGINAL_CONTENT = 'original content';
const RELATIVE_URL_ROOT = '/gitlab';
@@ -75,7 +75,7 @@ describe('IDE store file actions', () => {
});
});
- it('closes file & opens next available file', () => {
+ it('switches to the next available file before closing the current one', () => {
const f = file('newOpenFile');
store.state.openFiles.push(f);
@@ -90,10 +90,12 @@ describe('IDE store file actions', () => {
});
it('removes file if it pending', () => {
- store.state.openFiles.push({
- ...localFile,
- pending: true,
- });
+ store.state.openFiles = [
+ {
+ ...localFile,
+ pending: true,
+ },
+ ];
return store.dispatch('closeFile', localFile).then(() => {
expect(store.state.openFiles.length).toBe(0);
@@ -189,7 +191,7 @@ describe('IDE store file actions', () => {
});
describe('call to service', () => {
- const callExpectation = serviceCalled => {
+ const callExpectation = (serviceCalled) => {
store.dispatch('getFileData', { path: localFile.path });
if (serviceCalled) {
@@ -422,11 +424,11 @@ describe('IDE store file actions', () => {
loadingWhenGettingRawData = undefined;
loadingWhenGettingBaseRawData = undefined;
- jest.spyOn(service, 'getRawFileData').mockImplementation(f => {
+ jest.spyOn(service, 'getRawFileData').mockImplementation((f) => {
loadingWhenGettingRawData = f.loading;
return Promise.resolve('raw');
});
- jest.spyOn(service, 'getBaseRawFileData').mockImplementation(f => {
+ jest.spyOn(service, 'getBaseRawFileData').mockImplementation((f) => {
loadingWhenGettingBaseRawData = f.loading;
return Promise.resolve('rawBase');
});
@@ -510,12 +512,15 @@ describe('IDE store file actions', () => {
describe('changeFileContent', () => {
let tmpFile;
+ let onFilesChange;
beforeEach(() => {
tmpFile = file('tmpFile');
tmpFile.content = '\n';
tmpFile.raw = '\n';
store.state.entries[tmpFile.path] = tmpFile;
+ onFilesChange = jest.fn();
+ eventHub.$on('ide.files.change', onFilesChange);
});
it('updates file content', () => {
@@ -580,6 +585,17 @@ describe('IDE store file actions', () => {
expect(store.state.changedFiles.length).toBe(0);
});
});
+
+ it('triggers ide.files.change', async () => {
+ expect(onFilesChange).not.toHaveBeenCalled();
+
+ await store.dispatch('changeFileContent', {
+ path: tmpFile.path,
+ content: 'content\n',
+ });
+
+ expect(onFilesChange).toHaveBeenCalledWith(createTriggerUpdatePayload(tmpFile.path));
+ });
});
describe('with changed file', () => {
@@ -743,7 +759,7 @@ describe('IDE store file actions', () => {
});
it('returns true when opened', () => {
- return store.dispatch('openPendingTab', { file: f, keyPrefix: 'pending' }).then(added => {
+ return store.dispatch('openPendingTab', { file: f, keyPrefix: 'pending' }).then((added) => {
expect(added).toBe(true);
});
});
@@ -755,7 +771,7 @@ describe('IDE store file actions', () => {
key: `pending-${f.key}`,
});
- return store.dispatch('openPendingTab', { file: f, keyPrefix: 'pending' }).then(added => {
+ return store.dispatch('openPendingTab', { file: f, keyPrefix: 'pending' }).then((added) => {
expect(added).toBe(false);
});
});
diff --git a/spec/frontend/ide/stores/actions/merge_request_spec.js b/spec/frontend/ide/stores/actions/merge_request_spec.js
index b1cceda9d85..9b17d95ea35 100644
--- a/spec/frontend/ide/stores/actions/merge_request_spec.js
+++ b/spec/frontend/ide/stores/actions/merge_request_spec.js
@@ -49,7 +49,7 @@ describe('IDE store merge request actions', () => {
mock.onGet(/api\/(.*)\/projects\/abcproject\/merge_requests/).reply(200, mockData);
});
- it('calls getProjectMergeRequests service method', done => {
+ it('calls getProjectMergeRequests service method', (done) => {
store
.dispatch('getMergeRequestsForBranch', { projectId: TEST_PROJECT, branchId: 'bar' })
.then(() => {
@@ -66,19 +66,19 @@ describe('IDE store merge request actions', () => {
.catch(done.fail);
});
- it('sets the "Merge Request" Object', done => {
+ it('sets the "Merge Request" Object', (done) => {
store
.dispatch('getMergeRequestsForBranch', { projectId: TEST_PROJECT, branchId: 'bar' })
.then(() => {
expect(store.state.projects.abcproject.mergeRequests).toEqual({
- '2': expect.objectContaining(mrData),
+ 2: expect.objectContaining(mrData),
});
done();
})
.catch(done.fail);
});
- it('sets "Current Merge Request" object to the most recent MR', done => {
+ it('sets "Current Merge Request" object to the most recent MR', (done) => {
store
.dispatch('getMergeRequestsForBranch', { projectId: TEST_PROJECT, branchId: 'bar' })
.then(() => {
@@ -88,7 +88,7 @@ describe('IDE store merge request actions', () => {
.catch(done.fail);
});
- it('does nothing if user cannot read MRs', done => {
+ it('does nothing if user cannot read MRs', (done) => {
store.state.projects[TEST_PROJECT].userPermissions[PERMISSION_READ_MR] = false;
store
@@ -108,7 +108,7 @@ describe('IDE store merge request actions', () => {
mock.onGet(/api\/(.*)\/projects\/abcproject\/merge_requests/).reply(200, []);
});
- it('does not fail if there are no merge requests for current branch', done => {
+ it('does not fail if there are no merge requests for current branch', (done) => {
store
.dispatch('getMergeRequestsForBranch', { projectId: TEST_PROJECT, branchId: 'foo' })
.then(() => {
@@ -126,7 +126,7 @@ describe('IDE store merge request actions', () => {
mock.onGet(/api\/(.*)\/projects\/abcproject\/merge_requests/).networkError();
});
- it('flashes message, if error', done => {
+ it('flashes message, if error', (done) => {
store
.dispatch('getMergeRequestsForBranch', { projectId: TEST_PROJECT, branchId: 'bar' })
.catch(() => {
@@ -149,7 +149,7 @@ describe('IDE store merge request actions', () => {
.reply(200, { title: 'mergerequest' });
});
- it('calls getProjectMergeRequestData service method', done => {
+ it('calls getProjectMergeRequestData service method', (done) => {
store
.dispatch('getMergeRequestData', { projectId: TEST_PROJECT, mergeRequestId: 1 })
.then(() => {
@@ -160,7 +160,7 @@ describe('IDE store merge request actions', () => {
.catch(done.fail);
});
- it('sets the Merge Request Object', done => {
+ it('sets the Merge Request Object', (done) => {
store
.dispatch('getMergeRequestData', { projectId: TEST_PROJECT, mergeRequestId: 1 })
.then(() => {
@@ -180,7 +180,7 @@ describe('IDE store merge request actions', () => {
mock.onGet(/api\/(.*)\/projects\/abcproject\/merge_requests\/1/).networkError();
});
- it('dispatches error action', done => {
+ it('dispatches error action', (done) => {
const dispatch = jest.fn();
getMergeRequestData(
@@ -224,7 +224,7 @@ describe('IDE store merge request actions', () => {
.reply(200, { title: 'mergerequest' });
});
- it('calls getProjectMergeRequestChanges service method', done => {
+ it('calls getProjectMergeRequestChanges service method', (done) => {
store
.dispatch('getMergeRequestChanges', { projectId: TEST_PROJECT, mergeRequestId: 1 })
.then(() => {
@@ -235,7 +235,7 @@ describe('IDE store merge request actions', () => {
.catch(done.fail);
});
- it('sets the Merge Request Changes Object', done => {
+ it('sets the Merge Request Changes Object', (done) => {
store
.dispatch('getMergeRequestChanges', { projectId: TEST_PROJECT, mergeRequestId: 1 })
.then(() => {
@@ -253,7 +253,7 @@ describe('IDE store merge request actions', () => {
mock.onGet(/api\/(.*)\/projects\/abcproject\/merge_requests\/1\/changes/).networkError();
});
- it('dispatches error action', done => {
+ it('dispatches error action', (done) => {
const dispatch = jest.fn();
getMergeRequestChanges(
@@ -296,7 +296,7 @@ describe('IDE store merge request actions', () => {
jest.spyOn(service, 'getProjectMergeRequestVersions');
});
- it('calls getProjectMergeRequestVersions service method', done => {
+ it('calls getProjectMergeRequestVersions service method', (done) => {
store
.dispatch('getMergeRequestVersions', { projectId: TEST_PROJECT, mergeRequestId: 1 })
.then(() => {
@@ -307,7 +307,7 @@ describe('IDE store merge request actions', () => {
.catch(done.fail);
});
- it('sets the Merge Request Versions Object', done => {
+ it('sets the Merge Request Versions Object', (done) => {
store
.dispatch('getMergeRequestVersions', { projectId: TEST_PROJECT, mergeRequestId: 1 })
.then(() => {
@@ -323,7 +323,7 @@ describe('IDE store merge request actions', () => {
mock.onGet(/api\/(.*)\/projects\/abcproject\/merge_requests\/1\/versions/).networkError();
});
- it('dispatches error action', done => {
+ it('dispatches error action', (done) => {
const dispatch = jest.fn();
getMergeRequestVersions(
@@ -422,7 +422,7 @@ describe('IDE store merge request actions', () => {
);
});
- it('dispatches actions for merge request data', done => {
+ it('dispatches actions for merge request data', (done) => {
openMergeRequest({ state: store.state, dispatch: store.dispatch, getters: mockGetters }, mr)
.then(() => {
expect(store.dispatch.mock.calls).toEqual([
@@ -451,7 +451,7 @@ describe('IDE store merge request actions', () => {
.catch(done.fail);
});
- it('updates activity bar view and gets file data, if changes are found', done => {
+ it('updates activity bar view and gets file data, if changes are found', (done) => {
store.state.entries.foo = {
type: 'blob',
};
@@ -490,7 +490,7 @@ describe('IDE store merge request actions', () => {
.catch(done.fail);
});
- it('flashes message, if error', done => {
+ it('flashes message, if error', (done) => {
store.dispatch.mockRejectedValue();
openMergeRequest(store, mr)
diff --git a/spec/frontend/ide/stores/actions/project_spec.js b/spec/frontend/ide/stores/actions/project_spec.js
index ca3687307a9..1244c8af91a 100644
--- a/spec/frontend/ide/stores/actions/project_spec.js
+++ b/spec/frontend/ide/stores/actions/project_spec.js
@@ -53,7 +53,7 @@ describe('IDE store project actions', () => {
});
});
- it('calls the service', done => {
+ it('calls the service', (done) => {
store
.dispatch('refreshLastCommitData', {
projectId: store.state.currentProjectId,
@@ -67,7 +67,7 @@ describe('IDE store project actions', () => {
.catch(done.fail);
});
- it('commits getBranchData', done => {
+ it('commits getBranchData', (done) => {
testAction(
refreshLastCommitData,
{
@@ -94,7 +94,7 @@ describe('IDE store project actions', () => {
});
describe('showBranchNotFoundError', () => {
- it('dispatches setErrorMessage', done => {
+ it('dispatches setErrorMessage', (done) => {
testAction(
showBranchNotFoundError,
'master',
@@ -123,7 +123,7 @@ describe('IDE store project actions', () => {
jest.spyOn(api, 'createBranch').mockResolvedValue();
});
- it('calls API', done => {
+ it('calls API', (done) => {
createNewBranchFromDefault(
{
state: {
@@ -148,7 +148,7 @@ describe('IDE store project actions', () => {
.catch(done.fail);
});
- it('clears error message', done => {
+ it('clears error message', (done) => {
const dispatchSpy = jest.fn().mockName('dispatch');
createNewBranchFromDefault(
@@ -172,7 +172,7 @@ describe('IDE store project actions', () => {
.catch(done.fail);
});
- it('reloads window', done => {
+ it('reloads window', (done) => {
createNewBranchFromDefault(
{
state: {
@@ -196,7 +196,7 @@ describe('IDE store project actions', () => {
});
describe('loadEmptyBranch', () => {
- it('creates a blank tree and sets loading state to false', done => {
+ it('creates a blank tree and sets loading state to false', (done) => {
testAction(
loadEmptyBranch,
{ projectId: TEST_PROJECT_ID, branchId: 'master' },
@@ -213,7 +213,7 @@ describe('IDE store project actions', () => {
);
});
- it('does nothing, if tree already exists', done => {
+ it('does nothing, if tree already exists', (done) => {
const trees = { [`${TEST_PROJECT_ID}/master`]: [] };
testAction(
@@ -279,7 +279,7 @@ describe('IDE store project actions', () => {
const branchId = '123-lorem';
const ref = 'abcd2322';
- it('when empty repo, loads empty branch', done => {
+ it('when empty repo, loads empty branch', (done) => {
const mockGetters = { emptyRepo: true };
testAction(
@@ -292,13 +292,13 @@ describe('IDE store project actions', () => {
);
});
- it('when branch already exists, does nothing', done => {
+ it('when branch already exists, does nothing', (done) => {
store.state.projects[projectId].branches[branchId] = {};
testAction(loadBranch, { projectId, branchId }, store.state, [], [], done);
});
- it('fetches branch data', done => {
+ it('fetches branch data', (done) => {
const mockGetters = { findBranch: () => ({ commit: { id: ref } }) };
jest.spyOn(store, 'dispatch').mockResolvedValue();
@@ -317,7 +317,7 @@ describe('IDE store project actions', () => {
.catch(done.fail);
});
- it('shows an error if branch can not be fetched', done => {
+ it('shows an error if branch can not be fetched', (done) => {
jest.spyOn(store, 'dispatch').mockReturnValue(Promise.reject());
loadBranch(store, { projectId, branchId })
@@ -356,7 +356,7 @@ describe('IDE store project actions', () => {
jest.spyOn(store, 'dispatch').mockResolvedValue();
});
- it('dispatches branch actions', done => {
+ it('dispatches branch actions', (done) => {
openBranch(store, branch)
.then(() => {
expect(store.dispatch.mock.calls).toEqual([
@@ -375,9 +375,9 @@ describe('IDE store project actions', () => {
jest.spyOn(store, 'dispatch').mockReturnValue(Promise.reject());
});
- it('dispatches correct branch actions', done => {
+ it('dispatches correct branch actions', (done) => {
openBranch(store, branch)
- .then(val => {
+ .then((val) => {
expect(store.dispatch.mock.calls).toEqual([
['setCurrentBranchId', branchId],
['loadBranch', { projectId, branchId }],
diff --git a/spec/frontend/ide/stores/actions/tree_spec.js b/spec/frontend/ide/stores/actions/tree_spec.js
index 0eabd982d57..7831df9f3a4 100644
--- a/spec/frontend/ide/stores/actions/tree_spec.js
+++ b/spec/frontend/ide/stores/actions/tree_spec.js
@@ -1,6 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
-import { TEST_HOST } from 'jest/helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
import { showTreeEntry, getFiles, setDirectoryData } from '~/ide/stores/actions/tree';
import * as types from '~/ide/stores/mutation_types';
import axios from '~/lib/utils/axios_utils';
@@ -57,26 +57,15 @@ describe('Multi-file store tree actions', () => {
});
it('calls service getFiles', () => {
- return (
- store
- .dispatch('getFiles', basicCallParameters)
- // getFiles actions calls lodash.defer
- .then(() => jest.runOnlyPendingTimers())
- .then(() => {
- expect(service.getFiles).toHaveBeenCalledWith('foo/abcproject', '12345678');
- })
- );
+ return store.dispatch('getFiles', basicCallParameters).then(() => {
+ expect(service.getFiles).toHaveBeenCalledWith('foo/abcproject', '12345678');
+ });
});
- it('adds data into tree', done => {
+ it('adds data into tree', (done) => {
store
.dispatch('getFiles', basicCallParameters)
.then(() => {
- // The populating of the tree is deferred for performance reasons.
- // See this merge request for details: https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/25700
- jest.advanceTimersByTime(1);
- })
- .then(() => {
projectTree = store.state.trees['abcproject/master'];
expect(projectTree.tree.length).toBe(2);
@@ -93,7 +82,7 @@ describe('Multi-file store tree actions', () => {
});
describe('error', () => {
- it('dispatches error action', done => {
+ it('dispatches error action', (done) => {
const dispatch = jest.fn();
store.state.projects = {
@@ -148,7 +137,7 @@ describe('Multi-file store tree actions', () => {
store.state.entries[tree.path] = tree;
});
- it('toggles the tree open', done => {
+ it('toggles the tree open', (done) => {
store
.dispatch('toggleTreeOpen', tree.path)
.then(() => {
@@ -174,7 +163,7 @@ describe('Multi-file store tree actions', () => {
Object.assign(store.state.entries, createEntriesFromPaths(paths));
});
- it('opens the parents', done => {
+ it('opens the parents', (done) => {
testAction(
showTreeEntry,
'grandparent/parent/child.txt',
@@ -187,7 +176,7 @@ describe('Multi-file store tree actions', () => {
});
describe('setDirectoryData', () => {
- it('sets tree correctly if there are no opened files yet', done => {
+ it('sets tree correctly if there are no opened files yet', (done) => {
const treeFile = file({ name: 'README.md' });
store.state.trees['abcproject/master'] = {};
diff --git a/spec/frontend/ide/stores/actions_spec.js b/spec/frontend/ide/stores/actions_spec.js
index 04128c27e70..036bc91cd11 100644
--- a/spec/frontend/ide/stores/actions_spec.js
+++ b/spec/frontend/ide/stores/actions_spec.js
@@ -1,4 +1,5 @@
import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
import { visitUrl } from '~/lib/utils/url_utility';
import { createStore } from '~/ide/stores';
import { createRouter } from '~/ide/ide_router';
@@ -20,7 +21,6 @@ import {
import axios from '~/lib/utils/axios_utils';
import * as types from '~/ide/stores/mutation_types';
import { file, createTriggerRenameAction, createTriggerChangeAction } from '../helpers';
-import testAction from '../../helpers/vuex_action_helper';
import eventHub from '~/ide/eventhub';
jest.mock('~/lib/utils/url_utility', () => ({
@@ -42,7 +42,7 @@ describe('Multi-file store actions', () => {
});
describe('redirectToUrl', () => {
- it('calls visitUrl', done => {
+ it('calls visitUrl', (done) => {
store
.dispatch('redirectToUrl', 'test')
.then(() => {
@@ -55,7 +55,7 @@ describe('Multi-file store actions', () => {
});
describe('setInitialData', () => {
- it('commits initial data', done => {
+ it('commits initial data', (done) => {
store
.dispatch('setInitialData', { canCommit: true })
.then(() => {
@@ -70,7 +70,7 @@ describe('Multi-file store actions', () => {
const paths = ['to_discard', 'another_one_to_discard'];
beforeEach(() => {
- paths.forEach(path => {
+ paths.forEach((path) => {
const f = file(path);
f.changed = true;
@@ -81,14 +81,14 @@ describe('Multi-file store actions', () => {
});
it('discards all changes in file', () => {
- const expectedCalls = paths.map(path => ['restoreOriginalFile', path]);
+ const expectedCalls = paths.map((path) => ['restoreOriginalFile', path]);
discardAllChanges(store);
expect(store.dispatch.mock.calls).toEqual(expect.arrayContaining(expectedCalls));
});
- it('removes all files from changedFiles state', done => {
+ it('removes all files from changedFiles state', (done) => {
store
.dispatch('discardAllChanges')
.then(() => {
@@ -120,7 +120,7 @@ describe('Multi-file store actions', () => {
});
describe('tree', () => {
- it('creates temp tree', done => {
+ it('creates temp tree', (done) => {
store
.dispatch('createTempEntry', {
name: 'test',
@@ -137,7 +137,7 @@ describe('Multi-file store actions', () => {
.catch(done.fail);
});
- it('creates new folder inside another tree', done => {
+ it('creates new folder inside another tree', (done) => {
const tree = {
type: 'tree',
name: 'testing',
@@ -162,7 +162,7 @@ describe('Multi-file store actions', () => {
.catch(done.fail);
});
- it('does not create new tree if already exists', done => {
+ it('does not create new tree if already exists', (done) => {
const tree = {
type: 'tree',
path: 'testing',
@@ -188,18 +188,20 @@ describe('Multi-file store actions', () => {
});
describe('blob', () => {
- it('creates temp file', done => {
+ it('creates temp file', (done) => {
const name = 'test';
store
.dispatch('createTempEntry', {
name,
type: 'blob',
+ mimeType: 'test/mime',
})
.then(() => {
const f = store.state.entries[name];
expect(f.tempFile).toBeTruthy();
+ expect(f.mimeType).toBe('test/mime');
expect(store.state.trees['abcproject/mybranch'].tree.length).toBe(1);
done();
@@ -207,7 +209,7 @@ describe('Multi-file store actions', () => {
.catch(done.fail);
});
- it('adds tmp file to open files', done => {
+ it('adds tmp file to open files', (done) => {
const name = 'test';
store
@@ -226,7 +228,7 @@ describe('Multi-file store actions', () => {
.catch(done.fail);
});
- it('adds tmp file to staged files', done => {
+ it('adds tmp file to staged files', (done) => {
const name = 'test';
store
@@ -248,7 +250,7 @@ describe('Multi-file store actions', () => {
expect(store.dispatch).toHaveBeenCalledWith('setFileActive', 'test');
});
- it('creates flash message if file already exists', done => {
+ it('creates flash message if file already exists', (done) => {
const f = file('test', '1', 'blob');
store.state.trees['abcproject/mybranch'].tree = [f];
store.state.entries[f.path] = f;
@@ -271,7 +273,7 @@ describe('Multi-file store actions', () => {
});
describe('scrollToTab', () => {
- it('focuses the current active element', done => {
+ it('focuses the current active element', (done) => {
document.body.innerHTML +=
'<div id="tabs"><div class="active"><div class="repo-tab"></div></div></div>';
const el = document.querySelector('.repo-tab');
@@ -376,7 +378,7 @@ describe('Multi-file store actions', () => {
});
describe('updateViewer', () => {
- it('updates viewer state', done => {
+ it('updates viewer state', (done) => {
store
.dispatch('updateViewer', 'diff')
.then(() => {
@@ -388,7 +390,7 @@ describe('Multi-file store actions', () => {
});
describe('updateActivityBarView', () => {
- it('commits UPDATE_ACTIVITY_BAR_VIEW', done => {
+ it('commits UPDATE_ACTIVITY_BAR_VIEW', (done) => {
testAction(
updateActivityBarView,
'test',
@@ -401,7 +403,7 @@ describe('Multi-file store actions', () => {
});
describe('setEmptyStateSvgs', () => {
- it('commits setEmptyStateSvgs', done => {
+ it('commits setEmptyStateSvgs', (done) => {
testAction(
setEmptyStateSvgs,
'svg',
@@ -414,7 +416,7 @@ describe('Multi-file store actions', () => {
});
describe('updateTempFlagForEntry', () => {
- it('commits UPDATE_TEMP_FLAG', done => {
+ it('commits UPDATE_TEMP_FLAG', (done) => {
const f = {
...file(),
path: 'test',
@@ -432,7 +434,7 @@ describe('Multi-file store actions', () => {
);
});
- it('commits UPDATE_TEMP_FLAG and dispatches for parent', done => {
+ it('commits UPDATE_TEMP_FLAG and dispatches for parent', (done) => {
const parent = {
...file(),
path: 'testing',
@@ -455,7 +457,7 @@ describe('Multi-file store actions', () => {
);
});
- it('does not dispatch for parent, if parent does not exist', done => {
+ it('does not dispatch for parent, if parent does not exist', (done) => {
const f = {
...file(),
path: 'test',
@@ -475,7 +477,7 @@ describe('Multi-file store actions', () => {
});
describe('setCurrentBranchId', () => {
- it('commits setCurrentBranchId', done => {
+ it('commits setCurrentBranchId', (done) => {
testAction(
setCurrentBranchId,
'branchId',
@@ -488,7 +490,7 @@ describe('Multi-file store actions', () => {
});
describe('toggleFileFinder', () => {
- it('commits TOGGLE_FILE_FINDER', done => {
+ it('commits TOGGLE_FILE_FINDER', (done) => {
testAction(
toggleFileFinder,
true,
@@ -501,7 +503,7 @@ describe('Multi-file store actions', () => {
});
describe('setErrorMessage', () => {
- it('commis error messsage', done => {
+ it('commits error message', (done) => {
testAction(
setErrorMessage,
'error',
@@ -514,7 +516,7 @@ describe('Multi-file store actions', () => {
});
describe('deleteEntry', () => {
- it('commits entry deletion', done => {
+ it('commits entry deletion', (done) => {
store.state.entries.path = 'testing';
testAction(
@@ -527,7 +529,7 @@ describe('Multi-file store actions', () => {
);
});
- it('does not delete a folder after it is emptied', done => {
+ it('does not delete a folder after it is emptied', (done) => {
const testFolder = {
type: 'tree',
tree: [],
@@ -573,7 +575,7 @@ describe('Multi-file store actions', () => {
});
describe('and previous does not exist', () => {
- it('reverts the rename before deleting', done => {
+ it('reverts the rename before deleting', (done) => {
testAction(
deleteEntry,
testEntry.path,
@@ -608,7 +610,7 @@ describe('Multi-file store actions', () => {
store.state.entries[oldEntry.path] = oldEntry;
});
- it('does not revert rename before deleting', done => {
+ it('does not revert rename before deleting', (done) => {
testAction(
deleteEntry,
testEntry.path,
@@ -619,7 +621,7 @@ describe('Multi-file store actions', () => {
);
});
- it('when previous is deleted, it reverts rename before deleting', done => {
+ it('when previous is deleted, it reverts rename before deleting', (done) => {
store.state.entries[testEntry.prevPath].deleted = true;
testAction(
@@ -654,7 +656,7 @@ describe('Multi-file store actions', () => {
jest.spyOn(eventHub, '$emit').mockImplementation();
});
- it('does not purge model cache for temporary entries that got renamed', done => {
+ it('does not purge model cache for temporary entries that got renamed', (done) => {
Object.assign(store.state.entries, {
test: {
...file('test'),
@@ -676,7 +678,7 @@ describe('Multi-file store actions', () => {
.catch(done.fail);
});
- it('purges model cache for renamed entry', done => {
+ it('purges model cache for renamed entry', (done) => {
Object.assign(store.state.entries, {
test: {
...file('test'),
@@ -735,7 +737,7 @@ describe('Multi-file store actions', () => {
]);
});
- it('if not changed, completely unstages and discards entry if renamed to original', done => {
+ it('if not changed, completely unstages and discards entry if renamed to original', (done) => {
testAction(
renameEntry,
{ path: 'renamed', name: 'orig' },
@@ -759,7 +761,7 @@ describe('Multi-file store actions', () => {
);
});
- it('if already in changed, does not add to change', done => {
+ it('if already in changed, does not add to change', (done) => {
store.state.changedFiles.push(renamedEntry);
testAction(
@@ -772,7 +774,7 @@ describe('Multi-file store actions', () => {
);
});
- it('routes to the renamed file if the original file has been opened', done => {
+ it('routes to the renamed file if the original file has been opened', (done) => {
store.state.currentProjectId = 'test/test';
store.state.currentBranchId = 'master';
@@ -813,7 +815,7 @@ describe('Multi-file store actions', () => {
});
});
- it('updates entries in a folder correctly, when folder is renamed', done => {
+ it('updates entries in a folder correctly, when folder is renamed', (done) => {
store
.dispatch('renameEntry', {
path: 'folder',
@@ -831,7 +833,7 @@ describe('Multi-file store actions', () => {
.catch(done.fail);
});
- it('discards renaming of an entry if the root folder is renamed back to a previous name', done => {
+ it('discards renaming of an entry if the root folder is renamed back to a previous name', (done) => {
const rootFolder = file('old-folder', 'old-folder', 'tree');
const testEntry = file('test', 'test', 'blob', rootFolder);
@@ -923,7 +925,7 @@ describe('Multi-file store actions', () => {
});
});
- it('creates new directory', done => {
+ it('creates new directory', (done) => {
expect(store.state.entries[newParentPath]).toBeUndefined();
store
@@ -953,7 +955,7 @@ describe('Multi-file store actions', () => {
rootDir.tree.push(newDir);
});
- it('inserts in new directory', done => {
+ it('inserts in new directory', (done) => {
expect(newDir.tree).toEqual([]);
store
@@ -969,11 +971,11 @@ describe('Multi-file store actions', () => {
.catch(done.fail);
});
- it('when new directory is deleted, it undeletes it', done => {
+ it('when new directory is deleted, it undeletes it', (done) => {
store.dispatch('deleteEntry', newParentPath);
expect(store.state.entries[newParentPath].deleted).toBe(true);
- expect(rootDir.tree.some(x => x.path === newParentPath)).toBe(false);
+ expect(rootDir.tree.some((x) => x.path === newParentPath)).toBe(false);
store
.dispatch('renameEntry', {
@@ -983,7 +985,7 @@ describe('Multi-file store actions', () => {
})
.then(() => {
expect(store.state.entries[newParentPath].deleted).toBe(false);
- expect(rootDir.tree.some(x => x.path === newParentPath)).toBe(true);
+ expect(rootDir.tree.some((x) => x.path === newParentPath)).toBe(true);
})
.then(done)
.catch(done.fail);
@@ -1027,12 +1029,12 @@ describe('Multi-file store actions', () => {
document.querySelector('.flash-container').remove();
});
- it('passes the error further unchanged without dispatching any action when response is 404', done => {
+ it('passes the error further unchanged without dispatching any action when response is 404', (done) => {
mock.onGet(/(.*)/).replyOnce(404);
getBranchData(...callParams)
.then(done.fail)
- .catch(e => {
+ .catch((e) => {
expect(dispatch.mock.calls).toHaveLength(0);
expect(e.response.status).toEqual(404);
expect(document.querySelector('.flash-alert')).toBeNull();
@@ -1040,12 +1042,12 @@ describe('Multi-file store actions', () => {
});
});
- it('does not pass the error further and flashes an alert if error is not 404', done => {
+ it('does not pass the error further and flashes an alert if error is not 404', (done) => {
mock.onGet(/(.*)/).replyOnce(418);
getBranchData(...callParams)
.then(done.fail)
- .catch(e => {
+ .catch((e) => {
expect(dispatch.mock.calls).toHaveLength(0);
expect(e.response).toBeUndefined();
expect(document.querySelector('.flash-alert')).not.toBeNull();
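The hunks above, like most in this diff, are mechanical: single arrow-function parameters gain parentheses, so done => becomes (done) => and e => becomes (e) =>. A minimal sketch of the Prettier option behind this style, assuming the repository drives it from a .prettierrc.js file (the config filename and location are assumptions):

// .prettierrc.js (illustrative only)
module.exports = {
  // 'always' wraps even single arrow-function parameters in parentheses:
  //   before: list.map(x => x.path)
  //   after:  list.map((x) => x.path)
  arrowParens: 'always',
};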
diff --git a/spec/frontend/ide/stores/extend_spec.js b/spec/frontend/ide/stores/extend_spec.js
index b0f1063153e..ffb00f9ef5b 100644
--- a/spec/frontend/ide/stores/extend_spec.js
+++ b/spec/frontend/ide/stores/extend_spec.js
@@ -15,7 +15,7 @@ describe('ide/stores/extend', () => {
store = {};
el = {};
- [terminalPlugin, terminalSyncPlugin].forEach(x => {
+ [terminalPlugin, terminalSyncPlugin].forEach((x) => {
const plugin = jest.fn();
x.mockImplementation(() => plugin);
@@ -28,7 +28,7 @@ describe('ide/stores/extend', () => {
terminalSyncPlugin.mockClear();
});
- const withGonFeatures = features => {
+ const withGonFeatures = (features) => {
global.gon = { ...global.gon, features };
};
diff --git a/spec/frontend/ide/stores/getters_spec.js b/spec/frontend/ide/stores/getters_spec.js
index 5ae87f5f9cd..1787f9e9361 100644
--- a/spec/frontend/ide/stores/getters_spec.js
+++ b/spec/frontend/ide/stores/getters_spec.js
@@ -407,7 +407,7 @@ describe('IDE store getters', () => {
${'canCreateMergeRequests'} | ${'createMergeRequestIn'}
${'canPushCode'} | ${'pushCode'}
`('$getterName', ({ getterName, permissionKey }) => {
- it.each([true, false])('finds permission for current project (%s)', val => {
+ it.each([true, false])('finds permission for current project (%s)', (val) => {
localState.projects[TEST_PROJECT_ID] = {
userPermissions: {
[permissionKey]: val,
diff --git a/spec/frontend/ide/stores/integration_spec.js b/spec/frontend/ide/stores/integration_spec.js
index b6a7c7fd02d..388bd3b99d2 100644
--- a/spec/frontend/ide/stores/integration_spec.js
+++ b/spec/frontend/ide/stores/integration_spec.js
@@ -84,7 +84,7 @@ describe('IDE store integration', () => {
store.dispatch('commit/updateFilesAfterCommit', { data: {} });
expect(store.state.entries[TEST_PATH]).toEqual(expected);
- expect(store.state.entries[TEST_PATH_DIR].tree.find(x => x.path === TEST_PATH)).toEqual(
+ expect(store.state.entries[TEST_PATH_DIR].tree.find((x) => x.path === TEST_PATH)).toEqual(
expected,
);
});
diff --git a/spec/frontend/ide/stores/modules/branches/actions_spec.js b/spec/frontend/ide/stores/modules/branches/actions_spec.js
index 2ab4126cccf..b1c077c4082 100644
--- a/spec/frontend/ide/stores/modules/branches/actions_spec.js
+++ b/spec/frontend/ide/stores/modules/branches/actions_spec.js
@@ -42,7 +42,7 @@ describe('IDE branches actions', () => {
});
describe('requestBranches', () => {
- it('should commit request', done => {
+ it('should commit request', (done) => {
testAction(
requestBranches,
null,
@@ -55,7 +55,7 @@ describe('IDE branches actions', () => {
});
describe('receiveBranchesError', () => {
- it('should commit error', done => {
+ it('should commit error', (done) => {
testAction(
receiveBranchesError,
{ search: TEST_SEARCH },
@@ -78,7 +78,7 @@ describe('IDE branches actions', () => {
});
describe('receiveBranchesSuccess', () => {
- it('should commit received data', done => {
+ it('should commit received data', (done) => {
testAction(
receiveBranchesSuccess,
branches,
@@ -110,7 +110,7 @@ describe('IDE branches actions', () => {
});
});
- it('dispatches success with received data', done => {
+ it('dispatches success with received data', (done) => {
testAction(
fetchBranches,
{ search: TEST_SEARCH },
@@ -131,7 +131,7 @@ describe('IDE branches actions', () => {
mock.onGet(/\/api\/v4\/projects\/\d+\/repository\/branches(.*)$/).replyOnce(500);
});
- it('dispatches error', done => {
+ it('dispatches error', (done) => {
testAction(
fetchBranches,
{ search: TEST_SEARCH },
@@ -148,7 +148,7 @@ describe('IDE branches actions', () => {
});
describe('resetBranches', () => {
- it('commits reset', done => {
+ it('commits reset', (done) => {
testAction(
resetBranches,
null,
diff --git a/spec/frontend/ide/stores/modules/branches/mutations_spec.js b/spec/frontend/ide/stores/modules/branches/mutations_spec.js
index ed8e05bf299..ddf55479be9 100644
--- a/spec/frontend/ide/stores/modules/branches/mutations_spec.js
+++ b/spec/frontend/ide/stores/modules/branches/mutations_spec.js
@@ -28,7 +28,7 @@ describe('IDE branches mutations', () => {
describe('RECEIVE_BRANCHES_SUCCESS', () => {
it('sets branches', () => {
- const expectedBranches = branches.map(branch => ({
+ const expectedBranches = branches.map((branch) => ({
name: branch.name,
committedDate: branch.commit.committed_date,
}));
diff --git a/spec/frontend/ide/stores/modules/clientside/actions_spec.js b/spec/frontend/ide/stores/modules/clientside/actions_spec.js
index a47bc0bd711..05627f8ed0e 100644
--- a/spec/frontend/ide/stores/modules/clientside/actions_spec.js
+++ b/spec/frontend/ide/stores/modules/clientside/actions_spec.js
@@ -25,7 +25,7 @@ describe('IDE store module clientside actions', () => {
});
describe('pingUsage', () => {
- it('posts to usage endpoint', done => {
+ it('posts to usage endpoint', (done) => {
const usageSpy = jest.fn(() => [200]);
mock.onPost(TEST_USAGE_URL).reply(() => usageSpy());
diff --git a/spec/frontend/ide/stores/modules/commit/actions_spec.js b/spec/frontend/ide/stores/modules/commit/actions_spec.js
index cfe2bddf76c..5be0e22a9fc 100644
--- a/spec/frontend/ide/stores/modules/commit/actions_spec.js
+++ b/spec/frontend/ide/stores/modules/commit/actions_spec.js
@@ -1,6 +1,7 @@
import { file } from 'jest/ide/helpers';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
import { visitUrl } from '~/lib/utils/url_utility';
import { createStore } from '~/ide/stores';
import service from '~/ide/services';
@@ -11,7 +12,6 @@ import * as mutationTypes from '~/ide/stores/modules/commit/mutation_types';
import * as actions from '~/ide/stores/modules/commit/actions';
import { createUnexpectedCommitError } from '~/ide/lib/errors';
import { commitActionTypes, PERMISSION_CREATE_MR } from '~/ide/constants';
-import testAction from '../../../../helpers/vuex_action_helper';
jest.mock('~/lib/utils/url_utility', () => ({
...jest.requireActual('~/lib/utils/url_utility'),
@@ -19,6 +19,17 @@ jest.mock('~/lib/utils/url_utility', () => ({
}));
const TEST_COMMIT_SHA = '123456789';
+const COMMIT_RESPONSE = {
+ id: '123456',
+ short_id: '123',
+ message: 'test message',
+ committed_date: 'date',
+ parent_ids: [],
+ stats: {
+ additions: '1',
+ deletions: '2',
+ },
+};
describe('IDE commit module actions', () => {
let mock;
@@ -32,7 +43,9 @@ describe('IDE commit module actions', () => {
mock = new MockAdapter(axios);
jest.spyOn(router, 'push').mockImplementation();
- mock.onGet('/api/v1/projects/abcproject/repository/branches/master').reply(200);
+ mock
+ .onGet('/api/v1/projects/abcproject/repository/branches/master')
+ .reply(200, { commit: COMMIT_RESPONSE });
});
afterEach(() => {
@@ -41,7 +54,7 @@ describe('IDE commit module actions', () => {
});
describe('updateCommitMessage', () => {
- it('updates store with new commit message', done => {
+ it('updates store with new commit message', (done) => {
store
.dispatch('commit/updateCommitMessage', 'testing')
.then(() => {
@@ -53,7 +66,7 @@ describe('IDE commit module actions', () => {
});
describe('discardDraft', () => {
- it('resets commit message to blank', done => {
+ it('resets commit message to blank', (done) => {
store.state.commit.commitMessage = 'testing';
store
@@ -67,7 +80,7 @@ describe('IDE commit module actions', () => {
});
describe('updateCommitAction', () => {
- it('updates store with new commit action', done => {
+ it('updates store with new commit action', (done) => {
store
.dispatch('commit/updateCommitAction', '1')
.then(() => {
@@ -123,7 +136,7 @@ describe('IDE commit module actions', () => {
});
});
- it('updates commit message with short_id', done => {
+ it('updates commit message with short_id', (done) => {
store
.dispatch('commit/setLastCommitMessage', { short_id: '123' })
.then(() => {
@@ -135,7 +148,7 @@ describe('IDE commit module actions', () => {
.catch(done.fail);
});
- it('updates commit message with stats', done => {
+ it('updates commit message with stats', (done) => {
store
.dispatch('commit/setLastCommitMessage', {
short_id: '123',
@@ -200,12 +213,12 @@ describe('IDE commit module actions', () => {
});
store.state.openFiles = store.state.stagedFiles;
- store.state.stagedFiles.forEach(stagedFile => {
+ store.state.stagedFiles.forEach((stagedFile) => {
store.state.entries[stagedFile.path] = stagedFile;
});
});
- it('updates stores working reference', done => {
+ it('updates stores working reference', (done) => {
store
.dispatch('commit/updateFilesAfterCommit', {
data,
@@ -218,14 +231,14 @@ describe('IDE commit module actions', () => {
.catch(done.fail);
});
- it('resets all files changed status', done => {
+ it('resets all files changed status', (done) => {
store
.dispatch('commit/updateFilesAfterCommit', {
data,
branch,
})
.then(() => {
- store.state.openFiles.forEach(entry => {
+ store.state.openFiles.forEach((entry) => {
expect(entry.changed).toBeFalsy();
});
})
@@ -233,7 +246,7 @@ describe('IDE commit module actions', () => {
.catch(done.fail);
});
- it('sets files commit data', done => {
+ it('sets files commit data', (done) => {
store
.dispatch('commit/updateFilesAfterCommit', {
data,
@@ -246,7 +259,7 @@ describe('IDE commit module actions', () => {
.catch(done.fail);
});
- it('updates raw content for changed file', done => {
+ it('updates raw content for changed file', (done) => {
store
.dispatch('commit/updateFilesAfterCommit', {
data,
@@ -259,7 +272,7 @@ describe('IDE commit module actions', () => {
.catch(done.fail);
});
- it('emits changed event for file', done => {
+ it('emits changed event for file', (done) => {
store
.dispatch('commit/updateFilesAfterCommit', {
data,
@@ -319,7 +332,7 @@ describe('IDE commit module actions', () => {
store.state.commit.commitAction = '2';
store.state.commit.commitMessage = 'testing 123';
- store.state.openFiles.forEach(localF => {
+ store.state.openFiles.forEach((localF) => {
store.state.entries[localF.path] = localF;
});
});
@@ -329,23 +342,11 @@ describe('IDE commit module actions', () => {
});
describe('success', () => {
- const COMMIT_RESPONSE = {
- id: '123456',
- short_id: '123',
- message: 'test message',
- committed_date: 'date',
- parent_ids: '321',
- stats: {
- additions: '1',
- deletions: '2',
- },
- };
-
beforeEach(() => {
jest.spyOn(service, 'commit').mockResolvedValue({ data: COMMIT_RESPONSE });
});
- it('calls service', done => {
+ it('calls service', (done) => {
store
.dispatch('commit/commitChanges')
.then(() => {
@@ -370,7 +371,7 @@ describe('IDE commit module actions', () => {
.catch(done.fail);
});
- it('sends lastCommit ID when not creating new branch', done => {
+ it('sends lastCommit ID when not creating new branch', (done) => {
store.state.commit.commitAction = '1';
store
@@ -397,7 +398,7 @@ describe('IDE commit module actions', () => {
.catch(done.fail);
});
- it('sets last Commit Msg', done => {
+ it('sets last Commit Msg', (done) => {
store
.dispatch('commit/commitChanges')
.then(() => {
@@ -410,7 +411,7 @@ describe('IDE commit module actions', () => {
.catch(done.fail);
});
- it('adds commit data to files', done => {
+ it('adds commit data to files', (done) => {
store
.dispatch('commit/commitChanges')
.then(() => {
@@ -423,7 +424,7 @@ describe('IDE commit module actions', () => {
.catch(done.fail);
});
- it('resets stores commit actions', done => {
+ it('resets stores commit actions', (done) => {
store.state.commit.commitAction = consts.COMMIT_TO_NEW_BRANCH;
store
@@ -435,7 +436,7 @@ describe('IDE commit module actions', () => {
.catch(done.fail);
});
- it('removes all staged files', done => {
+ it('removes all staged files', (done) => {
store
.dispatch('commit/commitChanges')
.then(() => {
@@ -446,7 +447,7 @@ describe('IDE commit module actions', () => {
});
describe('merge request', () => {
- it('redirects to new merge request page', done => {
+ it('redirects to new merge request page', (done) => {
jest.spyOn(eventHub, '$on').mockImplementation();
store.state.commit.commitAction = consts.COMMIT_TO_NEW_BRANCH;
@@ -456,9 +457,7 @@ describe('IDE commit module actions', () => {
.dispatch('commit/commitChanges')
.then(() => {
expect(visitUrl).toHaveBeenCalledWith(
- `webUrl/-/merge_requests/new?merge_request[source_branch]=${
- store.getters['commit/placeholderBranchName']
- }&merge_request[target_branch]=master&nav_source=webide`,
+ `webUrl/-/merge_requests/new?merge_request[source_branch]=${store.getters['commit/placeholderBranchName']}&merge_request[target_branch]=master&nav_source=webide`,
);
done();
@@ -466,7 +465,7 @@ describe('IDE commit module actions', () => {
.catch(done.fail);
});
- it('does not redirect to new merge request page when shouldCreateMR is not checked', done => {
+ it('does not redirect to new merge request page when shouldCreateMR is not checked', (done) => {
jest.spyOn(eventHub, '$on').mockImplementation();
store.state.commit.commitAction = consts.COMMIT_TO_NEW_BRANCH;
@@ -512,7 +511,7 @@ describe('IDE commit module actions', () => {
});
});
- it('shows failed message', done => {
+ it('shows failed message', (done) => {
store
.dispatch('commit/commitChanges')
.then(() => {
@@ -546,19 +545,7 @@ describe('IDE commit module actions', () => {
});
describe('first commit of a branch', () => {
- const COMMIT_RESPONSE = {
- id: '123456',
- short_id: '123',
- message: 'test message',
- committed_date: 'date',
- parent_ids: [],
- stats: {
- additions: '1',
- deletions: '2',
- },
- };
-
- it('commits TOGGLE_EMPTY_STATE mutation on empty repo', done => {
+ it('commits TOGGLE_EMPTY_STATE mutation on empty repo', (done) => {
jest.spyOn(service, 'commit').mockResolvedValue({ data: COMMIT_RESPONSE });
jest.spyOn(store, 'commit');
@@ -575,7 +562,7 @@ describe('IDE commit module actions', () => {
.catch(done.fail);
});
- it('does not commmit TOGGLE_EMPTY_STATE mutation on existing project', done => {
+ it('does not commit TOGGLE_EMPTY_STATE mutation on existing project', (done) => {
COMMIT_RESPONSE.parent_ids.push('1234');
jest.spyOn(service, 'commit').mockResolvedValue({ data: COMMIT_RESPONSE });
jest.spyOn(store, 'commit');
@@ -596,7 +583,7 @@ describe('IDE commit module actions', () => {
});
describe('toggleShouldCreateMR', () => {
- it('commits both toggle and interacting with MR checkbox actions', done => {
+ it('commits both toggle and interacting with MR checkbox actions', (done) => {
testAction(
actions.toggleShouldCreateMR,
{},
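The commit actions spec above hoists its duplicated COMMIT_RESPONSE fixture to module scope and has the mocked branch endpoint return it, so the store reads a real commit object instead of an empty 200 body. A condensed sketch of that setup with axios-mock-adapter, using the URL and response shape taken from the hunks above:

import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';

// Shared fixture, hoisted so every describe block reuses the same object.
const COMMIT_RESPONSE = {
  id: '123456',
  short_id: '123',
  parent_ids: [],
  stats: { additions: '1', deletions: '2' },
};

let mock;

beforeEach(() => {
  mock = new MockAdapter(axios);
  // The branch lookup now returns the fixture instead of an empty body.
  mock
    .onGet('/api/v1/projects/abcproject/repository/branches/master')
    .reply(200, { commit: COMMIT_RESPONSE });
});

afterEach(() => {
  mock.restore();
});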
diff --git a/spec/frontend/ide/stores/modules/commit/getters_spec.js b/spec/frontend/ide/stores/modules/commit/getters_spec.js
index adbfd7c6835..66ed51dbd13 100644
--- a/spec/frontend/ide/stores/modules/commit/getters_spec.js
+++ b/spec/frontend/ide/stores/modules/commit/getters_spec.js
@@ -103,7 +103,7 @@ describe('IDE commit module getters', () => {
expect(getters.preBuiltCommitMessage(state, null, rootState)).toBe('test commit message');
});
- ['changedFiles', 'stagedFiles'].forEach(key => {
+ ['changedFiles', 'stagedFiles'].forEach((key) => {
it('returns commitMessage with updated file', () => {
rootState[key].push({
path: 'test-file',
diff --git a/spec/frontend/ide/stores/modules/editor/setup_spec.js b/spec/frontend/ide/stores/modules/editor/setup_spec.js
index 71b5d7590c5..659bfb2742f 100644
--- a/spec/frontend/ide/stores/modules/editor/setup_spec.js
+++ b/spec/frontend/ide/stores/modules/editor/setup_spec.js
@@ -1,8 +1,9 @@
+import { cloneDeep } from 'lodash';
import Vuex from 'vuex';
import eventHub from '~/ide/eventhub';
import { createStoreOptions } from '~/ide/stores';
import { setupFileEditorsSync } from '~/ide/stores/modules/editor/setup';
-import { createTriggerRenamePayload } from '../../../helpers';
+import { createTriggerRenamePayload, createTriggerUpdatePayload } from '../../../helpers';
describe('~/ide/stores/modules/editor/setup', () => {
let store;
@@ -33,6 +34,14 @@ describe('~/ide/stores/modules/editor/setup', () => {
});
});
+ it('when files update is emitted, does nothing', () => {
+ const origState = cloneDeep(store.state);
+
+ eventHub.$emit('ide.files.change', createTriggerUpdatePayload('foo'));
+
+ expect(store.state).toEqual(origState);
+ });
+
it('when files rename is emitted, renames fileEditor', () => {
eventHub.$emit('ide.files.change', createTriggerRenamePayload('foo', 'foo_new'));
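The new case above checks that a plain file update leaves the store untouched, in contrast to the rename case that follows it. A rough sketch of what the two payload factories imported from the spec helpers might look like; the type discriminator and field names here are assumptions, not the real helper:

// Hypothetical shapes for illustration only; the real factories live in the IDE spec helpers.
const createTriggerUpdatePayload = (path) => ({ type: 'update', path });
const createTriggerRenamePayload = (path, newPath) => ({ type: 'rename', path, newPath });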
diff --git a/spec/frontend/ide/stores/modules/file_templates/actions_spec.js b/spec/frontend/ide/stores/modules/file_templates/actions_spec.js
index 6c1fa163a91..76898e83c7a 100644
--- a/spec/frontend/ide/stores/modules/file_templates/actions_spec.js
+++ b/spec/frontend/ide/stores/modules/file_templates/actions_spec.js
@@ -20,7 +20,7 @@ describe('IDE file templates actions', () => {
});
describe('requestTemplateTypes', () => {
- it('commits REQUEST_TEMPLATE_TYPES', done => {
+ it('commits REQUEST_TEMPLATE_TYPES', (done) => {
testAction(
actions.requestTemplateTypes,
null,
@@ -33,7 +33,7 @@ describe('IDE file templates actions', () => {
});
describe('receiveTemplateTypesError', () => {
- it('commits RECEIVE_TEMPLATE_TYPES_ERROR and dispatches setErrorMessage', done => {
+ it('commits RECEIVE_TEMPLATE_TYPES_ERROR and dispatches setErrorMessage', (done) => {
testAction(
actions.receiveTemplateTypesError,
null,
@@ -55,7 +55,7 @@ describe('IDE file templates actions', () => {
});
describe('receiveTemplateTypesSuccess', () => {
- it('commits RECEIVE_TEMPLATE_TYPES_SUCCESS', done => {
+ it('commits RECEIVE_TEMPLATE_TYPES_SUCCESS', (done) => {
testAction(
actions.receiveTemplateTypesSuccess,
'test',
@@ -81,7 +81,7 @@ describe('IDE file templates actions', () => {
});
});
- it('rejects if selectedTemplateType is empty', done => {
+ it('rejects if selectedTemplateType is empty', (done) => {
const dispatch = jest.fn().mockName('dispatch');
actions
@@ -94,7 +94,7 @@ describe('IDE file templates actions', () => {
});
});
- it('dispatches actions', done => {
+ it('dispatches actions', (done) => {
state.selectedTemplateType = { key: 'licenses' };
testAction(
@@ -121,7 +121,7 @@ describe('IDE file templates actions', () => {
mock.onGet(/api\/(.*)\/templates\/licenses/).replyOnce(500);
});
- it('dispatches actions', done => {
+ it('dispatches actions', (done) => {
state.selectedTemplateType = { key: 'licenses' };
testAction(
@@ -184,7 +184,7 @@ describe('IDE file templates actions', () => {
});
describe('receiveTemplateError', () => {
- it('dispatches setErrorMessage', done => {
+ it('dispatches setErrorMessage', (done) => {
testAction(
actions.receiveTemplateError,
'test',
@@ -217,7 +217,7 @@ describe('IDE file templates actions', () => {
.replyOnce(200, { content: 'testing content' });
});
- it('dispatches setFileTemplate if template already has content', done => {
+ it('dispatches setFileTemplate if template already has content', (done) => {
const template = { content: 'already has content' };
testAction(
@@ -230,7 +230,7 @@ describe('IDE file templates actions', () => {
);
});
- it('dispatches success', done => {
+ it('dispatches success', (done) => {
const template = { key: 'mit' };
state.selectedTemplateType = { key: 'licenses' };
@@ -245,7 +245,7 @@ describe('IDE file templates actions', () => {
);
});
- it('dispatches success and uses name key for API call', done => {
+ it('dispatches success and uses name key for API call', (done) => {
const template = { name: 'testing' };
state.selectedTemplateType = { key: 'licenses' };
@@ -266,7 +266,7 @@ describe('IDE file templates actions', () => {
mock.onGet(/api\/(.*)\/templates\/licenses\/mit/).replyOnce(500);
});
- it('dispatches error', done => {
+ it('dispatches error', (done) => {
const template = { name: 'testing' };
state.selectedTemplateType = { key: 'licenses' };
diff --git a/spec/frontend/ide/stores/modules/merge_requests/actions_spec.js b/spec/frontend/ide/stores/modules/merge_requests/actions_spec.js
index ec472ab418f..6594d65f558 100644
--- a/spec/frontend/ide/stores/modules/merge_requests/actions_spec.js
+++ b/spec/frontend/ide/stores/modules/merge_requests/actions_spec.js
@@ -1,4 +1,5 @@
import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
import axios from '~/lib/utils/axios_utils';
import state from '~/ide/stores/modules/merge_requests/state';
import * as types from '~/ide/stores/modules/merge_requests/mutation_types';
@@ -10,7 +11,6 @@ import {
resetMergeRequests,
} from '~/ide/stores/modules/merge_requests/actions';
import { mergeRequests } from '../../../mock_data';
-import testAction from '../../../../helpers/vuex_action_helper';
describe('IDE merge requests actions', () => {
let mockedState;
@@ -28,7 +28,7 @@ describe('IDE merge requests actions', () => {
});
describe('requestMergeRequests', () => {
- it('should commit request', done => {
+ it('should commit request', (done) => {
testAction(
requestMergeRequests,
null,
@@ -41,7 +41,7 @@ describe('IDE merge requests actions', () => {
});
describe('receiveMergeRequestsError', () => {
- it('should commit error', done => {
+ it('should commit error', (done) => {
testAction(
receiveMergeRequestsError,
{ type: 'created', search: '' },
@@ -64,7 +64,7 @@ describe('IDE merge requests actions', () => {
});
describe('receiveMergeRequestsSuccess', () => {
- it('should commit received data', done => {
+ it('should commit received data', (done) => {
testAction(
receiveMergeRequestsSuccess,
mergeRequests,
@@ -118,7 +118,7 @@ describe('IDE merge requests actions', () => {
});
});
- it('dispatches success with received data', done => {
+ it('dispatches success with received data', (done) => {
testAction(
fetchMergeRequests,
{ type: 'created' },
@@ -156,7 +156,7 @@ describe('IDE merge requests actions', () => {
);
});
- it('dispatches success with received data', done => {
+ it('dispatches success with received data', (done) => {
testAction(
fetchMergeRequests,
{ type: null },
@@ -177,7 +177,7 @@ describe('IDE merge requests actions', () => {
mock.onGet(/\/api\/v4\/merge_requests(.*)$/).replyOnce(500);
});
- it('dispatches error', done => {
+ it('dispatches error', (done) => {
testAction(
fetchMergeRequests,
{ type: 'created', search: '' },
@@ -195,7 +195,7 @@ describe('IDE merge requests actions', () => {
});
describe('resetMergeRequests', () => {
- it('commits reset', done => {
+ it('commits reset', (done) => {
testAction(
resetMergeRequests,
null,
diff --git a/spec/frontend/ide/stores/modules/pane/actions_spec.js b/spec/frontend/ide/stores/modules/pane/actions_spec.js
index 8c56714e0ed..42fe8b400b8 100644
--- a/spec/frontend/ide/stores/modules/pane/actions_spec.js
+++ b/spec/frontend/ide/stores/modules/pane/actions_spec.js
@@ -7,18 +7,18 @@ describe('IDE pane module actions', () => {
const TEST_VIEW_KEEP_ALIVE = { name: 'test-keep-alive', keepAlive: true };
describe('toggleOpen', () => {
- it('dispatches open if closed', done => {
+ it('dispatches open if closed', (done) => {
testAction(actions.toggleOpen, TEST_VIEW, { isOpen: false }, [], [{ type: 'open' }], done);
});
- it('dispatches close if opened', done => {
+ it('dispatches close if opened', (done) => {
testAction(actions.toggleOpen, TEST_VIEW, { isOpen: true }, [], [{ type: 'close' }], done);
});
});
describe('open', () => {
describe('with a view specified', () => {
- it('commits SET_OPEN and SET_CURRENT_VIEW', done => {
+ it('commits SET_OPEN and SET_CURRENT_VIEW', (done) => {
testAction(
actions.open,
TEST_VIEW,
@@ -32,7 +32,7 @@ describe('IDE pane module actions', () => {
);
});
- it('commits KEEP_ALIVE_VIEW if keepAlive is true', done => {
+ it('commits KEEP_ALIVE_VIEW if keepAlive is true', (done) => {
testAction(
actions.open,
TEST_VIEW_KEEP_ALIVE,
@@ -49,7 +49,7 @@ describe('IDE pane module actions', () => {
});
describe('without a view specified', () => {
- it('commits SET_OPEN', done => {
+ it('commits SET_OPEN', (done) => {
testAction(
actions.open,
undefined,
@@ -63,7 +63,7 @@ describe('IDE pane module actions', () => {
});
describe('close', () => {
- it('commits SET_OPEN', done => {
+ it('commits SET_OPEN', (done) => {
testAction(actions.close, null, {}, [{ type: types.SET_OPEN, payload: false }], [], done);
});
});
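Nearly every hunk in these action specs keeps the same testAction call and only reformats the done parameter. For readers unfamiliar with the shared helpers/vuex_action_helper, the positional signature visible throughout is (action, payload, state, expected mutations, expected dispatched actions, done). A minimal usage sketch mirroring the toggleOpen case above (the payload value is illustrative):

import testAction from 'helpers/vuex_action_helper';
import * as actions from '~/ide/stores/modules/pane/actions';

it('dispatches open if closed', (done) => {
  testAction(
    actions.toggleOpen,
    { name: 'test-view' },  // payload
    { isOpen: false },      // state
    [],                     // expected mutations
    [{ type: 'open' }],     // expected dispatched actions
    done,
  );
});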
diff --git a/spec/frontend/ide/stores/modules/pipelines/actions_spec.js b/spec/frontend/ide/stores/modules/pipelines/actions_spec.js
index 8511843cc92..b7ed257e954 100644
--- a/spec/frontend/ide/stores/modules/pipelines/actions_spec.js
+++ b/spec/frontend/ide/stores/modules/pipelines/actions_spec.js
@@ -1,6 +1,7 @@
import Visibility from 'visibilityjs';
import MockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'helpers/test_constants';
+import testAction from 'helpers/vuex_action_helper';
import axios from '~/lib/utils/axios_utils';
import {
requestLatestPipeline,
@@ -24,7 +25,6 @@ import {
import state from '~/ide/stores/modules/pipelines/state';
import * as types from '~/ide/stores/modules/pipelines/mutation_types';
import { rightSidebarViews } from '~/ide/constants';
-import testAction from '../../../../helpers/vuex_action_helper';
import { pipelines, jobs } from '../../../mock_data';
describe('IDE pipelines actions', () => {
@@ -44,7 +44,7 @@ describe('IDE pipelines actions', () => {
});
describe('requestLatestPipeline', () => {
- it('commits request', done => {
+ it('commits request', (done) => {
testAction(
requestLatestPipeline,
null,
@@ -57,7 +57,7 @@ describe('IDE pipelines actions', () => {
});
describe('receiveLatestPipelineError', () => {
- it('commits error', done => {
+ it('commits error', (done) => {
testAction(
receiveLatestPipelineError,
{ status: 404 },
@@ -68,7 +68,7 @@ describe('IDE pipelines actions', () => {
);
});
- it('dispatches setErrorMessage is not 404', done => {
+ it('dispatches setErrorMessage if status is not 404', (done) => {
testAction(
receiveLatestPipelineError,
{ status: 500 },
@@ -123,7 +123,7 @@ describe('IDE pipelines actions', () => {
.reply(200, { data: { foo: 'bar' } }, { 'poll-interval': '10000' });
});
- it('dispatches request', done => {
+ it('dispatches request', (done) => {
jest.spyOn(axios, 'get');
jest.spyOn(Visibility, 'hidden').mockReturnValue(false);
@@ -139,7 +139,7 @@ describe('IDE pipelines actions', () => {
jest.advanceTimersByTime(1000);
- new Promise(resolve => requestAnimationFrame(resolve))
+ new Promise((resolve) => requestAnimationFrame(resolve))
.then(() => {
expect(axios.get).toHaveBeenCalled();
expect(axios.get).toHaveBeenCalledTimes(1);
@@ -150,7 +150,7 @@ describe('IDE pipelines actions', () => {
jest.advanceTimersByTime(10000);
})
- .then(() => new Promise(resolve => requestAnimationFrame(resolve)))
+ .then(() => new Promise((resolve) => requestAnimationFrame(resolve)))
.then(() => {
expect(axios.get).toHaveBeenCalled();
expect(axios.get).toHaveBeenCalledTimes(2);
@@ -169,7 +169,7 @@ describe('IDE pipelines actions', () => {
mock.onGet('/abc/def/commit/abc123def456ghi789jkl/pipelines').reply(500);
});
- it('dispatches error', done => {
+ it('dispatches error', (done) => {
const dispatch = jest.fn().mockName('dispatch');
const rootGetters = {
lastCommit: { id: 'abc123def456ghi789jkl' },
@@ -180,7 +180,7 @@ describe('IDE pipelines actions', () => {
jest.advanceTimersByTime(1500);
- new Promise(resolve => requestAnimationFrame(resolve))
+ new Promise((resolve) => requestAnimationFrame(resolve))
.then(() => {
expect(dispatch).toHaveBeenCalledWith('receiveLatestPipelineError', expect.anything());
})
@@ -191,13 +191,13 @@ describe('IDE pipelines actions', () => {
});
describe('requestJobs', () => {
- it('commits request', done => {
+ it('commits request', (done) => {
testAction(requestJobs, 1, mockedState, [{ type: types.REQUEST_JOBS, payload: 1 }], [], done);
});
});
describe('receiveJobsError', () => {
- it('commits error', done => {
+ it('commits error', (done) => {
testAction(
receiveJobsError,
{ id: 1 },
@@ -220,7 +220,7 @@ describe('IDE pipelines actions', () => {
});
describe('receiveJobsSuccess', () => {
- it('commits data', done => {
+ it('commits data', (done) => {
testAction(
receiveJobsSuccess,
{ id: 1, data: jobs },
@@ -240,7 +240,7 @@ describe('IDE pipelines actions', () => {
mock.onGet(stage.dropdownPath).replyOnce(200, jobs);
});
- it('dispatches request', done => {
+ it('dispatches request', (done) => {
testAction(
fetchJobs,
stage,
@@ -260,7 +260,7 @@ describe('IDE pipelines actions', () => {
mock.onGet(stage.dropdownPath).replyOnce(500);
});
- it('dispatches error', done => {
+ it('dispatches error', (done) => {
testAction(
fetchJobs,
stage,
@@ -277,7 +277,7 @@ describe('IDE pipelines actions', () => {
});
describe('toggleStageCollapsed', () => {
- it('commits collapse', done => {
+ it('commits collapse', (done) => {
testAction(
toggleStageCollapsed,
1,
@@ -290,7 +290,7 @@ describe('IDE pipelines actions', () => {
});
describe('setDetailJob', () => {
- it('commits job', done => {
+ it('commits job', (done) => {
testAction(
setDetailJob,
'job',
@@ -301,7 +301,7 @@ describe('IDE pipelines actions', () => {
);
});
- it('dispatches rightPane/open as pipeline when job is null', done => {
+ it('dispatches rightPane/open as pipeline when job is null', (done) => {
testAction(
setDetailJob,
null,
@@ -312,7 +312,7 @@ describe('IDE pipelines actions', () => {
);
});
- it('dispatches rightPane/open as job', done => {
+ it('dispatches rightPane/open as job', (done) => {
testAction(
setDetailJob,
'job',
@@ -325,13 +325,13 @@ describe('IDE pipelines actions', () => {
});
describe('requestJobLogs', () => {
- it('commits request', done => {
+ it('commits request', (done) => {
testAction(requestJobLogs, null, mockedState, [{ type: types.REQUEST_JOB_LOGS }], [], done);
});
});
describe('receiveJobLogsError', () => {
- it('commits error', done => {
+ it('commits error', (done) => {
testAction(
receiveJobLogsError,
null,
@@ -354,7 +354,7 @@ describe('IDE pipelines actions', () => {
});
describe('receiveJobLogsSuccess', () => {
- it('commits data', done => {
+ it('commits data', (done) => {
testAction(
receiveJobLogsSuccess,
'data',
@@ -377,7 +377,7 @@ describe('IDE pipelines actions', () => {
mock.onGet(`${TEST_HOST}/project/builds/trace`).replyOnce(200, { html: 'html' });
});
- it('dispatches request', done => {
+ it('dispatches request', (done) => {
testAction(
fetchJobLogs,
null,
@@ -408,7 +408,7 @@ describe('IDE pipelines actions', () => {
mock.onGet(`${TEST_HOST}/project/builds/trace`).replyOnce(500);
});
- it('dispatches error', done => {
+ it('dispatches error', (done) => {
testAction(
fetchJobLogs,
null,
@@ -422,7 +422,7 @@ describe('IDE pipelines actions', () => {
});
describe('resetLatestPipeline', () => {
- it('commits reset mutations', done => {
+ it('commits reset mutations', (done) => {
testAction(
resetLatestPipeline,
null,
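Several of the polling tests above advance Jest's fake timers and then wait on new Promise((resolve) => requestAnimationFrame(resolve)) before asserting on axios.get, so the callback scheduled by the poller has actually run. A compact sketch of that flush pattern, assuming fake timers are enabled and axios.get is spied on as in the spec above:

const waitForFrame = () => new Promise((resolve) => requestAnimationFrame(resolve));

it('polls again once the poll interval elapses', async () => {
  jest.advanceTimersByTime(1000);
  await waitForFrame();
  expect(axios.get).toHaveBeenCalledTimes(1);

  jest.advanceTimersByTime(10000);
  await waitForFrame();
  expect(axios.get).toHaveBeenCalledTimes(2);
});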
diff --git a/spec/frontend/ide/stores/modules/pipelines/mutations_spec.js b/spec/frontend/ide/stores/modules/pipelines/mutations_spec.js
index 7d2f5d5d710..d820bf0291e 100644
--- a/spec/frontend/ide/stores/modules/pipelines/mutations_spec.js
+++ b/spec/frontend/ide/stores/modules/pipelines/mutations_spec.js
@@ -137,7 +137,7 @@ describe('IDE pipelines mutations', () => {
mutations[types.RECEIVE_JOBS_SUCCESS](mockedState, { id: mockedState.stages[0].id, data });
expect(mockedState.stages[0].jobs.length).toBe(jobs.length);
expect(mockedState.stages[0].jobs).toEqual(
- jobs.map(job => ({
+ jobs.map((job) => ({
id: job.id,
name: job.name,
status: job.status,
diff --git a/spec/frontend/ide/stores/modules/terminal/actions/checks_spec.js b/spec/frontend/ide/stores/modules/terminal/actions/checks_spec.js
index 242b1579be7..05935f1db38 100644
--- a/spec/frontend/ide/stores/modules/terminal/actions/checks_spec.js
+++ b/spec/frontend/ide/stores/modules/terminal/actions/checks_spec.js
@@ -102,7 +102,7 @@ describe('IDE store terminal check actions', () => {
);
});
- [httpStatus.FORBIDDEN, httpStatus.NOT_FOUND].forEach(status => {
+ [httpStatus.FORBIDDEN, httpStatus.NOT_FOUND].forEach((status) => {
it(`hides tab, when status is ${status}`, () => {
const payload = { response: { status } };
diff --git a/spec/frontend/ide/stores/modules/terminal/actions/session_controls_spec.js b/spec/frontend/ide/stores/modules/terminal/actions/session_controls_spec.js
index d0ac2af3ffd..dd51786745f 100644
--- a/spec/frontend/ide/stores/modules/terminal/actions/session_controls_spec.js
+++ b/spec/frontend/ide/stores/modules/terminal/actions/session_controls_spec.js
@@ -281,7 +281,7 @@ describe('IDE store terminal session controls actions', () => {
);
});
- [httpStatus.NOT_FOUND, httpStatus.UNPROCESSABLE_ENTITY].forEach(status => {
+ [httpStatus.NOT_FOUND, httpStatus.UNPROCESSABLE_ENTITY].forEach((status) => {
it(`dispatches request and startSession on ${status}`, () => {
mock
.onPost(state.session.retryPath, { branch: rootState.currentBranchId, format: 'json' })
diff --git a/spec/frontend/ide/stores/modules/terminal/actions/session_status_spec.js b/spec/frontend/ide/stores/modules/terminal/actions/session_status_spec.js
index e25746e1dd1..0e123dce798 100644
--- a/spec/frontend/ide/stores/modules/terminal/actions/session_status_spec.js
+++ b/spec/frontend/ide/stores/modules/terminal/actions/session_status_spec.js
@@ -98,7 +98,7 @@ describe('IDE store terminal session controls actions', () => {
);
});
- [STOPPING, STOPPED, 'unexpected'].forEach(status => {
+ [STOPPING, STOPPED, 'unexpected'].forEach((status) => {
it(`kills session if status is ${status}`, () => {
return testAction(
actions.receiveSessionStatusSuccess,
diff --git a/spec/frontend/ide/stores/modules/terminal_sync/actions_spec.js b/spec/frontend/ide/stores/modules/terminal_sync/actions_spec.js
index 3fa57bde415..2ae7e8a8727 100644
--- a/spec/frontend/ide/stores/modules/terminal_sync/actions_spec.js
+++ b/spec/frontend/ide/stores/modules/terminal_sync/actions_spec.js
@@ -22,7 +22,7 @@ describe('ide/stores/modules/terminal_sync/actions', () => {
});
describe('upload', () => {
- it('uploads to mirror and sets success', done => {
+ it('uploads to mirror and sets success', (done) => {
mirror.upload.mockReturnValue(Promise.resolve());
testAction(
@@ -38,7 +38,7 @@ describe('ide/stores/modules/terminal_sync/actions', () => {
);
});
- it('sets error when failed', done => {
+ it('sets error when failed', (done) => {
const err = { message: 'it failed!' };
mirror.upload.mockReturnValue(Promise.reject(err));
@@ -54,7 +54,7 @@ describe('ide/stores/modules/terminal_sync/actions', () => {
});
describe('stop', () => {
- it('disconnects from mirror', done => {
+ it('disconnects from mirror', (done) => {
testAction(actions.stop, null, rootState, [{ type: types.STOP }], [], () => {
expect(mirror.disconnect).toHaveBeenCalled();
done();
@@ -83,7 +83,7 @@ describe('ide/stores/modules/terminal_sync/actions', () => {
};
});
- it('connects to mirror and sets success', done => {
+ it('connects to mirror and sets success', (done) => {
mirror.connect.mockReturnValue(Promise.resolve());
testAction(
diff --git a/spec/frontend/ide/stores/mutations/file_spec.js b/spec/frontend/ide/stores/mutations/file_spec.js
index fd39cf21635..9bbdac0ae25 100644
--- a/spec/frontend/ide/stores/mutations/file_spec.js
+++ b/spec/frontend/ide/stores/mutations/file_spec.js
@@ -128,7 +128,7 @@ describe('IDE store file mutations', () => {
localState.changedFiles[0],
localState.openFiles[0],
localFile,
- ].forEach(f => {
+ ].forEach((f) => {
expect(f).toEqual(
expect.objectContaining({
path,
@@ -142,7 +142,7 @@ describe('IDE store file mutations', () => {
});
describe('SET_FILE_RAW_DATA', () => {
- const callMutationForFile = f => {
+ const callMutationForFile = (f) => {
mutations.SET_FILE_RAW_DATA(localState, {
file: f,
raw: 'testing',
diff --git a/spec/frontend/ide/stores/plugins/terminal_sync_spec.js b/spec/frontend/ide/stores/plugins/terminal_sync_spec.js
index 2aa3e770e7d..0e5f4184679 100644
--- a/spec/frontend/ide/stores/plugins/terminal_sync_spec.js
+++ b/spec/frontend/ide/stores/plugins/terminal_sync_spec.js
@@ -4,6 +4,7 @@ import { SET_SESSION_STATUS } from '~/ide/stores/modules/terminal/mutation_types
import { RUNNING, STOPPING } from '~/ide/stores/modules/terminal/constants';
import { createStore } from '~/ide/stores';
import eventHub from '~/ide/eventhub';
+import { createTriggerUpdatePayload } from '../../helpers';
jest.mock('~/ide/lib/mirror');
@@ -51,6 +52,14 @@ describe('IDE stores/plugins/mirror', () => {
expect(store.dispatch).toHaveBeenCalledWith(ACTION_UPLOAD);
});
+ it('does nothing when ide.files.change is emitted with "update"', () => {
+ eventHub.$emit(FILES_CHANGE_EVENT, createTriggerUpdatePayload('foo'));
+
+ jest.runAllTimers();
+
+ expect(store.dispatch).not.toHaveBeenCalledWith(ACTION_UPLOAD);
+ });
+
describe('when session stops', () => {
beforeEach(() => {
store.commit(`terminal/${SET_SESSION_STATUS}`, STOPPING);
diff --git a/spec/frontend/ide/sync_router_and_store_spec.js b/spec/frontend/ide/sync_router_and_store_spec.js
index 20fd77c4dfb..ac0f6aefb69 100644
--- a/spec/frontend/ide/sync_router_and_store_spec.js
+++ b/spec/frontend/ide/sync_router_and_store_spec.js
@@ -17,7 +17,7 @@ describe('~/ide/sync_router_and_store', () => {
const getRouterCurrentPath = () => router.currentRoute.fullPath;
const getStoreCurrentPath = () => store.state.router.fullPath;
- const updateRouter = async path => {
+ const updateRouter = async (path) => {
if (getRouterCurrentPath() === path) {
return;
}
@@ -25,7 +25,7 @@ describe('~/ide/sync_router_and_store', () => {
router.push(path);
await waitForPromises();
};
- const updateStore = path => {
+ const updateStore = (path) => {
store.dispatch('router/push', path);
return waitForPromises();
};
diff --git a/spec/frontend/ide/utils_spec.js b/spec/frontend/ide/utils_spec.js
index 3b772c0b259..f467d174eeb 100644
--- a/spec/frontend/ide/utils_spec.js
+++ b/spec/frontend/ide/utils_spec.js
@@ -263,7 +263,7 @@ describe('WebIDE utils', () => {
it('reads a file and returns its output as a data url', () => {
const file = new File(['foo'], 'foo.png', { type: 'image/png' });
- return readFileAsDataURL(file).then(contents => {
+ return readFileAsDataURL(file).then((contents) => {
expect(contents).toBe('data:image/png;base64,Zm9v');
});
});
diff --git a/spec/frontend/image_diff/helpers/comment_indicator_helper_spec.js b/spec/frontend/image_diff/helpers/comment_indicator_helper_spec.js
index 98c05d648b8..6548f9959c5 100644
--- a/spec/frontend/image_diff/helpers/comment_indicator_helper_spec.js
+++ b/spec/frontend/image_diff/helpers/comment_indicator_helper_spec.js
@@ -1,4 +1,4 @@
-import { TEST_HOST } from 'jest/helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
import * as commentIndicatorHelper from '~/image_diff/helpers/comment_indicator_helper';
import * as mockData from '../mock_data';
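This one-line change, repeated in the next few image_diff specs, swaps the jest/helpers prefix (and, elsewhere in this diff, long relative paths) for the shorter helpers/ alias. A hedged sketch of the kind of Jest moduleNameMapper entry such an alias relies on; the target directory is an assumption, not the repository's actual configuration:

// jest.config.js (excerpt, illustrative only)
module.exports = {
  moduleNameMapper: {
    // Lets specs write: import { TEST_HOST } from 'helpers/test_constants';
    '^helpers(/.*)$': '<rootDir>/spec/frontend/helpers$1',
  },
};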
diff --git a/spec/frontend/image_diff/helpers/utils_helper_spec.js b/spec/frontend/image_diff/helpers/utils_helper_spec.js
index 7f2376826c2..29bebaf6905 100644
--- a/spec/frontend/image_diff/helpers/utils_helper_spec.js
+++ b/spec/frontend/image_diff/helpers/utils_helper_spec.js
@@ -1,4 +1,4 @@
-import { TEST_HOST } from 'jest/helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
import * as utilsHelper from '~/image_diff/helpers/utils_helper';
import ImageBadge from '~/image_diff/image_badge';
import * as mockData from '../mock_data';
diff --git a/spec/frontend/image_diff/image_diff_spec.js b/spec/frontend/image_diff/image_diff_spec.js
index d89e4312344..e14f8dc774f 100644
--- a/spec/frontend/image_diff/image_diff_spec.js
+++ b/spec/frontend/image_diff/image_diff_spec.js
@@ -1,4 +1,4 @@
-import { TEST_HOST } from 'jest/helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
import ImageDiff from '~/image_diff/image_diff';
import * as imageUtility from '~/lib/utils/image_utility';
import imageDiffHelper from '~/image_diff/helpers/index';
diff --git a/spec/frontend/image_diff/init_discussion_tab_spec.js b/spec/frontend/image_diff/init_discussion_tab_spec.js
index f459fdf5a08..f96d00230ee 100644
--- a/spec/frontend/image_diff/init_discussion_tab_spec.js
+++ b/spec/frontend/image_diff/init_discussion_tab_spec.js
@@ -11,7 +11,7 @@ describe('initDiscussionTab', () => {
`);
});
- it('should pass canCreateNote as false to initImageDiff', done => {
+ it('should pass canCreateNote as false to initImageDiff', (done) => {
jest
.spyOn(initImageDiffHelper, 'initImageDiff')
.mockImplementation((diffFileEl, canCreateNote) => {
@@ -22,7 +22,7 @@ describe('initDiscussionTab', () => {
initDiscussionTab();
});
- it('should pass renderCommentBadge as true to initImageDiff', done => {
+ it('should pass renderCommentBadge as true to initImageDiff', (done) => {
jest
.spyOn(initImageDiffHelper, 'initImageDiff')
.mockImplementation((diffFileEl, canCreateNote, renderCommentBadge) => {
diff --git a/spec/frontend/image_diff/replaced_image_diff_spec.js b/spec/frontend/image_diff/replaced_image_diff_spec.js
index 10827d76e55..16be4b82ad7 100644
--- a/spec/frontend/image_diff/replaced_image_diff_spec.js
+++ b/spec/frontend/image_diff/replaced_image_diff_spec.js
@@ -1,4 +1,4 @@
-import { TEST_HOST } from 'jest/helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
import ReplacedImageDiff from '~/image_diff/replaced_image_diff';
import ImageDiff from '~/image_diff/image_diff';
import { viewTypes } from '~/image_diff/view_types';
@@ -176,8 +176,8 @@ describe('ReplacedImageDiff', () => {
expect(ImageDiff.prototype.bindEvents).toHaveBeenCalled();
});
- it('should register click eventlistener to 2-up view mode', done => {
- jest.spyOn(ReplacedImageDiff.prototype, 'changeView').mockImplementation(viewMode => {
+ it('should register click eventlistener to 2-up view mode', (done) => {
+ jest.spyOn(ReplacedImageDiff.prototype, 'changeView').mockImplementation((viewMode) => {
expect(viewMode).toEqual(viewTypes.TWO_UP);
done();
});
@@ -186,8 +186,8 @@ describe('ReplacedImageDiff', () => {
replacedImageDiff.viewModesEls[viewTypes.TWO_UP].click();
});
- it('should register click eventlistener to swipe view mode', done => {
- jest.spyOn(ReplacedImageDiff.prototype, 'changeView').mockImplementation(viewMode => {
+ it('should register click eventlistener to swipe view mode', (done) => {
+ jest.spyOn(ReplacedImageDiff.prototype, 'changeView').mockImplementation((viewMode) => {
expect(viewMode).toEqual(viewTypes.SWIPE);
done();
});
@@ -196,8 +196,8 @@ describe('ReplacedImageDiff', () => {
replacedImageDiff.viewModesEls[viewTypes.SWIPE].click();
});
- it('should register click eventlistener to onion skin view mode', done => {
- jest.spyOn(ReplacedImageDiff.prototype, 'changeView').mockImplementation(viewMode => {
+ it('should register click eventlistener to onion skin view mode', (done) => {
+ jest.spyOn(ReplacedImageDiff.prototype, 'changeView').mockImplementation((viewMode) => {
expect(viewMode).toEqual(viewTypes.SWIPE);
done();
});
@@ -325,7 +325,7 @@ describe('ReplacedImageDiff', () => {
setupImageFrameEls();
});
- it('should pass showCommentIndicator normalized indicator values', done => {
+ it('should pass showCommentIndicator normalized indicator values', (done) => {
jest.spyOn(imageDiffHelper, 'showCommentIndicator').mockImplementation(() => {});
jest
.spyOn(imageDiffHelper, 'resizeCoordinatesToImageElement')
@@ -339,7 +339,7 @@ describe('ReplacedImageDiff', () => {
replacedImageDiff.renderNewView(indicator);
});
- it('should call showCommentIndicator', done => {
+ it('should call showCommentIndicator', (done) => {
const normalized = {
normalized: true,
};
diff --git a/spec/frontend/import_entities/import_groups/components/import_table_row_spec.js b/spec/frontend/import_entities/import_groups/components/import_table_row_spec.js
index d88a31a0e47..ac8b73aeb49 100644
--- a/spec/frontend/import_entities/import_groups/components/import_table_row_spec.js
+++ b/spec/frontend/import_entities/import_groups/components/import_table_row_spec.js
@@ -5,7 +5,7 @@ import ImportTableRow from '~/import_entities/import_groups/components/import_ta
import { STATUSES } from '~/import_entities/constants';
import { availableNamespacesFixture } from '../graphql/fixtures';
-const getFakeGroup = status => ({
+const getFakeGroup = (status) => ({
web_url: 'https://fake.host/',
full_path: 'fake_group_1',
full_name: 'fake_name_1',
@@ -22,13 +22,13 @@ describe('import table row', () => {
let group;
const findByText = (cmp, text) => {
- return wrapper.findAll(cmp).wrappers.find(node => node.text().indexOf(text) === 0);
+ return wrapper.findAll(cmp).wrappers.find((node) => node.text().indexOf(text) === 0);
};
const findImportButton = () => findByText(GlButton, 'Import');
const findNameInput = () => wrapper.find(GlFormInput);
const findNamespaceDropdown = () => wrapper.find(Select2Select);
- const createComponent = props => {
+ const createComponent = (props) => {
wrapper = shallowMount(ImportTableRow, {
propsData: {
availableNamespaces: availableNamespacesFixture,
diff --git a/spec/frontend/import_entities/import_groups/components/import_table_spec.js b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
index 0ca721cd951..cd184bb65cc 100644
--- a/spec/frontend/import_entities/import_groups/components/import_table_spec.js
+++ b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
@@ -2,7 +2,7 @@ import { shallowMount, createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import { GlLoadingIcon } from '@gitlab/ui';
import waitForPromises from 'helpers/wait_for_promises';
-import createMockApollo from 'jest/helpers/mock_apollo_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
import ImportTableRow from '~/import_entities/import_groups/components/import_table_row.vue';
import ImportTable from '~/import_entities/import_groups/components/import_table.vue';
import setTargetNamespaceMutation from '~/import_entities/import_groups/graphql/mutations/set_target_namespace.mutation.graphql';
diff --git a/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js b/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js
index cacbe358a62..514ed411138 100644
--- a/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js
+++ b/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js
@@ -63,7 +63,7 @@ describe('Bulk import resolvers', () => {
});
it('mirrors REST endpoint response fields', () => {
- const extractRelevantFields = obj => ({ id: obj.id, full_path: obj.full_path });
+ const extractRelevantFields = (obj) => ({ id: obj.id, full_path: obj.full_path });
expect(results.map(extractRelevantFields)).toStrictEqual(
availableNamespacesFixture.map(extractRelevantFields),
@@ -89,20 +89,20 @@ describe('Bulk import resolvers', () => {
expect(
results.every((r, idx) =>
MIRRORED_FIELDS.every(
- field => r[field] === statusEndpointFixture.importable_data[idx][field],
+ (field) => r[field] === statusEndpointFixture.importable_data[idx][field],
),
),
).toBe(true);
});
it('populates each result instance with status field default to none', () => {
- expect(results.every(r => r.status === STATUSES.NONE)).toBe(true);
+ expect(results.every((r) => r.status === STATUSES.NONE)).toBe(true);
});
it('populates each result instance with import_target defaulted to first available namespace', () => {
expect(
results.every(
- r => r.import_target.target_namespace === availableNamespacesFixture[0].full_path,
+ (r) => r.import_target.target_namespace === availableNamespacesFixture[0].full_path,
),
).toBe(true);
});
diff --git a/spec/frontend/import_entities/import_groups/graphql/services/status_poller_spec.js b/spec/frontend/import_entities/import_groups/graphql/services/status_poller_spec.js
index 8eb1ffb3cd0..e7f1626f81d 100644
--- a/spec/frontend/import_entities/import_groups/graphql/services/status_poller_spec.js
+++ b/spec/frontend/import_entities/import_groups/graphql/services/status_poller_spec.js
@@ -23,7 +23,7 @@ describe('Bulk import status poller', () => {
let clientMock;
const listQueryCacheCalls = () =>
- clientMock.readQuery.mock.calls.filter(call => call[0].query === bulkImportSourceGroupsQuery);
+ clientMock.readQuery.mock.calls.filter((call) => call[0].query === bulkImportSourceGroupsQuery);
beforeEach(() => {
clientMock = createMockClient({
@@ -142,9 +142,11 @@ describe('Bulk import status poller', () => {
clientMock.cache.writeQuery({
query: bulkImportSourceGroupsQuery,
data: {
- bulkImportSourceGroups: [STARTED_GROUP_1, NOT_STARTED_GROUP, STARTED_GROUP_2].map(group =>
- generateFakeEntry(group),
- ),
+ bulkImportSourceGroups: [
+ STARTED_GROUP_1,
+ NOT_STARTED_GROUP,
+ STARTED_GROUP_2,
+ ].map((group) => generateFakeEntry(group)),
},
});
@@ -155,9 +157,9 @@ describe('Bulk import status poller', () => {
await waitForPromises();
const [[doc]] = clientMock.query.mock.calls;
const { selections } = doc.query.definitions[0].selectionSet;
- expect(selections.every(field => field.name.value === 'group')).toBeTruthy();
+ expect(selections.every((field) => field.name.value === 'group')).toBeTruthy();
expect(selections).toHaveLength(2);
- expect(selections.map(sel => sel.arguments[0].value.value)).toStrictEqual([
+ expect(selections.map((sel) => sel.arguments[0].value.value)).toStrictEqual([
`${TARGET_NAMESPACE}/${STARTED_GROUP_1.import_target.new_name}`,
`${TARGET_NAMESPACE}/${STARTED_GROUP_2.import_target.new_name}`,
]);
@@ -167,7 +169,7 @@ describe('Bulk import status poller', () => {
clientMock.cache.writeQuery({
query: bulkImportSourceGroupsQuery,
data: {
- bulkImportSourceGroups: [STARTED_GROUP_1, STARTED_GROUP_2].map(group =>
+ bulkImportSourceGroups: [STARTED_GROUP_1, STARTED_GROUP_2].map((group) =>
generateFakeEntry(group),
),
},
@@ -189,7 +191,7 @@ describe('Bulk import status poller', () => {
clientMock.cache.writeQuery({
query: bulkImportSourceGroupsQuery,
data: {
- bulkImportSourceGroups: [STARTED_GROUP_1, STARTED_GROUP_2].map(group =>
+ bulkImportSourceGroups: [STARTED_GROUP_1, STARTED_GROUP_2].map((group) =>
generateFakeEntry(group),
),
},
diff --git a/spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js b/spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js
index b4ac11b4404..27f642d15c8 100644
--- a/spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js
+++ b/spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js
@@ -27,7 +27,7 @@ describe('ImportProjectsTable', () => {
const findImportAllButton = () =>
wrapper
.findAll(GlButton)
- .filter(w => w.props().variant === 'success')
+ .filter((w) => w.props().variant === 'success')
.at(0);
const findImportAllModal = () => wrapper.find({ ref: 'importAllModal' });
@@ -112,7 +112,7 @@ describe('ImportProjectsTable', () => {
expect(
wrapper
.findAll('th')
- .filter(w => w.text() === `From ${providerTitle}`)
+ .filter((w) => w.text() === `From ${providerTitle}`)
.exists(),
).toBe(true);
@@ -142,6 +142,29 @@ describe('ImportProjectsTable', () => {
},
);
+ it.each`
+ importingRepoCount | buttonMessage
+ ${1} | ${'Importing 1 repository'}
+ ${5} | ${'Importing 5 repositories'}
+ `(
+ 'sets the button text to "$buttonMessage" when importing repos',
+ ({ importingRepoCount, buttonMessage }) => {
+ createComponent({
+ state: {
+ providerRepos: [providerRepo],
+ },
+ getters: {
+ hasIncompatibleRepos: () => false,
+ importAllCount: () => 10,
+ isImportingAnyRepo: () => true,
+ importingRepoCount: () => importingRepoCount,
+ },
+ });
+
+ expect(findImportAllButton().text()).toBe(buttonMessage);
+ },
+ );
+
it('renders an empty state if there are no repositories available', () => {
createComponent({ state: { repositories: [] } });
@@ -168,7 +191,7 @@ describe('ImportProjectsTable', () => {
});
it('shows loading spinner when import is in progress', () => {
- createComponent({ getters: { isImportingAnyRepo: () => true } });
+ createComponent({ getters: { isImportingAnyRepo: () => true, importAllCount: () => 1 } });
expect(findImportAllButton().props().loading).toBe(true);
});
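The new parameterised case added above uses Jest's it.each tagged-template form: the header row names the variables and each subsequent row supplies one case, interpolated with ${}. A stripped-down, self-contained sketch of the same construct:

const importingMessage = (n) => `Importing ${n} ${n === 1 ? 'repository' : 'repositories'}`;

it.each`
  count | message
  ${1}  | ${'Importing 1 repository'}
  ${5}  | ${'Importing 5 repositories'}
`('formats "$message"', ({ count, message }) => {
  // Each table row arrives as a destructurable object keyed by the header names.
  expect(importingMessage(count)).toBe(message);
});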
diff --git a/spec/frontend/import_entities/import_projects/components/provider_repo_table_row_spec.js b/spec/frontend/import_entities/import_projects/components/provider_repo_table_row_spec.js
index aa003226050..2ed11ae277e 100644
--- a/spec/frontend/import_entities/import_projects/components/provider_repo_table_row_spec.js
+++ b/spec/frontend/import_entities/import_projects/components/provider_repo_table_row_spec.js
@@ -34,7 +34,7 @@ describe('ProviderRepoTableRow', () => {
}
const findImportButton = () => {
- const buttons = wrapper.findAll('button').filter(node => node.text() === 'Import');
+ const buttons = wrapper.findAll('button').filter((node) => node.text() === 'Import');
return buttons.length ? buttons.at(0) : buttons;
};
diff --git a/spec/frontend/import_entities/import_projects/store/actions_spec.js b/spec/frontend/import_entities/import_projects/store/actions_spec.js
index 5d4e73a17a3..bd731dc3929 100644
--- a/spec/frontend/import_entities/import_projects/store/actions_spec.js
+++ b/spec/frontend/import_entities/import_projects/store/actions_spec.js
@@ -123,7 +123,7 @@ describe('import_projects store actions', () => {
it('includes page in url query params', async () => {
let requestedUrl;
- mock.onGet().reply(config => {
+ mock.onGet().reply((config) => {
requestedUrl = config.url;
return [200, payload];
});
diff --git a/spec/frontend/incidents/components/incidents_list_spec.js b/spec/frontend/incidents/components/incidents_list_spec.js
index 6329a84ff6e..8212776742c 100644
--- a/spec/frontend/incidents/components/incidents_list_spec.js
+++ b/spec/frontend/incidents/components/incidents_list_spec.js
@@ -158,17 +158,11 @@ describe('Incidents List', () => {
describe('Assignees', () => {
it('shows Unassigned when there are no assignees', () => {
- expect(
- findAssignees()
- .at(0)
- .text(),
- ).toBe(I18N.unassigned);
+ expect(findAssignees().at(0).text()).toBe(I18N.unassigned);
});
it('renders an avatar component when there is an assignee', () => {
- const avatar = findAssignees()
- .at(1)
- .find(GlAvatar);
+ const avatar = findAssignees().at(1).find(GlAvatar);
const { src, label } = avatar.attributes();
const { name, avatarUrl } = mockIncidents[1].assignees.nodes[0];
@@ -189,9 +183,7 @@ describe('Incidents List', () => {
});
it('contains a link to the incident details page', async () => {
- findTableRows()
- .at(0)
- .trigger('click');
+ findTableRows().at(0).trigger('click');
expect(visitUrl).toHaveBeenCalledWith(
joinPaths(`/project/issues/incident`, mockIncidents[0].iid),
);
diff --git a/spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap b/spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap
index c3fd4a9bab2..82d7f691efd 100644
--- a/spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap
+++ b/spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap
@@ -50,6 +50,7 @@ exports[`Alert integration settings form default state should match the default
category="primary"
data-qa-selector="incident_templates_dropdown"
headertext=""
+ hideheaderborder="true"
id="alert-integration-settings-issue-template"
size="medium"
text="selecte_tmpl"
diff --git a/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap b/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap
index f0eb54c1b3a..79ad5ad1bb9 100644
--- a/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap
+++ b/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap
@@ -52,6 +52,7 @@ exports[`Alert integration settings form should match the default snapshot 1`] =
</gl-button-stub>
<gl-modal-stub
+ dismisslabel="Close"
modalclass=""
modalid="resetWebhookModal"
ok-title="Reset webhook URL"
diff --git a/spec/frontend/incidents_settings/components/incidents_settings_tabs_spec.js b/spec/frontend/incidents_settings/components/incidents_settings_tabs_spec.js
index 11b9eda2585..c80d9ad2e5b 100644
--- a/spec/frontend/incidents_settings/components/incidents_settings_tabs_spec.js
+++ b/spec/frontend/incidents_settings/components/incidents_settings_tabs_spec.js
@@ -39,19 +39,12 @@ describe('IncidentsSettingTabs', () => {
});
it('should render the tab for each active integration', () => {
- const activeTabs = wrapper.vm.$options.tabs.filter(tab => tab.active);
+ const activeTabs = wrapper.vm.$options.tabs.filter((tab) => tab.active);
expect(findIntegrationTabs().length).toBe(activeTabs.length);
activeTabs.forEach((tab, index) => {
+ expect(findIntegrationTabs().at(index).attributes('title')).toBe(tab.title);
expect(
- findIntegrationTabs()
- .at(index)
- .attributes('title'),
- ).toBe(tab.title);
- expect(
- findIntegrationTabs()
- .at(index)
- .find(`[data-testid="${tab.component}-tab"]`)
- .exists(),
+ findIntegrationTabs().at(index).find(`[data-testid="${tab.component}-tab"]`).exists(),
).toBe(true);
});
});
diff --git a/spec/frontend/integrations/edit/components/dynamic_field_spec.js b/spec/frontend/integrations/edit/components/dynamic_field_spec.js
index 53234419f5f..ecd03103992 100644
--- a/spec/frontend/integrations/edit/components/dynamic_field_spec.js
+++ b/spec/frontend/integrations/edit/components/dynamic_field_spec.js
@@ -37,153 +37,142 @@ describe('DynamicField', () => {
const findGlFormTextarea = () => wrapper.find(GlFormTextarea);
describe('template', () => {
- describe.each([[true, 'disabled', 'readonly'], [false, undefined, undefined]])(
- 'dynamic field, when isInheriting = `%p`',
- (isInheriting, disabled, readonly) => {
- describe('type is checkbox', () => {
- beforeEach(() => {
- createComponent(
- {
- type: 'checkbox',
- },
- isInheriting,
- );
- });
-
- it(`renders GlFormCheckbox, which ${isInheriting ? 'is' : 'is not'} disabled`, () => {
- expect(findGlFormCheckbox().exists()).toBe(true);
- expect(
- findGlFormCheckbox()
- .find('[type=checkbox]')
- .attributes('disabled'),
- ).toBe(disabled);
- });
+ describe.each([
+ [true, 'disabled', 'readonly'],
+ [false, undefined, undefined],
+ ])('dynamic field, when isInheriting = `%p`', (isInheriting, disabled, readonly) => {
+ describe('type is checkbox', () => {
+ beforeEach(() => {
+ createComponent(
+ {
+ type: 'checkbox',
+ },
+ isInheriting,
+ );
+ });
- it('does not render other types of input', () => {
- expect(findGlFormSelect().exists()).toBe(false);
- expect(findGlFormTextarea().exists()).toBe(false);
- expect(findGlFormInput().exists()).toBe(false);
- });
+ it(`renders GlFormCheckbox, which ${isInheriting ? 'is' : 'is not'} disabled`, () => {
+ expect(findGlFormCheckbox().exists()).toBe(true);
+ expect(findGlFormCheckbox().find('[type=checkbox]').attributes('disabled')).toBe(
+ disabled,
+ );
});
- describe('type is select', () => {
- beforeEach(() => {
- createComponent(
- {
- type: 'select',
- choices: [['all', 'All details'], ['standard', 'Standard']],
- },
- isInheriting,
- );
- });
+ it('does not render other types of input', () => {
+ expect(findGlFormSelect().exists()).toBe(false);
+ expect(findGlFormTextarea().exists()).toBe(false);
+ expect(findGlFormInput().exists()).toBe(false);
+ });
+ });
- it(`renders GlFormSelect, which ${isInheriting ? 'is' : 'is not'} disabled`, () => {
- expect(findGlFormSelect().exists()).toBe(true);
- expect(findGlFormSelect().findAll('option')).toHaveLength(2);
- expect(
- findGlFormSelect()
- .find('select')
- .attributes('disabled'),
- ).toBe(disabled);
- });
+ describe('type is select', () => {
+ beforeEach(() => {
+ createComponent(
+ {
+ type: 'select',
+ choices: [
+ ['all', 'All details'],
+ ['standard', 'Standard'],
+ ],
+ },
+ isInheriting,
+ );
+ });
- it('does not render other types of input', () => {
- expect(findGlFormCheckbox().exists()).toBe(false);
- expect(findGlFormTextarea().exists()).toBe(false);
- expect(findGlFormInput().exists()).toBe(false);
- });
+ it(`renders GlFormSelect, which ${isInheriting ? 'is' : 'is not'} disabled`, () => {
+ expect(findGlFormSelect().exists()).toBe(true);
+ expect(findGlFormSelect().findAll('option')).toHaveLength(2);
+ expect(findGlFormSelect().find('select').attributes('disabled')).toBe(disabled);
});
- describe('type is textarea', () => {
- beforeEach(() => {
- createComponent(
- {
- type: 'textarea',
- },
- isInheriting,
- );
- });
+ it('does not render other types of input', () => {
+ expect(findGlFormCheckbox().exists()).toBe(false);
+ expect(findGlFormTextarea().exists()).toBe(false);
+ expect(findGlFormInput().exists()).toBe(false);
+ });
+ });
- it(`renders GlFormTextarea, which ${isInheriting ? 'is' : 'is not'} readonly`, () => {
- expect(findGlFormTextarea().exists()).toBe(true);
- expect(
- findGlFormTextarea()
- .find('textarea')
- .attributes('readonly'),
- ).toBe(readonly);
- });
+ describe('type is textarea', () => {
+ beforeEach(() => {
+ createComponent(
+ {
+ type: 'textarea',
+ },
+ isInheriting,
+ );
+ });
- it('does not render other types of input', () => {
- expect(findGlFormCheckbox().exists()).toBe(false);
- expect(findGlFormSelect().exists()).toBe(false);
- expect(findGlFormInput().exists()).toBe(false);
- });
+ it(`renders GlFormTextarea, which ${isInheriting ? 'is' : 'is not'} readonly`, () => {
+ expect(findGlFormTextarea().exists()).toBe(true);
+ expect(findGlFormTextarea().find('textarea').attributes('readonly')).toBe(readonly);
});
- describe('type is password', () => {
- beforeEach(() => {
- createComponent(
- {
- type: 'password',
- },
- isInheriting,
- );
- });
+ it('does not render other types of input', () => {
+ expect(findGlFormCheckbox().exists()).toBe(false);
+ expect(findGlFormSelect().exists()).toBe(false);
+ expect(findGlFormInput().exists()).toBe(false);
+ });
+ });
- it(`renders GlFormInput, which ${isInheriting ? 'is' : 'is not'} readonly`, () => {
- expect(findGlFormInput().exists()).toBe(true);
- expect(findGlFormInput().attributes('type')).toBe('password');
- expect(findGlFormInput().attributes('readonly')).toBe(readonly);
- });
+ describe('type is password', () => {
+ beforeEach(() => {
+ createComponent(
+ {
+ type: 'password',
+ },
+ isInheriting,
+ );
+ });
- it('does not render other types of input', () => {
- expect(findGlFormCheckbox().exists()).toBe(false);
- expect(findGlFormSelect().exists()).toBe(false);
- expect(findGlFormTextarea().exists()).toBe(false);
- });
+ it(`renders GlFormInput, which ${isInheriting ? 'is' : 'is not'} readonly`, () => {
+ expect(findGlFormInput().exists()).toBe(true);
+ expect(findGlFormInput().attributes('type')).toBe('password');
+ expect(findGlFormInput().attributes('readonly')).toBe(readonly);
});
- describe('type is text', () => {
- beforeEach(() => {
- createComponent(
- {
- type: 'text',
- required: true,
- },
- isInheriting,
- );
- });
+ it('does not render other types of input', () => {
+ expect(findGlFormCheckbox().exists()).toBe(false);
+ expect(findGlFormSelect().exists()).toBe(false);
+ expect(findGlFormTextarea().exists()).toBe(false);
+ });
+ });
- it(`renders GlFormInput, which ${isInheriting ? 'is' : 'is not'} readonly`, () => {
- expect(findGlFormInput().exists()).toBe(true);
- expect(findGlFormInput().attributes()).toMatchObject({
+ describe('type is text', () => {
+ beforeEach(() => {
+ createComponent(
+ {
type: 'text',
- id: 'service_project_url',
- name: 'service[project_url]',
- placeholder: defaultProps.placeholder,
- required: 'required',
- });
- expect(findGlFormInput().attributes('readonly')).toBe(readonly);
- });
+ required: true,
+ },
+ isInheriting,
+ );
+ });
- it('does not render other types of input', () => {
- expect(findGlFormCheckbox().exists()).toBe(false);
- expect(findGlFormSelect().exists()).toBe(false);
- expect(findGlFormTextarea().exists()).toBe(false);
+ it(`renders GlFormInput, which ${isInheriting ? 'is' : 'is not'} readonly`, () => {
+ expect(findGlFormInput().exists()).toBe(true);
+ expect(findGlFormInput().attributes()).toMatchObject({
+ type: 'text',
+ id: 'service_project_url',
+ name: 'service[project_url]',
+ placeholder: defaultProps.placeholder,
+ required: 'required',
});
+ expect(findGlFormInput().attributes('readonly')).toBe(readonly);
});
- },
- );
+
+ it('does not render other types of input', () => {
+ expect(findGlFormCheckbox().exists()).toBe(false);
+ expect(findGlFormSelect().exists()).toBe(false);
+ expect(findGlFormTextarea().exists()).toBe(false);
+ });
+ });
+ });
describe('help text', () => {
it('renders description with help text', () => {
createComponent();
- expect(
- findGlFormGroup()
- .find('small')
- .text(),
- ).toBe(defaultProps.help);
+ expect(findGlFormGroup().find('small').text()).toBe(defaultProps.help);
});
it('renders description with help text as HTML', () => {
@@ -193,11 +182,7 @@ describe('DynamicField', () => {
help: helpHTML,
});
- expect(
- findGlFormGroup()
- .find('small')
- .html(),
- ).toContain(helpHTML);
+ expect(findGlFormGroup().find('small').html()).toContain(helpHTML);
});
});
@@ -205,11 +190,7 @@ describe('DynamicField', () => {
it('renders label with title', () => {
createComponent();
- expect(
- findGlFormGroup()
- .find('label')
- .text(),
- ).toBe(defaultProps.title);
+ expect(findGlFormGroup().find('label').text()).toBe(defaultProps.title);
});
describe('for password field with some value (hidden by backend)', () => {
@@ -219,11 +200,7 @@ describe('DynamicField', () => {
value: 'true',
});
- expect(
- findGlFormGroup()
- .find('label')
- .text(),
- ).toBe(`Enter new ${defaultProps.title}`);
+ expect(findGlFormGroup().find('label').text()).toBe(`Enter new ${defaultProps.title}`);
});
});
});
diff --git a/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js b/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js
index a727bb9c734..eaeed2703d1 100644
--- a/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js
+++ b/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js
@@ -12,7 +12,7 @@ describe('JiraIssuesFields', () => {
editProjectPath: '/edit',
};
- const createComponent = props => {
+ const createComponent = (props) => {
wrapper = mount(JiraIssuesFields, {
propsData: { ...defaultProps, ...props },
});
diff --git a/spec/frontend/integrations/edit/components/jira_trigger_fields_spec.js b/spec/frontend/integrations/edit/components/jira_trigger_fields_spec.js
index 782930eb6a2..a69e8d6e163 100644
--- a/spec/frontend/integrations/edit/components/jira_trigger_fields_spec.js
+++ b/spec/frontend/integrations/edit/components/jira_trigger_fields_spec.js
@@ -57,9 +57,7 @@ describe('JiraTriggerFields', () => {
// browsers don't include unchecked boxes in form submissions.
it('includes comment settings as false even if unchecked', () => {
expect(
- findCommentSettings()
- .find('input[name="service[comment_on_event_enabled]"]')
- .exists(),
+ findCommentSettings().find('input[name="service[comment_on_event_enabled]"]').exists(),
).toBe(true);
});
@@ -106,11 +104,11 @@ describe('JiraTriggerFields', () => {
true,
);
- wrapper.findAll('[type=checkbox]').wrappers.forEach(checkbox => {
+ wrapper.findAll('[type=checkbox]').wrappers.forEach((checkbox) => {
expect(checkbox.attributes('disabled')).toBe('disabled');
});
- wrapper.findAll('[type=radio]').wrappers.forEach(radio => {
+ wrapper.findAll('[type=radio]').wrappers.forEach((radio) => {
expect(radio.attributes('disabled')).toBe('disabled');
});
});
diff --git a/spec/frontend/integrations/edit/components/trigger_fields_spec.js b/spec/frontend/integrations/edit/components/trigger_fields_spec.js
index df12c70f9f5..3fa1e5b5f5a 100644
--- a/spec/frontend/integrations/edit/components/trigger_fields_spec.js
+++ b/spec/frontend/integrations/edit/components/trigger_fields_spec.js
@@ -29,7 +29,7 @@ describe('TriggerFields', () => {
const findAllGlFormCheckboxes = () => wrapper.findAll(GlFormCheckbox);
const findAllGlFormInputs = () => wrapper.findAll(GlFormInput);
- describe.each([true, false])('template, isInheriting = `%p`', isInheriting => {
+ describe.each([true, false])('template, isInheriting = `%p`', (isInheriting) => {
it('renders a label with text "Trigger"', () => {
createComponent();
diff --git a/spec/frontend/invite_member/components/invite_member_modal_spec.js b/spec/frontend/invite_member/components/invite_member_modal_spec.js
index 1d0adb3ab4c..1140c2a9475 100644
--- a/spec/frontend/invite_member/components/invite_member_modal_spec.js
+++ b/spec/frontend/invite_member/components/invite_member_modal_spec.js
@@ -1,18 +1,22 @@
import { shallowMount } from '@vue/test-utils';
-import { GlLink } from '@gitlab/ui';
+import { GlLink, GlModal } from '@gitlab/ui';
import { mockTracking, unmockTracking, triggerEvent } from 'helpers/tracking_helper';
+import { stubComponent } from 'helpers/stub_component';
import InviteMemberModal from '~/invite_member/components/invite_member_modal.vue';
const memberPath = 'member_path';
+const GlEmoji = { template: '<img />' };
const createComponent = () => {
return shallowMount(InviteMemberModal, {
provide: {
membersPath: memberPath,
},
stubs: {
- 'gl-emoji': '<img/>',
- 'gl-modal': '<div><slot name="modal-title"></slot><slot></slot></div>',
+ GlEmoji,
+ GlModal: stubComponent(GlModal, {
+ template: '<div><slot name="modal-title"></slot><slot></slot></div>',
+ }),
},
});
};
diff --git a/spec/frontend/invite_members/components/invite_members_modal_spec.js b/spec/frontend/invite_members/components/invite_members_modal_spec.js
index 4ac2a28105c..fc039bdf6da 100644
--- a/spec/frontend/invite_members/components/invite_members_modal_spec.js
+++ b/spec/frontend/invite_members/components/invite_members_modal_spec.js
@@ -1,5 +1,7 @@
import { shallowMount } from '@vue/test-utils';
-import { GlDropdown, GlDropdownItem, GlDatepicker, GlSprintf, GlLink } from '@gitlab/ui';
+import { GlDropdown, GlDropdownItem, GlDatepicker, GlSprintf, GlLink, GlModal } from '@gitlab/ui';
+import { stubComponent } from 'helpers/stub_component';
+import waitForPromises from 'helpers/wait_for_promises';
import Api from '~/api';
import InviteMembersModal from '~/invite_members/components/invite_members_modal.vue';
@@ -10,6 +12,15 @@ const accessLevels = { Guest: 10, Reporter: 20, Developer: 30, Maintainer: 40, O
const defaultAccessLevel = '10';
const helpLink = 'https://example.com';
+const user1 = { id: 1, name: 'Name One', username: 'one_1', avatar_url: '' };
+const user2 = { id: 2, name: 'Name Two', username: 'one_2', avatar_url: '' };
+const user3 = {
+ id: 'user-defined-token',
+ name: 'email@example.com',
+ username: 'one_2',
+ avatar_url: '',
+};
+
const createComponent = (data = {}) => {
return shallowMount(InviteMembersModal, {
propsData: {
@@ -24,9 +35,12 @@ const createComponent = (data = {}) => {
return data;
},
stubs: {
- 'gl-modal': '<div><slot name="modal-footer"></slot><slot></slot></div>',
- 'gl-dropdown': true,
- 'gl-dropdown-item': true,
+ GlModal: stubComponent(GlModal, {
+ template:
+ '<div><slot name="modal-title"></slot><slot></slot><slot name="modal-footer"></slot></div>',
+ }),
+ GlDropdown: true,
+ GlDropdownItem: true,
GlSprintf,
},
});
@@ -46,6 +60,7 @@ describe('InviteMembersModal', () => {
const findLink = () => wrapper.find(GlLink);
const findCancelButton = () => wrapper.find({ ref: 'cancelButton' });
const findInviteButton = () => wrapper.find({ ref: 'inviteButton' });
+ const clickInviteButton = () => findInviteButton().vm.$emit('click');
describe('rendering the modal', () => {
beforeEach(() => {
@@ -53,7 +68,7 @@ describe('InviteMembersModal', () => {
});
it('renders the modal with the correct title', () => {
- expect(wrapper.attributes('title')).toBe('Invite team members');
+ expect(wrapper.find(GlModal).props('title')).toBe('Invite team members');
});
it('renders the Cancel button text correctly', () => {
@@ -88,78 +103,184 @@ describe('InviteMembersModal', () => {
});
describe('submitting the invite form', () => {
- const postData = {
- user_id: '1',
- access_level: '10',
- expires_at: new Date(),
- format: 'json',
- };
+ const apiErrorMessage = 'Member already exists';
+
+ describe('when inviting an existing user to group by user ID', () => {
+ const postData = {
+ user_id: '1',
+ access_level: '10',
+ expires_at: undefined,
+ format: 'json',
+ };
+
+ describe('when invites are sent successfully', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ newUsersToInvite: [user1] });
- describe('when the invite was sent successfully', () => {
- beforeEach(() => {
- wrapper = createComponent();
+ wrapper.vm.$toast = { show: jest.fn() };
+ jest.spyOn(Api, 'addGroupMembersByUserId').mockResolvedValue({ data: postData });
+ jest.spyOn(wrapper.vm, 'showToastMessageSuccess');
- wrapper.vm.$toast = { show: jest.fn() };
- jest.spyOn(Api, 'inviteGroupMember').mockResolvedValue({ data: postData });
+ clickInviteButton();
+ });
- wrapper.vm.submitForm(postData);
+ it('calls Api addGroupMembersByUserId with the correct params', () => {
+ expect(Api.addGroupMembersByUserId).toHaveBeenCalledWith(id, postData);
+ });
+
+ it('displays the successful toastMessage', () => {
+ expect(wrapper.vm.showToastMessageSuccess).toHaveBeenCalled();
+ });
});
- it('displays the successful toastMessage', () => {
- const toastMessageSuccessful = 'Members were successfully added';
+ describe('when the invite received an api error message', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ newUsersToInvite: [user1] });
+
+ wrapper.vm.$toast = { show: jest.fn() };
+ jest
+ .spyOn(Api, 'addGroupMembersByUserId')
+ .mockRejectedValue({ response: { data: { message: apiErrorMessage } } });
+ jest.spyOn(wrapper.vm, 'showToastMessageError');
+
+ clickInviteButton();
+ });
- expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(
- toastMessageSuccessful,
- wrapper.vm.toastOptions,
- );
+ it('displays the apiErrorMessage in the toastMessage', async () => {
+ await waitForPromises();
+
+ expect(wrapper.vm.showToastMessageError).toHaveBeenCalledWith({
+ response: { data: { message: apiErrorMessage } },
+ });
+ });
});
- it('calls Api inviteGroupMember with the correct params', () => {
- expect(Api.inviteGroupMember).toHaveBeenCalledWith(id, postData);
+ describe('when any invite failed for any other reason', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ newUsersToInvite: [user1, user2] });
+
+ wrapper.vm.$toast = { show: jest.fn() };
+ jest
+ .spyOn(Api, 'addGroupMembersByUserId')
+ .mockRejectedValue({ response: { data: { success: false } } });
+ jest.spyOn(wrapper.vm, 'showToastMessageError');
+
+ clickInviteButton();
+ });
+
+ it('displays the generic error toastMessage', async () => {
+ await waitForPromises();
+
+ expect(wrapper.vm.showToastMessageError).toHaveBeenCalled();
+ });
});
});
- describe('when sending the invite for a single member returned an api error', () => {
- const apiErrorMessage = 'Members already exists';
+ describe('when inviting a new user by email address', () => {
+ const postData = {
+ access_level: '10',
+ expires_at: undefined,
+ email: 'email@example.com',
+ format: 'json',
+ };
+
+ describe('when invites are sent successfully', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ newUsersToInvite: [user3] });
+
+ wrapper.vm.$toast = { show: jest.fn() };
+ jest.spyOn(Api, 'inviteGroupMembersByEmail').mockResolvedValue({ data: postData });
+ jest.spyOn(wrapper.vm, 'showToastMessageSuccess');
- beforeEach(() => {
- wrapper = createComponent({ newUsersToInvite: '123' });
+ clickInviteButton();
+ });
- wrapper.vm.$toast = { show: jest.fn() };
- jest
- .spyOn(Api, 'inviteGroupMember')
- .mockRejectedValue({ response: { data: { message: apiErrorMessage } } });
+ it('calls Api inviteGroupMembersByEmail with the correct params', () => {
+ expect(Api.inviteGroupMembersByEmail).toHaveBeenCalledWith(id, postData);
+ });
- findInviteButton().vm.$emit('click');
+ it('displays the successful toastMessage', () => {
+ expect(wrapper.vm.showToastMessageSuccess).toHaveBeenCalled();
+ });
});
- it('displays the api error message for the toastMessage', () => {
- expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(
- apiErrorMessage,
- wrapper.vm.toastOptions,
- );
+ describe('when any invite failed for any reason', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ newUsersToInvite: [user1, user2] });
+
+ wrapper.vm.$toast = { show: jest.fn() };
+ jest
+ .spyOn(Api, 'addGroupMembersByUserId')
+ .mockRejectedValue({ response: { data: { success: false } } });
+ jest.spyOn(wrapper.vm, 'showToastMessageError');
+
+ clickInviteButton();
+ });
+
+ it('displays the generic error toastMessage', async () => {
+ await waitForPromises();
+
+ expect(wrapper.vm.showToastMessageError).toHaveBeenCalled();
+ });
});
});
- describe('when sending the invite for multiple members returned any error', () => {
- const genericErrorMessage = 'Some of the members could not be added';
+ describe('when inviting members and non-members in same click', () => {
+ const postData = {
+ access_level: '10',
+ expires_at: undefined,
+ format: 'json',
+ };
+
+ const emailPostData = { ...postData, email: 'email@example.com' };
+ const idPostData = { ...postData, user_id: '1' };
+
+ describe('when invites are sent successfully', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ newUsersToInvite: [user1, user3] });
+
+ wrapper.vm.$toast = { show: jest.fn() };
+ jest.spyOn(Api, 'inviteGroupMembersByEmail').mockResolvedValue({ data: postData });
+ jest.spyOn(Api, 'addGroupMembersByUserId').mockResolvedValue({ data: postData });
+ jest.spyOn(wrapper.vm, 'showToastMessageSuccess');
- beforeEach(() => {
- wrapper = createComponent({ newUsersToInvite: '123' });
+ clickInviteButton();
+ });
- wrapper.vm.$toast = { show: jest.fn() };
- jest
- .spyOn(Api, 'inviteGroupMember')
- .mockRejectedValue({ response: { data: { success: false } } });
+ it('calls Api inviteGroupMembersByEmail with the correct params', () => {
+ expect(Api.inviteGroupMembersByEmail).toHaveBeenCalledWith(id, emailPostData);
+ });
- findInviteButton().vm.$emit('click');
+ it('calls Api addGroupMembersByUserId with the correct params', () => {
+ expect(Api.addGroupMembersByUserId).toHaveBeenCalledWith(id, idPostData);
+ });
+
+ it('displays the successful toastMessage', () => {
+ expect(wrapper.vm.showToastMessageSuccess).toHaveBeenCalled();
+ });
});
- it('displays the expected toastMessage', () => {
- expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(
- genericErrorMessage,
- wrapper.vm.toastOptions,
- );
+ describe('when any invite failed for any reason', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ newUsersToInvite: [user1, user3] });
+
+ wrapper.vm.$toast = { show: jest.fn() };
+
+ jest
+ .spyOn(Api, 'inviteGroupMembersByEmail')
+ .mockRejectedValue({ response: { data: { success: false } } });
+
+ jest.spyOn(Api, 'addGroupMembersByUserId').mockResolvedValue({ data: postData });
+ jest.spyOn(wrapper.vm, 'showToastMessageError');
+
+ clickInviteButton();
+ });
+
+ it('displays the generic error toastMessage', async () => {
+ await waitForPromises();
+
+ expect(wrapper.vm.showToastMessageError).toHaveBeenCalled();
+ });
});
});
});
diff --git a/spec/frontend/invite_members/components/members_token_select_spec.js b/spec/frontend/invite_members/components/members_token_select_spec.js
index 106a2df783d..ff123a13ce7 100644
--- a/spec/frontend/invite_members/components/members_token_select_spec.js
+++ b/spec/frontend/invite_members/components/members_token_select_spec.js
@@ -3,13 +3,13 @@ import { nextTick } from 'vue';
import { GlTokenSelector } from '@gitlab/ui';
import waitForPromises from 'helpers/wait_for_promises';
import { stubComponent } from 'helpers/stub_component';
-import Api from '~/api';
+import * as UserApi from '~/api/user_api';
import MembersTokenSelect from '~/invite_members/components/members_token_select.vue';
const label = 'testgroup';
const placeholder = 'Search for a member';
-const user1 = { id: 1, name: 'Name One', username: 'one_1', avatar_url: '' };
-const user2 = { id: 2, name: 'Name Two', username: 'two_2', avatar_url: '' };
+const user1 = { id: 1, name: 'John Smith', username: 'one_1', avatar_url: '' };
+const user2 = { id: 2, name: 'Jane Doe', username: 'two_2', avatar_url: '' };
const allUsers = [user1, user2];
const createComponent = () => {
@@ -28,7 +28,7 @@ describe('MembersTokenSelect', () => {
let wrapper;
beforeEach(() => {
- jest.spyOn(Api, 'users').mockResolvedValue({ data: allUsers });
+ jest.spyOn(UserApi, 'getUsers').mockResolvedValue({ data: allUsers });
wrapper = createComponent();
});
@@ -57,7 +57,7 @@ describe('MembersTokenSelect', () => {
await waitForPromises();
- expect(Api.users).not.toHaveBeenCalled();
+ expect(UserApi.getUsers).not.toHaveBeenCalled();
});
});
@@ -77,27 +77,42 @@ describe('MembersTokenSelect', () => {
});
describe('when text input is typed in', () => {
+ let tokenSelector;
+
+ beforeEach(() => {
+ tokenSelector = findTokenSelector();
+ });
+
it('calls the API with search parameter', async () => {
const searchParam = 'One';
- const tokenSelector = findTokenSelector();
tokenSelector.vm.$emit('text-input', searchParam);
await waitForPromises();
- expect(Api.users).toHaveBeenCalledWith(searchParam, wrapper.vm.$options.queryOptions);
+ expect(UserApi.getUsers).toHaveBeenCalledWith(
+ searchParam,
+ wrapper.vm.$options.queryOptions,
+ );
expect(tokenSelector.props('hideDropdownWithNoItems')).toBe(false);
});
+
+ describe('when input text is an email', () => {
+ it('allows user defined tokens', async () => {
+ tokenSelector.vm.$emit('text-input', 'foo@bar.com');
+
+ await nextTick();
+
+ expect(tokenSelector.props('allowUserDefinedTokens')).toBe(true);
+ });
+ });
});
describe('when user is selected', () => {
it('emits `input` event with selected users', () => {
- findTokenSelector().vm.$emit('input', [
- { id: 1, name: 'John Smith' },
- { id: 2, name: 'Jane Doe' },
- ]);
+ findTokenSelector().vm.$emit('input', [user1, user2]);
- expect(wrapper.emitted().input[0][0]).toBe('1,2');
+ expect(wrapper.emitted().input[0][0]).toEqual([user1, user2]);
});
});
});
diff --git a/spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js b/spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js
index 17a195df494..de2671816d8 100644
--- a/spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js
+++ b/spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js
@@ -22,13 +22,14 @@ const issuable2 = {
const pathIdSeparator = PathIdSeparator.Issue;
-const findFormInput = wrapper => wrapper.find('.js-add-issuable-form-input').element;
+const findFormInput = (wrapper) => wrapper.find('.js-add-issuable-form-input').element;
-const findRadioInput = (inputs, value) => inputs.filter(input => input.element.value === value)[0];
+const findRadioInput = (inputs, value) =>
+ inputs.filter((input) => input.element.value === value)[0];
-const findRadioInputs = wrapper => wrapper.findAll('[name="linked-issue-type-radio"]');
+const findRadioInputs = (wrapper) => wrapper.findAll('[name="linked-issue-type-radio"]');
-const constructWrapper = props => {
+const constructWrapper = (props) => {
return shallowMount(AddIssuableForm, {
propsData: {
inputValue: '',
@@ -192,7 +193,7 @@ describe('AddIssuableForm', () => {
});
describe('when the form is submitted', () => {
- it('emits an event with a "relates_to" link type when the "relates to" radio input selected', done => {
+ it('emits an event with a "relates_to" link type when the "relates to" radio input selected', (done) => {
jest.spyOn(wrapper.vm, '$emit').mockImplementation(() => {});
wrapper.vm.linkedIssueType = linkedIssueTypesMap.RELATES_TO;
@@ -207,7 +208,7 @@ describe('AddIssuableForm', () => {
});
});
- it('emits an event with a "blocks" link type when the "blocks" radio input selected', done => {
+ it('emits an event with a "blocks" link type when the "blocks" radio input selected', (done) => {
jest.spyOn(wrapper.vm, '$emit').mockImplementation(() => {});
wrapper.vm.linkedIssueType = linkedIssueTypesMap.BLOCKS;
@@ -222,7 +223,7 @@ describe('AddIssuableForm', () => {
});
});
- it('emits an event with a "is_blocked_by" link type when the "is blocked by" radio input selected', done => {
+ it('emits an event with a "is_blocked_by" link type when the "is blocked by" radio input selected', (done) => {
jest.spyOn(wrapper.vm, '$emit').mockImplementation(() => {});
wrapper.vm.linkedIssueType = linkedIssueTypesMap.IS_BLOCKED_BY;
@@ -237,7 +238,7 @@ describe('AddIssuableForm', () => {
});
});
- it('shows error message when error is present', done => {
+ it('shows error message when error is present', (done) => {
const itemAddFailureMessage = 'Something went wrong while submitting.';
wrapper.setProps({
hasError: true,
diff --git a/spec/frontend/issuable/related_issues/components/related_issues_block_spec.js b/spec/frontend/issuable/related_issues/components/related_issues_block_spec.js
index dd05f49b458..c0889b09adc 100644
--- a/spec/frontend/issuable/related_issues/components/related_issues_block_spec.js
+++ b/spec/frontend/issuable/related_issues/components/related_issues_block_spec.js
@@ -130,11 +130,8 @@ describe('RelatedIssuesBlock', () => {
describe('showCategorizedIssues prop', () => {
const issueList = () => wrapper.findAll('.js-related-issues-token-list-item');
const categorizedHeadings = () => wrapper.findAll('h4');
- const headingTextAt = index =>
- categorizedHeadings()
- .at(index)
- .text();
- const mountComponent = showCategorizedIssues => {
+ const headingTextAt = (index) => categorizedHeadings().at(index).text();
+ const mountComponent = (showCategorizedIssues) => {
wrapper = mount(RelatedIssuesBlock, {
propsData: {
pathIdSeparator: PathIdSeparator.Issue,
diff --git a/spec/frontend/issuable/related_issues/components/related_issues_list_spec.js b/spec/frontend/issuable/related_issues/components/related_issues_list_spec.js
index 39bc244297b..ffd9683cd6b 100644
--- a/spec/frontend/issuable/related_issues/components/related_issues_list_spec.js
+++ b/spec/frontend/issuable/related_issues/components/related_issues_list_spec.js
@@ -182,12 +182,7 @@ describe('RelatedIssuesList', () => {
});
it('shows due date', () => {
- expect(
- wrapper
- .find(IssueDueDate)
- .find('.board-card-info-text')
- .text(),
- ).toBe('Nov 22, 2010');
+ expect(wrapper.find(IssueDueDate).find('.board-card-info-text').text()).toBe('Nov 22, 2010');
});
});
});
diff --git a/spec/frontend/issuable/related_issues/components/related_issues_root_spec.js b/spec/frontend/issuable/related_issues/components/related_issues_root_spec.js
index 2c02e1e1de4..93fe321957c 100644
--- a/spec/frontend/issuable/related_issues/components/related_issues_root_spec.js
+++ b/spec/frontend/issuable/related_issues/components/related_issues_root_spec.js
@@ -315,9 +315,7 @@ describe('RelatedIssuesRoot', () => {
const input = 23;
wrapper.vm.onInput({
- untouchedRawReferences: String(input)
- .trim()
- .split(/\s/),
+ untouchedRawReferences: String(input).trim().split(/\s/),
touchedReference: input,
});
diff --git a/spec/frontend/issuable_list/components/issuable_item_spec.js b/spec/frontend/issuable_list/components/issuable_item_spec.js
index 3a9a0d3fd59..3c01bf2d319 100644
--- a/spec/frontend/issuable_list/components/issuable_item_spec.js
+++ b/spec/frontend/issuable_list/components/issuable_item_spec.js
@@ -252,12 +252,9 @@ describe('IssuableItem', () => {
await wrapper.vm.$nextTick();
- expect(
- wrapper
- .find('[data-testid="issuable-title"]')
- .find(GlLink)
- .attributes('target'),
- ).toBe('_blank');
+ expect(wrapper.find('[data-testid="issuable-title"]').find(GlLink).attributes('target')).toBe(
+ '_blank',
+ );
});
it('renders issuable reference', () => {
diff --git a/spec/frontend/issuable_show/components/issuable_body_spec.js b/spec/frontend/issuable_show/components/issuable_body_spec.js
index 5708eaf4a31..4ffbbad4f37 100644
--- a/spec/frontend/issuable_show/components/issuable_body_spec.js
+++ b/spec/frontend/issuable_show/components/issuable_body_spec.js
@@ -98,10 +98,7 @@ describe('IssuableBody', () => {
it('renders issuable edit info', () => {
const editedEl = wrapper.find('small');
- const sanitizedText = editedEl
- .text()
- .replace(/\n/g, ' ')
- .replace(/\s+/g, ' ');
+ const sanitizedText = editedEl.text().replace(/\n/g, ' ').replace(/\s+/g, ' ');
expect(sanitizedText).toContain('Edited');
expect(sanitizedText).toContain('ago');
@@ -138,7 +135,7 @@ describe('IssuableBody', () => {
it.each(['keydown-title', 'keydown-description'])(
'component emits `%s` event with event object and issuableMeta params via issuable-edit-form',
- async eventName => {
+ async (eventName) => {
const eventObj = {
preventDefault: jest.fn(),
stopPropagation: jest.fn(),
diff --git a/spec/frontend/issuable_show/components/issuable_edit_form_spec.js b/spec/frontend/issuable_show/components/issuable_edit_form_spec.js
index a865bdb5608..522374f2e9c 100644
--- a/spec/frontend/issuable_show/components/issuable_edit_form_spec.js
+++ b/spec/frontend/issuable_show/components/issuable_edit_form_spec.js
@@ -28,7 +28,7 @@ const createComponent = ({ propsData = issuableEditFormProps } = {}) =>
describe('IssuableEditForm', () => {
let wrapper;
- const assertEvent = eventSpy => {
+ const assertEvent = (eventSpy) => {
expect(eventSpy).toHaveBeenNthCalledWith(1, 'update.issuable', expect.any(Function));
expect(eventSpy).toHaveBeenNthCalledWith(2, 'close.form', expect.any(Function));
};
diff --git a/spec/frontend/issuable_show/components/issuable_header_spec.js b/spec/frontend/issuable_show/components/issuable_header_spec.js
index fad8ec8a891..f9c20ab04b8 100644
--- a/spec/frontend/issuable_show/components/issuable_header_spec.js
+++ b/spec/frontend/issuable_show/components/issuable_header_spec.js
@@ -24,7 +24,7 @@ const createComponent = (propsData = issuableHeaderProps) =>
describe('IssuableHeader', () => {
let wrapper;
- const findByTestId = testId => wrapper.find(`[data-testid="${testId}"]`);
+ const findByTestId = (testId) => wrapper.find(`[data-testid="${testId}"]`);
beforeEach(() => {
wrapper = createComponent();
diff --git a/spec/frontend/issuable_show/components/issuable_show_root_spec.js b/spec/frontend/issuable_show/components/issuable_show_root_spec.js
index ca0aefc1083..6cebfc150f9 100644
--- a/spec/frontend/issuable_show/components/issuable_show_root_spec.js
+++ b/spec/frontend/issuable_show/components/issuable_show_root_spec.js
@@ -121,7 +121,7 @@ describe('IssuableShowRoot', () => {
it.each(['keydown-title', 'keydown-description'])(
'component emits `%s` event with event object and issuableMeta params via issuable-body',
- eventName => {
+ (eventName) => {
const eventObj = {
preventDefault: jest.fn(),
stopPropagation: jest.fn(),
diff --git a/spec/frontend/issuable_spec.js b/spec/frontend/issuable_spec.js
index 63c1fda2fb4..6712b8bfd34 100644
--- a/spec/frontend/issuable_spec.js
+++ b/spec/frontend/issuable_spec.js
@@ -49,7 +49,7 @@ describe('Issuable', () => {
mock.restore();
});
- it('should send request to reset email token', done => {
+ it('should send request to reset email token', (done) => {
jest.spyOn(axios, 'put');
document.querySelector('.incoming-email-token-reset').click();
diff --git a/spec/frontend/issuable_suggestions/components/app_spec.js b/spec/frontend/issuable_suggestions/components/app_spec.js
index 0cb5b9c90ba..fb8ef00567c 100644
--- a/spec/frontend/issuable_suggestions/components/app_spec.js
+++ b/spec/frontend/issuable_suggestions/components/app_spec.js
@@ -85,12 +85,7 @@ describe('Issuable suggestions app component', () => {
wrapper.setData(data);
return wrapper.vm.$nextTick(() => {
- expect(
- wrapper
- .findAll('li')
- .at(0)
- .classes(),
- ).toContain('gl-mb-3');
+ expect(wrapper.findAll('li').at(0).classes()).toContain('gl-mb-3');
});
});
@@ -98,12 +93,7 @@ describe('Issuable suggestions app component', () => {
wrapper.setData(data);
return wrapper.vm.$nextTick(() => {
- expect(
- wrapper
- .findAll('li')
- .at(1)
- .classes(),
- ).not.toContain('gl-mb-3');
+ expect(wrapper.findAll('li').at(1).classes()).not.toContain('gl-mb-3');
});
});
});
diff --git a/spec/frontend/issuable_suggestions/components/item_spec.js b/spec/frontend/issuable_suggestions/components/item_spec.js
index 9912e77d5fe..d0dde256edd 100644
--- a/spec/frontend/issuable_suggestions/components/item_spec.js
+++ b/spec/frontend/issuable_suggestions/components/item_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import { GlTooltip, GlLink, GlIcon } from '@gitlab/ui';
-import { TEST_HOST } from 'jest/helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
import UserAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_image.vue';
import Suggestion from '~/issuable_suggestions/components/item.vue';
import mockData from '../mock_data';
diff --git a/spec/frontend/issuable_suggestions/mock_data.js b/spec/frontend/issuable_suggestions/mock_data.js
index 4ce35ad5196..74b569d9833 100644
--- a/spec/frontend/issuable_suggestions/mock_data.js
+++ b/spec/frontend/issuable_suggestions/mock_data.js
@@ -1,4 +1,4 @@
-import { TEST_HOST } from 'jest/helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
function getDate(daysMinus) {
const today = new Date();
diff --git a/spec/frontend/issue_show/components/app_spec.js b/spec/frontend/issue_show/components/app_spec.js
index dde4e8458d5..ec2055ca7d1 100644
--- a/spec/frontend/issue_show/components/app_spec.js
+++ b/spec/frontend/issue_show/components/app_spec.js
@@ -47,6 +47,7 @@ describe('Issuable output', () => {
provide: {
fullPath: 'gitlab-org/incidents',
iid: '19',
+ uploadMetricsFeatureAvailable: false,
},
stubs: {
HighlightBar: true,
@@ -398,8 +399,8 @@ describe('Issuable output', () => {
wrapper.vm.poll.makeRequest();
- return new Promise(resolve => {
- wrapper.vm.$watch('formState.lockedWarningVisible', value => {
+ return new Promise((resolve) => {
+ wrapper.vm.$watch('formState.lockedWarningVisible', (value) => {
if (value) {
resolve();
}
@@ -507,13 +508,6 @@ describe('Issuable output', () => {
expect(wrapper.vm.issueChanged).toBe(false);
});
- it('returns false when `initialTitleText` is null and `formState.title` is empty string', () => {
- wrapper.vm.store.formState.title = '';
- wrapper.setProps({ initialTitleText: null });
-
- expect(wrapper.vm.issueChanged).toBe(false);
- });
-
it('returns true when description is changed', () => {
wrapper.vm.store.formState.description = 'RandomText';
diff --git a/spec/frontend/issue_show/components/description_spec.js b/spec/frontend/issue_show/components/description_spec.js
index bc7511225a0..de53d98e6be 100644
--- a/spec/frontend/issue_show/components/description_spec.js
+++ b/spec/frontend/issue_show/components/description_spec.js
@@ -74,7 +74,7 @@ describe('Description component', () => {
let modal;
const recaptchaChild = vm.$children.find(
// eslint-disable-next-line no-underscore-dangle
- child => child.$options._componentTag === 'recaptcha-modal',
+ (child) => child.$options._componentTag === 'recaptcha-modal',
);
recaptchaChild.scriptSrc = '//scriptsrc';
diff --git a/spec/frontend/issue_show/components/edit_actions_spec.js b/spec/frontend/issue_show/components/edit_actions_spec.js
index 79a2bcd5eab..6a00eec4b1f 100644
--- a/spec/frontend/issue_show/components/edit_actions_spec.js
+++ b/spec/frontend/issue_show/components/edit_actions_spec.js
@@ -6,7 +6,7 @@ import Store from '~/issue_show/stores';
describe('Edit Actions components', () => {
let vm;
- beforeEach(done => {
+ beforeEach((done) => {
const Component = Vue.extend(editActions);
const store = new Store({
titleHtml: '',
@@ -34,7 +34,7 @@ describe('Edit Actions components', () => {
expect(vm.$el.querySelectorAll('[disabled]').length).toBe(0);
});
- it('does not render delete button if canUpdate is false', done => {
+ it('does not render delete button if canUpdate is false', (done) => {
vm.canDestroy = false;
Vue.nextTick(() => {
@@ -44,7 +44,7 @@ describe('Edit Actions components', () => {
});
});
- it('disables submit button when title is blank', done => {
+ it('disables submit button when title is blank', (done) => {
vm.formState.title = '';
Vue.nextTick(() => {
@@ -54,7 +54,7 @@ describe('Edit Actions components', () => {
});
});
- it('should not show delete button if showDeleteButton is false', done => {
+ it('should not show delete button if showDeleteButton is false', (done) => {
vm.showDeleteButton = false;
Vue.nextTick(() => {
@@ -70,7 +70,7 @@ describe('Edit Actions components', () => {
expect(eventHub.$emit).toHaveBeenCalledWith('update.issuable');
});
- it('disabled button after clicking save button', done => {
+ it('disabled button after clicking save button', (done) => {
vm.$el.querySelector('.btn-success').click();
Vue.nextTick(() => {
@@ -97,7 +97,7 @@ describe('Edit Actions components', () => {
expect(eventHub.$emit).toHaveBeenCalledWith('delete.issuable', { destroy_confirm: true });
});
- it('does no actions when confirm is false', done => {
+ it('does no actions when confirm is false', (done) => {
jest.spyOn(window, 'confirm').mockReturnValue(false);
vm.$el.querySelector('.btn-danger').click();
diff --git a/spec/frontend/issue_show/components/fields/description_spec.js b/spec/frontend/issue_show/components/fields/description_spec.js
index 8ea326ad1ee..96c81c419d0 100644
--- a/spec/frontend/issue_show/components/fields/description_spec.js
+++ b/spec/frontend/issue_show/components/fields/description_spec.js
@@ -8,7 +8,7 @@ describe('Description field component', () => {
let vm;
let store;
- beforeEach(done => {
+ beforeEach((done) => {
const Component = Vue.extend(descriptionField);
const el = document.createElement('div');
store = new Store({
@@ -38,7 +38,7 @@ describe('Description field component', () => {
expect(vm.$el.querySelector('.md-area textarea').value).toBe('test');
});
- it('renders markdown field with a markdown description', done => {
+ it('renders markdown field with a markdown description', (done) => {
store.formState.description = '**test**';
Vue.nextTick(() => {
diff --git a/spec/frontend/issue_show/components/form_spec.js b/spec/frontend/issue_show/components/form_spec.js
index b06a3a89d3b..4e123f606f6 100644
--- a/spec/frontend/issue_show/components/form_spec.js
+++ b/spec/frontend/issue_show/components/form_spec.js
@@ -26,7 +26,7 @@ describe('Inline edit form component', () => {
vm.$destroy();
});
- const createComponent = props => {
+ const createComponent = (props) => {
const Component = Vue.extend(formComponent);
vm = mountComponent(Component, {
diff --git a/spec/frontend/issue_show/components/header_actions_spec.js b/spec/frontend/issue_show/components/header_actions_spec.js
index b9836ae7240..4df62ec8717 100644
--- a/spec/frontend/issue_show/components/header_actions_spec.js
+++ b/spec/frontend/issue_show/components/header_actions_spec.js
@@ -62,7 +62,7 @@ describe('HeaderActions component', () => {
const findToggleIssueStateButton = () => wrapper.find(GlButton);
- const findDropdownAt = index => wrapper.findAll(GlDropdown).at(index);
+ const findDropdownAt = (index) => wrapper.findAll(GlDropdown).at(index);
const findMobileDropdownItems = () => findDropdownAt(0).findAll(GlDropdownItem);
@@ -70,10 +70,7 @@ describe('HeaderActions component', () => {
const findModal = () => wrapper.find(GlModal);
- const findModalLinkAt = index =>
- findModal()
- .findAll(GlLink)
- .at(index);
+ const findModalLinkAt = (index) => findModal().findAll(GlLink).at(index);
const mountComponent = ({
props = {},
@@ -208,7 +205,7 @@ describe('HeaderActions component', () => {
it(`${isItemVisible ? 'shows' : 'hides'} "${itemText}" item`, () => {
expect(
findDropdownItems()
- .filter(item => item.text() === itemText)
+ .filter((item) => item.text() === itemText)
.exists(),
).toBe(isItemVisible);
});
diff --git a/spec/frontend/issue_show/components/incidents/highlight_bar_spec.js b/spec/frontend/issue_show/components/incidents/highlight_bar_spec.js
index c1ab4433761..112cb4d4c3a 100644
--- a/spec/frontend/issue_show/components/incidents/highlight_bar_spec.js
+++ b/spec/frontend/issue_show/components/incidents/highlight_bar_spec.js
@@ -17,7 +17,7 @@ describe('Highlight Bar', () => {
title: 'Alert 1',
};
- const mountComponent = options => {
+ const mountComponent = (options) => {
wrapper = shallowMount(
HighlightBar,
merge(
diff --git a/spec/frontend/issue_show/components/incidents/incident_tabs_spec.js b/spec/frontend/issue_show/components/incidents/incident_tabs_spec.js
index c6200fd69bf..416870d1408 100644
--- a/spec/frontend/issue_show/components/incidents/incident_tabs_spec.js
+++ b/spec/frontend/issue_show/components/incidents/incident_tabs_spec.js
@@ -1,5 +1,7 @@
import { shallowMount } from '@vue/test-utils';
+import merge from 'lodash/merge';
import { GlTab } from '@gitlab/ui';
+import waitForPromises from 'helpers/wait_for_promises';
import INVALID_URL from '~/lib/utils/invalid_url';
import IncidentTabs from '~/issue_show/components/incidents/incident_tabs.vue';
import { descriptionProps } from '../../mock_data';
@@ -18,36 +20,45 @@ const mockAlert = {
describe('Incident Tabs component', () => {
let wrapper;
- const mountComponent = (data = {}) => {
- wrapper = shallowMount(IncidentTabs, {
- propsData: {
- ...descriptionProps,
- },
- stubs: {
- DescriptionComponent: true,
- },
- provide: {
- fullPath: '',
- iid: '',
- },
- data() {
- return { alert: mockAlert, ...data };
- },
- mocks: {
- $apollo: {
- queries: {
- alert: {
- loading: true,
+ const mountComponent = (data = {}, options = {}) => {
+ wrapper = shallowMount(
+ IncidentTabs,
+ merge(
+ {
+ propsData: {
+ ...descriptionProps,
+ },
+ stubs: {
+ DescriptionComponent: true,
+ MetricsTab: true,
+ },
+ provide: {
+ fullPath: '',
+ iid: '',
+ uploadMetricsFeatureAvailable: true,
+ },
+ data() {
+ return { alert: mockAlert, ...data };
+ },
+ mocks: {
+ $apollo: {
+ queries: {
+ alert: {
+ loading: true,
+ },
+ },
},
},
},
- },
- });
+ options,
+ ),
+ );
};
const findTabs = () => wrapper.findAll(GlTab);
const findSummaryTab = () => findTabs().at(0);
- const findAlertDetailsTab = () => findTabs().at(1);
+ const findMetricsTab = () => wrapper.find('[data-testid="metrics-tab"]');
+ const findAlertDetailsTab = () => wrapper.find('[data-testid="alert-details-tab"]');
const findAlertDetailsComponent = () => wrapper.find(AlertDetailsTable);
const findDescriptionComponent = () => wrapper.find(DescriptionComponent);
const findHighlightBarComponent = () => wrapper.find(HighlightBar);
@@ -100,6 +111,24 @@ describe('Incident Tabs component', () => {
});
});
+ describe('upload metrics feature available', () => {
+ it('shows the metric tab when metrics are available', async () => {
+ mountComponent({}, { provide: { uploadMetricsFeatureAvailable: true } });
+
+ await waitForPromises();
+
+ expect(findMetricsTab().exists()).toBe(true);
+ });
+
+ it('hides the tab when metrics are not available', async () => {
+ mountComponent({}, { provide: { uploadMetricsFeatureAvailable: false } });
+
+ await waitForPromises();
+
+ expect(findMetricsTab().exists()).toBe(false);
+ });
+ });
+
describe('Snowplow tracking', () => {
beforeEach(() => {
jest.spyOn(Tracking, 'event');
diff --git a/spec/frontend/issue_show/components/pinned_links_spec.js b/spec/frontend/issue_show/components/pinned_links_spec.js
index bb67feee601..2d140fd068a 100644
--- a/spec/frontend/issue_show/components/pinned_links_spec.js
+++ b/spec/frontend/issue_show/components/pinned_links_spec.js
@@ -11,7 +11,7 @@ describe('PinnedLinks', () => {
const findButtons = () => wrapper.findAll(GlButton);
- const createComponent = props => {
+ const createComponent = (props) => {
wrapper = shallowMount(PinnedLinks, {
propsData: {
zoomMeetingUrl: '',
@@ -26,11 +26,7 @@ describe('PinnedLinks', () => {
zoomMeetingUrl: `<a href="${plainZoomUrl}">Zoom</a>`,
});
- expect(
- findButtons()
- .at(0)
- .text(),
- ).toBe(JOIN_ZOOM_MEETING);
+ expect(findButtons().at(0).text()).toBe(JOIN_ZOOM_MEETING);
});
it('displays Status link', () => {
@@ -38,11 +34,7 @@ describe('PinnedLinks', () => {
publishedIncidentUrl: `<a href="${plainStatusUrl}">Status</a>`,
});
- expect(
- findButtons()
- .at(0)
- .text(),
- ).toBe(STATUS_PAGE_PUBLISHED);
+ expect(findButtons().at(0).text()).toBe(STATUS_PAGE_PUBLISHED);
});
it('does not render if there are no links', () => {
diff --git a/spec/frontend/issue_show/issue_spec.js b/spec/frontend/issue_show/issue_spec.js
index cee9969d26a..818f501882b 100644
--- a/spec/frontend/issue_show/issue_spec.js
+++ b/spec/frontend/issue_show/issue_spec.js
@@ -14,7 +14,7 @@ useMockIntersectionObserver();
jest.mock('~/lib/utils/poll');
-const setupHTML = initialData => {
+const setupHTML = (initialData) => {
document.body.innerHTML = `<div id="js-issuable-app"></div>`;
document.getElementById('js-issuable-app').dataset.initial = JSON.stringify(initialData);
};
diff --git a/spec/frontend/issue_spec.js b/spec/frontend/issue_spec.js
index 00595736821..303267e784f 100644
--- a/spec/frontend/issue_spec.js
+++ b/spec/frontend/issue_spec.js
@@ -41,7 +41,7 @@ describe('Issue', () => {
expect($boxOpen).toHaveText('Open');
}
- [true, false].forEach(isIssueInitiallyOpen => {
+ [true, false].forEach((isIssueInitiallyOpen) => {
describe(`with ${isIssueInitiallyOpen ? 'open' : 'closed'} issue`, () => {
const action = isIssueInitiallyOpen ? 'close' : 'reopen';
let mock;
diff --git a/spec/frontend/issues_list/components/issuable_spec.js b/spec/frontend/issues_list/components/issuable_spec.js
index 6e584152551..b47a84ad7f6 100644
--- a/spec/frontend/issues_list/components/issuable_spec.js
+++ b/spec/frontend/issues_list/components/issuable_spec.js
@@ -72,9 +72,9 @@ describe('Issuable component', () => {
window.Date = DateOrig;
});
- const checkExists = findFn => () => findFn().exists();
+ const checkExists = (findFn) => () => findFn().exists();
const hasIcon = (iconName, iconWrapper = wrapper) =>
- iconWrapper.findAll(GlIcon).wrappers.some(icon => icon.props('name') === iconName);
+ iconWrapper.findAll(GlIcon).wrappers.some((icon) => icon.props('name') === iconName);
const hasConfidentialIcon = () => hasIcon('eye-slash');
const findTaskStatus = () => wrapper.find('.task-status');
const findOpenedAgoContainer = () => wrapper.find('[data-testid="openedByMessage"]');
@@ -91,8 +91,8 @@ describe('Issuable component', () => {
const findDownvotes = () => wrapper.find('[data-testid="downvotes"]');
const findNotes = () => wrapper.find('[data-testid="notes-count"]');
const findBulkCheckbox = () => wrapper.find('input.selected-issuable');
- const findScopedLabels = () => findLabels().filter(w => isScopedLabel({ title: w.text() }));
- const findUnscopedLabels = () => findLabels().filter(w => !isScopedLabel({ title: w.text() }));
+ const findScopedLabels = () => findLabels().filter((w) => isScopedLabel({ title: w.text() }));
+ const findUnscopedLabels = () => findLabels().filter((w) => !isScopedLabel({ title: w.text() }));
const findIssuableTitle = () => wrapper.find('[data-testid="issuable-title"]');
const findIssuableStatus = () => wrapper.find('[data-testid="issuable-status"]');
const containsJiraLogo = () => wrapper.find('[data-testid="jira-logo"]').exists();
@@ -336,13 +336,13 @@ describe('Issuable component', () => {
it('renders labels', () => {
factory({ issuable });
- const labels = findLabels().wrappers.map(label => ({
+ const labels = findLabels().wrappers.map((label) => ({
href: label.props('target'),
text: label.text(),
tooltip: label.attributes('description'),
}));
- const expected = testLabels.map(label => ({
+ const expected = testLabels.map((label) => ({
href: mergeUrlParams({ 'label_name[]': label.name }, TEST_BASE_URL),
text: label.name,
tooltip: label.description,
@@ -363,13 +363,13 @@ describe('Issuable component', () => {
it('renders labels', () => {
factory({ issuable });
- const labels = findLabels().wrappers.map(label => ({
+ const labels = findLabels().wrappers.map((label) => ({
href: label.props('target'),
text: label.text(),
tooltip: label.attributes('description'),
}));
- const expected = testLabels.map(label => ({
+ const expected = testLabels.map((label) => ({
href: mergeUrlParams({ 'labels[]': label.name }, TEST_BASE_URL),
text: label.name,
tooltip: label.description,
diff --git a/spec/frontend/issues_list/components/issuables_list_app_spec.js b/spec/frontend/issues_list/components/issuables_list_app_spec.js
index 1f80b4fc54a..9d603099ff7 100644
--- a/spec/frontend/issues_list/components/issuables_list_app_spec.js
+++ b/spec/frontend/issues_list/components/issuables_list_app_spec.js
@@ -27,7 +27,7 @@ const TEST_ENDPOINT = '/issues';
const TEST_CREATE_ISSUES_PATH = '/createIssue';
const TEST_SVG_PATH = '/emptySvg';
-const setUrl = query => {
+const setUrl = (query) => {
window.location.href = `${TEST_LOCATION}${query}`;
window.location.search = query;
};
@@ -45,10 +45,10 @@ describe('Issuables list component', () => {
let wrapper;
let apiSpy;
- const setupApiMock = cb => {
+ const setupApiMock = (cb) => {
apiSpy = jest.fn(cb);
- mockAxios.onGet(TEST_ENDPOINT).reply(cfg => apiSpy(cfg));
+ mockAxios.onGet(TEST_ENDPOINT).reply((cfg) => apiSpy(cfg));
};
const factory = (props = { sortKey: 'priority' }) => {
@@ -238,15 +238,15 @@ describe('Issuables list component', () => {
wrapper.vm.onSelectIssuable({ issuable: i0, selected: false });
expect(wrapper.vm.selection).toEqual({});
wrapper.vm.onSelectIssuable({ issuable: i1, selected: true });
- expect(wrapper.vm.selection).toEqual({ '1': true });
+ expect(wrapper.vm.selection).toEqual({ 1: true });
wrapper.vm.onSelectIssuable({ issuable: i0, selected: true });
- expect(wrapper.vm.selection).toEqual({ '1': true, '0': true });
+ expect(wrapper.vm.selection).toEqual({ 1: true, 0: true });
wrapper.vm.onSelectIssuable({ issuable: i2, selected: true });
- expect(wrapper.vm.selection).toEqual({ '1': true, '0': true, '2': true });
+ expect(wrapper.vm.selection).toEqual({ 1: true, 0: true, 2: true });
wrapper.vm.onSelectIssuable({ issuable: i2, selected: true });
- expect(wrapper.vm.selection).toEqual({ '1': true, '0': true, '2': true });
+ expect(wrapper.vm.selection).toEqual({ 1: true, 0: true, 2: true });
wrapper.vm.onSelectIssuable({ issuable: i0, selected: false });
- expect(wrapper.vm.selection).toEqual({ '1': true, '2': true });
+ expect(wrapper.vm.selection).toEqual({ 1: true, 2: true });
});
it('broadcasts a message to the bulk edit sidebar when a value is added to selection', () => {
diff --git a/spec/frontend/jira_connect/api_spec.js b/spec/frontend/jira_connect/api_spec.js
new file mode 100644
index 00000000000..8fecbee9ca7
--- /dev/null
+++ b/spec/frontend/jira_connect/api_spec.js
@@ -0,0 +1,107 @@
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import httpStatus from '~/lib/utils/http_status';
+
+import { addSubscription, removeSubscription, fetchGroups } from '~/jira_connect/api';
+
+describe('JiraConnect API', () => {
+ let mock;
+ let response;
+
+ const mockAddPath = 'addPath';
+ const mockRemovePath = 'removePath';
+ const mockNamespace = 'namespace';
+ const mockJwt = 'jwt';
+ const mockResponse = { success: true };
+
+ const tokenSpy = jest.fn().mockReturnValue(mockJwt);
+
+ window.AP = {
+ context: {
+ getToken: tokenSpy,
+ },
+ };
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ response = null;
+ });
+
+ describe('addSubscription', () => {
+ const makeRequest = () => addSubscription(mockAddPath, mockNamespace);
+
+ it('returns success response', async () => {
+ jest.spyOn(axios, 'post');
+ mock
+ .onPost(mockAddPath, {
+ jwt: mockJwt,
+ namespace_path: mockNamespace,
+ })
+ .replyOnce(httpStatus.OK, mockResponse);
+
+ response = await makeRequest();
+
+ expect(tokenSpy).toHaveBeenCalled();
+ expect(axios.post).toHaveBeenCalledWith(mockAddPath, {
+ jwt: mockJwt,
+ namespace_path: mockNamespace,
+ });
+ expect(response.data).toEqual(mockResponse);
+ });
+ });
+
+ describe('removeSubscription', () => {
+ const makeRequest = () => removeSubscription(mockRemovePath);
+
+ it('returns success response', async () => {
+ jest.spyOn(axios, 'delete');
+ mock.onDelete(mockRemovePath).replyOnce(httpStatus.OK, mockResponse);
+
+ response = await makeRequest();
+
+ expect(tokenSpy).toHaveBeenCalled();
+ expect(axios.delete).toHaveBeenCalledWith(mockRemovePath, {
+ params: {
+ jwt: mockJwt,
+ },
+ });
+ expect(response.data).toEqual(mockResponse);
+ });
+ });
+
+ describe('fetchGroups', () => {
+ const mockGroupsPath = 'groupsPath';
+ const mockPage = 1;
+ const mockPerPage = 10;
+
+ const makeRequest = () =>
+ fetchGroups(mockGroupsPath, {
+ page: mockPage,
+ perPage: mockPerPage,
+ });
+
+ it('returns success response', async () => {
+ jest.spyOn(axios, 'get');
+ mock
+ .onGet(mockGroupsPath, {
+ page: mockPage,
+ per_page: mockPerPage,
+ })
+ .replyOnce(httpStatus.OK, mockResponse);
+
+ response = await makeRequest();
+
+ expect(axios.get).toHaveBeenCalledWith(mockGroupsPath, {
+ params: {
+ page: mockPage,
+ per_page: mockPerPage,
+ },
+ });
+ expect(response.data).toEqual(mockResponse);
+ });
+ });
+});
diff --git a/spec/frontend/jira_connect/components/app_spec.js b/spec/frontend/jira_connect/components/app_spec.js
new file mode 100644
index 00000000000..be990d5061c
--- /dev/null
+++ b/spec/frontend/jira_connect/components/app_spec.js
@@ -0,0 +1,80 @@
+import Vue from 'vue';
+import Vuex from 'vuex';
+import { shallowMount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { GlAlert } from '@gitlab/ui';
+import JiraConnectApp from '~/jira_connect/components/app.vue';
+import createStore from '~/jira_connect/store';
+import { SET_ERROR_MESSAGE } from '~/jira_connect/store/mutation_types';
+
+Vue.use(Vuex);
+
+describe('JiraConnectApp', () => {
+ let wrapper;
+ let store;
+
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findHeader = () => wrapper.findByTestId('new-jira-connect-ui-heading');
+ const findHeaderText = () => findHeader().text();
+
+ const createComponent = (options = {}) => {
+ store = createStore();
+
+ wrapper = extendedWrapper(
+ shallowMount(JiraConnectApp, {
+ store,
+ provide: {
+ glFeatures: { newJiraConnectUi: true },
+ },
+ ...options,
+ }),
+ );
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('template', () => {
+ it('renders new UI', () => {
+ createComponent();
+
+ expect(findHeader().exists()).toBe(true);
+ expect(findHeaderText()).toBe('Linked namespaces');
+ });
+
+ describe('newJiraConnectUi is false', () => {
+ it('does not render new UI', () => {
+ createComponent({
+ provide: {
+ glFeatures: { newJiraConnectUi: false },
+ },
+ });
+
+ expect(findHeader().exists()).toBe(false);
+ });
+ });
+
+ it.each`
+ errorMessage | errorShouldRender
+ ${'Test error'} | ${true}
+ ${''} | ${false}
+ ${undefined} | ${false}
+ `(
+ 'renders correct alert when errorMessage is `$errorMessage`',
+ async ({ errorMessage, errorShouldRender }) => {
+ createComponent();
+
+ store.commit(SET_ERROR_MESSAGE, errorMessage);
+ await wrapper.vm.$nextTick();
+
+ expect(findAlert().exists()).toBe(errorShouldRender);
+ if (errorShouldRender) {
+ expect(findAlert().isVisible()).toBe(errorShouldRender);
+ expect(findAlert().html()).toContain(errorMessage);
+ }
+ },
+ );
+ });
+});
diff --git a/spec/frontend/jira_connect/components/groups_list_item_spec.js b/spec/frontend/jira_connect/components/groups_list_item_spec.js
new file mode 100644
index 00000000000..77577c53cf4
--- /dev/null
+++ b/spec/frontend/jira_connect/components/groups_list_item_spec.js
@@ -0,0 +1,46 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlAvatar } from '@gitlab/ui';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { mockGroup1 } from '../mock_data';
+
+import GroupsListItem from '~/jira_connect/components/groups_list_item.vue';
+
+describe('GroupsListItem', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = extendedWrapper(
+ shallowMount(GroupsListItem, {
+ propsData: {
+ group: mockGroup1,
+ },
+ }),
+ );
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findGlAvatar = () => wrapper.find(GlAvatar);
+ const findGroupName = () => wrapper.findByTestId('group-list-item-name');
+ const findGroupDescription = () => wrapper.findByTestId('group-list-item-description');
+
+ it('renders group avatar', () => {
+ expect(findGlAvatar().exists()).toBe(true);
+ expect(findGlAvatar().props('src')).toBe(mockGroup1.avatar_url);
+ });
+
+ it('renders group name', () => {
+ expect(findGroupName().text()).toBe(mockGroup1.full_name);
+ });
+
+ it('renders group description', () => {
+ expect(findGroupDescription().text()).toBe(mockGroup1.description);
+ });
+});
diff --git a/spec/frontend/jira_connect/components/groups_list_spec.js b/spec/frontend/jira_connect/components/groups_list_spec.js
new file mode 100644
index 00000000000..94f158e6344
--- /dev/null
+++ b/spec/frontend/jira_connect/components/groups_list_spec.js
@@ -0,0 +1,71 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlLoadingIcon } from '@gitlab/ui';
+import waitForPromises from 'helpers/wait_for_promises';
+
+import { fetchGroups } from '~/jira_connect/api';
+import GroupsList from '~/jira_connect/components/groups_list.vue';
+import GroupsListItem from '~/jira_connect/components/groups_list_item.vue';
+import { mockGroup1, mockGroup2 } from '../mock_data';
+
+jest.mock('~/jira_connect/api', () => {
+ return {
+ fetchGroups: jest.fn(),
+ };
+});
+describe('GroupsList', () => {
+ let wrapper;
+
+ const mockEmptyResponse = { data: [] };
+
+ const createComponent = (options = {}) => {
+ wrapper = shallowMount(GroupsList, {
+ ...options,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findGlLoadingIcon = () => wrapper.find(GlLoadingIcon);
+ const findAllItems = () => wrapper.findAll(GroupsListItem);
+ const findFirstItem = () => findAllItems().at(0);
+ const findSecondItem = () => findAllItems().at(1);
+
+ describe('isLoading is true', () => {
+ it('renders loading icon', async () => {
+ fetchGroups.mockResolvedValue(mockEmptyResponse);
+ createComponent();
+
+ wrapper.setData({ isLoading: true });
+ await wrapper.vm.$nextTick();
+
+ expect(findGlLoadingIcon().exists()).toBe(true);
+ });
+ });
+
+ describe('no groups returned', () => {
+ it('renders empty state', async () => {
+ fetchGroups.mockResolvedValue(mockEmptyResponse);
+ createComponent();
+
+ await waitForPromises();
+
+ expect(wrapper.text()).toContain('No available namespaces');
+ });
+ });
+
+ describe('with groups returned', () => {
+ it('renders groups list', async () => {
+ fetchGroups.mockResolvedValue({ data: [mockGroup1, mockGroup2] });
+ createComponent();
+
+ await waitForPromises();
+
+ expect(findAllItems().length).toBe(2);
+ expect(findFirstItem().props('group')).toBe(mockGroup1);
+ expect(findSecondItem().props('group')).toBe(mockGroup2);
+ });
+ });
+});
diff --git a/spec/frontend/jira_connect/mock_data.js b/spec/frontend/jira_connect/mock_data.js
new file mode 100644
index 00000000000..31565912489
--- /dev/null
+++ b/spec/frontend/jira_connect/mock_data.js
@@ -0,0 +1,15 @@
+export const mockGroup1 = {
+ id: 1,
+ avatar_url: 'avatar.png',
+ name: 'Gitlab Org',
+ full_name: 'Gitlab Org',
+ description: 'Open source software to collaborate on code',
+};
+
+export const mockGroup2 = {
+ id: 2,
+ avatar_url: 'avatar.png',
+ name: 'Gitlab Com',
+ full_name: 'Gitlab Com',
+ description: 'For GitLab company related projects',
+};
diff --git a/spec/frontend/jira_connect/store/mutations_spec.js b/spec/frontend/jira_connect/store/mutations_spec.js
new file mode 100644
index 00000000000..d1f9d22b3de
--- /dev/null
+++ b/spec/frontend/jira_connect/store/mutations_spec.js
@@ -0,0 +1,18 @@
+import mutations from '~/jira_connect/store/mutations';
+import state from '~/jira_connect/store/state';
+
+describe('JiraConnect store mutations', () => {
+ let localState;
+
+ beforeEach(() => {
+ localState = state();
+ });
+
+ describe('SET_ERROR_MESSAGE', () => {
+ it('sets error message', () => {
+ mutations.SET_ERROR_MESSAGE(localState, 'test error');
+
+ expect(localState.errorMessage).toBe('test error');
+ });
+ });
+});
diff --git a/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap b/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap
index c40b7c90c72..bea27c8877d 100644
--- a/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap
+++ b/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap
@@ -114,49 +114,59 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
role="menu"
tabindex="-1"
>
- <!---->
-
<div
- class="gl-search-box-by-type"
+ class="gl-new-dropdown-inner"
>
- <svg
- aria-hidden="true"
- class="gl-search-box-by-type-search-icon gl-icon s16"
- data-testid="search-icon"
- >
- <use
- href="#search"
- />
- </svg>
-
- <input
- aria-label="Search"
- class="gl-form-input gl-search-box-by-type-input form-control"
- placeholder="Search"
- type="text"
- />
+ <!---->
<div
- class="gl-search-box-by-type-right-icons"
- >
- <!---->
-
- <!---->
- </div>
- </div>
-
- <li
- class="gl-new-dropdown-text text-secondary"
- role="presentation"
- >
- <p
- class="b-dropdown-text"
+ class="gl-new-dropdown-contents"
>
-
+ <div
+ class="gl-search-box-by-type"
+ >
+ <svg
+ aria-hidden="true"
+ class="gl-search-box-by-type-search-icon gl-icon s16"
+ data-testid="search-icon"
+ >
+ <use
+ href="#search"
+ />
+ </svg>
+
+ <input
+ aria-label="Search"
+ class="gl-form-input gl-search-box-by-type-input form-control"
+ placeholder="Search"
+ type="text"
+ />
+
+ <div
+ class="gl-search-box-by-type-right-icons"
+ >
+ <!---->
+
+ <!---->
+ </div>
+ </div>
+
+ <li
+ class="gl-new-dropdown-text text-secondary"
+ role="presentation"
+ >
+ <p
+ class="b-dropdown-text"
+ >
+
No matches found
- </p>
- </li>
+ </p>
+ </li>
+ </div>
+
+ <!---->
+ </div>
</ul>
</div>
@@ -229,49 +239,59 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
role="menu"
tabindex="-1"
>
- <!---->
-
<div
- class="gl-search-box-by-type"
+ class="gl-new-dropdown-inner"
>
- <svg
- aria-hidden="true"
- class="gl-search-box-by-type-search-icon gl-icon s16"
- data-testid="search-icon"
- >
- <use
- href="#search"
- />
- </svg>
-
- <input
- aria-label="Search"
- class="gl-form-input gl-search-box-by-type-input form-control"
- placeholder="Search"
- type="text"
- />
+ <!---->
<div
- class="gl-search-box-by-type-right-icons"
- >
- <!---->
-
- <!---->
- </div>
- </div>
-
- <li
- class="gl-new-dropdown-text text-secondary"
- role="presentation"
- >
- <p
- class="b-dropdown-text"
+ class="gl-new-dropdown-contents"
>
-
+ <div
+ class="gl-search-box-by-type"
+ >
+ <svg
+ aria-hidden="true"
+ class="gl-search-box-by-type-search-icon gl-icon s16"
+ data-testid="search-icon"
+ >
+ <use
+ href="#search"
+ />
+ </svg>
+
+ <input
+ aria-label="Search"
+ class="gl-form-input gl-search-box-by-type-input form-control"
+ placeholder="Search"
+ type="text"
+ />
+
+ <div
+ class="gl-search-box-by-type-right-icons"
+ >
+ <!---->
+
+ <!---->
+ </div>
+ </div>
+
+ <li
+ class="gl-new-dropdown-text text-secondary"
+ role="presentation"
+ >
+ <p
+ class="b-dropdown-text"
+ >
+
No matches found
- </p>
- </li>
+ </p>
+ </li>
+ </div>
+
+ <!---->
+ </div>
</ul>
</div>
diff --git a/spec/frontend/jira_import/components/jira_import_form_spec.js b/spec/frontend/jira_import/components/jira_import_form_spec.js
index d184c054b8a..00fb8f5435e 100644
--- a/spec/frontend/jira_import/components/jira_import_form_spec.js
+++ b/spec/frontend/jira_import/components/jira_import_form_spec.js
@@ -37,7 +37,7 @@ describe('JiraImportForm', () => {
const getUserDropdown = () => getTable().find(GlDropdown);
- const getHeader = name => getByRole(wrapper.element, 'columnheader', { name });
+ const getHeader = (name) => getByRole(wrapper.element, 'columnheader', { name });
const findLoadMoreUsersButton = () => wrapper.find('[data-testid="load-more-users-button"]');
diff --git a/spec/frontend/jira_import/components/jira_import_progress_spec.js b/spec/frontend/jira_import/components/jira_import_progress_spec.js
index ed7e1824fa3..b56230e94fc 100644
--- a/spec/frontend/jira_import/components/jira_import_progress_spec.js
+++ b/spec/frontend/jira_import/components/jira_import_progress_spec.js
@@ -8,7 +8,7 @@ describe('JiraImportProgress', () => {
const importProject = 'JIRAPROJECT';
- const getGlEmptyStateProp = attribute => wrapper.find(GlEmptyState).props(attribute);
+ const getGlEmptyStateProp = (attribute) => wrapper.find(GlEmptyState).props(attribute);
const getParagraphText = () => wrapper.find('p').text();
diff --git a/spec/frontend/jira_import/components/jira_import_setup_spec.js b/spec/frontend/jira_import/components/jira_import_setup_spec.js
index 7c84d4a166a..320e270b493 100644
--- a/spec/frontend/jira_import/components/jira_import_setup_spec.js
+++ b/spec/frontend/jira_import/components/jira_import_setup_spec.js
@@ -6,7 +6,7 @@ import { illustration, jiraIntegrationPath } from '../mock_data';
describe('JiraImportSetup', () => {
let wrapper;
- const getGlEmptyStateProp = attribute => wrapper.find(GlEmptyState).props(attribute);
+ const getGlEmptyStateProp = (attribute) => wrapper.find(GlEmptyState).props(attribute);
beforeEach(() => {
wrapper = shallowMount(JiraImportSetup, {
diff --git a/spec/frontend/jobs/components/artifacts_block_spec.js b/spec/frontend/jobs/components/artifacts_block_spec.js
index a709a59cadd..134463c6763 100644
--- a/spec/frontend/jobs/components/artifacts_block_spec.js
+++ b/spec/frontend/jobs/components/artifacts_block_spec.js
@@ -1,12 +1,12 @@
import { mount } from '@vue/test-utils';
+import { trimText } from 'helpers/text_helper';
import { getTimeago } from '~/lib/utils/datetime_utility';
import ArtifactsBlock from '~/jobs/components/artifacts_block.vue';
-import { trimText } from '../../helpers/text_helper';
describe('Artifacts block', () => {
let wrapper;
- const createWrapper = propsData =>
+ const createWrapper = (propsData) =>
mount(ArtifactsBlock, {
propsData: {
helpUrl: 'help-url',
diff --git a/spec/frontend/jobs/components/commit_block_spec.js b/spec/frontend/jobs/components/commit_block_spec.js
index 4e2d0053831..13261317b48 100644
--- a/spec/frontend/jobs/components/commit_block_spec.js
+++ b/spec/frontend/jobs/components/commit_block_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
+import mountComponent from 'helpers/vue_mount_component_helper';
import component from '~/jobs/components/commit_block.vue';
-import mountComponent from '../../helpers/vue_mount_component_helper';
describe('Commit block', () => {
const Component = Vue.extend(component);
diff --git a/spec/frontend/jobs/components/empty_state_spec.js b/spec/frontend/jobs/components/empty_state_spec.js
index 29d0c4e07aa..c9de110ce06 100644
--- a/spec/frontend/jobs/components/empty_state_spec.js
+++ b/spec/frontend/jobs/components/empty_state_spec.js
@@ -12,7 +12,7 @@ describe('Empty State', () => {
variablesSettingsUrl: '',
};
- const createWrapper = props => {
+ const createWrapper = (props) => {
wrapper = mount(EmptyState, {
propsData: {
...defaultProps,
@@ -46,11 +46,7 @@ describe('Empty State', () => {
});
it('renders provided title', () => {
- expect(
- findTitle()
- .text()
- .trim(),
- ).toBe(defaultProps.title);
+ expect(findTitle().text().trim()).toBe(defaultProps.title);
});
});
@@ -60,11 +56,7 @@ describe('Empty State', () => {
});
it('renders content', () => {
- expect(
- findContent()
- .text()
- .trim(),
- ).toBe(content);
+ expect(findContent().text().trim()).toBe(content);
});
});
diff --git a/spec/frontend/jobs/components/erased_block_spec.js b/spec/frontend/jobs/components/erased_block_spec.js
index d66ee71df6a..b3e1d28eb16 100644
--- a/spec/frontend/jobs/components/erased_block_spec.js
+++ b/spec/frontend/jobs/components/erased_block_spec.js
@@ -10,7 +10,7 @@ describe('Erased block', () => {
const timeago = getTimeago();
const formattedDate = timeago.format(erasedAt);
- const createComponent = props => {
+ const createComponent = (props) => {
wrapper = mount(ErasedBlock, {
propsData: props,
});
diff --git a/spec/frontend/jobs/components/job_app_spec.js b/spec/frontend/jobs/components/job_app_spec.js
index 94653d4d4c7..657687b5e2a 100644
--- a/spec/frontend/jobs/components/job_app_spec.js
+++ b/spec/frontend/jobs/components/job_app_spec.js
@@ -3,7 +3,7 @@ import { mount, createLocalVue } from '@vue/test-utils';
import { GlLoadingIcon } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import { getJSONFixture } from 'helpers/fixtures';
-import { TEST_HOST } from 'jest/helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
import axios from '~/lib/utils/axios_utils';
import JobApp from '~/jobs/components/job_app.vue';
import Sidebar from '~/jobs/components/sidebar.vue';
@@ -133,13 +133,9 @@ describe('Job App', () => {
});
it('should render provided job information', () => {
- expect(
- wrapper
- .find('.header-main-content')
- .text()
- .replace(/\s+/g, ' ')
- .trim(),
- ).toContain('passed Job #4757 triggered 1 year ago by Root');
+ expect(wrapper.find('.header-main-content').text().replace(/\s+/g, ' ').trim()).toContain(
+ 'passed Job #4757 triggered 1 year ago by Root',
+ );
});
it('should render new issue link', () => {
@@ -151,11 +147,7 @@ describe('Job App', () => {
it('should render created key', () =>
setupAndMount().then(() => {
expect(
- wrapper
- .find('.header-main-content')
- .text()
- .replace(/\s+/g, ' ')
- .trim(),
+ wrapper.find('.header-main-content').text().replace(/\s+/g, ' ').trim(),
).toContain('passed Job #4757 created 3 weeks ago by Root');
}));
});
@@ -383,7 +375,7 @@ describe('Job App', () => {
});
describe('sidebar', () => {
- it('has no blank blocks', done => {
+ it('has no blank blocks', (done) => {
setupAndMount({
jobData: {
duration: null,
@@ -400,7 +392,7 @@ describe('Job App', () => {
const blocks = wrapper.findAll('.blocks-container > *').wrappers;
expect(blocks.length).toBeGreaterThan(0);
- blocks.forEach(block => {
+ blocks.forEach((block) => {
expect(block.text().trim()).not.toBe('');
});
})
diff --git a/spec/frontend/jobs/components/job_container_item_spec.js b/spec/frontend/jobs/components/job_container_item_spec.js
index 41b399fa32b..af7ce100d83 100644
--- a/spec/frontend/jobs/components/job_container_item_spec.js
+++ b/spec/frontend/jobs/components/job_container_item_spec.js
@@ -82,7 +82,7 @@ describe('JobContainerItem', () => {
);
});
- it('displays remaining time in tooltip', done => {
+ it('displays remaining time in tooltip', (done) => {
vm = mountComponent(Component, {
job: delayedJobFixture,
isActive: false,
diff --git a/spec/frontend/jobs/components/job_log_controllers_spec.js b/spec/frontend/jobs/components/job_log_controllers_spec.js
index 233cef05622..97b0333cb32 100644
--- a/spec/frontend/jobs/components/job_log_controllers_spec.js
+++ b/spec/frontend/jobs/components/job_log_controllers_spec.js
@@ -21,7 +21,7 @@ describe('Job log controllers', () => {
isTraceSizeVisible: true,
};
- const createWrapper = props => {
+ const createWrapper = (props) => {
wrapper = mount(JobLogControllers, {
propsData: {
...defaultProps,
diff --git a/spec/frontend/jobs/components/job_sidebar_details_container_spec.js b/spec/frontend/jobs/components/job_sidebar_details_container_spec.js
index be684769b46..bc0d455c309 100644
--- a/spec/frontend/jobs/components/job_sidebar_details_container_spec.js
+++ b/spec/frontend/jobs/components/job_sidebar_details_container_spec.js
@@ -1,8 +1,8 @@
import { shallowMount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import SidebarJobDetailsContainer from '~/jobs/components/sidebar_job_details_container.vue';
import DetailRow from '~/jobs/components/sidebar_detail_row.vue';
import createStore from '~/jobs/store';
-import { extendedWrapper } from '../../helpers/vue_test_utils_helper';
import job from '../mock_data';
describe('Job Sidebar Details Container', () => {
@@ -37,7 +37,7 @@ describe('Job Sidebar Details Container', () => {
it('should render an empty container', () => {
createWrapper();
- expect(wrapper.isEmpty()).toBe(true);
+ expect(wrapper.html()).toBe('');
});
});
@@ -55,7 +55,6 @@ describe('Job Sidebar Details Container', () => {
await store.dispatch('receiveJobSuccess', { [detail]: job[detail] });
const detailsRow = findAllDetailsRow();
- expect(wrapper.isEmpty()).toBe(false);
expect(detailsRow).toHaveLength(1);
expect(detailsRow.at(0).text()).toBe(value);
});
@@ -65,7 +64,6 @@ describe('Job Sidebar Details Container', () => {
await store.dispatch('receiveJobSuccess', { tags });
const tagsComponent = findJobTags();
- expect(wrapper.isEmpty()).toBe(false);
expect(tagsComponent.text()).toBe('Tags: tag');
});
});
@@ -91,7 +89,7 @@ describe('Job Sidebar Details Container', () => {
await store.dispatch('receiveJobSuccess', { metadata });
const detailsRow = findAllDetailsRow();
- expect(wrapper.isEmpty()).toBe(true);
+ expect(wrapper.html()).toBe('');
expect(detailsRow.exists()).toBe(false);
});
@@ -100,7 +98,6 @@ describe('Job Sidebar Details Container', () => {
await store.dispatch('receiveJobSuccess', { metadata });
const detailsRow = findAllDetailsRow();
- expect(wrapper.isEmpty()).toBe(false);
expect(detailsRow).toHaveLength(1);
expect(detailsRow.at(0).text()).toBe('Timeout: 1m 40s');
});
diff --git a/spec/frontend/jobs/components/jobs_container_spec.js b/spec/frontend/jobs/components/jobs_container_spec.js
index 119b18b7557..9a336489101 100644
--- a/spec/frontend/jobs/components/jobs_container_spec.js
+++ b/spec/frontend/jobs/components/jobs_container_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
+import mountComponent from 'helpers/vue_mount_component_helper';
import component from '~/jobs/components/jobs_container.vue';
-import mountComponent from '../../helpers/vue_mount_component_helper';
describe('Jobs List block', () => {
const Component = Vue.extend(component);
diff --git a/spec/frontend/jobs/components/log/line_spec.js b/spec/frontend/jobs/components/log/line_spec.js
index 914ae2424c8..367154e7f82 100644
--- a/spec/frontend/jobs/components/log/line_spec.js
+++ b/spec/frontend/jobs/components/log/line_spec.js
@@ -34,10 +34,7 @@ describe('Job Log Line', () => {
const findLine = () => wrapper.find('span');
const findLink = () => findLine().find('a');
const findLinks = () => findLine().findAll('a');
- const findLinkAttributeByIndex = i =>
- findLinks()
- .at(i)
- .attributes();
+ const findLinkAttributeByIndex = (i) => findLinks().at(i).attributes();
beforeEach(() => {
data = mockProps();
diff --git a/spec/frontend/jobs/components/log/log_spec.js b/spec/frontend/jobs/components/log/log_spec.js
index 015d5e01a46..f662ffa1780 100644
--- a/spec/frontend/jobs/components/log/log_spec.js
+++ b/spec/frontend/jobs/components/log/log_spec.js
@@ -62,11 +62,7 @@ describe('Job Log', () => {
});
it('renders an icon with the open state', () => {
- expect(
- findCollapsibleLine()
- .find('[data-testid="angle-down-icon"]')
- .exists(),
- ).toBe(true);
+ expect(findCollapsibleLine().find('[data-testid="angle-down-icon"]').exists()).toBe(true);
});
describe('on click header section', () => {
diff --git a/spec/frontend/jobs/components/manual_variables_form_spec.js b/spec/frontend/jobs/components/manual_variables_form_spec.js
index 547f146cf88..f6c37407e2b 100644
--- a/spec/frontend/jobs/components/manual_variables_form_spec.js
+++ b/spec/frontend/jobs/components/manual_variables_form_spec.js
@@ -27,7 +27,7 @@ describe('Manual Variables Form', () => {
factory(requiredProps);
});
- afterEach(done => {
+ afterEach((done) => {
// The component has a `nextTick` callback after some events so we need
// to wait for those to finish before destroying.
setImmediate(() => {
@@ -54,7 +54,7 @@ describe('Manual Variables Form', () => {
});
describe('when adding a new variable', () => {
- it('creates a new variable when user types a new key and resets the form', done => {
+ it('creates a new variable when user types a new key and resets the form', (done) => {
wrapper.vm
.$nextTick()
.then(() => wrapper.find({ ref: 'inputKey' }).setValue('new key'))
@@ -67,7 +67,7 @@ describe('Manual Variables Form', () => {
.catch(done.fail);
});
- it('creates a new variable when user types a new value and resets the form', done => {
+ it('creates a new variable when user types a new value and resets the form', (done) => {
wrapper.vm
.$nextTick()
.then(() => wrapper.find({ ref: 'inputSecretValue' }).setValue('new value'))
@@ -82,7 +82,7 @@ describe('Manual Variables Form', () => {
});
describe('when deleting a variable', () => {
- beforeEach(done => {
+ beforeEach((done) => {
wrapper.vm.variables = [
{
key: 'new key',
diff --git a/spec/frontend/jobs/components/sidebar_spec.js b/spec/frontend/jobs/components/sidebar_spec.js
index 1d4be2fb81e..22d555ffec7 100644
--- a/spec/frontend/jobs/components/sidebar_spec.js
+++ b/spec/frontend/jobs/components/sidebar_spec.js
@@ -1,4 +1,5 @@
import { shallowMount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import Sidebar, { forwardDeploymentFailureModalId } from '~/jobs/components/sidebar.vue';
import StagesDropdown from '~/jobs/components/stages_dropdown.vue';
import JobsContainer from '~/jobs/components/jobs_container.vue';
@@ -6,7 +7,6 @@ import JobRetryForwardDeploymentModal from '~/jobs/components/job_retry_forward_
import JobRetryButton from '~/jobs/components/job_sidebar_retry_button.vue';
import createStore from '~/jobs/store';
import job, { jobsInStage } from '../mock_data';
-import { extendedWrapper } from '../../helpers/vue_test_utils_helper';
describe('Sidebar details block', () => {
let store;
diff --git a/spec/frontend/jobs/components/stages_dropdown_spec.js b/spec/frontend/jobs/components/stages_dropdown_spec.js
index e8fa6094c25..72d5d0f9d44 100644
--- a/spec/frontend/jobs/components/stages_dropdown_spec.js
+++ b/spec/frontend/jobs/components/stages_dropdown_spec.js
@@ -1,7 +1,7 @@
import Vue from 'vue';
import { trimText } from 'helpers/text_helper';
+import mountComponent from 'helpers/vue_mount_component_helper';
import component from '~/jobs/components/stages_dropdown.vue';
-import mountComponent from '../../helpers/vue_mount_component_helper';
describe('Stages Dropdown', () => {
const Component = Vue.extend(component);
diff --git a/spec/frontend/jobs/components/stuck_block_spec.js b/spec/frontend/jobs/components/stuck_block_spec.js
index 926286bf75a..4db73eaaaec 100644
--- a/spec/frontend/jobs/components/stuck_block_spec.js
+++ b/spec/frontend/jobs/components/stuck_block_spec.js
@@ -12,7 +12,7 @@ describe('Stuck Block Job component', () => {
}
});
- const createWrapper = props => {
+ const createWrapper = (props) => {
wrapper = shallowMount(StuckBlock, {
propsData: {
...props,
diff --git a/spec/frontend/jobs/components/trigger_block_spec.js b/spec/frontend/jobs/components/trigger_block_spec.js
index 448197b82c0..16ea276ee4a 100644
--- a/spec/frontend/jobs/components/trigger_block_spec.js
+++ b/spec/frontend/jobs/components/trigger_block_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
+import mountComponent from 'helpers/vue_mount_component_helper';
import component from '~/jobs/components/trigger_block.vue';
-import mountComponent from '../../helpers/vue_mount_component_helper';
describe('Trigger block', () => {
const Component = Vue.extend(component);
@@ -32,7 +32,7 @@ describe('Trigger block', () => {
describe('with variables', () => {
describe('hide/reveal variables', () => {
- it('should toggle variables on click', done => {
+ it('should toggle variables on click', (done) => {
vm = mountComponent(Component, {
trigger: {
short_token: 'bd7e',
diff --git a/spec/frontend/jobs/store/actions_spec.js b/spec/frontend/jobs/store/actions_spec.js
index 91bd5521f70..2d757ce76bf 100644
--- a/spec/frontend/jobs/store/actions_spec.js
+++ b/spec/frontend/jobs/store/actions_spec.js
@@ -1,6 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
-import { TEST_HOST } from '../../helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
import axios from '~/lib/utils/axios_utils';
import {
setJobEndpoint,
@@ -39,7 +39,7 @@ describe('Job State actions', () => {
});
describe('setJobEndpoint', () => {
- it('should commit SET_JOB_ENDPOINT mutation', done => {
+ it('should commit SET_JOB_ENDPOINT mutation', (done) => {
testAction(
setJobEndpoint,
'job/872324.json',
@@ -52,7 +52,7 @@ describe('Job State actions', () => {
});
describe('setTraceOptions', () => {
- it('should commit SET_TRACE_OPTIONS mutation', done => {
+ it('should commit SET_TRACE_OPTIONS mutation', (done) => {
testAction(
setTraceOptions,
{ pagePath: 'job/872324/trace.json' },
@@ -65,26 +65,26 @@ describe('Job State actions', () => {
});
describe('hideSidebar', () => {
- it('should commit HIDE_SIDEBAR mutation', done => {
+ it('should commit HIDE_SIDEBAR mutation', (done) => {
testAction(hideSidebar, null, mockedState, [{ type: types.HIDE_SIDEBAR }], [], done);
});
});
describe('showSidebar', () => {
- it('should commit HIDE_SIDEBAR mutation', done => {
+ it('should commit HIDE_SIDEBAR mutation', (done) => {
testAction(showSidebar, null, mockedState, [{ type: types.SHOW_SIDEBAR }], [], done);
});
});
describe('toggleSidebar', () => {
describe('when isSidebarOpen is true', () => {
- it('should dispatch hideSidebar', done => {
+ it('should dispatch hideSidebar', (done) => {
testAction(toggleSidebar, null, mockedState, [], [{ type: 'hideSidebar' }], done);
});
});
describe('when isSidebarOpen is false', () => {
- it('should dispatch showSidebar', done => {
+ it('should dispatch showSidebar', (done) => {
mockedState.isSidebarOpen = false;
testAction(toggleSidebar, null, mockedState, [], [{ type: 'showSidebar' }], done);
@@ -93,7 +93,7 @@ describe('Job State actions', () => {
});
describe('requestJob', () => {
- it('should commit REQUEST_JOB mutation', done => {
+ it('should commit REQUEST_JOB mutation', (done) => {
testAction(requestJob, null, mockedState, [{ type: types.REQUEST_JOB }], [], done);
});
});
@@ -113,7 +113,7 @@ describe('Job State actions', () => {
});
describe('success', () => {
- it('dispatches requestJob and receiveJobSuccess ', done => {
+ it('dispatches requestJob and receiveJobSuccess ', (done) => {
mock.onGet(`${TEST_HOST}/endpoint.json`).replyOnce(200, { id: 121212, name: 'karma' });
testAction(
@@ -140,7 +140,7 @@ describe('Job State actions', () => {
mock.onGet(`${TEST_HOST}/endpoint.json`).reply(500);
});
- it('dispatches requestJob and receiveJobError ', done => {
+ it('dispatches requestJob and receiveJobError ', (done) => {
testAction(
fetchJob,
null,
@@ -161,7 +161,7 @@ describe('Job State actions', () => {
});
describe('receiveJobSuccess', () => {
- it('should commit RECEIVE_JOB_SUCCESS mutation', done => {
+ it('should commit RECEIVE_JOB_SUCCESS mutation', (done) => {
testAction(
receiveJobSuccess,
{ id: 121232132 },
@@ -174,25 +174,25 @@ describe('Job State actions', () => {
});
describe('receiveJobError', () => {
- it('should commit RECEIVE_JOB_ERROR mutation', done => {
+ it('should commit RECEIVE_JOB_ERROR mutation', (done) => {
testAction(receiveJobError, null, mockedState, [{ type: types.RECEIVE_JOB_ERROR }], [], done);
});
});
describe('scrollTop', () => {
- it('should dispatch toggleScrollButtons action', done => {
+ it('should dispatch toggleScrollButtons action', (done) => {
testAction(scrollTop, null, mockedState, [], [{ type: 'toggleScrollButtons' }], done);
});
});
describe('scrollBottom', () => {
- it('should dispatch toggleScrollButtons action', done => {
+ it('should dispatch toggleScrollButtons action', (done) => {
testAction(scrollBottom, null, mockedState, [], [{ type: 'toggleScrollButtons' }], done);
});
});
describe('requestTrace', () => {
- it('should commit REQUEST_TRACE mutation', done => {
+ it('should commit REQUEST_TRACE mutation', (done) => {
testAction(requestTrace, null, mockedState, [{ type: types.REQUEST_TRACE }], [], done);
});
});
@@ -212,7 +212,7 @@ describe('Job State actions', () => {
});
describe('success', () => {
- it('dispatches requestTrace, receiveTraceSuccess and stopPollingTrace when job is complete', done => {
+ it('dispatches requestTrace, receiveTraceSuccess and stopPollingTrace when job is complete', (done) => {
mock.onGet(`${TEST_HOST}/endpoint/trace.json`).replyOnce(200, {
html: 'I, [2018-08-17T22:57:45.707325 #1841] INFO -- :',
complete: true,
@@ -255,7 +255,7 @@ describe('Job State actions', () => {
mock.onGet(`${TEST_HOST}/endpoint/trace.json`).replyOnce(200, tracePayload);
});
- it('dispatches startPollingTrace', done => {
+ it('dispatches startPollingTrace', (done) => {
testAction(
fetchTrace,
null,
@@ -270,7 +270,7 @@ describe('Job State actions', () => {
);
});
- it('does not dispatch startPollingTrace when timeout is non-empty', done => {
+ it('does not dispatch startPollingTrace when timeout is non-empty', (done) => {
mockedState.traceTimeout = 1;
testAction(
@@ -293,7 +293,7 @@ describe('Job State actions', () => {
mock.onGet(`${TEST_HOST}/endpoint/trace.json`).reply(500);
});
- it('dispatches requestTrace and receiveTraceError ', done => {
+ it('dispatches requestTrace and receiveTraceError ', (done) => {
testAction(
fetchTrace,
null,
@@ -358,7 +358,7 @@ describe('Job State actions', () => {
window.clearTimeout = origTimeout;
});
- it('should commit STOP_POLLING_TRACE mutation ', done => {
+ it('should commit STOP_POLLING_TRACE mutation ', (done) => {
const traceTimeout = 7;
testAction(
@@ -377,7 +377,7 @@ describe('Job State actions', () => {
});
describe('receiveTraceSuccess', () => {
- it('should commit RECEIVE_TRACE_SUCCESS mutation ', done => {
+ it('should commit RECEIVE_TRACE_SUCCESS mutation ', (done) => {
testAction(
receiveTraceSuccess,
'hello world',
@@ -390,13 +390,13 @@ describe('Job State actions', () => {
});
describe('receiveTraceError', () => {
- it('should commit stop polling trace', done => {
+ it('should commit stop polling trace', (done) => {
testAction(receiveTraceError, null, mockedState, [], [{ type: 'stopPollingTrace' }], done);
});
});
describe('toggleCollapsibleLine', () => {
- it('should commit TOGGLE_COLLAPSIBLE_LINE mutation ', done => {
+ it('should commit TOGGLE_COLLAPSIBLE_LINE mutation ', (done) => {
testAction(
toggleCollapsibleLine,
{ isClosed: true },
@@ -409,7 +409,7 @@ describe('Job State actions', () => {
});
describe('requestJobsForStage', () => {
- it('should commit REQUEST_JOBS_FOR_STAGE mutation ', done => {
+ it('should commit REQUEST_JOBS_FOR_STAGE mutation ', (done) => {
testAction(
requestJobsForStage,
{ name: 'deploy' },
@@ -433,7 +433,7 @@ describe('Job State actions', () => {
});
describe('success', () => {
- it('dispatches requestJobsForStage and receiveJobsForStageSuccess ', done => {
+ it('dispatches requestJobsForStage and receiveJobsForStageSuccess ', (done) => {
mock
.onGet(`${TEST_HOST}/jobs.json`)
.replyOnce(200, { latest_statuses: [{ id: 121212, name: 'build' }], retried: [] });
@@ -463,7 +463,7 @@ describe('Job State actions', () => {
mock.onGet(`${TEST_HOST}/jobs.json`).reply(500);
});
- it('dispatches requestJobsForStage and receiveJobsForStageError', done => {
+ it('dispatches requestJobsForStage and receiveJobsForStageError', (done) => {
testAction(
fetchJobsForStage,
{ dropdown_path: `${TEST_HOST}/jobs.json` },
@@ -485,7 +485,7 @@ describe('Job State actions', () => {
});
describe('receiveJobsForStageSuccess', () => {
- it('should commit RECEIVE_JOBS_FOR_STAGE_SUCCESS mutation ', done => {
+ it('should commit RECEIVE_JOBS_FOR_STAGE_SUCCESS mutation ', (done) => {
testAction(
receiveJobsForStageSuccess,
[{ id: 121212, name: 'karma' }],
@@ -498,7 +498,7 @@ describe('Job State actions', () => {
});
describe('receiveJobsForStageError', () => {
- it('should commit RECEIVE_JOBS_FOR_STAGE_ERROR mutation ', done => {
+ it('should commit RECEIVE_JOBS_FOR_STAGE_ERROR mutation ', (done) => {
testAction(
receiveJobsForStageError,
null,
diff --git a/spec/frontend/jobs/store/helpers.js b/spec/frontend/jobs/store/helpers.js
index 78e33394b63..402ae58971a 100644
--- a/spec/frontend/jobs/store/helpers.js
+++ b/spec/frontend/jobs/store/helpers.js
@@ -1,5 +1,5 @@
import state from '~/jobs/store/state';
-export const resetStore = store => {
+export const resetStore = (store) => {
store.replaceState(state());
};
diff --git a/spec/frontend/lazy_loader_spec.js b/spec/frontend/lazy_loader_spec.js
index 5eb09bc2359..f8ac7568724 100644
--- a/spec/frontend/lazy_loader_spec.js
+++ b/spec/frontend/lazy_loader_spec.js
@@ -1,10 +1,10 @@
import { noop } from 'lodash';
import { TEST_HOST } from 'helpers/test_constants';
import { useMockMutationObserver, useMockIntersectionObserver } from 'helpers/mock_dom_observer';
+import waitForPromises from 'helpers/wait_for_promises';
import LazyLoader from '~/lazy_loader';
-import waitForPromises from './helpers/wait_for_promises';
-const execImmediately = callback => {
+const execImmediately = (callback) => {
callback();
};
@@ -20,7 +20,7 @@ describe('LazyLoader', () => {
triggerMutation(document.body, { options: { childList: true, subtree: true } });
};
- const triggerIntersectionWithRatio = img => {
+ const triggerIntersectionWithRatio = (img) => {
triggerIntersection(img, { entry: { intersectionRatio: 0.1 } });
};
diff --git a/spec/frontend/lib/dompurify_spec.js b/spec/frontend/lib/dompurify_spec.js
index ee1971a4931..a01f86678e9 100644
--- a/spec/frontend/lib/dompurify_spec.js
+++ b/spec/frontend/lib/dompurify_spec.js
@@ -15,8 +15,8 @@ const absoluteGon = {
const expectedSanitized = '<svg><use></use></svg>';
const safeUrls = {
- root: Object.values(rootGon).map(url => `${url}#ellipsis_h`),
- absolute: Object.values(absoluteGon).map(url => `${url}#ellipsis_h`),
+ root: Object.values(rootGon).map((url) => `${url}#ellipsis_h`),
+ absolute: Object.values(absoluteGon).map((url) => `${url}#ellipsis_h`),
};
const unsafeUrls = [
@@ -60,7 +60,7 @@ describe('~/lib/dompurify', () => {
expect(sanitize(htmlHref)).toBe(htmlHref);
});
- it.each(safeUrls[type])('allows safe URL %s', url => {
+ it.each(safeUrls[type])('allows safe URL %s', (url) => {
const htmlHref = `<svg><use href="${url}"></use></svg>`;
expect(sanitize(htmlHref)).toBe(htmlHref);
@@ -68,7 +68,7 @@ describe('~/lib/dompurify', () => {
expect(sanitize(htmlXlink)).toBe(htmlXlink);
});
- it.each(unsafeUrls)('sanitizes unsafe URL %s', url => {
+ it.each(unsafeUrls)('sanitizes unsafe URL %s', (url) => {
const htmlHref = `<svg><use href="${url}"></use></svg>`;
const htmlXlink = `<svg><use xlink:href="${url}"></use></svg>`;
@@ -87,7 +87,7 @@ describe('~/lib/dompurify', () => {
window.gon = originalGon;
});
- it.each([...safeUrls.root, ...safeUrls.absolute, ...unsafeUrls])('sanitizes URL %s', url => {
+ it.each([...safeUrls.root, ...safeUrls.absolute, ...unsafeUrls])('sanitizes URL %s', (url) => {
const htmlHref = `<svg><use href="${url}"></use></svg>`;
const htmlXlink = `<svg><use xlink:href="${url}"></use></svg>`;
diff --git a/spec/frontend/lib/utils/ajax_cache_spec.js b/spec/frontend/lib/utils/ajax_cache_spec.js
index e2ee70b9d69..641dd3684fa 100644
--- a/spec/frontend/lib/utils/ajax_cache_spec.js
+++ b/spec/frontend/lib/utils/ajax_cache_spec.js
@@ -104,7 +104,7 @@ describe('AjaxCache', () => {
it('stores and returns data from Ajax call if cache is empty', () => {
mock.onGet(dummyEndpoint).reply(200, dummyResponse);
- return AjaxCache.retrieve(dummyEndpoint).then(data => {
+ return AjaxCache.retrieve(dummyEndpoint).then((data) => {
expect(data).toEqual(dummyResponse);
expect(AjaxCache.internalStorage[dummyEndpoint]).toEqual(dummyResponse);
});
@@ -126,7 +126,7 @@ describe('AjaxCache', () => {
mock.onGet(dummyEndpoint).networkError();
expect.assertions(2);
- return AjaxCache.retrieve(dummyEndpoint).catch(error => {
+ return AjaxCache.retrieve(dummyEndpoint).catch((error) => {
expect(error.message).toBe(`${dummyEndpoint}: ${errorMessage}`);
expect(error.textStatus).toBe(errorMessage);
});
@@ -135,7 +135,7 @@ describe('AjaxCache', () => {
it('makes no Ajax call if matching data exists', () => {
AjaxCache.internalStorage[dummyEndpoint] = dummyResponse;
- return AjaxCache.retrieve(dummyEndpoint).then(data => {
+ return AjaxCache.retrieve(dummyEndpoint).then((data) => {
expect(data).toBe(dummyResponse);
expect(axios.get).not.toHaveBeenCalled();
});
@@ -153,7 +153,7 @@ describe('AjaxCache', () => {
return Promise.all([
AjaxCache.retrieve(dummyEndpoint),
AjaxCache.retrieve(dummyEndpoint, true),
- ]).then(data => {
+ ]).then((data) => {
expect(data).toEqual([oldDummyResponse, dummyResponse]);
});
});
diff --git a/spec/frontend/lib/utils/apollo_startup_js_link_spec.js b/spec/frontend/lib/utils/apollo_startup_js_link_spec.js
index faead3ff8fe..c0e5b06651f 100644
--- a/spec/frontend/lib/utils/apollo_startup_js_link_spec.js
+++ b/spec/frontend/lib/utils/apollo_startup_js_link_spec.js
@@ -58,9 +58,9 @@ describe('StartupJSLink', () => {
link = ApolloLink.from([startupLink, new ApolloLink(() => Observable.of(FORWARDED_RESPONSE))]);
};
- it('forwards requests if no calls are set up', done => {
+ it('forwards requests if no calls are set up', (done) => {
setupLink();
- link.request(mockOperation()).subscribe(result => {
+ link.request(mockOperation()).subscribe((result) => {
expect(result).toEqual(FORWARDED_RESPONSE);
expect(startupLink.startupCalls).toBe(null);
expect(startupLink.request).toEqual(StartupJSLink.noopRequest);
@@ -68,7 +68,7 @@ describe('StartupJSLink', () => {
});
});
- it('forwards requests if the operation is not pre-loaded', done => {
+ it('forwards requests if the operation is not pre-loaded', (done) => {
window.gl = {
startup_graphql_calls: [
{
@@ -79,7 +79,7 @@ describe('StartupJSLink', () => {
],
};
setupLink();
- link.request(mockOperation({ operationName: 'notLoaded' })).subscribe(result => {
+ link.request(mockOperation({ operationName: 'notLoaded' })).subscribe((result) => {
expect(result).toEqual(FORWARDED_RESPONSE);
expect(startupLink.startupCalls.size).toBe(1);
done();
@@ -87,7 +87,7 @@ describe('StartupJSLink', () => {
});
describe('variable match errors: ', () => {
- it('forwards requests if the variables are not matching', done => {
+ it('forwards requests if the variables are not matching', (done) => {
window.gl = {
startup_graphql_calls: [
{
@@ -98,14 +98,14 @@ describe('StartupJSLink', () => {
],
};
setupLink();
- link.request(mockOperation()).subscribe(result => {
+ link.request(mockOperation()).subscribe((result) => {
expect(result).toEqual(FORWARDED_RESPONSE);
expect(startupLink.startupCalls.size).toBe(0);
done();
});
});
- it('forwards requests if more variables are set in the operation', done => {
+ it('forwards requests if more variables are set in the operation', (done) => {
window.gl = {
startup_graphql_calls: [
{
@@ -115,14 +115,14 @@ describe('StartupJSLink', () => {
],
};
setupLink();
- link.request(mockOperation()).subscribe(result => {
+ link.request(mockOperation()).subscribe((result) => {
expect(result).toEqual(FORWARDED_RESPONSE);
expect(startupLink.startupCalls.size).toBe(0);
done();
});
});
- it('forwards requests if less variables are set in the operation', done => {
+ it('forwards requests if less variables are set in the operation', (done) => {
window.gl = {
startup_graphql_calls: [
{
@@ -133,14 +133,14 @@ describe('StartupJSLink', () => {
],
};
setupLink();
- link.request(mockOperation({ variables: { id: 3 } })).subscribe(result => {
+ link.request(mockOperation({ variables: { id: 3 } })).subscribe((result) => {
expect(result).toEqual(FORWARDED_RESPONSE);
expect(startupLink.startupCalls.size).toBe(0);
done();
});
});
- it('forwards requests if different variables are set', done => {
+ it('forwards requests if different variables are set', (done) => {
window.gl = {
startup_graphql_calls: [
{
@@ -151,14 +151,14 @@ describe('StartupJSLink', () => {
],
};
setupLink();
- link.request(mockOperation({ variables: { id: 3 } })).subscribe(result => {
+ link.request(mockOperation({ variables: { id: 3 } })).subscribe((result) => {
expect(result).toEqual(FORWARDED_RESPONSE);
expect(startupLink.startupCalls.size).toBe(0);
done();
});
});
- it('forwards requests if array variables have a different order', done => {
+ it('forwards requests if array variables have a different order', (done) => {
window.gl = {
startup_graphql_calls: [
{
@@ -169,7 +169,7 @@ describe('StartupJSLink', () => {
],
};
setupLink();
- link.request(mockOperation({ variables: { id: [4, 3] } })).subscribe(result => {
+ link.request(mockOperation({ variables: { id: [4, 3] } })).subscribe((result) => {
expect(result).toEqual(FORWARDED_RESPONSE);
expect(startupLink.startupCalls.size).toBe(0);
done();
@@ -178,7 +178,7 @@ describe('StartupJSLink', () => {
});
describe('error handling', () => {
- it('forwards the call if the fetchCall is failing with a HTTP Error', done => {
+ it('forwards the call if the fetchCall is failing with a HTTP Error', (done) => {
window.gl = {
startup_graphql_calls: [
{
@@ -189,14 +189,14 @@ describe('StartupJSLink', () => {
],
};
setupLink();
- link.request(mockOperation()).subscribe(result => {
+ link.request(mockOperation()).subscribe((result) => {
expect(result).toEqual(FORWARDED_RESPONSE);
expect(startupLink.startupCalls.size).toBe(0);
done();
});
});
- it('forwards the call if it errors (e.g. failing JSON)', done => {
+ it('forwards the call if it errors (e.g. failing JSON)', (done) => {
window.gl = {
startup_graphql_calls: [
{
@@ -207,14 +207,14 @@ describe('StartupJSLink', () => {
],
};
setupLink();
- link.request(mockOperation()).subscribe(result => {
+ link.request(mockOperation()).subscribe((result) => {
expect(result).toEqual(FORWARDED_RESPONSE);
expect(startupLink.startupCalls.size).toBe(0);
done();
});
});
- it('forwards the call if the response contains an error', done => {
+ it('forwards the call if the response contains an error', (done) => {
window.gl = {
startup_graphql_calls: [
{
@@ -225,14 +225,14 @@ describe('StartupJSLink', () => {
],
};
setupLink();
- link.request(mockOperation()).subscribe(result => {
+ link.request(mockOperation()).subscribe((result) => {
expect(result).toEqual(FORWARDED_RESPONSE);
expect(startupLink.startupCalls.size).toBe(0);
done();
});
});
- it("forwards the call if the response doesn't contain a data object", done => {
+ it("forwards the call if the response doesn't contain a data object", (done) => {
window.gl = {
startup_graphql_calls: [
{
@@ -243,7 +243,7 @@ describe('StartupJSLink', () => {
],
};
setupLink();
- link.request(mockOperation()).subscribe(result => {
+ link.request(mockOperation()).subscribe((result) => {
expect(result).toEqual(FORWARDED_RESPONSE);
expect(startupLink.startupCalls.size).toBe(0);
done();
@@ -251,7 +251,7 @@ describe('StartupJSLink', () => {
});
});
- it('resolves the request if the operation is matching', done => {
+ it('resolves the request if the operation is matching', (done) => {
window.gl = {
startup_graphql_calls: [
{
@@ -262,14 +262,14 @@ describe('StartupJSLink', () => {
],
};
setupLink();
- link.request(mockOperation()).subscribe(result => {
+ link.request(mockOperation()).subscribe((result) => {
expect(result).toEqual(STARTUP_JS_RESPONSE);
expect(startupLink.startupCalls.size).toBe(0);
done();
});
});
- it('resolves the request exactly once', done => {
+ it('resolves the request exactly once', (done) => {
window.gl = {
startup_graphql_calls: [
{
@@ -280,17 +280,17 @@ describe('StartupJSLink', () => {
],
};
setupLink();
- link.request(mockOperation()).subscribe(result => {
+ link.request(mockOperation()).subscribe((result) => {
expect(result).toEqual(STARTUP_JS_RESPONSE);
expect(startupLink.startupCalls.size).toBe(0);
- link.request(mockOperation()).subscribe(result2 => {
+ link.request(mockOperation()).subscribe((result2) => {
expect(result2).toEqual(FORWARDED_RESPONSE);
done();
});
});
});
- it('resolves the request if the variables have a different order', done => {
+ it('resolves the request if the variables have a different order', (done) => {
window.gl = {
startup_graphql_calls: [
{
@@ -301,14 +301,14 @@ describe('StartupJSLink', () => {
],
};
setupLink();
- link.request(mockOperation({ variables: { name: 'foo', id: 3 } })).subscribe(result => {
+ link.request(mockOperation({ variables: { name: 'foo', id: 3 } })).subscribe((result) => {
expect(result).toEqual(STARTUP_JS_RESPONSE);
expect(startupLink.startupCalls.size).toBe(0);
done();
});
});
- it('resolves the request if the variables have undefined values', done => {
+ it('resolves the request if the variables have undefined values', (done) => {
window.gl = {
startup_graphql_calls: [
{
@@ -321,14 +321,14 @@ describe('StartupJSLink', () => {
setupLink();
link
.request(mockOperation({ variables: { name: 'foo', undef: undefined } }))
- .subscribe(result => {
+ .subscribe((result) => {
expect(result).toEqual(STARTUP_JS_RESPONSE);
expect(startupLink.startupCalls.size).toBe(0);
done();
});
});
- it('resolves the request if the variables are of an array format', done => {
+ it('resolves the request if the variables are of an array format', (done) => {
window.gl = {
startup_graphql_calls: [
{
@@ -339,14 +339,14 @@ describe('StartupJSLink', () => {
],
};
setupLink();
- link.request(mockOperation({ variables: { id: [3, 4] } })).subscribe(result => {
+ link.request(mockOperation({ variables: { id: [3, 4] } })).subscribe((result) => {
expect(result).toEqual(STARTUP_JS_RESPONSE);
expect(startupLink.startupCalls.size).toBe(0);
done();
});
});
- it('resolves multiple requests correctly', done => {
+ it('resolves multiple requests correctly', (done) => {
window.gl = {
startup_graphql_calls: [
{
@@ -362,10 +362,10 @@ describe('StartupJSLink', () => {
],
};
setupLink();
- link.request(mockOperation({ operationName: OPERATION_NAME_TWO })).subscribe(result => {
+ link.request(mockOperation({ operationName: OPERATION_NAME_TWO })).subscribe((result) => {
expect(result).toEqual(STARTUP_JS_RESPONSE_TWO);
expect(startupLink.startupCalls.size).toBe(1);
- link.request(mockOperation({ operationName: OPERATION_NAME })).subscribe(result2 => {
+ link.request(mockOperation({ operationName: OPERATION_NAME })).subscribe((result2) => {
expect(result2).toEqual(STARTUP_JS_RESPONSE);
expect(startupLink.startupCalls.size).toBe(0);
done();
diff --git a/spec/frontend/lib/utils/chart_utils_spec.js b/spec/frontend/lib/utils/chart_utils_spec.js
index e811b8405fb..65bb68c5017 100644
--- a/spec/frontend/lib/utils/chart_utils_spec.js
+++ b/spec/frontend/lib/utils/chart_utils_spec.js
@@ -3,7 +3,11 @@ import { firstAndLastY } from '~/lib/utils/chart_utils';
describe('Chart utils', () => {
describe('firstAndLastY', () => {
it('returns the first and last y-values of a given data set as an array', () => {
- const data = [['', 1], ['', 2], ['', 3]];
+ const data = [
+ ['', 1],
+ ['', 2],
+ ['', 3],
+ ];
expect(firstAndLastY(data)).toEqual([1, 3]);
});
diff --git a/spec/frontend/lib/utils/common_utils_spec.js b/spec/frontend/lib/utils/common_utils_spec.js
index 433fb368f55..90222f0f718 100644
--- a/spec/frontend/lib/utils/common_utils_spec.js
+++ b/spec/frontend/lib/utils/common_utils_spec.js
@@ -1,4 +1,3 @@
-import $ from 'jquery';
import * as commonUtils from '~/lib/utils/common_utils';
describe('common_utils', () => {
@@ -214,60 +213,90 @@ describe('common_utils', () => {
describe('scrollToElement*', () => {
let elem;
- const windowHeight = 1000;
+ const windowHeight = 550;
const elemTop = 100;
+ const id = 'scroll_test';
beforeEach(() => {
elem = document.createElement('div');
+ elem.id = id;
+ document.body.appendChild(elem);
window.innerHeight = windowHeight;
window.mrTabs = { currentAction: 'show' };
- jest.spyOn($.fn, 'animate');
- jest.spyOn($.fn, 'offset').mockReturnValue({ top: elemTop });
+ jest.spyOn(window, 'scrollTo').mockImplementation(() => {});
+ jest.spyOn(Element.prototype, 'getBoundingClientRect').mockReturnValue({ top: elemTop });
});
afterEach(() => {
- $.fn.animate.mockRestore();
- $.fn.offset.mockRestore();
+ window.scrollTo.mockRestore();
+ Element.prototype.getBoundingClientRect.mockRestore();
+ elem.remove();
});
- describe('scrollToElement', () => {
+ describe('scrollToElement with HTMLElement', () => {
it('scrolls to element', () => {
commonUtils.scrollToElement(elem);
- expect($.fn.animate).toHaveBeenCalledWith(
- {
- scrollTop: elemTop,
- },
- expect.any(Number),
- );
+ expect(window.scrollTo).toHaveBeenCalledWith({
+ behavior: 'smooth',
+ top: elemTop,
+ });
});
it('scrolls to element with offset', () => {
const offset = 50;
commonUtils.scrollToElement(elem, { offset });
- expect($.fn.animate).toHaveBeenCalledWith(
- {
- scrollTop: elemTop + offset,
- },
- expect.any(Number),
- );
+ expect(window.scrollTo).toHaveBeenCalledWith({
+ behavior: 'smooth',
+ top: elemTop + offset,
+ });
+ });
+ });
+
+ describe('scrollToElement with Selector', () => {
+ it('scrolls to element', () => {
+ commonUtils.scrollToElement(`#${id}`);
+ expect(window.scrollTo).toHaveBeenCalledWith({
+ behavior: 'smooth',
+ top: elemTop,
+ });
+ });
+
+ it('scrolls to element with offset', () => {
+ const offset = 50;
+ commonUtils.scrollToElement(`#${id}`, { offset });
+ expect(window.scrollTo).toHaveBeenCalledWith({
+ behavior: 'smooth',
+ top: elemTop + offset,
+ });
});
});
describe('scrollToElementWithContext', () => {
- it('scrolls with context', () => {
- commonUtils.scrollToElementWithContext();
- expect($.fn.animate).toHaveBeenCalledWith(
- {
- scrollTop: elemTop - windowHeight * 0.1,
- },
- expect.any(Number),
- );
+      // This is what the implementation of scrollToElementWithContext
+      // scrolls to; in case we change the implementation,
+      // it needs to be adjusted
+ const elementTopWithContext = elemTop - windowHeight * 0.1;
+
+ it('with HTMLElement scrolls with context', () => {
+ commonUtils.scrollToElementWithContext(elem);
+ expect(window.scrollTo).toHaveBeenCalledWith({
+ behavior: 'smooth',
+ top: elementTopWithContext,
+ });
+ });
+
+ it('with Selector scrolls with context', () => {
+ commonUtils.scrollToElementWithContext(`#${id}`);
+ expect(window.scrollTo).toHaveBeenCalledWith({
+ behavior: 'smooth',
+ top: elementTopWithContext,
+ });
});
});
});
describe('debounceByAnimationFrame', () => {
- it('debounces a function to allow a maximum of one call per animation frame', done => {
+ it('debounces a function to allow a maximum of one call per animation frame', (done) => {
const spy = jest.fn();
const debouncedSpy = commonUtils.debounceByAnimationFrame(spy);
window.requestAnimationFrame(() => {
@@ -404,54 +433,54 @@ describe('common_utils', () => {
describe('backOff', () => {
beforeEach(() => {
// shortcut our timeouts otherwise these tests will take a long time to finish
- jest.spyOn(window, 'setTimeout').mockImplementation(cb => setImmediate(cb, 0));
+ jest.spyOn(window, 'setTimeout').mockImplementation((cb) => setImmediate(cb, 0));
});
- it('solves the promise from the callback', done => {
+ it('solves the promise from the callback', (done) => {
const expectedResponseValue = 'Success!';
commonUtils
.backOff((next, stop) =>
- new Promise(resolve => {
+ new Promise((resolve) => {
resolve(expectedResponseValue);
})
- .then(resp => {
+ .then((resp) => {
stop(resp);
})
.catch(done.fail),
)
- .then(respBackoff => {
+ .then((respBackoff) => {
expect(respBackoff).toBe(expectedResponseValue);
done();
})
.catch(done.fail);
});
- it('catches the rejected promise from the callback ', done => {
+ it('catches the rejected promise from the callback ', (done) => {
const errorMessage = 'Mistakes were made!';
commonUtils
.backOff((next, stop) => {
new Promise((resolve, reject) => {
reject(new Error(errorMessage));
})
- .then(resp => {
+ .then((resp) => {
stop(resp);
})
- .catch(err => stop(err));
+ .catch((err) => stop(err));
})
- .catch(errBackoffResp => {
+ .catch((errBackoffResp) => {
expect(errBackoffResp instanceof Error).toBe(true);
expect(errBackoffResp.message).toBe(errorMessage);
done();
});
});
- it('solves the promise correctly after retrying a third time', done => {
+ it('solves the promise correctly after retrying a third time', (done) => {
let numberOfCalls = 1;
const expectedResponseValue = 'Success!';
commonUtils
.backOff((next, stop) =>
Promise.resolve(expectedResponseValue)
- .then(resp => {
+ .then((resp) => {
if (numberOfCalls < 3) {
numberOfCalls += 1;
next();
@@ -461,7 +490,7 @@ describe('common_utils', () => {
})
.catch(done.fail),
)
- .then(respBackoff => {
+ .then((respBackoff) => {
const timeouts = window.setTimeout.mock.calls.map(([, timeout]) => timeout);
expect(timeouts).toEqual([2000, 4000]);
@@ -471,10 +500,10 @@ describe('common_utils', () => {
.catch(done.fail);
});
- it('rejects the backOff promise after timing out', done => {
+ it('rejects the backOff promise after timing out', (done) => {
commonUtils
- .backOff(next => next(), 64000)
- .catch(errBackoffResp => {
+ .backOff((next) => next(), 64000)
+ .catch((errBackoffResp) => {
const timeouts = window.setTimeout.mock.calls.map(([, timeout]) => timeout);
expect(timeouts).toEqual([2000, 4000, 8000, 16000, 32000, 32000]);
@@ -485,27 +514,6 @@ describe('common_utils', () => {
});
});
- describe('resetFavicon', () => {
- beforeEach(() => {
- const favicon = document.createElement('link');
- favicon.setAttribute('id', 'favicon');
- favicon.setAttribute('data-original-href', 'default/favicon');
- document.body.appendChild(favicon);
- });
-
- afterEach(() => {
- document.body.removeChild(document.getElementById('favicon'));
- });
-
- it('should reset page favicon to the default icon', () => {
- const favicon = document.getElementById('favicon');
- favicon.setAttribute('href', 'new/favicon');
- commonUtils.resetFavicon();
-
- expect(document.getElementById('favicon').getAttribute('href')).toEqual('default/favicon');
- });
- });
-
describe('spriteIcon', () => {
let beforeGon;
@@ -533,8 +541,8 @@ describe('common_utils', () => {
});
describe('convertObjectProps*', () => {
- const mockConversionFunction = prop => `${prop}_converted`;
- const isEmptyObject = obj =>
+ const mockConversionFunction = (prop) => `${prop}_converted`;
+ const isEmptyObject = (obj) =>
typeof obj === 'object' && obj !== null && Object.keys(obj).length === 0;
const mockObjects = {
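The updated common_utils specs above swap the jQuery `$.fn.animate`/`$.fn.offset` mocks for `window.scrollTo` and `Element.prototype.getBoundingClientRect`. For orientation, here is a minimal sketch of a `scrollToElement` helper that would satisfy those expectations — an assumption for illustration, not the actual GitLab implementation, which likely also accounts for the current scroll position and fixed headers.

// Accepts either an HTMLElement or a CSS selector and scrolls smoothly to it.
const scrollToElement = (elementOrSelector, { offset = 0, behavior = 'smooth' } = {}) => {
  const el =
    typeof elementOrSelector === 'string'
      ? document.querySelector(elementOrSelector)
      : elementOrSelector;
  const { top } = el.getBoundingClientRect();
  window.scrollTo({ behavior, top: top + offset });
};

// scrollToElementWithContext leaves roughly 10% of the viewport above the target,
// matching the elemTop - windowHeight * 0.1 expectation in the specs above.
const scrollToElementWithContext = (elementOrSelector) =>
  scrollToElement(elementOrSelector, { offset: -window.innerHeight * 0.1 });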
diff --git a/spec/frontend/lib/utils/datetime_range_spec.js b/spec/frontend/lib/utils/datetime_range_spec.js
index 8b1f284615d..996a8e2e47b 100644
--- a/spec/frontend/lib/utils/datetime_range_spec.js
+++ b/spec/frontend/lib/utils/datetime_range_spec.js
@@ -64,7 +64,7 @@ describe('Date time range utils', () => {
};
Object.entries(rangeTypes).forEach(([type, examples]) => {
- examples.forEach(example => expect(getRangeType(example)).toEqual(type));
+ examples.forEach((example) => expect(getRangeType(example)).toEqual(type));
});
});
});
diff --git a/spec/frontend/lib/utils/datetime_utility_spec.js b/spec/frontend/lib/utils/datetime_utility_spec.js
index 6092b44720f..66efd43262b 100644
--- a/spec/frontend/lib/utils/datetime_utility_spec.js
+++ b/spec/frontend/lib/utils/datetime_utility_spec.js
@@ -566,7 +566,7 @@ describe('getDatesInRange', () => {
it('applies mapper function if provided for each item in range', () => {
const d1 = new Date('2019-01-01');
const d2 = new Date('2019-01-31');
- const formatter = date => date.getDate();
+ const formatter = (date) => date.getDate();
const range = datetimeUtility.getDatesInRange(d1, d2, formatter);
@@ -608,6 +608,92 @@ describe('secondsToDays', () => {
});
});
+describe('nDaysAfter', () => {
+ const date = new Date('2019-07-16T00:00:00.000Z');
+
+ it.each`
+ numberOfDays | expectedResult
+ ${1} | ${new Date('2019-07-17T00:00:00.000Z').valueOf()}
+ ${90} | ${new Date('2019-10-14T00:00:00.000Z').valueOf()}
+ ${-1} | ${new Date('2019-07-15T00:00:00.000Z').valueOf()}
+ ${0} | ${date.valueOf()}
+ ${0.9} | ${date.valueOf()}
+ `('returns $numberOfDays day(s) after the provided date', ({ numberOfDays, expectedResult }) => {
+ expect(datetimeUtility.nDaysAfter(date, numberOfDays)).toBe(expectedResult);
+ });
+});
+
+describe('nDaysBefore', () => {
+ const date = new Date('2019-07-16T00:00:00.000Z');
+
+ it.each`
+ numberOfDays | expectedResult
+ ${1} | ${new Date('2019-07-15T00:00:00.000Z').valueOf()}
+ ${90} | ${new Date('2019-04-17T00:00:00.000Z').valueOf()}
+ ${-1} | ${new Date('2019-07-17T00:00:00.000Z').valueOf()}
+ ${0} | ${date.valueOf()}
+ ${0.9} | ${new Date('2019-07-15T00:00:00.000Z').valueOf()}
+ `('returns $numberOfDays day(s) before the provided date', ({ numberOfDays, expectedResult }) => {
+ expect(datetimeUtility.nDaysBefore(date, numberOfDays)).toBe(expectedResult);
+ });
+});
+
+describe('nMonthsAfter', () => {
+ // February has 28 days
+ const feb2019 = new Date('2019-02-15T00:00:00.000Z');
+ // Except in 2020, it had 29 days
+ const feb2020 = new Date('2020-02-15T00:00:00.000Z');
+ // April has 30 days
+ const apr2020 = new Date('2020-04-15T00:00:00.000Z');
+ // May has 31 days
+ const may2020 = new Date('2020-05-15T00:00:00.000Z');
+
+ it.each`
+ date | numberOfMonths | expectedResult
+ ${feb2019} | ${1} | ${new Date('2019-03-15T00:00:00.000Z').valueOf()}
+ ${feb2020} | ${1} | ${new Date('2020-03-15T00:00:00.000Z').valueOf()}
+ ${apr2020} | ${1} | ${new Date('2020-05-15T00:00:00.000Z').valueOf()}
+ ${may2020} | ${1} | ${new Date('2020-06-15T00:00:00.000Z').valueOf()}
+ ${may2020} | ${12} | ${new Date('2021-05-15T00:00:00.000Z').valueOf()}
+ ${may2020} | ${-1} | ${new Date('2020-04-15T00:00:00.000Z').valueOf()}
+ ${may2020} | ${0} | ${may2020.valueOf()}
+ ${may2020} | ${0.9} | ${may2020.valueOf()}
+ `(
+ 'returns $numberOfMonths month(s) after the provided date',
+ ({ date, numberOfMonths, expectedResult }) => {
+ expect(datetimeUtility.nMonthsAfter(date, numberOfMonths)).toBe(expectedResult);
+ },
+ );
+});
+
+describe('nMonthsBefore', () => {
+ // The previous month (February) has 28 days
+ const march2019 = new Date('2019-03-15T00:00:00.000Z');
+ // Except in 2020, it had 29 days
+ const march2020 = new Date('2020-03-15T00:00:00.000Z');
+ // The previous month (April) has 30 days
+ const may2020 = new Date('2020-05-15T00:00:00.000Z');
+ // The previous month (May) has 31 days
+ const june2020 = new Date('2020-06-15T00:00:00.000Z');
+
+ it.each`
+ date | numberOfMonths | expectedResult
+ ${march2019} | ${1} | ${new Date('2019-02-15T00:00:00.000Z').valueOf()}
+ ${march2020} | ${1} | ${new Date('2020-02-15T00:00:00.000Z').valueOf()}
+ ${may2020} | ${1} | ${new Date('2020-04-15T00:00:00.000Z').valueOf()}
+ ${june2020} | ${1} | ${new Date('2020-05-15T00:00:00.000Z').valueOf()}
+ ${june2020} | ${12} | ${new Date('2019-06-15T00:00:00.000Z').valueOf()}
+ ${june2020} | ${-1} | ${new Date('2020-07-15T00:00:00.000Z').valueOf()}
+ ${june2020} | ${0} | ${june2020.valueOf()}
+ ${june2020} | ${0.9} | ${new Date('2020-05-15T00:00:00.000Z').valueOf()}
+ `(
+ 'returns $numberOfMonths month(s) before the provided date',
+ ({ date, numberOfMonths, expectedResult }) => {
+ expect(datetimeUtility.nMonthsBefore(date, numberOfMonths)).toBe(expectedResult);
+ },
+ );
+});
+
describe('approximateDuration', () => {
it.each`
seconds
@@ -731,3 +817,83 @@ describe('datesMatch', () => {
expect(datetimeUtility.datesMatch(date1, date2)).toBe(expected);
});
});
+
+describe('format24HourTimeStringFromInt', () => {
+ const expectedFormattedTimes = [
+ [0, '00:00'],
+ [2, '02:00'],
+ [6, '06:00'],
+ [9, '09:00'],
+ [10, '10:00'],
+ [16, '16:00'],
+ [22, '22:00'],
+ [32, ''],
+ [NaN, ''],
+ ['Invalid Int', ''],
+ [null, ''],
+ [undefined, ''],
+ ];
+
+ expectedFormattedTimes.forEach(([timeInt, expectedTimeStringIn24HourNotation]) => {
+ it(`formats ${timeInt} as ${expectedTimeStringIn24HourNotation}`, () => {
+ expect(datetimeUtility.format24HourTimeStringFromInt(timeInt)).toBe(
+ expectedTimeStringIn24HourNotation,
+ );
+ });
+ });
+});
+
+describe('getOverlappingDaysInPeriods', () => {
+ const start = new Date(2021, 0, 11);
+ const end = new Date(2021, 0, 13);
+
+ describe('when date periods overlap', () => {
+ const givenPeriodLeft = new Date(2021, 0, 11);
+ const givenPeriodRight = new Date(2021, 0, 14);
+
+ it('returns an overlap object that contains the number of days overlapping, the start date of the overlap and the end date of the overlap', () => {
+ expect(
+ datetimeUtility.getOverlappingDaysInPeriods(
+ { start, end },
+ { start: givenPeriodLeft, end: givenPeriodRight },
+ ),
+ ).toEqual({
+ daysOverlap: 2,
+ overlapStartDate: givenPeriodLeft.getTime(),
+ overlapEndDate: end.getTime(),
+ });
+ });
+ });
+
+ describe('when date periods do not overlap', () => {
+ const givenPeriodLeft = new Date(2021, 0, 9);
+ const givenPeriodRight = new Date(2021, 0, 10);
+
+ it('returns an overlap object that contains a 0 value for days overlapping', () => {
+ expect(
+ datetimeUtility.getOverlappingDaysInPeriods(
+ { start, end },
+ { start: givenPeriodLeft, end: givenPeriodRight },
+ ),
+ ).toEqual({ daysOverlap: 0 });
+ });
+ });
+
+ describe('when date periods contain an invalid Date', () => {
+ const startInvalid = new Date(NaN);
+ const endInvalid = new Date(NaN);
+ const error = __('Invalid period');
+
+ it('throws an exception when the left period contains an invalid date', () => {
+ expect(() =>
+ datetimeUtility.getOverlappingDaysInPeriods({ start, end }, { start: startInvalid, end }),
+ ).toThrow(error);
+ });
+
+ it('throws an exception when the right period contains an invalid date', () => {
+ expect(() =>
+ datetimeUtility.getOverlappingDaysInPeriods({ start, end }, { start, end: endInvalid }),
+ ).toThrow(error);
+ });
+ });
+});
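The new nDaysAfter/nDaysBefore/nMonthsAfter/nMonthsBefore specs compare results against `Date#valueOf()`, which suggests the helpers return millisecond timestamps. One plausible shape consistent with those expectations — a sketch, not necessarily the exact GitLab source:

// Date#setDate and Date#setMonth return the updated timestamp in milliseconds,
// which is why the tables above compare against Date#valueOf(). The Date setters
// truncate fractional inputs, matching the 0.9 rows in the tables.
export const nDaysAfter = (date, numberOfDays) =>
  new Date(date).setDate(date.getDate() + numberOfDays);

export const nDaysBefore = (date, numberOfDays) => nDaysAfter(date, -numberOfDays);

export const nMonthsAfter = (date, numberOfMonths) =>
  new Date(date).setMonth(date.getMonth() + numberOfMonths);

export const nMonthsBefore = (date, numberOfMonths) => nMonthsAfter(date, -numberOfMonths);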
diff --git a/spec/frontend/lib/utils/dom_utils_spec.js b/spec/frontend/lib/utils/dom_utils_spec.js
index f5c2a797df5..7c4c20e651f 100644
--- a/spec/frontend/lib/utils/dom_utils_spec.js
+++ b/spec/frontend/lib/utils/dom_utils_spec.js
@@ -45,7 +45,7 @@ describe('DOM Utils', () => {
});
describe('canScrollUp', () => {
- [1, 100].forEach(scrollTop => {
+ [1, 100].forEach((scrollTop) => {
it(`is true if scrollTop is > 0 (${scrollTop})`, () => {
expect(
canScrollUp({
@@ -55,7 +55,7 @@ describe('DOM Utils', () => {
});
});
- [0, -10].forEach(scrollTop => {
+ [0, -10].forEach((scrollTop) => {
it(`is false if scrollTop is <= 0 (${scrollTop})`, () => {
expect(
canScrollUp({
diff --git a/spec/frontend/lib/utils/favicon_ci_spec.js b/spec/frontend/lib/utils/favicon_ci_spec.js
new file mode 100644
index 00000000000..e35b008b862
--- /dev/null
+++ b/spec/frontend/lib/utils/favicon_ci_spec.js
@@ -0,0 +1,50 @@
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import { setFaviconOverlay, resetFavicon } from '~/lib/utils/favicon';
+import { setCiStatusFavicon } from '~/lib/utils/favicon_ci';
+
+jest.mock('~/lib/utils/favicon');
+
+const TEST_URL = '/test/pipelinable/1';
+const TEST_FAVICON = '/favicon.test.ico';
+
+describe('~/lib/utils/favicon_ci', () => {
+ let mock;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ mock = null;
+ });
+
+ describe('setCiStatusFavicon', () => {
+ it.each`
+ response | setFaviconOverlayCalls | resetFaviconCalls
+ ${{}} | ${[]} | ${[[]]}
+ ${{ favicon: TEST_FAVICON }} | ${[[TEST_FAVICON]]} | ${[]}
+ `(
+ 'with response=$response',
+ async ({ response, setFaviconOverlayCalls, resetFaviconCalls }) => {
+ mock.onGet(TEST_URL).replyOnce(200, response);
+
+ expect(setFaviconOverlay).not.toHaveBeenCalled();
+ expect(resetFavicon).not.toHaveBeenCalled();
+
+ await setCiStatusFavicon(TEST_URL);
+
+ expect(setFaviconOverlay.mock.calls).toEqual(setFaviconOverlayCalls);
+ expect(resetFavicon.mock.calls).toEqual(resetFaviconCalls);
+ },
+ );
+
+ it('with error', async () => {
+ mock.onGet(TEST_URL).replyOnce(500);
+
+ await expect(setCiStatusFavicon(TEST_URL)).rejects.toEqual(expect.any(Error));
+ expect(resetFavicon).toHaveBeenCalled();
+ });
+ });
+});
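For reference, a hedged sketch of a `setCiStatusFavicon` implementation that would satisfy the table above (fetch the pipeline status, apply the returned favicon overlay, otherwise fall back to the default, and reset on error) — the body is an assumption; only the imports are taken from the spec:

import axios from '~/lib/utils/axios_utils';
import { setFaviconOverlay, resetFavicon } from '~/lib/utils/favicon';

export const setCiStatusFavicon = (pageUrl) =>
  axios
    .get(pageUrl)
    .then(({ data }) => {
      if (data && data.favicon) {
        return setFaviconOverlay(data.favicon);
      }
      return resetFavicon();
    })
    .catch((error) => {
      // Reset the favicon before re-throwing so callers still see the failure.
      resetFavicon();
      throw error;
    });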
diff --git a/spec/frontend/lib/utils/favicon_spec.js b/spec/frontend/lib/utils/favicon_spec.js
new file mode 100644
index 00000000000..1b986432b8a
--- /dev/null
+++ b/spec/frontend/lib/utils/favicon_spec.js
@@ -0,0 +1,39 @@
+import { FaviconOverlayManager } from '@gitlab/favicon-overlay';
+import * as faviconUtils from '~/lib/utils/favicon';
+
+jest.mock('@gitlab/favicon-overlay');
+
+describe('~/lib/utils/favicon', () => {
+ afterEach(() => {
+ faviconUtils.clearMemoizeCache();
+ });
+
+ describe.each`
+ fnName | managerFn | args
+ ${'setFaviconOverlay'} | ${FaviconOverlayManager.setFaviconOverlay} | ${['test']}
+ ${'resetFavicon'} | ${FaviconOverlayManager.resetFaviconOverlay} | ${[]}
+ `('$fnName', ({ fnName, managerFn, args }) => {
+ const call = () => faviconUtils[fnName](...args);
+
+ it('initializes only once when called', async () => {
+ expect(FaviconOverlayManager.initialize).not.toHaveBeenCalled();
+
+ // Call twice so we can make sure initialize is only called once
+ await call();
+ await call();
+
+ expect(FaviconOverlayManager.initialize).toHaveBeenCalledWith({
+ faviconSelector: '#favicon',
+ });
+ expect(FaviconOverlayManager.initialize).toHaveBeenCalledTimes(1);
+ });
+
+ it('passes call to manager', async () => {
+ expect(managerFn).not.toHaveBeenCalled();
+
+ await call();
+
+ expect(managerFn).toHaveBeenCalledWith(...args);
+ });
+ });
+});
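The favicon spec above exercises a memoized initialization: `FaviconOverlayManager.initialize` must run only once across calls, and `clearMemoizeCache` resets that state between tests. A minimal sketch under that assumption — the memoization mechanism itself is a guess:

import { memoize } from 'lodash';
import { FaviconOverlayManager } from '@gitlab/favicon-overlay';

// Memoized so repeated calls share a single initialize() invocation.
const initFaviconManager = memoize(async () => {
  await FaviconOverlayManager.initialize({ faviconSelector: '#favicon' });
});

export const clearMemoizeCache = () => initFaviconManager.cache.clear();

export const setFaviconOverlay = async (path) => {
  await initFaviconManager();
  return FaviconOverlayManager.setFaviconOverlay(path);
};

export const resetFavicon = async () => {
  await initFaviconManager();
  return FaviconOverlayManager.resetFaviconOverlay();
};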
diff --git a/spec/frontend/lib/utils/forms_spec.js b/spec/frontend/lib/utils/forms_spec.js
index a69be99ab98..f65bd8ffe0c 100644
--- a/spec/frontend/lib/utils/forms_spec.js
+++ b/spec/frontend/lib/utils/forms_spec.js
@@ -1,7 +1,7 @@
import { serializeForm, serializeFormObject, isEmptyValue } from '~/lib/utils/forms';
describe('lib/utils/forms', () => {
- const createDummyForm = inputs => {
+ const createDummyForm = (inputs) => {
const form = document.createElement('form');
form.innerHTML = inputs
@@ -9,7 +9,7 @@ describe('lib/utils/forms', () => {
let str = ``;
if (type === 'select') {
str = `<select name="${name}">`;
- value.forEach(v => {
+ value.forEach((v) => {
if (v.length > 0) {
str += `<option value="${v}"></option> `;
}
@@ -81,8 +81,8 @@ describe('lib/utils/forms', () => {
jest
.spyOn(FormData.prototype, 'getAll')
- .mockImplementation(name =>
- formData.map(elem => (elem.name === name ? elem.value : undefined)),
+ .mockImplementation((name) =>
+ formData.map((elem) => (elem.name === name ? elem.value : undefined)),
);
const data = serializeForm(form);
diff --git a/spec/frontend/lib/utils/highlight_spec.js b/spec/frontend/lib/utils/highlight_spec.js
index 638bbf65ae9..f34e203f9a4 100644
--- a/spec/frontend/lib/utils/highlight_spec.js
+++ b/spec/frontend/lib/utils/highlight_spec.js
@@ -8,13 +8,13 @@ describe('highlight', () => {
});
it(`should return an empty string in the case of invalid inputs`, () => {
- [null, undefined].forEach(input => {
+ [null, undefined].forEach((input) => {
expect(highlight(input, 'match')).toBe('');
});
});
it(`should return the original value if match is null, undefined, or ''`, () => {
- [null, undefined].forEach(match => {
+ [null, undefined].forEach((match) => {
expect(highlight('gitlab', match)).toBe('gitlab');
});
});
diff --git a/spec/frontend/lib/utils/icon_utils_spec.js b/spec/frontend/lib/utils/icon_utils_spec.js
index f798dc6744d..db1f174703b 100644
--- a/spec/frontend/lib/utils/icon_utils_spec.js
+++ b/spec/frontend/lib/utils/icon_utils_spec.js
@@ -34,13 +34,13 @@ describe('Icon utils', () => {
});
it('extracts svg icon path content from sprite icons', () => {
- return getSvgIconPathContent(mockName).then(path => {
+ return getSvgIconPathContent(mockName).then((path) => {
expect(path).toBe(mockPath);
});
});
it('returns null if icon path content does not exist', () => {
- return getSvgIconPathContent('missing-icon').then(path => {
+ return getSvgIconPathContent('missing-icon').then((path) => {
expect(path).toBe(null);
});
});
@@ -58,22 +58,22 @@ describe('Icon utils', () => {
});
it('returns null', () => {
- return getSvgIconPathContent(mockName).then(path => {
+ return getSvgIconPathContent(mockName).then((path) => {
expect(path).toBe(null);
});
});
it('extracts svg icon path content, after 2 attempts', () => {
return getSvgIconPathContent(mockName)
- .then(path1 => {
+ .then((path1) => {
expect(path1).toBe(null);
return getSvgIconPathContent(mockName);
})
- .then(path2 => {
+ .then((path2) => {
expect(path2).toBe(null);
return getSvgIconPathContent(mockName);
})
- .then(path3 => {
+ .then((path3) => {
expect(path3).toBe(mockPath);
});
});
diff --git a/spec/frontend/lib/utils/mock_data.js b/spec/frontend/lib/utils/mock_data.js
index c466b0cd1ed..df1f79529e7 100644
--- a/spec/frontend/lib/utils/mock_data.js
+++ b/spec/frontend/lib/utils/mock_data.js
@@ -3,6 +3,3 @@ export const faviconDataUrl =
export const overlayDataUrl =
'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAA85JREFUWAntVllIVGEUPv/9b46O41KplYN7PeRkti8TjQlhCUGh3MmeQugpIsGKAi2soIcIooiohxYKK2daqDAlIpIiWwxtQaJcaHE0d5tMrbn37z9XRqfR0TvVW56Hudf//uec72zfEWBCJjIwkYGJDPzvGSD/KgExN3Oi2Q+2DJgSDYQEMwItVGH1iZGmJw/Si1y+/PwVAMYYib22MYc/8hVQFgKDEfYoId0KYzagAQebsos/ewMZoeB9wdffcTYpQSaCTWHKoqSQaDk7zkIt0+aCUR8BelEHrf3dUNv9AcqbnsHtT5UKB/hTASh0SLYjnjb/CIDRJi0XiFAaJOpCD8zLpdb4NB66b1OfelthX815dtdRRfiti2aAXLvVLiMQ6olGyztGDkSo4JGGXk8/QFdGpYzpHG2GBQTDhtgVhPEaVbbVpvI6GJz22rv4TcAfrYI1x7Rj5MWWAppomKFVVb2302SFzUkZHAbkG+0b1+Gh77yNYjrmqnWTrLBLRxdvBWv8qlFujH/kYjJYyvLkj71t78zAUvzMAMnHhpN4zf9UREJhd8omyssxu1IgazQDwDnHUcNuH6vhPIE1fmuBzHt74Hn7W89jWGtcAjoaIDOFrdcMYJBkgOCoaRF0Lj0oglddDbCj6tRvKjphEpgjkzEQs2YAKsNxMzjn3nKurhzK+Ly7xe28ua8TwgMMcHJZnvvT0BPtEEKM4tDJ+C8GvIIk4ylINIXVZ0EUKJxYuh3mhCeokbudl6TtVc88dfBdLwbyaWB6zQCYQJpBYSrDGQxBQ/ZWRM2B+VNmQnVnHWx7elyNuL2/R336co7KyJR8CL9oLgEuFlREevWUkEl6uGwpVEG4FBm0OEf9N10NMgPlvWYAuNVwsWDKvcUNYsHUWTCZ13ysyFEXe6TO6aC8CUr9IiK+A05TQrc8yjwmxARHeeMAPlfQJw+AQRwu0YhL/GDXi9NwufG+S8dYkuYMqIb4SsWthotlNMOUCOM6r+G9cqXxPmd1dqrBav/o1zJy2l5/NUjJA/VORwYuFnOUaTQcPs9wMqwV++Xv8oADxKAcZ8nLPr8AoGW+xR6HSqYk3GodAz2QNj0V+Gr26dT9ASNH5239Pf0gktVNWZca8ZvfAFBprWS6hSu1pqt++Y0PD+WIwDAhIWQGtzvSHDbcodfFUFB9hg1Gjs5LXqIdFL+acFBl+FddqYwdxsWC3I70OvgfUaA65zhq2O2c8VxYcyIGFTVlXegYtvCXANCQZJMobjVcLMjtSK/IcEgyOOe8Ve5w7ryKDefp2P3+C/5ohv8HZmVLAAAAAElFTkSuQmCC';
-
-export const faviconWithOverlayDataUrl =
- 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAGtElEQVRYR8WXf3CT9R3H35/nSdIQIktrCf0RStI0FYRjVBAccxTq5MDBKUoz4ZyjbPO87q4yBsPDMdExTjlvIsdQexyI0oMBeuKhdjsNHhwcMgpjIlublLIm/UlJKZSSJs/z/e6+T5v0CQ22wB/7/pPck8/383l9fj6fEOec8H88NAjAS1LwknsFSVLU8WXd1rtm85LUeKnwGQKzjj3s33azvsEAAEIlnn8ByHL4/Pa7BgAQLCm8QOBOh88vDQkQeMxjMkcQEYKqYsyJWWPhgs/80TsFafzROJtkNIXFfYI0pfXqPeennjqlxPUNikBoTuEmEF+lCRBV3G0aQiWFrwH8d30AWJubGdiEfZzdGqDEEwbICnADQGGHry7zTr0X94IlnnMACggwAWh0+PxOvb5EBGqmTTNkj7ySxWS62C+g5Usm1Zn95YXG24UQ+r5n75Li6Ux4LBkyc7/4t5YSLSr6Lgg9UvBLcKocMEYKON/gGB3YoA/bcGFCczzLQdieLE9bHL66FakBSjzCU0cSAHDa4at7aLhG9XLBEk8zAVnxZxyIEhBy+PwFgwAafpxvNzK5NZUhrX28JA07Cl6SmtvcOUwm4ZAouHj7ad+jMrN1dqb3iG7oS4EYPh2etQS+XiesC8TQ3ZD3yZJsHuUPgbMcI+ej5v3ncv5PasNlk1p7JJnzJL+I0/O5h+u0VCdqIDi78AQRHuirft3hYJzQPvawPydVdPI+/OnTnNNKBjYVXHRa8rFFGeb4w1he0wZ7d/84IXTEhxzxUsgitB2LPFGwvgGUfLSeZUpEXqEqrIdz0nr4iHOUfeOccb/tNMtutzWHPeWcJc0aMxm5lkxYDGloj1zB+Sv/RXXTSXzaeBwSY3j+bHNv2bdtMYCbpHtRkNFd36xFQN3tXkZhvgP1fdPi5kMEXL4oIXKVAA58M8aCVQs84BYLXi5aDq+zGJTqYr+i4PV2vHxmJ/7WUoOn2i/jz6yhW7JjrdSV8U4fQFV+I2Q4UIsedMCSSlcsgp72WtnSajOhzDsBNtsYfFD8e+Rbs4fdIG98uw9vnj+AX7FWvk4NHZOXXphF/INx2SpJIU2L8L4GDAoMwlP9kWSg6awcKVs83tyUnY5Dj75+W8bjutae3o5d9X/HTiWAuUtOS6RUOR8Hp48TxjgU/AMSeKJ1Ej/tMWXG1sxwGt98sBxe5+xhe64XVLiK2Z9XwNgdRLXyzQsC4ENwelIHAFxDBOdh1qdCdNLCoon8RnY+HZ6/+TtzPhTZweAxlJ94C5VqoI2U3a7rACzJjQqgBd24CGscos1kxPQZ38fqSU/jhQkDvN9lrKG7FeUnNuPVKcvwYOb4hGgvi2HSx8vwRKyJkVLl+hk43gdBAcfADBD1cA4RXIdZ1EN1Zjqem+DGoUc2oigjMUlvaV8YL/1qPVpuhOG+JwdH5m1Okn3m6Eacaz3V2jeI9uTbVYY6AKOSKw8MX0MBg2lXjh3r3Hk4s7ASdrMtSWxnoBpZIzIwP3e69lxv3Gay4q/F6zDJ5kq6s6amEnsafJ0Db8P9JKkx1w5wPJuY36IToojgNMzb8rLwmsuB2kW7YDWMSCgTg+YXx9+AQZKxdUaFZiju+a2Mi8uvnH0f2/2f9g4AVE4z4LlTilrlehag9xIpEam4jO4DXfdaV97nwtH5byW137VYD5Yc2YAz4YAGIYx2RLq0z1Sex8l//fUWfBI83jh4Kd1PEuAwqVGjWEwSS+nJJmt0sWu86d0frMQCR/LbWQ8hDAxlXMgUV69Q67ubv0q5FUNAlHKmVLnXE/gfREpUiaQHqAizXbO0UN98BMTSo39Cw7UW7E2Rc728qJGHP68ASbQyNYCQTkAUzCSwQ+CwvSjnsQPGLOnI/C0YO3Lwxq5yhhtqb1KNpGqT1TXvigJU0jh33xpAf7NymoGNDJ9sJtPkYuNkqTh7KnY8vGaoeZPy93+GA1joe4kzzv/SVLqvYngA/dFgVfnlb8tjtm6Ux+I39y/Gqone24IQM+GxL15UO3q7WrhsnhJatCs8PAC9md3OrPK0goaDyEj7uXsuXi0qg4HkIUGE52XHNqmXIl0RGOiHoUV7xb+v5K14SC39At79Ximdhc8ekjImuiyjsXryUszLnY40yThIhSi4bbUHsbfBJ6ZKE5dpQdz4HQOgf2a8tLvklY+M6cuvSnJummxSZ46+X+7biMzaRnSu84IauNYsE5HCOX+HDCPWi7DrKW8/BTcVZ2UN8Me57kc5448TaCYR5XJwC0BtHMwPjs/SgAP1pfuCqSL8Pxhr/wunLWAOAAAAAElFTkSuQmCC';
diff --git a/spec/frontend/lib/utils/poll_spec.js b/spec/frontend/lib/utils/poll_spec.js
index 135c752b5cb..f2ca5df3672 100644
--- a/spec/frontend/lib/utils/poll_spec.js
+++ b/spec/frontend/lib/utils/poll_spec.js
@@ -50,7 +50,7 @@ describe('Poll', () => {
};
});
- it('calls the success callback when no header for interval is provided', done => {
+ it('calls the success callback when no header for interval is provided', (done) => {
mockServiceCall({ status: 200 });
setup();
@@ -62,7 +62,7 @@ describe('Poll', () => {
});
});
- it('calls the error callback when the http request returns an error', done => {
+ it('calls the error callback when the http request returns an error', (done) => {
mockServiceCall({ status: 500 }, true);
setup();
@@ -74,7 +74,7 @@ describe('Poll', () => {
});
});
- it('skips the error callback when request is aborted', done => {
+ it('skips the error callback when request is aborted', (done) => {
mockServiceCall({ status: 0 }, true);
setup();
@@ -87,7 +87,7 @@ describe('Poll', () => {
});
});
- it('should call the success callback when the interval header is -1', done => {
+ it('should call the success callback when the interval header is -1', (done) => {
mockServiceCall({ status: 200, headers: { 'poll-interval': -1 } });
setup()
.then(() => {
@@ -100,8 +100,8 @@ describe('Poll', () => {
});
describe('for 2xx status code', () => {
- successCodes.forEach(httpCode => {
- it(`starts polling when http status is ${httpCode} and interval header is provided`, done => {
+ successCodes.forEach((httpCode) => {
+ it(`starts polling when http status is ${httpCode} and interval header is provided`, (done) => {
mockServiceCall({ status: httpCode, headers: { 'poll-interval': 1 } });
const Polling = new Poll({
@@ -129,7 +129,7 @@ describe('Poll', () => {
});
describe('with delayed initial request', () => {
- it('delays the first request', async done => {
+ it('delays the first request', async (done) => {
mockServiceCall({ status: 200, headers: { 'poll-interval': 1 } });
const Polling = new Poll({
@@ -158,7 +158,7 @@ describe('Poll', () => {
});
describe('stop', () => {
- it('stops polling when method is called', done => {
+ it('stops polling when method is called', (done) => {
mockServiceCall({ status: 200, headers: { 'poll-interval': 1 } });
const Polling = new Poll({
@@ -186,7 +186,7 @@ describe('Poll', () => {
});
describe('enable', () => {
- it('should enable polling upon a response', done => {
+ it('should enable polling upon a response', (done) => {
mockServiceCall({ status: 200 });
const Polling = new Poll({
resource: service,
@@ -212,7 +212,7 @@ describe('Poll', () => {
});
describe('restart', () => {
- it('should restart polling when its called', done => {
+ it('should restart polling when its called', (done) => {
mockServiceCall({ status: 200, headers: { 'poll-interval': 1 } });
const Polling = new Poll({
diff --git a/spec/frontend/lib/utils/poll_until_complete_spec.js b/spec/frontend/lib/utils/poll_until_complete_spec.js
index c1df30756fd..38203c460e3 100644
--- a/spec/frontend/lib/utils/poll_until_complete_spec.js
+++ b/spec/frontend/lib/utils/poll_until_complete_spec.js
@@ -70,7 +70,7 @@ describe('pollUntilComplete', () => {
});
it('rejects with the error response', () =>
- pollUntilComplete(endpoint).catch(error => {
+ pollUntilComplete(endpoint).catch((error) => {
expect(error.response.data).toBe(errorMessage);
}));
});
diff --git a/spec/frontend/lib/utils/text_utility_spec.js b/spec/frontend/lib/utils/text_utility_spec.js
index 9c50bf577dc..1f3659b5c76 100644
--- a/spec/frontend/lib/utils/text_utility_spec.js
+++ b/spec/frontend/lib/utils/text_utility_spec.js
@@ -300,13 +300,13 @@ describe('text_utility', () => {
});
it(`should return an empty string for invalid inputs`, () => {
- [undefined, null, 4, {}, true, new Date()].forEach(input => {
+ [undefined, null, 4, {}, true, new Date()].forEach((input) => {
expect(textUtils.truncateNamespace(input)).toBe('');
});
});
it(`should not alter strings that aren't formatted as namespaces`, () => {
- ['', ' ', '\t', 'a', 'a \\ b'].forEach(input => {
+ ['', ' ', '\t', 'a', 'a \\ b'].forEach((input) => {
expect(textUtils.truncateNamespace(input)).toBe(input);
});
});
diff --git a/spec/frontend/lib/utils/url_utility_spec.js b/spec/frontend/lib/utils/url_utility_spec.js
index 0f9290e36b5..5846acbdb79 100644
--- a/spec/frontend/lib/utils/url_utility_spec.js
+++ b/spec/frontend/lib/utils/url_utility_spec.js
@@ -15,7 +15,7 @@ const shas = {
],
};
-const setWindowLocation = value => {
+const setWindowLocation = (value) => {
Object.defineProperty(window, 'location', {
writable: true,
value,
@@ -337,7 +337,7 @@ describe('URL utility', () => {
describe('urlContainsSha', () => {
it('returns true when there is a valid 40-character SHA1 hash in the URL', () => {
- shas.valid.forEach(sha => {
+ shas.valid.forEach((sha) => {
expect(
urlUtils.urlContainsSha({ url: `http://urlstuff/${sha}/moreurlstuff` }),
).toBeTruthy();
@@ -345,7 +345,7 @@ describe('URL utility', () => {
});
it('returns false when there is not a valid 40-character SHA1 hash in the URL', () => {
- shas.invalid.forEach(str => {
+ shas.invalid.forEach((str) => {
expect(urlUtils.urlContainsSha({ url: `http://urlstuff/${str}/moreurlstuff` })).toBeFalsy();
});
});
@@ -356,8 +356,8 @@ describe('URL utility', () => {
let invalidUrls = [];
beforeAll(() => {
- validUrls = shas.valid.map(sha => `http://urlstuff/${sha}/moreurlstuff`);
- invalidUrls = shas.invalid.map(str => `http://urlstuff/${str}/moreurlstuff`);
+ validUrls = shas.valid.map((sha) => `http://urlstuff/${sha}/moreurlstuff`);
+ invalidUrls = shas.invalid.map((str) => `http://urlstuff/${str}/moreurlstuff`);
});
it('returns the valid 40-character SHA1 hash from the URL', () => {
@@ -367,7 +367,7 @@ describe('URL utility', () => {
});
it('returns null from a URL with no valid 40-character SHA1 hash', () => {
- invalidUrls.forEach(url => {
+ invalidUrls.forEach((url) => {
expect(urlUtils.getShaFromUrl({ url })).toBeNull();
});
});
@@ -589,11 +589,11 @@ describe('URL utility', () => {
];
describe('with URL constructor support', () => {
- it.each(safeUrls)('returns true for %s', url => {
+ it.each(safeUrls)('returns true for %s', (url) => {
expect(urlUtils.isSafeURL(url)).toBe(true);
});
- it.each(unsafeUrls)('returns false for %s', url => {
+ it.each(unsafeUrls)('returns false for %s', (url) => {
expect(urlUtils.isSafeURL(url)).toBe(false);
});
});
@@ -807,7 +807,7 @@ describe('URL utility', () => {
it.each([[httpProtocol], [httpsProtocol]])(
'when no url passed, returns correct protocol for %i from window location',
- protocol => {
+ (protocol) => {
setWindowLocation({
protocol,
});
diff --git a/spec/frontend/lib/utils/users_cache_spec.js b/spec/frontend/lib/utils/users_cache_spec.js
index 7ed87123482..4034f39ee9c 100644
--- a/spec/frontend/lib/utils/users_cache_spec.js
+++ b/spec/frontend/lib/utils/users_cache_spec.js
@@ -1,4 +1,4 @@
-import Api from '~/api';
+import * as UserApi from '~/api/user_api';
import UsersCache from '~/lib/utils/users_cache';
describe('UsersCache', () => {
@@ -88,10 +88,12 @@ describe('UsersCache', () => {
let apiSpy;
beforeEach(() => {
- jest.spyOn(Api, 'users').mockImplementation((query, options) => apiSpy(query, options));
+ jest
+ .spyOn(UserApi, 'getUsers')
+ .mockImplementation((query, options) => apiSpy(query, options));
});
- it('stores and returns data from API call if cache is empty', done => {
+ it('stores and returns data from API call if cache is empty', (done) => {
apiSpy = (query, options) => {
expect(query).toBe('');
expect(options).toEqual({
@@ -104,7 +106,7 @@ describe('UsersCache', () => {
};
UsersCache.retrieve(dummyUsername)
- .then(user => {
+ .then((user) => {
expect(user).toBe(dummyUser);
expect(UsersCache.internalStorage[dummyUsername]).toBe(dummyUser);
})
@@ -112,7 +114,7 @@ describe('UsersCache', () => {
.catch(done.fail);
});
- it('returns undefined if Ajax call fails and cache is empty', done => {
+ it('returns undefined if Ajax call fails and cache is empty', (done) => {
const dummyError = new Error('server exploded');
apiSpy = (query, options) => {
@@ -125,21 +127,21 @@ describe('UsersCache', () => {
};
UsersCache.retrieve(dummyUsername)
- .then(user => done.fail(`Received unexpected user: ${JSON.stringify(user)}`))
- .catch(error => {
+ .then((user) => done.fail(`Received unexpected user: ${JSON.stringify(user)}`))
+ .catch((error) => {
expect(error).toBe(dummyError);
})
.then(done)
.catch(done.fail);
});
- it('makes no Ajax call if matching data exists', done => {
+ it('makes no Ajax call if matching data exists', (done) => {
UsersCache.internalStorage[dummyUsername] = dummyUser;
apiSpy = () => done.fail(new Error('expected no Ajax call!'));
UsersCache.retrieve(dummyUsername)
- .then(user => {
+ .then((user) => {
expect(user).toBe(dummyUser);
})
.then(done)
@@ -151,11 +153,11 @@ describe('UsersCache', () => {
let apiSpy;
beforeEach(() => {
- jest.spyOn(Api, 'user').mockImplementation(id => apiSpy(id));
+ jest.spyOn(UserApi, 'getUser').mockImplementation((id) => apiSpy(id));
});
- it('stores and returns data from API call if cache is empty', done => {
- apiSpy = id => {
+ it('stores and returns data from API call if cache is empty', (done) => {
+ apiSpy = (id) => {
expect(id).toBe(dummyUserId);
return Promise.resolve({
@@ -164,7 +166,7 @@ describe('UsersCache', () => {
};
UsersCache.retrieveById(dummyUserId)
- .then(user => {
+ .then((user) => {
expect(user).toBe(dummyUser);
expect(UsersCache.internalStorage[dummyUserId]).toBe(dummyUser);
})
@@ -172,31 +174,31 @@ describe('UsersCache', () => {
.catch(done.fail);
});
- it('returns undefined if Ajax call fails and cache is empty', done => {
+ it('returns undefined if Ajax call fails and cache is empty', (done) => {
const dummyError = new Error('server exploded');
- apiSpy = id => {
+ apiSpy = (id) => {
expect(id).toBe(dummyUserId);
return Promise.reject(dummyError);
};
UsersCache.retrieveById(dummyUserId)
- .then(user => done.fail(`Received unexpected user: ${JSON.stringify(user)}`))
- .catch(error => {
+ .then((user) => done.fail(`Received unexpected user: ${JSON.stringify(user)}`))
+ .catch((error) => {
expect(error).toBe(dummyError);
})
.then(done)
.catch(done.fail);
});
- it('makes no Ajax call if matching data exists', done => {
+ it('makes no Ajax call if matching data exists', (done) => {
UsersCache.internalStorage[dummyUserId] = dummyUser;
apiSpy = () => done.fail(new Error('expected no Ajax call!'));
UsersCache.retrieveById(dummyUserId)
- .then(user => {
+ .then((user) => {
expect(user).toBe(dummyUser);
})
.then(done)
@@ -208,11 +210,11 @@ describe('UsersCache', () => {
let apiSpy;
beforeEach(() => {
- jest.spyOn(Api, 'userStatus').mockImplementation(id => apiSpy(id));
+ jest.spyOn(UserApi, 'getUserStatus').mockImplementation((id) => apiSpy(id));
});
- it('stores and returns data from API call if cache is empty', done => {
- apiSpy = id => {
+ it('stores and returns data from API call if cache is empty', (done) => {
+ apiSpy = (id) => {
expect(id).toBe(dummyUserId);
return Promise.resolve({
@@ -221,7 +223,7 @@ describe('UsersCache', () => {
};
UsersCache.retrieveStatusById(dummyUserId)
- .then(userStatus => {
+ .then((userStatus) => {
expect(userStatus).toBe(dummyUserStatus);
expect(UsersCache.internalStorage[dummyUserId].status).toBe(dummyUserStatus);
})
@@ -229,25 +231,25 @@ describe('UsersCache', () => {
.catch(done.fail);
});
- it('returns undefined if Ajax call fails and cache is empty', done => {
+ it('returns undefined if Ajax call fails and cache is empty', (done) => {
const dummyError = new Error('server exploded');
- apiSpy = id => {
+ apiSpy = (id) => {
expect(id).toBe(dummyUserId);
return Promise.reject(dummyError);
};
UsersCache.retrieveStatusById(dummyUserId)
- .then(userStatus => done.fail(`Received unexpected user: ${JSON.stringify(userStatus)}`))
- .catch(error => {
+ .then((userStatus) => done.fail(`Received unexpected user: ${JSON.stringify(userStatus)}`))
+ .catch((error) => {
expect(error).toBe(dummyError);
})
.then(done)
.catch(done.fail);
});
- it('makes no Ajax call if matching data exists', done => {
+ it('makes no Ajax call if matching data exists', (done) => {
UsersCache.internalStorage[dummyUserId] = {
status: dummyUserStatus,
};
@@ -255,7 +257,7 @@ describe('UsersCache', () => {
apiSpy = () => done.fail(new Error('expected no Ajax call!'));
UsersCache.retrieveStatusById(dummyUserId)
- .then(userStatus => {
+ .then((userStatus) => {
expect(userStatus).toBe(dummyUserStatus);
})
.then(done)
diff --git a/spec/frontend/line_highlighter_spec.js b/spec/frontend/line_highlighter_spec.js
index 0da1ea1df2d..1091bbf8aba 100644
--- a/spec/frontend/line_highlighter_spec.js
+++ b/spec/frontend/line_highlighter_spec.js
@@ -2,6 +2,7 @@
import $ from 'jquery';
import LineHighlighter from '~/line_highlighter';
+import * as utils from '~/lib/utils/common_utils';
describe('LineHighlighter', () => {
const testContext = {};
@@ -50,10 +51,10 @@ describe('LineHighlighter', () => {
});
it('scrolls to the first highlighted line on initial load', () => {
- const spy = jest.spyOn($, 'scrollTo');
+ jest.spyOn(utils, 'scrollToElement');
new LineHighlighter({ hash: '#L5-25' });
- expect(spy).toHaveBeenCalledWith('#L5', expect.anything());
+ expect(utils.scrollToElement).toHaveBeenCalledWith('#L5', expect.anything());
});
it('discards click events', () => {
@@ -88,9 +89,7 @@ describe('LineHighlighter', () => {
describe('clickHandler', () => {
it('handles clicking on a child icon element', () => {
const spy = jest.spyOn(testContext.class, 'setHash');
- $('#L13 i')
- .mousedown()
- .click();
+ $('#L13 [data-testid="link-icon"]').mousedown().click();
expect(spy).toHaveBeenCalledWith(13);
expect($('#LC13')).toHaveClass(testContext.css);
diff --git a/spec/frontend/logs/components/environment_logs_spec.js b/spec/frontend/logs/components/environment_logs_spec.js
index e32deaea993..351ff317feb 100644
--- a/spec/frontend/logs/components/environment_logs_spec.js
+++ b/spec/frontend/logs/components/environment_logs_spec.js
@@ -20,7 +20,7 @@ jest.mock('~/lib/utils/scroll_utils');
const module = 'environmentLogs';
jest.mock('lodash/throttle', () =>
- jest.fn(func => {
+ jest.fn((func) => {
return func;
}),
);
@@ -59,7 +59,7 @@ describe('EnvironmentLogs', () => {
const findInfiniteScroll = () => wrapper.find({ ref: 'infiniteScroll' });
const findLogTrace = () => wrapper.find({ ref: 'logTrace' });
const findLogFooter = () => wrapper.find({ ref: 'logFooter' });
- const getInfiniteScrollAttr = attr => parseInt(findInfiniteScroll().attributes(attr), 10);
+ const getInfiniteScrollAttr = (attr) => parseInt(findInfiniteScroll().attributes(attr), 10);
const mockSetInitData = () => {
state.pods.options = mockPods;
@@ -181,11 +181,7 @@ describe('EnvironmentLogs', () => {
it('shows a logs trace', () => {
expect(findLogTrace().text()).toBe('');
- expect(
- findLogTrace()
- .find('.js-build-loader-animation')
- .isVisible(),
- ).toBe(true);
+ expect(findLogTrace().find('.js-build-loader-animation').isVisible()).toBe(true);
});
});
@@ -217,7 +213,7 @@ describe('EnvironmentLogs', () => {
describe('state with data', () => {
beforeEach(() => {
- dispatch.mockImplementation(actionName => {
+ dispatch.mockImplementation((actionName) => {
if (actionName === `${module}/setInitData`) {
mockSetInitData();
} else if (actionName === `${module}/showPodLogs`) {
diff --git a/spec/frontend/logs/components/log_advanced_filters_spec.js b/spec/frontend/logs/components/log_advanced_filters_spec.js
index 3a3c23c95b8..dfa8913a301 100644
--- a/spec/frontend/logs/components/log_advanced_filters_spec.js
+++ b/spec/frontend/logs/components/log_advanced_filters_spec.js
@@ -18,10 +18,10 @@ describe('LogAdvancedFilters', () => {
const findFilteredSearch = () => wrapper.find(GlFilteredSearch);
const findTimeRangePicker = () => wrapper.find({ ref: 'dateTimePicker' });
- const getSearchToken = type =>
+ const getSearchToken = (type) =>
findFilteredSearch()
.props('availableTokens')
- .filter(token => token.type === type)[0];
+ .filter((token) => token.type === type)[0];
const mockStateLoading = () => {
state.timeRange.selected = defaultTimeRange;
@@ -73,6 +73,8 @@ describe('LogAdvancedFilters', () => {
});
it('displays search tokens', () => {
+ initWrapper();
+
expect(getSearchToken(TOKEN_TYPE_POD_NAME)).toMatchObject({
title: 'Pod name',
unique: true,
diff --git a/spec/frontend/logs/components/log_control_buttons_spec.js b/spec/frontend/logs/components/log_control_buttons_spec.js
index dff38ecb15e..55b28445786 100644
--- a/spec/frontend/logs/components/log_control_buttons_spec.js
+++ b/spec/frontend/logs/components/log_control_buttons_spec.js
@@ -9,7 +9,7 @@ describe('LogControlButtons', () => {
const findScrollToBottom = () => wrapper.find('.js-scroll-to-bottom');
const findRefreshBtn = () => wrapper.find('.js-refresh-log');
- const initWrapper = opts => {
+ const initWrapper = (opts) => {
wrapper = shallowMount(LogControlButtons, {
listeners: {
scrollUp: () => {},
diff --git a/spec/frontend/logs/components/log_simple_filters_spec.js b/spec/frontend/logs/components/log_simple_filters_spec.js
index b819f0d25a8..5bd42fd7dbc 100644
--- a/spec/frontend/logs/components/log_simple_filters_spec.js
+++ b/spec/frontend/logs/components/log_simple_filters_spec.js
@@ -18,7 +18,7 @@ describe('LogSimpleFilters', () => {
const findPodsDropdownItems = () =>
findPodsDropdown()
.findAll(GlDropdownItem)
- .filter(item => !('disabled' in item.attributes()));
+ .filter((item) => !('disabled' in item.attributes()));
const mockPodsLoading = () => {
state.pods.options = [];
diff --git a/spec/frontend/logs/stores/actions_spec.js b/spec/frontend/logs/stores/actions_spec.js
index e4501abdc76..bc58f1e677f 100644
--- a/spec/frontend/logs/stores/actions_spec.js
+++ b/spec/frontend/logs/stores/actions_spec.js
@@ -62,7 +62,7 @@ describe('Logs Store actions', () => {
const latestGetParams = () => mock.history.get[mock.history.get.length - 1].params;
- convertToFixedRange.mockImplementation(range => {
+ convertToFixedRange.mockImplementation((range) => {
if (range === defaultTimeRange) {
return { ...mockDefaultRange };
}
diff --git a/spec/frontend/logs/stores/mutations_spec.js b/spec/frontend/logs/stores/mutations_spec.js
index 4a095e0f26e..51f6494b011 100644
--- a/spec/frontend/logs/stores/mutations_spec.js
+++ b/spec/frontend/logs/stores/mutations_spec.js
@@ -23,7 +23,7 @@ describe('Logs Store Mutations', () => {
});
it('ensures mutation types are correctly named', () => {
- Object.keys(types).forEach(k => {
+ Object.keys(types).forEach((k) => {
expect(k).toEqual(types[k]);
});
});
diff --git a/spec/frontend/matchers.js b/spec/frontend/matchers.js
index 50feba86a61..945abdafe9a 100644
--- a/spec/frontend/matchers.js
+++ b/spec/frontend/matchers.js
@@ -10,7 +10,7 @@ export default {
const iconReferences = [].slice.apply(element.querySelectorAll('svg use'));
const matchingIcon = iconReferences.find(
- reference => reference.parentNode.getAttribute('data-testid') === `${iconName}-icon`,
+ (reference) => reference.parentNode.getAttribute('data-testid') === `${iconName}-icon`,
);
const pass = Boolean(matchingIcon);
@@ -21,7 +21,7 @@ export default {
} else {
message = `${element.outerHTML} does not contain the sprite icon "${iconName}"!`;
- const existingIcons = iconReferences.map(reference => {
+ const existingIcons = iconReferences.map((reference) => {
const iconUrl = reference.getAttribute('href');
return `"${iconUrl.replace(/^.+#/, '')}"`;
});
@@ -40,10 +40,7 @@ export default {
let clearMatch;
try {
- clearReceived = received
- .replace(/\s\s+/gm, ' ')
- .replace(/\s\./gm, '.')
- .trim();
+ clearReceived = received.replace(/\s\s+/gm, ' ').replace(/\s\./gm, '.').trim();
} catch (e) {
return { actual: received, message: 'The received value is not a string', pass: false };
}
diff --git a/spec/frontend/matchers_spec.js b/spec/frontend/matchers_spec.js
index 0a2478f978a..dfd6f754c72 100644
--- a/spec/frontend/matchers_spec.js
+++ b/spec/frontend/matchers_spec.js
@@ -3,7 +3,7 @@ describe('Custom jest matchers', () => {
describe('malformed input', () => {
it.each([null, 1, Symbol, Array, Object])(
'fails graciously if the expected value is %s',
- expected => {
+ (expected) => {
expect(expected).not.toMatchInterpolatedText('null');
},
);
@@ -11,7 +11,7 @@ describe('Custom jest matchers', () => {
describe('malformed matcher', () => {
it.each([null, 1, Symbol, Array, Object])(
'fails graciously if the matcher is %s',
- matcher => {
+ (matcher) => {
expect('null').not.toMatchInterpolatedText(matcher);
},
);
diff --git a/spec/frontend/members/components/action_buttons/access_request_action_buttons_spec.js b/spec/frontend/members/components/action_buttons/access_request_action_buttons_spec.js
index 9a8434a1222..30166e2d5ae 100644
--- a/spec/frontend/members/components/action_buttons/access_request_action_buttons_spec.js
+++ b/spec/frontend/members/components/action_buttons/access_request_action_buttons_spec.js
@@ -49,7 +49,7 @@ describe('AccessRequestActionButtons', () => {
describe('when member is the current user', () => {
it('sets `message` prop correctly', () => {
expect(findRemoveMemberButton().props('message')).toBe(
- `Are you sure you want to withdraw your access request for "${member.source.name}"`,
+ `Are you sure you want to withdraw your access request for "${member.source.fullName}"`,
);
});
});
@@ -64,7 +64,7 @@ describe('AccessRequestActionButtons', () => {
});
expect(findRemoveMemberButton().props('message')).toBe(
- `Are you sure you want to deny ${member.user.name}'s request to join "${member.source.name}"`,
+ `Are you sure you want to deny ${member.user.name}'s request to join "${member.source.fullName}"`,
);
});
});
diff --git a/spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js b/spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js
index 887b21dc1d0..fe63f9bfaa7 100644
--- a/spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js
+++ b/spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js
@@ -39,7 +39,7 @@ describe('InviteActionButtons', () => {
it('sets props correctly', () => {
expect(findRemoveMemberButton().props()).toEqual({
memberId: member.id,
- message: `Are you sure you want to revoke the invitation for ${member.invite.email} to join "${member.source.name}"`,
+ message: `Are you sure you want to revoke the invitation for ${member.invite.email} to join "${member.source.fullName}"`,
title: 'Revoke invite',
isAccessRequest: false,
icon: 'remove',
diff --git a/spec/frontend/members/components/action_buttons/resend_invite_button_spec.js b/spec/frontend/members/components/action_buttons/resend_invite_button_spec.js
index a48942dd277..05ea0dc2886 100644
--- a/spec/frontend/members/components/action_buttons/resend_invite_button_spec.js
+++ b/spec/frontend/members/components/action_buttons/resend_invite_button_spec.js
@@ -57,10 +57,8 @@ describe('ResendInviteButton', () => {
it('displays form with correct action and inputs', () => {
expect(findForm().attributes('action')).toBe('/groups/foo-bar/-/group_members/1/resend_invite');
- expect(
- findForm()
- .find('input[name="authenticity_token"]')
- .attributes('value'),
- ).toBe('mock-csrf-token');
+ expect(findForm().find('input[name="authenticity_token"]').attributes('value')).toBe(
+ 'mock-csrf-token',
+ );
});
});
diff --git a/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js b/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js
index b03e80a537d..f28e5040006 100644
--- a/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js
+++ b/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js
@@ -39,7 +39,7 @@ describe('UserActionButtons', () => {
it('sets props correctly', () => {
expect(findRemoveMemberButton().props()).toEqual({
memberId: member.id,
- message: `Are you sure you want to remove ${member.user.name} from "${member.source.name}"`,
+ message: `Are you sure you want to remove ${member.user.name} from "${member.source.fullName}"`,
title: 'Remove member',
isAccessRequest: false,
icon: 'remove',
@@ -56,7 +56,7 @@ describe('UserActionButtons', () => {
});
expect(findRemoveMemberButton().props('message')).toBe(
- `Are you sure you want to remove this orphaned member from "${orphanedMember.source.name}"`,
+ `Are you sure you want to remove this orphaned member from "${orphanedMember.source.fullName}"`,
);
});
});
diff --git a/spec/frontend/members/components/avatars/user_avatar_spec.js b/spec/frontend/members/components/avatars/user_avatar_spec.js
index 7d6a9065975..411ec1a54de 100644
--- a/spec/frontend/members/components/avatars/user_avatar_spec.js
+++ b/spec/frontend/members/components/avatars/user_avatar_spec.js
@@ -22,7 +22,7 @@ describe('UserAvatar', () => {
const getByText = (text, options) =>
createWrapper(within(wrapper.element).findByText(text, options));
- const findStatusEmoji = emoji => wrapper.find(`gl-emoji[data-name="${emoji}"]`);
+ const findStatusEmoji = (emoji) => wrapper.find(`gl-emoji[data-name="${emoji}"]`);
afterEach(() => {
wrapper.destroy();
diff --git a/spec/frontend/members/components/filter_sort/filter_sort_container_spec.js b/spec/frontend/members/components/filter_sort/filter_sort_container_spec.js
index 91277ae6d03..0d9f9acbbeb 100644
--- a/spec/frontend/members/components/filter_sort/filter_sort_container_spec.js
+++ b/spec/frontend/members/components/filter_sort/filter_sort_container_spec.js
@@ -10,7 +10,7 @@ localVue.use(Vuex);
describe('FilterSortContainer', () => {
let wrapper;
- const createComponent = state => {
+ const createComponent = (state) => {
const store = new Vuex.Store({
state: {
filteredSearchBar: {
diff --git a/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js b/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js
index ca885000c2f..2bed1e803ca 100644
--- a/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js
+++ b/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js
@@ -10,7 +10,7 @@ localVue.use(Vuex);
describe('MembersFilteredSearchBar', () => {
let wrapper;
- const createComponent = state => {
+ const createComponent = (state) => {
const store = new Vuex.Store({
state: {
sourceId: 1,
@@ -86,7 +86,10 @@ describe('MembersFilteredSearchBar', () => {
token: GlFilteredSearchToken,
unique: true,
operators: [{ value: '=', description: 'is' }],
- options: [{ value: 'exclude', title: 'Direct' }, { value: 'only', title: 'Inherited' }],
+ options: [
+ { value: 'exclude', title: 'Direct' },
+ { value: 'only', title: 'Inherited' },
+ ],
},
]);
});
diff --git a/spec/frontend/members/components/filter_sort/sort_dropdown_spec.js b/spec/frontend/members/components/filter_sort/sort_dropdown_spec.js
index 6fe67aded3d..d98c9116512 100644
--- a/spec/frontend/members/components/filter_sort/sort_dropdown_spec.js
+++ b/spec/frontend/members/components/filter_sort/sort_dropdown_spec.js
@@ -12,7 +12,7 @@ describe('SortDropdown', () => {
const URL_HOST = 'https://localhost/';
- const createComponent = state => {
+ const createComponent = (state) => {
const store = new Vuex.Store({
state: {
sourceId: 1,
@@ -38,10 +38,10 @@ describe('SortDropdown', () => {
const findSortDirectionToggle = () =>
findSortingComponent().find('button[title="Sort direction"]');
const findDropdownToggle = () => wrapper.find('button[aria-haspopup="true"]');
- const findDropdownItemByText = text =>
+ const findDropdownItemByText = (text) =>
wrapper
.findAll(GlSortingItem)
- .wrappers.find(dropdownItemWrapper => dropdownItemWrapper.text() === text);
+ .wrappers.find((dropdownItemWrapper) => dropdownItemWrapper.text() === text);
describe('dropdown options', () => {
beforeEach(() => {
@@ -76,7 +76,7 @@ describe('SortDropdown', () => {
createComponent();
- expectedDropdownItems.forEach(expectedDropdownItem => {
+ expectedDropdownItems.forEach((expectedDropdownItem) => {
const dropdownItem = findDropdownItemByText(expectedDropdownItem.label);
expect(dropdownItem).not.toBe(null);
diff --git a/spec/frontend/members/components/modals/leave_modal_spec.js b/spec/frontend/members/components/modals/leave_modal_spec.js
index d7acf12212c..dca47d1f6af 100644
--- a/spec/frontend/members/components/modals/leave_modal_spec.js
+++ b/spec/frontend/members/components/modals/leave_modal_spec.js
@@ -60,11 +60,11 @@ describe('LeaveModal', () => {
});
it('displays modal title', () => {
- expect(getByText(`Leave "${member.source.name}"`).exists()).toBe(true);
+ expect(getByText(`Leave "${member.source.fullName}"`).exists()).toBe(true);
});
it('displays modal body', () => {
- expect(getByText(`Are you sure you want to leave "${member.source.name}"?`).exists()).toBe(
+ expect(getByText(`Are you sure you want to leave "${member.source.fullName}"?`).exists()).toBe(
true,
);
});
diff --git a/spec/frontend/members/components/modals/remove_group_link_modal_spec.js b/spec/frontend/members/components/modals/remove_group_link_modal_spec.js
index 593dbcd28ba..234857419b6 100644
--- a/spec/frontend/members/components/modals/remove_group_link_modal_spec.js
+++ b/spec/frontend/members/components/modals/remove_group_link_modal_spec.js
@@ -31,7 +31,7 @@ describe('RemoveGroupLinkModal', () => {
});
};
- const createComponent = state => {
+ const createComponent = (state) => {
wrapper = mount(RemoveGroupLinkModal, {
localVue,
store: createStore(state),
diff --git a/spec/frontend/members/components/table/created_at_spec.js b/spec/frontend/members/components/table/created_at_spec.js
index a9f809cd805..dc1f62722ab 100644
--- a/spec/frontend/members/components/table/created_at_spec.js
+++ b/spec/frontend/members/components/table/created_at_spec.js
@@ -13,7 +13,7 @@ describe('CreatedAt', () => {
let wrapper;
- const createComponent = propsData => {
+ const createComponent = (propsData) => {
wrapper = mount(CreatedAt, {
propsData: {
date,
diff --git a/spec/frontend/members/components/table/expiration_datepicker_spec.js b/spec/frontend/members/components/table/expiration_datepicker_spec.js
index ba1b2256e76..0caaafb8d7b 100644
--- a/spec/frontend/members/components/table/expiration_datepicker_spec.js
+++ b/spec/frontend/members/components/table/expiration_datepicker_spec.js
@@ -25,7 +25,7 @@ describe('ExpirationDatepicker', () => {
actions = {
updateMemberExpiration: jest.fn(
() =>
- new Promise(resolve => {
+ new Promise((resolve) => {
resolveUpdateMemberExpiration = resolve;
}),
),
@@ -72,11 +72,9 @@ describe('ExpirationDatepicker', () => {
});
it('sets `minDate` prop as tomorrow', () => {
- expect(
- findDatepicker()
- .props('minDate')
- .toISOString(),
- ).toBe(new Date('2020-3-16').toISOString());
+ expect(findDatepicker().props('minDate').toISOString()).toBe(
+ new Date('2020-3-16').toISOString(),
+ );
});
it('sets `target` prop as `null` so datepicker opens on focus', () => {
diff --git a/spec/frontend/members/components/table/expires_at_spec.js b/spec/frontend/members/components/table/expires_at_spec.js
index cf0fc78656e..321008727cd 100644
--- a/spec/frontend/members/components/table/expires_at_spec.js
+++ b/spec/frontend/members/components/table/expires_at_spec.js
@@ -10,7 +10,7 @@ describe('ExpiresAt', () => {
let wrapper;
- const createComponent = propsData => {
+ const createComponent = (propsData) => {
wrapper = mount(ExpiresAt, {
propsData,
directives: {
@@ -22,7 +22,7 @@ describe('ExpiresAt', () => {
const getByText = (text, options) =>
createWrapper(within(wrapper.element).getByText(text, options));
- const getTooltipDirective = elementWrapper => getBinding(elementWrapper.element, 'gl-tooltip');
+ const getTooltipDirective = (elementWrapper) => getBinding(elementWrapper.element, 'gl-tooltip');
afterEach(() => {
wrapper.destroy();
diff --git a/spec/frontend/members/components/table/member_avatar_spec.js b/spec/frontend/members/components/table/member_avatar_spec.js
index 98177893c18..4341dfbbaf9 100644
--- a/spec/frontend/members/components/table/member_avatar_spec.js
+++ b/spec/frontend/members/components/table/member_avatar_spec.js
@@ -9,7 +9,7 @@ import InviteAvatar from '~/members/components/avatars/invite_avatar.vue';
describe('MemberList', () => {
let wrapper;
- const createComponent = propsData => {
+ const createComponent = (propsData) => {
wrapper = shallowMount(MemberAvatar, {
propsData: {
isCurrentUser: false,
diff --git a/spec/frontend/members/components/table/member_source_spec.js b/spec/frontend/members/components/table/member_source_spec.js
index 48ac06f32f6..95547090aed 100644
--- a/spec/frontend/members/components/table/member_source_spec.js
+++ b/spec/frontend/members/components/table/member_source_spec.js
@@ -6,12 +6,12 @@ import MemberSource from '~/members/components/table/member_source.vue';
describe('MemberSource', () => {
let wrapper;
- const createComponent = propsData => {
+ const createComponent = (propsData) => {
wrapper = mount(MemberSource, {
propsData: {
memberSource: {
id: 102,
- name: 'Foo bar',
+ fullName: 'Foo bar',
webUrl: 'https://gitlab.com/groups/foo-bar',
},
...propsData,
@@ -25,7 +25,7 @@ describe('MemberSource', () => {
const getByText = (text, options) =>
createWrapper(getByTextHelper(wrapper.element, text, options));
- const getTooltipDirective = elementWrapper => getBinding(elementWrapper.element, 'gl-tooltip');
+ const getTooltipDirective = (elementWrapper) => getBinding(elementWrapper.element, 'gl-tooltip');
afterEach(() => {
wrapper.destroy();
diff --git a/spec/frontend/members/components/table/members_table_spec.js b/spec/frontend/members/components/table/members_table_spec.js
index 9945cc7ee57..dbaccde069c 100644
--- a/spec/frontend/members/components/table/members_table_spec.js
+++ b/spec/frontend/members/components/table/members_table_spec.js
@@ -39,7 +39,7 @@ describe('MembersTable', () => {
});
};
- const createComponent = state => {
+ const createComponent = (state) => {
wrapper = mount(MembersTable, {
localVue,
store: createStore(state),
@@ -63,6 +63,10 @@ describe('MembersTable', () => {
createWrapper(getByTestIdHelper(wrapper.element, id, options));
const findTable = () => wrapper.find(GlTable);
+ const findTableCellByMemberId = (tableCellLabel, memberId) =>
+ getByTestId(`members-table-row-${memberId}`).find(
+ `[data-label="${tableCellLabel}"][role="cell"]`,
+ );
afterEach(() => {
wrapper.destroy();
@@ -100,10 +104,7 @@ describe('MembersTable', () => {
if (expectedComponent) {
expect(
- wrapper
- .find(`[data-label="${label}"][role="cell"]`)
- .find(expectedComponent)
- .exists(),
+ wrapper.find(`[data-label="${label}"][role="cell"]`).find(expectedComponent).exists(),
).toBe(true);
}
});
@@ -117,10 +118,7 @@ describe('MembersTable', () => {
expect(actionField.exists()).toBe(true);
expect(actionField.classes('gl-sr-only')).toBe(true);
expect(
- wrapper
- .find(`[data-label="Actions"][role="cell"]`)
- .find(MemberActionButtons)
- .exists(),
+ wrapper.find(`[data-label="Actions"][role="cell"]`).find(MemberActionButtons).exists(),
).toBe(true);
});
@@ -137,16 +135,30 @@ describe('MembersTable', () => {
canRemove: true,
};
+ const memberNoPermissions = {
+ ...memberMock,
+ id: 2,
+ };
+
describe.each`
permission | members
- ${'canUpdate'} | ${[memberCanUpdate]}
- ${'canRemove'} | ${[memberCanRemove]}
- ${'canResend'} | ${[invite]}
+ ${'canUpdate'} | ${[memberNoPermissions, memberCanUpdate]}
+ ${'canRemove'} | ${[memberNoPermissions, memberCanRemove]}
+ ${'canResend'} | ${[memberNoPermissions, invite]}
`('when one of the members has $permission permissions', ({ members }) => {
it('renders the "Actions" field', () => {
createComponent({ members, tableFields: ['actions'] });
expect(getByTestId('col-actions').exists()).toBe(true);
+
+ expect(findTableCellByMemberId('Actions', members[0].id).classes()).toStrictEqual([
+ 'col-actions',
+ 'gl-display-none!',
+ 'gl-display-lg-table-cell!',
+ ]);
+ expect(findTableCellByMemberId('Actions', members[1].id).classes()).toStrictEqual([
+ 'col-actions',
+ ]);
});
});
@@ -177,12 +189,9 @@ describe('MembersTable', () => {
it('renders badge in "Max role" field', () => {
createComponent({ members: [memberMock], tableFields: ['maxRole'] });
- expect(
- wrapper
- .find(`[data-label="Max role"][role="cell"]`)
- .find(GlBadge)
- .text(),
- ).toBe(memberMock.accessLevel.stringValue);
+ expect(wrapper.find(`[data-label="Max role"][role="cell"]`).find(GlBadge).text()).toBe(
+ memberMock.accessLevel.stringValue,
+ );
});
});
@@ -203,10 +212,6 @@ describe('MembersTable', () => {
it('adds QA selector to table row', () => {
createComponent();
- expect(
- findTable()
- .find('tbody tr')
- .attributes('data-qa-selector'),
- ).toBe('member_row');
+ expect(findTable().find('tbody tr').attributes('data-qa-selector')).toBe('member_row');
});
});
diff --git a/spec/frontend/members/components/table/role_dropdown_spec.js b/spec/frontend/members/components/table/role_dropdown_spec.js
index 6c6abf35bd7..96a388614f3 100644
--- a/spec/frontend/members/components/table/role_dropdown_spec.js
+++ b/spec/frontend/members/components/table/role_dropdown_spec.js
@@ -44,7 +44,7 @@ describe('RoleDropdown', () => {
const getDropdownMenu = () => within(wrapper.element).getByRole('menu');
const getByTextInDropdownMenu = (text, options = {}) =>
createWrapper(within(getDropdownMenu()).getByText(text, options));
- const getDropdownItemByText = text =>
+ const getDropdownItemByText = (text) =>
createWrapper(
within(getDropdownMenu())
.getByText(text, { selector: '[role="menuitem"] p' })
@@ -53,7 +53,7 @@ describe('RoleDropdown', () => {
const getCheckedDropdownItem = () =>
wrapper
.findAll(GlDropdownItem)
- .wrappers.find(dropdownItemWrapper => dropdownItemWrapper.props('isChecked'));
+ .wrappers.find((dropdownItemWrapper) => dropdownItemWrapper.props('isChecked'));
const findDropdownToggle = () => wrapper.find('button[aria-haspopup="true"]');
const findDropdown = () => wrapper.find(GlDropdown);
@@ -63,7 +63,7 @@ describe('RoleDropdown', () => {
});
describe('when dropdown is open', () => {
- beforeEach(done => {
+ beforeEach((done) => {
createComponent();
findDropdownToggle().trigger('click');
@@ -73,7 +73,7 @@ describe('RoleDropdown', () => {
});
it('renders all valid roles', () => {
- Object.keys(member.validRoles).forEach(role => {
+ Object.keys(member.validRoles).forEach((role) => {
expect(getDropdownItemByText(role).exists()).toBe(true);
});
});
diff --git a/spec/frontend/members/mock_data.js b/spec/frontend/members/mock_data.js
index 5674929716d..e668f2a1998 100644
--- a/spec/frontend/members/mock_data.js
+++ b/spec/frontend/members/mock_data.js
@@ -7,7 +7,7 @@ export const member = {
accessLevel: { integerValue: 50, stringValue: 'Owner' },
source: {
id: 178,
- name: 'Foo Bar',
+ fullName: 'Foo Bar',
webUrl: 'https://gitlab.com/groups/foo-bar',
},
user: {
diff --git a/spec/frontend/members/utils_spec.js b/spec/frontend/members/utils_spec.js
index 7bbfddf8fc6..7cd4e735b55 100644
--- a/spec/frontend/members/utils_spec.js
+++ b/spec/frontend/members/utils_spec.js
@@ -24,7 +24,7 @@ describe('Members Utils', () => {
it('has correct properties for each badge', () => {
const badges = generateBadges(memberMock, true);
- badges.forEach(badge => {
+ badges.forEach((badge) => {
expect(badge).toEqual(
expect.objectContaining({
show: expect.any(Boolean),
diff --git a/spec/frontend/merge_request/components/status_box_spec.js b/spec/frontend/merge_request/components/status_box_spec.js
new file mode 100644
index 00000000000..e6b6512476b
--- /dev/null
+++ b/spec/frontend/merge_request/components/status_box_spec.js
@@ -0,0 +1,81 @@
+import { nextTick } from 'vue';
+import { shallowMount } from '@vue/test-utils';
+import { GlSprintf } from '@gitlab/ui';
+import StatusBox from '~/merge_request/components/status_box.vue';
+import mrEventHub from '~/merge_request/eventhub';
+
+let wrapper;
+
+function factory(propsData) {
+ wrapper = shallowMount(StatusBox, { propsData, stubs: { GlSprintf } });
+}
+
+const testCases = [
+ {
+ name: 'Open',
+ state: 'opened',
+ class: 'status-box-open',
+ icon: 'issue-open-m',
+ },
+ {
+ name: 'Closed',
+ state: 'closed',
+ class: 'status-box-mr-closed',
+ icon: 'close',
+ },
+ {
+ name: 'Merged',
+ state: 'merged',
+ class: 'status-box-mr-merged',
+ icon: 'git-merge',
+ },
+];
+
+describe('Merge request status box component', () => {
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ testCases.forEach((testCase) => {
+ describe(`when merge request is ${testCase.name}`, () => {
+ it('renders human readable text', () => {
+ factory({
+ initialState: testCase.state,
+ });
+
+ expect(wrapper.text()).toContain(testCase.name);
+ });
+
+ it('sets css class', () => {
+ factory({
+ initialState: testCase.state,
+ });
+
+ expect(wrapper.classes()).toContain(testCase.class);
+ });
+
+ it('renders icon', () => {
+ factory({
+ initialState: testCase.state,
+ });
+
+ expect(wrapper.find('[data-testid="status-icon"]').props('name')).toBe(testCase.icon);
+ });
+ });
+ });
+
+ it('updates with eventhub event', async () => {
+ factory({
+ initialState: 'opened',
+ });
+
+ expect(wrapper.text()).toContain('Open');
+
+ mrEventHub.$emit('mr.state.updated', { state: 'closed' });
+
+ await nextTick();
+
+ expect(wrapper.text()).toContain('Closed');
+ });
+});
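
The new status_box_spec.js above exercises an event-hub pattern: the test emits 'mr.state.updated' on mrEventHub and, after nextTick, asserts that the rendered state changed. A minimal, framework-free sketch of that publish/subscribe flow (the hub and statusBox names below are illustrative only, not GitLab's actual status_box.vue implementation):

// Tiny event hub with a Vue-2-like $on/$emit surface (illustrative sketch).
const createHub = () => {
  const listeners = {};
  return {
    $on(event, fn) {
      (listeners[event] = listeners[event] || []).push(fn);
    },
    $emit(event, payload) {
      (listeners[event] || []).forEach((fn) => fn(payload));
    },
  };
};

const hub = createHub();
const statusBox = { state: 'opened' };

// A component would register a handler like this (for example in mounted()):
hub.$on('mr.state.updated', ({ state }) => {
  statusBox.state = state;
});

hub.$emit('mr.state.updated', { state: 'closed' });
console.log(statusBox.state); // => 'closed'

Driving the component through the hub, as the spec does, keeps the test focused on observable behaviour rather than on internal component state.
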
diff --git a/spec/frontend/merge_request_spec.js b/spec/frontend/merge_request_spec.js
index 1cb7206b97f..5624043260a 100644
--- a/spec/frontend/merge_request_spec.js
+++ b/spec/frontend/merge_request_spec.js
@@ -28,14 +28,11 @@ describe('MergeRequest', () => {
mock.restore();
});
- it('modifies the Markdown field', done => {
+ it('modifies the Markdown field', (done) => {
jest.spyOn($, 'ajax').mockImplementation();
const changeEvent = document.createEvent('HTMLEvents');
changeEvent.initEvent('change', true, true);
- $('input[type=checkbox]')
- .first()
- .attr('checked', true)[0]
- .dispatchEvent(changeEvent);
+ $('input[type=checkbox]').first().attr('checked', true)[0].dispatchEvent(changeEvent);
setImmediate(() => {
expect($('.js-task-list-field').val()).toBe(
'- [x] Task List Item\n- [ ]\n- [ ] Task List Item 2\n',
@@ -44,15 +41,12 @@ describe('MergeRequest', () => {
});
});
- it('ensures that a task with only spaces does not get checked incorrectly', done => {
+ it('ensures that a task with only spaces does not get checked incorrectly', (done) => {
// fixed in 'deckar01-task_list', '2.2.1' gem
jest.spyOn($, 'ajax').mockImplementation();
const changeEvent = document.createEvent('HTMLEvents');
changeEvent.initEvent('change', true, true);
- $('input[type=checkbox]')
- .last()
- .attr('checked', true)[0]
- .dispatchEvent(changeEvent);
+ $('input[type=checkbox]').last().attr('checked', true)[0].dispatchEvent(changeEvent);
setImmediate(() => {
expect($('.js-task-list-field').val()).toBe(
'- [ ] Task List Item\n- [ ]\n- [x] Task List Item 2\n',
@@ -67,7 +61,7 @@ describe('MergeRequest', () => {
const index = 3;
const checked = true;
- it('submits an ajax request on tasklist:changed', done => {
+ it('submits an ajax request on tasklist:changed', (done) => {
$('.js-task-list-field').trigger({
type: 'tasklist:changed',
detail: { lineNumber, lineSource, index, checked },
@@ -89,7 +83,7 @@ describe('MergeRequest', () => {
});
});
- it('shows an error notification when tasklist update failed', done => {
+ it('shows an error notification when tasklist update failed', (done) => {
mock
.onPatch(`${TEST_HOST}/frontend-fixtures/merge-requests-project/-/merge_requests/1.json`)
.reply(409, {});
diff --git a/spec/frontend/merge_request_tabs_spec.js b/spec/frontend/merge_request_tabs_spec.js
index 85a4ee8974e..82462036219 100644
--- a/spec/frontend/merge_request_tabs_spec.js
+++ b/spec/frontend/merge_request_tabs_spec.js
@@ -5,7 +5,6 @@ import axios from '~/lib/utils/axios_utils';
import MergeRequestTabs from '~/merge_request_tabs';
import '~/commit/pipelines/pipelines_bundle';
import '~/lib/utils/common_utils';
-import 'vendor/jquery.scrollTo';
jest.mock('~/lib/utils/webpack', () => ({
resetServiceWorkersPublicPath: jest.fn(),
@@ -14,7 +13,7 @@ jest.mock('~/lib/utils/webpack', () => ({
describe('MergeRequestTabs', () => {
const testContext = {};
const stubLocation = {};
- const setLocation = stubs => {
+ const setLocation = (stubs) => {
const defaults = {
pathname: '',
search: '',
@@ -265,7 +264,7 @@ describe('MergeRequestTabs', () => {
beforeEach(() => {
jest.spyOn(mainContent, 'getBoundingClientRect').mockReturnValue({ top: 10 });
jest.spyOn(tabContent, 'getBoundingClientRect').mockReturnValue({ top: 100 });
- jest.spyOn(document, 'querySelector').mockImplementation(selector => {
+ jest.spyOn(document, 'querySelector').mockImplementation((selector) => {
return selector === '.content-wrapper' ? mainContent : tabContent;
});
testContext.class.currentAction = 'commits';
diff --git a/spec/frontend/milestones/milestone_combobox_spec.js b/spec/frontend/milestones/milestone_combobox_spec.js
index 047484f117f..8c519abe382 100644
--- a/spec/frontend/milestones/milestone_combobox_spec.js
+++ b/spec/frontend/milestones/milestone_combobox_spec.js
@@ -1,9 +1,9 @@
+import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
-import { mount, createLocalVue } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { GlLoadingIcon, GlSearchBoxByType, GlDropdownItem } from '@gitlab/ui';
-import { s__ } from '~/locale';
import { ENTER_KEY } from '~/lib/utils/keys';
import MilestoneCombobox from '~/milestones/components/milestone_combobox.vue';
import { projectMilestones, groupMilestones } from './mock_data';
@@ -14,8 +14,7 @@ const extraLinks = [
{ text: 'Manage milestones', url: '/h5bp/html5-boilerplate/-/milestones' },
];
-const localVue = createLocalVue();
-localVue.use(Vuex);
+Vue.use(Vuex);
describe('Milestone combobox component', () => {
const projectId = '8';
@@ -29,26 +28,31 @@ describe('Milestone combobox component', () => {
let searchApiCallSpy;
const createComponent = (props = {}, attrs = {}) => {
+ const propsData = {
+ projectId,
+ groupId,
+ groupMilestonesAvailable,
+ extraLinks,
+ value: [],
+ ...props,
+ };
+
wrapper = mount(MilestoneCombobox, {
- propsData: {
- projectId,
- groupId,
- groupMilestonesAvailable,
- extraLinks,
- value: [],
- ...props,
- },
+ propsData,
attrs,
listeners: {
// simulate a parent component v-model binding
- input: selectedMilestone => {
+ input: (selectedMilestone) => {
+ // ugly hack because setProps plays badly with immediate watchers
+ // see https://github.com/vuejs/vue-test-utils/issues/1140 and
+ // https://github.com/vuejs/vue-test-utils/pull/1752
+ propsData.value = selectedMilestone;
wrapper.setProps({ value: selectedMilestone });
},
},
stubs: {
GlSearchBoxByType: true,
},
- localVue,
store: createStore(),
});
};
@@ -71,13 +75,13 @@ describe('Milestone combobox component', () => {
mock
.onGet(`/api/v4/projects/${projectId}/milestones`)
- .reply(config => projectMilestonesApiCallSpy(config));
+ .reply((config) => projectMilestonesApiCallSpy(config));
mock
.onGet(`/api/v4/groups/${groupId}/milestones`)
- .reply(config => groupMilestonesApiCallSpy(config));
+ .reply((config) => groupMilestonesApiCallSpy(config));
- mock.onGet(`/api/v4/projects/${projectId}/search`).reply(config => searchApiCallSpy(config));
+ mock.onGet(`/api/v4/projects/${projectId}/search`).reply((config) => searchApiCallSpy(config));
});
afterEach(() => {
@@ -115,7 +119,7 @@ describe('Milestone combobox component', () => {
return projectMilestoneSection
.text()
- .includes(s__('MilestoneCombobox|An error occurred while searching for milestones'));
+ .includes('An error occurred while searching for milestones');
};
const groupMilestoneSectionContainsErrorMessage = () => {
@@ -123,13 +127,13 @@ describe('Milestone combobox component', () => {
return groupMilestoneSection
.text()
- .includes(s__('MilestoneCombobox|An error occurred while searching for milestones'));
+ .includes('An error occurred while searching for milestones');
};
//
// Convenience methods
//
- const updateQuery = newQuery => {
+ const updateQuery = (newQuery) => {
findSearchBox().vm.$emit('input', newQuery);
};
@@ -141,13 +145,13 @@ describe('Milestone combobox component', () => {
findFirstGroupMilestonesDropdownItem().vm.$emit('click');
};
- const waitForRequests = ({ andClearMocks } = { andClearMocks: false }) =>
- axios.waitForAll().then(() => {
- if (andClearMocks) {
- projectMilestonesApiCallSpy.mockClear();
- groupMilestonesApiCallSpy.mockClear();
- }
- });
+ const waitForRequests = async ({ andClearMocks } = { andClearMocks: false }) => {
+ await axios.waitForAll();
+ if (andClearMocks) {
+ projectMilestonesApiCallSpy.mockClear();
+ groupMilestonesApiCallSpy.mockClear();
+ }
+ };
describe('initialization behavior', () => {
beforeEach(createComponent);
@@ -250,7 +254,7 @@ describe('Milestone combobox component', () => {
describe('when the search query is empty', () => {
it('renders a "no results" message', () => {
- expect(findNoResults().text()).toBe(s__('MilestoneCombobox|No matching results'));
+ expect(findNoResults().text()).toBe('No matching results');
});
});
});
@@ -333,23 +337,19 @@ describe('Milestone combobox component', () => {
it('renders a checkmark by the selected item', async () => {
selectFirstProjectMilestone();
- await localVue.nextTick();
+ await nextTick();
expect(
- findFirstProjectMilestonesDropdownItem()
- .find('span')
- .classes('selected-item'),
- ).toBe(false);
+ findFirstProjectMilestonesDropdownItem().find('span').classes('selected-item'),
+ ).toBe(true);
selectFirstProjectMilestone();
- await localVue.nextTick();
+ await nextTick();
expect(
- findFirstProjectMilestonesDropdownItem()
- .find('span')
- .classes('selected-item'),
- ).toBe(true);
+ findFirstProjectMilestonesDropdownItem().find('span').classes('selected-item'),
+ ).toBe(false);
});
describe('when a project milestone is selected', () => {
@@ -364,22 +364,21 @@ describe('Milestone combobox component', () => {
it("displays the project milestones name in the dropdown's button", async () => {
selectFirstProjectMilestone();
- await localVue.nextTick();
+ await nextTick();
- expect(findButtonContent().text()).toBe(s__('MilestoneCombobox|No milestone'));
+ expect(findButtonContent().text()).toBe('v1.0');
selectFirstProjectMilestone();
+ await nextTick();
- await localVue.nextTick();
- expect(findButtonContent().text()).toBe('v1.0');
+ expect(findButtonContent().text()).toBe('No milestone');
});
- it('updates the v-model binding with the project milestone title', () => {
- expect(wrapper.vm.value).toEqual([]);
-
+ it('updates the v-model binding with the project milestone title', async () => {
selectFirstProjectMilestone();
+ await nextTick();
- expect(wrapper.vm.value).toEqual(['v1.0']);
+ expect(wrapper.emitted().input[0][0]).toStrictEqual(['v1.0']);
});
});
});
@@ -463,23 +462,19 @@ describe('Milestone combobox component', () => {
it('renders a checkmark by the selected item', async () => {
selectFirstGroupMilestone();
- await localVue.nextTick();
+ await nextTick();
- expect(
- findFirstGroupMilestonesDropdownItem()
- .find('span')
- .classes('selected-item'),
- ).toBe(false);
+ expect(findFirstGroupMilestonesDropdownItem().find('span').classes('selected-item')).toBe(
+ true,
+ );
selectFirstGroupMilestone();
- await localVue.nextTick();
+ await nextTick();
- expect(
- findFirstGroupMilestonesDropdownItem()
- .find('span')
- .classes('selected-item'),
- ).toBe(true);
+ expect(findFirstGroupMilestonesDropdownItem().find('span').classes('selected-item')).toBe(
+ false,
+ );
});
describe('when a group milestone is selected', () => {
@@ -494,22 +489,21 @@ describe('Milestone combobox component', () => {
it("displays the group milestones name in the dropdown's button", async () => {
selectFirstGroupMilestone();
- await localVue.nextTick();
+ await nextTick();
- expect(findButtonContent().text()).toBe(s__('MilestoneCombobox|No milestone'));
+ expect(findButtonContent().text()).toBe('group-v1.0');
selectFirstGroupMilestone();
+ await nextTick();
- await localVue.nextTick();
- expect(findButtonContent().text()).toBe('group-v1.0');
+ expect(findButtonContent().text()).toBe('No milestone');
});
- it('updates the v-model binding with the group milestone title', () => {
- expect(wrapper.vm.value).toEqual([]);
-
+ it('updates the v-model binding with the group milestone title', async () => {
selectFirstGroupMilestone();
+ await nextTick();
- expect(wrapper.vm.value).toEqual(['group-v1.0']);
+ expect(wrapper.emitted().input[0][0]).toStrictEqual(['group-v1.0']);
});
});
});
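
The milestone_combobox_spec.js diff above swaps createLocalVue/localVue.nextTick for Vue.use(Vuex) and nextTick from 'vue', and asserts the v-model update through wrapper.emitted() instead of reading wrapper.vm.value. A condensed sketch of that emitted-event assertion, assuming Vue 2 with @vue/test-utils and Jest; the TagPicker component is hypothetical and only stands in for a v-model style child:

import { shallowMount } from '@vue/test-utils';

// Hypothetical v-model component, written with a render function so no template
// compiler is required in the test environment.
const TagPicker = {
  props: { value: { type: Array, default: () => [] } },
  render(h) {
    return h(
      'button',
      { on: { click: () => this.$emit('input', [...this.value, 'v1.0']) } },
      'pick',
    );
  },
};

describe('TagPicker (illustrative only)', () => {
  it('reports selections through the emitted input event', async () => {
    const wrapper = shallowMount(TagPicker, { propsData: { value: [] } });

    await wrapper.find('button').trigger('click');

    // Assert the payload the parent would receive, not internal component state.
    expect(wrapper.emitted().input[0][0]).toStrictEqual(['v1.0']);
  });
});
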
diff --git a/spec/frontend/mini_pipeline_graph_dropdown_spec.js b/spec/frontend/mini_pipeline_graph_dropdown_spec.js
index 1ecf01894af..b21ddabbfb1 100644
--- a/spec/frontend/mini_pipeline_graph_dropdown_spec.js
+++ b/spec/frontend/mini_pipeline_graph_dropdown_spec.js
@@ -1,8 +1,8 @@
import $ from 'jquery';
import MockAdapter from 'axios-mock-adapter';
+import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import MiniPipelineGraph from '~/mini_pipeline_graph_dropdown';
-import waitForPromises from './helpers/wait_for_promises';
describe('Mini Pipeline Graph Dropdown', () => {
preloadFixtures('static/mini_dropdown_graph.html');
@@ -64,7 +64,7 @@ describe('Mini Pipeline Graph Dropdown', () => {
expect(ajaxSpy.mock.calls[0][0]).toEqual('foobar');
});
- it('should not close when user uses cmd/ctrl + click', done => {
+ it('should not close when user uses cmd/ctrl + click', (done) => {
mock.onGet('foobar').reply(200, {
html: `<li>
<a class="mini-pipeline-graph-dropdown-item" href="#">
@@ -90,7 +90,7 @@ describe('Mini Pipeline Graph Dropdown', () => {
.catch(done.fail);
});
- it('should close the dropdown when request returns an error', done => {
+ it('should close the dropdown when request returns an error', (done) => {
mock.onGet('foobar').networkError();
new MiniPipelineGraph({ container: '.js-builds-dropdown-tests' }).bindEvents();
diff --git a/spec/frontend/mocks/ce/lib/utils/axios_utils.js b/spec/frontend/mocks/ce/lib/utils/axios_utils.js
index 85fad231d28..674563b9f28 100644
--- a/spec/frontend/mocks/ce/lib/utils/axios_utils.js
+++ b/spec/frontend/mocks/ce/lib/utils/axios_utils.js
@@ -5,7 +5,7 @@ const axios = jest.requireActual('~/lib/utils/axios_utils').default;
axios.isMock = true;
// Fail tests for unmocked requests
-axios.defaults.adapter = config => {
+axios.defaults.adapter = (config) => {
const message =
`Unexpected unmocked request: ${JSON.stringify(config, null, 2)}\n` +
'Consider using the `axios-mock-adapter` module in tests.';
@@ -23,7 +23,7 @@ const onRequest = () => {
};
// Use setImmediate to allow the response interceptor to finish
-const onResponse = config => {
+const onResponse = (config) => {
activeRequests -= 1;
setImmediate(() => {
events.emit('response', config);
@@ -31,7 +31,7 @@ const onResponse = config => {
};
const subscribeToResponse = (predicate = () => true) =>
- new Promise(resolve => {
+ new Promise((resolve) => {
const listener = (config = {}) => {
if (predicate(config)) {
events.off('response', listener);
@@ -49,7 +49,7 @@ const subscribeToResponse = (predicate = () => true) =>
/**
* Registers a callback function to be run after a request to the given URL finishes.
*/
-axios.waitFor = url => subscribeToResponse(({ url: configUrl }) => configUrl === url);
+axios.waitFor = (url) => subscribeToResponse(({ url: configUrl }) => configUrl === url);
/**
* Registers a callback function to be run after all requests have finished. If there are no requests waiting, the callback is executed immediately.
@@ -58,18 +58,18 @@ axios.waitForAll = () => subscribeToResponse(() => activeRequests === 0);
axios.countActiveRequests = () => activeRequests;
-axios.interceptors.request.use(config => {
+axios.interceptors.request.use((config) => {
onRequest();
return config;
});
// Decrement the global counter
axios.interceptors.response.use(
- response => {
+ (response) => {
onResponse(response.config);
return response;
},
- err => {
+ (err) => {
onResponse(err.config);
return Promise.reject(err);
},
diff --git a/spec/frontend/mocks/mocks_helper.js b/spec/frontend/mocks/mocks_helper.js
index 0aa80331434..295483cd64c 100644
--- a/spec/frontend/mocks/mocks_helper.js
+++ b/spec/frontend/mocks/mocks_helper.js
@@ -19,9 +19,9 @@ const prefixMap = [
// { mocksRoot: 'virtual', requirePrefix: '' }, // We'll deal with virtual mocks later
];
-const mockFileFilter = stats => stats.isFile() && stats.path.endsWith('.js');
+const mockFileFilter = (stats) => stats.isFile() && stats.path.endsWith('.js');
-const getMockFiles = root => readdir.sync(root, { deep: MAX_DEPTH, filter: mockFileFilter });
+const getMockFiles = (root) => readdir.sync(root, { deep: MAX_DEPTH, filter: mockFileFilter });
// Function that sets a mock. This has to be overridden by the unit test, because
// jest.setMock can't be overwritten across files.
@@ -36,7 +36,7 @@ export const setupManualMocks = function setupManualMocks(setMock = defaultSetMo
return;
}
- getMockFiles(path.join(__dirname, mocksRoot)).forEach(mockPath => {
+ getMockFiles(path.join(__dirname, mocksRoot)).forEach((mockPath) => {
const mockPathNoExt = mockPath.substring(0, mockPath.length - path.extname(mockPath).length);
const sourcePath = path.join(requirePrefix, mockPathNoExt);
const mockPathRelative = `./${path.join(mocksRoot, mockPathNoExt)}`;
diff --git a/spec/frontend/mocks/mocks_helper_spec.js b/spec/frontend/mocks/mocks_helper_spec.js
index a91e8ecfa4a..0abe5c6b949 100644
--- a/spec/frontend/mocks/mocks_helper_spec.js
+++ b/spec/frontend/mocks/mocks_helper_spec.js
@@ -47,13 +47,13 @@ describe('mocks_helper.js', () => {
const readdirSpy = readdir.sync;
expect(readdirSpy).toHaveBeenCalled();
- readdirSpy.mock.calls.forEach(call => {
+ readdirSpy.mock.calls.forEach((call) => {
expect(call[1].deep).toBeLessThan(100);
});
});
it('sets up mocks for CE (the ~/ prefix)', () => {
- fs.existsSync.mockImplementation(root => root.endsWith('ce'));
+ fs.existsSync.mockImplementation((root) => root.endsWith('ce'));
readdir.sync.mockReturnValue(['root.js', 'lib/utils/util.js']);
setupManualMocks();
@@ -72,7 +72,7 @@ describe('mocks_helper.js', () => {
};
fs.existsSync.mockReturnValue(true);
- readdir.sync.mockImplementation(root => files[root]);
+ readdir.sync.mockImplementation((root) => files[root]);
setupManualMocks();
expect(readdir.sync).toHaveBeenCalledTimes(1);
@@ -84,7 +84,7 @@ describe('mocks_helper.js', () => {
});
it('fails when given a virtual mock', () => {
- fs.existsSync.mockImplementation(p => p.endsWith('ce'));
+ fs.existsSync.mockImplementation((p) => p.endsWith('ce'));
readdir.sync.mockReturnValue(['virtual', 'shouldntBeImported']);
setMock.mockImplementation(() => {
throw new Error('Could not locate module');
@@ -106,7 +106,7 @@ describe('mocks_helper.js', () => {
});
});
- it('survives jest.isolateModules()', done => {
+ it('survives jest.isolateModules()', (done) => {
jest.isolateModules(() => {
const axios2 = require('~/lib/utils/axios_utils').default;
expect(axios2.isMock).toBe(true);
diff --git a/spec/frontend/monitoring/alert_widget_spec.js b/spec/frontend/monitoring/alert_widget_spec.js
index d004b1da0b6..fd9fb374274 100644
--- a/spec/frontend/monitoring/alert_widget_spec.js
+++ b/spec/frontend/monitoring/alert_widget_spec.js
@@ -28,12 +28,20 @@ describe('AlertWidget', () => {
const nonFiringAlertResult = [
{
- values: [[0, 1], [1, 42], [2, 41]],
+ values: [
+ [0, 1],
+ [1, 42],
+ [2, 41],
+ ],
},
];
const firingAlertResult = [
{
- values: [[0, 42], [1, 43], [2, 44]],
+ values: [
+ [0, 42],
+ [1, 43],
+ [2, 44],
+ ],
},
];
const metricId = '5';
@@ -75,7 +83,7 @@ describe('AlertWidget', () => {
},
};
- const createComponent = propsData => {
+ const createComponent = (propsData) => {
wrapper = shallowMount(AlertWidget, {
stubs: { GlTooltip, GlSprintf },
propsData: {
@@ -88,10 +96,7 @@ describe('AlertWidget', () => {
const findWidgetForm = () => wrapper.find({ ref: 'widgetForm' });
const findAlertErrorMessage = () => wrapper.find({ ref: 'alertErrorMessage' });
const findCurrentSettingsText = () =>
- wrapper
- .find({ ref: 'alertCurrentSetting' })
- .text()
- .replace(/\s\s+/g, ' ');
+ wrapper.find({ ref: 'alertCurrentSetting' }).text().replace(/\s\s+/g, ' ');
const findBadge = () => wrapper.find(GlBadge);
const findTooltip = () => wrapper.find(GlTooltip);
@@ -103,7 +108,7 @@ describe('AlertWidget', () => {
it('displays a loading spinner and disables form when fetching alerts', () => {
let resolveReadAlert;
mockReadAlert.mockReturnValue(
- new Promise(resolve => {
+ new Promise((resolve) => {
resolveReadAlert = resolve;
}),
);
@@ -126,7 +131,7 @@ describe('AlertWidget', () => {
it('does not render loading spinner if showLoadingState is false', () => {
let resolveReadAlert;
mockReadAlert.mockReturnValue(
- new Promise(resolve => {
+ new Promise((resolve) => {
resolveReadAlert = resolve;
}),
);
@@ -300,11 +305,7 @@ describe('AlertWidget', () => {
createComponent(propsWithManyAlerts);
return waitForPromises().then(() => {
- expect(
- findTooltip()
- .text()
- .replace(/\s\s+/g, ' '),
- ).toEqual('Firing: alert-label > 42');
+ expect(findTooltip().text().replace(/\s\s+/g, ' ')).toEqual('Firing: alert-label > 42');
});
});
});
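
The alert_widget_spec.js hunks above keep mockReadAlert pending by handing it a Promise whose resolve function is captured in a local variable, so the loading spinner can be asserted before any data arrives. A small deferred-promise sketch of that technique, assuming a Jest environment; the helper and payload names are illustrative:

// Deferred promise: exposes resolve so the test decides when the mock settles.
const makeDeferred = () => {
  let resolve;
  const promise = new Promise((res) => {
    resolve = res;
  });
  return { promise, resolve };
};

it('stays pending until the test resolves it (illustrative only)', async () => {
  const deferred = makeDeferred();
  const mockReadAlert = jest.fn().mockReturnValue(deferred.promise);

  let settled = false;
  mockReadAlert().then(() => {
    settled = true;
  });

  // While the promise is pending, a component using this mock would still
  // be showing its loading state.
  expect(settled).toBe(false);

  deferred.resolve({ operator: '>', threshold: 42 });
  await deferred.promise;

  expect(settled).toBe(true);
});
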
diff --git a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
index 17720aeb702..e873edaad3b 100644
--- a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
+++ b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
@@ -37,6 +37,7 @@ exports[`Dashboard template matches the default snapshot 1`] = `
class="flex-grow-1"
data-qa-selector="environments_dropdown"
headertext=""
+ hideheaderborder="true"
id="monitor-environments-dropdown"
menu-class="monitor-environment-dropdown-menu"
size="medium"
diff --git a/spec/frontend/monitoring/components/alert_widget_form_spec.js b/spec/frontend/monitoring/components/alert_widget_form_spec.js
index 6d71a9b09e5..6d87fb85f4d 100644
--- a/spec/frontend/monitoring/components/alert_widget_form_spec.js
+++ b/spec/frontend/monitoring/components/alert_widget_form_spec.js
@@ -76,11 +76,15 @@ describe('AlertWidgetForm', () => {
});
it('shows correct title and button text', () => {
+ createComponent();
+
expect(modalTitle()).toBe('Add alert');
expect(submitButton().text()).toBe('Add');
});
it('sets tracking options for create alert', () => {
+ createComponent();
+
expect(submitButtonTrackingOpts()).toEqual(dataTrackingOptions.create);
});
diff --git a/spec/frontend/monitoring/components/charts/annotations_spec.js b/spec/frontend/monitoring/components/charts/annotations_spec.js
index fc90175d307..1eac0935fe4 100644
--- a/spec/frontend/monitoring/components/charts/annotations_spec.js
+++ b/spec/frontend/monitoring/components/charts/annotations_spec.js
@@ -36,7 +36,7 @@ describe('annotations spec', () => {
}),
);
- annotations.data.forEach(annotation => {
+ annotations.data.forEach((annotation) => {
expect(annotation).toEqual(expect.any(Object));
});
@@ -58,7 +58,7 @@ describe('annotations spec', () => {
}),
);
- annotations.markLine.data.forEach(annotation => {
+ annotations.markLine.data.forEach((annotation) => {
expect(annotation).toEqual(expect.any(Object));
});
@@ -83,7 +83,7 @@ describe('annotations spec', () => {
}),
);
- annotations.markLine.data.forEach(annotation => {
+ annotations.markLine.data.forEach((annotation) => {
expect(annotation).toEqual(expect.any(Object));
});
diff --git a/spec/frontend/monitoring/components/charts/anomaly_spec.js b/spec/frontend/monitoring/components/charts/anomaly_spec.js
index ebb49a2a0aa..dad3003d536 100644
--- a/spec/frontend/monitoring/components/charts/anomaly_spec.js
+++ b/spec/frontend/monitoring/components/charts/anomaly_spec.js
@@ -15,7 +15,7 @@ const TEST_LOWER = 9;
describe('Anomaly chart component', () => {
let wrapper;
- const setupAnomalyChart = props => {
+ const setupAnomalyChart = (props) => {
wrapper = shallowMount(Anomaly, {
propsData: { ...props },
});
@@ -91,7 +91,7 @@ describe('Anomaly chart component', () => {
expect(series.length).toEqual(2); // 1 upper + 1 lower boundaries
expect(series[0].stack).toEqual(series[1].stack);
- series.forEach(s => {
+ series.forEach((s) => {
expect(s.type).toBe('line');
expect(s.lineStyle.width).toBe(0);
expect(s.lineStyle.color).toMatch(/rgba\(.+\)/);
diff --git a/spec/frontend/monitoring/components/charts/bar_spec.js b/spec/frontend/monitoring/components/charts/bar_spec.js
index a363fafdc31..40edde5f666 100644
--- a/spec/frontend/monitoring/components/charts/bar_spec.js
+++ b/spec/frontend/monitoring/components/charts/bar_spec.js
@@ -44,7 +44,7 @@ describe('Bar component', () => {
it('should return chartData as array of arrays', () => {
expect(chartData).toBeInstanceOf(Array);
- chartData.forEach(item => {
+ chartData.forEach((item) => {
expect(item).toBeInstanceOf(Array);
});
});
diff --git a/spec/frontend/monitoring/components/charts/column_spec.js b/spec/frontend/monitoring/components/charts/column_spec.js
index fbcff33d692..0c4b6e8990d 100644
--- a/spec/frontend/monitoring/components/charts/column_spec.js
+++ b/spec/frontend/monitoring/components/charts/column_spec.js
@@ -45,7 +45,7 @@ describe('Column component', () => {
});
};
const findChart = () => wrapper.find(GlColumnChart);
- const chartProps = prop => findChart().props(prop);
+ const chartProps = (prop) => findChart().props(prop);
beforeEach(() => {
createWrapper();
@@ -58,7 +58,7 @@ describe('Column component', () => {
describe('xAxisLabel', () => {
const mockDate = Date.UTC(2020, 4, 26, 20); // 8:00 PM in GMT
- const useXAxisFormatter = date => {
+ const useXAxisFormatter = (date) => {
const { xAxis } = chartProps('option');
const { formatter } = xAxis.axisLabel;
return formatter(date);
diff --git a/spec/frontend/monitoring/components/charts/gauge_spec.js b/spec/frontend/monitoring/components/charts/gauge_spec.js
index 850e2ca87db..9215f2e411f 100644
--- a/spec/frontend/monitoring/components/charts/gauge_spec.js
+++ b/spec/frontend/monitoring/components/charts/gauge_spec.js
@@ -49,7 +49,7 @@ describe('Gauge Chart component', () => {
it.each(invalidCases)(
'if min has invalid value, defaults are used for both min and max',
- invalidValue => {
+ (invalidValue) => {
createWrapper({ minValue: invalidValue });
expect(findGaugeChart().props('min')).toBe(MIN_DEFAULT);
@@ -59,7 +59,7 @@ describe('Gauge Chart component', () => {
it.each(invalidCases)(
'if max has invalid value, defaults are used for both min and max',
- invalidValue => {
+ (invalidValue) => {
createWrapper({ minValue: invalidValue });
expect(findGaugeChart().props('min')).toBe(MIN_DEFAULT);
diff --git a/spec/frontend/monitoring/components/charts/options_spec.js b/spec/frontend/monitoring/components/charts/options_spec.js
index 3372d27e4f9..064ce6f204c 100644
--- a/spec/frontend/monitoring/components/charts/options_spec.js
+++ b/spec/frontend/monitoring/components/charts/options_spec.js
@@ -145,7 +145,7 @@ describe('options spec', () => {
expect(thresholds).toEqual([]);
});
- it.each(invalidCases)('invalid values for thresholds are filtered out', invalidValue => {
+ it.each(invalidCases)('invalid values for thresholds are filtered out', (invalidValue) => {
thresholds = getValidThresholds({
mode: 'absolute',
range: { min: 0, max: 100 },
@@ -197,7 +197,7 @@ describe('options spec', () => {
it.each(invalidCases)(
'when min has invalid value, empty result is returned',
- invalidValue => {
+ (invalidValue) => {
thresholds = getValidThresholds({
mode: 'absolute',
range: { min: invalidValue, max: 100 },
@@ -210,7 +210,7 @@ describe('options spec', () => {
it.each(invalidCases)(
'when max has invalid value, empty result is returned',
- invalidValue => {
+ (invalidValue) => {
thresholds = getValidThresholds({
mode: 'absolute',
range: { min: 0, max: invalidValue },
@@ -244,7 +244,7 @@ describe('options spec', () => {
it.each(invalidCases)(
'if invalid values are passed, empty result is returned',
- invalidValue => {
+ (invalidValue) => {
thresholds = getValidThresholds({
mode: 'absolute',
range: { min: 0, max: 100 },
@@ -259,7 +259,7 @@ describe('options spec', () => {
describe('mode', () => {
it.each(invalidCases)(
'if invalid values are passed, empty result is returned',
- invalidValue => {
+ (invalidValue) => {
thresholds = getValidThresholds({
mode: invalidValue,
range: { min: 0, max: 100 },
@@ -305,7 +305,7 @@ describe('options spec', () => {
const outOfPercentBoundsValues = [-1, 0, 100, 101];
it.each(outOfPercentBoundsValues)(
'when values out of 0-100 range are passed, empty result is returned',
- invalidValue => {
+ (invalidValue) => {
thresholds = getValidThresholds({
mode: 'percentage',
range: { min: 0, max: 1000 },
diff --git a/spec/frontend/monitoring/components/charts/stacked_column_spec.js b/spec/frontend/monitoring/components/charts/stacked_column_spec.js
index 2032258730a..4d4224a8b11 100644
--- a/spec/frontend/monitoring/components/charts/stacked_column_spec.js
+++ b/spec/frontend/monitoring/components/charts/stacked_column_spec.js
@@ -6,7 +6,7 @@ import StackedColumnChart from '~/monitoring/components/charts/stacked_column.vu
import { stackedColumnGraphData } from '../../graph_data';
jest.mock('~/lib/utils/icon_utils', () => ({
- getSvgIconPathContent: jest.fn().mockImplementation(icon => Promise.resolve(`${icon}-content`)),
+ getSvgIconPathContent: jest.fn().mockImplementation((icon) => Promise.resolve(`${icon}-content`)),
}));
describe('Stacked column chart component', () => {
@@ -26,7 +26,7 @@ describe('Stacked column chart component', () => {
stubs: {
GlPopover: true,
},
- attachToDocument: true,
+ attachTo: document.body,
});
beforeEach(() => {
@@ -48,7 +48,7 @@ describe('Stacked column chart component', () => {
data.forEach((series, index) => {
const { values } = stackedColumnMockedData.metrics[index].result[0];
- expect(series.data).toEqual(values.map(value => value[1]));
+ expect(series.data).toEqual(values.map((value) => value[1]));
});
});
diff --git a/spec/frontend/monitoring/components/charts/time_series_spec.js b/spec/frontend/monitoring/components/charts/time_series_spec.js
index 8fcee80a2d8..b7e1cb91987 100644
--- a/spec/frontend/monitoring/components/charts/time_series_spec.js
+++ b/spec/frontend/monitoring/components/charts/time_series_spec.js
@@ -2,7 +2,7 @@ import { mount, shallowMount } from '@vue/test-utils';
import { setTestTimeout } from 'helpers/timeout';
import timezoneMock from 'timezone-mock';
import { GlLink } from '@gitlab/ui';
-import { TEST_HOST } from 'jest/helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
import {
GlAreaChart,
GlLineChart,
@@ -23,14 +23,14 @@ import { timeSeriesGraphData } from '../../graph_data';
jest.mock('lodash/throttle', () =>
// this throttle mock executes immediately
- jest.fn(func => {
+ jest.fn((func) => {
// eslint-disable-next-line no-param-reassign
func.cancel = jest.fn();
return func;
}),
);
jest.mock('~/lib/utils/icon_utils', () => ({
- getSvgIconPathContent: jest.fn().mockImplementation(icon => Promise.resolve(`${icon}-content`)),
+ getSvgIconPathContent: jest.fn().mockImplementation((icon) => Promise.resolve(`${icon}-content`)),
}));
describe('Time series component', () => {
@@ -53,7 +53,7 @@ describe('Time series component', () => {
stubs: {
GlPopover: true,
},
- attachToDocument: true,
+ attachTo: document.body,
});
};
@@ -122,7 +122,7 @@ describe('Time series component', () => {
},
],
}),
- off: jest.fn(eChartEvent => {
+ off: jest.fn((eChartEvent) => {
delete eChartMock.handlers[eChartEvent];
}),
on: jest.fn((eChartEvent, fn) => {
@@ -193,7 +193,7 @@ describe('Time series component', () => {
it('does not throw error if data point is outside the zoom range', () => {
const seriesDataWithoutValue = {
...mockLineSeriesData(),
- seriesData: mockLineSeriesData().seriesData.map(data => ({
+ seriesData: mockLineSeriesData().seriesData.map((data) => ({
...data,
value: undefined,
})),
@@ -270,7 +270,7 @@ describe('Time series component', () => {
beforeEach(() => {
wrapper.vm.formatTooltipText({
...mockAnnotationsSeriesData,
- seriesData: mockAnnotationsSeriesData.seriesData.map(data => ({
+ seriesData: mockAnnotationsSeriesData.seriesData.map((data) => ({
...data,
data: annotationsMetadata,
})),
@@ -546,7 +546,7 @@ describe('Time series component', () => {
describe('xAxisLabel', () => {
const mockDate = Date.UTC(2020, 4, 26, 20); // 8:00 PM in GMT
- const useXAxisFormatter = date => {
+ const useXAxisFormatter = (date) => {
const { xAxis } = getChartOptions();
const { formatter } = xAxis.axisLabel;
return formatter(date);
@@ -620,7 +620,7 @@ describe('Time series component', () => {
},
];
- glChartComponents.forEach(dynamicComponent => {
+ glChartComponents.forEach((dynamicComponent) => {
describe(`GitLab UI: ${dynamicComponent.chartType}`, () => {
const findChartComponent = () => wrapper.find(dynamicComponent.component);
@@ -705,7 +705,7 @@ describe('Time series component', () => {
let lineColors;
beforeEach(() => {
- lineColors = wrapper.find(GlAreaChart).vm.series.map(item => item.lineStyle.color);
+ lineColors = wrapper.find(GlAreaChart).vm.series.map((item) => item.lineStyle.color);
});
it('should contain different colors for contiguous time series', () => {
@@ -727,7 +727,7 @@ describe('Time series component', () => {
const legendColors = wrapper
.find(GlChartLegend)
.props('seriesInfo')
- .map(item => item.color);
+ .map((item) => item.color);
lineColors.forEach((color, index) => {
expect(color).toBe(legendColors[index]);
diff --git a/spec/frontend/monitoring/components/dashboard_actions_menu_spec.js b/spec/frontend/monitoring/components/dashboard_actions_menu_spec.js
index b22e05ec30a..43d5937a3a1 100644
--- a/spec/frontend/monitoring/components/dashboard_actions_menu_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_actions_menu_spec.js
@@ -107,7 +107,7 @@ describe('Actions menu', () => {
describe('adding new metric from modal', () => {
let origPage;
- beforeEach(done => {
+ beforeEach((done) => {
jest.spyOn(Tracking, 'event').mockReturnValue();
createShallowWrapper();
@@ -123,7 +123,7 @@ describe('Actions menu', () => {
document.body.dataset.page = origPage;
});
- it('is tracked', done => {
+ it('is tracked', (done) => {
const submitButton = findAddMetricModalSubmitButton().vm;
wrapper.vm.$nextTick(() => {
@@ -219,7 +219,7 @@ describe('Actions menu', () => {
});
});
- describe.each(ootbDashboards)('when current dashboard is OOTB', dashboard => {
+ describe.each(ootbDashboards)('when current dashboard is OOTB', (dashboard) => {
beforeEach(() => {
setupAllDashboards(store, dashboard.path);
});
@@ -240,7 +240,7 @@ describe('Actions menu', () => {
createShallowWrapper();
});
- describe.each(ootbDashboards)('when current dashboard is OOTB', dashboard => {
+ describe.each(ootbDashboards)('when current dashboard is OOTB', (dashboard) => {
beforeEach(() => {
setupAllDashboards(store, dashboard.path);
});
diff --git a/spec/frontend/monitoring/components/dashboard_header_spec.js b/spec/frontend/monitoring/components/dashboard_header_spec.js
index f9a7a4d5a93..32fd9c45e8d 100644
--- a/spec/frontend/monitoring/components/dashboard_header_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_header_spec.js
@@ -41,7 +41,7 @@ describe('Dashboard header', () => {
const findActionsMenu = () => wrapper.find(ActionsMenu);
- const setSearchTerm = searchTerm => {
+ const setSearchTerm = (searchTerm) => {
store.commit(`monitoringDashboard/${types.SET_ENVIRONMENTS_FILTER}`, searchTerm);
};
@@ -147,13 +147,13 @@ describe('Dashboard header', () => {
it('environments dropdown items can be checked', () => {
const items = findEnvsDropdownItems();
- const checkItems = findEnvsDropdownItems().filter(item => item.props('isCheckItem'));
+ const checkItems = findEnvsDropdownItems().filter((item) => item.props('isCheckItem'));
expect(items).toHaveLength(checkItems.length);
});
it('checks the currently selected environment', () => {
- const selectedItems = findEnvsDropdownItems().filter(item => item.props('isChecked'));
+ const selectedItems = findEnvsDropdownItems().filter((item) => item.props('isChecked'));
expect(selectedItems).toHaveLength(1);
expect(selectedItems.at(0).text()).toBe(currentEnvironmentName);
@@ -218,7 +218,7 @@ describe('Dashboard header', () => {
});
describe('timezone setting', () => {
- const setupWithTimezone = value => {
+ const setupWithTimezone = (value) => {
store = createStore({ dashboardTimezone: value });
createShallowWrapper();
};
@@ -295,7 +295,7 @@ describe('Dashboard header', () => {
});
describe('adding metrics prop', () => {
- it.each(ootbDashboards)('gets passed true if current dashboard is OOTB', dashboardPath => {
+ it.each(ootbDashboards)('gets passed true if current dashboard is OOTB', (dashboardPath) => {
createShallowWrapper({ customMetricsAvailable: true });
store.state.monitoringDashboard.emptyState = false;
@@ -308,7 +308,7 @@ describe('Dashboard header', () => {
it.each(customDashboards)(
'gets passed false if current dashboard is custom',
- dashboardPath => {
+ (dashboardPath) => {
createShallowWrapper({ customMetricsAvailable: true });
store.state.monitoringDashboard.emptyState = false;
diff --git a/spec/frontend/monitoring/components/dashboard_panel_spec.js b/spec/frontend/monitoring/components/dashboard_panel_spec.js
index 1808faf8f0e..f64e05d3a2c 100644
--- a/spec/frontend/monitoring/components/dashboard_panel_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_panel_spec.js
@@ -57,7 +57,7 @@ describe('Dashboard Panel', () => {
const findTitle = () => wrapper.find({ ref: 'graphTitle' });
const findCtxMenu = () => wrapper.find({ ref: 'contextualMenu' });
const findMenuItems = () => wrapper.findAll(GlDropdownItem);
- const findMenuItemByText = text => findMenuItems().filter(i => i.text() === text);
+ const findMenuItemByText = (text) => findMenuItems().filter((i) => i.text() === text);
const findAlertsWidget = () => wrapper.find(AlertWidget);
const createWrapper = (props, { mountFn = shallowMount, ...options } = {}) => {
@@ -82,7 +82,7 @@ describe('Dashboard Panel', () => {
});
};
- const setMetricsSavedToDb = val =>
+ const setMetricsSavedToDb = (val) =>
monitoringDashboard.getters.metricsSavedToDb.mockReturnValue(val);
beforeEach(() => {
@@ -214,7 +214,7 @@ describe('Dashboard Panel', () => {
});
describe('Supports different panel types', () => {
- const dataWithType = type => {
+ const dataWithType = (type) => {
return {
...graphData,
type,
@@ -777,11 +777,7 @@ describe('Dashboard Panel', () => {
await wrapper.vm.$nextTick();
expect(findRunbookLinks().length).toBe(1);
- expect(
- findRunbookLinks()
- .at(0)
- .attributes('href'),
- ).toBe(invalidUrl);
+ expect(findRunbookLinks().at(0).attributes('href')).toBe(invalidUrl);
});
});
});
diff --git a/spec/frontend/monitoring/components/dashboard_spec.js b/spec/frontend/monitoring/components/dashboard_spec.js
index 27e479ba498..db35f1cdde3 100644
--- a/spec/frontend/monitoring/components/dashboard_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_spec.js
@@ -126,7 +126,7 @@ describe('Dashboard', () => {
});
describe('panel containers layout', () => {
- const findPanelLayoutWrapperAt = index => {
+ const findPanelLayoutWrapperAt = (index) => {
return wrapper
.find(GraphGroup)
.findAll('[data-testid="dashboard-panel-layout-wrapper"]')
@@ -225,7 +225,7 @@ describe('Dashboard', () => {
describe('when the URL contains a reference to a panel', () => {
let location;
- const setSearch = search => {
+ const setSearch = (search) => {
window.location = { ...location, search };
};
@@ -391,7 +391,7 @@ describe('Dashboard', () => {
});
describe('when all panels in the first group are loading', () => {
- const findGroupAt = i => wrapper.findAll(GraphGroup).at(i);
+ const findGroupAt = (i) => wrapper.findAll(GraphGroup).at(i);
beforeEach(() => {
setupStoreWithDashboard(store);
@@ -434,7 +434,7 @@ describe('Dashboard', () => {
setupStoreWithData(store);
wrapper.vm.$nextTick(() => {
- wrapper.findAll(GraphGroup).wrappers.forEach(groupWrapper => {
+ wrapper.findAll(GraphGroup).wrappers.forEach((groupWrapper) => {
expect(groupWrapper.props('isLoading')).toBe(false);
});
});
@@ -505,7 +505,7 @@ describe('Dashboard', () => {
let group;
let panel;
- const mockKeyup = key => window.dispatchEvent(new KeyboardEvent('keyup', { key }));
+ const mockKeyup = (key) => window.dispatchEvent(new KeyboardEvent('keyup', { key }));
const MockPanel = {
template: `<div><slot name="top-left"/></div>`,
@@ -532,7 +532,7 @@ describe('Dashboard', () => {
it('displays a single panel and others are hidden', () => {
const panels = wrapper.findAll(MockPanel);
- const visiblePanels = panels.filter(w => w.isVisible());
+ const visiblePanels = panels.filter((w) => w.isVisible());
expect(findExpandedPanel().isVisible()).toBe(true);
// v-show for hiding panels is more performant than v-if
@@ -587,18 +587,15 @@ describe('Dashboard', () => {
});
it('group empty area displays a NO_DATA state', () => {
- expect(
- wrapper
- .findAll({ ref: 'empty-group' })
- .at(0)
- .props('selectedState'),
- ).toEqual(metricStates.NO_DATA);
+ expect(wrapper.findAll({ ref: 'empty-group' }).at(0).props('selectedState')).toEqual(
+ metricStates.NO_DATA,
+ );
});
});
describe('drag and drop function', () => {
const findDraggables = () => wrapper.findAll(VueDraggable);
- const findEnabledDraggables = () => findDraggables().filter(f => !f.attributes('disabled'));
+ const findEnabledDraggables = () => findDraggables().filter((f) => !f.attributes('disabled'));
const findDraggablePanels = () => wrapper.findAll('.js-draggable-panel');
const findRearrangeButton = () => wrapper.find('.js-rearrange-button');
@@ -634,9 +631,7 @@ describe('Dashboard', () => {
describe('when rearrange button is clicked', () => {
const findFirstDraggableRemoveButton = () =>
- findDraggablePanels()
- .at(0)
- .find('.js-draggable-remove');
+ findDraggablePanels().at(0).find('.js-draggable-remove');
beforeEach(() => {
findRearrangeButton().vm.$emit('click');
@@ -668,11 +663,7 @@ describe('Dashboard', () => {
});
it('shows a remove button, which removes a panel', () => {
- expect(
- findFirstDraggableRemoveButton()
- .find('a')
- .exists(),
- ).toBe(true);
+ expect(findFirstDraggableRemoveButton().find('a').exists()).toBe(true);
expect(findDraggablePanels().length).toEqual(metricsDashboardPanelCount);
findFirstDraggableRemoveButton().trigger('click');
@@ -772,10 +763,7 @@ describe('Dashboard', () => {
const panelIndex = 1; // skip expanded panel
const getClipboardTextFirstPanel = () =>
- wrapper
- .findAll(DashboardPanel)
- .at(panelIndex)
- .props('clipboardText');
+ wrapper.findAll(DashboardPanel).at(panelIndex).props('clipboardText');
beforeEach(() => {
setupStoreWithData(store);
diff --git a/spec/frontend/monitoring/components/dashboards_dropdown_spec.js b/spec/frontend/monitoring/components/dashboards_dropdown_spec.js
index ef5784183b2..6ed190051ce 100644
--- a/spec/frontend/monitoring/components/dashboards_dropdown_spec.js
+++ b/spec/frontend/monitoring/components/dashboards_dropdown_spec.js
@@ -1,3 +1,4 @@
+import { nextTick } from 'vue';
import { shallowMount } from '@vue/test-utils';
import { GlDropdownItem, GlIcon } from '@gitlab/ui';
@@ -27,18 +28,17 @@ describe('DashboardsDropdown', () => {
...props,
defaultBranch,
},
- sync: false,
...storeOpts,
...opts,
});
}
const findItems = () => wrapper.findAll(GlDropdownItem);
- const findItemAt = i => wrapper.findAll(GlDropdownItem).at(i);
+ const findItemAt = (i) => wrapper.findAll(GlDropdownItem).at(i);
const findSearchInput = () => wrapper.find({ ref: 'monitorDashboardsDropdownSearch' });
const findNoItemsMsg = () => wrapper.find({ ref: 'monitorDashboardsDropdownMsg' });
const findStarredListDivider = () => wrapper.find({ ref: 'starredListDivider' });
- const setSearchTerm = searchTerm => wrapper.setData({ searchTerm });
+ const setSearchTerm = (searchTerm) => wrapper.setData({ searchTerm });
beforeEach(() => {
mockDashboards = dashboardGitResponse;
@@ -72,22 +72,20 @@ describe('DashboardsDropdown', () => {
expect(findNoItemsMsg().isVisible()).toBe(false);
});
- it('filters dropdown items when the searched-for item exists in the list', () => {
+ it('filters dropdown items when the searched-for item exists in the list', async () => {
const searchTerm = 'Overview';
setSearchTerm(searchTerm);
+ await nextTick();
- return wrapper.vm.$nextTick().then(() => {
- expect(findItems()).toHaveLength(1);
- });
+ expect(findItems()).toHaveLength(1);
});
- it('shows no items found message when the searched-for item does not exist in the list', () => {
+ it('shows no items found message when the searched-for item does not exist in the list', async () => {
const searchTerm = 'does-not-exist';
setSearchTerm(searchTerm);
+ await nextTick();
- return wrapper.vm.$nextTick().then(() => {
- expect(findNoItemsMsg().isVisible()).toBe(true);
- });
+ expect(findNoItemsMsg().isVisible()).toBe(true);
});
});
@@ -105,7 +103,7 @@ describe('DashboardsDropdown', () => {
describe('when the dashboard is missing a display name', () => {
beforeEach(() => {
- mockDashboards = dashboardGitResponse.map(d => ({ ...d, display_name: undefined }));
+ mockDashboards = dashboardGitResponse.map((d) => ({ ...d, display_name: undefined }));
createComponent();
});
diff --git a/spec/frontend/monitoring/components/duplicate_dashboard_form_spec.js b/spec/frontend/monitoring/components/duplicate_dashboard_form_spec.js
index 29115ffb817..4abb5b2e27b 100644
--- a/spec/frontend/monitoring/components/duplicate_dashboard_form_spec.js
+++ b/spec/frontend/monitoring/components/duplicate_dashboard_form_spec.js
@@ -1,3 +1,4 @@
+import { nextTick } from 'vue';
import { mount } from '@vue/test-utils';
import DuplicateDashboardForm from '~/monitoring/components/duplicate_dashboard_form.vue';
@@ -9,20 +10,19 @@ const createMountedWrapper = (props = {}) => {
// Use `mount` to render native input elements
wrapper = mount(DuplicateDashboardForm, {
propsData: { ...props },
- sync: false,
// We need to attach to document, so that `document.activeElement` is properly set in jsdom
- attachToDocument: true,
+ attachTo: document.body,
});
};
describe('DuplicateDashboardForm', () => {
const defaultBranch = 'master';
- const findByRef = ref => wrapper.find({ ref });
+ const findByRef = (ref) => wrapper.find({ ref });
const setValue = (ref, val) => {
findByRef(ref).setValue(val);
};
- const setChecked = value => {
+ const setChecked = (value) => {
const input = wrapper.find(`.form-check-input[value="${value}"]`);
input.element.checked = true;
input.trigger('click');
@@ -47,34 +47,34 @@ describe('DuplicateDashboardForm', () => {
describe('validates the file name', () => {
const findInvalidFeedback = () => findByRef('fileNameFormGroup').find('.invalid-feedback');
- it('when it is empty', () => {
+ it('when it is empty', async () => {
setValue('fileName', '');
- return wrapper.vm.$nextTick(() => {
- expect(findByRef('fileNameFormGroup').classes()).toContain('is-valid');
- expect(findInvalidFeedback().exists()).toBe(false);
- });
+ await nextTick();
+
+ expect(findByRef('fileNameFormGroup').classes()).toContain('is-valid');
+ expect(findInvalidFeedback().exists()).toBe(false);
});
- it('when it is valid', () => {
+ it('when it is valid', async () => {
setValue('fileName', 'my_dashboard.yml');
- return wrapper.vm.$nextTick(() => {
- expect(findByRef('fileNameFormGroup').classes()).toContain('is-valid');
- expect(findInvalidFeedback().exists()).toBe(false);
- });
+ await nextTick();
+
+ expect(findByRef('fileNameFormGroup').classes()).toContain('is-valid');
+ expect(findInvalidFeedback().exists()).toBe(false);
});
- it('when it is not valid', () => {
+ it('when it is not valid', async () => {
setValue('fileName', 'my_dashboard.exe');
- return wrapper.vm.$nextTick(() => {
- expect(findByRef('fileNameFormGroup').classes()).toContain('is-invalid');
- expect(findInvalidFeedback().text()).toBeTruthy();
- });
+ await nextTick();
+
+ expect(findByRef('fileNameFormGroup').classes()).toContain('is-invalid');
+ expect(findInvalidFeedback().text()).toBeTruthy();
});
});
describe('emits `change` event', () => {
const lastChange = () =>
- wrapper.vm.$nextTick().then(() => {
+ nextTick().then(() => {
wrapper.find('form').trigger('change');
// Resolves to the last emitted change
@@ -133,19 +133,19 @@ describe('DuplicateDashboardForm', () => {
expect(lastChange()).resolves.toMatchObject({
branch: defaultBranch,
}),
- wrapper.vm.$nextTick(() => {
+ nextTick(() => {
expect(findByRef('branchName').isVisible()).toBe(false);
}),
]);
});
- it('when `new` branch option is chosen, focuses on the branch name input', () => {
+ it('when `new` branch option is chosen, focuses on the branch name input', async () => {
setChecked(wrapper.vm.$options.radioVals.NEW);
- return wrapper.vm.$nextTick().then(() => {
- wrapper.find('form').trigger('change');
- expect(document.activeElement).toBe(findByRef('branchName').element);
- });
+ await nextTick();
+
+ wrapper.find('form').trigger('change');
+ expect(document.activeElement).toBe(findByRef('branchName').element);
});
});
});
diff --git a/spec/frontend/monitoring/components/duplicate_dashboard_modal_spec.js b/spec/frontend/monitoring/components/duplicate_dashboard_modal_spec.js
index d8ffb4443ac..7e7065da179 100644
--- a/spec/frontend/monitoring/components/duplicate_dashboard_modal_spec.js
+++ b/spec/frontend/monitoring/components/duplicate_dashboard_modal_spec.js
@@ -1,3 +1,5 @@
+import Vuex from 'vuex';
+import Vue from 'vue';
import { shallowMount } from '@vue/test-utils';
import { GlAlert, GlLoadingIcon, GlModal } from '@gitlab/ui';
@@ -8,6 +10,8 @@ import DuplicateDashboardForm from '~/monitoring/components/duplicate_dashboard_
import { dashboardGitResponse } from '../mock_data';
+Vue.use(Vuex);
+
describe('duplicate dashboard modal', () => {
let wrapper;
let mockDashboards;
@@ -15,25 +19,28 @@ describe('duplicate dashboard modal', () => {
let duplicateDashboardAction;
let okEvent;
- function createComponent(opts = {}) {
- const storeOpts = {
- methods: {
- duplicateSystemDashboard: jest.fn(),
- },
- computed: {
- allDashboards: () => mockDashboards,
- selectedDashboard: () => mockSelectedDashboard,
+ function createComponent() {
+ const store = new Vuex.Store({
+ modules: {
+ monitoringDashboard: {
+ namespaced: true,
+ actions: {
+ duplicateSystemDashboard: duplicateDashboardAction,
+ },
+ getters: {
+ allDashboards: () => mockDashboards,
+ selectedDashboard: () => mockSelectedDashboard,
+ },
+ },
},
- };
+ });
return shallowMount(DuplicateDashboardModal, {
propsData: {
defaultBranch: 'master',
modalId: 'id',
},
- sync: false,
- ...storeOpts,
- ...opts,
+ store,
});
}
@@ -51,12 +58,7 @@ describe('duplicate dashboard modal', () => {
preventDefault: jest.fn(),
};
- wrapper = createComponent({
- methods: {
- // Mock vuex actions
- duplicateSystemDashboard: duplicateDashboardAction,
- },
- });
+ wrapper = createComponent();
wrapper.vm.$refs.duplicateDashboardModal.hide = jest.fn();
});
@@ -65,34 +67,33 @@ describe('duplicate dashboard modal', () => {
expect(findDuplicateDashboardForm().exists()).toBe(true);
});
- it('saves a new dashboard', () => {
+ it('saves a new dashboard', async () => {
findModal().vm.$emit('ok', okEvent);
- return waitForPromises().then(() => {
- expect(okEvent.preventDefault).toHaveBeenCalled();
- expect(wrapper.emitted().dashboardDuplicated).toBeTruthy();
- expect(wrapper.emitted().dashboardDuplicated[0]).toEqual([dashboardGitResponse[0]]);
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
- expect(wrapper.vm.$refs.duplicateDashboardModal.hide).toHaveBeenCalled();
- expect(findAlert().exists()).toBe(false);
- });
+ await waitForPromises();
+ expect(okEvent.preventDefault).toHaveBeenCalled();
+ expect(wrapper.emitted().dashboardDuplicated).toBeTruthy();
+ expect(wrapper.emitted().dashboardDuplicated[0]).toEqual([dashboardGitResponse[0]]);
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.vm.$refs.duplicateDashboardModal.hide).toHaveBeenCalled();
+ expect(findAlert().exists()).toBe(false);
});
- it('handles error when a new dashboard is not saved', () => {
+ it('handles error when a new dashboard is not saved', async () => {
const errMsg = 'An error occurred';
duplicateDashboardAction.mockRejectedValueOnce(errMsg);
findModal().vm.$emit('ok', okEvent);
- return waitForPromises().then(() => {
- expect(okEvent.preventDefault).toHaveBeenCalled();
+ await waitForPromises();
- expect(findAlert().exists()).toBe(true);
- expect(findAlert().text()).toBe(errMsg);
+ expect(okEvent.preventDefault).toHaveBeenCalled();
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
- expect(wrapper.vm.$refs.duplicateDashboardModal.hide).not.toHaveBeenCalled();
- });
+ expect(findAlert().exists()).toBe(true);
+ expect(findAlert().text()).toBe(errMsg);
+
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.vm.$refs.duplicateDashboardModal.hide).not.toHaveBeenCalled();
});
it('updates the form on changes', () => {
@@ -101,9 +102,7 @@ describe('duplicate dashboard modal', () => {
commitMessage: 'A commit message',
};
- findModal()
- .find(DuplicateDashboardForm)
- .vm.$emit('change', formVals);
+ findModal().find(DuplicateDashboardForm).vm.$emit('change', formVals);
// Binding's second argument contains the modal id
expect(wrapper.vm.form).toEqual(formVals);
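
The updated spec above swaps stubbed `methods`/`computed` options for a real namespaced Vuex store, so the component resolves `allDashboards`, `selectedDashboard`, and `duplicateSystemDashboard` through the store instead of through test-only overrides. A minimal, self-contained sketch of that pattern follows; the module and action names mirror the spec, while the payload and resolved value are illustrative only.

import Vue from 'vue';
import Vuex from 'vuex';

Vue.use(Vuex);

// Jest mock standing in for the real duplicateSystemDashboard action.
const duplicateSystemDashboard = jest.fn().mockResolvedValue({ path: 'new_dashboard.yml' });

const store = new Vuex.Store({
  modules: {
    monitoringDashboard: {
      namespaced: true,
      actions: { duplicateSystemDashboard },
      getters: {
        allDashboards: () => [],
        selectedDashboard: () => null,
      },
    },
  },
});

it('dispatches the namespaced action', async () => {
  // The module is namespaced, so the action type carries the module prefix.
  await store.dispatch('monitoringDashboard/duplicateSystemDashboard', { branch: 'master' });

  expect(duplicateSystemDashboard).toHaveBeenCalled();
});
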
diff --git a/spec/frontend/monitoring/components/embeds/embed_group_spec.js b/spec/frontend/monitoring/components/embeds/embed_group_spec.js
index 01089752933..cb06a1a6b64 100644
--- a/spec/frontend/monitoring/components/embeds/embed_group_spec.js
+++ b/spec/frontend/monitoring/components/embeds/embed_group_spec.js
@@ -76,7 +76,7 @@ describe('Embed Group', () => {
expect(wrapper.find('.gl-card-body').classes()).not.toContain('d-none');
});
- it('collapses when clicked', done => {
+ it('collapses when clicked', (done) => {
metricsWithDataGetter.mockReturnValue([1]);
mountComponent({ shallow: false, stubs: { MetricEmbed: true } });
@@ -134,7 +134,7 @@ describe('Embed Group', () => {
});
it('passes the correct props to the Embed components', () => {
- expect(wrapper.findAll(MetricEmbed).wrappers.map(item => item.props())).toEqual(
+ expect(wrapper.findAll(MetricEmbed).wrappers.map((item) => item.props())).toEqual(
multipleEmbedProps(),
);
});
diff --git a/spec/frontend/monitoring/components/graph_group_spec.js b/spec/frontend/monitoring/components/graph_group_spec.js
index ebcd6c0df3a..c57461c2d09 100644
--- a/spec/frontend/monitoring/components/graph_group_spec.js
+++ b/spec/frontend/monitoring/components/graph_group_spec.js
@@ -11,7 +11,7 @@ describe('Graph group component', () => {
const findCaretIcon = () => wrapper.find(GlIcon);
const findToggleButton = () => wrapper.find('[data-testid="group-toggle-button"]');
- const createComponent = propsData => {
+ const createComponent = (propsData) => {
wrapper = shallowMount(GraphGroup, {
propsData,
});
diff --git a/spec/frontend/monitoring/components/group_empty_state_spec.js b/spec/frontend/monitoring/components/group_empty_state_spec.js
index 4a550efe23c..1dd2ed4e141 100644
--- a/spec/frontend/monitoring/components/group_empty_state_spec.js
+++ b/spec/frontend/monitoring/components/group_empty_state_spec.js
@@ -35,7 +35,7 @@ describe('GroupEmptyState', () => {
metricStates.LOADING,
metricStates.UNKNOWN_ERROR,
'FOO STATE', // does not fail with unknown states
- ])('given state %s', selectedState => {
+ ])('given state %s', (selectedState) => {
beforeEach(() => {
wrapper = createComponent({ selectedState });
});
diff --git a/spec/frontend/monitoring/components/links_section_spec.js b/spec/frontend/monitoring/components/links_section_spec.js
index b771d63d51f..2daad77d513 100644
--- a/spec/frontend/monitoring/components/links_section_spec.js
+++ b/spec/frontend/monitoring/components/links_section_spec.js
@@ -12,7 +12,7 @@ describe('Links Section component', () => {
store,
});
};
- const setState = links => {
+ const setState = (links) => {
store.state.monitoringDashboard = {
...store.state.monitoringDashboard,
emptyState: null,
diff --git a/spec/frontend/monitoring/components/refresh_button_spec.js b/spec/frontend/monitoring/components/refresh_button_spec.js
index 8a478362b5e..a03d29309d4 100644
--- a/spec/frontend/monitoring/components/refresh_button_spec.js
+++ b/spec/frontend/monitoring/components/refresh_button_spec.js
@@ -17,9 +17,9 @@ describe('RefreshButton', () => {
const findRefreshBtn = () => wrapper.find(GlButton);
const findDropdown = () => wrapper.find(GlDropdown);
const findOptions = () => findDropdown().findAll(GlDropdownItem);
- const findOptionAt = index => findOptions().at(index);
+ const findOptionAt = (index) => findOptions().at(index);
- const expectFetchDataToHaveBeenCalledTimes = times => {
+ const expectFetchDataToHaveBeenCalledTimes = (times) => {
const refreshCalls = dispatch.mock.calls.filter(([action, payload]) => {
return action === 'monitoringDashboard/fetchDashboardData' && payload === undefined;
});
diff --git a/spec/frontend/monitoring/components/variables/dropdown_field_spec.js b/spec/frontend/monitoring/components/variables/dropdown_field_spec.js
index cc384aef231..f5db17ce367 100644
--- a/spec/frontend/monitoring/components/variables/dropdown_field_spec.js
+++ b/spec/frontend/monitoring/components/variables/dropdown_field_spec.js
@@ -10,11 +10,14 @@ describe('Custom variable component', () => {
label: 'Select environment',
value: 'Production',
options: {
- values: [{ text: 'Production', value: 'prod' }, { text: 'Canary', value: 'canary' }],
+ values: [
+ { text: 'Production', value: 'prod' },
+ { text: 'Canary', value: 'canary' },
+ ],
},
};
- const createShallowWrapper = props => {
+ const createShallowWrapper = (props) => {
wrapper = shallowMount(DropdownField, {
propsData: {
...defaultProps,
@@ -54,9 +57,7 @@ describe('Custom variable component', () => {
createShallowWrapper();
jest.spyOn(wrapper.vm, '$emit');
- findDropdownItems()
- .at(1)
- .vm.$emit('click');
+ findDropdownItems().at(1).vm.$emit('click');
return wrapper.vm.$nextTick(() => {
expect(wrapper.vm.$emit).toHaveBeenCalledWith('input', 'canary');
diff --git a/spec/frontend/monitoring/fixture_data.js b/spec/frontend/monitoring/fixture_data.js
index 18ec74550b4..0f3dbaac493 100644
--- a/spec/frontend/monitoring/fixture_data.js
+++ b/spec/frontend/monitoring/fixture_data.js
@@ -35,7 +35,7 @@ const firstPanel = metricsDashboardViewModel.panelGroups[0].panels[0];
export const graphData = {
...firstPanel,
- metrics: firstPanel.metrics.map(metric => ({
+ metrics: firstPanel.metrics.map((metric) => ({
...metric,
result: metricsResult,
state: metricStates.OK,
@@ -44,7 +44,7 @@ export const graphData = {
export const graphDataEmpty = {
...firstPanel,
- metrics: firstPanel.metrics.map(metric => ({
+ metrics: firstPanel.metrics.map((metric) => ({
...metric,
result: [],
state: metricStates.NO_DATA,
diff --git a/spec/frontend/monitoring/graph_data.js b/spec/frontend/monitoring/graph_data.js
index 494fdb1b159..0b23ee41927 100644
--- a/spec/frontend/monitoring/graph_data.js
+++ b/spec/frontend/monitoring/graph_data.js
@@ -4,8 +4,8 @@ import { panelTypes, metricStates } from '~/monitoring/constants';
const initTime = 1435781450; // "Wed, 01 Jul 2015 20:10:50 GMT"
const intervalSeconds = 120;
-const makeValue = val => [initTime, val];
-const makeValues = vals => vals.map((val, i) => [initTime + intervalSeconds * i, val]);
+const makeValue = (val) => [initTime, val];
+const makeValues = (vals) => vals.map((val, i) => [initTime + intervalSeconds * i, val]);
// Raw Prometheus Responses
diff --git a/spec/frontend/monitoring/mock_data.js b/spec/frontend/monitoring/mock_data.js
index aea8815fb10..ca06c96c7d6 100644
--- a/spec/frontend/monitoring/mock_data.js
+++ b/spec/frontend/monitoring/mock_data.js
@@ -1,7 +1,8 @@
+// The path below needs to be relative because we import the mock data into Karma
+import { TEST_HOST } from '../__helpers__/test_constants';
import invalidUrl from '~/lib/utils/invalid_url';
// This import path needs to be relative for now because this mock data is used in
// Karma specs too, where the helpers/test_constants alias can not be resolved
-import { TEST_HOST } from '../helpers/test_constants';
export const mockProjectDir = '/frontend-fixtures/environments-project';
export const mockApiEndpoint = `${TEST_HOST}/monitoring/mock`;
diff --git a/spec/frontend/monitoring/requests/index_spec.js b/spec/frontend/monitoring/requests/index_spec.js
index a91c209875a..078de5f15d1 100644
--- a/spec/frontend/monitoring/requests/index_spec.js
+++ b/spec/frontend/monitoring/requests/index_spec.js
@@ -1,5 +1,5 @@
import MockAdapter from 'axios-mock-adapter';
-import { backoffMockImplementation } from 'jest/helpers/backoff_helper';
+import { backoffMockImplementation } from 'helpers/backoff_helper';
import axios from '~/lib/utils/axios_utils';
import statusCodes from '~/lib/utils/http_status';
import * as commonUtils from '~/lib/utils/common_utils';
@@ -31,7 +31,7 @@ describe('monitoring metrics_requests', () => {
it('returns a dashboard response', () => {
mock.onGet(dashboardEndpoint).reply(statusCodes.OK, response);
- return getDashboard(dashboardEndpoint, params).then(data => {
+ return getDashboard(dashboardEndpoint, params).then((data) => {
expect(data).toEqual(metricsDashboardResponse);
});
});
@@ -41,7 +41,7 @@ describe('monitoring metrics_requests', () => {
mock.onGet(dashboardEndpoint).replyOnce(statusCodes.NO_CONTENT);
mock.onGet(dashboardEndpoint).reply(statusCodes.OK, response);
- return getDashboard(dashboardEndpoint, params).then(data => {
+ return getDashboard(dashboardEndpoint, params).then((data) => {
expect(data).toEqual(metricsDashboardResponse);
expect(mock.history.get).toHaveLength(3);
});
@@ -50,7 +50,7 @@ describe('monitoring metrics_requests', () => {
it('rejects after getting an error', () => {
mock.onGet(dashboardEndpoint).reply(500);
- return getDashboard(dashboardEndpoint, params).catch(error => {
+ return getDashboard(dashboardEndpoint, params).catch((error) => {
expect(error).toEqual(expect.any(Error));
expect(mock.history.get).toHaveLength(1);
});
@@ -74,7 +74,7 @@ describe('monitoring metrics_requests', () => {
it('returns a dashboard response', () => {
mock.onGet(prometheusEndpoint).reply(statusCodes.OK, response);
- return getPrometheusQueryData(prometheusEndpoint, params).then(data => {
+ return getPrometheusQueryData(prometheusEndpoint, params).then((data) => {
expect(data).toEqual(response.data);
});
});
@@ -85,7 +85,7 @@ describe('monitoring metrics_requests', () => {
mock.onGet(prometheusEndpoint).replyOnce(statusCodes.NO_CONTENT);
mock.onGet(prometheusEndpoint).reply(statusCodes.OK, response); // 3rd attempt
- return getPrometheusQueryData(prometheusEndpoint, params).then(data => {
+ return getPrometheusQueryData(prometheusEndpoint, params).then((data) => {
expect(data).toEqual(response.data);
expect(mock.history.get).toHaveLength(3);
});
@@ -97,7 +97,7 @@ describe('monitoring metrics_requests', () => {
error: 'An error occurred',
});
- return getPrometheusQueryData(prometheusEndpoint, params).catch(error => {
+ return getPrometheusQueryData(prometheusEndpoint, params).catch((error) => {
expect(error).toEqual(new Error('Request failed with status code 500'));
});
});
@@ -109,7 +109,7 @@ describe('monitoring metrics_requests', () => {
error: 'An error occurred',
});
- return getPrometheusQueryData(prometheusEndpoint, params).catch(error => {
+ return getPrometheusQueryData(prometheusEndpoint, params).catch((error) => {
expect(error).toEqual(new Error('Request failed with status code 401'));
});
});
@@ -123,7 +123,7 @@ describe('monitoring metrics_requests', () => {
error: 'An error occurred',
}); // 3rd attempt
- return getPrometheusQueryData(prometheusEndpoint, params).catch(error => {
+ return getPrometheusQueryData(prometheusEndpoint, params).catch((error) => {
expect(error).toEqual(new Error('Request failed with status code 500'));
expect(mock.history.get).toHaveLength(3);
});
@@ -140,7 +140,7 @@ describe('monitoring metrics_requests', () => {
error: reason,
});
- return getPrometheusQueryData(prometheusEndpoint, params).catch(error => {
+ return getPrometheusQueryData(prometheusEndpoint, params).catch((error) => {
expect(error).toEqual(new Error(reason));
expect(mock.history.get).toHaveLength(1);
});
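
The NO_CONTENT cases above exercise a retry loop: the mock replies 204 once or twice, then 200, and the assertions check that three GET requests were made before the promise resolves. `backoffMockImplementation` removes the real delays in the specs; a simplified, illustrative poller showing the shape of that behaviour (not the helper the production code uses) could look like:

import axios from 'axios';

// Keep requesting while the endpoint answers 204 No Content; resolve with the
// payload once it returns data, or give up after a fixed number of attempts.
const pollUntilContent = async (url, { attempts = 3, delayMs = 0 } = {}) => {
  for (let attempt = 1; attempt <= attempts; attempt += 1) {
    const { status, data } = await axios.get(url);
    if (status !== 204) {
      return data;
    }
    await new Promise((resolve) => setTimeout(resolve, delayMs * attempt));
  }
  throw new Error(`No content after ${attempts} attempts`);
};
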
diff --git a/spec/frontend/monitoring/router_spec.js b/spec/frontend/monitoring/router_spec.js
index 8b97c8ed125..c30c4c56a6c 100644
--- a/spec/frontend/monitoring/router_spec.js
+++ b/spec/frontend/monitoring/router_spec.js
@@ -63,12 +63,7 @@ describe('Monitoring router', () => {
});
expect(wrapper.find(DashboardPage).exists()).toBe(true);
- expect(
- wrapper
- .find(DashboardPage)
- .find(Dashboard)
- .exists(),
- ).toBe(true);
+ expect(wrapper.find(DashboardPage).find(Dashboard).exists()).toBe(true);
});
});
@@ -91,12 +86,7 @@ describe('Monitoring router', () => {
});
expect(wrapper.find(DashboardPage).exists()).toBe(true);
- expect(
- wrapper
- .find(DashboardPage)
- .find(Dashboard)
- .exists(),
- ).toBe(true);
+ expect(wrapper.find(DashboardPage).find(Dashboard).exists()).toBe(true);
});
});
diff --git a/spec/frontend/monitoring/store/actions_spec.js b/spec/frontend/monitoring/store/actions_spec.js
index 5c7ab4e6a1f..319441b5ba2 100644
--- a/spec/frontend/monitoring/store/actions_spec.js
+++ b/spec/frontend/monitoring/store/actions_spec.js
@@ -1,6 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
-import { backoffMockImplementation } from 'jest/helpers/backoff_helper';
+import { backoffMockImplementation } from 'helpers/backoff_helper';
import Tracking from '~/tracking';
import axios from '~/lib/utils/axios_utils';
import statusCodes from '~/lib/utils/http_status';
@@ -88,7 +88,7 @@ describe('Monitoring store actions', () => {
// Setup
describe('setGettingStartedEmptyState', () => {
- it('should commit SET_GETTING_STARTED_EMPTY_STATE mutation', done => {
+ it('should commit SET_GETTING_STARTED_EMPTY_STATE mutation', (done) => {
testAction(
setGettingStartedEmptyState,
null,
@@ -105,7 +105,7 @@ describe('Monitoring store actions', () => {
});
describe('setInitialState', () => {
- it('should commit SET_INITIAL_STATE mutation', done => {
+ it('should commit SET_INITIAL_STATE mutation', (done) => {
testAction(
setInitialState,
{
@@ -233,7 +233,7 @@ describe('Monitoring store actions', () => {
};
});
- it('dispatches a failure', done => {
+ it('dispatches a failure', (done) => {
result()
.then(() => {
expect(commit).toHaveBeenCalledWith(
@@ -250,7 +250,7 @@ describe('Monitoring store actions', () => {
.catch(done.fail);
});
- it('dispatches a failure action when a message is returned', done => {
+ it('dispatches a failure action when a message is returned', (done) => {
result()
.then(() => {
expect(dispatch).toHaveBeenCalledWith(
@@ -265,7 +265,7 @@ describe('Monitoring store actions', () => {
.catch(done.fail);
});
- it('does not show a flash error when showErrorBanner is disabled', done => {
+ it('does not show a flash error when showErrorBanner is disabled', (done) => {
state.showErrorBanner = false;
result()
@@ -322,7 +322,7 @@ describe('Monitoring store actions', () => {
state.timeRange = defaultTimeRange;
});
- it('commits empty state when state.groups is empty', done => {
+ it('commits empty state when state.groups is empty', (done) => {
const localGetters = {
metricsWithData: () => [],
};
@@ -353,7 +353,7 @@ describe('Monitoring store actions', () => {
.catch(done.fail);
});
- it('dispatches fetchPrometheusMetric for each panel query', done => {
+ it('dispatches fetchPrometheusMetric for each panel query', (done) => {
state.dashboard.panelGroups = convertObjectPropsToCamelCase(
metricsDashboardResponse.dashboard.panel_groups,
);
@@ -390,7 +390,7 @@ describe('Monitoring store actions', () => {
done();
});
- it('dispatches fetchPrometheusMetric for each panel query, handles an error', done => {
+ it('dispatches fetchPrometheusMetric for each panel query, handles an error', (done) => {
state.dashboard.panelGroups = metricsDashboardViewModel.panelGroups;
const metric = state.dashboard.panelGroups[0].panels[0].metrics[0];
@@ -449,7 +449,7 @@ describe('Monitoring store actions', () => {
};
});
- it('commits result', done => {
+ it('commits result', (done) => {
mock.onGet(prometheusEndpointPath).reply(200, { data }); // One attempt
testAction(
@@ -485,7 +485,7 @@ describe('Monitoring store actions', () => {
step: 60,
};
- it('uses calculated step', done => {
+ it('uses calculated step', (done) => {
mock.onGet(prometheusEndpointPath).reply(200, { data }); // One attempt
testAction(
@@ -527,7 +527,7 @@ describe('Monitoring store actions', () => {
step: 7,
};
- it('uses metric step', done => {
+ it('uses metric step', (done) => {
mock.onGet(prometheusEndpointPath).reply(200, { data }); // One attempt
testAction(
@@ -558,7 +558,7 @@ describe('Monitoring store actions', () => {
});
});
- it('commits failure, when waiting for results and getting a server error', done => {
+ it('commits failure, when waiting for results and getting a server error', (done) => {
mock.onGet(prometheusEndpointPath).reply(500);
const error = new Error('Request failed with status code 500');
@@ -583,7 +583,7 @@ describe('Monitoring store actions', () => {
},
],
[],
- ).catch(e => {
+ ).catch((e) => {
expect(e).toEqual(error);
done();
});
@@ -991,7 +991,7 @@ describe('Monitoring store actions', () => {
state.dashboardsEndpoint = '/dashboards.json';
});
- it('Successful POST request resolves', done => {
+ it('Successful POST request resolves', (done) => {
mock.onPost(state.dashboardsEndpoint).reply(statusCodes.CREATED, {
dashboard: dashboardGitResponse[1],
});
@@ -1004,7 +1004,7 @@ describe('Monitoring store actions', () => {
.catch(done.fail);
});
- it('Successful POST request resolves to a dashboard', done => {
+ it('Successful POST request resolves to a dashboard', (done) => {
const mockCreatedDashboard = dashboardGitResponse[1];
const params = {
@@ -1026,7 +1026,7 @@ describe('Monitoring store actions', () => {
});
testAction(duplicateSystemDashboard, params, state, [], [])
- .then(result => {
+ .then((result) => {
expect(mock.history.post).toHaveLength(1);
expect(mock.history.post[0].data).toEqual(expectedPayload);
expect(result).toEqual(mockCreatedDashboard);
@@ -1036,10 +1036,10 @@ describe('Monitoring store actions', () => {
.catch(done.fail);
});
- it('Failed POST request throws an error', done => {
+ it('Failed POST request throws an error', (done) => {
mock.onPost(state.dashboardsEndpoint).reply(statusCodes.BAD_REQUEST);
- testAction(duplicateSystemDashboard, {}, state, [], []).catch(err => {
+ testAction(duplicateSystemDashboard, {}, state, [], []).catch((err) => {
expect(mock.history.post).toHaveLength(1);
expect(err).toEqual(expect.any(String));
@@ -1047,14 +1047,14 @@ describe('Monitoring store actions', () => {
});
});
- it('Failed POST request throws an error with a description', done => {
+ it('Failed POST request throws an error with a description', (done) => {
const backendErrorMsg = 'This file already exists!';
mock.onPost(state.dashboardsEndpoint).reply(statusCodes.BAD_REQUEST, {
error: backendErrorMsg,
});
- testAction(duplicateSystemDashboard, {}, state, [], []).catch(err => {
+ testAction(duplicateSystemDashboard, {}, state, [], []).catch((err) => {
expect(mock.history.post).toHaveLength(1);
expect(err).toEqual(expect.any(String));
expect(err).toEqual(expect.stringContaining(backendErrorMsg));
@@ -1067,7 +1067,7 @@ describe('Monitoring store actions', () => {
// Variables manipulation
describe('updateVariablesAndFetchData', () => {
- it('should commit UPDATE_VARIABLE_VALUE mutation and fetch data', done => {
+ it('should commit UPDATE_VARIABLE_VALUE mutation and fetch data', (done) => {
testAction(
updateVariablesAndFetchData,
{ pod: 'POD' },
diff --git a/spec/frontend/monitoring/store/getters_spec.js b/spec/frontend/monitoring/store/getters_spec.js
index 509de8a4596..771ec0ea549 100644
--- a/spec/frontend/monitoring/store/getters_spec.js
+++ b/spec/frontend/monitoring/store/getters_spec.js
@@ -365,7 +365,7 @@ describe('Monitoring store Getters', () => {
describe('selectedDashboard', () => {
const { selectedDashboard } = getters;
- const localGetters = localState => ({
+ const localGetters = (localState) => ({
fullDashboardPath: getters.fullDashboardPath(localState),
});
diff --git a/spec/frontend/monitoring/store/mutations_spec.js b/spec/frontend/monitoring/store/mutations_spec.js
index 8d1351fc909..571828eb67c 100644
--- a/spec/frontend/monitoring/store/mutations_spec.js
+++ b/spec/frontend/monitoring/store/mutations_spec.js
@@ -465,7 +465,10 @@ describe('Monitoring mutations', () => {
});
expect(variable.options).toEqual({
- values: [{ text: 'prometheus', value: 'prometheus' }, { text: 'node', value: 'node' }],
+ values: [
+ { text: 'prometheus', value: 'prometheus' },
+ { text: 'node', value: 'node' },
+ ],
});
});
});
@@ -509,7 +512,7 @@ describe('Monitoring mutations', () => {
});
describe('panel preview metric', () => {
- const getPreviewMetricAt = i => stateCopy.panelPreviewGraphData.metrics[i];
+ const getPreviewMetricAt = (i) => stateCopy.panelPreviewGraphData.metrics[i];
beforeEach(() => {
stateCopy.panelPreviewGraphData = {
diff --git a/spec/frontend/monitoring/store/utils_spec.js b/spec/frontend/monitoring/store/utils_spec.js
index b97948fa1bf..58bb87cb332 100644
--- a/spec/frontend/monitoring/store/utils_spec.js
+++ b/spec/frontend/monitoring/store/utils_spec.js
@@ -127,7 +127,7 @@ describe('mapToDashboardViewModel', () => {
let dashboard;
- const setupWithPanel = panel => {
+ const setupWithPanel = (panel) => {
dashboard = {
panel_groups: [
{
@@ -340,7 +340,7 @@ describe('mapToDashboardViewModel', () => {
],
});
- const getMappedMetric = dashboard => {
+ const getMappedMetric = (dashboard) => {
return mapToDashboardViewModel(dashboard).panelGroups[0].panels[0].metrics[0];
};
@@ -775,7 +775,11 @@ describe('normalizeQueryResponseData', () => {
job: 'prometheus',
instance: 'localhost:9090',
},
- values: [[1435781430.781, '1'], [1435781445.781, '2'], [1435781460.781, '3']],
+ values: [
+ [1435781430.781, '1'],
+ [1435781445.781, '2'],
+ [1435781460.781, '3'],
+ ],
},
{
metric: {
@@ -783,7 +787,11 @@ describe('normalizeQueryResponseData', () => {
job: 'node',
instance: 'localhost:9091',
},
- values: [[1435781430.781, '4'], [1435781445.781, '5'], [1435781460.781, '6']],
+ values: [
+ [1435781430.781, '4'],
+ [1435781445.781, '5'],
+ [1435781460.781, '6'],
+ ],
},
],
};
@@ -840,7 +848,10 @@ describe('normalizeQueryResponseData', () => {
job: 'prometheus',
instance: 'localhost:9090',
},
- values: [[1435781430.781, '1'], [1435781460.781, 'NaN']],
+ values: [
+ [1435781430.781, '1'],
+ [1435781460.781, 'NaN'],
+ ],
},
],
};
@@ -849,7 +860,10 @@ describe('normalizeQueryResponseData', () => {
{
metric: { __name__: 'up', instance: 'localhost:9090', job: 'prometheus' },
value: ['2015-07-01T20:11:00.781Z', NaN],
- values: [['2015-07-01T20:10:30.781Z', 1], ['2015-07-01T20:11:00.781Z', NaN]],
+ values: [
+ ['2015-07-01T20:10:30.781Z', 1],
+ ['2015-07-01T20:11:00.781Z', NaN],
+ ],
},
]);
});
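
The expectations above describe the value normalisation: Prometheus matrix samples arrive as `[unixSeconds, stringValue]` pairs and come out as `[isoTimestamp, numericValue]`, with `'NaN'` mapped to `NaN`. A hypothetical one-liner capturing just that step (not the actual `normalizeQueryResponseData` source) is:

// Hypothetical sketch of the per-series value mapping the spec asserts.
const normalizeValues = (values) =>
  values.map(([seconds, sample]) => [
    new Date(Math.round(seconds * 1000)).toISOString(),
    Number(sample),
  ]);

normalizeValues([
  [1435781430.781, '1'],
  [1435781460.781, 'NaN'],
]);
// => [['2015-07-01T20:10:30.781Z', 1], ['2015-07-01T20:11:00.781Z', NaN]]
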
diff --git a/spec/frontend/monitoring/store_utils.js b/spec/frontend/monitoring/store_utils.js
index 6c8267e6a3c..911ccc78f7b 100644
--- a/spec/frontend/monitoring/store_utils.js
+++ b/spec/frontend/monitoring/store_utils.js
@@ -15,7 +15,7 @@ export const setMetricResult = ({ store, result, group = 0, panel = 0, metric =
});
};
-const setEnvironmentData = store => {
+const setEnvironmentData = (store) => {
store.commit(`monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`, environmentData);
};
@@ -28,14 +28,14 @@ export const setupAllDashboards = (store, path) => {
}
};
-export const setupStoreWithDashboard = store => {
+export const setupStoreWithDashboard = (store) => {
store.commit(
`monitoringDashboard/${types.RECEIVE_METRICS_DASHBOARD_SUCCESS}`,
metricsDashboardPayload,
);
};
-export const setupStoreWithLinks = store => {
+export const setupStoreWithLinks = (store) => {
store.commit(`monitoringDashboard/${types.RECEIVE_METRICS_DASHBOARD_SUCCESS}`, {
...metricsDashboardPayload,
links: [
@@ -47,7 +47,7 @@ export const setupStoreWithLinks = store => {
});
};
-export const setupStoreWithData = store => {
+export const setupStoreWithData = (store) => {
setupAllDashboards(store);
setupStoreWithDashboard(store);
diff --git a/spec/frontend/monitoring/utils_spec.js b/spec/frontend/monitoring/utils_spec.js
index fd7d09f7f72..cd49e4c7968 100644
--- a/spec/frontend/monitoring/utils_spec.js
+++ b/spec/frontend/monitoring/utils_spec.js
@@ -1,4 +1,4 @@
-import { TEST_HOST } from 'jest/helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
import * as monitoringUtils from '~/monitoring/utils';
import * as urlUtils from '~/lib/utils/url_utility';
import { mockProjectDir, barMockData } from './mock_data';
@@ -295,7 +295,7 @@ describe('monitoring/utils', () => {
${'NOT_A_GROUP'} | ${title} | ${yLabel} | ${'group'}
${group} | ${'NOT_A_TITLE'} | ${yLabel} | ${'title'}
${group} | ${title} | ${'NOT_A_Y_LABEL'} | ${'y_label'}
- `('throws an error when $missingField is incorrect', params => {
+ `('throws an error when $missingField is incorrect', (params) => {
const search = `?group=${params.group}&title=${params.title}&y_label=${params.yLabel}`;
expect(() => expandedPanelPayloadFromUrl(metricsDashboardViewModel, search)).toThrow();
});
@@ -308,7 +308,7 @@ describe('monitoring/utils', () => {
const [panelGroup] = metricsDashboardViewModel.panelGroups;
const [panel] = panelGroup.panels;
- const getUrlParams = url => urlUtils.queryToObject(url.split('?')[1]);
+ const getUrlParams = (url) => urlUtils.queryToObject(url.split('?')[1]);
it('returns URL for a panel when query parameters are given', () => {
const params = getUrlParams(panelToUrl(dashboard, {}, panelGroup.group, panel));
diff --git a/spec/frontend/mr_popover/index_spec.js b/spec/frontend/mr_popover/index_spec.js
index b9db2342687..fd8ced17aea 100644
--- a/spec/frontend/mr_popover/index_spec.js
+++ b/spec/frontend/mr_popover/index_spec.js
@@ -1,5 +1,5 @@
+import { setHTMLFixture } from 'helpers/fixtures';
import * as createDefaultClient from '~/lib/graphql';
-import { setHTMLFixture } from '../helpers/fixtures';
import initMRPopovers from '~/mr_popover/index';
createDefaultClient.default = jest.fn();
diff --git a/spec/frontend/new_branch_spec.js b/spec/frontend/new_branch_spec.js
index cff7ec1a9ee..7e6b8a78d4f 100644
--- a/spec/frontend/new_branch_spec.js
+++ b/spec/frontend/new_branch_spec.js
@@ -12,9 +12,7 @@ describe('Branch', () => {
preloadFixtures('branches/new_branch.html');
function fillNameWith(value) {
- $('.js-branch-name')
- .val(value)
- .trigger('blur');
+ $('.js-branch-name').val(value).trigger('blur');
}
function expectToHaveError(error) {
@@ -23,7 +21,7 @@ describe('Branch', () => {
beforeEach(() => {
loadFixtures('branches/new_branch.html');
- $('form').on('submit', e => e.preventDefault());
+ $('form').on('submit', (e) => e.preventDefault());
testContext.form = new NewBranchForm($('.js-create-branch-form'), []);
});
diff --git a/spec/frontend/notebook/cells/code_spec.js b/spec/frontend/notebook/cells/code_spec.js
index 33dabe2b6dc..e14767f2594 100644
--- a/spec/frontend/notebook/cells/code_spec.js
+++ b/spec/frontend/notebook/cells/code_spec.js
@@ -11,7 +11,7 @@ describe('Code component', () => {
json = getJSONFixture('blob/notebook/basic.json');
});
- const setupComponent = cell => {
+ const setupComponent = (cell) => {
const comp = new Component({
propsData: {
cell,
@@ -22,7 +22,7 @@ describe('Code component', () => {
};
describe('without output', () => {
- beforeEach(done => {
+ beforeEach((done) => {
vm = setupComponent(json.cells[0]);
setImmediate(() => {
@@ -36,7 +36,7 @@ describe('Code component', () => {
});
describe('with output', () => {
- beforeEach(done => {
+ beforeEach((done) => {
vm = setupComponent(json.cells[2]);
setImmediate(() => {
diff --git a/spec/frontend/notebook/cells/output/index_spec.js b/spec/frontend/notebook/cells/output/index_spec.js
index b9a2dfb8f34..2985abf0f4f 100644
--- a/spec/frontend/notebook/cells/output/index_spec.js
+++ b/spec/frontend/notebook/cells/output/index_spec.js
@@ -7,7 +7,7 @@ describe('Output component', () => {
let vm;
let json;
- const createComponent = output => {
+ const createComponent = (output) => {
vm = new Component({
propsData: {
outputs: [].concat(output),
@@ -18,12 +18,14 @@ describe('Output component', () => {
};
beforeEach(() => {
+ // This is the output after rendering a Jupyter notebook
json = getJSONFixture('blob/notebook/basic.json');
});
describe('text output', () => {
- beforeEach(done => {
- createComponent(json.cells[2].outputs[0]);
+ beforeEach((done) => {
+ const textType = json.cells[2];
+ createComponent(textType.outputs[0]);
setImmediate(() => {
done();
@@ -40,8 +42,9 @@ describe('Output component', () => {
});
describe('image output', () => {
- beforeEach(done => {
- createComponent(json.cells[3].outputs[0]);
+ beforeEach((done) => {
+ const imageType = json.cells[3];
+ createComponent(imageType.outputs[0]);
setImmediate(() => {
done();
@@ -55,23 +58,42 @@ describe('Output component', () => {
describe('html output', () => {
it('renders raw HTML', () => {
- createComponent(json.cells[4].outputs[0]);
+ const htmlType = json.cells[4];
+ createComponent(htmlType.outputs[0]);
expect(vm.$el.querySelector('p')).not.toBeNull();
- expect(vm.$el.querySelectorAll('p').length).toBe(1);
+ expect(vm.$el.querySelectorAll('p')).toHaveLength(1);
expect(vm.$el.textContent.trim()).toContain('test');
});
it('renders multiple raw HTML outputs', () => {
- createComponent([json.cells[4].outputs[0], json.cells[4].outputs[0]]);
+ const htmlType = json.cells[4];
+ createComponent([htmlType.outputs[0], htmlType.outputs[0]]);
- expect(vm.$el.querySelectorAll('p').length).toBe(2);
+ expect(vm.$el.querySelectorAll('p')).toHaveLength(2);
+ });
+ });
+
+ describe('LaTeX output', () => {
+ it('renders LaTeX', () => {
+ const output = {
+ data: {
+ 'text/latex': ['$$F(k) = \\int_{-\\infty}^{\\infty} f(x) e^{2\\pi i k} dx$$'],
+ 'text/plain': ['<IPython.core.display.Latex object>'],
+ },
+ metadata: {},
+ output_type: 'display_data',
+ };
+ createComponent(output);
+
+ expect(vm.$el.querySelector('.MathJax')).not.toBeNull();
});
});
describe('svg output', () => {
- beforeEach(done => {
- createComponent(json.cells[5].outputs[0]);
+ beforeEach((done) => {
+ const svgType = json.cells[5];
+ createComponent(svgType.outputs[0]);
setImmediate(() => {
done();
@@ -84,8 +106,9 @@ describe('Output component', () => {
});
describe('default to plain text', () => {
- beforeEach(done => {
- createComponent(json.cells[6].outputs[0]);
+ beforeEach((done) => {
+ const unknownType = json.cells[6];
+ createComponent(unknownType.outputs[0]);
setImmediate(() => {
done();
@@ -101,8 +124,9 @@ describe('Output component', () => {
expect(vm.$el.querySelector('.prompt span')).not.toBeNull();
});
- it("renders as plain text when doesn't recognise other types", done => {
- createComponent(json.cells[7].outputs[0]);
+ it("renders as plain text when doesn't recognise other types", (done) => {
+ const unknownType = json.cells[7];
+ createComponent(unknownType.outputs[0]);
setImmediate(() => {
expect(vm.$el.querySelector('pre')).not.toBeNull();
diff --git a/spec/frontend/notebook/cells/output/latex_spec.js b/spec/frontend/notebook/cells/output/latex_spec.js
new file mode 100644
index 00000000000..848d2069421
--- /dev/null
+++ b/spec/frontend/notebook/cells/output/latex_spec.js
@@ -0,0 +1,40 @@
+import { shallowMount } from '@vue/test-utils';
+import LatexOutput from '~/notebook/cells/output/latex.vue';
+import Prompt from '~/notebook/cells/prompt.vue';
+
+describe('LaTeX output cell', () => {
+ beforeEach(() => {
+ window.MathJax = {
+ tex2svg: jest.fn((code) => ({ outerHTML: code })),
+ };
+ });
+
+ const inlineLatex = '$$F(k) = \\int_{-\\infty}^{\\infty} f(x) e^{2\\pi i k} dx$$';
+ const count = 12345;
+
+ const createComponent = (rawCode, index) =>
+ shallowMount(LatexOutput, {
+ propsData: {
+ count,
+ index,
+ rawCode,
+ },
+ });
+
+ it.each`
+ index | expectation
+ ${0} | ${true}
+ ${1} | ${false}
+ `('sets `Prompt.show-output` to $expectation when index is $index', ({ index, expectation }) => {
+ const wrapper = createComponent(inlineLatex, index);
+ const prompt = wrapper.find(Prompt);
+
+ expect(prompt.props().count).toEqual(count);
+ expect(prompt.props().showOutput).toEqual(expectation);
+ });
+
+ it('strips the `$$` delimiter from LaTeX', () => {
+ createComponent(inlineLatex, 0);
+ expect(window.MathJax.tex2svg).toHaveBeenCalledWith(expect.not.stringContaining('$$'));
+ });
+});
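
The last test above checks only that the `$$` delimiters never reach `MathJax.tex2svg`. A hypothetical helper illustrating that behaviour (the real component's implementation may differ) would be:

// Strip the `$$` math delimiters before handing the expression to MathJax.
const stripMathDelimiters = (rawCode) => rawCode.replace(/\$\$/g, '');

const renderLatex = (rawCode) => window.MathJax.tex2svg(stripMathDelimiters(rawCode));
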
diff --git a/spec/frontend/notebook/cells/prompt_spec.js b/spec/frontend/notebook/cells/prompt_spec.js
index cf5a7a603c6..8cdcd1f84de 100644
--- a/spec/frontend/notebook/cells/prompt_spec.js
+++ b/spec/frontend/notebook/cells/prompt_spec.js
@@ -7,7 +7,7 @@ describe('Prompt component', () => {
let vm;
describe('input', () => {
- beforeEach(done => {
+ beforeEach((done) => {
vm = new Component({
propsData: {
type: 'In',
@@ -31,7 +31,7 @@ describe('Prompt component', () => {
});
describe('output', () => {
- beforeEach(done => {
+ beforeEach((done) => {
vm = new Component({
propsData: {
type: 'Out',
diff --git a/spec/frontend/notebook/index_spec.js b/spec/frontend/notebook/index_spec.js
index 36b092be976..945af08e4d5 100644
--- a/spec/frontend/notebook/index_spec.js
+++ b/spec/frontend/notebook/index_spec.js
@@ -14,7 +14,7 @@ describe('Notebook component', () => {
});
describe('without JSON', () => {
- beforeEach(done => {
+ beforeEach((done) => {
vm = new Component({
propsData: {
notebook: {},
@@ -33,7 +33,7 @@ describe('Notebook component', () => {
});
describe('with JSON', () => {
- beforeEach(done => {
+ beforeEach((done) => {
vm = new Component({
propsData: {
notebook: json,
@@ -65,7 +65,7 @@ describe('Notebook component', () => {
});
describe('with worksheets', () => {
- beforeEach(done => {
+ beforeEach((done) => {
vm = new Component({
propsData: {
notebook: jsonWithWorksheet,
diff --git a/spec/frontend/notes/components/__snapshots__/discussion_jump_to_next_button_spec.js.snap b/spec/frontend/notes/components/__snapshots__/discussion_jump_to_next_button_spec.js.snap
deleted file mode 100644
index 13af29821d8..00000000000
--- a/spec/frontend/notes/components/__snapshots__/discussion_jump_to_next_button_spec.js.snap
+++ /dev/null
@@ -1,21 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`JumpToNextDiscussionButton matches the snapshot 1`] = `
-<div
- class="btn-group"
- role="group"
->
- <button
- class="btn btn-default discussion-next-btn"
- data-track-event="click_button"
- data-track-label="mr_next_unresolved_thread"
- data-track-property="click_next_unresolved_thread"
- title="Jump to next unresolved thread"
- >
- <gl-icon-stub
- name="comment-next"
- size="16"
- />
- </button>
-</div>
-`;
diff --git a/spec/frontend/notes/components/comment_field_layout_spec.js b/spec/frontend/notes/components/comment_field_layout_spec.js
new file mode 100644
index 00000000000..4d9b4ea8c6f
--- /dev/null
+++ b/spec/frontend/notes/components/comment_field_layout_spec.js
@@ -0,0 +1,137 @@
+import { shallowMount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import CommentFieldLayout from '~/notes/components/comment_field_layout.vue';
+import EmailParticipantsWarning from '~/notes/components/email_participants_warning.vue';
+import NoteableWarning from '~/vue_shared/components/notes/noteable_warning.vue';
+
+describe('Comment Field Layout Component', () => {
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const LOCKED_DISCUSSION_DOCS_PATH = 'docs/locked/path';
+ const CONFIDENTIAL_ISSUES_DOCS_PATH = 'docs/confidential/path';
+
+ const noteableDataMock = {
+ confidential: false,
+ discussion_locked: false,
+ locked_discussion_docs_path: LOCKED_DISCUSSION_DOCS_PATH,
+ confidential_issues_docs_path: CONFIDENTIAL_ISSUES_DOCS_PATH,
+ };
+
+ const findIssuableNoteWarning = () => wrapper.find(NoteableWarning);
+ const findEmailParticipantsWarning = () => wrapper.find(EmailParticipantsWarning);
+ const findErrorAlert = () => wrapper.findByTestId('comment-field-alert-container');
+
+ const createWrapper = (props = {}, slots = {}) => {
+ wrapper = extendedWrapper(
+ shallowMount(CommentFieldLayout, {
+ propsData: {
+ noteableData: noteableDataMock,
+ ...props,
+ },
+ slots,
+ }),
+ );
+ };
+
+ describe('.error-alert', () => {
+ it('does not exist by default', () => {
+ createWrapper();
+
+ expect(findErrorAlert().exists()).toBe(false);
+ });
+
+ it('exists when withAlertContainer is true', () => {
+ createWrapper({ withAlertContainer: true });
+
+ expect(findErrorAlert().isVisible()).toBe(true);
+ });
+ });
+
+ describe('issue is not confidential and not locked', () => {
+ it('does not show IssuableNoteWarning', () => {
+ createWrapper();
+
+ expect(findIssuableNoteWarning().exists()).toBe(false);
+ });
+ });
+
+ describe('issue is confidential', () => {
+ beforeEach(() => {
+ createWrapper({
+ noteableData: { ...noteableDataMock, confidential: true },
+ });
+ });
+
+ it('shows IssuableNoteWarning', () => {
+ expect(findIssuableNoteWarning().isVisible()).toBe(true);
+ });
+
+ it('sets IssuableNoteWarning props', () => {
+ expect(findIssuableNoteWarning().props()).toMatchObject({
+ isLocked: false,
+ isConfidential: true,
+ lockedNoteableDocsPath: LOCKED_DISCUSSION_DOCS_PATH,
+ confidentialNoteableDocsPath: CONFIDENTIAL_ISSUES_DOCS_PATH,
+ });
+ });
+ });
+
+ describe('issue is locked', () => {
+ beforeEach(() => {
+ createWrapper({
+ noteableData: { ...noteableDataMock, discussion_locked: true },
+ });
+ });
+
+ it('shows IssuableNoteWarning', () => {
+ expect(findIssuableNoteWarning().isVisible()).toBe(true);
+ });
+
+ it('sets IssuableNoteWarning props', () => {
+ expect(findIssuableNoteWarning().props()).toMatchObject({
+ isConfidential: false,
+ isLocked: true,
+ lockedNoteableDocsPath: LOCKED_DISCUSSION_DOCS_PATH,
+ confidentialNoteableDocsPath: CONFIDENTIAL_ISSUES_DOCS_PATH,
+ });
+ });
+ });
+
+ describe('issue has no email participants', () => {
+ it('does not show EmailParticipantsWarning', () => {
+ createWrapper();
+
+ expect(findEmailParticipantsWarning().exists()).toBe(false);
+ });
+ });
+
+ describe('issue has email participants', () => {
+ beforeEach(() => {
+ createWrapper({
+ noteableData: {
+ ...noteableDataMock,
+ issue_email_participants: [
+ { email: 'someone@gitlab.com' },
+ { email: 'another@gitlab.com' },
+ ],
+ },
+ });
+ });
+
+ it('shows EmailParticipantsWarning', () => {
+ expect(findEmailParticipantsWarning().isVisible()).toBe(true);
+ });
+
+ it('sets EmailParticipantsWarning props', () => {
+ expect(findEmailParticipantsWarning().props('emails')).toEqual([
+ 'someone@gitlab.com',
+ 'another@gitlab.com',
+ ]);
+ });
+ });
+});
diff --git a/spec/frontend/notes/components/comment_form_spec.js b/spec/frontend/notes/components/comment_form_spec.js
index fca1beca999..002c4f206cb 100644
--- a/spec/frontend/notes/components/comment_form_spec.js
+++ b/spec/frontend/notes/components/comment_form_spec.js
@@ -181,7 +181,7 @@ describe('issue_comment_form component', () => {
describe('edit mode', () => {
beforeEach(() => {
- mountComponent();
+ mountComponent({ mountFunction: mount });
});
it('should enter edit mode when arrow up is pressed', () => {
@@ -200,7 +200,7 @@ describe('issue_comment_form component', () => {
describe('event enter', () => {
beforeEach(() => {
- mountComponent();
+ mountComponent({ mountFunction: mount });
});
it('should save note when cmd+enter is pressed', () => {
@@ -368,17 +368,6 @@ describe('issue_comment_form component', () => {
});
});
});
-
- describe('issue is confidential', () => {
- it('shows information warning', () => {
- mountComponent({
- noteableData: { ...noteableDataMock, confidential: true },
- mountFunction: mount,
- });
-
- expect(wrapper.find('[data-testid="confidential-warning"]').exists()).toBe(true);
- });
- });
});
describe('user is not logged in', () => {
diff --git a/spec/frontend/notes/components/diff_discussion_header_spec.js b/spec/frontend/notes/components/diff_discussion_header_spec.js
index 9162bee2078..3940439a32b 100644
--- a/spec/frontend/notes/components/diff_discussion_header_spec.js
+++ b/spec/frontend/notes/components/diff_discussion_header_spec.js
@@ -45,7 +45,7 @@ describe('diff_discussion_header component', () => {
const truncatedCommitId = commitId.substr(0, 8);
let commitElement;
- beforeEach(done => {
+ beforeEach((done) => {
store.state.diffs = {
projectPath: 'something',
};
@@ -72,7 +72,7 @@ describe('diff_discussion_header component', () => {
});
describe('for diff threads without a commit id', () => {
- it('should show started a thread on the diff text', done => {
+ it('should show started a thread on the diff text', (done) => {
Object.assign(wrapper.vm.discussion, {
for_commit: false,
commit_id: null,
@@ -85,7 +85,7 @@ describe('diff_discussion_header component', () => {
});
});
- it('should show thread on older version text', done => {
+ it('should show thread on older version text', (done) => {
Object.assign(wrapper.vm.discussion, {
for_commit: false,
commit_id: null,
@@ -109,7 +109,7 @@ describe('diff_discussion_header component', () => {
});
describe('for diff thread with a commit id', () => {
- it('should display started thread on commit header', done => {
+ it('should display started thread on commit header', (done) => {
wrapper.vm.discussion.for_commit = false;
wrapper.vm.$nextTick(() => {
@@ -121,7 +121,7 @@ describe('diff_discussion_header component', () => {
});
});
- it('should display outdated change on commit header', done => {
+ it('should display outdated change on commit header', (done) => {
wrapper.vm.discussion.for_commit = false;
wrapper.vm.discussion.active = false;
diff --git a/spec/frontend/notes/components/discussion_actions_spec.js b/spec/frontend/notes/components/discussion_actions_spec.js
index b26eb00bfdc..48e569720e9 100644
--- a/spec/frontend/notes/components/discussion_actions_spec.js
+++ b/spec/frontend/notes/components/discussion_actions_spec.js
@@ -4,7 +4,6 @@ import DiscussionActions from '~/notes/components/discussion_actions.vue';
import ReplyPlaceholder from '~/notes/components/discussion_reply_placeholder.vue';
import ResolveDiscussionButton from '~/notes/components/discussion_resolve_button.vue';
import ResolveWithIssueButton from '~/notes/components/discussion_resolve_with_issue_button.vue';
-import JumpToNextDiscussionButton from '~/notes/components/discussion_jump_to_next_button.vue';
import createStore from '~/notes/stores';
// NOTE: clone mock_data so that it is not accidentally mutated
@@ -21,7 +20,7 @@ const createUnallowedNote = () =>
describe('DiscussionActions', () => {
let wrapper;
- const createComponentFactory = (shallow = true) => (props, options) => {
+ const createComponentFactory = (shallow = true) => (props) => {
const store = createStore();
const mountFn = shallow ? shallowMount : mount;
@@ -35,11 +34,6 @@ describe('DiscussionActions', () => {
shouldShowJumpToNextDiscussion: true,
...props,
},
- provide: {
- glFeatures: {
- hideJumpToNextUnresolvedInThreads: options?.hideJumpToNextUnresolvedInThreads,
- },
- },
});
};
@@ -55,7 +49,6 @@ describe('DiscussionActions', () => {
expect(wrapper.find(ReplyPlaceholder).exists()).toBe(true);
expect(wrapper.find(ResolveDiscussionButton).exists()).toBe(true);
expect(wrapper.find(ResolveWithIssueButton).exists()).toBe(true);
- expect(wrapper.find(JumpToNextDiscussionButton).exists()).toBe(true);
});
it('only renders reply placeholder if discussion is not resolvable', () => {
@@ -66,7 +59,6 @@ describe('DiscussionActions', () => {
expect(wrapper.find(ReplyPlaceholder).exists()).toBe(true);
expect(wrapper.find(ResolveDiscussionButton).exists()).toBe(false);
expect(wrapper.find(ResolveWithIssueButton).exists()).toBe(false);
- expect(wrapper.find(JumpToNextDiscussionButton).exists()).toBe(false);
});
it('does not render resolve with issue button if resolveWithIssuePath is falsy', () => {
@@ -75,12 +67,6 @@ describe('DiscussionActions', () => {
expect(wrapper.find(ResolveWithIssueButton).exists()).toBe(false);
});
- it('does not render jump to next discussion button if shouldShowJumpToNextDiscussion is false', () => {
- createComponent({ shouldShowJumpToNextDiscussion: false });
-
- expect(wrapper.find(JumpToNextDiscussionButton).exists()).toBe(false);
- });
-
describe.each`
desc | notes | shouldRender
${'with no notes'} | ${[]} | ${true}
@@ -101,13 +87,6 @@ describe('DiscussionActions', () => {
});
});
- it('does not render jump to next discussion button if feature flag is enabled', () => {
- const createComponent = createComponentFactory();
- createComponent({}, { hideJumpToNextUnresolvedInThreads: true });
-
- expect(wrapper.find(JumpToNextDiscussionButton).exists()).toBe(false);
- });
-
describe('events handling', () => {
const createComponent = createComponentFactory(false);
@@ -117,19 +96,13 @@ describe('DiscussionActions', () => {
it('emits showReplyForm event when clicking on reply placeholder', () => {
jest.spyOn(wrapper.vm, '$emit');
- wrapper
- .find(ReplyPlaceholder)
- .find('button')
- .trigger('click');
+ wrapper.find(ReplyPlaceholder).find('button').trigger('click');
expect(wrapper.vm.$emit).toHaveBeenCalledWith('showReplyForm');
});
it('emits resolve event when clicking on resolve button', () => {
jest.spyOn(wrapper.vm, '$emit');
- wrapper
- .find(ResolveDiscussionButton)
- .find('button')
- .trigger('click');
+ wrapper.find(ResolveDiscussionButton).find('button').trigger('click');
expect(wrapper.vm.$emit).toHaveBeenCalledWith('resolve');
});
});
diff --git a/spec/frontend/notes/components/discussion_counter_spec.js b/spec/frontend/notes/components/discussion_counter_spec.js
index d82590c7e9e..ebf7d52f38b 100644
--- a/spec/frontend/notes/components/discussion_counter_spec.js
+++ b/spec/frontend/notes/components/discussion_counter_spec.js
@@ -95,7 +95,7 @@ describe('DiscussionCounter component', () => {
describe('toggle all threads button', () => {
let toggleAllButton;
- const updateStoreWithExpanded = expanded => {
+ const updateStoreWithExpanded = (expanded) => {
const discussion = { ...discussionMock, expanded };
store.commit(types.SET_INITIAL_DISCUSSIONS, [discussion]);
store.dispatch('updateResolvableDiscussionsCounts');
diff --git a/spec/frontend/notes/components/discussion_filter_note_spec.js b/spec/frontend/notes/components/discussion_filter_note_spec.js
index d35f8f7c28d..9ae3f08df77 100644
--- a/spec/frontend/notes/components/discussion_filter_note_spec.js
+++ b/spec/frontend/notes/components/discussion_filter_note_spec.js
@@ -31,20 +31,14 @@ describe('DiscussionFilterNote component', () => {
it('emits `dropdownSelect` event with 0 parameter on clicking Show all activity button', () => {
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
- wrapper
- .findAll(GlButton)
- .at(0)
- .vm.$emit('click');
+ wrapper.findAll(GlButton).at(0).vm.$emit('click');
expect(eventHub.$emit).toHaveBeenCalledWith('dropdownSelect', 0);
});
it('emits `dropdownSelect` event with 1 parameter on clicking Show comments only button', () => {
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
- wrapper
- .findAll(GlButton)
- .at(1)
- .vm.$emit('click');
+ wrapper.findAll(GlButton).at(1).vm.$emit('click');
expect(eventHub.$emit).toHaveBeenCalledWith('dropdownSelect', 1);
});
diff --git a/spec/frontend/notes/components/discussion_filter_spec.js b/spec/frontend/notes/components/discussion_filter_spec.js
index 9f3655c53b9..aeba8e8056c 100644
--- a/spec/frontend/notes/components/discussion_filter_spec.js
+++ b/spec/frontend/notes/components/discussion_filter_spec.js
@@ -1,7 +1,7 @@
import Vuex from 'vuex';
import { createLocalVue, mount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
-import { TEST_HOST } from 'jest/helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
import createEventHub from '~/helpers/event_hub_factory';
import axios from '~/lib/utils/axios_utils';
@@ -25,7 +25,8 @@ describe('DiscussionFilter component', () => {
const filterDiscussion = jest.fn();
- const findFilter = filterType => wrapper.find(`.dropdown-item[data-filter-type="${filterType}"]`);
+ const findFilter = (filterType) =>
+ wrapper.find(`.dropdown-item[data-filter-type="${filterType}"]`);
const mountComponent = () => {
const discussions = [
@@ -82,12 +83,9 @@ describe('DiscussionFilter component', () => {
});
it('renders the default selected item', () => {
- expect(
- wrapper
- .find('#discussion-filter-dropdown .dropdown-item')
- .text()
- .trim(),
- ).toBe(discussionFiltersMock[0].title);
+ expect(wrapper.find('#discussion-filter-dropdown .dropdown-item').text().trim()).toBe(
+ discussionFiltersMock[0].title,
+ );
});
it('updates to the selected item', () => {
@@ -148,7 +146,7 @@ describe('DiscussionFilter component', () => {
window.mrTabs = undefined;
});
- it('only renders when discussion tab is active', done => {
+ it('only renders when discussion tab is active', (done) => {
eventHub.$emit('MergeRequestTabChange', 'commit');
wrapper.vm.$nextTick(() => {
@@ -163,7 +161,7 @@ describe('DiscussionFilter component', () => {
window.location.hash = '';
});
- it('updates the filter when the URL links to a note', done => {
+ it('updates the filter when the URL links to a note', (done) => {
window.location.hash = `note_${discussionMock.notes[0].id}`;
wrapper.vm.currentValue = discussionFiltersMock[2].value;
wrapper.vm.handleLocationHash();
@@ -174,7 +172,7 @@ describe('DiscussionFilter component', () => {
});
});
- it('does not update the filter when the current filter is "Show all activity"', done => {
+ it('does not update the filter when the current filter is "Show all activity"', (done) => {
window.location.hash = `note_${discussionMock.notes[0].id}`;
wrapper.vm.handleLocationHash();
@@ -184,7 +182,7 @@ describe('DiscussionFilter component', () => {
});
});
- it('only updates filter when the URL links to a note', done => {
+ it('only updates filter when the URL links to a note', (done) => {
window.location.hash = `testing123`;
wrapper.vm.handleLocationHash();
@@ -194,7 +192,7 @@ describe('DiscussionFilter component', () => {
});
});
- it('fetches discussions when there is a hash', done => {
+ it('fetches discussions when there is a hash', (done) => {
window.location.hash = `note_${discussionMock.notes[0].id}`;
wrapper.vm.currentValue = discussionFiltersMock[2].value;
jest.spyOn(wrapper.vm, 'selectFilter').mockImplementation(() => {});
@@ -206,7 +204,7 @@ describe('DiscussionFilter component', () => {
});
});
- it('does not fetch discussions when there is no hash', done => {
+ it('does not fetch discussions when there is no hash', (done) => {
window.location.hash = '';
jest.spyOn(wrapper.vm, 'selectFilter').mockImplementation(() => {});
wrapper.vm.handleLocationHash();
diff --git a/spec/frontend/notes/components/discussion_jump_to_next_button_spec.js b/spec/frontend/notes/components/discussion_jump_to_next_button_spec.js
deleted file mode 100644
index 183966cf435..00000000000
--- a/spec/frontend/notes/components/discussion_jump_to_next_button_spec.js
+++ /dev/null
@@ -1,43 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import JumpToNextDiscussionButton from '~/notes/components/discussion_jump_to_next_button.vue';
-import { mockTracking } from '../../helpers/tracking_helper';
-
-describe('JumpToNextDiscussionButton', () => {
- const fromDiscussionId = 'abc123';
- let wrapper;
- let trackingSpy;
- let jumpFn;
-
- beforeEach(() => {
- jumpFn = jest.fn();
- wrapper = shallowMount(JumpToNextDiscussionButton, {
- propsData: { fromDiscussionId },
- });
- wrapper.setMethods({ jumpToNextRelativeDiscussion: jumpFn });
-
- trackingSpy = mockTracking('_category_', wrapper.element, jest.spyOn);
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('matches the snapshot', () => {
- expect(wrapper.vm.$el).toMatchSnapshot();
- });
-
- it('calls jumpToNextRelativeDiscussion when clicked', () => {
- wrapper.find({ ref: 'button' }).trigger('click');
-
- expect(jumpFn).toHaveBeenCalledWith(fromDiscussionId);
- });
-
- it('sends the correct tracking event when clicked', () => {
- wrapper.find({ ref: 'button' }).trigger('click');
-
- expect(trackingSpy).toHaveBeenCalledWith('_category_', 'click_button', {
- label: 'mr_next_unresolved_thread',
- property: 'click_next_unresolved_thread',
- });
- });
-});
diff --git a/spec/frontend/notes/components/discussion_notes_spec.js b/spec/frontend/notes/components/discussion_notes_spec.js
index 8cc98f978c2..e803dcb7b4a 100644
--- a/spec/frontend/notes/components/discussion_notes_spec.js
+++ b/spec/frontend/notes/components/discussion_notes_spec.js
@@ -122,12 +122,11 @@ describe('DiscussionNotes', () => {
describe('events', () => {
describe('with grouped notes and replies expanded', () => {
- const findNoteAtIndex = index => {
+ const findNoteAtIndex = (index) => {
const noteComponents = [NoteableNote, SystemNote, PlaceholderNote, PlaceholderSystemNote];
- const allowedNames = noteComponents.map(c => c.name);
return wrapper
.findAll('.notes *')
- .filter(w => allowedNames.includes(w.name()))
+ .filter((w) => noteComponents.some((Component) => w.is(Component)))
.at(index);
};
diff --git a/spec/frontend/notes/components/discussion_resolve_button_spec.js b/spec/frontend/notes/components/discussion_resolve_button_spec.js
index 41701e54dfa..5105e1013d3 100644
--- a/spec/frontend/notes/components/discussion_resolve_button_spec.js
+++ b/spec/frontend/notes/components/discussion_resolve_button_spec.js
@@ -7,7 +7,7 @@ const buttonTitle = 'Resolve discussion';
describe('resolveDiscussionButton', () => {
let wrapper;
- const factory = options => {
+ const factory = (options) => {
wrapper = shallowMount(resolveDiscussionButton, {
...options,
});
diff --git a/spec/frontend/notes/components/email_participants_warning_spec.js b/spec/frontend/notes/components/email_participants_warning_spec.js
new file mode 100644
index 00000000000..ab1a6b152a4
--- /dev/null
+++ b/spec/frontend/notes/components/email_participants_warning_spec.js
@@ -0,0 +1,70 @@
+import { mount } from '@vue/test-utils';
+import EmailParticipantsWarning from '~/notes/components/email_participants_warning.vue';
+
+describe('Email Participants Warning Component', () => {
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findMoreButton = () => wrapper.find('button');
+
+ const createWrapper = (emails) => {
+ wrapper = mount(EmailParticipantsWarning, {
+ propsData: { emails },
+ });
+ };
+
+ describe('with 3 or less emails', () => {
+ beforeEach(() => {
+ createWrapper(['a@gitlab.com', 'b@gitlab.com', 'c@gitlab.com']);
+ });
+
+ it('more button does not exist', () => {
+ expect(findMoreButton().exists()).toBe(false);
+ });
+
+ it('all emails are displayed', () => {
+ expect(wrapper.text()).toBe(
+ 'a@gitlab.com, b@gitlab.com, and c@gitlab.com will be notified of your comment.',
+ );
+ });
+ });
+
+ describe('with more than 3 emails', () => {
+ beforeEach(() => {
+ createWrapper(['a@gitlab.com', 'b@gitlab.com', 'c@gitlab.com', 'd@gitlab.com']);
+ });
+
+ it('only displays first 3 emails', () => {
+ expect(wrapper.text()).toContain('a@gitlab.com, b@gitlab.com, c@gitlab.com');
+ expect(wrapper.text()).not.toContain('d@gitlab.com');
+ });
+
+ it('more button does exist', () => {
+ expect(findMoreButton().exists()).toBe(true);
+ });
+
+ it('more button displays the correct wordage', () => {
+ expect(findMoreButton().text()).toBe('and 1 more');
+ });
+
+ describe('when more button clicked', () => {
+ beforeEach(() => {
+ findMoreButton().trigger('click');
+ });
+
+ it('more button no longer exists', () => {
+ expect(findMoreButton().exists()).toBe(false);
+ });
+
+ it('all emails are displayed', () => {
+ expect(wrapper.text()).toBe(
+ 'a@gitlab.com, b@gitlab.com, c@gitlab.com, and d@gitlab.com will be notified of your comment.',
+ );
+ });
+ });
+ });
+});
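
The warning component above shows at most three addresses and collapses the remainder behind an "and N more" button. A hypothetical sketch of that display rule (names are illustrative, not the component's source):

// Show up to `limit` addresses; report how many are hidden behind the button.
const visibleEmails = (emails, limit = 3) =>
  emails.length > limit
    ? { shown: emails.slice(0, limit), moreCount: emails.length - limit }
    : { shown: emails, moreCount: 0 };

visibleEmails(['a@gitlab.com', 'b@gitlab.com', 'c@gitlab.com', 'd@gitlab.com']);
// => { shown: ['a@gitlab.com', 'b@gitlab.com', 'c@gitlab.com'], moreCount: 1 }
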
diff --git a/spec/frontend/notes/components/multiline_comment_form_spec.js b/spec/frontend/notes/components/multiline_comment_form_spec.js
new file mode 100644
index 00000000000..081fd6e10ef
--- /dev/null
+++ b/spec/frontend/notes/components/multiline_comment_form_spec.js
@@ -0,0 +1,89 @@
+import Vue from 'vue';
+import Vuex from 'vuex';
+import { mount } from '@vue/test-utils';
+import { GlFormSelect } from '@gitlab/ui';
+import MultilineCommentForm from '~/notes/components/multiline_comment_form.vue';
+import notesModule from '~/notes/stores/modules';
+
+describe('MultilineCommentForm', () => {
+ Vue.use(Vuex);
+ const setSelectedCommentPosition = jest.fn();
+ const testLine = {
+ line_code: 'test',
+ type: 'test',
+ old_line: 'test',
+ new_line: 'test',
+ };
+
+ const createWrapper = (props = {}, state) => {
+ setSelectedCommentPosition.mockReset();
+
+ const store = new Vuex.Store({
+ modules: { notes: notesModule() },
+ actions: { setSelectedCommentPosition },
+ });
+ if (state) store.replaceState({ ...store.state, ...state });
+
+ const propsData = {
+ line: { ...testLine },
+ commentLineOptions: [{ text: '1' }],
+ ...props,
+ };
+ return mount(MultilineCommentForm, { propsData, store });
+ };
+
+ describe('created', () => {
+ it('sets commentLineStart to line', () => {
+ const line = { ...testLine };
+ const wrapper = createWrapper({ line });
+
+ expect(wrapper.vm.commentLineStart).toEqual(line);
+ expect(setSelectedCommentPosition).toHaveBeenCalled();
+ });
+
+ it('sets commentLineStart to lineRange', () => {
+ const lineRange = {
+ start: { ...testLine },
+ };
+ const wrapper = createWrapper({ lineRange });
+
+ expect(wrapper.vm.commentLineStart).toEqual(lineRange.start);
+ expect(setSelectedCommentPosition).toHaveBeenCalled();
+ });
+
+ it('sets commentLineStart to selectedCommentPosition', () => {
+ const notes = {
+ selectedCommentPosition: {
+ start: { ...testLine },
+ },
+ };
+ const wrapper = createWrapper({}, { notes });
+
+ expect(wrapper.vm.commentLineStart).toEqual(wrapper.vm.selectedCommentPosition.start);
+ expect(setSelectedCommentPosition).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('destroyed', () => {
+ it('calls setSelectedCommentPosition', () => {
+ const wrapper = createWrapper();
+ wrapper.destroy();
+
+ // Once during created, once during destroyed
+ expect(setSelectedCommentPosition).toHaveBeenCalledTimes(2);
+ });
+ });
+
+ it('handles changing the start line', () => {
+ const line = { ...testLine };
+ const wrapper = createWrapper({ line });
+ const glSelect = wrapper.findComponent(GlFormSelect);
+
+ glSelect.vm.$emit('change', { ...testLine });
+
+ expect(wrapper.vm.commentLineStart).toEqual(line);
+ expect(wrapper.emitted('input')).toBeTruthy();
+ // Once during created, once during updateCommentLineStart
+ expect(setSelectedCommentPosition).toHaveBeenCalledTimes(2);
+ });
+});
diff --git a/spec/frontend/notes/components/multiline_comment_utils_spec.js b/spec/frontend/notes/components/multiline_comment_utils_spec.js
index 99b33e7cd5f..05faff02a6c 100644
--- a/spec/frontend/notes/components/multiline_comment_utils_spec.js
+++ b/spec/frontend/notes/components/multiline_comment_utils_spec.js
@@ -7,7 +7,7 @@ import {
describe('Multiline comment utilities', () => {
describe('get start & end line numbers', () => {
- const lineRanges = ['old', 'new', null].map(type => ({
+ const lineRanges = ['old', 'new', null].map((type) => ({
start: { new_line: 1, old_line: 1, type },
end: { new_line: 2, old_line: 2, type },
}));
@@ -35,7 +35,7 @@ describe('Multiline comment utilities', () => {
});
});
const inlineDiffLines = [{ line_code: '1' }, { line_code: '2' }, { line_code: '3' }];
- const parallelDiffLines = inlineDiffLines.map(line => ({
+ const parallelDiffLines = inlineDiffLines.map((line) => ({
left: { ...line },
right: { ...line },
}));
diff --git a/spec/frontend/notes/components/note_actions_spec.js b/spec/frontend/notes/components/note_actions_spec.js
index f01c6c6b84e..3cfc1445cb8 100644
--- a/spec/frontend/notes/components/note_actions_spec.js
+++ b/spec/frontend/notes/components/note_actions_spec.js
@@ -65,23 +65,11 @@ describe('noteActions', () => {
});
it('should render noteable author badge', () => {
- expect(
- wrapper
- .findAll('.note-role')
- .at(0)
- .text()
- .trim(),
- ).toEqual('Author');
+ expect(wrapper.findAll('.note-role').at(0).text().trim()).toEqual('Author');
});
it('should render access level badge', () => {
- expect(
- wrapper
- .findAll('.note-role')
- .at(1)
- .text()
- .trim(),
- ).toEqual(props.accessLevel);
+ expect(wrapper.findAll('.note-role').at(1).text().trim()).toEqual(props.accessLevel);
});
it('should render contributor badge', () => {
@@ -91,13 +79,7 @@ describe('noteActions', () => {
});
return wrapper.vm.$nextTick().then(() => {
- expect(
- wrapper
- .findAll('.note-role')
- .at(1)
- .text()
- .trim(),
- ).toBe('Contributor');
+ expect(wrapper.findAll('.note-role').at(1).text().trim()).toBe('Contributor');
});
});
@@ -119,7 +101,7 @@ describe('noteActions', () => {
expect(wrapper.find('.js-btn-copy-note-link').exists()).toBe(true);
});
- it('should not show copy link action when `noteUrl` prop is empty', done => {
+ it('should not show copy link action when `noteUrl` prop is empty', (done) => {
wrapper.setProps({
...props,
author: {
@@ -145,7 +127,7 @@ describe('noteActions', () => {
expect(wrapper.find('.js-note-delete').exists()).toBe(true);
});
- it('closes tooltip when dropdown opens', done => {
+ it('closes tooltip when dropdown opens', (done) => {
wrapper.find('.more-actions-toggle').trigger('click');
const rootWrapper = createWrapper(wrapper.vm.$root);
diff --git a/spec/frontend/notes/components/note_attachment_spec.js b/spec/frontend/notes/components/note_attachment_spec.js
index 9d1051676e1..d47c2beaaf8 100644
--- a/spec/frontend/notes/components/note_attachment_spec.js
+++ b/spec/frontend/notes/components/note_attachment_spec.js
@@ -7,7 +7,7 @@ describe('Issue note attachment', () => {
const findImage = () => wrapper.find({ ref: 'attachmentImage' });
const findUrl = () => wrapper.find({ ref: 'attachmentUrl' });
- const createComponent = attachment => {
+ const createComponent = (attachment) => {
wrapper = shallowMount(NoteAttachment, {
propsData: {
attachment,
diff --git a/spec/frontend/notes/components/note_awards_list_spec.js b/spec/frontend/notes/components/note_awards_list_spec.js
index 5ab183e5452..13a817902e6 100644
--- a/spec/frontend/notes/components/note_awards_list_spec.js
+++ b/spec/frontend/notes/components/note_awards_list_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
import AxiosMockAdapter from 'axios-mock-adapter';
-import { TEST_HOST } from 'jest/helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
import axios from '~/lib/utils/axios_utils';
import createStore from '~/notes/stores';
import awardsNote from '~/notes/components/note_awards_list.vue';
@@ -106,7 +106,7 @@ describe('note_awards_list component', () => {
awardsMock = [...new Array(1)].map(createAwardEmoji);
mountComponent();
- awardsMock.forEach(award => {
+ awardsMock.forEach((award) => {
expect(findTooltip()).toContain(award.user.name);
});
});
@@ -115,7 +115,7 @@ describe('note_awards_list component', () => {
awardsMock = [...new Array(2)].map(createAwardEmoji);
mountComponent();
- awardsMock.forEach(award => {
+ awardsMock.forEach((award) => {
expect(findTooltip()).toContain(award.user.name);
});
});
@@ -125,7 +125,7 @@ describe('note_awards_list component', () => {
mountComponent();
// Testing only the first 10 awards since 11 onward will not be displayed.
- awardsMock.slice(0, 10).forEach(award => {
+ awardsMock.slice(0, 10).forEach((award) => {
expect(findTooltip()).toContain(award.user.name);
});
});
diff --git a/spec/frontend/notes/components/note_body_spec.js b/spec/frontend/notes/components/note_body_spec.js
index efad0785afe..3c11c266f90 100644
--- a/spec/frontend/notes/components/note_body_spec.js
+++ b/spec/frontend/notes/components/note_body_spec.js
@@ -38,7 +38,7 @@ describe('issue_note_body component', () => {
});
describe('isEditing', () => {
- beforeEach(done => {
+ beforeEach((done) => {
vm.isEditing = true;
Vue.nextTick(done);
});
diff --git a/spec/frontend/notes/components/note_form_spec.js b/spec/frontend/notes/components/note_form_spec.js
index cc434d6c952..e64a75bede9 100644
--- a/spec/frontend/notes/components/note_form_spec.js
+++ b/spec/frontend/notes/components/note_form_spec.js
@@ -1,4 +1,5 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
+import { nextTick } from 'vue';
import createStore from '~/notes/stores';
import NoteForm from '~/notes/components/note_form.vue';
import batchComments from '~/batch_comments/stores/modules/batch_comments';
@@ -18,17 +19,14 @@ describe('issue_note_form component', () => {
let props;
const createComponentWrapper = () => {
- const localVue = createLocalVue();
- return shallowMount(localVue.extend(NoteForm), {
+ return mount(NoteForm, {
store,
propsData: props,
- // see https://gitlab.com/gitlab-org/gitlab-foss/issues/56317 for the following
- localVue,
});
};
beforeEach(() => {
- getDraft.mockImplementation(key => {
+ getDraft.mockImplementation((key) => {
if (key === dummyAutosaveKey) {
return dummyDraft;
}
@@ -60,15 +58,14 @@ describe('issue_note_form component', () => {
expect(wrapper.vm.noteHash).toBe(`#note_${props.noteId}`);
});
- it('return note hash as `#` when `noteId` is empty', () => {
+ it('returns note hash as `#` when `noteId` is empty', async () => {
wrapper.setProps({
...props,
noteId: '',
});
+ await nextTick();
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.noteHash).toBe('#');
- });
+ expect(wrapper.vm.noteHash).toBe('#');
});
});
@@ -77,7 +74,7 @@ describe('issue_note_form component', () => {
wrapper = createComponentWrapper();
});
- it('should show conflict message if note changes outside the component', () => {
+ it('should show conflict message if note changes outside the component', async () => {
wrapper.setProps({
...props,
isEditing: true,
@@ -87,17 +84,12 @@ describe('issue_note_form component', () => {
const message =
'This comment has changed since you started editing, please review the updated comment to ensure information is not lost.';
- return wrapper.vm.$nextTick().then(() => {
- const conflictWarning = wrapper.find('.js-conflict-edit-warning');
+ await nextTick();
- expect(conflictWarning.exists()).toBe(true);
- expect(
- conflictWarning
- .text()
- .replace(/\s+/g, ' ')
- .trim(),
- ).toBe(message);
- });
+ const conflictWarning = wrapper.find('.js-conflict-edit-warning');
+
+ expect(conflictWarning.exists()).toBe(true);
+ expect(conflictWarning.text().replace(/\s+/g, ' ').trim()).toBe(message);
});
});
@@ -161,36 +153,33 @@ describe('issue_note_form component', () => {
});
describe('actions', () => {
- it('should be possible to cancel', () => {
- const cancelHandler = jest.fn();
+ it('should be possible to cancel', async () => {
wrapper.setProps({
...props,
isEditing: true,
});
- wrapper.setMethods({ cancelHandler });
+ await nextTick();
- return wrapper.vm.$nextTick().then(() => {
- const cancelButton = wrapper.find('[data-testid="cancel"]');
- cancelButton.trigger('click');
+ const cancelButton = wrapper.find('[data-testid="cancel"]');
+ cancelButton.trigger('click');
+ await nextTick();
- expect(cancelHandler).toHaveBeenCalledWith(true);
- });
+ expect(wrapper.emitted().cancelForm).toHaveLength(1);
});
- it('should be possible to update the note', () => {
+ it('should be possible to update the note', async () => {
wrapper.setProps({
...props,
isEditing: true,
});
+ await nextTick();
- return wrapper.vm.$nextTick().then(() => {
- const textarea = wrapper.find('textarea');
- textarea.setValue('Foo');
- const saveButton = wrapper.find('.js-vue-issue-save');
- saveButton.trigger('click');
+ const textarea = wrapper.find('textarea');
+ textarea.setValue('Foo');
+ const saveButton = wrapper.find('.js-vue-issue-save');
+ saveButton.trigger('click');
- expect(wrapper.vm.isSubmitting).toBe(true);
- });
+ expect(wrapper.vm.isSubmitting).toBe(true);
});
});
});
@@ -204,7 +193,7 @@ describe('issue_note_form component', () => {
});
wrapper = createComponentWrapper();
- return wrapper.vm.$nextTick();
+ return nextTick();
});
it('displays the draft in textarea', () => {
@@ -222,7 +211,7 @@ describe('issue_note_form component', () => {
});
wrapper = createComponentWrapper();
- return wrapper.vm.$nextTick();
+ return nextTick();
});
it('leaves the textarea empty', () => {
@@ -278,15 +267,14 @@ describe('issue_note_form component', () => {
});
});
- it('should be possible to cancel', () => {
+ it('should be possible to cancel', async () => {
jest.spyOn(wrapper.vm, 'cancelHandler');
- return wrapper.vm.$nextTick().then(() => {
- const cancelButton = wrapper.find('[data-testid="cancelBatchCommentsEnabled"]');
- cancelButton.trigger('click');
+ await nextTick();
+ const cancelButton = wrapper.find('[data-testid="cancelBatchCommentsEnabled"]');
+ cancelButton.trigger('click');
- expect(wrapper.vm.cancelHandler).toHaveBeenCalledWith(true);
- });
+ expect(wrapper.vm.cancelHandler).toHaveBeenCalledWith(true);
});
it('shows resolve checkbox', () => {
@@ -299,7 +287,7 @@ describe('issue_note_form component', () => {
discussion: {
...discussionMock,
notes: [
- ...discussionMock.notes.map(n => ({
+ ...discussionMock.notes.map((n) => ({
...n,
resolvable: true,
current_user: { ...n.current_user, can_resolve_discussion: false },
@@ -309,7 +297,7 @@ describe('issue_note_form component', () => {
},
});
- await wrapper.vm.$nextTick();
+ await nextTick();
expect(wrapper.find('.js-resolve-checkbox').exists()).toBe(false);
});
@@ -317,7 +305,7 @@ describe('issue_note_form component', () => {
it('hides actions for commits', () => {
wrapper.setProps({ discussion: { for_commit: true } });
- return wrapper.vm.$nextTick(() => {
+ return nextTick(() => {
expect(wrapper.find('.note-form-actions').text()).not.toContain('Start a review');
});
});
@@ -331,7 +319,7 @@ describe('issue_note_form component', () => {
textarea.setValue('Foo');
textarea.trigger('keydown.enter', { metaKey: true });
- return wrapper.vm.$nextTick(() => {
+ return nextTick(() => {
expect(wrapper.vm.handleAddToReview).toHaveBeenCalled();
});
});
diff --git a/spec/frontend/notes/components/note_header_spec.js b/spec/frontend/notes/components/note_header_spec.js
index 1c6d0bafda8..132e3d8aa7e 100644
--- a/spec/frontend/notes/components/note_header_spec.js
+++ b/spec/frontend/notes/components/note_header_spec.js
@@ -41,7 +41,7 @@ describe('NoteHeader component', () => {
},
};
- const createComponent = props => {
+ const createComponent = (props) => {
wrapper = shallowMount(NoteHeader, {
localVue,
store: new Vuex.Store({
@@ -252,7 +252,7 @@ describe('NoteHeader component', () => {
});
describe('when author username link is hovered', () => {
- it('toggles hover specific CSS classes on author name link', done => {
+ it('toggles hover-specific CSS classes on author name link', (done) => {
createComponent({ author });
const authorUsernameLink = wrapper.find({ ref: 'authorUsernameLink' });
diff --git a/spec/frontend/notes/components/noteable_discussion_spec.js b/spec/frontend/notes/components/noteable_discussion_spec.js
index 1c6603899d3..b87c6cd7f2b 100644
--- a/spec/frontend/notes/components/noteable_discussion_spec.js
+++ b/spec/frontend/notes/components/noteable_discussion_spec.js
@@ -1,8 +1,10 @@
-import { mount, createLocalVue } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import { mount } from '@vue/test-utils';
import mockDiffFile from 'jest/diffs/mock_data/diff_file';
import { trimText } from 'helpers/text_helper';
import createStore from '~/notes/stores';
-import noteableDiscussion from '~/notes/components/noteable_discussion.vue';
+import NoteableDiscussion from '~/notes/components/noteable_discussion.vue';
+import DiscussionNotes from '~/notes/components/discussion_notes.vue';
import ReplyPlaceholder from '~/notes/components/discussion_reply_placeholder.vue';
import ResolveWithIssueButton from '~/notes/components/discussion_resolve_with_issue_button.vue';
import NoteForm from '~/notes/components/note_form.vue';
@@ -17,8 +19,6 @@ import {
const discussionWithTwoUnresolvedNotes = 'merge_requests/resolved_diff_discussion.json';
-const localVue = createLocalVue();
-
describe('noteable_discussion component', () => {
let store;
let wrapper;
@@ -32,10 +32,9 @@ describe('noteable_discussion component', () => {
store.dispatch('setNoteableData', noteableDataMock);
store.dispatch('setNotesData', notesDataMock);
- wrapper = mount(localVue.extend(noteableDiscussion), {
+ wrapper = mount(NoteableDiscussion, {
store,
propsData: { discussion: discussionMock },
- localVue,
});
});
@@ -47,63 +46,58 @@ describe('noteable_discussion component', () => {
expect(wrapper.find('.discussion-header').exists()).toBe(false);
});
- it('should render thread header', () => {
+ it('should render thread header', async () => {
const discussion = { ...discussionMock };
discussion.diff_file = mockDiffFile;
discussion.diff_discussion = true;
discussion.expanded = false;
wrapper.setProps({ discussion });
+ await nextTick();
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.find('.discussion-header').exists()).toBe(true);
- });
+ expect(wrapper.find('.discussion-header').exists()).toBe(true);
});
describe('actions', () => {
- it('should toggle reply form', () => {
- const replyPlaceholder = wrapper.find(ReplyPlaceholder);
+ it('should toggle reply form', async () => {
+ await nextTick();
- return wrapper.vm
- .$nextTick()
- .then(() => {
- expect(wrapper.vm.isReplying).toEqual(false);
+ expect(wrapper.vm.isReplying).toEqual(false);
- replyPlaceholder.vm.$emit('onClick');
- })
- .then(() => wrapper.vm.$nextTick())
- .then(() => {
- expect(wrapper.vm.isReplying).toEqual(true);
+ const replyPlaceholder = wrapper.find(ReplyPlaceholder);
+ replyPlaceholder.vm.$emit('onClick');
+ await nextTick();
- const noteForm = wrapper.find(NoteForm);
+ expect(wrapper.vm.isReplying).toEqual(true);
- expect(noteForm.exists()).toBe(true);
+ const noteForm = wrapper.find(NoteForm);
- const noteFormProps = noteForm.props();
+ expect(noteForm.exists()).toBe(true);
- expect(noteFormProps.discussion).toBe(discussionMock);
- expect(noteFormProps.isEditing).toBe(false);
- expect(noteFormProps.line).toBe(null);
- expect(noteFormProps.saveButtonTitle).toBe('Comment');
- expect(noteFormProps.autosaveKey).toBe(`Note/Issue/${discussionMock.id}/Reply`);
- });
+ const noteFormProps = noteForm.props();
+
+ expect(noteFormProps.discussion).toBe(discussionMock);
+ expect(noteFormProps.isEditing).toBe(false);
+ expect(noteFormProps.line).toBe(null);
+ expect(noteFormProps.saveButtonTitle).toBe('Comment');
+ expect(noteFormProps.autosaveKey).toBe(`Note/Issue/${discussionMock.id}/Reply`);
});
it('should expand discussion', async () => {
- const expandDiscussion = jest.fn();
- const discussion = { ...discussionMock };
- discussion.expanded = false;
+ const discussion = { ...discussionMock, expanded: false };
wrapper.setProps({ discussion });
- wrapper.setMethods({ expandDiscussion });
+ store.dispatch = jest.fn();
- await wrapper.vm.$nextTick();
+ await nextTick();
- wrapper.vm.showReplyForm();
+ wrapper.find(DiscussionNotes).vm.$emit('startReplying');
- await wrapper.vm.$nextTick();
+ await nextTick();
- expect(expandDiscussion).toHaveBeenCalledWith({ discussionId: discussion.id });
+ expect(store.dispatch).toHaveBeenCalledWith('expandDiscussion', {
+ discussionId: discussion.id,
+ });
});
it('does not render jump to thread button', () => {
@@ -132,7 +126,7 @@ describe('noteable_discussion component', () => {
...getJSONFixture(discussionWithTwoUnresolvedNotes)[0],
expanded: true,
};
- discussion.notes = discussion.notes.map(note => ({
+ discussion.notes = discussion.notes.map((note) => ({
...note,
resolved: false,
current_user: {
@@ -143,7 +137,7 @@ describe('noteable_discussion component', () => {
wrapper.setProps({ discussion });
- return wrapper.vm.$nextTick();
+ return nextTick();
});
it('displays a button to resolve with issue', () => {
@@ -169,10 +163,9 @@ describe('noteable_discussion component', () => {
window.gon.current_user_id = userDataMock.id;
store.dispatch('setUserData', userDataMock);
- wrapper = mount(localVue.extend(noteableDiscussion), {
+ wrapper = mount(NoteableDiscussion, {
store,
propsData: { discussion: discussionMock },
- localVue,
});
});
@@ -188,10 +181,9 @@ describe('noteable_discussion component', () => {
store.dispatch('setNoteableData', loggedOutnoteableData);
store.dispatch('setNotesData', notesDataMock);
- wrapper = mount(localVue.extend(noteableDiscussion), {
+ wrapper = mount(NoteableDiscussion, {
store,
propsData: { discussion: discussionMock },
- localVue,
});
});
diff --git a/spec/frontend/notes/components/noteable_note_spec.js b/spec/frontend/notes/components/noteable_note_spec.js
index a08e86d92d3..6f06665f412 100644
--- a/spec/frontend/notes/components/noteable_note_spec.js
+++ b/spec/frontend/notes/components/noteable_note_spec.js
@@ -208,7 +208,7 @@ describe('issue_note', () => {
expect(noteBodyProps.helpPagePath).toBe('');
});
- it('prevents note preview xss', done => {
+ it('prevents note preview xss', (done) => {
const imgSrc = 'data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7';
const noteBody = `<img src="${imgSrc}" onload="alert(1)" />`;
const alertSpy = jest.spyOn(window, 'alert');
@@ -230,7 +230,7 @@ describe('issue_note', () => {
});
describe('cancel edit', () => {
- it('restores content of updated note', done => {
+ it('restores content of updated note', (done) => {
const updatedText = 'updated note text';
store.hotUpdate({
actions: {
diff --git a/spec/frontend/notes/components/notes_app_spec.js b/spec/frontend/notes/components/notes_app_spec.js
index e905a12919e..e495a4738e0 100644
--- a/spec/frontend/notes/components/notes_app_spec.js
+++ b/spec/frontend/notes/components/notes_app_spec.js
@@ -36,16 +36,16 @@ describe('note_app', () => {
const getComponentOrder = () => {
return wrapper
.findAll('#notes-list,.js-comment-form')
- .wrappers.map(node => (node.is(CommentForm) ? TYPE_COMMENT_FORM : TYPE_NOTES_LIST));
+ .wrappers.map((node) => (node.is(CommentForm) ? TYPE_COMMENT_FORM : TYPE_NOTES_LIST));
};
/**
* waits for fetchNotes() to complete
*/
const waitForDiscussionsRequest = () =>
- new Promise(resolve => {
+ new Promise((resolve) => {
const { vm } = wrapper.find(NotesApp);
- const unwatch = vm.$watch('isFetching', isFetching => {
+ const unwatch = vm.$watch('isFetching', (isFetching) => {
if (isFetching) {
return;
}
@@ -129,18 +129,15 @@ describe('note_app', () => {
'/gitlab-org/gitlab-foss/issues/26/discussions.json'
][0].notes[0];
- expect(
- wrapper
- .find('.main-notes-list .note-header-author-name')
- .text()
- .trim(),
- ).toEqual(note.author.name);
+ expect(wrapper.find('.main-notes-list .note-header-author-name').text().trim()).toEqual(
+ note.author.name,
+ );
expect(wrapper.find('.main-notes-list .note-text').html()).toContain(note.note_html);
});
it('should render form', () => {
- expect(wrapper.find('.js-main-target-form').name()).toEqual('form');
+ expect(wrapper.find('.js-main-target-form').element.tagName).toBe('FORM');
expect(wrapper.find('.js-main-target-form textarea').attributes('placeholder')).toEqual(
'Write a comment or drag your files here…',
);
@@ -205,7 +202,7 @@ describe('note_app', () => {
});
it('should render form', () => {
- expect(wrapper.find('.js-main-target-form').name()).toEqual('form');
+ expect(wrapper.find('.js-main-target-form').element.tagName).toBe('FORM');
expect(wrapper.find('.js-main-target-form textarea').attributes('placeholder')).toEqual(
'Write a comment or drag your files here…',
);
@@ -272,23 +269,15 @@ describe('note_app', () => {
it('should render markdown docs url', () => {
const { markdownDocsPath } = mockData.notesDataMock;
- expect(
- wrapper
- .find(`a[href="${markdownDocsPath}"]`)
- .text()
- .trim(),
- ).toEqual('Markdown');
+ expect(wrapper.find(`a[href="${markdownDocsPath}"]`).text().trim()).toEqual('Markdown');
});
it('should render quick action docs url', () => {
const { quickActionsDocsPath } = mockData.notesDataMock;
- expect(
- wrapper
- .find(`a[href="${quickActionsDocsPath}"]`)
- .text()
- .trim(),
- ).toEqual('quick actions');
+ expect(wrapper.find(`a[href="${quickActionsDocsPath}"]`).text().trim()).toEqual(
+ 'quick actions',
+ );
});
});
@@ -304,12 +293,9 @@ describe('note_app', () => {
const { markdownDocsPath } = mockData.notesDataMock;
return Vue.nextTick().then(() => {
- expect(
- wrapper
- .find(`.edit-note a[href="${markdownDocsPath}"]`)
- .text()
- .trim(),
- ).toEqual('Markdown is supported');
+ expect(wrapper.find(`.edit-note a[href="${markdownDocsPath}"]`).text().trim()).toEqual(
+ 'Markdown is supported',
+ );
});
});
diff --git a/spec/frontend/notes/components/toggle_replies_widget_spec.js b/spec/frontend/notes/components/toggle_replies_widget_spec.js
index b4f68b039cf..409e1bc3951 100644
--- a/spec/frontend/notes/components/toggle_replies_widget_spec.js
+++ b/spec/frontend/notes/components/toggle_replies_widget_spec.js
@@ -3,7 +3,7 @@ import mountComponent from 'helpers/vue_mount_component_helper';
import toggleRepliesWidget from '~/notes/components/toggle_replies_widget.vue';
import { note } from '../mock_data';
-const deepCloneObject = obj => JSON.parse(JSON.stringify(obj));
+const deepCloneObject = (obj) => JSON.parse(JSON.stringify(obj));
describe('toggle replies widget for notes', () => {
let vm;
diff --git a/spec/frontend/notes/helpers.js b/spec/frontend/notes/helpers.js
index c8168a49a5b..ed34f985e94 100644
--- a/spec/frontend/notes/helpers.js
+++ b/spec/frontend/notes/helpers.js
@@ -1,4 +1,4 @@
-export const resetStore = store => {
+export const resetStore = (store) => {
store.replaceState({
notes: [],
targetNoteHash: null,
diff --git a/spec/frontend/notes/mixins/discussion_navigation_spec.js b/spec/frontend/notes/mixins/discussion_navigation_spec.js
index 4114df618e5..9c9a648d213 100644
--- a/spec/frontend/notes/mixins/discussion_navigation_spec.js
+++ b/spec/frontend/notes/mixins/discussion_navigation_spec.js
@@ -34,7 +34,7 @@ describe('Discussion navigation mixin', () => {
setHTMLFixture(
[...'abcde']
.map(
- id =>
+ (id) =>
`<ul class="notes" data-discussion-id="${id}"></ul>
<div class="discussion" data-discussion-id="${id}"></div>`,
)
diff --git a/spec/frontend/notes/old_notes_spec.js b/spec/frontend/notes/old_notes_spec.js
index dee4f93f0ce..00821980e8a 100644
--- a/spec/frontend/notes/old_notes_spec.js
+++ b/spec/frontend/notes/old_notes_spec.js
@@ -82,9 +82,7 @@ describe.skip('Old Notes (~/notes.js)', () => {
it('modifies the Markdown field', () => {
const changeEvent = document.createEvent('HTMLEvents');
changeEvent.initEvent('change', true, true);
- $('input[type=checkbox]')
- .attr('checked', true)[0]
- .dispatchEvent(changeEvent);
+ $('input[type=checkbox]').attr('checked', true)[0].dispatchEvent(changeEvent);
expect($('.js-task-list-field.original-task-list').val()).toBe('- [x] Task List Item');
});
@@ -129,7 +127,7 @@ describe.skip('Old Notes (~/notes.js)', () => {
jest.spyOn(notes, 'renderNote');
- $('.js-comment-button').on('click', e => {
+ $('.js-comment-button').on('click', (e) => {
const $form = $(this);
e.preventDefault();
notes.addNote($form, {});
@@ -552,7 +550,7 @@ describe.skip('Old Notes (~/notes.js)', () => {
expect($notesContainer.find('.note.being-posted').length).toBeGreaterThan(0);
});
- it('should remove placeholder note when new comment is done posting', done => {
+ it('should remove placeholder note when new comment is done posting', (done) => {
mockNotesPost();
$('.js-comment-button').click();
@@ -564,7 +562,7 @@ describe.skip('Old Notes (~/notes.js)', () => {
});
describe('postComment', () => {
- it('disables the submit button', done => {
+ it('disables the submit button', (done) => {
const $submitButton = $form.find('.js-comment-submit-button');
expect($submitButton).not.toBeDisabled();
@@ -587,7 +585,7 @@ describe.skip('Old Notes (~/notes.js)', () => {
});
});
- it('should show actual note element when new comment is done posting', done => {
+ it('should show actual note element when new comment is done posting', (done) => {
mockNotesPost();
$('.js-comment-button').click();
@@ -598,7 +596,7 @@ describe.skip('Old Notes (~/notes.js)', () => {
});
});
- it('should reset Form when new comment is done posting', done => {
+ it('should reset Form when new comment is done posting', (done) => {
mockNotesPost();
$('.js-comment-button').click();
@@ -609,7 +607,7 @@ describe.skip('Old Notes (~/notes.js)', () => {
});
});
- it('should show flash error message when new comment failed to be posted', done => {
+ it('should show flash error message when new comment failed to be posted', (done) => {
mockNotesPostError();
jest.spyOn(notes, 'addFlash');
@@ -660,7 +658,7 @@ describe.skip('Old Notes (~/notes.js)', () => {
$form.find('textarea.js-note-text').val(sampleComment);
});
- it('should remove quick action placeholder when comment with quick actions is done posting', done => {
+ it('should remove quick action placeholder when comment with quick actions is done posting', (done) => {
jest.spyOn(gl.awardsHandler, 'addAwardToEmojiBar');
$('.js-comment-button').click();
@@ -695,7 +693,7 @@ describe.skip('Old Notes (~/notes.js)', () => {
$form.find('textarea.js-note-text').val(sampleComment);
});
- it('should show message placeholder including lines starting with slash', done => {
+ it('should show message placeholder including lines starting with slash', (done) => {
$('.js-comment-button').click();
expect($notesContainer.find('.note.being-posted').length).toEqual(1); // Placeholder shown
@@ -733,7 +731,7 @@ describe.skip('Old Notes (~/notes.js)', () => {
$form.find('textarea.js-note-text').html(sampleComment);
});
- it('should not render a script tag', done => {
+ it('should not render a script tag', (done) => {
$('.js-comment-button').click();
setImmediate(() => {
@@ -746,12 +744,7 @@ describe.skip('Old Notes (~/notes.js)', () => {
.find(`#note_${note.id}`)
.find('.js-task-list-container');
- expect(
- $updatedNoteEl
- .find('.note-text')
- .text()
- .trim(),
- ).toEqual('');
+ expect($updatedNoteEl.find('.note-text').text().trim()).toEqual('');
done();
});
@@ -912,26 +905,15 @@ describe.skip('Old Notes (~/notes.js)', () => {
expect($tempNote.find('.timeline-icon .avatar').attr('src')).toEqual(currentUserAvatar);
expect($tempNote.find('.timeline-content').hasClass('discussion')).toBeFalsy();
- expect(
- $tempNoteHeader
- .find('.d-none.d-sm-inline-block')
- .text()
- .trim(),
- ).toEqual(currentUserFullname);
-
- expect(
- $tempNoteHeader
- .find('.note-headline-light')
- .text()
- .trim(),
- ).toEqual(`@${currentUsername}`);
-
- expect(
- $tempNote
- .find('.note-body .note-text p')
- .text()
- .trim(),
- ).toEqual(sampleComment);
+ expect($tempNoteHeader.find('.d-none.d-sm-inline-block').text().trim()).toEqual(
+ currentUserFullname,
+ );
+
+ expect($tempNoteHeader.find('.note-headline-light').text().trim()).toEqual(
+ `@${currentUsername}`,
+ );
+
+ expect($tempNote.find('.note-body .note-text p').text().trim()).toEqual(sampleComment);
});
it('should return constructed placeholder element for discussion note based on form contents', () => {
@@ -959,12 +941,9 @@ describe.skip('Old Notes (~/notes.js)', () => {
});
const $tempNoteHeader = $tempNote.find('.note-header');
- expect(
- $tempNoteHeader
- .find('.d-none.d-sm-inline-block')
- .text()
- .trim(),
- ).toEqual('Foo &lt;script&gt;alert(&quot;XSS&quot;)&lt;/script&gt;');
+ expect($tempNoteHeader.find('.d-none.d-sm-inline-block').text().trim()).toEqual(
+ 'Foo &lt;script&gt;alert(&quot;XSS&quot;)&lt;/script&gt;',
+ );
});
});
@@ -987,12 +966,7 @@ describe.skip('Old Notes (~/notes.js)', () => {
expect($tempNote.attr('id')).toEqual(uniqueId);
expect($tempNote.hasClass('being-posted')).toBeTruthy();
expect($tempNote.hasClass('fade-in-half')).toBeTruthy();
- expect(
- $tempNote
- .find('.timeline-content i')
- .text()
- .trim(),
- ).toEqual(sampleCommandDescription);
+ expect($tempNote.find('.timeline-content i').text().trim()).toEqual(sampleCommandDescription);
});
});
diff --git a/spec/frontend/notes/stores/actions_spec.js b/spec/frontend/notes/stores/actions_spec.js
index c9912621785..f0e6a0a68dd 100644
--- a/spec/frontend/notes/stores/actions_spec.js
+++ b/spec/frontend/notes/stores/actions_spec.js
@@ -1,5 +1,6 @@
import { TEST_HOST } from 'spec/test_constants';
import AxiosMockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
import Api from '~/api';
import { deprecatedCreateFlash as Flash } from '~/flash';
import * as actions from '~/notes/stores/actions';
@@ -8,7 +9,6 @@ import * as mutationTypes from '~/notes/stores/mutation_types';
import * as notesConstants from '~/notes/constants';
import createStore from '~/notes/stores';
import mrWidgetEventHub from '~/vue_merge_request_widget/event_hub';
-import testAction from '../../helpers/vuex_action_helper';
import { resetStore } from '../helpers';
import {
discussionMock,
@@ -53,7 +53,7 @@ describe('Actions Notes Store', () => {
});
describe('setNotesData', () => {
- it('should set received notes data', done => {
+ it('should set received notes data', (done) => {
testAction(
actions.setNotesData,
notesDataMock,
@@ -66,7 +66,7 @@ describe('Actions Notes Store', () => {
});
describe('setNoteableData', () => {
- it('should set received issue data', done => {
+ it('should set received issue data', (done) => {
testAction(
actions.setNoteableData,
noteableDataMock,
@@ -79,7 +79,7 @@ describe('Actions Notes Store', () => {
});
describe('setUserData', () => {
- it('should set received user data', done => {
+ it('should set received user data', (done) => {
testAction(
actions.setUserData,
userDataMock,
@@ -92,7 +92,7 @@ describe('Actions Notes Store', () => {
});
describe('setLastFetchedAt', () => {
- it('should set received timestamp', done => {
+ it('should set received timestamp', (done) => {
testAction(
actions.setLastFetchedAt,
'timestamp',
@@ -105,7 +105,7 @@ describe('Actions Notes Store', () => {
});
describe('setInitialNotes', () => {
- it('should set initial notes', done => {
+ it('should set initial notes', (done) => {
testAction(
actions.setInitialNotes,
[individualNote],
@@ -118,7 +118,7 @@ describe('Actions Notes Store', () => {
});
describe('setTargetNoteHash', () => {
- it('should set target note hash', done => {
+ it('should set target note hash', (done) => {
testAction(
actions.setTargetNoteHash,
'hash',
@@ -131,7 +131,7 @@ describe('Actions Notes Store', () => {
});
describe('toggleDiscussion', () => {
- it('should toggle discussion', done => {
+ it('should toggle discussion', (done) => {
testAction(
actions.toggleDiscussion,
{ discussionId: discussionMock.id },
@@ -144,7 +144,7 @@ describe('Actions Notes Store', () => {
});
describe('expandDiscussion', () => {
- it('should expand discussion', done => {
+ it('should expand discussion', (done) => {
testAction(
actions.expandDiscussion,
{ discussionId: discussionMock.id },
@@ -157,7 +157,7 @@ describe('Actions Notes Store', () => {
});
describe('collapseDiscussion', () => {
- it('should commit collapse discussion', done => {
+ it('should commit collapse discussion', (done) => {
testAction(
actions.collapseDiscussion,
{ discussionId: discussionMock.id },
@@ -175,7 +175,7 @@ describe('Actions Notes Store', () => {
});
describe('closeMergeRequest', () => {
- it('sets state as closed', done => {
+ it('sets state as closed', (done) => {
store
.dispatch('closeIssuable', { notesData: { closeIssuePath: '' } })
.then(() => {
@@ -188,7 +188,7 @@ describe('Actions Notes Store', () => {
});
describe('reopenMergeRequest', () => {
- it('sets state as reopened', done => {
+ it('sets state as reopened', (done) => {
store
.dispatch('reopenIssuable', { notesData: { reopenIssuePath: '' } })
.then(() => {
@@ -203,7 +203,7 @@ describe('Actions Notes Store', () => {
describe('emitStateChangedEvent', () => {
it('emits an event on the document', () => {
- document.addEventListener('issuable_vue_app:change', event => {
+ document.addEventListener('issuable_vue_app:change', (event) => {
expect(event.detail.data).toEqual({ id: '1', state: 'closed' });
expect(event.detail.isClosed).toEqual(false);
});
@@ -213,7 +213,7 @@ describe('Actions Notes Store', () => {
});
describe('toggleStateButtonLoading', () => {
- it('should set loading as true', done => {
+ it('should set loading as true', (done) => {
testAction(
actions.toggleStateButtonLoading,
true,
@@ -224,7 +224,7 @@ describe('Actions Notes Store', () => {
);
});
- it('should set loading as false', done => {
+ it('should set loading as false', (done) => {
testAction(
actions.toggleStateButtonLoading,
false,
@@ -237,11 +237,11 @@ describe('Actions Notes Store', () => {
});
describe('toggleIssueLocalState', () => {
- it('sets issue state as closed', done => {
+ it('sets issue state as closed', (done) => {
testAction(actions.toggleIssueLocalState, 'closed', {}, [{ type: 'CLOSE_ISSUE' }], [], done);
});
- it('sets issue state as reopened', done => {
+ it('sets issue state as reopened', (done) => {
testAction(
actions.toggleIssueLocalState,
'reopened',
@@ -297,24 +297,21 @@ describe('Actions Notes Store', () => {
});
describe('poll', () => {
- beforeEach(done => {
+ beforeEach((done) => {
axiosMock
.onGet(notesDataMock.notesPath)
.reply(200, { notes: [], last_fetched_at: '123456' }, { 'poll-interval': '1000' });
- store
- .dispatch('setNotesData', notesDataMock)
- .then(done)
- .catch(done.fail);
+ store.dispatch('setNotesData', notesDataMock).then(done).catch(done.fail);
});
- it('calls service with last fetched state', done => {
+ it('calls service with last fetched state', (done) => {
store
.dispatch('poll')
.then(() => {
jest.advanceTimersByTime(2);
})
- .then(() => new Promise(resolve => requestAnimationFrame(resolve)))
+ .then(() => new Promise((resolve) => requestAnimationFrame(resolve)))
.then(() => {
expect(store.state.lastFetchedAt).toBe('123456');
@@ -322,7 +319,7 @@ describe('Actions Notes Store', () => {
})
.then(
() =>
- new Promise(resolve => {
+ new Promise((resolve) => {
requestAnimationFrame(resolve);
}),
)
@@ -340,7 +337,7 @@ describe('Actions Notes Store', () => {
});
describe('setNotesFetchedState', () => {
- it('should set notes fetched state', done => {
+ it('should set notes fetched state', (done) => {
testAction(
actions.setNotesFetchedState,
true,
@@ -367,7 +364,7 @@ describe('Actions Notes Store', () => {
document.body.setAttribute('data-page', '');
});
- it('commits DELETE_NOTE and dispatches updateMergeRequestWidget', done => {
+ it('commits DELETE_NOTE and dispatches updateMergeRequestWidget', (done) => {
const note = { path: endpoint, id: 1 };
testAction(
@@ -392,7 +389,7 @@ describe('Actions Notes Store', () => {
);
});
- it('dispatches removeDiscussionsFromDiff on merge request page', done => {
+ it('dispatches removeDiscussionsFromDiff on merge request page', (done) => {
const note = { path: endpoint, id: 1 };
document.body.setAttribute('data-page', 'projects:merge_requests:show');
@@ -438,7 +435,7 @@ describe('Actions Notes Store', () => {
document.body.setAttribute('data-page', '');
});
- it('dispatches removeNote', done => {
+ it('dispatches removeNote', (done) => {
const note = { path: endpoint, id: 1 };
testAction(
@@ -471,7 +468,7 @@ describe('Actions Notes Store', () => {
axiosMock.onAny().reply(200, res);
});
- it('commits ADD_NEW_NOTE and dispatches updateMergeRequestWidget', done => {
+ it('commits ADD_NEW_NOTE and dispatches updateMergeRequestWidget', (done) => {
testAction(
actions.createNewNote,
{ endpoint: `${TEST_HOST}`, data: {} },
@@ -507,7 +504,7 @@ describe('Actions Notes Store', () => {
axiosMock.onAny().replyOnce(200, res);
});
- it('does not commit ADD_NEW_NOTE or dispatch updateMergeRequestWidget', done => {
+ it('does not commit ADD_NEW_NOTE or dispatch updateMergeRequestWidget', (done) => {
testAction(
actions.createNewNote,
{ endpoint: `${TEST_HOST}`, data: {} },
@@ -530,7 +527,7 @@ describe('Actions Notes Store', () => {
});
describe('as note', () => {
- it('commits UPDATE_NOTE and dispatches updateMergeRequestWidget', done => {
+ it('commits UPDATE_NOTE and dispatches updateMergeRequestWidget', (done) => {
testAction(
actions.toggleResolveNote,
{ endpoint: `${TEST_HOST}`, isResolved: true, discussion: false },
@@ -555,7 +552,7 @@ describe('Actions Notes Store', () => {
});
describe('as discussion', () => {
- it('commits UPDATE_DISCUSSION and dispatches updateMergeRequestWidget', done => {
+ it('commits UPDATE_DISCUSSION and dispatches updateMergeRequestWidget', (done) => {
testAction(
actions.toggleResolveNote,
{ endpoint: `${TEST_HOST}`, isResolved: true, discussion: true },
@@ -591,7 +588,7 @@ describe('Actions Notes Store', () => {
});
describe('setCommentsDisabled', () => {
- it('should set comments disabled state', done => {
+ it('should set comments disabled state', (done) => {
testAction(
actions.setCommentsDisabled,
true,
@@ -604,7 +601,7 @@ describe('Actions Notes Store', () => {
});
describe('updateResolvableDiscussionsCounts', () => {
- it('commits UPDATE_RESOLVABLE_DISCUSSIONS_COUNTS', done => {
+ it('commits UPDATE_RESOLVABLE_DISCUSSIONS_COUNTS', (done) => {
testAction(
actions.updateResolvableDiscussionsCounts,
null,
@@ -617,7 +614,7 @@ describe('Actions Notes Store', () => {
});
describe('convertToDiscussion', () => {
- it('commits CONVERT_TO_DISCUSSION with noteId', done => {
+ it('commits CONVERT_TO_DISCUSSION with noteId', (done) => {
const noteId = 'dummy-note-id';
testAction(
actions.convertToDiscussion,
@@ -721,7 +718,7 @@ describe('Actions Notes Store', () => {
describe('replyToDiscussion', () => {
const payload = { endpoint: TEST_HOST, data: {} };
- it('updates discussion if response contains disussion', done => {
+ it('updates discussion if response contains discussion', (done) => {
const discussion = { notes: [] };
axiosMock.onAny().reply(200, { discussion });
@@ -741,7 +738,7 @@ describe('Actions Notes Store', () => {
);
});
- it('adds a reply to a discussion', done => {
+ it('adds a reply to a discussion', (done) => {
const res = {};
axiosMock.onAny().reply(200, res);
@@ -759,7 +756,7 @@ describe('Actions Notes Store', () => {
});
describe('removeConvertedDiscussion', () => {
- it('commits CONVERT_TO_DISCUSSION with noteId', done => {
+ it('commits CONVERT_TO_DISCUSSION with noteId', (done) => {
const noteId = 'dummy-id';
testAction(
actions.removeConvertedDiscussion,
@@ -784,7 +781,7 @@ describe('Actions Notes Store', () => {
};
});
- it('when unresolved, dispatches action', done => {
+ it('when unresolved, dispatches action', (done) => {
testAction(
actions.resolveDiscussion,
{ discussionId },
@@ -804,8 +801,8 @@ describe('Actions Notes Store', () => {
);
});
- it('when resolved, does nothing', done => {
- getters.isDiscussionResolved = id => id === discussionId;
+ it('when resolved, does nothing', (done) => {
+ getters.isDiscussionResolved = (id) => id === discussionId;
testAction(
actions.resolveDiscussion,
@@ -826,7 +823,7 @@ describe('Actions Notes Store', () => {
const res = { errors: { something: ['went wrong'] } };
const error = { message: 'Unprocessable entity', response: { data: res } };
- it('throws an error', done => {
+ it('throws an error', (done) => {
actions
.saveNote(
{
@@ -836,7 +833,7 @@ describe('Actions Notes Store', () => {
payload,
)
.then(() => done.fail('Expected error to be thrown!'))
- .catch(err => {
+ .catch((err) => {
expect(err).toBe(error);
expect(Flash).not.toHaveBeenCalled();
})
@@ -849,7 +846,7 @@ describe('Actions Notes Store', () => {
const res = { errors: { base: ['something went wrong'] } };
const error = { message: 'Unprocessable entity', response: { data: res } };
- it('sets flash alert using errors.base message', done => {
+ it('sets flash alert using errors.base message', (done) => {
actions
.saveNote(
{
@@ -858,7 +855,7 @@ describe('Actions Notes Store', () => {
},
{ ...payload, flashContainer },
)
- .then(resp => {
+ .then((resp) => {
expect(resp.hasFlash).toBe(true);
expect(Flash).toHaveBeenCalledWith(
'Your comment could not be submitted because something went wrong',
@@ -875,7 +872,7 @@ describe('Actions Notes Store', () => {
describe('if response contains no errors', () => {
const res = { valid: true };
- it('returns the response', done => {
+ it('returns the response', (done) => {
actions
.saveNote(
{
@@ -884,7 +881,7 @@ describe('Actions Notes Store', () => {
},
payload,
)
- .then(data => {
+ .then((data) => {
expect(data).toBe(res);
expect(Flash).not.toHaveBeenCalled();
})
@@ -917,11 +914,10 @@ describe('Actions Notes Store', () => {
.catch(done.fail);
};
- it('when service success, commits and resolves discussion', done => {
+ it('when service succeeds, commits and resolves discussion', (done) => {
testSubmitSuggestion(done, () => {
expect(commit.mock.calls).toEqual([
[mutationTypes.SET_RESOLVING_DISCUSSION, true],
- [mutationTypes.APPLY_SUGGESTION, { discussionId, noteId, suggestionId }],
[mutationTypes.SET_RESOLVING_DISCUSSION, false],
]);
@@ -934,7 +930,7 @@ describe('Actions Notes Store', () => {
});
});
- it('when service fails, flashes error message', done => {
+ it('when service fails, flashes error message', (done) => {
const response = { response: { data: { message: TEST_ERROR_MESSAGE } } };
Api.applySuggestion.mockReturnValue(Promise.reject(response));
@@ -949,7 +945,7 @@ describe('Actions Notes Store', () => {
});
});
- it('when service fails, and no error message available, uses default message', done => {
+ it('when service fails and no error message is available, uses default message', (done) => {
const response = { response: 'foo' };
Api.applySuggestion.mockReturnValue(Promise.reject(response));
@@ -968,7 +964,7 @@ describe('Actions Notes Store', () => {
});
});
- it('when resolve discussion fails, fail gracefully', done => {
+ it('when resolve discussion fails, fail gracefully', (done) => {
dispatch.mockReturnValue(Promise.reject());
testSubmitSuggestion(done, () => {
@@ -999,13 +995,11 @@ describe('Actions Notes Store', () => {
.catch(done.fail);
};
- it('when service succeeds, commits, resolves discussions, resets batch and applying batch state', done => {
+ it('when service succeeds, commits, resolves discussions, resets batch and applying batch state', (done) => {
testSubmitSuggestionBatch(done, () => {
expect(commit.mock.calls).toEqual([
[mutationTypes.SET_APPLYING_BATCH_STATE, true],
[mutationTypes.SET_RESOLVING_DISCUSSION, true],
- [mutationTypes.APPLY_SUGGESTION, batchSuggestionsInfo[0]],
- [mutationTypes.APPLY_SUGGESTION, batchSuggestionsInfo[1]],
[mutationTypes.CLEAR_SUGGESTION_BATCH],
[mutationTypes.SET_APPLYING_BATCH_STATE, false],
[mutationTypes.SET_RESOLVING_DISCUSSION, false],
@@ -1022,7 +1016,7 @@ describe('Actions Notes Store', () => {
});
});
- it('when service fails, flashes error message, resets applying batch state', done => {
+ it('when service fails, flashes error message, resets applying batch state', (done) => {
const response = { response: { data: { message: TEST_ERROR_MESSAGE } } };
Api.applySuggestionBatch.mockReturnValue(Promise.reject(response));
@@ -1040,7 +1034,7 @@ describe('Actions Notes Store', () => {
});
});
- it('when service fails, and no error message available, uses default message', done => {
+ it('when service fails and no error message is available, uses default message', (done) => {
const response = { response: 'foo' };
Api.applySuggestionBatch.mockReturnValue(Promise.reject(response));
@@ -1062,15 +1056,13 @@ describe('Actions Notes Store', () => {
});
});
- it('when resolve discussions fails, fails gracefully, resets batch and applying batch state', done => {
+ it('when resolve discussions fails, fails gracefully, resets batch and applying batch state', (done) => {
dispatch.mockReturnValue(Promise.reject());
testSubmitSuggestionBatch(done, () => {
expect(commit.mock.calls).toEqual([
[mutationTypes.SET_APPLYING_BATCH_STATE, true],
[mutationTypes.SET_RESOLVING_DISCUSSION, true],
- [mutationTypes.APPLY_SUGGESTION, batchSuggestionsInfo[0]],
- [mutationTypes.APPLY_SUGGESTION, batchSuggestionsInfo[1]],
[mutationTypes.CLEAR_SUGGESTION_BATCH],
[mutationTypes.SET_APPLYING_BATCH_STATE, false],
[mutationTypes.SET_RESOLVING_DISCUSSION, false],
@@ -1084,7 +1076,7 @@ describe('Actions Notes Store', () => {
describe('addSuggestionInfoToBatch', () => {
const suggestionInfo = batchSuggestionsInfoMock[0];
- it("adds a suggestion's info to the current batch", done => {
+ it("adds a suggestion's info to the current batch", (done) => {
testAction(
actions.addSuggestionInfoToBatch,
suggestionInfo,
@@ -1099,7 +1091,7 @@ describe('Actions Notes Store', () => {
describe('removeSuggestionInfoFromBatch', () => {
const suggestionInfo = batchSuggestionsInfoMock[0];
- it("removes a suggestion's info the current batch", done => {
+ it("removes a suggestion's info the current batch", (done) => {
testAction(
actions.removeSuggestionInfoFromBatch,
suggestionInfo.suggestionId,
@@ -1139,7 +1131,7 @@ describe('Actions Notes Store', () => {
});
describe('setDiscussionSortDirection', () => {
- it('calls the correct mutation with the correct args', done => {
+ it('calls the correct mutation with the correct args', (done) => {
testAction(
actions.setDiscussionSortDirection,
{ direction: notesConstants.DESC, persist: false },
@@ -1157,7 +1149,7 @@ describe('Actions Notes Store', () => {
});
describe('setSelectedCommentPosition', () => {
- it('calls the correct mutation with the correct args', done => {
+ it('calls the correct mutation with the correct args', (done) => {
testAction(
actions.setSelectedCommentPosition,
{},
@@ -1178,7 +1170,7 @@ describe('Actions Notes Store', () => {
};
describe('if response contains no errors', () => {
- it('dispatches requestDeleteDescriptionVersion', done => {
+ it('dispatches requestDeleteDescriptionVersion', (done) => {
axiosMock.onDelete(endpoint).replyOnce(200);
testAction(
actions.softDeleteDescriptionVersion,
@@ -1201,7 +1193,7 @@ describe('Actions Notes Store', () => {
describe('if response contains errors', () => {
const errorMessage = 'Request failed with status code 503';
- it('dispatches receiveDeleteDescriptionVersionError and throws an error', done => {
+ it('dispatches receiveDeleteDescriptionVersionError and throws an error', (done) => {
axiosMock.onDelete(endpoint).replyOnce(503);
testAction(
actions.softDeleteDescriptionVersion,
@@ -1236,7 +1228,7 @@ describe('Actions Notes Store', () => {
});
describe('updateAssignees', () => {
- it('update the assignees state', done => {
+ it('updates the assignees state', (done) => {
testAction(
actions.updateAssignees,
[userDataMock.id],
@@ -1351,7 +1343,7 @@ describe('Actions Notes Store', () => {
});
describe('updateDiscussionPosition', () => {
- it('update the assignees state', done => {
+ it('updates the discussion position', (done) => {
const updatedPosition = { discussionId: 1, position: { test: true } };
testAction(
actions.updateDiscussionPosition,
diff --git a/spec/frontend/notes/stores/getters_spec.js b/spec/frontend/notes/stores/getters_spec.js
index 1a369caee49..fd04d08b6a5 100644
--- a/spec/frontend/notes/stores/getters_spec.js
+++ b/spec/frontend/notes/stores/getters_spec.js
@@ -72,7 +72,7 @@ describe('Getters Notes Store', () => {
state.isTimelineEnabled = true;
expect(getters.discussions(state).length).toEqual(discussionMock.notes.length);
- getters.discussions(state).forEach(discussion => {
+ getters.discussions(state).forEach((discussion) => {
expect(discussion.individual_note).toBe(true);
expect(discussion.id).toBe(discussion.notes[0].id);
expect(discussion.created_at).toBe(discussion.notes[0].created_at);
@@ -298,17 +298,18 @@ describe('Getters Notes Store', () => {
};
});
- [{ step: 1, id: '123', expected: '123' }, { step: -1, id: '123', expected: '123' }].forEach(
- ({ step, id, expected }) => {
- it(`with step ${step} and match, returns only value`, () => {
- const params = createDiscussionNeighborParams(id, true, step);
-
- expect(getters.findUnresolvedDiscussionIdNeighbor(state, localGetters)(params)).toBe(
- expected,
- );
- });
- },
- );
+ [
+ { step: 1, id: '123', expected: '123' },
+ { step: -1, id: '123', expected: '123' },
+ ].forEach(({ step, id, expected }) => {
+ it(`with step ${step} and match, returns only value`, () => {
+ const params = createDiscussionNeighborParams(id, true, step);
+
+ expect(getters.findUnresolvedDiscussionIdNeighbor(state, localGetters)(params)).toBe(
+ expected,
+ );
+ });
+ });
it('with no match, returns only value', () => {
const params = createDiscussionNeighborParams('bogus', true, 1);
diff --git a/spec/frontend/notes/stores/mutation_spec.js b/spec/frontend/notes/stores/mutation_spec.js
index ec4de925721..66fc74525ad 100644
--- a/spec/frontend/notes/stores/mutation_spec.js
+++ b/spec/frontend/notes/stores/mutation_spec.js
@@ -357,7 +357,7 @@ describe('Notes Store mutations', () => {
mutations.SET_EXPAND_DISCUSSIONS(state, { discussionIds, expanded: true });
- state.discussions.forEach(discussion => {
+ state.discussions.forEach((discussion) => {
expect(discussion.expanded).toEqual(true);
});
});
@@ -371,7 +371,7 @@ describe('Notes Store mutations', () => {
mutations.SET_EXPAND_DISCUSSIONS(state, { discussionIds, expanded: false });
- state.discussions.forEach(discussion => {
+ state.discussions.forEach((discussion) => {
expect(discussion.expanded).toEqual(false);
});
});
@@ -698,7 +698,7 @@ describe('Notes Store mutations', () => {
});
describe('SET_APPLYING_BATCH_STATE', () => {
- const buildDiscussions = suggestionsInfo => {
+ const buildDiscussions = (suggestionsInfo) => {
const suggestions = suggestionsInfo.map(({ suggestionId }) => ({ id: suggestionId }));
const notes = suggestionsInfo.map(({ noteId }, index) => ({
@@ -738,7 +738,7 @@ describe('Notes Store mutations', () => {
const expectedSuggestions = [updatedSuggestion, suggestions[1]];
const actualSuggestions = state.discussions
- .map(discussion => discussion.notes.map(n => n.suggestions))
+ .map((discussion) => discussion.notes.map((n) => n.suggestions))
.flat(2);
expect(actualSuggestions).toEqual(expectedSuggestions);
diff --git a/spec/frontend/oauth_remember_me_spec.js b/spec/frontend/oauth_remember_me_spec.js
index e12db05ac43..910676a97ed 100644
--- a/spec/frontend/oauth_remember_me_spec.js
+++ b/spec/frontend/oauth_remember_me_spec.js
@@ -2,10 +2,8 @@ import $ from 'jquery';
import OAuthRememberMe from '~/pages/sessions/new/oauth_remember_me';
describe('OAuthRememberMe', () => {
- const findFormAction = selector => {
- return $(`#oauth-container .oauth-login${selector}`)
- .parent('form')
- .attr('action');
+ const findFormAction = (selector) => {
+ return $(`#oauth-container .oauth-login${selector}`).parent('form').attr('action');
};
preloadFixtures('static/oauth_remember_me.html');
diff --git a/spec/frontend/operation_settings/components/metrics_settings_spec.js b/spec/frontend/operation_settings/components/metrics_settings_spec.js
index c7ea23f9913..3216eece391 100644
--- a/spec/frontend/operation_settings/components/metrics_settings_spec.js
+++ b/spec/frontend/operation_settings/components/metrics_settings_spec.js
@@ -61,6 +61,7 @@ describe('operation settings external dashboard component', () => {
describe('expand/collapse button', () => {
it('renders as an expand button by default', () => {
+ mountComponent();
const button = wrapper.find(GlButton);
expect(button.text()).toBe('Expand');
diff --git a/spec/frontend/packages/details/components/__snapshots__/package_title_spec.js.snap b/spec/frontend/packages/details/components/__snapshots__/package_title_spec.js.snap
index d317264bdae..318cea98b92 100644
--- a/spec/frontend/packages/details/components/__snapshots__/package_title_spec.js.snap
+++ b/spec/frontend/packages/details/components/__snapshots__/package_title_spec.js.snap
@@ -9,7 +9,7 @@ exports[`PackageTitle renders with tags 1`] = `
class="gl-display-flex gl-justify-content-space-between gl-py-3"
>
<div
- class="gl-flex-direction-column"
+ class="gl-flex-direction-column gl-flex-grow-1"
>
<div
class="gl-display-flex"
@@ -54,6 +54,7 @@ exports[`PackageTitle renders with tags 1`] = `
link=""
size="s"
text="maven"
+ texttooltip=""
/>
</div>
<div
@@ -65,6 +66,7 @@ exports[`PackageTitle renders with tags 1`] = `
link=""
size="s"
text="300 bytes"
+ texttooltip=""
/>
</div>
<div
@@ -95,7 +97,7 @@ exports[`PackageTitle renders without tags 1`] = `
class="gl-display-flex gl-justify-content-space-between gl-py-3"
>
<div
- class="gl-flex-direction-column"
+ class="gl-flex-direction-column gl-flex-grow-1"
>
<div
class="gl-display-flex"
@@ -140,6 +142,7 @@ exports[`PackageTitle renders without tags 1`] = `
link=""
size="s"
text="maven"
+ texttooltip=""
/>
</div>
<div
@@ -151,6 +154,7 @@ exports[`PackageTitle renders without tags 1`] = `
link=""
size="s"
text="300 bytes"
+ texttooltip=""
/>
</div>
</div>
diff --git a/spec/frontend/packages/details/components/additional_metadata_spec.js b/spec/frontend/packages/details/components/additional_metadata_spec.js
index 111e4205abb..8466a630ecb 100644
--- a/spec/frontend/packages/details/components/additional_metadata_spec.js
+++ b/spec/frontend/packages/details/components/additional_metadata_spec.js
@@ -11,7 +11,7 @@ describe('Package Additional Metadata', () => {
packageEntity: { ...mavenPackage },
};
- const mountComponent = props => {
+ const mountComponent = (props) => {
wrapper = shallowMount(component, {
propsData: { ...defaultProps, ...props },
stubs: {
@@ -33,7 +33,7 @@ describe('Package Additional Metadata', () => {
const findConanRecipe = () => wrapper.find('[data-testid="conan-recipe"]');
const findMavenApp = () => wrapper.find('[data-testid="maven-app"]');
const findMavenGroup = () => wrapper.find('[data-testid="maven-group"]');
- const findElementLink = container => container.find(GlLink);
+ const findElementLink = (container) => container.find(GlLink);
it('has the correct title', () => {
mountComponent();
diff --git a/spec/frontend/packages/details/components/conan_installation_spec.js b/spec/frontend/packages/details/components/conan_installation_spec.js
index c79d1bb50dd..92c1f8e7f4a 100644
--- a/spec/frontend/packages/details/components/conan_installation_spec.js
+++ b/spec/frontend/packages/details/components/conan_installation_spec.js
@@ -48,21 +48,13 @@ describe('ConanInstallation', () => {
describe('installation commands', () => {
it('renders the correct command', () => {
- expect(
- findCodeInstructions()
- .at(0)
- .props('instruction'),
- ).toBe(conanInstallationCommandStr);
+ expect(findCodeInstructions().at(0).props('instruction')).toBe(conanInstallationCommandStr);
});
});
describe('setup commands', () => {
it('renders the correct command', () => {
- expect(
- findCodeInstructions()
- .at(1)
- .props('instruction'),
- ).toBe(conanSetupCommandStr);
+ expect(findCodeInstructions().at(1).props('instruction')).toBe(conanSetupCommandStr);
});
});
});
diff --git a/spec/frontend/packages/details/components/maven_installation_spec.js b/spec/frontend/packages/details/components/maven_installation_spec.js
index f301a03a7f3..ff62969e709 100644
--- a/spec/frontend/packages/details/components/maven_installation_spec.js
+++ b/spec/frontend/packages/details/components/maven_installation_spec.js
@@ -51,11 +51,7 @@ describe('MavenInstallation', () => {
describe('installation commands', () => {
it('renders the correct xml block', () => {
- expect(
- findCodeInstructions()
- .at(0)
- .props(),
- ).toMatchObject({
+ expect(findCodeInstructions().at(0).props()).toMatchObject({
instruction: xmlCodeBlock,
multiline: true,
trackingAction: TrackingActions.COPY_MAVEN_XML,
@@ -63,11 +59,7 @@ describe('MavenInstallation', () => {
});
it('renders the correct maven command', () => {
- expect(
- findCodeInstructions()
- .at(1)
- .props(),
- ).toMatchObject({
+ expect(findCodeInstructions().at(1).props()).toMatchObject({
instruction: mavenCommandStr,
multiline: false,
trackingAction: TrackingActions.COPY_MAVEN_COMMAND,
@@ -77,11 +69,7 @@ describe('MavenInstallation', () => {
describe('setup commands', () => {
it('renders the correct xml block', () => {
- expect(
- findCodeInstructions()
- .at(2)
- .props(),
- ).toMatchObject({
+ expect(findCodeInstructions().at(2).props()).toMatchObject({
instruction: mavenSetupXml,
multiline: true,
trackingAction: TrackingActions.COPY_MAVEN_SETUP,
diff --git a/spec/frontend/packages/details/components/npm_installation_spec.js b/spec/frontend/packages/details/components/npm_installation_spec.js
index 4223a05453c..dd6e62185a9 100644
--- a/spec/frontend/packages/details/components/npm_installation_spec.js
+++ b/spec/frontend/packages/details/components/npm_installation_spec.js
@@ -47,11 +47,7 @@ describe('NpmInstallation', () => {
describe('installation commands', () => {
it('renders the correct npm command', () => {
- expect(
- findCodeInstructions()
- .at(0)
- .props(),
- ).toMatchObject({
+ expect(findCodeInstructions().at(0).props()).toMatchObject({
instruction: 'npm i @Test/package',
multiline: false,
trackingAction: TrackingActions.COPY_NPM_INSTALL_COMMAND,
@@ -59,11 +55,7 @@ describe('NpmInstallation', () => {
});
it('renders the correct yarn command', () => {
- expect(
- findCodeInstructions()
- .at(1)
- .props(),
- ).toMatchObject({
+ expect(findCodeInstructions().at(1).props()).toMatchObject({
instruction: 'yarn add @Test/package',
multiline: false,
trackingAction: TrackingActions.COPY_YARN_INSTALL_COMMAND,
@@ -73,11 +65,7 @@ describe('NpmInstallation', () => {
describe('setup commands', () => {
it('renders the correct npm command', () => {
- expect(
- findCodeInstructions()
- .at(2)
- .props(),
- ).toMatchObject({
+ expect(findCodeInstructions().at(2).props()).toMatchObject({
instruction: 'echo @Test:registry=undefined/ >> .npmrc',
multiline: false,
trackingAction: TrackingActions.COPY_NPM_SETUP_COMMAND,
@@ -85,11 +73,7 @@ describe('NpmInstallation', () => {
});
it('renders the correct yarn command', () => {
- expect(
- findCodeInstructions()
- .at(3)
- .props(),
- ).toMatchObject({
+ expect(findCodeInstructions().at(3).props()).toMatchObject({
instruction: 'echo \\"@Test:registry\\" \\"undefined/\\" >> .yarnrc',
multiline: false,
trackingAction: TrackingActions.COPY_YARN_SETUP_COMMAND,
diff --git a/spec/frontend/packages/details/components/nuget_installation_spec.js b/spec/frontend/packages/details/components/nuget_installation_spec.js
index b381d131e94..685d0808dd9 100644
--- a/spec/frontend/packages/details/components/nuget_installation_spec.js
+++ b/spec/frontend/packages/details/components/nuget_installation_spec.js
@@ -49,11 +49,7 @@ describe('NugetInstallation', () => {
describe('installation commands', () => {
it('renders the correct command', () => {
- expect(
- findCodeInstructions()
- .at(0)
- .props(),
- ).toMatchObject({
+ expect(findCodeInstructions().at(0).props()).toMatchObject({
instruction: nugetInstallationCommandStr,
trackingAction: TrackingActions.COPY_NUGET_INSTALL_COMMAND,
});
@@ -62,11 +58,7 @@ describe('NugetInstallation', () => {
describe('setup commands', () => {
it('renders the correct command', () => {
- expect(
- findCodeInstructions()
- .at(1)
- .props(),
- ).toMatchObject({
+ expect(findCodeInstructions().at(1).props()).toMatchObject({
instruction: nugetSetupCommandStr,
trackingAction: TrackingActions.COPY_NUGET_SETUP_COMMAND,
});
diff --git a/spec/frontend/packages/details/components/package_files_spec.js b/spec/frontend/packages/details/components/package_files_spec.js
index 813a2170154..9bcf6ed9235 100644
--- a/spec/frontend/packages/details/components/package_files_spec.js
+++ b/spec/frontend/packages/details/components/package_files_spec.js
@@ -24,7 +24,6 @@ describe('Package Files', () => {
stubs: {
...stubChildren(component),
GlTable: false,
- GlLink: '<div><slot></slot></div>',
},
});
};
diff --git a/spec/frontend/packages/details/components/package_history_spec.js b/spec/frontend/packages/details/components/package_history_spec.js
index c43ac9b9c40..5a6b386e2ca 100644
--- a/spec/frontend/packages/details/components/package_history_spec.js
+++ b/spec/frontend/packages/details/components/package_history_spec.js
@@ -15,10 +15,10 @@ describe('Package History', () => {
packageEntity: { ...mavenPackage },
};
- const createPipelines = amount =>
+ const createPipelines = (amount) =>
[...Array(amount)].map((x, index) => ({ ...mockPipelineInfo, id: index + 1 }));
- const mountComponent = props => {
+ const mountComponent = (props) => {
wrapper = shallowMount(component, {
propsData: { ...defaultProps, ...props },
stubs: {
@@ -35,9 +35,9 @@ describe('Package History', () => {
wrapper = null;
});
- const findHistoryElement = testId => wrapper.find(`[data-testid="${testId}"]`);
- const findElementLink = container => container.find(GlLink);
- const findElementTimeAgo = container => container.find(TimeAgoTooltip);
+ const findHistoryElement = (testId) => wrapper.find(`[data-testid="${testId}"]`);
+ const findElementLink = (container) => container.find(GlLink);
+ const findElementTimeAgo = (container) => container.find(TimeAgoTooltip);
const findTitle = () => wrapper.find('[data-testid="title"]');
const findTimeline = () => wrapper.find('[data-testid="timeline"]');
diff --git a/spec/frontend/packages/details/store/actions_spec.js b/spec/frontend/packages/details/store/actions_spec.js
index 70f87d18bcb..e823a00ebc4 100644
--- a/spec/frontend/packages/details/store/actions_spec.js
+++ b/spec/frontend/packages/details/store/actions_spec.js
@@ -12,7 +12,7 @@ jest.mock('~/api.js');
describe('Actions Package details store', () => {
describe('fetchPackageVersions', () => {
- it('should fetch the package versions', done => {
+ it('should fetch the package versions', (done) => {
Api.projectPackage = jest.fn().mockResolvedValue({ data: packageEntity });
testAction(
@@ -35,14 +35,17 @@ describe('Actions Package details store', () => {
);
});
- it("does not set the versions if they don't exist", done => {
+ it("does not set the versions if they don't exist", (done) => {
Api.projectPackage = jest.fn().mockResolvedValue({ data: { packageEntity, versions: null } });
testAction(
fetchPackageVersions,
undefined,
{ packageEntity },
- [{ type: types.SET_LOADING, payload: true }, { type: types.SET_LOADING, payload: false }],
+ [
+ { type: types.SET_LOADING, payload: true },
+ { type: types.SET_LOADING, payload: false },
+ ],
[],
() => {
expect(Api.projectPackage).toHaveBeenCalledWith(
@@ -54,14 +57,17 @@ describe('Actions Package details store', () => {
);
});
- it('should create flash on API error', done => {
+ it('should create flash on API error', (done) => {
Api.projectPackage = jest.fn().mockRejectedValue();
testAction(
fetchPackageVersions,
undefined,
{ packageEntity },
- [{ type: types.SET_LOADING, payload: true }, { type: types.SET_LOADING, payload: false }],
+ [
+ { type: types.SET_LOADING, payload: true },
+ { type: types.SET_LOADING, payload: false },
+ ],
[],
() => {
expect(Api.projectPackage).toHaveBeenCalledWith(
@@ -76,7 +82,7 @@ describe('Actions Package details store', () => {
});
describe('deletePackage', () => {
- it('should call Api.deleteProjectPackage', done => {
+ it('should call Api.deleteProjectPackage', (done) => {
Api.deleteProjectPackage = jest.fn().mockResolvedValue();
testAction(deletePackage, undefined, { packageEntity }, [], [], () => {
expect(Api.deleteProjectPackage).toHaveBeenCalledWith(
@@ -86,7 +92,7 @@ describe('Actions Package details store', () => {
done();
});
});
- it('should create flash on API error', done => {
+ it('should create flash on API error', (done) => {
Api.deleteProjectPackage = jest.fn().mockRejectedValue();
testAction(deletePackage, undefined, { packageEntity }, [], [], () => {
diff --git a/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap b/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap
index c51130dae00..b2df1ac5ab6 100644
--- a/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap
+++ b/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap
@@ -34,7 +34,7 @@ exports[`packages_list_app renders 1`] = `
class="svg-250 svg-content"
>
<img
- alt="There are no packages yet"
+ alt=""
class="gl-max-w-full"
src="helpSvg"
/>
@@ -95,7 +95,7 @@ exports[`packages_list_app renders 1`] = `
class="svg-250 svg-content"
>
<img
- alt="There are no Composer packages yet"
+ alt=""
class="gl-max-w-full"
src="helpSvg"
/>
@@ -156,7 +156,7 @@ exports[`packages_list_app renders 1`] = `
class="svg-250 svg-content"
>
<img
- alt="There are no Conan packages yet"
+ alt=""
class="gl-max-w-full"
src="helpSvg"
/>
@@ -217,7 +217,7 @@ exports[`packages_list_app renders 1`] = `
class="svg-250 svg-content"
>
<img
- alt="There are no Generic packages yet"
+ alt=""
class="gl-max-w-full"
src="helpSvg"
/>
@@ -278,7 +278,7 @@ exports[`packages_list_app renders 1`] = `
class="svg-250 svg-content"
>
<img
- alt="There are no Maven packages yet"
+ alt=""
class="gl-max-w-full"
src="helpSvg"
/>
@@ -339,7 +339,7 @@ exports[`packages_list_app renders 1`] = `
class="svg-250 svg-content"
>
<img
- alt="There are no NPM packages yet"
+ alt=""
class="gl-max-w-full"
src="helpSvg"
/>
@@ -400,7 +400,7 @@ exports[`packages_list_app renders 1`] = `
class="svg-250 svg-content"
>
<img
- alt="There are no NuGet packages yet"
+ alt=""
class="gl-max-w-full"
src="helpSvg"
/>
@@ -461,7 +461,7 @@ exports[`packages_list_app renders 1`] = `
class="svg-250 svg-content"
>
<img
- alt="There are no PyPI packages yet"
+ alt=""
class="gl-max-w-full"
src="helpSvg"
/>
diff --git a/spec/frontend/packages/list/components/packages_sort_spec.js b/spec/frontend/packages/list/components/packages_sort_spec.js
index 5c4794d8f63..d15ad9bd542 100644
--- a/spec/frontend/packages/list/components/packages_sort_spec.js
+++ b/spec/frontend/packages/list/components/packages_sort_spec.js
@@ -16,7 +16,7 @@ describe('packages_sort', () => {
const findPackageListSorting = () => wrapper.find(GlSorting);
const findSortingItems = () => wrapper.findAll(GlSortingItem);
- const createStore = isGroupPage => {
+ const createStore = (isGroupPage) => {
const state = {
config: {
isGroupPage,
diff --git a/spec/frontend/packages/list/stores/actions_spec.js b/spec/frontend/packages/list/stores/actions_spec.js
index cf205ecbac4..05e1fe57cae 100644
--- a/spec/frontend/packages/list/stores/actions_spec.js
+++ b/spec/frontend/packages/list/stores/actions_spec.js
@@ -30,7 +30,7 @@ describe('Actions Package list store', () => {
sort: 'asc',
orderBy: 'version',
};
- it('should fetch the project packages list when isGroupPage is false', done => {
+ it('should fetch the project packages list when isGroupPage is false', (done) => {
testAction(
actions.requestPackagesList,
undefined,
@@ -50,7 +50,7 @@ describe('Actions Package list store', () => {
);
});
- it('should fetch the group packages list when isGroupPage is true', done => {
+ it('should fetch the group packages list when isGroupPage is true', (done) => {
testAction(
actions.requestPackagesList,
undefined,
@@ -70,7 +70,7 @@ describe('Actions Package list store', () => {
);
});
- it('should fetch packages of a certain type when selectedType is present', done => {
+ it('should fetch packages of a certain type when selectedType is present', (done) => {
const packageType = 'maven';
testAction(
@@ -102,14 +102,17 @@ describe('Actions Package list store', () => {
);
});
- it('should create flash on API error', done => {
+ it('should create flash on API error', (done) => {
Api.projectPackages = jest.fn().mockRejectedValue();
testAction(
actions.requestPackagesList,
undefined,
{ config: { isGroupPage: false, resourceId: 2 }, sorting },
[],
- [{ type: 'setLoading', payload: true }, { type: 'setLoading', payload: false }],
+ [
+ { type: 'setLoading', payload: true },
+ { type: 'setLoading', payload: false },
+ ],
() => {
expect(createFlash).toHaveBeenCalled();
done();
@@ -119,7 +122,7 @@ describe('Actions Package list store', () => {
});
describe('receivePackagesListSuccess', () => {
- it('should set received packages', done => {
+ it('should set received packages', (done) => {
const data = 'foo';
testAction(
@@ -137,7 +140,7 @@ describe('Actions Package list store', () => {
});
describe('setInitialState', () => {
- it('should commit setInitialState', done => {
+ it('should commit setInitialState', (done) => {
testAction(
actions.setInitialState,
'1',
@@ -150,7 +153,7 @@ describe('Actions Package list store', () => {
});
describe('setLoading', () => {
- it('should commit set main loading', done => {
+ it('should commit set main loading', (done) => {
testAction(
actions.setLoading,
true,
@@ -168,7 +171,7 @@ describe('Actions Package list store', () => {
delete_api_path: 'foo',
},
};
- it('should perform a delete operation on _links.delete_api_path', done => {
+ it('should perform a delete operation on _links.delete_api_path', (done) => {
mock.onDelete(payload._links.delete_api_path).replyOnce(200);
Api.projectPackages = jest.fn().mockResolvedValue({ data: 'foo' });
@@ -185,14 +188,17 @@ describe('Actions Package list store', () => {
);
});
- it('should stop the loading and call create flash on api error', done => {
+ it('should stop the loading and call create flash on api error', (done) => {
mock.onDelete(payload._links.delete_api_path).replyOnce(400);
testAction(
actions.requestDeletePackage,
payload,
null,
[],
- [{ type: 'setLoading', payload: true }, { type: 'setLoading', payload: false }],
+ [
+ { type: 'setLoading', payload: true },
+ { type: 'setLoading', payload: false },
+ ],
() => {
expect(createFlash).toHaveBeenCalled();
done();
@@ -205,7 +211,7 @@ describe('Actions Package list store', () => {
${'_links'} | ${{}}
${'delete_api_path'} | ${{ _links: {} }}
`('should reject and createFlash when $property is missing', ({ actionPayload }, done) => {
- testAction(actions.requestDeletePackage, actionPayload, null, [], []).catch(e => {
+ testAction(actions.requestDeletePackage, actionPayload, null, [], []).catch((e) => {
expect(e).toEqual(new Error(MISSING_DELETE_PATH_ERROR));
expect(createFlash).toHaveBeenCalledWith(DELETE_PACKAGE_ERROR_MESSAGE);
done();
@@ -214,7 +220,7 @@ describe('Actions Package list store', () => {
});
describe('setSorting', () => {
- it('should commit SET_SORTING', done => {
+ it('should commit SET_SORTING', (done) => {
testAction(
actions.setSorting,
'foo',
@@ -227,7 +233,7 @@ describe('Actions Package list store', () => {
});
describe('setFilter', () => {
- it('should commit SET_FILTER', done => {
+ it('should commit SET_FILTER', (done) => {
testAction(
actions.setFilter,
'foo',
diff --git a/spec/frontend/packages/shared/components/package_path_spec.js b/spec/frontend/packages/shared/components/package_path_spec.js
index 40d455ac77c..3c9cd3387ba 100644
--- a/spec/frontend/packages/shared/components/package_path_spec.js
+++ b/spec/frontend/packages/shared/components/package_path_spec.js
@@ -21,8 +21,8 @@ describe('PackagePath', () => {
const ELLIPSIS_CHEVRON = 'ellipsis-chevron';
const LEAF_LINK = 'leaf-link';
- const findItem = name => wrapper.find(`[data-testid="${name}"]`);
- const findTooltip = w => getBinding(w.element, 'gl-tooltip');
+ const findItem = (name) => wrapper.find(`[data-testid="${name}"]`);
+ const findTooltip = (w) => getBinding(w.element, 'gl-tooltip');
afterEach(() => {
wrapper.destroy();
@@ -64,13 +64,13 @@ describe('PackagePath', () => {
}
if (shouldExist.length) {
- it.each(shouldExist)(`should have %s`, element => {
+ it.each(shouldExist)(`should have %s`, (element) => {
expect(findItem(element).exists()).toBe(true);
});
}
if (shouldNotExist.length) {
- it.each(shouldNotExist)(`should not have %s`, element => {
+ it.each(shouldNotExist)(`should not have %s`, (element) => {
expect(findItem(element).exists()).toBe(false);
});
}
diff --git a/spec/frontend/packages/shared/components/package_tags_spec.js b/spec/frontend/packages/shared/components/package_tags_spec.js
index cc49a9a9244..d26e4e76b87 100644
--- a/spec/frontend/packages/shared/components/package_tags_spec.js
+++ b/spec/frontend/packages/shared/components/package_tags_spec.js
@@ -75,11 +75,7 @@ describe('PackageTags', () => {
const expectedStyle = [...defaultStyle, 'gl-display-flex', 'gl-ml-3'];
- expect(
- tagBadges()
- .at(0)
- .classes(),
- ).toEqual(expect.arrayContaining(expectedStyle));
+ expect(tagBadges().at(0).classes()).toEqual(expect.arrayContaining(expectedStyle));
});
    it('shows tag badge for medium or higher resolutions', () => {
@@ -87,11 +83,7 @@ describe('PackageTags', () => {
const expectedStyle = [...defaultStyle, 'd-md-flex'];
- expect(
- tagBadges()
- .at(1)
- .classes(),
- ).toEqual(expect.arrayContaining(expectedStyle));
+ expect(tagBadges().at(1).classes()).toEqual(expect.arrayContaining(expectedStyle));
});
it('correctly prepends left and appends right when there is more than one tag', () => {
diff --git a/spec/frontend/packages/shared/utils_spec.js b/spec/frontend/packages/shared/utils_spec.js
index 3e4ce8eb323..590e3814bee 100644
--- a/spec/frontend/packages/shared/utils_spec.js
+++ b/spec/frontend/packages/shared/utils_spec.js
@@ -13,7 +13,7 @@ describe('Packages shared utils', () => {
expect(packageTypeToTrackCategory()).toMatchInlineSnapshot(`"UI::undefined"`);
});
- it.each(Object.keys(PackageType))('returns a correct category string for %s', packageKey => {
+ it.each(Object.keys(PackageType))('returns a correct category string for %s', (packageKey) => {
const packageName = PackageType[packageKey];
expect(packageTypeToTrackCategory(packageName)).toBe(
`UI::${TrackingCategories[packageName]}`,
diff --git a/spec/frontend/pager_spec.js b/spec/frontend/pager_spec.js
index 8b60f872bfd..c8a129e38e7 100644
--- a/spec/frontend/pager_spec.js
+++ b/spec/frontend/pager_spec.js
@@ -1,6 +1,6 @@
import $ from 'jquery';
import MockAdapter from 'axios-mock-adapter';
-import { TEST_HOST } from 'jest/helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
import axios from '~/lib/utils/axios_utils';
import Pager from '~/pager';
import { removeParams } from '~/lib/utils/url_utility';
@@ -89,7 +89,7 @@ describe('pager', () => {
Pager.init();
});
- it('shows loader while loading next page', done => {
+ it('shows loader while loading next page', (done) => {
mockSuccess();
jest.spyOn(Pager.loading, 'show').mockImplementation(() => {});
@@ -102,7 +102,7 @@ describe('pager', () => {
});
});
- it('hides loader on success', done => {
+ it('hides loader on success', (done) => {
mockSuccess();
jest.spyOn(Pager.loading, 'hide').mockImplementation(() => {});
@@ -115,7 +115,7 @@ describe('pager', () => {
});
});
- it('hides loader on error', done => {
+ it('hides loader on error', (done) => {
mockError();
jest.spyOn(Pager.loading, 'hide').mockImplementation(() => {});
@@ -128,7 +128,7 @@ describe('pager', () => {
});
});
- it('sends request to url with offset and limit params', done => {
+ it('sends request to url with offset and limit params', (done) => {
Pager.offset = 100;
Pager.limit = 20;
Pager.getOld();
@@ -149,7 +149,7 @@ describe('pager', () => {
});
});
- it('disables if return count is less than limit', done => {
+ it('disables if return count is less than limit', (done) => {
Pager.offset = 0;
Pager.limit = 20;
diff --git a/spec/frontend/pages/admin/abuse_reports/abuse_reports_spec.js b/spec/frontend/pages/admin/abuse_reports/abuse_reports_spec.js
index 7e9aec84016..2c76adf761f 100644
--- a/spec/frontend/pages/admin/abuse_reports/abuse_reports_spec.js
+++ b/spec/frontend/pages/admin/abuse_reports/abuse_reports_spec.js
@@ -8,10 +8,10 @@ describe('Abuse Reports', () => {
let $messages;
- const assertMaxLength = $message => {
+ const assertMaxLength = ($message) => {
expect($message.text().length).toEqual(MAX_MESSAGE_LENGTH);
};
- const findMessage = searchText =>
+ const findMessage = (searchText) =>
$messages.filter((index, element) => element.innerText.indexOf(searchText) > -1).first();
preloadFixtures(FIXTURE);
diff --git a/spec/frontend/pages/admin/application_settings/account_and_limits_spec.js b/spec/frontend/pages/admin/application_settings/account_and_limits_spec.js
index 6a239e307e9..8816609d1d2 100644
--- a/spec/frontend/pages/admin/application_settings/account_and_limits_spec.js
+++ b/spec/frontend/pages/admin/application_settings/account_and_limits_spec.js
@@ -24,7 +24,7 @@ describe('AccountAndLimits', () => {
expect($userInternalRegex.readOnly).toBeTruthy();
});
- it('is checked', done => {
+ it('is checked', (done) => {
if (!$userDefaultExternal.prop('checked')) $userDefaultExternal.click();
expect($userDefaultExternal.prop('checked')).toBeTruthy();
diff --git a/spec/frontend/pages/admin/jobs/index/components/stop_jobs_modal_spec.js b/spec/frontend/pages/admin/jobs/index/components/stop_jobs_modal_spec.js
index c662fb7ba4a..81750b4827f 100644
--- a/spec/frontend/pages/admin/jobs/index/components/stop_jobs_modal_spec.js
+++ b/spec/frontend/pages/admin/jobs/index/components/stop_jobs_modal_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
import mountComponent from 'helpers/vue_mount_component_helper';
-import { TEST_HOST } from 'jest/helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
import { redirectTo } from '~/lib/utils/url_utility';
import axios from '~/lib/utils/axios_utils';
import stopJobsModal from '~/pages/admin/jobs/index/components/stop_jobs_modal.vue';
@@ -26,9 +26,9 @@ describe('stop_jobs_modal.vue', () => {
});
describe('onSubmit', () => {
- it('stops jobs and redirects to overview page', done => {
+ it('stops jobs and redirects to overview page', (done) => {
const responseURL = `${TEST_HOST}/stop_jobs_modal.vue/jobs`;
- jest.spyOn(axios, 'post').mockImplementation(url => {
+ jest.spyOn(axios, 'post').mockImplementation((url) => {
expect(url).toBe(props.url);
return Promise.resolve({
request: {
@@ -45,16 +45,16 @@ describe('stop_jobs_modal.vue', () => {
.catch(done.fail);
});
- it('displays error if stopping jobs failed', done => {
+ it('displays error if stopping jobs failed', (done) => {
const dummyError = new Error('stopping jobs failed');
- jest.spyOn(axios, 'post').mockImplementation(url => {
+ jest.spyOn(axios, 'post').mockImplementation((url) => {
expect(url).toBe(props.url);
return Promise.reject(dummyError);
});
vm.onSubmit()
.then(done.fail)
- .catch(error => {
+ .catch((error) => {
expect(error).toBe(dummyError);
expect(redirectTo).not.toHaveBeenCalled();
})
diff --git a/spec/frontend/pages/admin/users/components/delete_user_modal_spec.js b/spec/frontend/pages/admin/users/components/delete_user_modal_spec.js
index 3efefa8137f..d203a8ea0e0 100644
--- a/spec/frontend/pages/admin/users/components/delete_user_modal_spec.js
+++ b/spec/frontend/pages/admin/users/components/delete_user_modal_spec.js
@@ -11,10 +11,10 @@ describe('User Operation confirmation modal', () => {
let wrapper;
let formSubmitSpy;
- const findButton = variant =>
+ const findButton = (variant) =>
wrapper
.findAll(GlButton)
- .filter(w => w.attributes('variant') === variant)
+ .filter((w) => w.attributes('variant') === variant)
.at(0);
const findForm = () => wrapper.find('form');
const findUsernameInput = () => wrapper.find(GlFormInput);
@@ -25,7 +25,7 @@ describe('User Operation confirmation modal', () => {
const getMethodParam = () => new FormData(findForm().element).get('_method');
const getFormAction = () => findForm().attributes('action');
- const setUsername = username => {
+ const setUsername = (username) => {
findUsernameInput().vm.$emit('input', username);
};
diff --git a/spec/frontend/pages/admin/users/new/index_spec.js b/spec/frontend/pages/admin/users/new/index_spec.js
index 3896323eef7..60482860e84 100644
--- a/spec/frontend/pages/admin/users/new/index_spec.js
+++ b/spec/frontend/pages/admin/users/new/index_spec.js
@@ -20,7 +20,7 @@ describe('UserInternalRegexHandler', () => {
});
describe('Behaviour of userExternal checkbox when', () => {
- it('matches email as internal', done => {
+ it('matches email as internal', (done) => {
expect($warningMessage.hasClass('hidden')).toBeTruthy();
$userEmail.val('test@').trigger('input');
@@ -30,7 +30,7 @@ describe('UserInternalRegexHandler', () => {
done();
});
- it('matches email as external', done => {
+ it('matches email as external', (done) => {
expect($warningMessage.hasClass('hidden')).toBeTruthy();
$userEmail.val('test.ext@').trigger('input');
diff --git a/spec/frontend/pages/dashboard/todos/index/todos_spec.js b/spec/frontend/pages/dashboard/todos/index/todos_spec.js
index 5ecb7860103..5018b0c4f73 100644
--- a/spec/frontend/pages/dashboard/todos/index/todos_spec.js
+++ b/spec/frontend/pages/dashboard/todos/index/todos_spec.js
@@ -31,10 +31,10 @@ describe('Todos', () => {
});
describe('goToTodoUrl', () => {
- it('opens the todo url', done => {
+ it('opens the todo url', (done) => {
const todoLink = todoItem.dataset.url;
- visitUrl.mockImplementation(url => {
+ visitUrl.mockImplementation((url) => {
expect(url).toEqual(todoLink);
done();
});
@@ -61,7 +61,7 @@ describe('Todos', () => {
});
    it('runs native functionality when avatar is clicked', () => {
- $('.todos-list a').on('click', e => e.preventDefault());
+ $('.todos-list a').on('click', (e) => e.preventDefault());
$('.todos-list img').trigger(metakeyEvent);
expect(visitUrl).not.toHaveBeenCalled();
@@ -72,7 +72,7 @@ describe('Todos', () => {
describe('on done todo click', () => {
let onToggleSpy;
- beforeEach(done => {
+ beforeEach((done) => {
const el = document.querySelector('.js-done-todo');
const path = el.dataset.href;
diff --git a/spec/frontend/pages/import/bitbucket_server/components/bitbucket_server_status_table_spec.js b/spec/frontend/pages/import/bitbucket_server/components/bitbucket_server_status_table_spec.js
index 695d1b686a5..a91fc5abe09 100644
--- a/spec/frontend/pages/import/bitbucket_server/components/bitbucket_server_status_table_spec.js
+++ b/spec/frontend/pages/import/bitbucket_server/components/bitbucket_server_status_table_spec.js
@@ -15,7 +15,7 @@ describe('BitbucketServerStatusTable', () => {
const findReconfigureButton = () =>
wrapper
.findAll(GlButton)
- .filter(w => w.props().variant === 'info')
+ .filter((w) => w.props().variant === 'info')
.at(0);
afterEach(() => {
diff --git a/spec/frontend/pages/labels/components/promote_label_modal_spec.js b/spec/frontend/pages/labels/components/promote_label_modal_spec.js
index f969808d78b..19807313c77 100644
--- a/spec/frontend/pages/labels/components/promote_label_modal_spec.js
+++ b/spec/frontend/pages/labels/components/promote_label_modal_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
import mountComponent from 'helpers/vue_mount_component_helper';
-import { TEST_HOST } from 'jest/helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
import promoteLabelModal from '~/pages/projects/labels/components/promote_label_modal.vue';
import eventHub from '~/pages/projects/labels/event_hub';
import axios from '~/lib/utils/axios_utils';
@@ -50,9 +50,9 @@ describe('Promote label modal', () => {
vm.$destroy();
});
- it('redirects when a label is promoted', done => {
+ it('redirects when a label is promoted', (done) => {
const responseURL = `${TEST_HOST}/dummy/endpoint`;
- jest.spyOn(axios, 'post').mockImplementation(url => {
+ jest.spyOn(axios, 'post').mockImplementation((url) => {
expect(url).toBe(labelMockData.url);
expect(eventHub.$emit).toHaveBeenCalledWith(
'promoteLabelModal.requestStarted',
@@ -76,10 +76,10 @@ describe('Promote label modal', () => {
.catch(done.fail);
});
- it('displays an error if promoting a label failed', done => {
+ it('displays an error if promoting a label failed', (done) => {
const dummyError = new Error('promoting label failed');
dummyError.response = { status: 500 };
- jest.spyOn(axios, 'post').mockImplementation(url => {
+ jest.spyOn(axios, 'post').mockImplementation((url) => {
expect(url).toBe(labelMockData.url);
expect(eventHub.$emit).toHaveBeenCalledWith(
'promoteLabelModal.requestStarted',
@@ -89,7 +89,7 @@ describe('Promote label modal', () => {
});
vm.onSubmit()
- .catch(error => {
+ .catch((error) => {
expect(error).toBe(dummyError);
expect(eventHub.$emit).toHaveBeenCalledWith('promoteLabelModal.requestFinished', {
labelUrl: labelMockData.url,
diff --git a/spec/frontend/pages/milestones/shared/components/delete_milestone_modal_spec.js b/spec/frontend/pages/milestones/shared/components/delete_milestone_modal_spec.js
index 1d9a964c3c3..7bb637356c2 100644
--- a/spec/frontend/pages/milestones/shared/components/delete_milestone_modal_spec.js
+++ b/spec/frontend/pages/milestones/shared/components/delete_milestone_modal_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
import mountComponent from 'helpers/vue_mount_component_helper';
-import { TEST_HOST } from 'jest/helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
import { redirectTo } from '~/lib/utils/url_utility';
import axios from '~/lib/utils/axios_utils';
import deleteMilestoneModal from '~/pages/milestones/shared/components/delete_milestone_modal.vue';
@@ -32,9 +32,9 @@ describe('delete_milestone_modal.vue', () => {
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
});
- it('deletes milestone and redirects to overview page', done => {
+ it('deletes milestone and redirects to overview page', (done) => {
const responseURL = `${TEST_HOST}/delete_milestone_modal.vue/milestoneOverview`;
- jest.spyOn(axios, 'delete').mockImplementation(url => {
+ jest.spyOn(axios, 'delete').mockImplementation((url) => {
expect(url).toBe(props.milestoneUrl);
expect(eventHub.$emit).toHaveBeenCalledWith(
'deleteMilestoneModal.requestStarted',
@@ -60,10 +60,10 @@ describe('delete_milestone_modal.vue', () => {
.catch(done.fail);
});
- it('displays error if deleting milestone failed', done => {
+ it('displays error if deleting milestone failed', (done) => {
const dummyError = new Error('deleting milestone failed');
dummyError.response = { status: 418 };
- jest.spyOn(axios, 'delete').mockImplementation(url => {
+ jest.spyOn(axios, 'delete').mockImplementation((url) => {
expect(url).toBe(props.milestoneUrl);
expect(eventHub.$emit).toHaveBeenCalledWith(
'deleteMilestoneModal.requestStarted',
@@ -74,7 +74,7 @@ describe('delete_milestone_modal.vue', () => {
});
vm.onSubmit()
- .catch(error => {
+ .catch((error) => {
expect(error).toBe(dummyError);
expect(redirectTo).not.toHaveBeenCalled();
expect(eventHub.$emit).toHaveBeenCalledWith('deleteMilestoneModal.requestFinished', {
diff --git a/spec/frontend/pages/milestones/shared/components/promote_milestone_modal_spec.js b/spec/frontend/pages/milestones/shared/components/promote_milestone_modal_spec.js
index e8a6e259837..7cd94deb3da 100644
--- a/spec/frontend/pages/milestones/shared/components/promote_milestone_modal_spec.js
+++ b/spec/frontend/pages/milestones/shared/components/promote_milestone_modal_spec.js
@@ -1,99 +1,109 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import { TEST_HOST } from 'jest/helpers/test_constants';
-import promoteMilestoneModal from '~/pages/milestones/shared/components/promote_milestone_modal.vue';
-import eventHub from '~/pages/milestones/shared/event_hub';
+import { GlModal } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { TEST_HOST } from 'helpers/test_constants';
+import { setHTMLFixture } from 'helpers/fixtures';
+import waitForPromises from 'helpers/wait_for_promises';
+import PromoteMilestoneModal from '~/pages/milestones/shared/components/promote_milestone_modal.vue';
import axios from '~/lib/utils/axios_utils';
+import * as urlUtils from '~/lib/utils/url_utility';
+import * as flash from '~/flash';
+
+jest.mock('~/lib/utils/url_utility');
+jest.mock('~/flash');
describe('Promote milestone modal', () => {
- let vm;
- const Component = Vue.extend(promoteMilestoneModal);
+ let wrapper;
const milestoneMockData = {
milestoneTitle: 'v1.0',
url: `${TEST_HOST}/dummy/promote/milestones`,
groupName: 'group',
};
- describe('Modal title and description', () => {
- beforeEach(() => {
- vm = mountComponent(Component, milestoneMockData);
+ const promoteButton = () => document.querySelector('.js-promote-project-milestone-button');
+
+ beforeEach(() => {
+ setHTMLFixture(`<button
+ class="js-promote-project-milestone-button"
+ data-group-name="${milestoneMockData.groupName}"
+ data-milestone-title="${milestoneMockData.milestoneTitle}"
+ data-url="${milestoneMockData.url}">
+ Promote
+ </button>`);
+ wrapper = shallowMount(PromoteMilestoneModal);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('Modal opener button', () => {
+ it('button gets disabled when the modal opens', () => {
+ expect(promoteButton().disabled).toBe(false);
+
+ promoteButton().click();
+
+ expect(promoteButton().disabled).toBe(true);
+ });
+
+ it('button gets enabled when the modal closes', () => {
+ promoteButton().click();
+
+ wrapper.findComponent(GlModal).vm.$emit('hide');
+
+ expect(promoteButton().disabled).toBe(false);
});
+ });
- afterEach(() => {
- vm.$destroy();
+ describe('Modal title and description', () => {
+ beforeEach(() => {
+ promoteButton().click();
});
it('contains the proper description', () => {
- expect(vm.text).toContain(
+ expect(wrapper.vm.text).toContain(
`Promoting ${milestoneMockData.milestoneTitle} will make it available for all projects inside ${milestoneMockData.groupName}.`,
);
});
it('contains the correct title', () => {
- expect(vm.title).toEqual('Promote v1.0 to group milestone?');
+ expect(wrapper.vm.title).toBe('Promote v1.0 to group milestone?');
});
});
describe('When requesting a milestone promotion', () => {
beforeEach(() => {
- vm = mountComponent(Component, {
- ...milestoneMockData,
- });
- jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
- });
-
- afterEach(() => {
- vm.$destroy();
+ promoteButton().click();
});
- it('redirects when a milestone is promoted', done => {
+ it('redirects when a milestone is promoted', async () => {
const responseURL = `${TEST_HOST}/dummy/endpoint`;
- jest.spyOn(axios, 'post').mockImplementation(url => {
+ jest.spyOn(axios, 'post').mockImplementation((url) => {
expect(url).toBe(milestoneMockData.url);
- expect(eventHub.$emit).toHaveBeenCalledWith(
- 'promoteMilestoneModal.requestStarted',
- milestoneMockData.url,
- );
return Promise.resolve({
- request: {
- responseURL,
+ data: {
+ url: responseURL,
},
});
});
- vm.onSubmit()
- .then(() => {
- expect(eventHub.$emit).toHaveBeenCalledWith('promoteMilestoneModal.requestFinished', {
- milestoneUrl: milestoneMockData.url,
- successful: true,
- });
- })
- .then(done)
- .catch(done.fail);
+ wrapper.findComponent(GlModal).vm.$emit('primary');
+ await waitForPromises();
+
+ expect(urlUtils.visitUrl).toHaveBeenCalledWith(responseURL);
});
- it('displays an error if promoting a milestone failed', done => {
+ it('displays an error if promoting a milestone failed', async () => {
const dummyError = new Error('promoting milestone failed');
dummyError.response = { status: 500 };
- jest.spyOn(axios, 'post').mockImplementation(url => {
+ jest.spyOn(axios, 'post').mockImplementation((url) => {
expect(url).toBe(milestoneMockData.url);
- expect(eventHub.$emit).toHaveBeenCalledWith(
- 'promoteMilestoneModal.requestStarted',
- milestoneMockData.url,
- );
return Promise.reject(dummyError);
});
- vm.onSubmit()
- .catch(error => {
- expect(error).toBe(dummyError);
- expect(eventHub.$emit).toHaveBeenCalledWith('promoteMilestoneModal.requestFinished', {
- milestoneUrl: milestoneMockData.url,
- successful: false,
- });
- })
- .then(done)
- .catch(done.fail);
+ wrapper.findComponent(GlModal).vm.$emit('primary');
+ await waitForPromises();
+
+ expect(flash.deprecatedCreateFlash).toHaveBeenCalledWith(dummyError);
});
});
});
diff --git a/spec/frontend/pages/profiles/show/emoji_menu_spec.js b/spec/frontend/pages/profiles/show/emoji_menu_spec.js
index 08fc0b92424..f35fb57aec7 100644
--- a/spec/frontend/pages/profiles/show/emoji_menu_spec.js
+++ b/spec/frontend/pages/profiles/show/emoji_menu_spec.js
@@ -46,7 +46,7 @@ describe('EmojiMenu', () => {
const dummyEmoji = 'tropical_fish';
const dummyVotesBlock = () => $('<div />');
- it('calls selectEmojiCallback', done => {
+ it('calls selectEmojiCallback', (done) => {
expect(dummySelectEmojiCallback).not.toHaveBeenCalled();
emojiMenu.addAward(dummyVotesBlock(), dummyAwardUrl, dummyEmoji, false, () => {
@@ -55,7 +55,7 @@ describe('EmojiMenu', () => {
});
});
- it('does not make an axios request', done => {
+ it('does not make an axios request', (done) => {
jest.spyOn(axios, 'request').mockReturnValue();
emojiMenu.addAward(dummyVotesBlock(), dummyAwardUrl, dummyEmoji, false, () => {
diff --git a/spec/frontend/pages/projects/edit/mount_search_settings_spec.js b/spec/frontend/pages/projects/edit/mount_search_settings_spec.js
new file mode 100644
index 00000000000..b48809b3d00
--- /dev/null
+++ b/spec/frontend/pages/projects/edit/mount_search_settings_spec.js
@@ -0,0 +1,25 @@
+import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
+import initSearch from '~/search_settings';
+import mountSearchSettings from '~/pages/projects/edit/mount_search_settings';
+
+jest.mock('~/search_settings');
+
+describe('pages/projects/edit/mount_search_settings', () => {
+ afterEach(() => {
+ resetHTMLFixture();
+ });
+
+ it('initializes search settings when js-search-settings-app is available', async () => {
+ setHTMLFixture('<div class="js-search-settings-app"></div>');
+
+ await mountSearchSettings();
+
+ expect(initSearch).toHaveBeenCalled();
+ });
+
+ it('does not initialize search settings when js-search-settings-app is unavailable', async () => {
+ await mountSearchSettings();
+
+ expect(initSearch).not.toHaveBeenCalled();
+ });
+});
diff --git a/spec/frontend/pages/projects/forks/new/components/fork_groups_list_item_spec.js b/spec/frontend/pages/projects/forks/new/components/fork_groups_list_item_spec.js
index 73e3c385d33..b90c07a335b 100644
--- a/spec/frontend/pages/projects/forks/new/components/fork_groups_list_item_spec.js
+++ b/spec/frontend/pages/projects/forks/new/components/fork_groups_list_item_spec.js
@@ -30,7 +30,7 @@ describe('Fork groups list item component', () => {
const DUMMY_PATH = '/dummy/path';
- const createWrapper = propsData => {
+ const createWrapper = (propsData) => {
wrapper = shallowMount(ForkGroupsListItem, {
propsData: {
...DEFAULT_PROPS,
@@ -70,7 +70,7 @@ describe('Fork groups list item component', () => {
expect(
wrapper
.findAll(GlLink)
- .filter(w => w.text() === DUMMY_FULL_NAME)
+ .filter((w) => w.text() === DUMMY_FULL_NAME)
.at(0)
.attributes().href,
).toBe(DUMMY_PATH);
diff --git a/spec/frontend/pages/projects/forks/new/components/fork_groups_list_spec.js b/spec/frontend/pages/projects/forks/new/components/fork_groups_list_spec.js
index 9993e4da980..91740c7ce3b 100644
--- a/spec/frontend/pages/projects/forks/new/components/fork_groups_list_spec.js
+++ b/spec/frontend/pages/projects/forks/new/components/fork_groups_list_spec.js
@@ -21,7 +21,7 @@ describe('Fork groups list component', () => {
const replyWith = (...args) => axiosMock.onGet(DEFAULT_PROPS.endpoint).reply(...args);
- const createWrapper = propsData => {
+ const createWrapper = (propsData) => {
wrapper = shallowMount(ForkGroupsList, {
propsData: {
...DEFAULT_PROPS,
@@ -104,12 +104,10 @@ describe('Fork groups list component', () => {
expect(wrapper.findAll(ForkGroupsListItem)).toHaveLength(namespaces.length);
namespaces.forEach((namespace, idx) => {
- expect(
- wrapper
- .findAll(ForkGroupsListItem)
- .at(idx)
- .props(),
- ).toStrictEqual({ group: namespace, hasReachedProjectLimit });
+ expect(wrapper.findAll(ForkGroupsListItem).at(idx).props()).toStrictEqual({
+ group: namespace,
+ hasReachedProjectLimit,
+ });
});
});
@@ -123,11 +121,6 @@ describe('Fork groups list component', () => {
await nextTick();
expect(wrapper.findAll(ForkGroupsListItem)).toHaveLength(1);
- expect(
- wrapper
- .findAll(ForkGroupsListItem)
- .at(0)
- .props().group.name,
- ).toBe('otherdummy');
+ expect(wrapper.findAll(ForkGroupsListItem).at(0).props().group.name).toBe('otherdummy');
});
});
diff --git a/spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap b/spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap
index 324c9788309..c4c48ea7517 100644
--- a/spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap
+++ b/spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap
@@ -12,6 +12,7 @@ exports[`Code Coverage when fetching data is successful matches the snapshot 1`]
<gl-dropdown-stub
category="primary"
headertext=""
+ hideheaderborder="true"
size="medium"
text="rspec"
variant="default"
diff --git a/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js b/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
index 9a119377542..27cd0fe34bf 100644
--- a/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
+++ b/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
@@ -26,7 +26,7 @@ describe('Interval Pattern Input Component', () => {
const findCustomInput = () => wrapper.find('#schedule_cron');
const findAllLabels = () => wrapper.findAll('label');
const findSelectedRadio = () =>
- wrapper.findAll('input[type="radio"]').wrappers.find(x => x.element.checked);
+ wrapper.findAll('input[type="radio"]').wrappers.find((x) => x.element.checked);
const findSelectedRadioKey = () => findSelectedRadio()?.attributes('data-testid');
const selectEveryDayRadio = () => findEveryDayRadio().trigger('click');
const selectEveryWeekRadio = () => findEveryWeekRadio().trigger('click');
@@ -129,7 +129,7 @@ describe('Interval Pattern Input Component', () => {
});
it('renders each label for radio options properly', () => {
- const labels = findAllLabels().wrappers.map(el => trimText(el.text()));
+ const labels = findAllLabels().wrappers.map((el) => trimText(el.text()));
expect(labels).toEqual([
'Every day (at 4:00am)',
diff --git a/spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js b/spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js
index 5efcedf678b..de63409b181 100644
--- a/spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js
+++ b/spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js
@@ -47,7 +47,7 @@ describe('Timezone Dropdown', () => {
const data = $dropdownEl.data('data');
const formatted = $wrapper.find(tzListSel).text();
- data.forEach(item => {
+ data.forEach((item) => {
expect(formatted).toContain(formatTimezone(item));
});
});
@@ -88,10 +88,7 @@ describe('Timezone Dropdown', () => {
onSelectTimezone,
});
- $wrapper
- .find(tzListSel)
- .first()
- .trigger('click');
+ $wrapper.find(tzListSel).first().trigger('click');
expect(onSelectTimezone).toHaveBeenCalled();
});
@@ -103,7 +100,7 @@ describe('Timezone Dropdown', () => {
new TimezoneDropdown({
$inputEl,
$dropdownEl,
- displayFormat: selectedItem => formatTimezone(selectedItem),
+ displayFormat: (selectedItem) => formatTimezone(selectedItem),
});
expect($wrapper.find(tzDropdownToggleText).html()).toEqual('[UTC - 2.5] Newfoundland');
@@ -118,10 +115,7 @@ describe('Timezone Dropdown', () => {
displayFormat,
});
- $wrapper
- .find(tzListSel)
- .first()
- .trigger('click');
+ $wrapper.find(tzListSel).first().trigger('click');
expect(displayFormat).toHaveBeenCalled();
});
diff --git a/spec/frontend/pages/projects/shared/permissions/components/project_feature_settings_spec.js b/spec/frontend/pages/projects/shared/permissions/components/project_feature_settings_spec.js
index 1fd9d285610..c90ebd47b08 100644
--- a/spec/frontend/pages/projects/shared/permissions/components/project_feature_settings_spec.js
+++ b/spec/frontend/pages/projects/shared/permissions/components/project_feature_settings_spec.js
@@ -6,13 +6,20 @@ import projectFeatureToggle from '~/vue_shared/components/toggle_button.vue';
describe('Project Feature Settings', () => {
const defaultProps = {
name: 'Test',
- options: [[1, 1], [2, 2], [3, 3], [4, 4], [5, 5]],
+ options: [
+ [1, 1],
+ [2, 2],
+ [3, 3],
+ [4, 4],
+ [5, 5],
+ ],
value: 1,
disabledInput: false,
+ showToggle: true,
};
let wrapper;
- const mountComponent = customProps => {
+ const mountComponent = (customProps) => {
const propsData = { ...defaultProps, ...customProps };
return shallowMount(projectFeatureSetting, { propsData });
};
@@ -40,6 +47,14 @@ describe('Project Feature Settings', () => {
});
describe('Feature toggle', () => {
+ it('should be hidden if "showToggle" is passed false', async () => {
+ wrapper.setProps({ showToggle: false });
+
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find(projectFeatureToggle).element).toBeUndefined();
+ });
+
it('should enable the feature toggle if the value is not 0', () => {
expect(wrapper.find(projectFeatureToggle).props().value).toBe(true);
});
@@ -73,10 +88,7 @@ describe('Project Feature Settings', () => {
wrapper = mount(projectFeatureSetting, { propsData: defaultProps });
expect(wrapper.emitted().change).toBeUndefined();
- wrapper
- .find(projectFeatureToggle)
- .find('button')
- .trigger('click');
+ wrapper.find(projectFeatureToggle).find('button').trigger('click');
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.emitted().change.length).toBe(1);
@@ -110,10 +122,7 @@ describe('Project Feature Settings', () => {
it('should emit the change when a new option is selected', () => {
expect(wrapper.emitted().change).toBeUndefined();
- wrapper
- .findAll('option')
- .at(1)
- .trigger('change');
+ wrapper.findAll('option').at(1).trigger('change');
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.emitted().change.length).toBe(1);
diff --git a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
index 0b58260ed1c..9aee6ec7ace 100644
--- a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
+++ b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
@@ -20,6 +20,7 @@ const defaultProps = {
buildsAccessLevel: 20,
wikiAccessLevel: 20,
snippetsAccessLevel: 20,
+ operationsAccessLevel: 20,
pagesAccessLevel: 10,
analyticsAccessLevel: 20,
containerRegistryEnabled: true,
@@ -68,8 +69,12 @@ describe('Settings Panel', () => {
});
};
- const overrideCurrentSettings = (currentSettingsProps, extraProps = {}) => {
- return mountComponent({ ...extraProps, currentSettings: currentSettingsProps });
+ const overrideCurrentSettings = (
+ currentSettingsProps,
+ extraProps = {},
+ mountFn = shallowMount,
+ ) => {
+ return mountComponent({ ...extraProps, currentSettings: currentSettingsProps }, mountFn);
};
const findLFSSettingsRow = () => wrapper.find({ ref: 'git-lfs-settings' });
@@ -362,7 +367,7 @@ describe('Settings Panel', () => {
const repositoryFeatureToggleButton = findRepositoryFeatureSetting().find('button');
const lfsFeatureToggleButton = findLFSFeatureToggle().find('button');
- const isToggleButtonChecked = toggleButton => toggleButton.classes('is-checked');
+ const isToggleButtonChecked = (toggleButton) => toggleButton.classes('is-checked');
// assert the initial state
expect(isToggleButtonChecked(lfsFeatureToggleButton)).toBe(true);
@@ -523,28 +528,30 @@ describe('Settings Panel', () => {
});
});
- it('should set the visibility level description based upon the selected visibility level', () => {
- wrapper
- .find('[name="project[project_feature_attributes][metrics_dashboard_access_level]"]')
- .setValue(visibilityOptions.PUBLIC);
-
- expect(wrapper.vm.metricsDashboardAccessLevel).toBe(visibilityOptions.PUBLIC);
- });
-
it('should contain help text', () => {
expect(wrapper.find({ ref: 'metrics-visibility-settings' }).props().helpText).toBe(
'With Metrics Dashboard you can visualize this project performance metrics',
);
});
- it('should disable the metrics visibility dropdown when the project visibility level changes to private', () => {
- wrapper = overrideCurrentSettings({ visibilityLevel: visibilityOptions.PRIVATE });
+ it.each`
+ scenario | selectedOption | selectedOptionLabel
+ ${{ visibilityLevel: visibilityOptions.PRIVATE }} | ${String(featureAccessLevel.PROJECT_MEMBERS)} | ${'Only Project Members'}
+ ${{ operationsAccessLevel: featureAccessLevel.NOT_ENABLED }} | ${String(featureAccessLevel.NOT_ENABLED)} | ${'Enable feature to choose access level'}
+ `(
+ 'should disable the metrics visibility dropdown when #scenario',
+ ({ scenario, selectedOption, selectedOptionLabel }) => {
+ wrapper = overrideCurrentSettings(scenario, {}, mount);
- const metricsSettingsRow = wrapper.find({ ref: 'metrics-visibility-settings' });
+ const select = wrapper.find({ ref: 'metrics-visibility-settings' }).find('select');
+ const option = select.find('option');
- expect(wrapper.vm.metricsOptionsDropdownEnabled).toBe(true);
- expect(metricsSettingsRow.find('select').attributes('disabled')).toBe('disabled');
- });
+ expect(select.attributes('disabled')).toBe('disabled');
+ expect(select.element.value).toBe(selectedOption);
+ expect(option.attributes('value')).toBe(selectedOption);
+ expect(option.text()).toBe(selectedOptionLabel);
+ },
+ );
});
describe('Settings panel with feature flags', () => {
@@ -568,4 +575,12 @@ describe('Settings Panel', () => {
expect(findAnalyticsRow().exists()).toBe(true);
});
});
+
+ describe('Operations', () => {
+ it('should show the operations toggle', async () => {
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find({ ref: 'operations-settings' }).exists()).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/pages/search/show/refresh_counts_spec.js b/spec/frontend/pages/search/show/refresh_counts_spec.js
index ead268b3971..81c9bf74308 100644
--- a/spec/frontend/pages/search/show/refresh_counts_spec.js
+++ b/spec/frontend/pages/search/show/refresh_counts_spec.js
@@ -4,8 +4,11 @@ import axios from '~/lib/utils/axios_utils';
import refreshCounts from '~/pages/search/show/refresh_counts';
const URL = `${TEST_HOST}/search/count?search=lorem+ipsum&project_id=3`;
-const urlWithScope = scope => `${URL}&scope=${scope}`;
-const counts = [{ scope: 'issues', count: 4 }, { scope: 'merge_requests', count: 5 }];
+const urlWithScope = (scope) => `${URL}&scope=${scope}`;
+const counts = [
+ { scope: 'issues', count: 4 },
+ { scope: 'merge_requests', count: 5 },
+];
const fixture = `<div class="badge">22</div>
<div class="badge js-search-count hidden" data-url="${urlWithScope('issues')}"></div>
<div class="badge js-search-count hidden" data-url="${urlWithScope('merge_requests')}"></div>`;
diff --git a/spec/frontend/pages/sessions/new/preserve_url_fragment_spec.js b/spec/frontend/pages/sessions/new/preserve_url_fragment_spec.js
index 4b50342bf84..8632c852720 100644
--- a/spec/frontend/pages/sessions/new/preserve_url_fragment_spec.js
+++ b/spec/frontend/pages/sessions/new/preserve_url_fragment_spec.js
@@ -2,10 +2,8 @@ import $ from 'jquery';
import preserveUrlFragment from '~/pages/sessions/new/preserve_url_fragment';
describe('preserve_url_fragment', () => {
- const findFormAction = selector => {
- return $(`.omniauth-container ${selector}`)
- .parent('form')
- .attr('action');
+ const findFormAction = (selector) => {
+ return $(`.omniauth-container ${selector}`).parent('form').attr('action');
};
preloadFixtures('sessions/new.html');
diff --git a/spec/frontend/pages/sessions/new/signin_tabs_memoizer_spec.js b/spec/frontend/pages/sessions/new/signin_tabs_memoizer_spec.js
index 589ec0ae047..f04c16d2ddb 100644
--- a/spec/frontend/pages/sessions/new/signin_tabs_memoizer_spec.js
+++ b/spec/frontend/pages/sessions/new/signin_tabs_memoizer_spec.js
@@ -56,7 +56,7 @@ describe('SigninTabsMemoizer', () => {
};
jest
.spyOn(document, 'querySelector')
- .mockImplementation(selector =>
+ .mockImplementation((selector) =>
selector === `${tabSelector} a[href="#bogus"]` ? null : fakeTab,
);
diff --git a/spec/frontend/pdf/index_spec.js b/spec/frontend/pdf/index_spec.js
index 0d8caa28fd1..1ae77a62675 100644
--- a/spec/frontend/pdf/index_spec.js
+++ b/spec/frontend/pdf/index_spec.js
@@ -14,7 +14,7 @@ const Component = Vue.extend(PDFLab);
describe('PDF component', () => {
let vm;
- const checkLoaded = done => {
+ const checkLoaded = (done) => {
if (vm.loading) {
setTimeout(() => {
checkLoaded(done);
@@ -25,7 +25,7 @@ describe('PDF component', () => {
};
describe('without PDF data', () => {
- beforeEach(done => {
+ beforeEach((done) => {
vm = new Component({
propsData: {
pdf: '',
@@ -43,7 +43,7 @@ describe('PDF component', () => {
});
describe('with PDF data', () => {
- beforeEach(done => {
+ beforeEach((done) => {
vm = new Component({
propsData: {
pdf,
diff --git a/spec/frontend/pdf/page_spec.js b/spec/frontend/pdf/page_spec.js
index f9d94781265..4e0a6f78b63 100644
--- a/spec/frontend/pdf/page_spec.js
+++ b/spec/frontend/pdf/page_spec.js
@@ -14,7 +14,7 @@ describe('Page component', () => {
vm.$destroy();
});
- it('renders the page when mounting', done => {
+ it('renders the page when mounting', (done) => {
const promise = Promise.resolve();
const testPage = {
render: jest.fn().mockReturnValue({ promise: Promise.resolve() }),
diff --git a/spec/frontend/performance_bar/components/detailed_metric_spec.js b/spec/frontend/performance_bar/components/detailed_metric_spec.js
index 739b45e2193..754ffcc12e3 100644
--- a/spec/frontend/performance_bar/components/detailed_metric_spec.js
+++ b/spec/frontend/performance_bar/components/detailed_metric_spec.js
@@ -7,7 +7,7 @@ import RequestWarning from '~/performance_bar/components/request_warning.vue';
describe('detailedMetric', () => {
let wrapper;
- const createComponent = props => {
+ const createComponent = (props) => {
wrapper = shallowMount(DetailedMetric, {
propsData: {
...props,
@@ -16,9 +16,9 @@ describe('detailedMetric', () => {
};
const findAllTraceBlocks = () => wrapper.findAll('pre');
- const findTraceBlockAtIndex = index => findAllTraceBlocks().at(index);
+ const findTraceBlockAtIndex = (index) => findAllTraceBlocks().at(index);
const findExpandBacktraceBtns = () => wrapper.findAll('[data-testid="backtrace-expand-btn"]');
- const findExpandedBacktraceBtnAtIndex = index => findExpandBacktraceBtns().at(index);
+ const findExpandedBacktraceBtnAtIndex = (index) => findExpandBacktraceBtns().at(index);
afterEach(() => {
wrapper.destroy();
@@ -95,7 +95,7 @@ describe('detailedMetric', () => {
expect(wrapper.find('.js-toggle-button')).not.toBeNull();
- wrapper.findAll('.performance-bar-modal td:nth-child(2)').wrappers.forEach(request => {
+ wrapper.findAll('.performance-bar-modal td:nth-child(2)').wrappers.forEach((request) => {
expect(request.text()).toContain('world');
});
});
diff --git a/spec/frontend/performance_bar/index_spec.js b/spec/frontend/performance_bar/index_spec.js
index bcd2cbbd530..8d9c32b7f12 100644
--- a/spec/frontend/performance_bar/index_spec.js
+++ b/spec/frontend/performance_bar/index_spec.js
@@ -4,25 +4,23 @@ import '~/performance_bar/components/performance_bar_app.vue';
import performanceBar from '~/performance_bar';
import PerformanceBarService from '~/performance_bar/services/performance_bar_service';
+jest.mock('~/performance_bar/performance_bar_log');
+
describe('performance bar wrapper', () => {
let mock;
let vm;
beforeEach(() => {
+ setFixtures('<div id="js-peek"></div>');
+ const peekWrapper = document.getElementById('js-peek');
performance.getEntriesByType = jest.fn().mockReturnValue([]);
- // clear html so that elements from previous tests don't mess with this test
- document.body.innerHTML = '';
- const peekWrapper = document.createElement('div');
-
peekWrapper.setAttribute('id', 'js-peek');
peekWrapper.setAttribute('data-env', 'development');
peekWrapper.setAttribute('data-request-id', '123');
peekWrapper.setAttribute('data-peek-url', '/-/peek/results');
peekWrapper.setAttribute('data-profile-url', '?lineprofiler=true');
- document.body.appendChild(peekWrapper);
-
mock = new MockAdapter(axios);
mock.onGet('/-/peek/results').reply(
@@ -48,6 +46,7 @@ describe('performance bar wrapper', () => {
afterEach(() => {
vm.$destroy();
+ document.getElementById('js-peek').remove();
mock.restore();
});
diff --git a/spec/frontend/performance_bar/stores/performance_bar_store_spec.js b/spec/frontend/performance_bar/stores/performance_bar_store_spec.js
index 56bab71540c..94dc1237cb0 100644
--- a/spec/frontend/performance_bar/stores/performance_bar_store_spec.js
+++ b/spec/frontend/performance_bar/stores/performance_bar_store_spec.js
@@ -3,7 +3,7 @@ import PerformanceBarStore from '~/performance_bar/stores/performance_bar_store'
describe('PerformanceBarStore', () => {
describe('truncateUrl', () => {
let store;
- const findUrl = id => store.findRequest(id).truncatedUrl;
+ const findUrl = (id) => store.findRequest(id).truncatedUrl;
beforeEach(() => {
store = new PerformanceBarStore();
diff --git a/spec/frontend/pipeline_editor/components/commit/commit_form_spec.js b/spec/frontend/pipeline_editor/components/commit/commit_form_spec.js
index ae2a9e5065d..aae25a3aa6d 100644
--- a/spec/frontend/pipeline_editor/components/commit/commit_form_spec.js
+++ b/spec/frontend/pipeline_editor/components/commit/commit_form_spec.js
@@ -16,8 +16,8 @@ describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
...props,
},
- // attachToDocument is required for input/submit events
- attachToDocument: mountFn === mount,
+ // attachTo is required for input/submit events
+ attachTo: mountFn === mount ? document.body : null,
});
};
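(Editor's sketch of the attachTo migration shown above: attachToDocument was deprecated in @vue/test-utils 1.x in favour of attachTo, which takes an element instead of a boolean. The import path matches the specs in this patch; the rest is a minimal illustration, only needed when native input/submit events must reach the document.)

    import { mount } from '@vue/test-utils';
    import CommitForm from '~/pipeline_editor/components/commit/commit_form.vue';

    // attachTo replaces attachToDocument; pass the element to mount into
    const wrapper = mount(CommitForm, {
      attachTo: document.body,
    });

    // detach again so the next spec starts from a clean document
    wrapper.destroy();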
diff --git a/spec/frontend/pipeline_editor/components/info/validation_segment_spec.js b/spec/frontend/pipeline_editor/components/info/validation_segment_spec.js
new file mode 100644
index 00000000000..8a991d82018
--- /dev/null
+++ b/spec/frontend/pipeline_editor/components/info/validation_segment_spec.js
@@ -0,0 +1,113 @@
+import { escape } from 'lodash';
+import { shallowMount } from '@vue/test-utils';
+import { GlIcon } from '@gitlab/ui';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { sprintf } from '~/locale';
+import ValidationSegment, { i18n } from '~/pipeline_editor/components/info/validation_segment.vue';
+import { CI_CONFIG_STATUS_INVALID } from '~/pipeline_editor/constants';
+import { mockYmlHelpPagePath, mergeUnwrappedCiConfig } from '../../mock_data';
+
+describe('~/pipeline_editor/components/info/validation_segment.vue', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = extendedWrapper(
+ shallowMount(ValidationSegment, {
+ provide: {
+ ymlHelpPagePath: mockYmlHelpPagePath,
+ },
+ propsData: {
+ ciConfig: mergeUnwrappedCiConfig(),
+ loading: false,
+ ...props,
+ },
+ }),
+ );
+ };
+
+ const findIcon = () => wrapper.findComponent(GlIcon);
+ const findLearnMoreLink = () => wrapper.findByTestId('learnMoreLink');
+ const findValidationMsg = () => wrapper.findByTestId('validationMsg');
+
+ it('shows the loading state', () => {
+ createComponent({ loading: true });
+
+ expect(wrapper.text()).toBe(i18n.loading);
+ });
+
+ describe('when config is valid', () => {
+ beforeEach(() => {
+ createComponent({});
+ });
+
+ it('has check icon', () => {
+ expect(findIcon().props('name')).toBe('check');
+ });
+
+ it('shows a message for valid state', () => {
+ expect(findValidationMsg().text()).toContain(i18n.valid);
+ });
+
+ it('shows the learn more link', () => {
+ expect(findLearnMoreLink().attributes('href')).toBe(mockYmlHelpPagePath);
+ expect(findLearnMoreLink().text()).toBe(i18n.learnMore);
+ });
+ });
+
+ describe('when config is not valid', () => {
+ beforeEach(() => {
+ createComponent({
+ ciConfig: mergeUnwrappedCiConfig({
+ status: CI_CONFIG_STATUS_INVALID,
+ }),
+ });
+ });
+
+ it('has warning icon', () => {
+ expect(findIcon().props('name')).toBe('warning-solid');
+ });
+
+ it('has message for invalid state', () => {
+ expect(findValidationMsg().text()).toBe(i18n.invalid);
+ });
+
+ it('shows an invalid state with an error', () => {
+ const firstError = 'First Error';
+ const secondError = 'Second Error';
+
+ createComponent({
+ ciConfig: mergeUnwrappedCiConfig({
+ status: CI_CONFIG_STATUS_INVALID,
+ errors: [firstError, secondError],
+ }),
+ });
+
+ // Test the error is shown _and_ the string matches
+ expect(findValidationMsg().text()).toContain(firstError);
+ expect(findValidationMsg().text()).toBe(
+ sprintf(i18n.invalidWithReason, { reason: firstError }),
+ );
+ });
+
+ it('shows an invalid state with an error while preventing XSS', () => {
+ const evilError = '<script>evil();</script>';
+
+ createComponent({
+ ciConfig: mergeUnwrappedCiConfig({
+ status: CI_CONFIG_STATUS_INVALID,
+ errors: [evilError],
+ }),
+ });
+
+ const { innerHTML } = findValidationMsg().element;
+
+ expect(innerHTML).not.toContain(evilError);
+ expect(innerHTML).toContain(escape(evilError));
+ });
+
+ it('shows the learn more link', () => {
+ expect(findLearnMoreLink().attributes('href')).toBe(mockYmlHelpPagePath);
+ expect(findLearnMoreLink().text()).toBe('Learn more');
+ });
+ });
+});
diff --git a/spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js b/spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js
index e9c6ed60860..5e9471376bd 100644
--- a/spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js
+++ b/spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js
@@ -25,24 +25,51 @@ describe('CI Lint Results', () => {
};
const findTable = () => wrapper.find(GlTable);
- const findByTestId = selector => () => wrapper.find(`[data-testid="ci-lint-${selector}"]`);
- const findAllByTestId = selector => () => wrapper.findAll(`[data-testid="ci-lint-${selector}"]`);
+ const findByTestId = (selector) => () => wrapper.find(`[data-testid="ci-lint-${selector}"]`);
+ const findAllByTestId = (selector) => () =>
+ wrapper.findAll(`[data-testid="ci-lint-${selector}"]`);
const findLinkToDoc = () => wrapper.find(GlLink);
const findErrors = findByTestId('errors');
const findWarnings = findByTestId('warnings');
const findStatus = findByTestId('status');
const findOnlyExcept = findByTestId('only-except');
const findLintParameters = findAllByTestId('parameter');
+ const findLintValues = findAllByTestId('value');
const findBeforeScripts = findAllByTestId('before-script');
const findScripts = findAllByTestId('script');
const findAfterScripts = findAllByTestId('after-script');
- const filterEmptyScripts = property => mockJobs.filter(job => job[property].length !== 0);
+ const filterEmptyScripts = (property) => mockJobs.filter((job) => job[property].length !== 0);
afterEach(() => {
wrapper.destroy();
wrapper = null;
});
+ describe('Empty results', () => {
+ it('renders with no jobs, errors or warnings defined', () => {
+ createComponent({ jobs: undefined, errors: undefined, warnings: undefined }, shallowMount);
+ expect(findTable().exists()).toBe(true);
+ });
+
+ it('renders when job has no properties defined', () => {
+ // job with no attributes such as `tagList` or `environment`
+ const job = {
+ stage: 'Stage Name',
+ name: 'test job',
+ };
+ createComponent({ jobs: [job] }, mount);
+
+ const param = findLintParameters().at(0);
+ const value = findLintValues().at(0);
+
+ expect(param.text()).toBe(`${job.stage} Job - ${job.name}`);
+
+ // This test should be updated once properties of each job are shown
+ // See https://gitlab.com/gitlab-org/gitlab/-/issues/291031
+ expect(value.text()).toBe('');
+ });
+ });
+
describe('Invalid results', () => {
beforeEach(() => {
createComponent({ valid: false, errors: mockErrors, warnings: mockWarnings }, mount);
diff --git a/spec/frontend/pipeline_editor/components/lint/ci_lint_spec.js b/spec/frontend/pipeline_editor/components/lint/ci_lint_spec.js
new file mode 100644
index 00000000000..5ccf4bbdab4
--- /dev/null
+++ b/spec/frontend/pipeline_editor/components/lint/ci_lint_spec.js
@@ -0,0 +1,81 @@
+import { shallowMount, mount } from '@vue/test-utils';
+import { GlAlert, GlLink } from '@gitlab/ui';
+import CiLint from '~/pipeline_editor/components/lint/ci_lint.vue';
+import { CI_CONFIG_STATUS_INVALID } from '~/pipeline_editor/constants';
+import { mergeUnwrappedCiConfig, mockLintHelpPagePath } from '../../mock_data';
+
+describe('~/pipeline_editor/components/lint/ci_lint.vue', () => {
+ let wrapper;
+
+ const createComponent = (props = {}, mountFn = shallowMount) => {
+ wrapper = mountFn(CiLint, {
+ provide: {
+ lintHelpPagePath: mockLintHelpPagePath,
+ },
+ propsData: {
+ ciConfig: mergeUnwrappedCiConfig(),
+ ...props,
+ },
+ });
+ };
+
+ const findAllByTestId = (selector) => wrapper.findAll(`[data-testid="${selector}"]`);
+ const findAlert = () => wrapper.find(GlAlert);
+ const findLintParameters = () => findAllByTestId('ci-lint-parameter');
+ const findLintParameterAt = (i) => findLintParameters().at(i);
+ const findLintValueAt = (i) => findAllByTestId('ci-lint-value').at(i);
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('Valid Results', () => {
+ beforeEach(() => {
+ createComponent({}, mount);
+ });
+
+ it('displays valid results', () => {
+ expect(findAlert().text()).toMatch('Status: Syntax is correct.');
+ });
+
+ it('displays link to the right help page', () => {
+ expect(findAlert().find(GlLink).attributes('href')).toBe(mockLintHelpPagePath);
+ });
+
+ it('displays jobs', () => {
+ expect(findLintParameters()).toHaveLength(3);
+
+ expect(findLintParameterAt(0).text()).toBe('Test Job - job_test_1');
+ expect(findLintParameterAt(1).text()).toBe('Test Job - job_test_2');
+ expect(findLintParameterAt(2).text()).toBe('Build Job - job_build');
+ });
+
+ it('displays jobs details', () => {
+ expect(findLintParameters()).toHaveLength(3);
+
+ expect(findLintValueAt(0).text()).toMatchInterpolatedText(
+ 'echo "test 1" Only policy: branches, tags When: on_success',
+ );
+ expect(findLintValueAt(1).text()).toMatchInterpolatedText(
+ 'echo "test 2" Only policy: branches, tags When: on_success',
+ );
+ expect(findLintValueAt(2).text()).toMatchInterpolatedText(
+ 'echo "build" Only policy: branches, tags When: on_success',
+ );
+ });
+
+ it('displays invalid results', () => {
+ createComponent(
+ {
+ ciConfig: mergeUnwrappedCiConfig({
+ status: CI_CONFIG_STATUS_INVALID,
+ }),
+ },
+ mount,
+ );
+
+ expect(findAlert().text()).toMatch('Status: Syntax is incorrect.');
+ });
+ });
+});
diff --git a/spec/frontend/pipeline_editor/components/text_editor_spec.js b/spec/frontend/pipeline_editor/components/text_editor_spec.js
index 18f71ebc95c..9221d64c44b 100644
--- a/spec/frontend/pipeline_editor/components/text_editor_spec.js
+++ b/spec/frontend/pipeline_editor/components/text_editor_spec.js
@@ -1,30 +1,69 @@
import { shallowMount } from '@vue/test-utils';
-import EditorLite from '~/vue_shared/components/editor_lite.vue';
-import { mockCiYml } from '../mock_data';
+import {
+ mockCiConfigPath,
+ mockCiYml,
+ mockCommitSha,
+ mockProjectPath,
+ mockProjectNamespace,
+} from '../mock_data';
import TextEditor from '~/pipeline_editor/components/text_editor.vue';
describe('~/pipeline_editor/components/text_editor.vue', () => {
let wrapper;
- const editorReadyListener = jest.fn();
- const createComponent = (attrs = {}, mountFn = shallowMount) => {
+ let editorReadyListener;
+ let mockUse;
+ let mockRegisterCiSchema;
+
+ const MockEditorLite = {
+ template: '<div/>',
+ props: ['value', 'fileName'],
+ mounted() {
+ this.$emit('editor-ready');
+ },
+ methods: {
+ getEditor: () => ({
+ use: mockUse,
+ registerCiSchema: mockRegisterCiSchema,
+ }),
+ },
+ };
+
+ const createComponent = (opts = {}, mountFn = shallowMount) => {
wrapper = mountFn(TextEditor, {
+ provide: {
+ projectPath: mockProjectPath,
+ projectNamespace: mockProjectNamespace,
+ },
+ propsData: {
+ ciConfigPath: mockCiConfigPath,
+ commitSha: mockCommitSha,
+ },
attrs: {
value: mockCiYml,
- ...attrs,
},
listeners: {
'editor-ready': editorReadyListener,
},
+ stubs: {
+ EditorLite: MockEditorLite,
+ },
+ ...opts,
});
};
- const findEditor = () => wrapper.find(EditorLite);
+ const findEditor = () => wrapper.find(MockEditorLite);
+
+ beforeEach(() => {
+ editorReadyListener = jest.fn();
+ mockUse = jest.fn();
+ mockRegisterCiSchema = jest.fn();
- it('contains an editor', () => {
createComponent();
+ });
+ it('contains an editor', () => {
expect(findEditor().exists()).toBe(true);
});
@@ -32,8 +71,18 @@ describe('~/pipeline_editor/components/text_editor.vue', () => {
expect(findEditor().props('value')).toBe(mockCiYml);
});
- it('editor is configured for .yml', () => {
- expect(findEditor().props('fileName')).toBe('*.yml');
+ it('editor is configured for the CI config path', () => {
+ expect(findEditor().props('fileName')).toBe(mockCiConfigPath);
+ });
+
+ it('editor is configured with syntax highlighting', async () => {
+ expect(mockUse).toHaveBeenCalledTimes(1);
+ expect(mockRegisterCiSchema).toHaveBeenCalledTimes(1);
+ expect(mockRegisterCiSchema).toHaveBeenCalledWith({
+ projectNamespace: mockProjectNamespace,
+ projectPath: mockProjectPath,
+ ref: mockCommitSha,
+ });
});
it('bubbles up events', () => {
diff --git a/spec/frontend/pipeline_editor/components/ui/editor_tab_spec.js b/spec/frontend/pipeline_editor/components/ui/editor_tab_spec.js
new file mode 100644
index 00000000000..d3d9bf08209
--- /dev/null
+++ b/spec/frontend/pipeline_editor/components/ui/editor_tab_spec.js
@@ -0,0 +1,91 @@
+import { nextTick } from 'vue';
+import { mount } from '@vue/test-utils';
+import { GlTabs } from '@gitlab/ui';
+
+import EditorTab from '~/pipeline_editor/components/ui/editor_tab.vue';
+
+const mockContent1 = 'MOCK CONTENT 1';
+const mockContent2 = 'MOCK CONTENT 2';
+
+describe('~/pipeline_editor/components/ui/editor_tab.vue', () => {
+ let wrapper;
+ let mockChildMounted = jest.fn();
+
+ const MockChild = {
+ props: ['content'],
+ template: '<div>{{content}}</div>',
+ mounted() {
+ mockChildMounted(this.content);
+ },
+ };
+
+ const MockTabbedContent = {
+ components: {
+ EditorTab,
+ GlTabs,
+ MockChild,
+ },
+ template: `
+ <gl-tabs>
+ <editor-tab :title-link-attributes="{ 'data-testid': 'tab1-btn' }" :lazy="true">
+ <mock-child content="${mockContent1}"/>
+ </editor-tab>
+ <editor-tab :title-link-attributes="{ 'data-testid': 'tab2-btn' }" :lazy="true">
+ <mock-child content="${mockContent2}"/>
+ </editor-tab>
+ </gl-tabs>
+ `,
+ };
+
+ const createWrapper = () => {
+ wrapper = mount(MockTabbedContent);
+ };
+
+ beforeEach(() => {
+ mockChildMounted = jest.fn();
+ });
+
+ it('tabs are mounted lazily', async () => {
+ createWrapper();
+
+ expect(mockChildMounted).toHaveBeenCalledTimes(0);
+ });
+
+ it('first tab is only mounted after nextTick', async () => {
+ createWrapper();
+
+ await nextTick();
+
+ expect(mockChildMounted).toHaveBeenCalledTimes(1);
+ expect(mockChildMounted).toHaveBeenCalledWith(mockContent1);
+ });
+
+ describe('user interaction', () => {
+ const clickTab = async (testid) => {
+ wrapper.find(`[data-testid="${testid}"]`).trigger('click');
+ await nextTick();
+ };
+
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ it('mounts a tab once after selecting it', async () => {
+ await clickTab('tab2-btn');
+
+ expect(mockChildMounted).toHaveBeenCalledTimes(2);
+ expect(mockChildMounted).toHaveBeenNthCalledWith(1, mockContent1);
+ expect(mockChildMounted).toHaveBeenNthCalledWith(2, mockContent2);
+ });
+
+ it('mounts each tab once after selecting each', async () => {
+ await clickTab('tab2-btn');
+ await clickTab('tab1-btn');
+ await clickTab('tab2-btn');
+
+ expect(mockChildMounted).toHaveBeenCalledTimes(2);
+ expect(mockChildMounted).toHaveBeenNthCalledWith(1, mockContent1);
+ expect(mockChildMounted).toHaveBeenNthCalledWith(2, mockContent2);
+ });
+ });
+});
diff --git a/spec/frontend/pipeline_editor/graphql/__snapshots__/resolvers_spec.js.snap b/spec/frontend/pipeline_editor/graphql/__snapshots__/resolvers_spec.js.snap
index d7d4d0af90c..8670c44f6f6 100644
--- a/spec/frontend/pipeline_editor/graphql/__snapshots__/resolvers_spec.js.snap
+++ b/spec/frontend/pipeline_editor/graphql/__snapshots__/resolvers_spec.js.snap
@@ -27,7 +27,7 @@ Object {
"echo 'script 1'",
],
"stage": "test",
- "tagList": Array [
+ "tags": Array [
"tag 1",
],
"when": "on_success",
@@ -61,7 +61,7 @@ Object {
"echo 'script 2'",
],
"stage": "test",
- "tagList": Array [
+ "tags": Array [
"tag 2",
],
"when": "on_success",
diff --git a/spec/frontend/pipeline_editor/graphql/resolvers_spec.js b/spec/frontend/pipeline_editor/graphql/resolvers_spec.js
index b531f8af797..3e008527415 100644
--- a/spec/frontend/pipeline_editor/graphql/resolvers_spec.js
+++ b/spec/frontend/pipeline_editor/graphql/resolvers_spec.js
@@ -5,7 +5,7 @@ import {
mockCiYml,
mockDefaultBranch,
mockLintResponse,
- mockProjectPath,
+ mockProjectFullPath,
} from '../mock_data';
import httpStatus from '~/lib/utils/http_status';
import axios from '~/lib/utils/axios_utils';
@@ -32,12 +32,12 @@ describe('~/pipeline_editor/graphql/resolvers', () => {
it('resolves lint data with type names', async () => {
const result = resolvers.Query.blobContent(null, {
- projectPath: mockProjectPath,
+ projectPath: mockProjectFullPath,
path: mockCiConfigPath,
ref: mockDefaultBranch,
});
- expect(Api.getRawFile).toHaveBeenCalledWith(mockProjectPath, mockCiConfigPath, {
+ expect(Api.getRawFile).toHaveBeenCalledWith(mockProjectFullPath, mockCiConfigPath, {
ref: mockDefaultBranch,
});
diff --git a/spec/frontend/pipeline_editor/mock_data.js b/spec/frontend/pipeline_editor/mock_data.js
index d882490c272..3eacc467c51 100644
--- a/spec/frontend/pipeline_editor/mock_data.js
+++ b/spec/frontend/pipeline_editor/mock_data.js
@@ -1,27 +1,142 @@
-export const mockProjectPath = 'user1/project1';
+import { CI_CONFIG_STATUS_VALID } from '~/pipeline_editor/constants';
+import { unwrapStagesWithNeeds } from '~/pipelines/components/unwrapping_utils';
+
+export const mockProjectNamespace = 'user1';
+export const mockProjectPath = 'project1';
+export const mockProjectFullPath = `${mockProjectNamespace}/${mockProjectPath}`;
export const mockDefaultBranch = 'master';
export const mockNewMergeRequestPath = '/-/merge_requests/new';
-export const mockCommitId = 'aabbccdd';
+export const mockCommitSha = 'aabbccdd';
+export const mockCommitNextSha = 'eeffgghh';
+export const mockLintHelpPagePath = '/-/lint-help';
+export const mockYmlHelpPagePath = '/-/yml-help';
export const mockCommitMessage = 'My commit message';
export const mockCiConfigPath = '.gitlab-ci.yml';
export const mockCiYml = `
-job1:
+stages:
+ - test
+ - build
+
+job_test_1:
stage: test
- script:
- - echo 'test'
+ script:
+ - echo "test 1"
+
+job_test_2:
+ stage: test
+ script:
+ - echo "test 2"
+
+job_build:
+ stage: build
+ script:
+ - echo "build"
+ needs: ["job_test_2"]
`;
+const mockJobFields = {
+ beforeScript: [],
+ afterScript: [],
+ environment: null,
+ allowFailure: false,
+ tags: [],
+ when: 'on_success',
+ only: { refs: ['branches', 'tags'], __typename: 'CiJobLimitType' },
+ except: null,
+ needs: { nodes: [], __typename: 'CiConfigNeedConnection' },
+ __typename: 'CiConfigJob',
+};
+
+// Mock result of the graphql query at:
+// app/assets/javascripts/pipeline_editor/graphql/queries/ci_config.graphql
export const mockCiConfigQueryResponse = {
data: {
ciConfig: {
errors: [],
- stages: [],
- status: '',
+ status: CI_CONFIG_STATUS_VALID,
+ stages: {
+ __typename: 'CiConfigStageConnection',
+ nodes: [
+ {
+ name: 'test',
+ groups: {
+ nodes: [
+ {
+ name: 'job_test_1',
+ size: 1,
+ jobs: {
+ nodes: [
+ {
+ name: 'job_test_1',
+ script: ['echo "test 1"'],
+ ...mockJobFields,
+ },
+ ],
+ __typename: 'CiConfigJobConnection',
+ },
+ __typename: 'CiConfigGroup',
+ },
+ {
+ name: 'job_test_2',
+ size: 1,
+ jobs: {
+ nodes: [
+ {
+ name: 'job_test_2',
+ script: ['echo "test 2"'],
+ ...mockJobFields,
+ },
+ ],
+ __typename: 'CiConfigJobConnection',
+ },
+ __typename: 'CiConfigGroup',
+ },
+ ],
+ __typename: 'CiConfigGroupConnection',
+ },
+ __typename: 'CiConfigStage',
+ },
+ {
+ name: 'build',
+ groups: {
+ nodes: [
+ {
+ name: 'job_build',
+ size: 1,
+ jobs: {
+ nodes: [
+ {
+ name: 'job_build',
+ script: ['echo "build"'],
+ ...mockJobFields,
+ },
+ ],
+ __typename: 'CiConfigJobConnection',
+ },
+ __typename: 'CiConfigGroup',
+ },
+ ],
+ __typename: 'CiConfigGroupConnection',
+ },
+ __typename: 'CiConfigStage',
+ },
+ ],
+ },
+ __typename: 'CiConfig',
},
},
};
+export const mergeUnwrappedCiConfig = (mergedConfig) => {
+ const { ciConfig } = mockCiConfigQueryResponse.data;
+ return {
+ ...ciConfig,
+ stages: unwrapStagesWithNeeds(ciConfig.stages.nodes),
+ ...mergedConfig,
+ };
+};
+
export const mockLintResponse = {
valid: true,
errors: [],
diff --git a/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js b/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
index 14d6b03645c..d6b90900600 100644
--- a/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
+++ b/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
@@ -1,32 +1,29 @@
import { nextTick } from 'vue';
import { mount, shallowMount, createLocalVue } from '@vue/test-utils';
-import {
- GlAlert,
- GlButton,
- GlFormInput,
- GlFormTextarea,
- GlLoadingIcon,
- GlTabs,
- GlTab,
-} from '@gitlab/ui';
+import { GlAlert, GlButton, GlFormInput, GlFormTextarea, GlLoadingIcon, GlTabs } from '@gitlab/ui';
import waitForPromises from 'helpers/wait_for_promises';
import VueApollo from 'vue-apollo';
-import createMockApollo from 'jest/helpers/mock_apollo_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import httpStatusCodes from '~/lib/utils/http_status';
import { objectToQuery, redirectTo, refreshCurrentPage } from '~/lib/utils/url_utility';
import {
mockCiConfigPath,
mockCiConfigQueryResponse,
mockCiYml,
- mockCommitId,
+ mockCommitSha,
+ mockCommitNextSha,
mockCommitMessage,
mockDefaultBranch,
mockProjectPath,
+ mockProjectFullPath,
+ mockProjectNamespace,
mockNewMergeRequestPath,
} from './mock_data';
import CommitForm from '~/pipeline_editor/components/commit/commit_form.vue';
-import getCiConfig from '~/pipeline_editor/graphql/queries/ci_config.graphql';
+import getCiConfigData from '~/pipeline_editor/graphql/queries/ci_config.graphql';
+import EditorTab from '~/pipeline_editor/components/ui/editor_tab.vue';
import PipelineGraph from '~/pipelines/components/pipeline_graph/pipeline_graph.vue';
import PipelineEditorApp from '~/pipeline_editor/pipeline_editor_app.vue';
import TextEditor from '~/pipeline_editor/components/text_editor.vue';
@@ -41,6 +38,19 @@ jest.mock('~/lib/utils/url_utility', () => ({
mergeUrlParams: jest.requireActual('~/lib/utils/url_utility').mergeUrlParams,
}));
+const MockEditorLite = {
+ template: '<div/>',
+};
+
+const mockProvide = {
+ projectFullPath: mockProjectFullPath,
+ projectPath: mockProjectPath,
+ projectNamespace: mockProjectNamespace,
+ glFeatures: {
+ ciConfigVisualizationTab: true,
+ },
+};
+
describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
let wrapper;
@@ -55,17 +65,15 @@ describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
lintLoading = false,
options = {},
mountFn = shallowMount,
- provide = {
- glFeatures: {
- ciConfigVisualizationTab: true,
- },
- },
+ provide = mockProvide,
} = {}) => {
mockMutate = jest.fn().mockResolvedValue({
data: {
commitCreate: {
errors: [],
- commit: {},
+ commit: {
+ sha: mockCommitNextSha,
+ },
},
},
});
@@ -73,9 +81,8 @@ describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
wrapper = mountFn(PipelineEditorApp, {
propsData: {
ciConfigPath: mockCiConfigPath,
- commitId: mockCommitId,
+ commitSha: mockCommitSha,
defaultBranch: mockDefaultBranch,
- projectPath: mockProjectPath,
newMergeRequestPath: mockNewMergeRequestPath,
...props,
},
@@ -84,9 +91,7 @@ describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
GlTabs,
GlButton,
CommitForm,
- EditorLite: {
- template: '<div/>',
- },
+ EditorLite: MockEditorLite,
TextEditor,
},
mocks: {
@@ -102,14 +107,14 @@ describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
mutate: mockMutate,
},
},
- // attachToDocument is required for input/submit events
- attachToDocument: mountFn === mount,
+ // attachTo is required for input/submit events
+ attachTo: mountFn === mount ? document.body : null,
...options,
});
};
const createComponentWithApollo = ({ props = {}, mountFn = shallowMount } = {}) => {
- const handlers = [[getCiConfig, mockCiConfigData]];
+ const handlers = [[getCiConfigData, mockCiConfigData]];
const resolvers = {
Query: {
blobContent() {
@@ -134,17 +139,17 @@ describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
const findAlert = () => wrapper.find(GlAlert);
- const findBlobFailureAlert = () => wrapper.find(GlAlert);
- const findTabAt = i => wrapper.findAll(GlTab).at(i);
+ const findTabAt = (i) => wrapper.findAll(EditorTab).at(i);
const findVisualizationTab = () => wrapper.find('[data-testid="visualization-tab"]');
const findTextEditor = () => wrapper.find(TextEditor);
+ const findEditorLite = () => wrapper.find(MockEditorLite);
const findCommitForm = () => wrapper.find(CommitForm);
const findPipelineGraph = () => wrapper.find(PipelineGraph);
const findCommitBtnLoadingIcon = () => wrapper.find('[type="submit"]').find(GlLoadingIcon);
beforeEach(() => {
mockBlobContentData = jest.fn();
- mockCiConfigData = jest.fn().mockResolvedValue(mockCiConfigQueryResponse);
+ mockCiConfigData = jest.fn();
});
afterEach(() => {
@@ -167,26 +172,14 @@ describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
describe('tabs', () => {
describe('editor tab', () => {
- beforeEach(() => {
- createComponent();
- });
+ it('displays editor only after the tab is mounted', async () => {
+ createComponent({ mountFn: mount });
- it('displays the tab and its content', async () => {
- expect(
- findTabAt(0)
- .find(TextEditor)
- .exists(),
- ).toBe(true);
- });
-
- it('displays tab lazily, until editor is ready', async () => {
- expect(findTabAt(0).attributes('lazy')).toBe('true');
-
- findTextEditor().vm.$emit('editor-ready');
+ expect(findTabAt(0).find(TextEditor).exists()).toBe(false);
await nextTick();
- expect(findTabAt(0).attributes('lazy')).toBe(undefined);
+ expect(findTabAt(0).find(TextEditor).exists()).toBe(true);
});
});
@@ -210,7 +203,12 @@ describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
describe('with feature flag off', () => {
beforeEach(() => {
- createComponent({ provide: { glFeatures: { ciConfigVisualizationTab: false } } });
+ createComponent({
+ provide: {
+ ...mockProvide,
+ glFeatures: { ciConfigVisualizationTab: false },
+ },
+ });
});
it('does not display the tab', () => {
@@ -224,28 +222,36 @@ describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
beforeEach(async () => {
createComponent({ mountFn: mount });
- await wrapper.setData({
+ wrapper.setData({
content: mockCiYml,
contentModel: mockCiYml,
});
+
+ await waitForPromises();
});
it('displays content after the query loads', () => {
expect(findLoadingIcon().exists()).toBe(false);
- expect(findTextEditor().attributes('value')).toBe(mockCiYml);
+
+ expect(findEditorLite().attributes('value')).toBe(mockCiYml);
+ expect(findEditorLite().attributes('file-name')).toBe(mockCiConfigPath);
+ });
+
+ it('configures text editor', () => {
+ expect(findTextEditor().props('commitSha')).toBe(mockCommitSha);
});
describe('commit form', () => {
const mockVariables = {
content: mockCiYml,
filePath: mockCiConfigPath,
- lastCommitId: mockCommitId,
+ lastCommitId: mockCommitSha,
message: mockCommitMessage,
- projectPath: mockProjectPath,
+ projectPath: mockProjectFullPath,
startBranch: mockDefaultBranch,
};
- const findInForm = selector => findCommitForm().find(selector);
+ const findInForm = (selector) => findCommitForm().find(selector);
const submitCommit = async ({
message = mockCommitMessage,
@@ -280,13 +286,29 @@ describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
});
});
- it('refreshes the page', () => {
- expect(refreshCurrentPage).toHaveBeenCalled();
+ it('displays an alert to indicate success', () => {
+ expect(findAlert().text()).toMatchInterpolatedText(
+ 'Your changes have been successfully committed.',
+ );
});
it('shows no saving state', () => {
expect(findCommitBtnLoadingIcon().exists()).toBe(false);
});
+
+ it('a second commit submits the latest sha, keeping the form updated', async () => {
+ await submitCommit();
+
+ expect(mockMutate).toHaveBeenCalledTimes(2);
+ expect(mockMutate).toHaveBeenLastCalledWith({
+ mutation: expect.any(Object),
+ variables: {
+ ...mockVariables,
+ lastCommitId: mockCommitNextSha,
+ branch: mockDefaultBranch,
+ },
+ });
+ });
});
describe('when the user commits changes to a new branch', () => {
@@ -307,10 +329,6 @@ describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
},
});
});
-
- it('refreshes the page', () => {
- expect(refreshCurrentPage).toHaveBeenCalledWith();
- });
});
describe('when the user commits changes to open a new merge request', () => {
@@ -349,7 +367,7 @@ describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
});
describe('when the commit fails', () => {
- it('shows a the error message', async () => {
+ it('shows an error message', async () => {
mockMutate.mockRejectedValueOnce(new Error('commit failed'));
await submitCommit();
@@ -385,61 +403,90 @@ describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
it('content is restored after cancel is called', async () => {
await cancelCommitForm();
- expect(findTextEditor().attributes('value')).toBe(mockCiYml);
+ expect(findEditorLite().attributes('value')).toBe(mockCiYml);
});
});
});
});
- describe('displays fetch content errors', () => {
- it('no error is shown when data is set', async () => {
+ describe('when queries are called', () => {
+ beforeEach(() => {
mockBlobContentData.mockResolvedValue(mockCiYml);
- createComponentWithApollo();
+ mockCiConfigData.mockResolvedValue(mockCiConfigQueryResponse);
+ });
- await waitForPromises();
+ describe('when file exists', () => {
+ beforeEach(async () => {
+ createComponentWithApollo();
- expect(findBlobFailureAlert().exists()).toBe(false);
- expect(findTextEditor().attributes('value')).toBe(mockCiYml);
- });
+ await waitForPromises();
+ });
- it('shows a 404 error message', async () => {
- mockBlobContentData.mockRejectedValueOnce({
- response: {
- status: 404,
- },
+ it('shows editor and commit form', () => {
+ expect(findEditorLite().exists()).toBe(true);
+ expect(findTextEditor().exists()).toBe(true);
});
- createComponentWithApollo();
- await waitForPromises();
+ it('no error is shown when data is set', async () => {
+ expect(findAlert().exists()).toBe(false);
+ expect(findEditorLite().attributes('value')).toBe(mockCiYml);
+ });
+
+ it('ci config query is called with correct variables', async () => {
+ createComponentWithApollo();
- expect(findBlobFailureAlert().text()).toBe(
- 'No CI file found in this repository, please add one.',
- );
+ await waitForPromises();
+
+ expect(mockCiConfigData).toHaveBeenCalledWith({
+ content: mockCiYml,
+ projectPath: mockProjectFullPath,
+ });
+ });
});
- it('shows a 400 error message', async () => {
- mockBlobContentData.mockRejectedValueOnce({
- response: {
- status: 400,
- },
+ describe('when no file exists', () => {
+ const expectedAlertMsg =
+ 'There is no .gitlab-ci.yml file in this repository, please add one and visit the Pipeline Editor again.';
+
+ it('shows a 404 error message and does not show editor or commit form', async () => {
+ mockBlobContentData.mockRejectedValueOnce({
+ response: {
+ status: httpStatusCodes.NOT_FOUND,
+ },
+ });
+ createComponentWithApollo();
+
+ await waitForPromises();
+
+ expect(findAlert().text()).toBe(expectedAlertMsg);
+ expect(findEditorLite().exists()).toBe(false);
+ expect(findTextEditor().exists()).toBe(false);
});
- createComponentWithApollo();
- await waitForPromises();
+ it('shows a 400 error message and does not show editor or commit form', async () => {
+ mockBlobContentData.mockRejectedValueOnce({
+ response: {
+ status: httpStatusCodes.BAD_REQUEST,
+ },
+ });
+ createComponentWithApollo();
- expect(findBlobFailureAlert().text()).toBe(
- 'Repository does not have a default branch, please set one.',
- );
- });
+ await waitForPromises();
- it('shows a unkown error message', async () => {
- mockBlobContentData.mockRejectedValueOnce(new Error('My error!'));
- createComponentWithApollo();
- await waitForPromises();
+ expect(findAlert().text()).toBe(expectedAlertMsg);
+ expect(findEditorLite().exists()).toBe(false);
+ expect(findTextEditor().exists()).toBe(false);
+ });
+
+ it('shows an unknown error message', async () => {
+ mockBlobContentData.mockRejectedValueOnce(new Error('My error!'));
+ createComponentWithApollo();
+ await waitForPromises();
- expect(findBlobFailureAlert().text()).toBe(
- 'The CI configuration was not loaded, please try again.',
- );
+ expect(findAlert().text()).toBe('The CI configuration was not loaded, please try again.');
+ expect(findEditorLite().exists()).toBe(true);
+ expect(findTextEditor().exists()).toBe(true);
+ });
});
});
});
diff --git a/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js b/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js
index b42339f626e..421ad9f4939 100644
--- a/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js
+++ b/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js
@@ -44,10 +44,7 @@ describe('Pipeline New Form', () => {
const findWarnings = () => wrapper.findAll('[data-testid="run-pipeline-warning"]');
const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
const getExpectedPostParams = () => JSON.parse(mock.history.post[0].data);
- const changeRef = i =>
- findDropdownItems()
- .at(i)
- .vm.$emit('click');
+ const changeRef = (i) => findDropdownItems().at(i).vm.$emit('click');
const createComponent = (term = '', props = {}, method = shallowMount) => {
wrapper = method(PipelineNewForm, {
@@ -99,11 +96,7 @@ describe('Pipeline New Form', () => {
createComponent('master');
expect(findDropdownItems()).toHaveLength(1);
- expect(
- findDropdownItems()
- .at(0)
- .text(),
- ).toBe('master');
+ expect(findDropdownItems().at(0).text()).toBe('master');
});
});
@@ -136,9 +129,7 @@ describe('Pipeline New Form', () => {
});
it('removes ci variable row on remove icon button click', async () => {
- findRemoveIcons()
- .at(1)
- .trigger('click');
+ findRemoveIcons().at(1).trigger('click');
await wrapper.vm.$nextTick();
@@ -298,26 +289,16 @@ describe('Pipeline New Form', () => {
});
it('adds a description to the first variable from yml', () => {
- expect(
- findVariableRows()
- .at(0)
- .text(),
- ).toContain(mockYmlDesc);
+ expect(findVariableRows().at(0).text()).toContain(mockYmlDesc);
});
it('removes the description when a variable key changes', async () => {
findKeyInputs().at(0).element.value = 'yml_var_modified';
- findKeyInputs()
- .at(0)
- .trigger('change');
+ findKeyInputs().at(0).trigger('change');
await wrapper.vm.$nextTick();
- expect(
- findVariableRows()
- .at(0)
- .text(),
- ).not.toContain(mockYmlDesc);
+ expect(findVariableRows().at(0).text()).not.toContain(mockYmlDesc);
});
});
diff --git a/spec/frontend/pipelines/blank_state_spec.js b/spec/frontend/pipelines/blank_state_spec.js
index bb069fdc2c8..c09d9232569 100644
--- a/spec/frontend/pipelines/blank_state_spec.js
+++ b/spec/frontend/pipelines/blank_state_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
+import mountComponent from 'helpers/vue_mount_component_helper';
import component from '~/pipelines/components/pipelines_list/blank_state.vue';
-import mountComponent from '../helpers/vue_mount_component_helper';
describe('Pipelines Blank State', () => {
let vm;
diff --git a/spec/frontend/pipelines/components/dag/dag_annotations_spec.js b/spec/frontend/pipelines/components/dag/dag_annotations_spec.js
index 5747c91bee8..80807c0b330 100644
--- a/spec/frontend/pipelines/components/dag/dag_annotations_spec.js
+++ b/spec/frontend/pipelines/components/dag/dag_annotations_spec.js
@@ -66,11 +66,7 @@ describe('The DAG annotations', () => {
expect(getAllTextBlocks().length).toBe(Object.keys(multiNote).length);
Object.values(multiNote).forEach((item, idx) => {
- expect(
- getAllTextBlocks()
- .at(idx)
- .text(),
- ).toBe(`${item.source.name} → ${item.target.name}`);
+ expect(getAllTextBlocks().at(idx).text()).toBe(`${item.source.name} → ${item.target.name}`);
});
});
diff --git a/spec/frontend/pipelines/components/dag/dag_graph_spec.js b/spec/frontend/pipelines/components/dag/dag_graph_spec.js
index 7786212cb69..ccfb2ae7cee 100644
--- a/spec/frontend/pipelines/components/dag/dag_graph_spec.js
+++ b/spec/frontend/pipelines/components/dag/dag_graph_spec.js
@@ -20,7 +20,7 @@ describe('The DAG graph', () => {
}
wrapper = shallowMount(DagGraph, {
- attachToDocument: true,
+ attachTo: document.body,
propsData,
data() {
return {
@@ -88,17 +88,13 @@ describe('The DAG graph', () => {
});
it('renders the title as text', () => {
- expect(
- getAllLabels()
- .at(0)
- .text(),
- ).toBe(parsedData.nodes[0].name);
+ expect(getAllLabels().at(0).text()).toBe(parsedData.nodes[0].name);
});
});
});
describe('interactions', () => {
- const strokeOpacity = opacity => `stroke-opacity: ${opacity};`;
+ const strokeOpacity = (opacity) => `stroke-opacity: ${opacity};`;
const baseOpacity = () => wrapper.vm.$options.viewOptions.baseOpacity;
describe('links', () => {
@@ -168,10 +164,10 @@ describe('The DAG graph', () => {
describe('nodes', () => {
const liveNode = () => getAllNodes().at(10);
const anotherLiveNode = () => getAllNodes().at(5);
- const nodesNotHighlighted = () => getAllNodes().filter(n => !n.classes(IS_HIGHLIGHTED));
- const linksNotHighlighted = () => getAllLinks().filter(n => !n.classes(IS_HIGHLIGHTED));
- const nodesHighlighted = () => getAllNodes().filter(n => n.classes(IS_HIGHLIGHTED));
- const linksHighlighted = () => getAllLinks().filter(n => n.classes(IS_HIGHLIGHTED));
+ const nodesNotHighlighted = () => getAllNodes().filter((n) => !n.classes(IS_HIGHLIGHTED));
+ const linksNotHighlighted = () => getAllLinks().filter((n) => !n.classes(IS_HIGHLIGHTED));
+ const nodesHighlighted = () => getAllNodes().filter((n) => n.classes(IS_HIGHLIGHTED));
+ const linksHighlighted = () => getAllLinks().filter((n) => n.classes(IS_HIGHLIGHTED));
describe('on click', () => {
it('highlights the clicked node and predecessors', () => {
@@ -180,19 +176,19 @@ describe('The DAG graph', () => {
expect(nodesNotHighlighted().length < getAllNodes().length).toBe(true);
expect(linksNotHighlighted().length < getAllLinks().length).toBe(true);
- linksHighlighted().wrappers.forEach(link => {
+ linksHighlighted().wrappers.forEach((link) => {
expect(link.attributes('style')).toBe(strokeOpacity(highlightIn));
});
- nodesHighlighted().wrappers.forEach(node => {
+ nodesHighlighted().wrappers.forEach((node) => {
expect(node.attributes('stroke')).not.toBe('#f2f2f2');
});
- linksNotHighlighted().wrappers.forEach(link => {
+ linksNotHighlighted().wrappers.forEach((link) => {
expect(link.attributes('style')).toBe(strokeOpacity(highlightOut));
});
- nodesNotHighlighted().wrappers.forEach(node => {
+ nodesNotHighlighted().wrappers.forEach((node) => {
expect(node.attributes('stroke')).toBe('#f2f2f2');
});
});
diff --git a/spec/frontend/pipelines/components/dag/dag_spec.js b/spec/frontend/pipelines/components/dag/dag_spec.js
index 08a43199594..f6195e30e44 100644
--- a/spec/frontend/pipelines/components/dag/dag_spec.js
+++ b/spec/frontend/pipelines/components/dag/dag_spec.js
@@ -21,7 +21,7 @@ describe('Pipeline DAG graph wrapper', () => {
const getAllAlerts = () => wrapper.findAll(GlAlert);
const getGraph = () => wrapper.find(DagGraph);
const getNotes = () => wrapper.find(DagAnnotations);
- const getErrorText = type => wrapper.vm.$options.errorTexts[type];
+ const getErrorText = (type) => wrapper.vm.$options.errorTexts[type];
const getEmptyState = () => wrapper.find(GlEmptyState);
const createComponent = ({
diff --git a/spec/frontend/pipelines/components/dag/parsing_utils_spec.js b/spec/frontend/pipelines/components/dag/parsing_utils_spec.js
index ceb6b64d4ad..5d3f680a57c 100644
--- a/spec/frontend/pipelines/components/dag/parsing_utils_spec.js
+++ b/spec/frontend/pipelines/components/dag/parsing_utils_spec.js
@@ -30,7 +30,10 @@ describe('DAG visualization parsing utilities', () => {
{ source: 'job2', target: 'job4' },
];
- const dedupedLinks = [{ source: 'job1', target: 'job2' }, { source: 'job2', target: 'job4' }];
+ const dedupedLinks = [
+ { source: 'job1', target: 'job2' },
+ { source: 'job2', target: 'job4' },
+ ];
const nodeLookup = {
job1: {
diff --git a/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js b/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js
index 8a6586a7d7d..00fe9e784b3 100644
--- a/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js
+++ b/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js
@@ -11,10 +11,10 @@ describe('Pipelines filtered search', () => {
let mock;
const findFilteredSearch = () => wrapper.find(GlFilteredSearch);
- const getSearchToken = type =>
+ const getSearchToken = (type) =>
findFilteredSearch()
.props('availableTokens')
- .find(token => token.type === type);
+ .find((token) => token.type === type);
const findBranchToken = () => getSearchToken('ref');
const findTagToken = () => getSearchToken('tag');
const findUserToken = () => getSearchToken('username');
@@ -26,7 +26,7 @@ describe('Pipelines filtered search', () => {
projectId: '21',
params,
},
- attachToDocument: true,
+ attachTo: document.body,
});
};
diff --git a/spec/frontend/pipelines/empty_state_spec.js b/spec/frontend/pipelines/empty_state_spec.js
index 28a73c8863c..7e42a3b5ae9 100644
--- a/spec/frontend/pipelines/empty_state_spec.js
+++ b/spec/frontend/pipelines/empty_state_spec.js
@@ -1,5 +1,7 @@
import { shallowMount } from '@vue/test-utils';
+import { withGonExperiment } from 'helpers/experimentation_helper';
import EmptyState from '~/pipelines/components/pipelines_list/empty_state.vue';
+import Tracking from '~/tracking';
describe('Pipelines Empty State', () => {
let wrapper;
@@ -38,15 +40,104 @@ describe('Pipelines Empty State', () => {
expect(findGetStartedButton().attributes('href')).toBe('foo');
});
- it('should render empty state information', () => {
- expect(findInfoText()).toContain(
- 'Continuous Integration can help catch bugs by running your tests automatically',
- 'while Continuous Deployment can help you deliver code to your product environment',
- );
+ describe('when in control group', () => {
+ it('should render empty state information', () => {
+ expect(findInfoText()).toContain(
+ 'Continuous Integration can help catch bugs by running your tests automatically',
+ 'while Continuous Deployment can help you deliver code to your product environment',
+ );
+ });
+
+ it('should render a button', () => {
+ expect(findGetStartedButton().text()).toBe('Get started with Pipelines');
+ });
+ });
+
+ describe('when in experiment group', () => {
+ withGonExperiment('pipelinesEmptyState');
+
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ it('should render empty state information', () => {
+ expect(findInfoText()).toContain(
+ 'GitLab CI/CD can automatically build, test, and deploy your code. Let GitLab take care of time',
+ 'consuming tasks, so you can spend more time creating',
+ );
+ });
+
+ it('should render button text', () => {
+ expect(findGetStartedButton().text()).toBe('Get started with CI/CD');
+ });
});
- it('should render a button', () => {
- expect(findGetStartedButton().text()).toBe('Get started with Pipelines');
+ describe('tracking', () => {
+ let origGon;
+
+ describe('when data is set', () => {
+ beforeEach(() => {
+ jest.spyOn(Tracking, 'event').mockImplementation(() => {});
+ origGon = window.gon;
+
+ window.gon = {
+ tracking_data: {
+ category: 'Growth::Activation::Experiment::PipelinesEmptyState',
+ value: 1,
+ property: 'experimental_group',
+ label: 'label',
+ },
+ };
+ createWrapper();
+ });
+
+ afterEach(() => {
+ window.gon = origGon;
+ });
+
+ it('tracks when mounted', () => {
+ expect(Tracking.event).toHaveBeenCalledWith(
+ 'Growth::Activation::Experiment::PipelinesEmptyState',
+ 'viewed',
+ {
+ value: 1,
+ label: 'label',
+ property: 'experimental_group',
+ },
+ );
+ });
+
+ it('tracks when button is clicked', () => {
+ findGetStartedButton().vm.$emit('click');
+
+ expect(Tracking.event).toHaveBeenCalledWith(
+ 'Growth::Activation::Experiment::PipelinesEmptyState',
+ 'documentation_clicked',
+ {
+ value: 1,
+ label: 'label',
+ property: 'experimental_group',
+ },
+ );
+ });
+ });
+
+ describe('when no data is defined', () => {
+ beforeEach(() => {
+ jest.spyOn(Tracking, 'event').mockImplementation(() => {});
+
+ createWrapper();
+ });
+
+ it('does not track on view', () => {
+ expect(Tracking.event).not.toHaveBeenCalled();
+ });
+
+ it('does not track when button is clicked', () => {
+ findGetStartedButton().vm.$emit('click');
+ expect(Tracking.event).not.toHaveBeenCalled();
+ });
+ });
});
});
});
diff --git a/spec/frontend/pipelines/graph/action_component_spec.js b/spec/frontend/pipelines/graph/action_component_spec.js
index ab477292bc1..95d96e127c6 100644
--- a/spec/frontend/pipelines/graph/action_component_spec.js
+++ b/spec/frontend/pipelines/graph/action_component_spec.js
@@ -33,7 +33,7 @@ describe('pipeline graph action component', () => {
expect(wrapper.attributes('title')).toBe('bar');
});
- it('should update bootstrap tooltip when title changes', done => {
+ it('should update bootstrap tooltip when title changes', (done) => {
wrapper.setProps({ tooltipText: 'changed' });
wrapper.vm
@@ -51,7 +51,7 @@ describe('pipeline graph action component', () => {
});
describe('on click', () => {
- it('emits `pipelineActionRequestComplete` after a successful request', done => {
+ it('emits `pipelineActionRequestComplete` after a successful request', (done) => {
jest.spyOn(wrapper.vm, '$emit');
findButton().trigger('click');
@@ -64,7 +64,7 @@ describe('pipeline graph action component', () => {
.catch(done.fail);
});
- it('renders a loading icon while waiting for request', done => {
+ it('renders a loading icon while waiting for request', (done) => {
findButton().trigger('click');
wrapper.vm.$nextTick(() => {
diff --git a/spec/frontend/pipelines/graph/graph_component_legacy_spec.js b/spec/frontend/pipelines/graph/graph_component_legacy_spec.js
index 3b1909b6564..840b1f8baf5 100644
--- a/spec/frontend/pipelines/graph/graph_component_legacy_spec.js
+++ b/spec/frontend/pipelines/graph/graph_component_legacy_spec.js
@@ -1,4 +1,4 @@
-import Vue from 'vue';
+import { nextTick } from 'vue';
import { mount } from '@vue/test-utils';
import { GlLoadingIcon } from '@gitlab/ui';
import { setHTMLFixture } from 'helpers/fixtures';
@@ -18,7 +18,7 @@ describe('graph component', () => {
const findExpandPipelineBtn = () => wrapper.find('[data-testid="expand-pipeline-button"]');
const findAllExpandPipelineBtns = () => wrapper.findAll('[data-testid="expand-pipeline-button"]');
const findStageColumns = () => wrapper.findAll(StageColumnComponentLegacy);
- const findStageColumnAt = i => findStageColumns().at(i);
+ const findStageColumnAt = (i) => findStageColumns().at(i);
beforeEach(() => {
mediator = new PipelinesMediator({ endpoint: '' });
@@ -104,11 +104,9 @@ describe('graph component', () => {
});
it('should include the left-connector class in the build of the second child', () => {
- expect(
- findStageColumnAt(1)
- .find('.build:nth-child(1)')
- .classes('left-connector'),
- ).toBe(true);
+ expect(findStageColumnAt(1).find('.build:nth-child(1)').classes('left-connector')).toBe(
+ true,
+ );
});
it('should include the js-has-linked-pipelines flag', () => {
@@ -119,12 +117,7 @@ describe('graph component', () => {
describe('computeds and methods', () => {
describe('capitalizeStageName', () => {
it('it capitalizes the stage name', () => {
- expect(
- wrapper
- .findAll('.stage-column .stage-name')
- .at(1)
- .text(),
- ).toBe('Prebuild');
+ expect(wrapper.findAll('.stage-column .stage-name').at(1).text()).toBe('Prebuild');
});
});
@@ -160,21 +153,20 @@ describe('graph component', () => {
describe('triggered by', () => {
describe('on click', () => {
- it('should emit `onClickUpstreamPipeline` when triggered by linked pipeline is clicked', () => {
+ it('should emit `onClickUpstreamPipeline` when triggered by linked pipeline is clicked', async () => {
const btnWrapper = findExpandPipelineBtn();
btnWrapper.trigger('click');
- btnWrapper.vm.$nextTick(() => {
- expect(wrapper.emitted().onClickUpstreamPipeline).toEqual([
- store.state.pipeline.triggered_by,
- ]);
- });
+ await nextTick();
+ expect(wrapper.emitted().onClickUpstreamPipeline).toEqual([
+ store.state.pipeline.triggered_by,
+ ]);
});
});
describe('with expanded pipeline', () => {
- it('should render expanded pipeline', done => {
+ it('should render expanded pipeline', async () => {
// expand the pipeline
store.state.pipeline.triggered_by[0].isExpanded = true;
@@ -186,40 +178,46 @@ describe('graph component', () => {
},
});
- Vue.nextTick()
- .then(() => {
- expect(wrapper.find('.js-upstream-pipeline-12').exists()).toBe(true);
- })
- .then(done)
- .catch(done.fail);
+ await nextTick();
+ expect(wrapper.find('.js-upstream-pipeline-12').exists()).toBe(true);
});
});
});
describe('triggered', () => {
describe('on click', () => {
- it('should emit `onClickTriggered`', () => {
- // We have to mock this method since we do both style change and
- // emit and event, not mocking returns an error.
- wrapper.setMethods({
- handleClickedDownstream: jest.fn(() =>
- wrapper.vm.$emit('onClickTriggered', ...store.state.pipeline.triggered),
- ),
+ // We have to mock this property of HTMLElement since the component relies on it
+ let offsetParentDescriptor;
+ beforeAll(() => {
+ offsetParentDescriptor = Object.getOwnPropertyDescriptor(
+ HTMLElement.prototype,
+ 'offsetParent',
+ );
+ Object.defineProperty(HTMLElement.prototype, 'offsetParent', {
+ get() {
+ return this.parentNode;
+ },
});
+ });
+ afterAll(() => {
+ Object.defineProperty(HTMLElement.prototype, 'offsetParent', offsetParentDescriptor);
+ });
+ it('should emit `onClickDownstreamPipeline`', async () => {
const btnWrappers = findAllExpandPipelineBtns();
const downstreamBtnWrapper = btnWrappers.at(btnWrappers.length - 1);
downstreamBtnWrapper.trigger('click');
- downstreamBtnWrapper.vm.$nextTick(() => {
- expect(wrapper.emitted().onClickTriggered).toEqual([store.state.pipeline.triggered]);
- });
+ await nextTick();
+ expect(wrapper.emitted().onClickDownstreamPipeline).toEqual([
+ [store.state.pipeline.triggered[1]],
+ ]);
});
});
describe('with expanded pipeline', () => {
- it('should render expanded pipeline', done => {
+ it('should render expanded pipeline', async () => {
// expand the pipeline
store.state.pipeline.triggered[0].isExpanded = true;
@@ -231,12 +229,8 @@ describe('graph component', () => {
},
});
- Vue.nextTick()
- .then(() => {
- expect(wrapper.find('.js-downstream-pipeline-34993051')).not.toBeNull();
- })
- .then(done)
- .catch(done.fail);
+ await nextTick();
+ expect(wrapper.find('.js-downstream-pipeline-34993051')).not.toBeNull();
});
});
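(Editor's sketch of the callback-to-async migration in the hunks above: done-style callbacks and chained Vue.nextTick() promises are replaced by async tests that await nextTick, imported as in the spec itself. The selector comes from the spec; the prop name is illustrative only.)

    import { nextTick } from 'vue';

    it('renders the expanded upstream pipeline', async () => {
      // trigger a reactive update, then wait one tick for the DOM to settle
      wrapper.setProps({ isExpanded: true });
      await nextTick();

      expect(wrapper.find('.js-upstream-pipeline-12').exists()).toBe(true);
    });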
diff --git a/spec/frontend/pipelines/graph/graph_component_spec.js b/spec/frontend/pipelines/graph/graph_component_spec.js
index 7572dd83798..cfc3b7af282 100644
--- a/spec/frontend/pipelines/graph/graph_component_spec.js
+++ b/spec/frontend/pipelines/graph/graph_component_spec.js
@@ -1,7 +1,9 @@
import { mount, shallowMount } from '@vue/test-utils';
import PipelineGraph from '~/pipelines/components/graph/graph_component.vue';
import StageColumnComponent from '~/pipelines/components/graph/stage_column_component.vue';
+import JobItem from '~/pipelines/components/graph/job_item.vue';
import LinkedPipelinesColumn from '~/pipelines/components/graph/linked_pipelines_column.vue';
+import LinksLayer from '~/pipelines/components/graph_shared/links_layer.vue';
import { GRAPHQL } from '~/pipelines/components/graph/constants';
import {
generateResponse,
@@ -13,21 +15,37 @@ describe('graph component', () => {
let wrapper;
const findLinkedColumns = () => wrapper.findAll(LinkedPipelinesColumn);
+ const findLinksLayer = () => wrapper.find(LinksLayer);
const findStageColumns = () => wrapper.findAll(StageColumnComponent);
const defaultProps = {
pipeline: generateResponse(mockPipelineResponse, 'root/fungi-xoxo'),
};
- const createComponent = ({ mountFn = shallowMount, props = {} } = {}) => {
+ const createComponent = ({
+ data = {},
+ mountFn = shallowMount,
+ props = {},
+ stubOverride = {},
+ } = {}) => {
wrapper = mountFn(PipelineGraph, {
propsData: {
...defaultProps,
...props,
},
+ data() {
+ return { ...data };
+ },
provide: {
dataMethod: GRAPHQL,
},
+ stubs: {
+ 'links-inner': true,
+ 'linked-pipeline': true,
+ 'job-item': true,
+ 'job-group-dropdown': true,
+ ...stubOverride,
+ },
});
};
@@ -45,17 +63,36 @@ describe('graph component', () => {
expect(findStageColumns()).toHaveLength(defaultProps.pipeline.stages.length);
});
+ it('renders the links layer', () => {
+ expect(findLinksLayer().exists()).toBe(true);
+ });
+
describe('when column requests a refresh', () => {
beforeEach(() => {
- findStageColumns()
- .at(0)
- .vm.$emit('refreshPipelineGraph');
+ findStageColumns().at(0).vm.$emit('refreshPipelineGraph');
});
it('refreshPipelineGraph is emitted', () => {
expect(wrapper.emitted().refreshPipelineGraph).toHaveLength(1);
});
});
+
+ describe('when links are present', () => {
+ beforeEach(async () => {
+ createComponent({
+ mountFn: mount,
+ stubOverride: { 'job-item': false },
+ data: { hoveredJobName: 'test_a' },
+ });
+ findLinksLayer().vm.$emit('highlightedJobsChange', ['test_c', 'build_c']);
+ });
+
+ it('dims unrelated jobs', () => {
+ const unrelatedJob = wrapper.find(JobItem);
+ expect(findLinksLayer().emitted().highlightedJobsChange).toHaveLength(1);
+ expect(unrelatedJob.classes('gl-opacity-3')).toBe(true);
+ });
+ });
});
describe('when linked pipelines are not present', () => {
diff --git a/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js b/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js
index 875aaa48037..54593c527cb 100644
--- a/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js
+++ b/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js
@@ -2,10 +2,10 @@ import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { shallowMount } from '@vue/test-utils';
import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
-import createMockApollo from 'jest/helpers/mock_apollo_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import getPipelineDetails from 'shared_queries/pipelines/get_pipeline_details.query.graphql';
import PipelineGraphWrapper from '~/pipelines/components/graph/graph_component_wrapper.vue';
import PipelineGraph from '~/pipelines/components/graph/graph_component.vue';
-import getPipelineDetails from '~/pipelines/graphql/queries/get_pipeline_details.query.graphql';
import { mockPipelineResponse } from './mock_data';
const defaultProvide = {
diff --git a/spec/frontend/pipelines/graph/job_item_spec.js b/spec/frontend/pipelines/graph/job_item_spec.js
index 8aabb2f9cdd..cb2837cbb39 100644
--- a/spec/frontend/pipelines/graph/job_item_spec.js
+++ b/spec/frontend/pipelines/graph/job_item_spec.js
@@ -7,7 +7,7 @@ describe('pipeline graph job item', () => {
const findJobWithoutLink = () => wrapper.find('[data-testid="job-without-link"]');
const findJobWithLink = () => wrapper.find('[data-testid="job-with-link"]');
- const createWrapper = propsData => {
+ const createWrapper = (propsData) => {
wrapper = mount(JobItem, {
propsData,
});
@@ -52,7 +52,7 @@ describe('pipeline graph job item', () => {
});
describe('name with link', () => {
- it('should render the job name and status with a link', done => {
+ it('should render the job name and status with a link', (done) => {
createWrapper({ job: mockJob });
wrapper.vm.$nextTick(() => {
diff --git a/spec/frontend/pipelines/graph/linked_pipelines_column_spec.js b/spec/frontend/pipelines/graph/linked_pipelines_column_spec.js
index 37eb5f900dd..6db152f2607 100644
--- a/spec/frontend/pipelines/graph/linked_pipelines_column_spec.js
+++ b/spec/frontend/pipelines/graph/linked_pipelines_column_spec.js
@@ -1,10 +1,10 @@
import VueApollo from 'vue-apollo';
import { mount, shallowMount, createLocalVue } from '@vue/test-utils';
-import createMockApollo from 'jest/helpers/mock_apollo_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import getPipelineDetails from 'shared_queries/pipelines/get_pipeline_details.query.graphql';
import PipelineGraph from '~/pipelines/components/graph/graph_component.vue';
import LinkedPipelinesColumn from '~/pipelines/components/graph/linked_pipelines_column.vue';
import LinkedPipeline from '~/pipelines/components/graph/linked_pipeline.vue';
-import getPipelineDetails from '~/pipelines/graphql/queries/get_pipeline_details.query.graphql';
import { DOWNSTREAM, GRAPHQL } from '~/pipelines/components/graph/constants';
import { LOAD_FAILURE } from '~/pipelines/constants';
import {
diff --git a/spec/frontend/pipelines/graph/mock_data.js b/spec/frontend/pipelines/graph/mock_data.js
index d53a11eea0e..7650cbd2d5c 100644
--- a/spec/frontend/pipelines/graph/mock_data.js
+++ b/spec/frontend/pipelines/graph/mock_data.js
@@ -56,7 +56,7 @@ export const mockPipelineResponse = {
},
},
needs: {
- __typename: 'CiJobConnection',
+ __typename: 'CiBuildNeedConnection',
nodes: [],
},
},
@@ -96,7 +96,7 @@ export const mockPipelineResponse = {
},
},
needs: {
- __typename: 'CiJobConnection',
+ __typename: 'CiBuildNeedConnection',
nodes: [],
},
},
@@ -136,7 +136,7 @@ export const mockPipelineResponse = {
},
},
needs: {
- __typename: 'CiJobConnection',
+ __typename: 'CiBuildNeedConnection',
nodes: [],
},
},
@@ -176,7 +176,7 @@ export const mockPipelineResponse = {
},
},
needs: {
- __typename: 'CiJobConnection',
+ __typename: 'CiBuildNeedConnection',
nodes: [],
},
},
@@ -200,7 +200,7 @@ export const mockPipelineResponse = {
},
},
needs: {
- __typename: 'CiJobConnection',
+ __typename: 'CiBuildNeedConnection',
nodes: [],
},
},
@@ -224,7 +224,7 @@ export const mockPipelineResponse = {
},
},
needs: {
- __typename: 'CiJobConnection',
+ __typename: 'CiBuildNeedConnection',
nodes: [],
},
},
@@ -277,18 +277,18 @@ export const mockPipelineResponse = {
},
},
needs: {
- __typename: 'CiJobConnection',
+ __typename: 'CiBuildNeedConnection',
nodes: [
{
- __typename: 'CiJob',
+ __typename: 'CiBuildNeed',
name: 'build_c',
},
{
- __typename: 'CiJob',
+ __typename: 'CiBuildNeed',
name: 'build_b',
},
{
- __typename: 'CiJob',
+ __typename: 'CiBuildNeed',
name:
'build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl',
},
@@ -331,26 +331,26 @@ export const mockPipelineResponse = {
},
},
needs: {
- __typename: 'CiJobConnection',
+ __typename: 'CiBuildNeedConnection',
nodes: [
{
- __typename: 'CiJob',
+ __typename: 'CiBuildNeed',
name: 'build_d 3/3',
},
{
- __typename: 'CiJob',
+ __typename: 'CiBuildNeed',
name: 'build_d 2/3',
},
{
- __typename: 'CiJob',
+ __typename: 'CiBuildNeed',
name: 'build_d 1/3',
},
{
- __typename: 'CiJob',
+ __typename: 'CiBuildNeed',
name: 'build_b',
},
{
- __typename: 'CiJob',
+ __typename: 'CiBuildNeed',
name:
'build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl',
},
@@ -377,26 +377,26 @@ export const mockPipelineResponse = {
},
},
needs: {
- __typename: 'CiJobConnection',
+ __typename: 'CiBuildNeedConnection',
nodes: [
{
- __typename: 'CiJob',
+ __typename: 'CiBuildNeed',
name: 'build_d 3/3',
},
{
- __typename: 'CiJob',
+ __typename: 'CiBuildNeed',
name: 'build_d 2/3',
},
{
- __typename: 'CiJob',
+ __typename: 'CiBuildNeed',
name: 'build_d 1/3',
},
{
- __typename: 'CiJob',
+ __typename: 'CiBuildNeed',
name: 'build_b',
},
{
- __typename: 'CiJob',
+ __typename: 'CiBuildNeed',
name:
'build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl',
},
@@ -433,18 +433,18 @@ export const mockPipelineResponse = {
action: null,
},
needs: {
- __typename: 'CiJobConnection',
+ __typename: 'CiBuildNeedConnection',
nodes: [
{
- __typename: 'CiJob',
+ __typename: 'CiBuildNeed',
name: 'build_c',
},
{
- __typename: 'CiJob',
+ __typename: 'CiBuildNeed',
name: 'build_b',
},
{
- __typename: 'CiJob',
+ __typename: 'CiBuildNeed',
name:
'build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl',
},
@@ -481,10 +481,10 @@ export const mockPipelineResponse = {
action: null,
},
needs: {
- __typename: 'CiJobConnection',
+ __typename: 'CiBuildNeedConnection',
nodes: [
{
- __typename: 'CiJob',
+ __typename: 'CiBuildNeed',
name: 'build_b',
},
],
@@ -578,41 +578,54 @@ export const upstream = {
export const wrappedPipelineReturn = {
data: {
project: {
+ __typename: 'Project',
pipeline: {
+ __typename: 'Pipeline',
id: 'gid://gitlab/Ci::Pipeline/175',
iid: '38',
downstream: {
+ __typename: 'PipelineConnection',
nodes: [],
},
upstream: {
id: 'gid://gitlab/Ci::Pipeline/174',
iid: '37',
path: '/root/elemenohpee/-/pipelines/174',
+ __typename: 'Pipeline',
status: {
+ __typename: 'DetailedStatus',
group: 'success',
label: 'passed',
icon: 'status_success',
},
sourceJob: {
name: 'test_c',
+ __typename: 'CiJob',
},
project: {
id: 'gid://gitlab/Project/25',
name: 'elemenohpee',
fullPath: 'root/elemenohpee',
+ __typename: 'Project',
},
},
stages: {
+ __typename: 'CiStageConnection',
nodes: [
{
name: 'build',
+ __typename: 'CiStage',
status: {
action: null,
+ __typename: 'DetailedStatus',
},
groups: {
+ __typename: 'CiGroupConnection',
nodes: [
{
+ __typename: 'CiGroup',
status: {
+ __typename: 'DetailedStatus',
label: 'passed',
group: 'success',
icon: 'status_success',
@@ -620,20 +633,25 @@ export const wrappedPipelineReturn = {
name: 'build_n',
size: 1,
jobs: {
+ __typename: 'CiJobConnection',
nodes: [
{
+ __typename: 'CiJob',
name: 'build_n',
scheduledAt: null,
needs: {
+ __typename: 'CiBuildNeedConnection',
nodes: [],
},
status: {
+ __typename: 'DetailedStatus',
icon: 'status_success',
tooltip: 'passed',
hasDetails: true,
detailsPath: '/root/elemenohpee/-/jobs/1662',
group: 'success',
action: {
+ __typename: 'StatusAction',
buttonTitle: 'Retry this job',
icon: 'retry',
path: '/root/elemenohpee/-/jobs/1662/retry',
@@ -656,7 +674,7 @@ export const wrappedPipelineReturn = {
export const generateResponse = (raw, mockPath) => unwrapPipelineData(mockPath, raw.data);
-export const pipelineWithUpstreamDownstream = base => {
+export const pipelineWithUpstreamDownstream = (base) => {
const pip = { ...base };
pip.data.project.pipeline.downstream = downstream;
pip.data.project.pipeline.upstream = upstream;
diff --git a/spec/frontend/pipelines/graph/stage_column_component_legacy_spec.js b/spec/frontend/pipelines/graph/stage_column_component_legacy_spec.js
index 463e4c12c7d..2965325ea7c 100644
--- a/spec/frontend/pipelines/graph/stage_column_component_legacy_spec.js
+++ b/spec/frontend/pipelines/graph/stage_column_component_legacy_spec.js
@@ -40,12 +40,7 @@ describe('stage column component', () => {
});
it('should render provided title', () => {
- expect(
- wrapper
- .find('.stage-name')
- .text()
- .trim(),
- ).toBe('foo');
+ expect(wrapper.find('.stage-name').text().trim()).toBe('foo');
});
it('should render the provided groups', () => {
diff --git a/spec/frontend/pipelines/graph/stage_column_component_spec.js b/spec/frontend/pipelines/graph/stage_column_component_spec.js
index 44803929f6d..202e25ccda3 100644
--- a/spec/frontend/pipelines/graph/stage_column_component_spec.js
+++ b/spec/frontend/pipelines/graph/stage_column_component_spec.js
@@ -30,6 +30,7 @@ const mockGroups = Array(4)
const defaultProps = {
title: 'Fish',
groups: mockGroups,
+ pipelineId: 159,
};
describe('stage column component', () => {
@@ -92,36 +93,51 @@ describe('stage column component', () => {
});
describe('job', () => {
- beforeEach(() => {
- createComponent({
- method: mount,
- props: {
- groups: [
- {
- id: 4259,
- name: '<img src=x onerror=alert(document.domain)>',
- status: {
- icon: 'status_success',
- label: 'success',
- tooltip: '<img src=x onerror=alert(document.domain)>',
+ describe('text handling', () => {
+ beforeEach(() => {
+ createComponent({
+ method: mount,
+ props: {
+ groups: [
+ {
+ id: 4259,
+ name: '<img src=x onerror=alert(document.domain)>',
+ status: {
+ icon: 'status_success',
+ label: 'success',
+ tooltip: '<img src=x onerror=alert(document.domain)>',
+ },
},
- },
- ],
- title: 'test <img src=x onerror=alert(document.domain)>',
- },
+ ],
+ title: 'test <img src=x onerror=alert(document.domain)>',
+ },
+ });
});
- });
- it('capitalizes and escapes name', () => {
- expect(findStageColumnTitle().text()).toBe(
- 'Test &lt;img src=x onerror=alert(document.domain)&gt;',
- );
+ it('capitalizes and escapes name', () => {
+ expect(findStageColumnTitle().text()).toBe(
+ 'Test &lt;img src=x onerror=alert(document.domain)&gt;',
+ );
+ });
+
+ it('escapes id', () => {
+ expect(findStageColumnGroup().attributes('id')).toBe(
+ 'ci-badge-&lt;img src=x onerror=alert(document.domain)&gt;',
+ );
+ });
});
- it('escapes id', () => {
- expect(findStageColumnGroup().attributes('id')).toBe(
- 'ci-badge-&lt;img src=x onerror=alert(document.domain)&gt;',
- );
+ describe('interactions', () => {
+ beforeEach(() => {
+ createComponent({ method: mount });
+ });
+
+ it('emits jobHovered event on mouseenter and mouseleave', async () => {
+ await findStageColumnGroup().trigger('mouseenter');
+ expect(wrapper.emitted().jobHover).toEqual([[defaultProps.groups[0].name]]);
+ await findStageColumnGroup().trigger('mouseleave');
+ expect(wrapper.emitted().jobHover).toEqual([[defaultProps.groups[0].name], ['']]);
+ });
});
});
diff --git a/spec/frontend/pipelines/nav_controls_spec.js b/spec/frontend/pipelines/nav_controls_spec.js
index 139d53881c8..305dc557b39 100644
--- a/spec/frontend/pipelines/nav_controls_spec.js
+++ b/spec/frontend/pipelines/nav_controls_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
+import mountComponent from 'helpers/vue_mount_component_helper';
import navControlsComp from '~/pipelines/components/pipelines_list/nav_controls.vue';
-import mountComponent from '../helpers/vue_mount_component_helper';
describe('Pipelines Nav Controls', () => {
let NavControlsComponent;
diff --git a/spec/frontend/pipelines/pipeline_graph/mock_data.js b/spec/frontend/pipelines/pipeline_graph/mock_data.js
index a77973b293c..7d1a7a79c7f 100644
--- a/spec/frontend/pipelines/pipeline_graph/mock_data.js
+++ b/spec/frontend/pipelines/pipeline_graph/mock_data.js
@@ -1,4 +1,4 @@
-import { createUniqueLinkId } from '~/pipelines/utils';
+import { createUniqueLinkId } from '~/pipelines/components/graph_shared/drawing_utils';
export const yamlString = `stages:
- empty
@@ -78,18 +78,12 @@ export const pipelineData = {
groups: [
{
name: 'deploy_1',
- jobs: [{ script: 'yarn magick', stage: 'deploy' }],
+ jobs: [{ script: 'yarn magick', stage: 'deploy', needs: ['test_1'] }],
id: jobId4,
},
],
},
],
- jobs: {
- [jobId1]: {},
- [jobId2]: {},
- [jobId3]: {},
- [jobId4]: {},
- },
};
export const singleStageData = {
diff --git a/spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js b/spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js
index 6704ee06c1a..b6b0a964383 100644
--- a/spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js
+++ b/spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import { GlAlert } from '@gitlab/ui';
import { pipelineData, singleStageData } from './mock_data';
-import { CI_CONFIG_STATUS_INVALID } from '~/pipeline_editor/constants';
+import { CI_CONFIG_STATUS_INVALID, CI_CONFIG_STATUS_VALID } from '~/pipeline_editor/constants';
import { DRAW_FAILURE, EMPTY_PIPELINE_DATA, INVALID_CI_CONFIG } from '~/pipelines/constants';
import PipelineGraph from '~/pipelines/components/pipeline_graph/pipeline_graph.vue';
import StagePill from '~/pipelines/components/pipeline_graph/stage_pill.vue';
@@ -23,7 +23,7 @@ describe('pipeline graph component', () => {
const findAlert = () => wrapper.find(GlAlert);
const findAllStagePills = () => wrapper.findAll(StagePill);
const findAllStageBackgroundElements = () => wrapper.findAll('[data-testid="stage-background"]');
- const findStageBackgroundElementAt = index => findAllStageBackgroundElements().at(index);
+ const findStageBackgroundElementAt = (index) => findAllStageBackgroundElements().at(index);
const findAllJobPills = () => wrapper.findAll(JobPill);
afterEach(() => {
@@ -37,7 +37,7 @@ describe('pipeline graph component', () => {
});
it('renders an empty section', () => {
- expect(wrapper.text()).toBe(wrapper.vm.$options.warningTexts[EMPTY_PIPELINE_DATA]);
+ expect(wrapper.text()).toBe(wrapper.vm.$options.errorTexts[EMPTY_PIPELINE_DATA]);
expect(findPipelineGraph().exists()).toBe(false);
expect(findAllStagePills()).toHaveLength(0);
expect(findAllJobPills()).toHaveLength(0);
@@ -51,23 +51,25 @@ describe('pipeline graph component', () => {
it('renders an error message and does not render the graph', () => {
expect(findAlert().exists()).toBe(true);
- expect(findAlert().text()).toBe(wrapper.vm.$options.warningTexts[INVALID_CI_CONFIG]);
+ expect(findAlert().text()).toBe(wrapper.vm.$options.errorTexts[INVALID_CI_CONFIG]);
expect(findPipelineGraph().exists()).toBe(false);
});
});
- describe('without `INVALID` status', () => {
+ describe('with `VALID` status', () => {
beforeEach(() => {
- wrapper = createComponent();
+ wrapper = createComponent({
+ pipelineData: { status: CI_CONFIG_STATUS_VALID, stages: [{ name: 'hello', groups: [] }] },
+ });
});
it('renders the graph with no status error', () => {
- expect(findAlert().text()).not.toBe(wrapper.vm.$options.warningTexts[INVALID_CI_CONFIG]);
+ expect(findAlert().exists()).toBe(false);
expect(findPipelineGraph().exists()).toBe(true);
});
});
- describe('with error while rendering the links', () => {
+ describe('with error while rendering the links with needs', () => {
beforeEach(() => {
wrapper = createComponent();
});
diff --git a/spec/frontend/pipelines/pipeline_graph/utils_spec.js b/spec/frontend/pipelines/pipeline_graph/utils_spec.js
index 12154df6fcf..070d3bf7dac 100644
--- a/spec/frontend/pipelines/pipeline_graph/utils_spec.js
+++ b/spec/frontend/pipelines/pipeline_graph/utils_spec.js
@@ -68,10 +68,10 @@ describe('utils functions', () => {
it('returns a hash with the jobname as key and all its data as value', () => {
const jobs = {
- [jobName1]: job1,
- [jobName2]: job2,
- [jobName3]: job3,
- [jobName4]: job4,
+ [jobName1]: { jobs: [job1], name: jobName1, needs: [] },
+ [jobName2]: { jobs: [job2], name: jobName2, needs: [] },
+ [jobName3]: { jobs: [job3], name: jobName3, needs: job3.needs },
+ [jobName4]: { jobs: [job4], name: jobName4, needs: job4.needs },
};
expect(createJobsHash(pipelineGraphData.stages)).toEqual(jobs);
@@ -110,5 +110,41 @@ describe('utils functions', () => {
[jobName4]: [jobName3, jobName1, jobName2],
});
});
+
+ it('handles parallel jobs by adding the group name as a need', () => {
+ const size = 3;
+ const jobOptimize1 = 'optimize_1';
+ const jobPrepareA = 'prepare_a';
+ const jobPrepareA1 = `${jobPrepareA} 1/${size}`;
+ const jobPrepareA2 = `${jobPrepareA} 2/${size}`;
+ const jobPrepareA3 = `${jobPrepareA} 3/${size}`;
+
+ const jobsParallel = {
+ [jobOptimize1]: {
+ jobs: [job1],
+ name: [jobOptimize1],
+ needs: [jobPrepareA1, jobPrepareA2, jobPrepareA3],
+ },
+ [jobPrepareA]: { jobs: [], name: jobPrepareA, needs: [], size },
+ [jobPrepareA1]: { jobs: [], name: jobPrepareA, needs: [], size },
+ [jobPrepareA2]: { jobs: [], name: jobPrepareA, needs: [], size },
+ [jobPrepareA3]: { jobs: [], name: jobPrepareA, needs: [], size },
+ };
+
+ expect(generateJobNeedsDict(jobsParallel)).toEqual({
+ [jobOptimize1]: [
+ jobPrepareA1,
+ // This is the important part, the `jobPrepareA` group name has been
+ // added to our list of needs.
+ jobPrepareA,
+ jobPrepareA2,
+ jobPrepareA3,
+ ],
+ [jobPrepareA]: [],
+ [jobPrepareA1]: [],
+ [jobPrepareA2]: [],
+ [jobPrepareA3]: [],
+ });
+ });
});
});
diff --git a/spec/frontend/pipelines/pipeline_url_spec.js b/spec/frontend/pipelines/pipeline_url_spec.js
index fc45af2c254..47315bd42e6 100644
--- a/spec/frontend/pipelines/pipeline_url_spec.js
+++ b/spec/frontend/pipelines/pipeline_url_spec.js
@@ -28,7 +28,7 @@ describe('Pipeline Url Component', () => {
pipelineScheduleUrl: 'foo',
};
- const createComponent = props => {
+ const createComponent = (props) => {
wrapper = shallowMount(PipelineUrlComponent, {
propsData: { ...defaultProps, ...props },
provide: {
diff --git a/spec/frontend/pipelines/pipelines_actions_spec.js b/spec/frontend/pipelines/pipelines_actions_spec.js
index 071a2b24889..69c1b7ce43d 100644
--- a/spec/frontend/pipelines/pipelines_actions_spec.js
+++ b/spec/frontend/pipelines/pipelines_actions_spec.js
@@ -55,11 +55,7 @@ describe('Pipelines Actions dropdown', () => {
});
it("renders a disabled action when it's not playable", () => {
- expect(
- findAllDropdownItems()
- .at(1)
- .attributes('disabled'),
- ).toBe('true');
+ expect(findAllDropdownItems().at(1).attributes('disabled')).toBe('true');
});
describe('on click', () => {
@@ -100,9 +96,7 @@ describe('Pipelines Actions dropdown', () => {
mock.onPost(scheduledJobAction.path).reply(200);
jest.spyOn(window, 'confirm').mockReturnValue(true);
- findAllDropdownItems()
- .at(0)
- .vm.$emit('click');
+ findAllDropdownItems().at(0).vm.$emit('click');
expect(window.confirm).toHaveBeenCalled();
@@ -115,28 +109,20 @@ describe('Pipelines Actions dropdown', () => {
mock.onPost(scheduledJobAction.path).reply(200);
jest.spyOn(window, 'confirm').mockReturnValue(false);
- findAllDropdownItems()
- .at(0)
- .vm.$emit('click');
+ findAllDropdownItems().at(0).vm.$emit('click');
expect(window.confirm).toHaveBeenCalled();
expect(mock.history.post.length).toBe(0);
});
it('displays the remaining time in the dropdown', () => {
- expect(
- findAllCountdowns()
- .at(0)
- .props('endDateString'),
- ).toBe(scheduledJobAction.scheduled_at);
+ expect(findAllCountdowns().at(0).props('endDateString')).toBe(
+ scheduledJobAction.scheduled_at,
+ );
});
it('displays 00:00:00 for expired jobs in the dropdown', () => {
- expect(
- findAllCountdowns()
- .at(1)
- .props('endDateString'),
- ).toBe(expiredJobAction.scheduled_at);
+ expect(findAllCountdowns().at(1).props('endDateString')).toBe(expiredJobAction.scheduled_at);
});
});
});
diff --git a/spec/frontend/pipelines/pipelines_artifacts_spec.js b/spec/frontend/pipelines/pipelines_artifacts_spec.js
index 83f6cb68eba..4f4c15fd4cc 100644
--- a/spec/frontend/pipelines/pipelines_artifacts_spec.js
+++ b/spec/frontend/pipelines/pipelines_artifacts_spec.js
@@ -1,12 +1,12 @@
-import { shallowMount } from '@vue/test-utils';
-import { GlLink } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
import PipelineArtifacts from '~/pipelines/components/pipelines_list/pipelines_artifacts.vue';
describe('Pipelines Artifacts dropdown', () => {
let wrapper;
const createComponent = () => {
- wrapper = shallowMount(PipelineArtifacts, {
+ wrapper = mount(PipelineArtifacts, {
propsData: {
artifacts: [
{
@@ -22,8 +22,8 @@ describe('Pipelines Artifacts dropdown', () => {
});
};
- const findGlLink = () => wrapper.find(GlLink);
- const findAllGlLinks = () => wrapper.find('.dropdown-menu').findAll(GlLink);
+ const findFirstGlDropdownItem = () => wrapper.find(GlDropdownItem);
+ const findAllGlDropdownItems = () => wrapper.find(GlDropdown).findAll(GlDropdownItem);
beforeEach(() => {
createComponent();
@@ -35,12 +35,12 @@ describe('Pipelines Artifacts dropdown', () => {
});
it('should render a dropdown with all the provided artifacts', () => {
- expect(findAllGlLinks()).toHaveLength(2);
+ expect(findAllGlDropdownItems()).toHaveLength(2);
});
it('should render a link with the provided path', () => {
- expect(findGlLink().attributes('href')).toEqual('/download/path');
+ expect(findFirstGlDropdownItem().find('a').attributes('href')).toEqual('/download/path');
- expect(findGlLink().text()).toContain('artifact');
+ expect(findFirstGlDropdownItem().text()).toContain('artifact');
});
});
diff --git a/spec/frontend/pipelines/pipelines_spec.js b/spec/frontend/pipelines/pipelines_spec.js
index ce0e76ba22d..5d82669b0b8 100644
--- a/spec/frontend/pipelines/pipelines_spec.js
+++ b/spec/frontend/pipelines/pipelines_spec.js
@@ -1,3 +1,4 @@
+import { nextTick } from 'vue';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import waitForPromises from 'helpers/wait_for_promises';
@@ -57,10 +58,10 @@ describe('Pipelines', () => {
};
const findFilteredSearch = () => wrapper.find(GlFilteredSearch);
- const findByTestId = id => wrapper.find(`[data-testid="${id}"]`);
+ const findByTestId = (id) => wrapper.find(`[data-testid="${id}"]`);
const findNavigationTabs = () => wrapper.find(NavigationTabs);
const findNavigationControls = () => wrapper.find(NavigationControls);
- const findTab = tab => findByTestId(`pipelines-tab-${tab}`);
+ const findTab = (tab) => findByTestId(`pipelines-tab-${tab}`);
const findRunPipelineButton = () => findByTestId('run-pipeline-button');
const findCiLintButton = () => findByTestId('ci-lint-button');
@@ -72,7 +73,7 @@ describe('Pipelines', () => {
const findTablePagination = () => wrapper.find(TablePagination);
- const createComponent = (props = defaultProps, methods) => {
+ const createComponent = (props = defaultProps) => {
wrapper = mount(PipelinesComponent, {
propsData: {
store: new Store(),
@@ -80,13 +81,15 @@ describe('Pipelines', () => {
params: {},
...props,
},
- methods: {
- ...methods,
- },
});
};
beforeEach(() => {
+ delete window.location;
+ });
+
+ beforeEach(() => {
+ window.location = { search: '' };
mock = new MockAdapter(axios);
pipelines = getJSONFixture(jsonFixtureName);
@@ -170,7 +173,7 @@ describe('Pipelines', () => {
it('renders tab empty state finished scope', () => {
wrapper.vm.scope = 'finished';
- return wrapper.vm.$nextTick().then(() => {
+ return nextTick().then(() => {
expect(findBlankState().text()).toBe('There are currently no finished pipelines.');
});
});
@@ -194,16 +197,8 @@ describe('Pipelines', () => {
});
it('renders empty state', () => {
- expect(
- findEmptyState()
- .find('h4')
- .text(),
- ).toBe('Build with confidence');
- expect(
- findEmptyState()
- .find(GlButton)
- .attributes('href'),
- ).toBe(paths.helpPagePath);
+ expect(findEmptyState().find('h4').text()).toBe('Build with confidence');
+ expect(findEmptyState().find(GlButton).attributes('href')).toBe(paths.helpPagePath);
});
it('does not render tabs nor buttons', () => {
@@ -320,11 +315,7 @@ describe('Pipelines', () => {
'This project is not currently set up to run pipelines.',
);
- expect(
- findEmptyState()
- .find(GlButton)
- .exists(),
- ).toBeFalsy();
+ expect(findEmptyState().find(GlButton).exists()).toBeFalsy();
});
it('does not render tabs or buttons', () => {
@@ -394,30 +385,23 @@ describe('Pipelines', () => {
});
it('should make an API request when using tabs', () => {
- const updateContentMock = jest.fn(() => {});
- createComponent(
- { hasGitlabCi: true, canCreatePipeline: true, ...paths },
- {
- updateContent: updateContentMock,
- },
- );
+ createComponent({ hasGitlabCi: true, canCreatePipeline: true, ...paths });
+ jest.spyOn(wrapper.vm.service, 'getPipelines');
return waitForPromises().then(() => {
findTab('finished').trigger('click');
- expect(updateContentMock).toHaveBeenCalledWith({ scope: 'finished', page: '1' });
+ expect(wrapper.vm.service.getPipelines).toHaveBeenCalledWith({
+ scope: 'finished',
+ page: '1',
+ });
});
});
describe('with pagination', () => {
it('should make an API request when using pagination', () => {
- const updateContentMock = jest.fn(() => {});
- createComponent(
- { hasGitlabCi: true, canCreatePipeline: true, ...paths },
- {
- updateContent: updateContentMock,
- },
- );
+ createComponent({ hasGitlabCi: true, canCreatePipeline: true, ...paths });
+ jest.spyOn(wrapper.vm.service, 'getPipelines');
return waitForPromises()
.then(() => {
@@ -430,12 +414,14 @@ describe('Pipelines', () => {
totalPages: 5,
};
- return wrapper.vm.$nextTick();
+ return nextTick();
})
.then(() => {
wrapper.find('.next-page-item').trigger('click');
-
- expect(updateContentMock).toHaveBeenCalledWith({ scope: 'all', page: '2' });
+ expect(wrapper.vm.service.getPipelines).toHaveBeenCalledWith({
+ scope: 'all',
+ page: '2',
+ });
});
});
});
@@ -554,7 +540,7 @@ describe('Pipelines', () => {
wrapper.vm.hasError = true;
wrapper.vm.isLoading = false;
- return wrapper.vm.$nextTick().then(() => {
+ return nextTick().then(() => {
expect(findBlankState().props('message')).toBe(
'There was an error fetching the pipelines. Try again in a few moments or contact your support team.',
);
@@ -566,7 +552,7 @@ describe('Pipelines', () => {
wrapper.vm.hasError = false;
wrapper.vm.state.pipelines = pipelines.pipelines;
- return wrapper.vm.$nextTick().then(() => {
+ return nextTick().then(() => {
expect(wrapper.find(PipelinesTableComponent).exists()).toBe(true);
});
});
@@ -575,7 +561,7 @@ describe('Pipelines', () => {
wrapper.vm.state.count.all = 10;
wrapper.vm.isLoading = false;
- return wrapper.vm.$nextTick().then(() => {
+ return nextTick().then(() => {
expect(findBlankState().exists()).toBe(true);
expect(findBlankState().props('message')).toBe('There are currently no pipelines.');
});
@@ -584,7 +570,7 @@ describe('Pipelines', () => {
it('shows empty tab when project has CI', () => {
wrapper.vm.isLoading = false;
- return wrapper.vm.$nextTick().then(() => {
+ return nextTick().then(() => {
expect(findBlankState().exists()).toBe(true);
expect(findBlankState().props('message')).toBe('There are currently no pipelines.');
});
@@ -595,7 +581,7 @@ describe('Pipelines', () => {
wrapper.vm.isLoading = false;
- return wrapper.vm.$nextTick().then(() => {
+ return nextTick().then(() => {
expect(wrapper.find(EmptyState).exists()).toBe(true);
});
});
@@ -606,7 +592,7 @@ describe('Pipelines', () => {
wrapper.vm.isLoading = true;
wrapper.vm.hasMadeRequest = true;
- return wrapper.vm.$nextTick().then(() => {
+ return nextTick().then(() => {
expect(findNavigationTabs().exists()).toBe(true);
});
});
@@ -616,7 +602,7 @@ describe('Pipelines', () => {
wrapper.vm.state.pipelines = pipelines.pipelines;
wrapper.vm.hasMadeRequest = true;
- return wrapper.vm.$nextTick().then(() => {
+ return nextTick().then(() => {
expect(findNavigationTabs().exists()).toBe(true);
});
});
@@ -626,7 +612,7 @@ describe('Pipelines', () => {
wrapper.vm.hasError = true;
wrapper.vm.hasMadeRequest = true;
- return wrapper.vm.$nextTick().then(() => {
+ return nextTick().then(() => {
expect(findNavigationTabs().exists()).toBe(true);
});
});
@@ -636,7 +622,7 @@ describe('Pipelines', () => {
wrapper.vm.state.count.all = 10;
wrapper.vm.hasMadeRequest = true;
- return wrapper.vm.$nextTick().then(() => {
+ return nextTick().then(() => {
expect(findNavigationTabs().exists()).toBe(true);
});
});
@@ -644,7 +630,7 @@ describe('Pipelines', () => {
it('returns false when has not made first request', () => {
wrapper.vm.hasMadeRequest = false;
- return wrapper.vm.$nextTick().then(() => {
+ return nextTick().then(() => {
expect(findNavigationTabs().exists()).toBe(false);
});
});
@@ -655,7 +641,7 @@ describe('Pipelines', () => {
wrapper.vm.isLoading = false;
wrapper.vm.hasMadeRequest = true;
- return wrapper.vm.$nextTick().then(() => {
+ return nextTick().then(() => {
expect(findNavigationTabs().exists()).toBe(false);
});
});
@@ -665,7 +651,7 @@ describe('Pipelines', () => {
it('returns true when it has paths & has made the first request', () => {
wrapper.vm.hasMadeRequest = true;
- return wrapper.vm.$nextTick().then(() => {
+ return nextTick().then(() => {
expect(findNavigationControls().exists()).toBe(true);
});
});
@@ -673,7 +659,7 @@ describe('Pipelines', () => {
it('returns false when it has not made the first request', () => {
wrapper.vm.hasMadeRequest = false;
- return wrapper.vm.$nextTick().then(() => {
+ return nextTick().then(() => {
expect(findNavigationControls().exists()).toBe(false);
});
});
@@ -692,7 +678,7 @@ describe('Pipelines', () => {
return waitForPromises();
});
- it('updates request data and query params on filter submit', () => {
+ it('updates request data and query params on filter submit', async () => {
const expectedQueryParams = {
page: '1',
scope: 'all',
@@ -702,15 +688,17 @@ describe('Pipelines', () => {
};
findFilteredSearch().vm.$emit('submit', mockSearch);
+ await nextTick();
expect(wrapper.vm.requestData).toEqual(expectedQueryParams);
expect(updateContentMock).toHaveBeenCalledWith(expectedQueryParams);
});
- it('does not add query params if raw text search is used', () => {
+ it('does not add query params if raw text search is used', async () => {
const expectedQueryParams = { page: '1', scope: 'all' };
findFilteredSearch().vm.$emit('submit', ['rawText']);
+ await nextTick();
expect(wrapper.vm.requestData).toEqual(expectedQueryParams);
expect(updateContentMock).toHaveBeenCalledWith(expectedQueryParams);
diff --git a/spec/frontend/pipelines/pipelines_store_spec.js b/spec/frontend/pipelines/pipelines_store_spec.js
index ce21f788ed5..f374ecd0c0a 100644
--- a/spec/frontend/pipelines/pipelines_store_spec.js
+++ b/spec/frontend/pipelines/pipelines_store_spec.js
@@ -21,7 +21,10 @@ describe('Pipelines Store', () => {
});
it('should store the provided array', () => {
- const array = [{ id: 1, status: 'running' }, { id: 2, status: 'success' }];
+ const array = [
+ { id: 1, status: 'running' },
+ { id: 2, status: 'success' },
+ ];
store.storePipelines(array);
expect(store.state.pipelines).toEqual(array);
diff --git a/spec/frontend/pipelines/pipelines_table_row_spec.js b/spec/frontend/pipelines/pipelines_table_row_spec.js
index 32d53c0f1f8..9cdd24b2ab5 100644
--- a/spec/frontend/pipelines/pipelines_table_row_spec.js
+++ b/spec/frontend/pipelines/pipelines_table_row_spec.js
@@ -5,7 +5,7 @@ import eventHub from '~/pipelines/event_hub';
describe('Pipelines Table Row', () => {
const jsonFixtureName = 'pipelines/pipelines.json';
- const createWrapper = pipeline =>
+ const createWrapper = (pipeline) =>
mount(PipelinesTableRowComponent, {
propsData: {
pipeline,
@@ -24,9 +24,9 @@ describe('Pipelines Table Row', () => {
beforeEach(() => {
const { pipelines } = getJSONFixture(jsonFixtureName);
- pipeline = pipelines.find(p => p.user !== null && p.commit !== null);
- pipelineWithoutAuthor = pipelines.find(p => p.user === null && p.commit !== null);
- pipelineWithoutCommit = pipelines.find(p => p.user === null && p.commit === null);
+ pipeline = pipelines.find((p) => p.user !== null && p.commit !== null);
+ pipelineWithoutAuthor = pipelines.find((p) => p.user === null && p.commit !== null);
+ pipelineWithoutCommit = pipelines.find((p) => p.user === null && p.commit === null);
});
afterEach(() => {
@@ -82,10 +82,7 @@ describe('Pipelines Table Row', () => {
).toEqual(pipeline.user.path);
expect(
- wrapper
- .find('.table-section:nth-child(3) .js-user-avatar-image-toolip')
- .text()
- .trim(),
+ wrapper.find('.table-section:nth-child(3) .js-user-avatar-image-tooltip').text().trim(),
).toEqual(pipeline.user.name);
});
});
@@ -112,7 +109,7 @@ describe('Pipelines Table Row', () => {
const commitAuthorLink = commitAuthorElement.attributes('href');
const commitAuthorName = commitAuthorElement
- .find('.js-user-avatar-image-toolip')
+ .find('.js-user-avatar-image-tooltip')
.text()
.trim();
@@ -190,7 +187,7 @@ describe('Pipelines Table Row', () => {
});
it('emits `retryPipeline` event when retry button is clicked and toggles loading', () => {
- eventHub.$on('retryPipeline', endpoint => {
+ eventHub.$on('retryPipeline', (endpoint) => {
expect(endpoint).toBe('/retry');
});
@@ -199,7 +196,7 @@ describe('Pipelines Table Row', () => {
});
it('emits `openConfirmationModal` event when cancel button is clicked and toggles loading', () => {
- eventHub.$once('openConfirmationModal', data => {
+ eventHub.$once('openConfirmationModal', (data) => {
const { id, ref, commit } = pipeline;
expect(data.endpoint).toBe('/cancel');
@@ -215,7 +212,7 @@ describe('Pipelines Table Row', () => {
wrapper.find('.js-pipelines-cancel-button').trigger('click');
});
- it('renders a loading icon when `cancelingPipeline` matches pipeline id', done => {
+ it('renders a loading icon when `cancelingPipeline` matches pipeline id', (done) => {
wrapper.setProps({ cancelingPipeline: pipeline.id });
wrapper.vm
.$nextTick()
diff --git a/spec/frontend/pipelines/pipelines_table_spec.js b/spec/frontend/pipelines/pipelines_table_spec.js
index c7d104bbde8..fd73d507919 100644
--- a/spec/frontend/pipelines/pipelines_table_spec.js
+++ b/spec/frontend/pipelines/pipelines_table_spec.js
@@ -24,7 +24,7 @@ describe('Pipelines Table', () => {
beforeEach(() => {
const { pipelines } = getJSONFixture(jsonFixtureName);
- pipeline = pipelines.find(p => p.user !== null && p.commit !== null);
+ pipeline = pipelines.find((p) => p.user !== null && p.commit !== null);
createComponent();
});
diff --git a/spec/frontend/pipelines/shared/links_layer_spec.js b/spec/frontend/pipelines/shared/links_layer_spec.js
new file mode 100644
index 00000000000..9ef5233dbce
--- /dev/null
+++ b/spec/frontend/pipelines/shared/links_layer_spec.js
@@ -0,0 +1,99 @@
+import { mount, shallowMount } from '@vue/test-utils';
+import { GlAlert, GlButton } from '@gitlab/ui';
+import LinksLayer from '~/pipelines/components/graph_shared/links_layer.vue';
+import LinksInner from '~/pipelines/components/graph_shared/links_inner.vue';
+import { generateResponse, mockPipelineResponse } from '../graph/mock_data';
+
+describe('links layer component', () => {
+ let wrapper;
+
+ const findAlert = () => wrapper.find(GlAlert);
+ const findShowAnyways = () => findAlert().find(GlButton);
+ const findLinksInner = () => wrapper.find(LinksInner);
+
+ const pipeline = generateResponse(mockPipelineResponse, 'root/fungi-xoxo');
+ const containerId = `pipeline-links-container-${pipeline.id}`;
+ const slotContent = "<div>Ceci n'est pas un graphique</div>";
+
+ const tooManyStages = Array(101)
+ .fill(0)
+ .flatMap(() => pipeline.stages);
+
+ const defaultProps = {
+ containerId,
+ containerMeasurements: { width: 400, height: 400 },
+ pipelineId: pipeline.id,
+ pipelineData: pipeline.stages,
+ };
+
+ const createComponent = ({ mountFn = shallowMount, props = {} } = {}) => {
+ wrapper = mountFn(LinksLayer, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ slots: {
+ default: slotContent,
+ },
+ stubs: {
+ 'links-inner': true,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('with data under max stages', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders the default slot', () => {
+ expect(wrapper.html()).toContain(slotContent);
+ });
+
+ it('renders the inner links component', () => {
+ expect(findLinksInner().exists()).toBe(true);
+ });
+ });
+
+ describe('with more than the max number of stages', () => {
+ describe('rendering', () => {
+ beforeEach(() => {
+ createComponent({ props: { pipelineData: tooManyStages } });
+ });
+
+ it('renders the default slot', () => {
+ expect(wrapper.html()).toContain(slotContent);
+ });
+
+ it('renders the alert component', () => {
+ expect(findAlert().exists()).toBe(true);
+ });
+
+ it('does not render the inner links component', () => {
+ expect(findLinksInner().exists()).toBe(false);
+ });
+ });
+
+ describe('interactions', () => {
+ beforeEach(() => {
+ createComponent({ mountFn: mount, props: { pipelineData: tooManyStages } });
+ });
+
+ it('renders the disable button', () => {
+ expect(findShowAnyways().exists()).toBe(true);
+ expect(findShowAnyways().text()).toBe(wrapper.vm.$options.i18n.showLinksAnyways);
+ });
+
+ it('shows links when override is clicked', async () => {
+ expect(findLinksInner().exists()).toBe(false);
+ await findShowAnyways().trigger('click');
+ expect(findLinksInner().exists()).toBe(true);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/stage_spec.js b/spec/frontend/pipelines/stage_spec.js
index e134b81856b..e4782a1dab1 100644
--- a/spec/frontend/pipelines/stage_spec.js
+++ b/spec/frontend/pipelines/stage_spec.js
@@ -1,6 +1,6 @@
+import 'bootstrap/js/dist/dropdown';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
-import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import StageComponent from '~/pipelines/components/pipelines_list/stage.vue';
import eventHub from '~/pipelines/event_hub';
@@ -22,8 +22,11 @@ describe('Pipelines stage component', () => {
updateDropdown: false,
};
+ const isDropdownOpen = () => wrapper.classes('show');
+
const createComponent = (props = {}) => {
wrapper = mount(StageComponent, {
+ attachTo: document.body,
propsData: {
...defaultProps,
...props,
@@ -60,38 +63,29 @@ describe('Pipelines stage component', () => {
createComponent();
});
- it('should render the received data and emit `clickedDropdown` event', () => {
+ it('should render the received data and emit `clickedDropdown` event', async () => {
jest.spyOn(eventHub, '$emit');
wrapper.find('button').trigger('click');
- return waitForPromises().then(() => {
- expect(wrapper.find('.js-builds-dropdown-container ul').text()).toContain(
- stageReply.latest_statuses[0].name,
- );
+ await axios.waitForAll();
+ expect(wrapper.find('.js-builds-dropdown-container ul').text()).toContain(
+ stageReply.latest_statuses[0].name,
+ );
- expect(eventHub.$emit).toHaveBeenCalledWith('clickedDropdown');
- });
+ expect(eventHub.$emit).toHaveBeenCalledWith('clickedDropdown');
});
});
- describe('when request fails', () => {
- beforeEach(() => {
- mock.onGet('path.json').reply(500);
- createComponent();
- });
+ it('should close the dropdown when the request fails', async () => {
+ mock.onGet('path.json').reply(500);
+ createComponent();
+ wrapper.find({ ref: 'dropdown' }).trigger('click');
+ expect(isDropdownOpen()).toBe(true);
- it('should close the dropdown', () => {
- wrapper.setMethods({
- closeDropdown: jest.fn(),
- isDropdownOpen: jest.fn().mockReturnValue(false),
- });
+ wrapper.find('button').trigger('click');
+ await axios.waitForAll();
- wrapper.find('button').trigger('click');
-
- return waitForPromises().then(() => {
- expect(wrapper.vm.closeDropdown).toHaveBeenCalled();
- });
- });
+ expect(isDropdownOpen()).toBe(false);
});
describe('update endpoint correctly', () => {
@@ -109,47 +103,38 @@ describe('Pipelines stage component', () => {
dropdown_path: 'bar.json',
},
});
+ return axios.waitForAll();
});
- it('should update the stage to request the new endpoint provided', () => {
- return wrapper.vm
- .$nextTick()
- .then(() => {
- wrapper.find('button').trigger('click');
- return waitForPromises();
- })
- .then(() => {
- expect(wrapper.find('.js-builds-dropdown-container ul').text()).toContain(
- 'this is the updated content',
- );
- });
+ it('should update the stage to request the new endpoint provided', async () => {
+ wrapper.find('button').trigger('click');
+ await axios.waitForAll();
+
+ expect(wrapper.find('.js-builds-dropdown-container ul').text()).toContain(
+ 'this is the updated content',
+ );
});
});
describe('pipelineActionRequestComplete', () => {
beforeEach(() => {
mock.onGet('path.json').reply(200, stageReply);
-
mock.onPost(`${stageReply.latest_statuses[0].status.action.path}.json`).reply(200);
createComponent({ type: 'PIPELINES_TABLE' });
});
describe('within pipeline table', () => {
- it('emits `refreshPipelinesTable` event when `pipelineActionRequestComplete` is triggered', () => {
+ it('emits `refreshPipelinesTable` event when `pipelineActionRequestComplete` is triggered', async () => {
jest.spyOn(eventHub, '$emit');
wrapper.find('button').trigger('click');
+ await axios.waitForAll();
- return waitForPromises()
- .then(() => {
- wrapper.find('.js-ci-action').trigger('click');
+ wrapper.find('.js-ci-action').trigger('click');
+ await axios.waitForAll();
- return waitForPromises();
- })
- .then(() => {
- expect(eventHub.$emit).toHaveBeenCalledWith('refreshPipelinesTable');
- });
+ expect(eventHub.$emit).toHaveBeenCalledWith('refreshPipelinesTable');
});
});
});
diff --git a/spec/frontend/pipelines/stores/pipeline_store_spec.js b/spec/frontend/pipelines/stores/pipeline_store_spec.js
index 68d438109b3..2daf7e4b324 100644
--- a/spec/frontend/pipelines/stores/pipeline_store_spec.js
+++ b/spec/frontend/pipelines/stores/pipeline_store_spec.js
@@ -32,14 +32,14 @@ describe('EE Pipeline store', () => {
describe('triggered', () => {
it('adds isExpanding & isLoading keys set to false for each triggered pipeline', () => {
- store.state.pipeline.triggered.forEach(pipeline => {
+ store.state.pipeline.triggered.forEach((pipeline) => {
expect(pipeline.isExpanded).toEqual(false);
expect(pipeline.isLoading).toEqual(false);
});
});
it('parses nested triggered pipelines', () => {
- store.state.pipeline.triggered[1].triggered.forEach(pipeline => {
+ store.state.pipeline.triggered[1].triggered.forEach((pipeline) => {
expect(pipeline.isExpanded).toEqual(false);
expect(pipeline.isLoading).toEqual(false);
});
diff --git a/spec/frontend/pipelines/test_reports/stores/actions_spec.js b/spec/frontend/pipelines/test_reports/stores/actions_spec.js
index 1809f15a6e6..f7ff36c0a46 100644
--- a/spec/frontend/pipelines/test_reports/stores/actions_spec.js
+++ b/spec/frontend/pipelines/test_reports/stores/actions_spec.js
@@ -1,10 +1,10 @@
import MockAdapter from 'axios-mock-adapter';
import { getJSONFixture } from 'helpers/fixtures';
+import { TEST_HOST } from 'helpers/test_constants';
+import testAction from 'helpers/vuex_action_helper';
import axios from '~/lib/utils/axios_utils';
import * as actions from '~/pipelines/stores/test_reports/actions';
import * as types from '~/pipelines/stores/test_reports/mutation_types';
-import { TEST_HOST } from '../../../helpers/test_constants';
-import testAction from '../../../helpers/vuex_action_helper';
import { deprecatedCreateFlash as createFlash } from '~/flash';
jest.mock('~/flash.js');
@@ -39,7 +39,7 @@ describe('Actions TestReports Store', () => {
mock.onGet(summaryEndpoint).replyOnce(200, summary, {});
});
- it('sets testReports and shows tests', done => {
+ it('sets testReports and shows tests', (done) => {
testAction(
actions.fetchSummary,
null,
@@ -50,7 +50,7 @@ describe('Actions TestReports Store', () => {
);
});
- it('should create flash on API error', done => {
+ it('should create flash on API error', (done) => {
testAction(
actions.fetchSummary,
null,
@@ -75,7 +75,7 @@ describe('Actions TestReports Store', () => {
.replyOnce(200, testReports.test_suites[0], {});
});
- it('sets test suite and shows tests', done => {
+ it('sets test suite and shows tests', (done) => {
const suite = testReports.test_suites[0];
const index = 0;
@@ -89,7 +89,7 @@ describe('Actions TestReports Store', () => {
);
});
- it('should create flash on API error', done => {
+ it('should create flash on API error', (done) => {
const index = 0;
testAction(
@@ -106,7 +106,7 @@ describe('Actions TestReports Store', () => {
});
describe('when we already have the suite data', () => {
- it('should not fetch suite', done => {
+ it('should not fetch suite', (done) => {
const index = 0;
testReports.test_suites[0].hasFullSuite = true;
@@ -116,7 +116,7 @@ describe('Actions TestReports Store', () => {
});
describe('set selected suite index', () => {
- it('sets selectedSuiteIndex', done => {
+ it('sets selectedSuiteIndex', (done) => {
const selectedSuiteIndex = 0;
testAction(
@@ -131,7 +131,7 @@ describe('Actions TestReports Store', () => {
});
describe('remove selected suite index', () => {
- it('sets selectedSuiteIndex to null', done => {
+ it('sets selectedSuiteIndex to null', (done) => {
testAction(
actions.removeSelectedSuiteIndex,
{},
@@ -144,11 +144,11 @@ describe('Actions TestReports Store', () => {
});
describe('toggles loading', () => {
- it('sets isLoading to true', done => {
+ it('sets isLoading to true', (done) => {
testAction(actions.toggleLoading, {}, state, [{ type: types.TOGGLE_LOADING }], [], done);
});
- it('toggles isLoading to false', done => {
+ it('toggles isLoading to false', (done) => {
testAction(
actions.toggleLoading,
{},
diff --git a/spec/frontend/pipelines/test_reports/stores/getters_spec.js b/spec/frontend/pipelines/test_reports/stores/getters_spec.js
index 8cef499fdb9..7382a6beefa 100644
--- a/spec/frontend/pipelines/test_reports/stores/getters_spec.js
+++ b/spec/frontend/pipelines/test_reports/stores/getters_spec.js
@@ -40,7 +40,7 @@ describe('Getters TestReports Store', () => {
setupState();
const suites = getters.getTestSuites(state);
- const expected = testReports.test_suites.map(x => ({
+ const expected = testReports.test_suites.map((x) => ({
...x,
formattedTime: formattedTime(x.total_time),
}));
@@ -72,7 +72,7 @@ describe('Getters TestReports Store', () => {
const cases = getters.getSuiteTests(state);
const expected = testReports.test_suites[0].test_cases
- .map(x => ({
+ .map((x) => ({
...x,
formattedTime: formattedTime(x.execution_time),
icon: iconForTestStatus(x.status),
diff --git a/spec/frontend/pipelines/test_reports/test_case_details_spec.js b/spec/frontend/pipelines/test_reports/test_case_details_spec.js
index 9e66012818e..bfb8b43778d 100644
--- a/spec/frontend/pipelines/test_reports/test_case_details_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_case_details_spec.js
@@ -44,7 +44,7 @@ describe('Test case details', () => {
});
it('renders the test case classname as modal title', () => {
- expect(findModal().attributes('title')).toBe(defaultTestCase.classname);
+ expect(findModal().props('title')).toBe(defaultTestCase.classname);
});
it('renders the test case name', () => {
diff --git a/spec/frontend/pipelines/test_reports/test_suite_table_spec.js b/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
index 0e00ca670a7..b8fd056610b 100644
--- a/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
@@ -23,7 +23,7 @@ describe('Test reports suite table', () => {
const noCasesMessage = () => wrapper.find('.js-no-test-cases');
const allCaseRows = () => wrapper.findAll('.js-case-row');
- const findCaseRowAtIndex = index => wrapper.findAll('.js-case-row').at(index);
+ const findCaseRowAtIndex = (index) => wrapper.findAll('.js-case-row').at(index);
const findIconForRow = (row, status) => row.find(`.ci-status-icon-${status}`);
const createComponent = (suite = testSuite, perPage = 20) => {
@@ -73,8 +73,8 @@ describe('Test reports suite table', () => {
TestStatus.SKIPPED,
TestStatus.SUCCESS,
'unknown',
- ])('renders the correct icon for test case with %s status', status => {
- const test = testCases.findIndex(x => x.status === status);
+ ])('renders the correct icon for test case with %s status', (status) => {
+ const test = testCases.findIndex((x) => x.status === status);
const row = findCaseRowAtIndex(test);
expect(findIconForRow(row, status).exists()).toBe(true);
diff --git a/spec/frontend/pipelines/test_reports/test_summary_spec.js b/spec/frontend/pipelines/test_reports/test_summary_spec.js
index dc5af7b160c..df404d87c99 100644
--- a/spec/frontend/pipelines/test_reports/test_summary_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_summary_spec.js
@@ -22,7 +22,7 @@ describe('Test reports summary', () => {
showBack: false,
};
- const createComponent = props => {
+ const createComponent = (props) => {
wrapper = mount(Summary, {
propsData: {
...defaultProps,
diff --git a/spec/frontend/pipelines/tokens/pipeline_trigger_author_token_spec.js b/spec/frontend/pipelines/tokens/pipeline_trigger_author_token_spec.js
index 375325c0c6a..371ba5a4f9b 100644
--- a/spec/frontend/pipelines/tokens/pipeline_trigger_author_token_spec.js
+++ b/spec/frontend/pipelines/tokens/pipeline_trigger_author_token_spec.js
@@ -26,7 +26,7 @@ describe('Pipeline Trigger Author Token', () => {
},
};
- const createComponent = data => {
+ const createComponent = (data) => {
wrapper = shallowMount(PipelineTriggerAuthorToken, {
propsData: {
...defaultProps,
diff --git a/spec/frontend/pipelines/unwrapping_utils_spec.js b/spec/frontend/pipelines/unwrapping_utils_spec.js
index 3533599611f..cd16ed7262e 100644
--- a/spec/frontend/pipelines/unwrapping_utils_spec.js
+++ b/spec/frontend/pipelines/unwrapping_utils_spec.js
@@ -1,5 +1,4 @@
import {
- unwrapArrayOfJobs,
unwrapGroups,
unwrapNodesWithName,
unwrapStagesWithNeeds,
@@ -89,35 +88,12 @@ const completeMock = [
{
...basicStageInfo,
groups: {
- nodes: groupsArray.map(group => ({ ...group, jobs: { nodes: jobArrayWithNeeds } })),
+ nodes: groupsArray.map((group) => ({ ...group, jobs: { nodes: jobArrayWithNeeds } })),
},
},
];
describe('Shared pipeline unwrapping utils', () => {
- describe('unwrapArrayOfJobs', () => {
- it('returns an empty array if the input is an empty undefined', () => {
- expect(unwrapArrayOfJobs(undefined)).toEqual([]);
- });
-
- it('returns an empty array if the input is an empty array', () => {
- expect(unwrapArrayOfJobs([])).toEqual([]);
- });
-
- it('returns a flatten array of each job with their data and stage name', () => {
- expect(
- unwrapArrayOfJobs([
- { name: 'build', groups: [{ name: 'job_a_1' }, { name: 'job_a_2' }] },
- { name: 'test', groups: [{ name: 'job_b' }] },
- ]),
- ).toMatchObject([
- { category: 'build', name: 'job_a_1' },
- { category: 'build', name: 'job_a_2' },
- { category: 'test', name: 'job_b' },
- ]);
- });
- });
-
describe('unwrapGroups', () => {
it('takes stages without nodes and returns the unwrapped groups', () => {
expect(unwrapGroups(stagesAndGroups)[0].groups).toEqual(groupsArray);
diff --git a/spec/frontend/profile/account/components/delete_account_modal_spec.js b/spec/frontend/profile/account/components/delete_account_modal_spec.js
index 7834456f7c4..63e27473979 100644
--- a/spec/frontend/profile/account/components/delete_account_modal_spec.js
+++ b/spec/frontend/profile/account/components/delete_account_modal_spec.js
@@ -56,7 +56,7 @@ describe('DeleteAccountModal component', () => {
const findModal = () => wrapper.find(GlModalStub);
describe('with password confirmation', () => {
- beforeEach(done => {
+ beforeEach((done) => {
createWrapper({
propsData: {
confirmWithPassword: true,
@@ -65,12 +65,10 @@ describe('DeleteAccountModal component', () => {
vm.isOpen = true;
- Vue.nextTick()
- .then(done)
- .catch(done.fail);
+ Vue.nextTick().then(done).catch(done.fail);
});
- it('does not accept empty password', done => {
+ it('does not accept empty password', (done) => {
const { form, input } = findElements();
jest.spyOn(form, 'submit').mockImplementation(() => {});
input.value = '';
@@ -88,7 +86,7 @@ describe('DeleteAccountModal component', () => {
.catch(done.fail);
});
- it('submits form with password', done => {
+ it('submits form with password', (done) => {
const { form, input } = findElements();
jest.spyOn(form, 'submit').mockImplementation(() => {});
input.value = 'anything';
@@ -108,7 +106,7 @@ describe('DeleteAccountModal component', () => {
});
describe('with username confirmation', () => {
- beforeEach(done => {
+ beforeEach((done) => {
createWrapper({
propsData: {
confirmWithPassword: false,
@@ -117,12 +115,10 @@ describe('DeleteAccountModal component', () => {
vm.isOpen = true;
- Vue.nextTick()
- .then(done)
- .catch(done.fail);
+ Vue.nextTick().then(done).catch(done.fail);
});
- it('does not accept wrong username', done => {
+ it('does not accept wrong username', (done) => {
const { form, input } = findElements();
jest.spyOn(form, 'submit').mockImplementation(() => {});
input.value = 'this is wrong';
@@ -140,7 +136,7 @@ describe('DeleteAccountModal component', () => {
.catch(done.fail);
});
- it('submits form with correct username', done => {
+ it('submits form with correct username', (done) => {
const { form, input } = findElements();
jest.spyOn(form, 'submit').mockImplementation(() => {});
input.value = username;
diff --git a/spec/frontend/profile/account/components/update_username_spec.js b/spec/frontend/profile/account/components/update_username_spec.js
index 45e5e0f885f..91c3c81ab30 100644
--- a/spec/frontend/profile/account/components/update_username_spec.js
+++ b/spec/frontend/profile/account/components/update_username_spec.js
@@ -84,7 +84,7 @@ describe('UpdateUsername component', () => {
it('confirmation modal contains proper header and body', async () => {
const { modal } = findElements();
- expect(modal.attributes('title')).toBe('Change username?');
+ expect(modal.props('title')).toBe('Change username?');
expect(modal.text()).toContain(
`You are going to change the username ${defaultProps.initialUsername} to ${newUsername}`,
);
diff --git a/spec/frontend/project_find_file_spec.js b/spec/frontend/project_find_file_spec.js
index 6a50f68a4e9..1af97dbca0a 100644
--- a/spec/frontend/project_find_file_spec.js
+++ b/spec/frontend/project_find_file_spec.js
@@ -7,7 +7,7 @@ import axios from '~/lib/utils/axios_utils';
jest.mock('~/lib/dompurify', () => ({
addHook: jest.fn(),
- sanitize: jest.fn(val => val),
+ sanitize: jest.fn((val) => val),
}));
const BLOB_URL_TEMPLATE = `${TEST_HOST}/namespace/project/blob/master`;
@@ -39,7 +39,7 @@ describe('ProjectFindFile', () => {
element
.find('.tree-table tr')
.toArray()
- .map(el => ({
+ .map((el) => ({
text: el.textContent,
href: el.querySelector('a').href,
}));
@@ -53,12 +53,15 @@ describe('ProjectFindFile', () => {
{ path: 'folde?rC/fil#F.txt', escaped: 'folde%3FrC/fil%23F.txt' },
];
- beforeEach(done => {
+ beforeEach((done) => {
// Create a mock adapter for stubbing axios API requests
mock = new MockAdapter(axios);
element = $(TEMPLATE);
- mock.onGet(FILE_FIND_URL).replyOnce(200, files.map(x => x.path));
+ mock.onGet(FILE_FIND_URL).replyOnce(
+ 200,
+ files.map((x) => x.path),
+ );
getProjectFindFileInstance(); // This triggers a load / axios call + subsequent render in the constructor
setImmediate(done);
diff --git a/spec/frontend/projects/commit/components/branches_dropdown_spec.js b/spec/frontend/projects/commit/components/branches_dropdown_spec.js
new file mode 100644
index 00000000000..9fa7d658405
--- /dev/null
+++ b/spec/frontend/projects/commit/components/branches_dropdown_spec.js
@@ -0,0 +1,166 @@
+import Vue from 'vue';
+import Vuex from 'vuex';
+import { shallowMount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { GlDropdownItem, GlSearchBoxByType } from '@gitlab/ui';
+import BranchesDropdown from '~/projects/commit/components/branches_dropdown.vue';
+
+Vue.use(Vuex);
+
+describe('BranchesDropdown', () => {
+ let wrapper;
+ let store;
+ const spyFetchBranches = jest.fn();
+
+ const createComponent = (term, state = { isFetching: false }) => {
+ store = new Vuex.Store({
+ getters: {
+ joinedBranches: () => ['_master_', '_branch_1_', '_branch_2_'],
+ },
+ actions: {
+ fetchBranches: spyFetchBranches,
+ },
+ state,
+ });
+
+ wrapper = extendedWrapper(
+ shallowMount(BranchesDropdown, {
+ store,
+ propsData: {
+ value: term,
+ },
+ }),
+ );
+ };
+
+ const findAllDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
+ const findSearchBoxByType = () => wrapper.findComponent(GlSearchBoxByType);
+ const findDropdownItemByIndex = (index) => wrapper.findAllComponents(GlDropdownItem).at(index);
+ const findNoResults = () => wrapper.findByTestId('empty-result-message');
+ const findLoading = () => wrapper.findByTestId('dropdown-text-loading-icon');
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ spyFetchBranches.mockReset();
+ });
+
+ describe('On mount', () => {
+ beforeEach(() => {
+ createComponent('');
+ });
+
+ it('invokes fetchBranches', () => {
+ expect(spyFetchBranches).toHaveBeenCalled();
+ });
+ });
+
+ describe('Loading states', () => {
+ it('shows loading icon while fetching', () => {
+ createComponent('', { isFetching: true });
+
+ expect(findLoading().isVisible()).toBe(true);
+ });
+
+ it('does not show loading icon', () => {
+ createComponent('');
+
+ expect(findLoading().isVisible()).toBe(false);
+ });
+ });
+
+ describe('No branches found', () => {
+ beforeEach(() => {
+ createComponent('_non_existent_branch_');
+ });
+
+ it('renders empty results message', () => {
+ expect(findNoResults().text()).toBe('No matching results');
+ });
+
+ it('shows GlSearchBoxByType with default attributes', () => {
+ expect(findSearchBoxByType().exists()).toBe(true);
+ expect(findSearchBoxByType().vm.$attrs).toMatchObject({
+ placeholder: 'Search branches',
+ debounce: 250,
+ });
+ });
+ });
+
+ describe('Search term is empty', () => {
+ beforeEach(() => {
+ createComponent('');
+ });
+
+ it('renders all branches when search term is empty', () => {
+ expect(findAllDropdownItems()).toHaveLength(3);
+ expect(findDropdownItemByIndex(0).text()).toBe('_master_');
+ expect(findDropdownItemByIndex(1).text()).toBe('_branch_1_');
+ expect(findDropdownItemByIndex(2).text()).toBe('_branch_2_');
+ });
+
+ it('should not mark an inactive branch as selected', () => {
+ expect(wrapper.vm.isSelected('_master_')).toBe(false);
+ });
+ });
+
+ describe('When searching', () => {
+ beforeEach(() => {
+ createComponent('');
+ });
+
+ it('invokes fetchBranches', async () => {
+ const spy = jest.spyOn(wrapper.vm, 'fetchBranches');
+
+ findSearchBoxByType().vm.$emit('input', '_anything_');
+
+ await wrapper.vm.$nextTick();
+
+ expect(spy).toHaveBeenCalledWith('_anything_');
+ expect(wrapper.vm.searchTerm).toBe('_anything_');
+ });
+ });
+
+ describe('Branches found', () => {
+ beforeEach(() => {
+ createComponent('_branch_1_', { branch: '_branch_1_' });
+ });
+
+ it('renders only the branch searched for', () => {
+ expect(findAllDropdownItems()).toHaveLength(1);
+ expect(findDropdownItemByIndex(0).text()).toBe('_branch_1_');
+ });
+
+ it('should not display empty results message', () => {
+ expect(findNoResults().exists()).toBe(false);
+ });
+
+ it('should signify this branch is selected', () => {
+ expect(wrapper.vm.isSelected('_branch_1_')).toBe(true);
+ });
+
+ it('should signify the branch is not selected', () => {
+ expect(wrapper.vm.isSelected('_not_selected_branch_')).toBe(false);
+ });
+
+ describe('Custom events', () => {
+ it('should emit selectBranch if a branch is clicked', () => {
+ findDropdownItemByIndex(0).vm.$emit('click');
+
+ expect(wrapper.emitted('selectBranch')).toEqual([['_branch_1_']]);
+ expect(wrapper.vm.searchTerm).toBe('_branch_1_');
+ });
+ });
+ });
+
+ describe('Case insensitive for search term', () => {
+ beforeEach(() => {
+ createComponent('_BrAnCh_1_');
+ });
+
+ it('renders only the branch searched for', () => {
+ expect(findAllDropdownItems()).toHaveLength(1);
+ expect(findDropdownItemByIndex(0).text()).toBe('_branch_1_');
+ });
+ });
+});
diff --git a/spec/frontend/projects/commit/components/form_modal_spec.js b/spec/frontend/projects/commit/components/form_modal_spec.js
new file mode 100644
index 00000000000..1c37b82fed3
--- /dev/null
+++ b/spec/frontend/projects/commit/components/form_modal_spec.js
@@ -0,0 +1,155 @@
+import MockAdapter from 'axios-mock-adapter';
+import { shallowMount, mount, createWrapper } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { GlModal, GlForm, GlFormCheckbox, GlSprintf } from '@gitlab/ui';
+import { within } from '@testing-library/dom';
+import axios from '~/lib/utils/axios_utils';
+import eventHub from '~/projects/commit/event_hub';
+import CommitFormModal from '~/projects/commit/components/form_modal.vue';
+import BranchesDropdown from '~/projects/commit/components/branches_dropdown.vue';
+import createStore from '~/projects/commit/store';
+import mockData from '../mock_data';
+
+describe('CommitFormModal', () => {
+ let wrapper;
+ let store;
+ let axiosMock;
+
+ const createComponent = (method, state = {}, provide = {}) => {
+ store = createStore({ ...mockData.mockModal, ...state });
+ wrapper = extendedWrapper(
+ method(CommitFormModal, {
+ provide,
+ propsData: { ...mockData.modalPropsData },
+ store,
+ attrs: {
+ static: true,
+ visible: true,
+ },
+ }),
+ );
+ };
+
+ const findModal = () => wrapper.findComponent(GlModal);
+ const findStartBranch = () => wrapper.find('#start_branch');
+ const findDropdown = () => wrapper.findComponent(BranchesDropdown);
+ const findForm = () => findModal().findComponent(GlForm);
+ const findCheckBox = () => findForm().findComponent(GlFormCheckbox);
+ const findPrependedText = () => wrapper.findByTestId('prepended-text');
+ const findAppendedText = () => wrapper.findByTestId('appended-text');
+ const getByText = (text, options) =>
+ createWrapper(within(findModal().element).getByText(text, options));
+
+ beforeEach(() => {
+ axiosMock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ axiosMock.restore();
+ });
+
+ describe('Basic interactions', () => {
+ it('Listens for opening of modal on mount', () => {
+ jest.spyOn(eventHub, '$on');
+
+ createComponent(shallowMount);
+
+ expect(eventHub.$on).toHaveBeenCalledWith(mockData.modalPropsData.openModal, wrapper.vm.show);
+ });
+
+ it('Shows modal', () => {
+ createComponent(shallowMount);
+ const rootEmit = jest.spyOn(wrapper.vm.$root, '$emit');
+
+ wrapper.vm.show();
+
+ expect(rootEmit).toHaveBeenCalledWith('bv::show::modal', mockData.modalPropsData.modalId);
+ });
+
+ it('Clears the modal state once modal is hidden', () => {
+ createComponent(shallowMount);
+ jest.spyOn(store, 'dispatch').mockImplementation();
+ wrapper.vm.checked = false;
+
+ findModal().vm.$emit('hidden');
+
+ expect(store.dispatch).toHaveBeenCalledWith('clearModal');
+ expect(store.dispatch).toHaveBeenCalledWith('setSelectedBranch', '');
+ expect(wrapper.vm.checked).toBe(true);
+ });
+
+ it('Shows the checkbox for new merge request', () => {
+ createComponent(shallowMount);
+
+ expect(findCheckBox().exists()).toBe(true);
+ });
+
+ it('Shows the prepended text', () => {
+ createComponent(shallowMount, {}, { prependedText: '_prepended_text_' });
+
+ expect(findPrependedText().exists()).toBe(true);
+ expect(findPrependedText().find(GlSprintf).attributes('message')).toBe('_prepended_text_');
+ });
+
+ it('Does not show prepended text', () => {
+ createComponent(shallowMount);
+
+ expect(findPrependedText().exists()).toBe(false);
+ });
+
+ it('Does not show extra message text', () => {
+ createComponent(shallowMount);
+
+ expect(findModal().find('[data-testid="appended-text"]').exists()).toBe(false);
+ });
+
+ it('Does not show the checkbox for new merge request', () => {
+ createComponent(shallowMount, { pushCode: false });
+
+ expect(findCheckBox().exists()).toBe(false);
+ });
+
+ it('Shows the branch in fork message', () => {
+ createComponent(shallowMount, { pushCode: false });
+
+ expect(findAppendedText().exists()).toBe(true);
+ expect(findAppendedText().find(GlSprintf).attributes('message')).toContain(
+ mockData.modalPropsData.i18n.branchInFork,
+ );
+ });
+
+ it('Shows the branch collaboration message', () => {
+ createComponent(shallowMount, { pushCode: false, branchCollaboration: true });
+
+ expect(findAppendedText().exists()).toBe(true);
+ expect(findAppendedText().find(GlSprintf).attributes('message')).toContain(
+ mockData.modalPropsData.i18n.existingBranch,
+ );
+ });
+ });
+
+ describe('Taking action on the form', () => {
+ beforeEach(() => {
+ createComponent(mount);
+ });
+
+ it('Action primary button dispatches submit action', () => {
+ const submitSpy = jest.spyOn(findForm().element, 'submit');
+
+ getByText(mockData.modalPropsData.i18n.actionPrimaryText).trigger('click');
+
+ expect(submitSpy).toHaveBeenCalled();
+
+ submitSpy.mockRestore();
+ });
+
+ it('Changes the start_branch input value', async () => {
+ findDropdown().vm.$emit('selectBranch', '_changed_branch_value_');
+
+ await wrapper.vm.$nextTick();
+
+ expect(findStartBranch().attributes('value')).toBe('_changed_branch_value_');
+ });
+ });
+});
diff --git a/spec/frontend/projects/commit/components/form_trigger_spec.js b/spec/frontend/projects/commit/components/form_trigger_spec.js
new file mode 100644
index 00000000000..ca51419d6a5
--- /dev/null
+++ b/spec/frontend/projects/commit/components/form_trigger_spec.js
@@ -0,0 +1,44 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlLink } from '@gitlab/ui';
+import FormTrigger from '~/projects/commit/components/form_trigger.vue';
+import eventHub from '~/projects/commit/event_hub';
+
+const displayText = '_display_text_';
+
+const createComponent = () => {
+ return shallowMount(FormTrigger, {
+ provide: { displayText },
+ propsData: { openModal: '_open_modal_' },
+ });
+};
+
+describe('FormTrigger', () => {
+ let wrapper;
+ let spy;
+
+ beforeEach(() => {
+ spy = jest.spyOn(eventHub, '$emit');
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findLink = () => wrapper.find(GlLink);
+
+ describe('displayText', () => {
+ it('includes the correct displayText for the link', () => {
+ expect(findLink().text()).toBe(displayText);
+ });
+ });
+
+ describe('clicking the link', () => {
+ it('emits openModal', () => {
+ findLink().vm.$emit('click');
+
+ expect(spy).toHaveBeenCalledWith('_open_modal_');
+ });
+ });
+});
diff --git a/spec/frontend/projects/commit/mock_data.js b/spec/frontend/projects/commit/mock_data.js
new file mode 100644
index 00000000000..2b3b5a14c98
--- /dev/null
+++ b/spec/frontend/projects/commit/mock_data.js
@@ -0,0 +1,27 @@
+import { I18N_MODAL } from '~/projects/commit/constants';
+
+export default {
+ mockModal: {
+ modalTitle: '_modal_title_',
+ endpoint: '_endpoint_',
+ branch: '_branch_',
+ pushCode: true,
+ defaultBranch: '_branch_',
+ existingBranch: '_existing_branch',
+ branchesEndpoint: '_branches_endpoint_',
+ },
+ modalPropsData: {
+ i18n: {
+ branchLabel: '_branch_label_',
+ actionPrimaryText: '_action_primary_text_',
+ startMergeRequest: '_start_merge_request_',
+ existingBranch: I18N_MODAL.existingBranch,
+ branchInFork: '_new_branch_in_fork_message_',
+ newMergeRequest: '_new merge request_',
+ actionCancelText: '_action_cancel_text_',
+ },
+ modalId: '_modal_id_',
+ openModal: '_open_modal_',
+ },
+ mockBranches: ['_branch_1', '_abc_', '_master_'],
+};
diff --git a/spec/frontend/projects/commit/store/actions_spec.js b/spec/frontend/projects/commit/store/actions_spec.js
new file mode 100644
index 00000000000..ec528d4ee88
--- /dev/null
+++ b/spec/frontend/projects/commit/store/actions_spec.js
@@ -0,0 +1,111 @@
+import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
+import axios from '~/lib/utils/axios_utils';
+import createFlash from '~/flash';
+import getInitialState from '~/projects/commit/store/state';
+import * as actions from '~/projects/commit/store/actions';
+import * as types from '~/projects/commit/store/mutation_types';
+import mockData from '../mock_data';
+import { PROJECT_BRANCHES_ERROR } from '~/projects/commit/constants';
+
+jest.mock('~/flash.js');
+
+describe('Commit form modal store actions', () => {
+ let axiosMock;
+ let state;
+
+ beforeEach(() => {
+ axiosMock = new MockAdapter(axios);
+ state = getInitialState();
+ });
+
+ afterEach(() => {
+ axiosMock.restore();
+ });
+
+ describe('clearModal', () => {
+ it('commits CLEAR_MODAL mutation', () => {
+ testAction(actions.clearModal, {}, {}, [
+ {
+ type: types.CLEAR_MODAL,
+ },
+ ]);
+ });
+ });
+
+ describe('requestBranches', () => {
+ it('commits REQUEST_BRANCHES mutation', () => {
+ testAction(actions.requestBranches, {}, {}, [
+ {
+ type: types.REQUEST_BRANCHES,
+ },
+ ]);
+ });
+ });
+
+ describe('fetchBranches', () => {
+ it('dispatches correct actions on fetchBranches', (done) => {
+ jest
+ .spyOn(axios, 'get')
+ .mockImplementation(() => Promise.resolve({ data: mockData.mockBranches }));
+
+ testAction(
+ actions.fetchBranches,
+ {},
+ state,
+ [
+ {
+ type: types.RECEIVE_BRANCHES_SUCCESS,
+ payload: mockData.mockBranches,
+ },
+ ],
+ [{ type: 'requestBranches' }],
+ () => {
+ done();
+ },
+ );
+ });
+
+ it('should show flash error and set error in state on fetchBranches failure', (done) => {
+ jest.spyOn(axios, 'get').mockRejectedValue();
+
+ testAction(actions.fetchBranches, {}, state, [], [{ type: 'requestBranches' }], () => {
+ expect(createFlash).toHaveBeenCalledWith({ message: PROJECT_BRANCHES_ERROR });
+ done();
+ });
+ });
+ });
+
+ describe('setBranch', () => {
+ it('commits SET_BRANCH mutation', () => {
+ testAction(
+ actions.setBranch,
+ {},
+ {},
+ [
+ {
+ type: types.SET_BRANCH,
+ payload: {},
+ },
+ ],
+ [
+ {
+ type: 'setSelectedBranch',
+ payload: {},
+ },
+ ],
+ );
+ });
+ });
+
+ describe('setSelectedBranch', () => {
+ it('commits SET_SELECTED_BRANCH mutation', () => {
+ testAction(actions.setSelectedBranch, {}, {}, [
+ {
+ type: types.SET_SELECTED_BRANCH,
+ payload: {},
+ },
+ ]);
+ });
+ });
+});
diff --git a/spec/frontend/projects/commit/store/getters_spec.js b/spec/frontend/projects/commit/store/getters_spec.js
new file mode 100644
index 00000000000..bd0cb356854
--- /dev/null
+++ b/spec/frontend/projects/commit/store/getters_spec.js
@@ -0,0 +1,21 @@
+import * as getters from '~/projects/commit/store/getters';
+import mockData from '../mock_data';
+
+describe('Commit form modal getters', () => {
+ describe('joinedBranches', () => {
+ it('should join fetched branches with variable branches', () => {
+ const state = {
+ branches: mockData.mockBranches,
+ };
+
+ expect(getters.joinedBranches(state)).toEqual(mockData.mockBranches.sort());
+ });
+
+ it('should provide a unique list of branches', () => {
+ const branches = ['_branch_', '_branch_', '_different_branch'];
+ const state = { branches };
+
+ expect(getters.joinedBranches(state)).toEqual(branches.slice(1));
+ });
+ });
+});
diff --git a/spec/frontend/projects/commit/store/mutations_spec.js b/spec/frontend/projects/commit/store/mutations_spec.js
new file mode 100644
index 00000000000..59ab3d9a74a
--- /dev/null
+++ b/spec/frontend/projects/commit/store/mutations_spec.js
@@ -0,0 +1,57 @@
+import mutations from '~/projects/commit/store/mutations';
+import * as types from '~/projects/commit/store/mutation_types';
+
+describe('Commit form modal mutations', () => {
+ let stateCopy;
+
+ describe('REQUEST_BRANCHES', () => {
+ it('should set isFetching to true', () => {
+ stateCopy = { isFetching: false };
+
+ mutations[types.REQUEST_BRANCHES](stateCopy);
+
+ expect(stateCopy.isFetching).toBe(true);
+ });
+ });
+
+ describe('RECEIVE_BRANCHES_SUCCESS', () => {
+ it('should set branches', () => {
+ stateCopy = { branch: '_existing_branch_', isFetching: true };
+
+ mutations[types.RECEIVE_BRANCHES_SUCCESS](stateCopy, ['_branch_1_', '_branch_2_']);
+
+ expect(stateCopy.branches).toEqual(['_existing_branch_', '_branch_1_', '_branch_2_']);
+ expect(stateCopy.isFetching).toEqual(false);
+ });
+ });
+
+ describe('CLEAR_MODAL', () => {
+ it('should clear modal state', () => {
+ stateCopy = { branch: '_master_', defaultBranch: '_default_branch_' };
+
+ mutations[types.CLEAR_MODAL](stateCopy);
+
+ expect(stateCopy.branch).toEqual('_default_branch_');
+ });
+ });
+
+ describe('SET_BRANCH', () => {
+ it('should set branch', () => {
+ stateCopy = { branch: '_master_' };
+
+ mutations[types.SET_BRANCH](stateCopy, '_changed_branch_');
+
+ expect(stateCopy.branch).toBe('_changed_branch_');
+ });
+ });
+
+ describe('SET_SELECTED_BRANCH', () => {
+ it('should set selectedBranch', () => {
+ stateCopy = { selectedBranch: '_master_' };
+
+ mutations[types.SET_SELECTED_BRANCH](stateCopy, '_changed_branch_');
+
+ expect(stateCopy.selectedBranch).toBe('_changed_branch_');
+ });
+ });
+});
diff --git a/spec/frontend/projects/commits/components/author_select_spec.js b/spec/frontend/projects/commits/components/author_select_spec.js
index 68c285a4097..63920ddfd72 100644
--- a/spec/frontend/projects/commits/components/author_select_spec.js
+++ b/spec/frontend/projects/commits/components/author_select_spec.js
@@ -139,11 +139,7 @@ describe('Author Select', () => {
});
it('has a "Any Author" as the first list item', () => {
- expect(
- findDropdownItems()
- .at(0)
- .text(),
- ).toBe('Any Author');
+ expect(findDropdownItems().at(0).text()).toBe('Any Author');
});
it('displays the project authors', () => {
@@ -163,21 +159,13 @@ describe('Author Select', () => {
wrapper.setData({ currentAuthor });
return wrapper.vm.$nextTick().then(() => {
- expect(
- findDropdownItems()
- .at(1)
- .props(),
- ).toEqual(expect.objectContaining(result));
+ expect(findDropdownItems().at(1).props()).toEqual(expect.objectContaining(result));
});
});
it("display the author's name", () => {
return wrapper.vm.$nextTick().then(() => {
- expect(
- findDropdownItems()
- .at(1)
- .text(),
- ).toBe(currentAuthor);
+ expect(findDropdownItems().at(1).text()).toBe(currentAuthor);
});
});
@@ -186,9 +174,7 @@ describe('Author Select', () => {
const spy = jest.spyOn(urlUtility, 'redirectTo');
spy.mockImplementation(() => 'mock');
- findDropdownItems()
- .at(1)
- .vm.$emit('click');
+ findDropdownItems().at(1).vm.$emit('click');
expect(spy).toHaveBeenCalledWith(redirectToUrl);
});
@@ -198,9 +184,7 @@ describe('Author Select', () => {
const spy = jest.spyOn(urlUtility, 'redirectTo');
spy.mockImplementation();
- findDropdownItems()
- .at(0)
- .vm.$emit('click');
+ findDropdownItems().at(0).vm.$emit('click');
expect(spy).toHaveBeenCalledWith(redirectToUrl);
});
});
diff --git a/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap b/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
index 4eb5060cb0a..0b9f095a700 100644
--- a/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
+++ b/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
@@ -31,6 +31,7 @@ exports[`Project remove modal initialized matches the snapshot 1`] = `
<gl-modal-stub
actioncancel="[object Object]"
actionprimary="[object Object]"
+ dismisslabel="Close"
footer-class="gl-bg-gray-10 gl-p-5"
modalclass=""
modalid="fakeUniqueId"
diff --git a/spec/frontend/projects/components/shared/__snapshots__/delete_button_spec.js.snap b/spec/frontend/projects/components/shared/__snapshots__/delete_button_spec.js.snap
index 4630415f61c..dd54db7dc0a 100644
--- a/spec/frontend/projects/components/shared/__snapshots__/delete_button_spec.js.snap
+++ b/spec/frontend/projects/components/shared/__snapshots__/delete_button_spec.js.snap
@@ -29,24 +29,12 @@ exports[`Project remove modal intialized matches the snapshot 1`] = `
Delete project
</gl-button-stub>
- <b-modal-stub
- canceltitle="Cancel"
- cancelvariant="secondary"
- footerclass="gl-bg-gray-10 gl-p-5"
- headerclosecontent="&times;"
- headercloselabel="Close"
- id="delete-project-modal-2"
- ignoreenforcefocusselector=""
- lazy="true"
- modalclass="gl-modal,"
- oktitle="OK"
- okvariant="danger"
- size="sm"
- title=""
- titleclass="gl-text-red-500"
- titletag="h4"
+ <div
+ footer-class="gl-bg-gray-10 gl-p-5"
+ ok-variant="danger"
+ title-class="gl-text-red-500"
>
-
+ Delete project. Are you ABSOLUTELY SURE?
<div>
<p
@@ -70,49 +58,6 @@ exports[`Project remove modal intialized matches the snapshot 1`] = `
/>
</div>
-
- <template />
-
- <template>
- Delete project. Are you ABSOLUTELY SURE?
- </template>
-
- <template />
-
- <template />
-
- <template />
-
- <template>
- <gl-button-stub
- buttontextclasses=""
- category="primary"
- class="js-modal-action-cancel"
- icon=""
- size="medium"
- variant="default"
- >
-
- Cancel, keep project
-
- </gl-button-stub>
-
- <!---->
-
- <gl-button-stub
- buttontextclasses=""
- category="primary"
- class="js-modal-action-primary"
- disabled="true"
- icon=""
- size="medium"
- variant="danger"
- >
-
- Yes, delete project
-
- </gl-button-stub>
- </template>
- </b-modal-stub>
+ </div>
</form>
`;
diff --git a/spec/frontend/projects/components/shared/delete_button_spec.js b/spec/frontend/projects/components/shared/delete_button_spec.js
index a6394a50011..cf7e41a2df2 100644
--- a/spec/frontend/projects/components/shared/delete_button_spec.js
+++ b/spec/frontend/projects/components/shared/delete_button_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import { GlModal } from '@gitlab/ui';
+import { stubComponent } from 'helpers/stub_component';
import SharedDeleteButton from '~/projects/components/shared/delete_button.vue';
jest.mock('~/lib/utils/csrf', () => ({ token: 'test-csrf-token' }));
@@ -17,12 +18,19 @@ describe('Project remove modal', () => {
formPath: 'some/path',
};
- const createComponent = (data = {}) => {
+ const createComponent = (data = {}, stubs = {}) => {
wrapper = shallowMount(SharedDeleteButton, {
propsData: defaultProps,
data: () => data,
stubs: {
- GlModal,
+ GlModal: stubComponent(GlModal, {
+ template: `
+ <div>
+ <slot name="modal-title"></slot>
+ <slot></slot>
+ </div>`,
+ }),
+ ...stubs,
},
});
};
@@ -52,7 +60,7 @@ describe('Project remove modal', () => {
describe('when the user input does not match the confirmPhrase', () => {
beforeEach(() => {
- createComponent({ userInput: 'bar' });
+ createComponent({ userInput: 'bar' }, { GlModal });
});
it('the confirm button is disabled', () => {
@@ -62,7 +70,7 @@ describe('Project remove modal', () => {
describe('when the user input matches the confirmPhrase', () => {
beforeEach(() => {
- createComponent({ userInput: defaultProps.confirmPhrase });
+ createComponent({ userInput: defaultProps.confirmPhrase }, { GlModal });
});
it('the confirm button is not disabled', () => {
diff --git a/spec/frontend/projects/experiment_new_project_creation/components/app_spec.js b/spec/frontend/projects/experiment_new_project_creation/components/app_spec.js
index a1e1e4554e2..9a5f200f5a9 100644
--- a/spec/frontend/projects/experiment_new_project_creation/components/app_spec.js
+++ b/spec/frontend/projects/experiment_new_project_creation/components/app_spec.js
@@ -7,7 +7,7 @@ import LegacyContainer from '~/projects/experiment_new_project_creation/componen
describe('Experimental new project creation app', () => {
let wrapper;
- const createComponent = propsData => {
+ const createComponent = (propsData) => {
wrapper = shallowMount(App, { propsData });
};
@@ -53,6 +53,28 @@ describe('Experimental new project creation app', () => {
});
});
+ describe('display custom new project guideline text', () => {
+ beforeEach(() => {
+ window.location.hash = '#blank_project';
+ });
+
+ it('does not render new project guideline if undefined', () => {
+ createComponent();
+ expect(wrapper.find('div#new-project-guideline').exists()).toBe(false);
+ });
+
+ it('renders new project guideline if defined', () => {
+ const guidelineSelector = 'div#new-project-guideline';
+
+ createComponent({
+ newProjectGuidelines: '<h4>Internal Guidelines</h4><p>lorem ipsum</p>',
+ });
+ expect(wrapper.find(guidelineSelector).exists()).toBe(true);
+ expect(wrapper.find(guidelineSelector).html()).toContain('<h4>Internal Guidelines</h4>');
+ expect(wrapper.find(guidelineSelector).html()).toContain('<p>lorem ipsum</p>');
+ });
+ });
+
it('renders relevant container when hash changes', () => {
createComponent();
expect(wrapper.find(WelcomePage).exists()).toBe(true);
diff --git a/spec/frontend/projects/experiment_new_project_creation/components/legacy_container_spec.js b/spec/frontend/projects/experiment_new_project_creation/components/legacy_container_spec.js
index 42a7aa6bc88..6fc36d6362c 100644
--- a/spec/frontend/projects/experiment_new_project_creation/components/legacy_container_spec.js
+++ b/spec/frontend/projects/experiment_new_project_creation/components/legacy_container_spec.js
@@ -6,7 +6,7 @@ describe('Legacy container component', () => {
let wrapper;
let dummy;
- const createComponent = propsData => {
+ const createComponent = (propsData) => {
wrapper = shallowMount(LegacyContainer, { propsData });
};
diff --git a/spec/frontend/projects/experiment_new_project_creation/components/welcome_spec.js b/spec/frontend/projects/experiment_new_project_creation/components/welcome_spec.js
index cf23ba281f9..d6764f75262 100644
--- a/spec/frontend/projects/experiment_new_project_creation/components/welcome_spec.js
+++ b/spec/frontend/projects/experiment_new_project_creation/components/welcome_spec.js
@@ -6,7 +6,7 @@ describe('Welcome page', () => {
let wrapper;
let trackingSpy;
- const createComponent = propsData => {
+ const createComponent = (propsData) => {
wrapper = shallowMount(WelcomePage, { propsData });
};
diff --git a/spec/frontend/projects/pipelines/charts/components/__snapshots__/pipelines_area_chart_spec.js.snap b/spec/frontend/projects/pipelines/charts/components/__snapshots__/ci_cd_analytics_area_chart_spec.js.snap
index d68e009f46e..fc51825f15b 100644
--- a/spec/frontend/projects/pipelines/charts/components/__snapshots__/pipelines_area_chart_spec.js.snap
+++ b/spec/frontend/projects/pipelines/charts/components/__snapshots__/ci_cd_analytics_area_chart_spec.js.snap
@@ -1,6 +1,6 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`PipelinesAreaChart matches the snapshot 1`] = `
+exports[`CiCdAnalyticsAreaChart matches the snapshot 1`] = `
<div
class="gl-mt-3"
>
diff --git a/spec/frontend/projects/pipelines/charts/components/__snapshots__/statistics_list_spec.js.snap b/spec/frontend/projects/pipelines/charts/components/__snapshots__/statistics_list_spec.js.snap
index c7e760486c0..be3716c24e6 100644
--- a/spec/frontend/projects/pipelines/charts/components/__snapshots__/statistics_list_spec.js.snap
+++ b/spec/frontend/projects/pipelines/charts/components/__snapshots__/statistics_list_spec.js.snap
@@ -38,14 +38,5 @@ exports[`StatisticsList displays the counts data with labels 1`] = `
50.00%
</strong>
</li>
- <li>
- <span>
- Total duration:
- </span>
-
- <strong>
- 00:01:56
- </strong>
- </li>
</ul>
`;
diff --git a/spec/frontend/projects/pipelines/charts/components/app_spec.js b/spec/frontend/projects/pipelines/charts/components/app_spec.js
index f8737dda5f6..44329944097 100644
--- a/spec/frontend/projects/pipelines/charts/components/app_spec.js
+++ b/spec/frontend/projects/pipelines/charts/components/app_spec.js
@@ -1,10 +1,10 @@
+import { merge } from 'lodash';
import { createLocalVue, shallowMount } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
-import createMockApollo from 'jest/helpers/mock_apollo_helper';
-import { GlColumnChart } from '@gitlab/ui/dist/charts';
+import { GlTabs, GlTab } from '@gitlab/ui';
+import createMockApollo from 'helpers/mock_apollo_helper';
import Component from '~/projects/pipelines/charts/components/app.vue';
-import StatisticsList from '~/projects/pipelines/charts/components/statistics_list.vue';
-import PipelinesAreaChart from '~/projects/pipelines/charts/components/pipelines_area_chart.vue';
+import PipelineCharts from '~/projects/pipelines/charts/components/pipeline_charts.vue';
import getPipelineCountByStatus from '~/projects/pipelines/charts/graphql/queries/get_pipeline_count_by_status.query.graphql';
import getProjectPipelineStatistics from '~/projects/pipelines/charts/graphql/queries/get_project_pipeline_statistics.query.graphql';
import { mockPipelineCount, mockPipelineStatistics } from '../mock_data';
@@ -13,6 +13,8 @@ const projectPath = 'gitlab-org/gitlab';
const localVue = createLocalVue();
localVue.use(VueApollo);
+const DeploymentFrequencyChartsStub = { name: 'DeploymentFrequencyCharts', render: () => {} };
+
describe('ProjectsPipelinesChartsApp', () => {
let wrapper;
@@ -25,21 +27,29 @@ describe('ProjectsPipelinesChartsApp', () => {
return createMockApollo(requestHandlers);
}
- function createComponent(options = {}) {
- const { fakeApollo } = options;
-
- return shallowMount(Component, {
- provide: {
- projectPath,
- },
- localVue,
- apolloProvider: fakeApollo,
- });
+ function createComponent(mountOptions = {}) {
+ wrapper = shallowMount(
+ Component,
+ merge(
+ {},
+ {
+ provide: {
+ projectPath,
+ shouldRenderDeploymentFrequencyCharts: false,
+ },
+ localVue,
+ apolloProvider: createMockApolloProvider(),
+ stubs: {
+ DeploymentFrequencyCharts: DeploymentFrequencyChartsStub,
+ },
+ },
+ mountOptions,
+ ),
+ );
}
beforeEach(() => {
- const fakeApollo = createMockApolloProvider();
- wrapper = createComponent({ fakeApollo });
+ createComponent();
});
afterEach(() => {
@@ -47,50 +57,74 @@ describe('ProjectsPipelinesChartsApp', () => {
wrapper = null;
});
- describe('overall statistics', () => {
- it('displays the statistics list', () => {
- const list = wrapper.find(StatisticsList);
-
- expect(list.exists()).toBe(true);
- expect(list.props('counts')).toMatchObject({
- failed: 1,
- success: 23,
- total: 34,
- successRatio: 95.83333333333334,
- totalDuration: 2471,
- });
- });
+ describe('pipelines charts', () => {
+ it('displays the pipeline charts', () => {
+ const chart = wrapper.find(PipelineCharts);
+ const analytics = mockPipelineStatistics.data.project.pipelineAnalytics;
- it('displays the commit duration chart', () => {
- const chart = wrapper.find(GlColumnChart);
+ const {
+ totalPipelines: total,
+ successfulPipelines: success,
+ failedPipelines: failed,
+ } = mockPipelineCount.data.project;
expect(chart.exists()).toBe(true);
- expect(chart.props('yAxisTitle')).toBe('Minutes');
- expect(chart.props('xAxisTitle')).toBe('Commit');
- expect(chart.props('bars')).toBe(wrapper.vm.timesChartTransformedData);
- expect(chart.props('option')).toBe(wrapper.vm.$options.timesChartOptions);
+ expect(chart.props()).toMatchObject({
+ counts: {
+ failed: failed.count,
+ success: success.count,
+ total: total.count,
+ successRatio: (success.count / (success.count + failed.count)) * 100,
+ },
+ lastWeek: {
+ labels: analytics.weekPipelinesLabels,
+ totals: analytics.weekPipelinesTotals,
+ success: analytics.weekPipelinesSuccessful,
+ },
+ lastMonth: {
+ labels: analytics.monthPipelinesLabels,
+ totals: analytics.monthPipelinesTotals,
+ success: analytics.monthPipelinesSuccessful,
+ },
+ lastYear: {
+ labels: analytics.yearPipelinesLabels,
+ totals: analytics.yearPipelinesTotals,
+ success: analytics.yearPipelinesSuccessful,
+ },
+ timesChart: {
+ labels: analytics.pipelineTimesLabels,
+ values: analytics.pipelineTimesValues,
+ },
+ });
});
});
- describe('pipelines charts', () => {
- it('displays 3 area charts', () => {
- expect(wrapper.findAll(PipelinesAreaChart)).toHaveLength(3);
+ const findDeploymentFrequencyCharts = () => wrapper.find(DeploymentFrequencyChartsStub);
+ const findGlTabs = () => wrapper.find(GlTabs);
+ const findAllGlTab = () => wrapper.findAll(GlTab);
+ const findGlTabAt = (i) => findAllGlTab().at(i);
+
+ describe('when shouldRenderDeploymentFrequencyCharts is true', () => {
+ beforeEach(() => {
+ createComponent({ provide: { shouldRenderDeploymentFrequencyCharts: true } });
});
- describe('displays individual correctly', () => {
- it('renders with the correct data', () => {
- const charts = wrapper.findAll(PipelinesAreaChart);
+ it('renders the deployment frequency charts in a tab', () => {
+ expect(findGlTabs().exists()).toBe(true);
+ expect(findGlTabAt(0).attributes('title')).toBe('Pipelines');
+ expect(findGlTabAt(1).attributes('title')).toBe('Deployments');
+ expect(findDeploymentFrequencyCharts().exists()).toBe(true);
+ });
+ });
- for (let i = 0; i < charts.length; i += 1) {
- const chart = charts.at(i);
+ describe('when shouldRenderDeploymentFrequencyCharts is false', () => {
+ beforeEach(() => {
+ createComponent({ provide: { shouldRenderDeploymentFrequencyCharts: false } });
+ });
- expect(chart.exists()).toBe(true);
- // TODO: Refactor this to use the mocked data instead of the vm data
- // https://gitlab.com/gitlab-org/gitlab/-/issues/292085
- expect(chart.props('chartData')).toBe(wrapper.vm.areaCharts[i].data);
- expect(chart.text()).toBe(wrapper.vm.areaCharts[i].title);
- }
- });
+ it('does not render the deployment frequency charts in a tab', () => {
+ expect(findGlTabs().exists()).toBe(false);
+ expect(findDeploymentFrequencyCharts().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/projects/pipelines/charts/components/pipelines_area_chart_spec.js b/spec/frontend/projects/pipelines/charts/components/ci_cd_analytics_area_chart_spec.js
index aea25903023..64f80300237 100644
--- a/spec/frontend/projects/pipelines/charts/components/pipelines_area_chart_spec.js
+++ b/spec/frontend/projects/pipelines/charts/components/ci_cd_analytics_area_chart_spec.js
@@ -1,14 +1,23 @@
import { mount } from '@vue/test-utils';
-import Component from '~/projects/pipelines/charts/components/pipelines_area_chart.vue';
+import CiCdAnalyticsAreaChart from '~/projects/pipelines/charts/components/ci_cd_analytics_area_chart.vue';
import { transformedAreaChartData } from '../mock_data';
-describe('PipelinesAreaChart', () => {
+describe('CiCdAnalyticsAreaChart', () => {
let wrapper;
beforeEach(() => {
- wrapper = mount(Component, {
+ wrapper = mount(CiCdAnalyticsAreaChart, {
propsData: {
chartData: transformedAreaChartData,
+ areaChartOptions: {
+ xAxis: {
+ name: 'X axis title',
+ type: 'category',
+ },
+ yAxis: {
+ name: 'Y axis title',
+ },
+ },
},
slots: {
default: 'Some title',
diff --git a/spec/frontend/projects/pipelines/charts/components/app_legacy_spec.js b/spec/frontend/projects/pipelines/charts/components/pipeline_charts_spec.js
index c03b571eb26..598055d5828 100644
--- a/spec/frontend/projects/pipelines/charts/components/app_legacy_spec.js
+++ b/spec/frontend/projects/pipelines/charts/components/pipeline_charts_spec.js
@@ -1,27 +1,34 @@
import { shallowMount } from '@vue/test-utils';
import { GlColumnChart } from '@gitlab/ui/dist/charts';
-import Component from '~/projects/pipelines/charts/components/app_legacy.vue';
import StatisticsList from '~/projects/pipelines/charts/components/statistics_list.vue';
-import PipelinesAreaChart from '~/projects/pipelines/charts/components/pipelines_area_chart.vue';
+import CiCdAnalyticsAreaChart from '~/projects/pipelines/charts/components/ci_cd_analytics_area_chart.vue';
+import PipelineCharts from '~/projects/pipelines/charts/components/pipeline_charts.vue';
import {
counts,
- timesChartData,
- areaChartData as lastWeekChartData,
- areaChartData as lastMonthChartData,
- lastYearChartData,
+ timesChartData as timesChart,
+ areaChartData as lastWeek,
+ areaChartData as lastMonth,
+ lastYearChartData as lastYear,
} from '../mock_data';
describe('ProjectsPipelinesChartsApp', () => {
let wrapper;
beforeEach(() => {
- wrapper = shallowMount(Component, {
+ wrapper = shallowMount(PipelineCharts, {
propsData: {
counts,
- timesChartData,
- lastWeekChartData,
- lastMonthChartData,
- lastYearChartData,
+ timesChart,
+ lastWeek,
+ lastMonth,
+ lastYear,
+ },
+ provide: {
+ projectPath: 'test/project',
+ shouldRenderDeploymentFrequencyCharts: true,
+ },
+ stubs: {
+ DeploymentFrequencyCharts: true,
},
});
});
@@ -35,7 +42,7 @@ describe('ProjectsPipelinesChartsApp', () => {
it('displays the statistics list', () => {
const list = wrapper.find(StatisticsList);
- expect(list.exists()).toBeTruthy();
+ expect(list.exists()).toBe(true);
expect(list.props('counts')).toBe(counts);
});
@@ -52,13 +59,12 @@ describe('ProjectsPipelinesChartsApp', () => {
describe('pipelines charts', () => {
it('displays 3 area charts', () => {
- expect(wrapper.findAll(PipelinesAreaChart).length).toBe(3);
+ expect(wrapper.findAll(CiCdAnalyticsAreaChart)).toHaveLength(3);
});
describe('displays individual correctly', () => {
it('renders with the correct data', () => {
- const charts = wrapper.findAll(PipelinesAreaChart);
-
+ const charts = wrapper.findAll(CiCdAnalyticsAreaChart);
for (let i = 0; i < charts.length; i += 1) {
const chart = charts.at(i);
diff --git a/spec/frontend/projects/pipelines/charts/mock_data.js b/spec/frontend/projects/pipelines/charts/mock_data.js
index da055536fcc..3bc09f0b0a0 100644
--- a/spec/frontend/projects/pipelines/charts/mock_data.js
+++ b/spec/frontend/projects/pipelines/charts/mock_data.js
@@ -25,11 +25,23 @@ export const lastYearChartData = {
export const transformedAreaChartData = [
{
name: 'all',
- data: [['01 Jan', 4], ['02 Jan', 6], ['03 Jan', 3], ['04 Jan', 6], ['05 Jan', 7]],
+ data: [
+ ['01 Jan', 4],
+ ['02 Jan', 6],
+ ['03 Jan', 3],
+ ['04 Jan', 6],
+ ['05 Jan', 7],
+ ],
},
{
name: 'success',
- data: [['01 Jan', 3], ['02 Jan', 3], ['03 Jan', 3], ['04 Jan', 3], ['05 Jan', 5]],
+ data: [
+ ['01 Jan', 3],
+ ['02 Jan', 3],
+ ['03 Jan', 3],
+ ['04 Jan', 3],
+ ['05 Jan', 5],
+ ],
},
];
diff --git a/spec/frontend/projects/project_import_gitlab_project_spec.js b/spec/frontend/projects/project_import_gitlab_project_spec.js
index 3c94934699d..aaf8a81f626 100644
--- a/spec/frontend/projects/project_import_gitlab_project_spec.js
+++ b/spec/frontend/projects/project_import_gitlab_project_spec.js
@@ -4,7 +4,7 @@ describe('Import Gitlab project', () => {
const pathName = 'my-project';
const projectName = 'My Project';
- const setTestFixtures = url => {
+ const setTestFixtures = (url) => {
window.history.pushState({}, null, url);
setFixtures(`
diff --git a/spec/frontend/projects/project_new_spec.js b/spec/frontend/projects/project_new_spec.js
index c32979dcd74..d2936cb9efe 100644
--- a/spec/frontend/projects/project_new_spec.js
+++ b/spec/frontend/projects/project_new_spec.js
@@ -1,5 +1,5 @@
import $ from 'jquery';
-import { TEST_HOST } from 'jest/helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
import projectNew from '~/projects/project_new';
describe('New Project', () => {
@@ -38,10 +38,7 @@ describe('New Project', () => {
beforeEach(() => {
projectNew.bindEvents();
- $projectPath
- .val('')
- .keyup()
- .val(dummyImportUrl);
+ $projectPath.val('').keyup().val(dummyImportUrl);
});
it('does not change project path for disabled $projectImportUrl', () => {
diff --git a/spec/frontend/projects/projects_filterable_list_spec.js b/spec/frontend/projects/projects_filterable_list_spec.js
index e756fb3ab56..377d347623a 100644
--- a/spec/frontend/projects/projects_filterable_list_spec.js
+++ b/spec/frontend/projects/projects_filterable_list_spec.js
@@ -1,5 +1,5 @@
+import { getJSONFixture, setHTMLFixture } from 'helpers/fixtures';
import ProjectsFilterableList from '~/projects/projects_filterable_list';
-import { getJSONFixture, setHTMLFixture } from '../helpers/fixtures';
describe('ProjectsFilterableList', () => {
let List;
diff --git a/spec/frontend/projects/settings/access_dropdown_spec.js b/spec/frontend/projects/settings/access_dropdown_spec.js
index 41b9c0c3763..8a57930ac83 100644
--- a/spec/frontend/projects/settings/access_dropdown_spec.js
+++ b/spec/frontend/projects/settings/access_dropdown_spec.js
@@ -72,7 +72,7 @@ describe('AccessDropdown', () => {
describe('with only role', () => {
beforeEach(() => {
- dropdown.setSelectedItems(dummyItems.filter(item => item.type === LEVEL_TYPES.ROLE));
+ dropdown.setSelectedItems(dummyItems.filter((item) => item.type === LEVEL_TYPES.ROLE));
$dropdownToggleText.addClass('is-default');
});
@@ -86,7 +86,7 @@ describe('AccessDropdown', () => {
describe('with only users', () => {
beforeEach(() => {
- dropdown.setSelectedItems(dummyItems.filter(item => item.type === LEVEL_TYPES.USER));
+ dropdown.setSelectedItems(dummyItems.filter((item) => item.type === LEVEL_TYPES.USER));
$dropdownToggleText.addClass('is-default');
});
@@ -100,7 +100,7 @@ describe('AccessDropdown', () => {
describe('with only groups', () => {
beforeEach(() => {
- dropdown.setSelectedItems(dummyItems.filter(item => item.type === LEVEL_TYPES.GROUP));
+ dropdown.setSelectedItems(dummyItems.filter((item) => item.type === LEVEL_TYPES.GROUP));
$dropdownToggleText.addClass('is-default');
});
@@ -115,7 +115,7 @@ describe('AccessDropdown', () => {
describe('with users and groups', () => {
beforeEach(() => {
const selectedTypes = [LEVEL_TYPES.GROUP, LEVEL_TYPES.USER];
- dropdown.setSelectedItems(dummyItems.filter(item => selectedTypes.includes(item.type)));
+ dropdown.setSelectedItems(dummyItems.filter((item) => selectedTypes.includes(item.type)));
$dropdownToggleText.addClass('is-default');
});
@@ -130,7 +130,7 @@ describe('AccessDropdown', () => {
describe('with users and deploy keys', () => {
beforeEach(() => {
const selectedTypes = [LEVEL_TYPES.DEPLOY_KEY, LEVEL_TYPES.USER];
- dropdown.setSelectedItems(dummyItems.filter(item => selectedTypes.includes(item.type)));
+ dropdown.setSelectedItems(dummyItems.filter((item) => selectedTypes.includes(item.type)));
$dropdownToggleText.addClass('is-default');
});
diff --git a/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js b/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
index 7e74a5deee1..c83b1852147 100644
--- a/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
+++ b/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
@@ -137,7 +137,7 @@ describe('ServiceDeskRoot', () => {
.$nextTick()
.then(waitForPromises)
.then(() => {
- expect(wrapper.html()).toContain('Changes were successfully made.');
+ expect(wrapper.html()).toContain('Changes saved.');
});
});
@@ -160,7 +160,7 @@ describe('ServiceDeskRoot', () => {
.$nextTick()
.then(waitForPromises)
.then(() => {
- expect(wrapper.html()).toContain('An error occured while making the changes:');
+ expect(wrapper.html()).toContain('An error occured while saving changes:');
});
});
diff --git a/spec/frontend/projects/settings_service_desk/components/service_desk_setting_spec.js b/spec/frontend/projects/settings_service_desk/components/service_desk_setting_spec.js
index 173a7fc4e11..ddd9a7b2fad 100644
--- a/spec/frontend/projects/settings_service_desk/components/service_desk_setting_spec.js
+++ b/spec/frontend/projects/settings_service_desk/components/service_desk_setting_spec.js
@@ -185,7 +185,9 @@ describe('ServiceDeskSetting', () => {
const expectedTemplates = [''].concat(templates);
const dropdown = findTemplateDropdown();
- const dropdownList = Array.from(dropdown.element.children).map(option => option.innerText);
+ const dropdownList = Array.from(dropdown.element.children).map(
+ (option) => option.innerText,
+ );
expect(dropdown.element.children).toHaveLength(expectedTemplates.length);
expect(dropdownList.includes('Bug')).toEqual(true);
diff --git a/spec/frontend/projects/settings_service_desk/services/service_desk_service_spec.js b/spec/frontend/projects/settings_service_desk/services/service_desk_service_spec.js
index 3b960a95db4..d5340df03fe 100644
--- a/spec/frontend/projects/settings_service_desk/services/service_desk_service_spec.js
+++ b/spec/frontend/projects/settings_service_desk/services/service_desk_service_spec.js
@@ -23,7 +23,7 @@ describe('ServiceDeskService', () => {
it('makes a request to set service desk', () => {
axiosMock.onPut(endpoint).replyOnce(httpStatusCodes.OK, dummyResponse);
- return service.toggleServiceDesk(true).then(response => {
+ return service.toggleServiceDesk(true).then((response) => {
expect(response.data).toEqual(dummyResponse);
});
});
@@ -31,7 +31,7 @@ describe('ServiceDeskService', () => {
it('fails on error response', () => {
axiosMock.onPut(endpoint).networkError();
- return service.toggleServiceDesk(true).catch(error => {
+ return service.toggleServiceDesk(true).catch((error) => {
expect(error.message).toBe(errorMessage);
});
});
@@ -63,7 +63,7 @@ describe('ServiceDeskService', () => {
},
true,
)
- .then(response => {
+ .then((response) => {
expect(response.data).toEqual(dummyResponse);
});
});
@@ -79,7 +79,7 @@ describe('ServiceDeskService', () => {
},
true,
)
- .catch(error => {
+ .catch((error) => {
expect(error.message).toBe(errorMessage);
});
});
diff --git a/spec/frontend/prometheus_metrics/prometheus_metrics_spec.js b/spec/frontend/prometheus_metrics/prometheus_metrics_spec.js
index 2460851a6a4..f6b2780e167 100644
--- a/spec/frontend/prometheus_metrics/prometheus_metrics_spec.js
+++ b/spec/frontend/prometheus_metrics/prometheus_metrics_spec.js
@@ -91,12 +91,7 @@ describe('PrometheusMetrics', () => {
);
expect($metricsListLi.length).toEqual(metrics.length);
- expect(
- $metricsListLi
- .first()
- .find('.badge')
- .text(),
- ).toEqual(`${metrics[0].active_metrics}`);
+ expect($metricsListLi.first().find('.badge').text()).toEqual(`${metrics[0].active_metrics}`);
});
it('should show missing environment variables list', () => {
@@ -138,7 +133,7 @@ describe('PrometheusMetrics', () => {
mock.restore();
});
- it('should show loader animation while response is being loaded and hide it when request is complete', done => {
+ it('should show loader animation while response is being loaded and hide it when request is complete', (done) => {
mockSuccess();
prometheusMetrics.loadActiveMetrics();
@@ -152,7 +147,7 @@ describe('PrometheusMetrics', () => {
});
});
- it('should show empty state if response failed to load', done => {
+ it('should show empty state if response failed to load', (done) => {
mockError();
prometheusMetrics.loadActiveMetrics();
@@ -164,7 +159,7 @@ describe('PrometheusMetrics', () => {
});
});
- it('should populate metrics list once response is loaded', done => {
+ it('should populate metrics list once response is loaded', (done) => {
jest.spyOn(prometheusMetrics, 'populateActiveMetrics').mockImplementation();
mockSuccess();
diff --git a/spec/frontend/ref/components/ref_selector_spec.js b/spec/frontend/ref/components/ref_selector_spec.js
index 7f0a4c7d3f4..7efb6e9ba4a 100644
--- a/spec/frontend/ref/components/ref_selector_spec.js
+++ b/spec/frontend/ref/components/ref_selector_spec.js
@@ -37,7 +37,7 @@ describe('Ref selector component', () => {
attrs,
listeners: {
// simulate a parent component v-model binding
- input: selectedRef => {
+ input: (selectedRef) => {
wrapper.setProps({ value: selectedRef });
},
},
@@ -61,13 +61,13 @@ describe('Ref selector component', () => {
mock
.onGet(`/api/v4/projects/${projectId}/repository/branches`)
- .reply(config => branchesApiCallSpy(config));
+ .reply((config) => branchesApiCallSpy(config));
mock
.onGet(`/api/v4/projects/${projectId}/repository/tags`)
- .reply(config => tagsApiCallSpy(config));
+ .reply((config) => tagsApiCallSpy(config));
mock
.onGet(new RegExp(`/api/v4/projects/${projectId}/repository/commits/.*`))
- .reply(config => commitApiCallSpy(config));
+ .reply((config) => commitApiCallSpy(config));
});
afterEach(() => {
@@ -122,7 +122,7 @@ describe('Ref selector component', () => {
//
// Convenience methods
//
- const updateQuery = newQuery => {
+ const updateQuery = (newQuery) => {
findSearchBox().vm.$emit('input', newQuery);
};
@@ -313,9 +313,7 @@ describe('Ref selector component', () => {
it('renders the "Branches" heading with a total number indicator', () => {
expect(
- findBranchesSection()
- .find('[data-testid="section-header"]')
- .text(),
+ findBranchesSection().find('[data-testid="section-header"]').text(),
).toMatchInterpolatedText('Branches 123');
});
@@ -336,7 +334,7 @@ describe('Ref selector component', () => {
it('renders the default branch as a selectable item with a "default" badge', () => {
const dropdownItems = findBranchDropdownItems();
- const defaultBranch = fixtures.branches.find(b => b.default);
+ const defaultBranch = fixtures.branches.find((b) => b.default);
const defaultBranchIndex = fixtures.branches.indexOf(defaultBranch);
expect(trimText(dropdownItems.at(defaultBranchIndex).text())).toBe(
@@ -392,9 +390,7 @@ describe('Ref selector component', () => {
it('renders the "Tags" heading with a total number indicator', () => {
expect(
- findTagsSection()
- .find('[data-testid="section-header"]')
- .text(),
+ findTagsSection().find('[data-testid="section-header"]').text(),
).toMatchInterpolatedText('Tags 456');
});
@@ -460,9 +456,7 @@ describe('Ref selector component', () => {
it('renders the "Commits" heading with a total number indicator', () => {
expect(
- findCommitsSection()
- .find('[data-testid="section-header"]')
- .text(),
+ findCommitsSection().find('[data-testid="section-header"]').text(),
).toMatchInterpolatedText('Commits 1');
});
diff --git a/spec/frontend/registry/explorer/components/__snapshots__/registry_breadcrumb_spec.js.snap b/spec/frontend/registry/explorer/components/__snapshots__/registry_breadcrumb_spec.js.snap
index feae2f629b7..4be4fce1abf 100644
--- a/spec/frontend/registry/explorer/components/__snapshots__/registry_breadcrumb_spec.js.snap
+++ b/spec/frontend/registry/explorer/components/__snapshots__/registry_breadcrumb_spec.js.snap
@@ -1,28 +1,71 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`Registry Breadcrumb when is rootRoute renders 1`] = `
-<ul>
- <li
- class="foo bar"
+exports[`Registry Breadcrumb when is not rootRoute renders 1`] = `
+<div
+ class="gl-breadcrumbs"
+>
+ <ol
+ class="breadcrumb gl-breadcrumb-list"
>
- baz
- </li>
- <li
- class="foo bar"
+
+ <li
+ class="breadcrumb-item gl-breadcrumb-item"
+ >
+ <a
+ class=""
+ href="/"
+ target="_self"
+ />
+ </li>
+
+ <span
+ class="gl-breadcrumb-separator"
+ data-testid="separator"
+ >
+ <svg
+ aria-hidden="true"
+ class="gl-icon s8"
+ data-testid="angle-right-icon"
+ >
+ <use
+ href="#angle-right"
+ />
+ </svg>
+ </span>
+ <li
+ class="breadcrumb-item gl-breadcrumb-item"
+ >
+ <a
+ class=""
+ href="#"
+ target="_self"
+ />
+ </li>
+
+ <!---->
+ </ol>
+</div>
+`;
+
+exports[`Registry Breadcrumb when is rootRoute renders 1`] = `
+<div
+ class="gl-breadcrumbs"
+>
+ <ol
+ class="breadcrumb gl-breadcrumb-list"
>
- foo
- </li>
-
- <!---->
-
- <li>
- <a
- class="foo"
+
+ <li
+ class="breadcrumb-item gl-breadcrumb-item"
>
- <a>
-
- </a>
- </a>
- </li>
-</ul>
+ <a
+ class=""
+ href="/"
+ target="_self"
+ />
+ </li>
+
+ <!---->
+ </ol>
+</div>
`;
diff --git a/spec/frontend/registry/explorer/components/delete_button_spec.js b/spec/frontend/registry/explorer/components/delete_button_spec.js
index a79ca77a464..cd43e97009b 100644
--- a/spec/frontend/registry/explorer/components/delete_button_spec.js
+++ b/spec/frontend/registry/explorer/components/delete_button_spec.js
@@ -13,7 +13,7 @@ describe('delete_button', () => {
const findButton = () => wrapper.find(GlButton);
- const mountComponent = props => {
+ const mountComponent = (props) => {
wrapper = shallowMount(component, {
propsData: {
...defaultProps,
diff --git a/spec/frontend/registry/explorer/components/details_page/delete_alert_spec.js b/spec/frontend/registry/explorer/components/details_page/delete_alert_spec.js
index 5d54986978b..6a7fbbe367a 100644
--- a/spec/frontend/registry/explorer/components/details_page/delete_alert_spec.js
+++ b/spec/frontend/registry/explorer/components/details_page/delete_alert_spec.js
@@ -15,7 +15,7 @@ describe('Delete alert', () => {
const findAlert = () => wrapper.find(GlAlert);
const findLink = () => wrapper.find(GlLink);
- const mountComponent = propsData => {
+ const mountComponent = (propsData) => {
wrapper = shallowMount(component, { stubs: { GlSprintf }, propsData });
};
diff --git a/spec/frontend/registry/explorer/components/details_page/delete_modal_spec.js b/spec/frontend/registry/explorer/components/details_page/delete_modal_spec.js
index c77f7a54d34..636e0a285a6 100644
--- a/spec/frontend/registry/explorer/components/details_page/delete_modal_spec.js
+++ b/spec/frontend/registry/explorer/components/details_page/delete_modal_spec.js
@@ -13,7 +13,7 @@ describe('Delete Modal', () => {
const findModal = () => wrapper.find(GlModal);
const findDescription = () => wrapper.find('[data-testid="description"]');
- const mountComponent = propsData => {
+ const mountComponent = (propsData) => {
wrapper = shallowMount(component, {
propsData,
stubs: {
diff --git a/spec/frontend/registry/explorer/components/details_page/details_header_spec.js b/spec/frontend/registry/explorer/components/details_page/details_header_spec.js
index f642c66832b..337235e3de5 100644
--- a/spec/frontend/registry/explorer/components/details_page/details_header_spec.js
+++ b/spec/frontend/registry/explorer/components/details_page/details_header_spec.js
@@ -3,7 +3,18 @@ import { GlSprintf } from '@gitlab/ui';
import { useFakeDate } from 'helpers/fake_date';
import TitleArea from '~/vue_shared/components/registry/title_area.vue';
import component from '~/registry/explorer/components/details_page/details_header.vue';
-import { DETAILS_PAGE_TITLE } from '~/registry/explorer/constants';
+import {
+ DETAILS_PAGE_TITLE,
+ UNSCHEDULED_STATUS,
+ SCHEDULED_STATUS,
+ ONGOING_STATUS,
+ UNFINISHED_STATUS,
+ CLEANUP_DISABLED_TEXT,
+ CLEANUP_DISABLED_TOOLTIP,
+ CLEANUP_SCHEDULED_TOOLTIP,
+ CLEANUP_ONGOING_TOOLTIP,
+ CLEANUP_UNFINISHED_TOOLTIP,
+} from '~/registry/explorer/constants';
describe('Details Header', () => {
let wrapper;
@@ -11,15 +22,22 @@ describe('Details Header', () => {
const defaultImage = {
name: 'foo',
updatedAt: '2020-11-03T13:29:21Z',
+ tagsCount: 10,
project: {
visibility: 'public',
+ containerExpirationPolicy: {
+ enabled: false,
+ },
},
};
// set the date to Dec 4, 2020
useFakeDate(2020, 11, 4);
+ const findByTestId = (testId) => wrapper.find(`[data-testid="${testId}"]`);
- const findLastUpdatedAndVisibility = () => wrapper.find('[data-testid="updated-and-visibility"]');
+ const findLastUpdatedAndVisibility = () => findByTestId('updated-and-visibility');
+ const findTagsCount = () => findByTestId('tags-count');
+ const findCleanup = () => findByTestId('cleanup');
const waitForMetadataItems = async () => {
// Metadata items are printed by a loop in the title-area and it takes two ticks for them to be available
@@ -54,25 +72,96 @@ describe('Details Header', () => {
expect(wrapper.text()).toContain('foo');
});
- it('has a metadata item with last updated text', async () => {
- mountComponent();
- await waitForMetadataItems();
+ describe('metadata items', () => {
+ describe('tags count', () => {
+ it('when there is more than one tag has the correct text', async () => {
+ mountComponent();
+ await waitForMetadataItems();
- expect(findLastUpdatedAndVisibility().props('text')).toBe('Last updated 1 month ago');
- });
+ expect(findTagsCount().props('text')).toBe('10 tags');
+ });
+
+ it('when there is one tag has the correct text', async () => {
+ mountComponent({ ...defaultImage, tagsCount: 1 });
+ await waitForMetadataItems();
+
+ expect(findTagsCount().props('text')).toBe('1 tag');
+ });
+
+ it('has the correct icon', async () => {
+ mountComponent();
+ await waitForMetadataItems();
+
+ expect(findTagsCount().props('icon')).toBe('tag');
+ });
+ });
- describe('visibility icon', () => {
- it('shows an eye when the project is public', async () => {
- mountComponent();
- await waitForMetadataItems();
+ describe('cleanup metadata item', () => {
+ it('has the correct icon', async () => {
+ mountComponent();
+ await waitForMetadataItems();
- expect(findLastUpdatedAndVisibility().props('icon')).toBe('eye');
+ expect(findCleanup().props('icon')).toBe('expire');
+ });
+
+ it('when the expiration policy is disabled', async () => {
+ mountComponent();
+ await waitForMetadataItems();
+
+ expect(findCleanup().props()).toMatchObject({
+ text: CLEANUP_DISABLED_TEXT,
+ textTooltip: CLEANUP_DISABLED_TOOLTIP,
+ });
+ });
+
+ it.each`
+ status | text | tooltip
+ ${UNSCHEDULED_STATUS} | ${'Cleanup will run in 1 month'} | ${''}
+ ${SCHEDULED_STATUS} | ${'Cleanup pending'} | ${CLEANUP_SCHEDULED_TOOLTIP}
+ ${ONGOING_STATUS} | ${'Cleanup in progress'} | ${CLEANUP_ONGOING_TOOLTIP}
+ ${UNFINISHED_STATUS} | ${'Cleanup incomplete'} | ${CLEANUP_UNFINISHED_TOOLTIP}
+ `(
+ 'when the status is $status the text is $text and the tooltip is $tooltip',
+ async ({ status, text, tooltip }) => {
+ mountComponent({
+ ...defaultImage,
+ expirationPolicyCleanupStatus: status,
+ project: {
+ containerExpirationPolicy: { enabled: true, nextRunAt: '2021-01-03T14:29:21Z' },
+ },
+ });
+ await waitForMetadataItems();
+
+ expect(findCleanup().props()).toMatchObject({
+ text,
+ textTooltip: tooltip,
+ });
+ },
+ );
});
- it('shows an eye slashed when the project is not public', async () => {
- mountComponent({ ...defaultImage, project: { visibility: 'private' } });
- await waitForMetadataItems();
- expect(findLastUpdatedAndVisibility().props('icon')).toBe('eye-slash');
+ describe('visibility and updated at', () => {
+ it('has last updated text', async () => {
+ mountComponent();
+ await waitForMetadataItems();
+
+ expect(findLastUpdatedAndVisibility().props('text')).toBe('Last updated 1 month ago');
+ });
+
+ describe('visibility icon', () => {
+ it('shows an eye when the project is public', async () => {
+ mountComponent();
+ await waitForMetadataItems();
+
+ expect(findLastUpdatedAndVisibility().props('icon')).toBe('eye');
+ });
+ it('shows an eye slashed when the project is not public', async () => {
+ mountComponent({ ...defaultImage, project: { visibility: 'private' } });
+ await waitForMetadataItems();
+
+ expect(findLastUpdatedAndVisibility().props('icon')).toBe('eye-slash');
+ });
+ });
});
});
});
diff --git a/spec/frontend/registry/explorer/components/details_page/tags_list_row_spec.js b/spec/frontend/registry/explorer/components/details_page/tags_list_row_spec.js
index 94944643e8b..c2efc71c159 100644
--- a/spec/frontend/registry/explorer/components/details_page/tags_list_row_spec.js
+++ b/spec/frontend/registry/explorer/components/details_page/tags_list_row_spec.js
@@ -312,11 +312,7 @@ describe('tags list row', () => {
});
it(`is ${clipboard} that clipboard button exist`, () => {
- expect(
- finderFunction()
- .find(ClipboardButton)
- .exists(),
- ).toBe(clipboard);
+ expect(finderFunction().find(ClipboardButton).exists()).toBe(clipboard);
});
});
});
diff --git a/spec/frontend/registry/explorer/components/details_page/tags_list_spec.js b/spec/frontend/registry/explorer/components/details_page/tags_list_spec.js
index 035b59731c9..413795a7a57 100644
--- a/spec/frontend/registry/explorer/components/details_page/tags_list_spec.js
+++ b/spec/frontend/registry/explorer/components/details_page/tags_list_spec.js
@@ -8,7 +8,7 @@ import { tagsMock } from '../../mock_data';
describe('Tags List', () => {
let wrapper;
const tags = [...tagsMock];
- const readOnlyTags = tags.map(t => ({ ...t, canDelete: false }));
+ const readOnlyTags = tags.map((t) => ({ ...t, canDelete: false }));
const findTagsListRow = () => wrapper.findAll(TagsListRow);
const findDeleteButton = () => wrapper.find(GlButton);
@@ -78,18 +78,14 @@ describe('Tags List', () => {
it('is enabled when at least one item is selected', async () => {
mountComponent();
- findTagsListRow()
- .at(0)
- .vm.$emit('select');
+ findTagsListRow().at(0).vm.$emit('select');
await wrapper.vm.$nextTick();
expect(findDeleteButton().attributes('disabled')).toBe(undefined);
});
it('click event emits a deleted event with selected items', () => {
mountComponent();
- findTagsListRow()
- .at(0)
- .vm.$emit('select');
+ findTagsListRow().at(0).vm.$emit('select');
findDeleteButton().vm.$emit('click');
expect(wrapper.emitted('delete')).toEqual([[{ 'beta-24753': true }]]);
@@ -116,22 +112,14 @@ describe('Tags List', () => {
describe('events', () => {
it('select event update the selected items', async () => {
mountComponent();
- findTagsListRow()
- .at(0)
- .vm.$emit('select');
+ findTagsListRow().at(0).vm.$emit('select');
await wrapper.vm.$nextTick();
- expect(
- findTagsListRow()
- .at(0)
- .attributes('selected'),
- ).toBe('true');
+ expect(findTagsListRow().at(0).attributes('selected')).toBe('true');
});
it('delete event emit a delete event', () => {
mountComponent();
- findTagsListRow()
- .at(0)
- .vm.$emit('delete');
+ findTagsListRow().at(0).vm.$emit('delete');
expect(wrapper.emitted('delete')).toEqual([[{ 'beta-24753': true }]]);
});
});
diff --git a/spec/frontend/registry/explorer/components/details_page/tags_loader_spec.js b/spec/frontend/registry/explorer/components/details_page/tags_loader_spec.js
index b27d3e2c042..40d84d9d4a5 100644
--- a/spec/frontend/registry/explorer/components/details_page/tags_loader_spec.js
+++ b/spec/frontend/registry/explorer/components/details_page/tags_loader_spec.js
@@ -32,11 +32,7 @@ describe('TagsLoader component', () => {
it('has the correct props', () => {
mountComponent();
- expect(
- findGlSkeletonLoaders()
- .at(0)
- .props(),
- ).toMatchObject({
+ expect(findGlSkeletonLoaders().at(0).props()).toMatchObject({
width: component.loader.width,
height: component.loader.height,
});
diff --git a/spec/frontend/registry/explorer/components/list_page/__snapshots__/project_empty_state_spec.js.snap b/spec/frontend/registry/explorer/components/list_page/__snapshots__/project_empty_state_spec.js.snap
index bab6b25cc15..46b07b4c2d6 100644
--- a/spec/frontend/registry/explorer/components/list_page/__snapshots__/project_empty_state_spec.js.snap
+++ b/spec/frontend/registry/explorer/components/list_page/__snapshots__/project_empty_state_spec.js.snap
@@ -56,7 +56,7 @@ exports[`Registry Project Empty state to match the default snapshot 1`] = `
</p>
<gl-form-input-group-stub
- class="gl-mb-4 "
+ class="gl-mb-4"
predefinedoptions="[object Object]"
value=""
>
diff --git a/spec/frontend/registry/explorer/components/list_page/image_list_row_spec.js b/spec/frontend/registry/explorer/components/list_page/image_list_row_spec.js
index b9839d92f1d..a06c4795b2e 100644
--- a/spec/frontend/registry/explorer/components/list_page/image_list_row_spec.js
+++ b/spec/frontend/registry/explorer/components/list_page/image_list_row_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlIcon, GlSprintf } from '@gitlab/ui';
+import { GlIcon, GlSprintf, GlSkeletonLoader } from '@gitlab/ui';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
@@ -23,12 +23,13 @@ describe('Image List Row', () => {
const [item] = imagesListResponse;
const findDetailsLink = () => wrapper.find('[data-testid="details-link"]');
- const findTagsCount = () => wrapper.find('[data-testid="tagsCount"]');
+ const findTagsCount = () => wrapper.find('[data-testid="tags-count"]');
const findDeleteBtn = () => wrapper.find(DeleteButton);
const findClipboardButton = () => wrapper.find(ClipboardButton);
const findWarningIcon = () => wrapper.find('[data-testid="warning-icon"]');
+ const findSkeletonLoader = () => wrapper.find(GlSkeletonLoader);
- const mountComponent = props => {
+ const mountComponent = (props) => {
wrapper = shallowMount(Component, {
stubs: {
RouterLink,
@@ -164,6 +165,20 @@ describe('Image List Row', () => {
expect(icon.props('name')).toBe('tag');
});
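+ // while metadataLoading is true the row is expected to show a skeleton loader and hide the tags count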
+ describe('loading state', () => {
+ it('shows a loader when metadataLoading is true', () => {
+ mountComponent({ metadataLoading: true });
+
+ expect(findSkeletonLoader().exists()).toBe(true);
+ });
+
+ it('hides the tags count while loading', () => {
+ mountComponent({ metadataLoading: true });
+
+ expect(findTagsCount().exists()).toBe(false);
+ });
+ });
+
describe('tags count text', () => {
it('with one tag in the image', () => {
mountComponent({ item: { ...item, tagsCount: 1 } });
diff --git a/spec/frontend/registry/explorer/components/list_page/image_list_spec.js b/spec/frontend/registry/explorer/components/list_page/image_list_spec.js
index 54befc9973a..61c362f4d78 100644
--- a/spec/frontend/registry/explorer/components/list_page/image_list_spec.js
+++ b/spec/frontend/registry/explorer/components/list_page/image_list_spec.js
@@ -11,11 +11,12 @@ describe('Image List', () => {
const findRow = () => wrapper.findAll(ImageListRow);
const findPagination = () => wrapper.find(GlKeysetPagination);
- const mountComponent = (pageInfo = defaultPageInfo) => {
+ const mountComponent = (props) => {
wrapper = shallowMount(Component, {
propsData: {
images: imagesListResponse,
- pageInfo,
+ pageInfo: defaultPageInfo,
+ ...props,
},
});
};
@@ -35,11 +36,14 @@ describe('Image List', () => {
it('when delete event is emitted on the row it emits up a delete event', () => {
mountComponent();
- findRow()
- .at(0)
- .vm.$emit('delete', 'foo');
+ findRow().at(0).vm.$emit('delete', 'foo');
expect(wrapper.emitted('delete')).toEqual([['foo']]);
});
+
+ it('passes down the metadataLoading prop', () => {
+ mountComponent({ metadataLoading: true });
+ expect(findRow().at(0).props('metadataLoading')).toBe(true);
+ });
});
describe('pagination', () => {
@@ -57,7 +61,7 @@ describe('Image List', () => {
`(
'when hasNextPage is $hasNextPage and hasPreviousPage is $hasPreviousPage: is $isVisible that the component is visible',
({ hasNextPage, hasPreviousPage, isVisible }) => {
- mountComponent({ hasNextPage, hasPreviousPage });
+ mountComponent({ pageInfo: { ...defaultPageInfo, hasNextPage, hasPreviousPage } });
expect(findPagination().exists()).toBe(isVisible);
expect(findPagination().props('hasPreviousPage')).toBe(hasPreviousPage);
@@ -66,7 +70,7 @@ describe('Image List', () => {
);
it('emits "prev-page" when the user clicks the back page button', () => {
- mountComponent({ hasPreviousPage: true });
+ mountComponent();
findPagination().vm.$emit('prev');
@@ -74,7 +78,7 @@ describe('Image List', () => {
});
it('emits "next-page" when the user clicks the forward page button', () => {
- mountComponent({ hasNextPage: true });
+ mountComponent();
findPagination().vm.$emit('next');
diff --git a/spec/frontend/registry/explorer/components/list_page/registry_header_spec.js b/spec/frontend/registry/explorer/components/list_page/registry_header_spec.js
index 3c997093d46..58439c185e3 100644
--- a/spec/frontend/registry/explorer/components/list_page/registry_header_spec.js
+++ b/spec/frontend/registry/explorer/components/list_page/registry_header_spec.js
@@ -41,9 +41,12 @@ describe('registry_header', () => {
describe('header', () => {
it('has a title', () => {
- mountComponent();
+ mountComponent({ metadataLoading: true });
- expect(findTitleArea().props('title')).toBe(CONTAINER_REGISTRY_TITLE);
+ expect(findTitleArea().props()).toMatchObject({
+ title: CONTAINER_REGISTRY_TITLE,
+ metadataLoading: true,
+ });
});
it('has a commands slot', () => {
diff --git a/spec/frontend/registry/explorer/components/registry_breadcrumb_spec.js b/spec/frontend/registry/explorer/components/registry_breadcrumb_spec.js
index fb0b98ba004..487f33594c1 100644
--- a/spec/frontend/registry/explorer/components/registry_breadcrumb_spec.js
+++ b/spec/frontend/registry/explorer/components/registry_breadcrumb_spec.js
@@ -1,4 +1,4 @@
-import { shallowMount } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import component from '~/registry/explorer/components/registry_breadcrumb.vue';
@@ -6,45 +6,13 @@ describe('Registry Breadcrumb', () => {
let wrapper;
const nameGenerator = jest.fn();
- const crumb = {
- className: 'foo bar',
- tagName: 'div',
- innerHTML: 'baz',
- querySelector: jest.fn(),
- children: [
- {
- tagName: 'a',
- className: 'foo',
- },
- ],
- };
-
- const querySelectorReturnValue = {
- classList: ['js-divider'],
- tagName: 'svg',
- innerHTML: 'foo',
- };
-
- const crumbs = [crumb, { ...crumb, innerHTML: 'foo' }, { ...crumb, className: 'baz' }];
-
const routes = [
- { name: 'foo', meta: { nameGenerator, root: true } },
- { name: 'baz', meta: { nameGenerator } },
+ { name: 'list', path: '/', meta: { nameGenerator, root: true } },
+ { name: 'details', path: '/:id', meta: { nameGenerator } },
];
- const findDivider = () => wrapper.find('.js-divider');
- const findRootRoute = () => wrapper.find({ ref: 'rootRouteLink' });
- const findChildRoute = () => wrapper.find({ ref: 'childRouteLink' });
- const findLastCrumb = () => wrapper.find({ ref: 'lastCrumb' });
-
- const mountComponent = $route => {
- wrapper = shallowMount(component, {
- propsData: {
- crumbs,
- },
- stubs: {
- 'router-link': { name: 'router-link', template: '<a><slot></slot></a>', props: ['to'] },
- },
+ const mountComponent = ($route) => {
+ wrapper = mount(component, {
mocks: {
$route,
$router: {
@@ -58,7 +26,6 @@ describe('Registry Breadcrumb', () => {
beforeEach(() => {
nameGenerator.mockClear();
- crumb.querySelector = jest.fn();
});
afterEach(() => {
@@ -75,8 +42,11 @@ describe('Registry Breadcrumb', () => {
expect(wrapper.element).toMatchSnapshot();
});
- it('contains a router-link for the child route', () => {
- expect(findChildRoute().exists()).toBe(true);
+ it('contains only a single router-link to list', () => {
+ const links = wrapper.findAll('a');
+
+ expect(links).toHaveLength(1);
+ expect(links.at(0).attributes('href')).toBe('/');
});
it('the link text is calculated by nameGenerator', () => {
@@ -86,52 +56,23 @@ describe('Registry Breadcrumb', () => {
describe('when is not rootRoute', () => {
beforeEach(() => {
- crumb.querySelector.mockReturnValue(querySelectorReturnValue);
mountComponent(routes[1]);
});
- it('renders a divider', () => {
- expect(findDivider().exists()).toBe(true);
+ it('renders', () => {
+ expect(wrapper.element).toMatchSnapshot();
});
- it('contains a router-link for the root route', () => {
- expect(findRootRoute().exists()).toBe(true);
- });
+ it('contains two router-links to list and details', () => {
+ const links = wrapper.findAll('a');
- it('contains a router-link for the child route', () => {
- expect(findChildRoute().exists()).toBe(true);
+ expect(links).toHaveLength(2);
+ expect(links.at(0).attributes('href')).toBe('/');
+ expect(links.at(1).attributes('href')).toBe('#');
});
it('the link text is calculated by nameGenerator', () => {
expect(nameGenerator).toHaveBeenCalledTimes(2);
});
});
-
- describe('last crumb', () => {
- const lastChildren = crumb.children[0];
- beforeEach(() => {
- nameGenerator.mockReturnValue('foo');
- mountComponent(routes[0]);
- });
-
- it('has the same tag as the last children of the crumbs', () => {
- expect(findLastCrumb().element.tagName).toBe(lastChildren.tagName.toUpperCase());
- });
-
- it('has the same classes as the last children of the crumbs', () => {
- expect(
- findLastCrumb()
- .classes()
- .join(' '),
- ).toEqual(lastChildren.className);
- });
-
- it('has a link to the current route', () => {
- expect(findChildRoute().props('to')).toEqual({ to: routes[0].name });
- });
-
- it('the link has the correct text', () => {
- expect(findChildRoute().text()).toEqual('foo');
- });
- });
});
diff --git a/spec/frontend/registry/explorer/mock_data.js b/spec/frontend/registry/explorer/mock_data.js
index 72a9bff8a47..b0fc009872c 100644
--- a/spec/frontend/registry/explorer/mock_data.js
+++ b/spec/frontend/registry/explorer/mock_data.js
@@ -8,7 +8,6 @@ export const imagesListResponse = [
location: '0.0.0.0:5000/gitlab-org/gitlab-test/rails-12009',
canDelete: true,
createdAt: '2020-11-03T13:29:21Z',
- tagsCount: 18,
expirationPolicyStartedAt: null,
},
{
@@ -20,7 +19,6 @@ export const imagesListResponse = [
location: '0.0.0.0:5000/gitlab-org/gitlab-test/rails-20572',
canDelete: true,
createdAt: '2020-09-21T06:57:43Z',
- tagsCount: 1,
expirationPolicyStartedAt: null,
},
];
@@ -117,8 +115,13 @@ export const containerRepositoryMock = {
updatedAt: '2020-11-03T13:29:21Z',
tagsCount: 13,
expirationPolicyStartedAt: null,
+ expirationPolicyCleanupStatus: 'UNSCHEDULED',
project: {
visibility: 'public',
+ containerExpirationPolicy: {
+ enabled: false,
+ nextRunAt: '2020-11-27T08:59:27Z',
+ },
__typename: 'Project',
},
};
@@ -158,7 +161,7 @@ export const tagsMock = [
},
];
-export const graphQLImageDetailsMock = override => ({
+export const graphQLImageDetailsMock = (override) => ({
data: {
containerRepository: {
...containerRepositoryMock,
@@ -209,3 +212,26 @@ export const dockerCommands = {
dockerPushCommand: 'barbar',
dockerLoginCommand: 'bazbaz',
};
+
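+// stub response for the separate details query: per-repository tag counts keyed by GraphQL id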
+export const graphQLProjectImageRepositoriesDetailsMock = {
+ data: {
+ project: {
+ containerRepositories: {
+ nodes: [
+ {
+ id: 'gid://gitlab/ContainerRepository/26',
+ tagsCount: 4,
+ __typename: 'ContainerRepository',
+ },
+ {
+ id: 'gid://gitlab/ContainerRepository/11',
+ tagsCount: 1,
+ __typename: 'ContainerRepository',
+ },
+ ],
+ __typename: 'ContainerRepositoryConnection',
+ },
+ __typename: 'Project',
+ },
+ },
+};
diff --git a/spec/frontend/registry/explorer/pages/details_spec.js b/spec/frontend/registry/explorer/pages/details_spec.js
index d307dfe590c..1746a6a63b6 100644
--- a/spec/frontend/registry/explorer/pages/details_spec.js
+++ b/spec/frontend/registry/explorer/pages/details_spec.js
@@ -1,7 +1,7 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
import { GlKeysetPagination } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
-import createMockApollo from 'jest/helpers/mock_apollo_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import Tracking from '~/tracking';
import component from '~/registry/explorer/pages/details.vue';
@@ -15,6 +15,8 @@ import EmptyTagsState from '~/registry/explorer/components/details_page/empty_ta
import getContainerRepositoryDetailsQuery from '~/registry/explorer/graphql/queries/get_container_repository_details.query.graphql';
import deleteContainerRepositoryTagsMutation from '~/registry/explorer/graphql/mutations/delete_container_repository_tags.mutation.graphql';
+import { UNFINISHED_STATUS } from '~/registry/explorer/constants/index';
+
import {
graphQLImageDetailsMock,
graphQLImageDetailsEmptyTagsMock,
@@ -46,7 +48,7 @@ describe('Details Page', () => {
updateName: jest.fn(),
};
- const cleanTags = tagsMock.map(t => {
+ const cleanTags = tagsMock.map((t) => {
const result = { ...t };
// eslint-disable-next-line no-underscore-dangle
delete result.__typename;
@@ -58,7 +60,7 @@ describe('Details Page', () => {
await wrapper.vm.$nextTick();
};
- const tagsArrayToSelectedTags = tags =>
+ const tagsArrayToSelectedTags = (tags) =>
tags.reduce((acc, c) => {
acc[c.name] = true;
return acc;
@@ -334,7 +336,7 @@ describe('Details Page', () => {
findDeleteModal().vm.$emit('confirmDelete');
expect(mutationResolver).toHaveBeenCalledWith(
- expect.objectContaining({ tagNames: tagsMock.map(t => t.name) }),
+ expect.objectContaining({ tagNames: tagsMock.map((t) => t.name) }),
);
});
});
@@ -353,10 +355,13 @@ describe('Details Page', () => {
mountComponent();
await waitForApolloRequestRender();
- expect(findDetailsHeader().props('image')).toMatchObject({
- name: containerRepositoryMock.name,
- project: {
- visibility: containerRepositoryMock.project.visibility,
+ expect(findDetailsHeader().props()).toMatchObject({
+ metadataLoading: false,
+ image: {
+ name: containerRepositoryMock.name,
+ project: {
+ visibility: containerRepositoryMock.project.visibility,
+ },
},
});
});
@@ -398,13 +403,13 @@ describe('Details Page', () => {
cleanupPoliciesHelpPagePath: 'bar',
};
- describe('when expiration_policy_started is not null', () => {
+ describe(`when expirationPolicyCleanupStatus is ${UNFINISHED_STATUS}`, () => {
let resolver;
beforeEach(() => {
resolver = jest.fn().mockResolvedValue(
graphQLImageDetailsMock({
- expirationPolicyStartedAt: Date.now().toString(),
+ expirationPolicyCleanupStatus: UNFINISHED_STATUS,
}),
);
});
@@ -439,7 +444,7 @@ describe('Details Page', () => {
});
});
- describe('when expiration_policy_started is null', () => {
+ describe(`when expirationPolicyCleanupStatus is not ${UNFINISHED_STATUS}`, () => {
it('the component is hidden', async () => {
mountComponent();
await waitForApolloRequestRender();
diff --git a/spec/frontend/registry/explorer/pages/list_spec.js b/spec/frontend/registry/explorer/pages/list_spec.js
index 7d32a667011..c4556934934 100644
--- a/spec/frontend/registry/explorer/pages/list_spec.js
+++ b/spec/frontend/registry/explorer/pages/list_spec.js
@@ -1,8 +1,9 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import { GlSkeletonLoader, GlSprintf, GlAlert, GlSearchBoxByClick } from '@gitlab/ui';
-import createMockApollo from 'jest/helpers/mock_apollo_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
+import getContainerRepositoriesQuery from 'shared_queries/container_registry/get_container_repositories.query.graphql';
import Tracking from '~/tracking';
import component from '~/registry/explorer/pages/list.vue';
import CliCommands from '~/registry/explorer/components/list_page/cli_commands.vue';
@@ -19,8 +20,7 @@ import {
SEARCH_PLACEHOLDER_TEXT,
} from '~/registry/explorer/constants';
-import getProjectContainerRepositoriesQuery from '~/registry/explorer/graphql/queries/get_project_container_repositories.query.graphql';
-import getGroupContainerRepositoriesQuery from '~/registry/explorer/graphql/queries/get_group_container_repositories.query.graphql';
+import getContainerRepositoriesDetails from '~/registry/explorer/graphql/queries/get_container_repositories_details.query.graphql';
import deleteContainerRepositoryMutation from '~/registry/explorer/graphql/mutations/delete_container_repository.mutation.graphql';
import {
@@ -31,6 +31,8 @@ import {
graphQLEmptyImageListMock,
graphQLEmptyGroupImageListMock,
pageInfo,
+ graphQLProjectImageRepositoriesDetailsMock,
+ dockerCommands,
} from '../mock_data';
import { GlModal, GlEmptyState } from '../stubs';
import { $toast } from '../../shared/mocks';
@@ -58,6 +60,7 @@ describe('List Page', () => {
const findEmptySearchMessage = () => wrapper.find('[data-testid="emptySearch"]');
const waitForApolloRequestRender = async () => {
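+ // run pending fake timers first; the details query appears to be scheduled on a timer, so this lets it fire before promises are flushed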
+ jest.runOnlyPendingTimers();
await waitForPromises();
await wrapper.vm.$nextTick();
};
@@ -65,15 +68,15 @@ describe('List Page', () => {
const mountComponent = ({
mocks,
resolver = jest.fn().mockResolvedValue(graphQLImageListMock),
- groupResolver = jest.fn().mockResolvedValue(graphQLImageListMock),
+ detailsResolver = jest.fn().mockResolvedValue(graphQLProjectImageRepositoriesDetailsMock),
mutationResolver = jest.fn().mockResolvedValue(graphQLImageDeleteMock),
- config = {},
+ config = { isGroupPage: false },
} = {}) => {
localVue.use(VueApollo);
const requestHandlers = [
- [getProjectContainerRepositoriesQuery, resolver],
- [getGroupContainerRepositoriesQuery, groupResolver],
+ [getContainerRepositoriesQuery, resolver],
+ [getContainerRepositoriesDetails, detailsResolver],
[deleteContainerRepositoryMutation, mutationResolver],
];
@@ -99,6 +102,7 @@ describe('List Page', () => {
provide() {
return {
config,
+ ...dockerCommands,
};
},
});
@@ -116,6 +120,7 @@ describe('List Page', () => {
expect(findRegistryHeader().exists()).toBe(true);
expect(findRegistryHeader().props()).toMatchObject({
imagesCount: 2,
+ metadataLoading: false,
});
});
@@ -124,6 +129,7 @@ describe('List Page', () => {
characterError: true,
containersErrorImage: 'foo',
helpPagePath: 'bar',
+ isGroupPage: false,
};
it('should show an empty state', () => {
@@ -170,6 +176,12 @@ describe('List Page', () => {
expect(findCliCommands().exists()).toBe(false);
});
+
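+ // during the initial load the registry header should already receive metadataLoading as true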
+ it('title has the metadataLoading props set to true', () => {
+ mountComponent();
+
+ expect(findRegistryHeader().props('metadataLoading')).toBe(true);
+ });
});
describe('list is empty', () => {
@@ -192,15 +204,16 @@ describe('List Page', () => {
expect(findProjectEmptyState().exists()).toBe(true);
});
});
+
describe('group page', () => {
- const groupResolver = jest.fn().mockResolvedValue(graphQLEmptyGroupImageListMock);
+ const resolver = jest.fn().mockResolvedValue(graphQLEmptyGroupImageListMock);
const config = {
isGroupPage: true,
};
it('group empty state is visible', async () => {
- mountComponent({ groupResolver, config });
+ mountComponent({ resolver, config });
await waitForApolloRequestRender();
@@ -208,7 +221,7 @@ describe('List Page', () => {
});
it('cli commands is not visible', async () => {
- mountComponent({ groupResolver, config });
+ mountComponent({ resolver, config });
await waitForApolloRequestRender();
@@ -216,7 +229,7 @@ describe('List Page', () => {
});
it('list header is not visible', async () => {
- mountComponent({ groupResolver, config });
+ mountComponent({ resolver, config });
await waitForApolloRequestRender();
@@ -253,6 +266,39 @@ describe('List Page', () => {
expect(header.text()).toBe(IMAGE_REPOSITORY_LIST_LABEL);
});
+ describe('additional metadata', () => {
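+ // the details query fires on load, must not block the list from rendering, and exposes its loading state to the list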
+ it('is called on component load', async () => {
+ const detailsResolver = jest
+ .fn()
+ .mockResolvedValue(graphQLProjectImageRepositoriesDetailsMock);
+ mountComponent({ detailsResolver });
+
+ jest.runOnlyPendingTimers();
+ await waitForPromises();
+
+ expect(detailsResolver).toHaveBeenCalled();
+ });
+
+ it('does not block the list ui to show', async () => {
+ const detailsResolver = jest.fn().mockRejectedValue();
+ mountComponent({ detailsResolver });
+
+ await waitForApolloRequestRender();
+
+ expect(findImageList().exists()).toBe(true);
+ });
+
+ it('loading state is passed to list component', async () => {
+ // this is a promise that never resolves, to trick apollo to think that this request is still loading
+ const detailsResolver = jest.fn().mockImplementation(() => new Promise(() => {}));
+
+ mountComponent({ detailsResolver });
+ await waitForApolloRequestRender();
+
+ expect(findImageList().props('metadataLoading')).toBe(true);
+ });
+ });
+
describe('delete image', () => {
const deleteImage = async () => {
await wrapper.vm.$nextTick();
@@ -274,7 +320,7 @@ describe('List Page', () => {
const updatedImage = findImageList()
.props('images')
- .find(i => i.id === deletedContainerRepository.id);
+ .find((i) => i.id === deletedContainerRepository.id);
expect(updatedImage.status).toBe(deletedContainerRepository.status);
});
@@ -350,9 +396,15 @@ describe('List Page', () => {
it('when search result is empty displays an empty search message', async () => {
const resolver = jest.fn().mockResolvedValue(graphQLImageListMock);
- mountComponent({ resolver });
+ const detailsResolver = jest
+ .fn()
+ .mockResolvedValue(graphQLProjectImageRepositoriesDetailsMock);
+ mountComponent({ resolver, detailsResolver });
+
+ await waitForApolloRequestRender();
resolver.mockResolvedValue(graphQLEmptyImageListMock);
+ detailsResolver.mockResolvedValue(graphQLEmptyImageListMock);
await doSearch();
@@ -363,28 +415,42 @@ describe('List Page', () => {
describe('pagination', () => {
it('prev-page event triggers a fetchMore request', async () => {
const resolver = jest.fn().mockResolvedValue(graphQLImageListMock);
- mountComponent({ resolver });
+ const detailsResolver = jest
+ .fn()
+ .mockResolvedValue(graphQLProjectImageRepositoriesDetailsMock);
+ mountComponent({ resolver, detailsResolver });
await waitForApolloRequestRender();
findImageList().vm.$emit('prev-page');
+ await wrapper.vm.$nextTick();
expect(resolver).toHaveBeenCalledWith(
- expect.objectContaining({ first: null, before: pageInfo.startCursor }),
+ expect.objectContaining({ before: pageInfo.startCursor }),
+ );
+ expect(detailsResolver).toHaveBeenCalledWith(
+ expect.objectContaining({ before: pageInfo.startCursor }),
);
});
it('next-page event triggers a fetchMore request', async () => {
const resolver = jest.fn().mockResolvedValue(graphQLImageListMock);
- mountComponent({ resolver });
+ const detailsResolver = jest
+ .fn()
+ .mockResolvedValue(graphQLProjectImageRepositoriesDetailsMock);
+ mountComponent({ resolver, detailsResolver });
await waitForApolloRequestRender();
findImageList().vm.$emit('next-page');
+ await wrapper.vm.$nextTick();
expect(resolver).toHaveBeenCalledWith(
expect.objectContaining({ after: pageInfo.endCursor }),
);
+ expect(detailsResolver).toHaveBeenCalledWith(
+ expect.objectContaining({ after: pageInfo.endCursor }),
+ );
});
});
});
@@ -411,7 +477,7 @@ describe('List Page', () => {
mountComponent();
});
- const testTrackingCall = action => {
+ const testTrackingCall = (action) => {
expect(Tracking.event).toHaveBeenCalledWith(undefined, action, {
label: 'registry_repository_delete',
});
diff --git a/spec/frontend/registry/settings/components/__snapshots__/settings_form_spec.js.snap b/spec/frontend/registry/settings/components/__snapshots__/settings_form_spec.js.snap
index d7f89ce070e..7a52b4a5d0f 100644
--- a/spec/frontend/registry/settings/components/__snapshots__/settings_form_spec.js.snap
+++ b/spec/frontend/registry/settings/components/__snapshots__/settings_form_spec.js.snap
@@ -32,7 +32,7 @@ exports[`Settings Form Keep N matches snapshot 1`] = `
exports[`Settings Form Keep Regex matches snapshot 1`] = `
<expiration-input-stub
data-testid="keep-regex-input"
- description="Tags with names that match this regex pattern are kept. %{linkStart}More information%{linkEnd}"
+ description="Tags with names that match this regex pattern are kept. %{linkStart}View regex examples.%{linkEnd}"
error=""
label="Keep tags matching:"
name="keep-regex"
@@ -54,7 +54,7 @@ exports[`Settings Form OlderThan matches snapshot 1`] = `
exports[`Settings Form Remove regex matches snapshot 1`] = `
<expiration-input-stub
data-testid="remove-regex-input"
- description="Tags with names that match this regex pattern are removed. %{linkStart}More information%{linkEnd}"
+ description="Tags with names that match this regex pattern are removed. %{linkStart}View regex examples.%{linkEnd}"
error=""
label="Remove tags matching:"
name="remove-regex"
diff --git a/spec/frontend/registry/settings/components/expiration_dropdown_spec.js b/spec/frontend/registry/settings/components/expiration_dropdown_spec.js
index e0cac317ad6..f777f7ec9de 100644
--- a/spec/frontend/registry/settings/components/expiration_dropdown_spec.js
+++ b/spec/frontend/registry/settings/components/expiration_dropdown_spec.js
@@ -8,14 +8,17 @@ describe('ExpirationDropdown', () => {
const defaultProps = {
name: 'foo',
label: 'label-bar',
- formOptions: [{ key: 'foo', label: 'bar' }, { key: 'baz', label: 'zab' }],
+ formOptions: [
+ { key: 'foo', label: 'bar' },
+ { key: 'baz', label: 'zab' },
+ ],
};
const findFormSelect = () => wrapper.find(GlFormSelect);
const findFormGroup = () => wrapper.find(GlFormGroup);
const findOptions = () => wrapper.findAll('[data-testid="option"]');
- const mountComponent = props => {
+ const mountComponent = (props) => {
wrapper = shallowMount(component, {
stubs: {
GlFormGroup,
diff --git a/spec/frontend/registry/settings/components/expiration_input_spec.js b/spec/frontend/registry/settings/components/expiration_input_spec.js
index 849f85aa265..383158067dc 100644
--- a/spec/frontend/registry/settings/components/expiration_input_spec.js
+++ b/spec/frontend/registry/settings/components/expiration_input_spec.js
@@ -22,7 +22,7 @@ describe('ExpirationInput', () => {
const findDescription = () => wrapper.find('[data-testid="description"]');
const findDescriptionLink = () => wrapper.find(GlLink);
- const mountComponent = props => {
+ const mountComponent = (props) => {
wrapper = shallowMount(component, {
stubs: {
GlSprintf,
diff --git a/spec/frontend/registry/settings/components/expiration_run_text_spec.js b/spec/frontend/registry/settings/components/expiration_run_text_spec.js
index c594b1f449d..d74ee2d1c18 100644
--- a/spec/frontend/registry/settings/components/expiration_run_text_spec.js
+++ b/spec/frontend/registry/settings/components/expiration_run_text_spec.js
@@ -11,7 +11,7 @@ describe('ExpirationToggle', () => {
const findInput = () => wrapper.find(GlFormInput);
const findFormGroup = () => wrapper.find(GlFormGroup);
- const mountComponent = propsData => {
+ const mountComponent = (propsData) => {
wrapper = shallowMount(component, {
stubs: {
GlFormGroup,
diff --git a/spec/frontend/registry/settings/components/expiration_toggle_spec.js b/spec/frontend/registry/settings/components/expiration_toggle_spec.js
index 99ff7a7f77a..ce016e852ee 100644
--- a/spec/frontend/registry/settings/components/expiration_toggle_spec.js
+++ b/spec/frontend/registry/settings/components/expiration_toggle_spec.js
@@ -13,7 +13,7 @@ describe('ExpirationToggle', () => {
const findToggle = () => wrapper.find(GlToggle);
const findDescription = () => wrapper.find('[data-testid="description"]');
- const mountComponent = propsData => {
+ const mountComponent = (propsData) => {
wrapper = shallowMount(component, {
stubs: {
GlFormGroup,
diff --git a/spec/frontend/registry/settings/components/registry_settings_app_spec.js b/spec/frontend/registry/settings/components/registry_settings_app_spec.js
index c31c7bdf99b..32d4f16221d 100644
--- a/spec/frontend/registry/settings/components/registry_settings_app_spec.js
+++ b/spec/frontend/registry/settings/components/registry_settings_app_spec.js
@@ -1,7 +1,7 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import { GlAlert, GlSprintf, GlLink } from '@gitlab/ui';
-import createMockApollo from 'jest/helpers/mock_apollo_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
import component from '~/registry/settings/components/registry_settings_app.vue';
import expirationPolicyQuery from '~/registry/settings/graphql/queries/get_expiration_policy.query.graphql';
import SettingsForm from '~/registry/settings/components/settings_form.vue';
@@ -59,7 +59,7 @@ describe('Registry Settings App', () => {
apolloProvider: fakeApollo,
});
- return requestHandlers.map(request => request[1]);
+ return requestHandlers.map((request) => request[1]);
};
afterEach(() => {
diff --git a/spec/frontend/registry/settings/components/settings_form_spec.js b/spec/frontend/registry/settings/components/settings_form_spec.js
index b89269c0ae4..626f4fcc9f5 100644
--- a/spec/frontend/registry/settings/components/settings_form_spec.js
+++ b/spec/frontend/registry/settings/components/settings_form_spec.js
@@ -1,6 +1,6 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
-import createMockApollo from 'jest/helpers/mock_apollo_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import Tracking from '~/tracking';
import component from '~/registry/settings/components/settings_form.vue';
@@ -103,7 +103,7 @@ describe('Settings Form', () => {
},
});
- return requestHandlers.map(resolvers => resolvers[1]);
+ return requestHandlers.map((resolvers) => resolvers[1]);
};
beforeEach(() => {
diff --git a/spec/frontend/registry/settings/mock_data.js b/spec/frontend/registry/settings/mock_data.js
index 7cc645fcf55..9778f409010 100644
--- a/spec/frontend/registry/settings/mock_data.js
+++ b/spec/frontend/registry/settings/mock_data.js
@@ -8,7 +8,7 @@ export const containerExpirationPolicyData = () => ({
nextRunAt: '2020-11-19T07:37:03.941Z',
});
-export const expirationPolicyPayload = override => ({
+export const expirationPolicyPayload = (override) => ({
data: {
project: {
containerExpirationPolicy: {
diff --git a/spec/frontend/registry/settings/utils_spec.js b/spec/frontend/registry/settings/utils_spec.js
index f92d51db307..7bc627908af 100644
--- a/spec/frontend/registry/settings/utils_spec.js
+++ b/spec/frontend/registry/settings/utils_spec.js
@@ -11,7 +11,10 @@ describe('Utils', () => {
[{ variable: 1 }, { variable: 2 }],
olderThanTranslationGenerator,
);
- expect(result).toEqual([{ variable: 1, label: '1 day' }, { variable: 2, label: '2 days' }]);
+ expect(result).toEqual([
+ { variable: 1, label: '1 day' },
+ { variable: 2, label: '2 days' },
+ ]);
});
});
diff --git a/spec/frontend/related_merge_requests/components/related_merge_requests_spec.js b/spec/frontend/related_merge_requests/components/related_merge_requests_spec.js
index db33a9cdce1..c084410c65b 100644
--- a/spec/frontend/related_merge_requests/components/related_merge_requests_spec.js
+++ b/spec/frontend/related_merge_requests/components/related_merge_requests_spec.js
@@ -14,7 +14,7 @@ describe('RelatedMergeRequests', () => {
let mock;
let mockData;
- beforeEach(done => {
+ beforeEach((done) => {
loadFixtures(FIXTURE_PATH);
mockData = getJSONFixture(FIXTURE_PATH);
@@ -73,10 +73,7 @@ describe('RelatedMergeRequests', () => {
expect(wrapper.find('.js-items-count').text()).toEqual('2');
expect(wrapper.findAll(RelatedIssuableItem).length).toEqual(2);
- const props = wrapper
- .findAll(RelatedIssuableItem)
- .at(1)
- .props();
+ const props = wrapper.findAll(RelatedIssuableItem).at(1).props();
const data = mockData[1];
expect(props.idKey).toEqual(data.id);
diff --git a/spec/frontend/related_merge_requests/store/actions_spec.js b/spec/frontend/related_merge_requests/store/actions_spec.js
index fa031a91c83..a4257d67176 100644
--- a/spec/frontend/related_merge_requests/store/actions_spec.js
+++ b/spec/frontend/related_merge_requests/store/actions_spec.js
@@ -23,7 +23,7 @@ describe('RelatedMergeRequest store actions', () => {
});
describe('setInitialState', () => {
- it('commits types.SET_INITIAL_STATE with given props', done => {
+ it('commits types.SET_INITIAL_STATE with given props', (done) => {
const props = { a: 1, b: 2 };
testAction(
@@ -38,13 +38,13 @@ describe('RelatedMergeRequest store actions', () => {
});
describe('requestData', () => {
- it('commits types.REQUEST_DATA', done => {
+ it('commits types.REQUEST_DATA', (done) => {
testAction(actions.requestData, null, {}, [{ type: types.REQUEST_DATA }], [], done);
});
});
describe('receiveDataSuccess', () => {
- it('commits types.RECEIVE_DATA_SUCCESS with data', done => {
+ it('commits types.RECEIVE_DATA_SUCCESS with data', (done) => {
const data = { a: 1, b: 2 };
testAction(
@@ -59,7 +59,7 @@ describe('RelatedMergeRequest store actions', () => {
});
describe('receiveDataError', () => {
- it('commits types.RECEIVE_DATA_ERROR', done => {
+ it('commits types.RECEIVE_DATA_ERROR', (done) => {
testAction(
actions.receiveDataError,
null,
@@ -73,7 +73,7 @@ describe('RelatedMergeRequest store actions', () => {
describe('fetchMergeRequests', () => {
describe('for a successful request', () => {
- it('should dispatch success action', done => {
+ it('should dispatch success action', (done) => {
const data = { a: 1 };
mock.onGet(`${state.apiEndpoint}?per_page=100`).replyOnce(200, data, { 'x-total': 2 });
@@ -89,7 +89,7 @@ describe('RelatedMergeRequest store actions', () => {
});
describe('for a failing request', () => {
- it('should dispatch error action', done => {
+ it('should dispatch error action', (done) => {
mock.onGet(`${state.apiEndpoint}?per_page=100`).replyOnce(400);
testAction(
diff --git a/spec/frontend/releases/__snapshots__/util_spec.js.snap b/spec/frontend/releases/__snapshots__/util_spec.js.snap
index f49d3d7b716..c9f84be97c4 100644
--- a/spec/frontend/releases/__snapshots__/util_spec.js.snap
+++ b/spec/frontend/releases/__snapshots__/util_spec.js.snap
@@ -89,18 +89,6 @@ Object {
],
"milestones": Array [
Object {
- "description": "The 12.4 milestone",
- "id": "gid://gitlab/Milestone/124",
- "issueStats": Object {
- "closed": 1,
- "total": 4,
- },
- "stats": undefined,
- "title": "12.4",
- "webPath": undefined,
- "webUrl": "/releases-namespace/releases-project/-/milestones/2",
- },
- Object {
"description": "The 12.3 milestone",
"id": "gid://gitlab/Milestone/123",
"issueStats": Object {
@@ -112,6 +100,18 @@ Object {
"webPath": undefined,
"webUrl": "/releases-namespace/releases-project/-/milestones/1",
},
+ Object {
+ "description": "The 12.4 milestone",
+ "id": "gid://gitlab/Milestone/124",
+ "issueStats": Object {
+ "closed": 1,
+ "total": 4,
+ },
+ "stats": undefined,
+ "title": "12.4",
+ "webPath": undefined,
+ "webUrl": "/releases-namespace/releases-project/-/milestones/2",
+ },
],
"name": "The first release",
"releasedAt": "2018-12-10T00:00:00Z",
@@ -217,18 +217,6 @@ Object {
],
"milestones": Array [
Object {
- "description": "The 12.4 milestone",
- "id": "gid://gitlab/Milestone/124",
- "issueStats": Object {
- "closed": 1,
- "total": 4,
- },
- "stats": undefined,
- "title": "12.4",
- "webPath": undefined,
- "webUrl": "/releases-namespace/releases-project/-/milestones/2",
- },
- Object {
"description": "The 12.3 milestone",
"id": "gid://gitlab/Milestone/123",
"issueStats": Object {
@@ -240,6 +228,18 @@ Object {
"webPath": undefined,
"webUrl": "/releases-namespace/releases-project/-/milestones/1",
},
+ Object {
+ "description": "The 12.4 milestone",
+ "id": "gid://gitlab/Milestone/124",
+ "issueStats": Object {
+ "closed": 1,
+ "total": 4,
+ },
+ "stats": undefined,
+ "title": "12.4",
+ "webPath": undefined,
+ "webUrl": "/releases-namespace/releases-project/-/milestones/2",
+ },
],
"name": "The first release",
"releasedAt": "2018-12-10T00:00:00Z",
diff --git a/spec/frontend/releases/components/app_edit_new_spec.js b/spec/frontend/releases/components/app_edit_new_spec.js
index 1d409b5b590..828d1b46a80 100644
--- a/spec/frontend/releases/components/app_edit_new_spec.js
+++ b/spec/frontend/releases/components/app_edit_new_spec.js
@@ -71,7 +71,7 @@ describe('Release edit/new component', () => {
await wrapper.vm.$nextTick();
- wrapper.element.querySelectorAll('input').forEach(input => jest.spyOn(input, 'focus'));
+ wrapper.element.querySelectorAll('input').forEach((input) => jest.spyOn(input, 'focus'));
};
beforeEach(() => {
@@ -104,7 +104,7 @@ describe('Release edit/new component', () => {
const firstEnabledInput = wrapper.element.querySelector('input:enabled');
const allInputs = wrapper.element.querySelectorAll('input');
- allInputs.forEach(input => {
+ allInputs.forEach((input) => {
const expectedFocusCalls = input === firstEnabledInput ? 1 : 0;
expect(input.focus).toHaveBeenCalledTimes(expectedFocusCalls);
});
@@ -152,7 +152,7 @@ describe('Release edit/new component', () => {
beforeEach(async () => {
commonUtils.getParameterByName = jest
.fn()
- .mockImplementation(paramToGet => ({ [BACK_URL_PARAM]: backUrl }[paramToGet]));
+ .mockImplementation((paramToGet) => ({ [BACK_URL_PARAM]: backUrl }[paramToGet]));
await factory();
});
diff --git a/spec/frontend/releases/components/app_index_spec.js b/spec/frontend/releases/components/app_index_spec.js
index 9f1577c2f1e..1481dd30fd4 100644
--- a/spec/frontend/releases/components/app_index_spec.js
+++ b/spec/frontend/releases/components/app_index_spec.js
@@ -13,7 +13,7 @@ import ReleasesPagination from '~/releases/components/releases_pagination.vue';
jest.mock('~/lib/utils/common_utils', () => ({
...jest.requireActual('~/lib/utils/common_utils'),
- getParameterByName: jest.fn().mockImplementation(paramName => {
+ getParameterByName: jest.fn().mockImplementation((paramName) => {
return `${paramName}_param_value`;
}),
}));
@@ -28,7 +28,7 @@ describe('Releases App ', () => {
let wrapper;
let fetchReleaseSpy;
- const paginatedReleases = rge(21).map(index => ({
+ const paginatedReleases = rge(21).map((index) => ({
...convertObjectPropsToCamelCase(release, { deep: true }),
tagName: `${index}.00`,
}));
@@ -98,10 +98,10 @@ describe('Releases App ', () => {
});
it('renders loading icon', () => {
- expect(wrapper.contains('.js-loading')).toBe(true);
- expect(wrapper.contains('.js-empty-state')).toBe(false);
- expect(wrapper.contains('.js-success-state')).toBe(false);
- expect(wrapper.contains(ReleasesPagination)).toBe(false);
+ expect(wrapper.find('.js-loading').exists()).toBe(true);
+ expect(wrapper.find('.js-empty-state').exists()).toBe(false);
+ expect(wrapper.find('.js-success-state').exists()).toBe(false);
+ expect(wrapper.find(ReleasesPagination).exists()).toBe(false);
});
});
@@ -115,10 +115,10 @@ describe('Releases App ', () => {
});
it('renders success state', () => {
- expect(wrapper.contains('.js-loading')).toBe(false);
- expect(wrapper.contains('.js-empty-state')).toBe(false);
- expect(wrapper.contains('.js-success-state')).toBe(true);
- expect(wrapper.contains(ReleasesPagination)).toBe(true);
+ expect(wrapper.find('.js-loading').exists()).toBe(false);
+ expect(wrapper.find('.js-empty-state').exists()).toBe(false);
+ expect(wrapper.find('.js-success-state').exists()).toBe(true);
+ expect(wrapper.find(ReleasesPagination).exists()).toBe(true);
});
});
@@ -132,10 +132,10 @@ describe('Releases App ', () => {
});
it('renders success state', () => {
- expect(wrapper.contains('.js-loading')).toBe(false);
- expect(wrapper.contains('.js-empty-state')).toBe(false);
- expect(wrapper.contains('.js-success-state')).toBe(true);
- expect(wrapper.contains(ReleasesPagination)).toBe(true);
+ expect(wrapper.find('.js-loading').exists()).toBe(false);
+ expect(wrapper.find('.js-empty-state').exists()).toBe(false);
+ expect(wrapper.find('.js-success-state').exists()).toBe(true);
+ expect(wrapper.find(ReleasesPagination).exists()).toBe(true);
});
});
@@ -147,9 +147,9 @@ describe('Releases App ', () => {
});
it('renders empty state', () => {
- expect(wrapper.contains('.js-loading')).toBe(false);
- expect(wrapper.contains('.js-empty-state')).toBe(true);
- expect(wrapper.contains('.js-success-state')).toBe(false);
+ expect(wrapper.find('.js-loading').exists()).toBe(false);
+ expect(wrapper.find('.js-empty-state').exists()).toBe(true);
+ expect(wrapper.find('.js-success-state').exists()).toBe(false);
});
});
diff --git a/spec/frontend/releases/components/app_show_spec.js b/spec/frontend/releases/components/app_show_spec.js
index 181fa0150f1..b1f94ca9859 100644
--- a/spec/frontend/releases/components/app_show_spec.js
+++ b/spec/frontend/releases/components/app_show_spec.js
@@ -17,7 +17,7 @@ describe('Release show component', () => {
release = convertObjectPropsToCamelCase(originalRelease);
});
- const factory = state => {
+ const factory = (state) => {
actions = {
fetchRelease: jest.fn(),
};
diff --git a/spec/frontend/releases/components/asset_links_form_spec.js b/spec/frontend/releases/components/asset_links_form_spec.js
index 6794a56debc..2b2ad0150ab 100644
--- a/spec/frontend/releases/components/asset_links_form_spec.js
+++ b/spec/frontend/releases/components/asset_links_form_spec.js
@@ -30,7 +30,7 @@ describe('Release edit component', () => {
updateAssetLinkName: jest.fn(),
updateAssetLinkType: jest.fn(),
removeAssetLink: jest.fn().mockImplementation((_context, linkId) => {
- state.release.assets.links = state.release.assets.links.filter(l => l.id !== linkId);
+ state.release.assets.links = state.release.assets.links.filter((l) => l.id !== linkId);
}),
};
@@ -104,7 +104,7 @@ describe('Release edit component', () => {
expect(actions.updateAssetLinkUrl).not.toHaveBeenCalled();
};
- const dispatchKeydowEvent = eventParams => {
+ const dispatchKeydowEvent = (eventParams) => {
const event = new KeyboardEvent('keydown', eventParams);
input.dispatchEvent(event);
@@ -162,7 +162,7 @@ describe('Release edit component', () => {
expect(actions.updateAssetLinkUrl).not.toHaveBeenCalled();
};
- const dispatchKeydowEvent = eventParams => {
+ const dispatchKeydowEvent = (eventParams) => {
const event = new KeyboardEvent('keydown', eventParams);
input.dispatchEvent(event);
diff --git a/spec/frontend/releases/components/issuable_stats_spec.js b/spec/frontend/releases/components/issuable_stats_spec.js
index d8211ec2adc..8fc0779da14 100644
--- a/spec/frontend/releases/components/issuable_stats_spec.js
+++ b/spec/frontend/releases/components/issuable_stats_spec.js
@@ -7,7 +7,7 @@ describe('~/releases/components/issuable_stats.vue', () => {
let wrapper;
let defaultProps;
- const createComponent = propUpdates => {
+ const createComponent = (propUpdates) => {
wrapper = mount(IssuableStats, {
propsData: {
...defaultProps,
diff --git a/spec/frontend/releases/components/release_block_assets_spec.js b/spec/frontend/releases/components/release_block_assets_spec.js
index 126ca27e8a6..77bd35f94aa 100644
--- a/spec/frontend/releases/components/release_block_assets_spec.js
+++ b/spec/frontend/releases/components/release_block_assets_spec.js
@@ -26,8 +26,8 @@ describe('Release block assets', () => {
});
};
- const findSectionHeading = type =>
- wrapper.findAll('h5').filter(h5 => h5.text() === sections[type]);
+ const findSectionHeading = (type) =>
+ wrapper.findAll('h5').filter((h5) => h5.text() === sections[type]);
beforeEach(() => {
defaultProps = { assets: convertObjectPropsToCamelCase(assets, { deep: true }) };
@@ -53,7 +53,7 @@ describe('Release block assets', () => {
});
it('renders sources with the expected text and URL', () => {
- defaultProps.assets.sources.forEach(s => {
+ defaultProps.assets.sources.forEach((s) => {
const sourceLink = wrapper.find(`li>a[href="${s.url}"]`);
expect(sourceLink.exists()).toBe(true);
@@ -62,7 +62,7 @@ describe('Release block assets', () => {
});
it('renders a heading for each assets type (except sources)', () => {
- Object.keys(sections).forEach(type => {
+ Object.keys(sections).forEach((type) => {
const sectionHeadings = findSectionHeading(type);
expect(sectionHeadings).toHaveLength(1);
@@ -70,7 +70,7 @@ describe('Release block assets', () => {
});
it('renders asset links with the expected text and URL', () => {
- defaultProps.assets.links.forEach(l => {
+ defaultProps.assets.links.forEach((l) => {
const sourceLink = wrapper.find(`li>a[href="${l.directAssetUrl}"]`);
expect(sourceLink.exists()).toBe(true);
@@ -84,7 +84,7 @@ describe('Release block assets', () => {
beforeEach(() => {
defaultProps.assets.links = defaultProps.assets.links.filter(
- l => l.linkType !== typeToExclude,
+ (l) => l.linkType !== typeToExclude,
);
createComponent(defaultProps);
});
@@ -98,7 +98,7 @@ describe('Release block assets', () => {
describe('sources', () => {
const testSources = ({ shouldSourcesBeRendered }) => {
- assets.sources.forEach(s => {
+ assets.sources.forEach((s) => {
expect(wrapper.find(`a[href="${s.url}"]`).exists()).toBe(shouldSourcesBeRendered);
});
};
diff --git a/spec/frontend/releases/components/release_block_milestone_info_spec.js b/spec/frontend/releases/components/release_block_milestone_info_spec.js
index bb34693c757..4eb15e9da12 100644
--- a/spec/frontend/releases/components/release_block_milestone_info_spec.js
+++ b/spec/frontend/releases/components/release_block_milestone_info_spec.js
@@ -12,7 +12,7 @@ describe('Release block milestone info', () => {
let wrapper;
let milestones;
- const factory = props => {
+ const factory = (props) => {
wrapper = mount(ReleaseBlockMilestoneInfo, {
propsData: props,
});
@@ -54,22 +54,10 @@ describe('Release block milestone info', () => {
});
it('renders a list of links to all associated milestones', () => {
- // The API currently returns the milestones in a non-deterministic order,
- // which causes the frontend fixture used by this test to return the
- // milestones in one order locally and a different order in the CI pipeline.
- // This is a bug and is tracked here: https://gitlab.com/gitlab-org/gitlab/-/issues/259012
- // When this bug is fixed this expectation should be updated to
- // assert the expected order.
- const containerText = trimText(milestoneListContainer().text());
- expect(
- containerText.includes('Milestones 12.4 • 12.3') ||
- containerText.includes('Milestones 12.3 • 12.4'),
- ).toBe(true);
+ expect(milestoneListContainer().text()).toMatchInterpolatedText('Milestones 12.3 • 12.4');
milestones.forEach((m, i) => {
- const milestoneLink = milestoneListContainer()
- .findAll(GlLink)
- .at(i);
+ const milestoneLink = milestoneListContainer().findAll(GlLink).at(i);
expect(milestoneLink.text()).toBe(m.title);
expect(milestoneLink.attributes('href')).toBe(m.webUrl);
@@ -108,19 +96,17 @@ describe('Release block milestone info', () => {
});
}
- fullListString = lotsOfMilestones.map(m => m.title).join(' • ');
+ fullListString = lotsOfMilestones.map((m) => m.title).join(' • ');
abbreviatedListString = lotsOfMilestones
.slice(0, MAX_MILESTONES_TO_DISPLAY)
- .map(m => m.title)
+ .map((m) => m.title)
.join(' • ');
return factory({ milestones: lotsOfMilestones });
});
const clickShowMoreFewerButton = () => {
- milestoneListContainer()
- .find(GlButton)
- .trigger('click');
+ milestoneListContainer().find(GlButton).trigger('click');
return wrapper.vm.$nextTick();
};
@@ -161,7 +147,7 @@ describe('Release block milestone info', () => {
/** Ensures we don't have any issues with dividing by zero when computing percentages */
describe('when all issue counts are zero', () => {
beforeEach(() => {
- milestones = milestones.map(m => ({
+ milestones = milestones.map((m) => ({
...m,
issueStats: {
...m.issueStats,
@@ -178,7 +164,7 @@ describe('Release block milestone info', () => {
describe('if the API response is missing the "issue_stats" property', () => {
beforeEach(() => {
- milestones = milestones.map(m => ({
+ milestones = milestones.map((m) => ({
...m,
issueStats: undefined,
}));
@@ -199,7 +185,7 @@ describe('Release block milestone info', () => {
describe('if the API response includes the "mr_stats" property', () => {
beforeEach(() => {
- milestones = milestones.map(m => ({
+ milestones = milestones.map((m) => ({
...m,
mrStats: {
total: 15,
diff --git a/spec/frontend/releases/components/releases_pagination_graphql_spec.js b/spec/frontend/releases/components/releases_pagination_graphql_spec.js
index bba5e532e5e..cee5e72e1c0 100644
--- a/spec/frontend/releases/components/releases_pagination_graphql_spec.js
+++ b/spec/frontend/releases/components/releases_pagination_graphql_spec.js
@@ -24,7 +24,7 @@ describe('~/releases/components/releases_pagination_graphql.vue', () => {
const projectPath = 'my/project';
- const createComponent = pageInfo => {
+ const createComponent = (pageInfo) => {
listModule = createListModule({ projectPath });
listModule.state.graphQlPageInfo = pageInfo;
@@ -72,7 +72,7 @@ describe('~/releases/components/releases_pagination_graphql.vue', () => {
});
it('does not render anything', () => {
- expect(wrapper.isEmpty()).toBe(true);
+ expect(wrapper.html()).toBe('');
});
});
diff --git a/spec/frontend/releases/components/releases_pagination_rest_spec.js b/spec/frontend/releases/components/releases_pagination_rest_spec.js
index 59c0c31413a..b015792c96b 100644
--- a/spec/frontend/releases/components/releases_pagination_rest_spec.js
+++ b/spec/frontend/releases/components/releases_pagination_rest_spec.js
@@ -17,7 +17,7 @@ describe('~/releases/components/releases_pagination_rest.vue', () => {
const projectId = 19;
- const createComponent = pageInfo => {
+ const createComponent = (pageInfo) => {
listModule = createListModule({ projectId });
listModule.state.restPageInfo = pageInfo;
diff --git a/spec/frontend/releases/components/releases_pagination_spec.js b/spec/frontend/releases/components/releases_pagination_spec.js
index 2466fb53a68..1d47da31f38 100644
--- a/spec/frontend/releases/components/releases_pagination_spec.js
+++ b/spec/frontend/releases/components/releases_pagination_spec.js
@@ -10,7 +10,7 @@ localVue.use(Vuex);
describe('~/releases/components/releases_pagination.vue', () => {
let wrapper;
- const createComponent = useGraphQLEndpoint => {
+ const createComponent = (useGraphQLEndpoint) => {
const store = new Vuex.Store({
getters: {
useGraphQLEndpoint: () => useGraphQLEndpoint,
diff --git a/spec/frontend/releases/components/tag_field_new_spec.js b/spec/frontend/releases/components/tag_field_new_spec.js
index b6ebc496f33..eba0e286b27 100644
--- a/spec/frontend/releases/components/tag_field_new_spec.js
+++ b/spec/frontend/releases/components/tag_field_new_spec.js
@@ -83,7 +83,7 @@ describe('releases/components/tag_field_new', () => {
* @param {'shown' | 'hidden'} state The expected state of the validation message.
* Should be passed either 'shown' or 'hidden'
*/
- const expectValidationMessageToBe = state => {
+ const expectValidationMessageToBe = (state) => {
return wrapper.vm.$nextTick().then(() => {
expect(findTagNameFormGroup().element).toHaveClass(
state === 'shown' ? 'is-invalid' : 'is-valid',
diff --git a/spec/frontend/releases/stores/modules/detail/actions_spec.js b/spec/frontend/releases/stores/modules/detail/actions_spec.js
index abd0db6a589..396e7bd8745 100644
--- a/spec/frontend/releases/stores/modules/detail/actions_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/actions_spec.js
@@ -435,7 +435,7 @@ describe('Release detail actions', () => {
expect(api.deleteReleaseLink).toHaveBeenCalledTimes(
getters.releaseLinksToDelete.length,
);
- getters.releaseLinksToDelete.forEach(link => {
+ getters.releaseLinksToDelete.forEach((link) => {
expect(api.deleteReleaseLink).toHaveBeenCalledWith(
state.projectId,
state.tagName,
@@ -446,7 +446,7 @@ describe('Release detail actions', () => {
expect(api.createReleaseLink).toHaveBeenCalledTimes(
getters.releaseLinksToCreate.length,
);
- getters.releaseLinksToCreate.forEach(link => {
+ getters.releaseLinksToCreate.forEach((link) => {
expect(api.createReleaseLink).toHaveBeenCalledWith(
state.projectId,
state.tagName,
diff --git a/spec/frontend/releases/stores/modules/list/helpers.js b/spec/frontend/releases/stores/modules/list/helpers.js
index 3ca255eaf8c..3913eba31b8 100644
--- a/spec/frontend/releases/stores/modules/list/helpers.js
+++ b/spec/frontend/releases/stores/modules/list/helpers.js
@@ -1,5 +1,5 @@
import state from '~/releases/stores/modules/list/state';
-export const resetStore = store => {
+export const resetStore = (store) => {
store.replaceState(state());
};
diff --git a/spec/frontend/reports/accessibility_report/store/actions_spec.js b/spec/frontend/reports/accessibility_report/store/actions_spec.js
index 9f210659cfd..4e607fa5a76 100644
--- a/spec/frontend/reports/accessibility_report/store/actions_spec.js
+++ b/spec/frontend/reports/accessibility_report/store/actions_spec.js
@@ -17,7 +17,7 @@ describe('Accessibility Reports actions', () => {
});
describe('setEndpoints', () => {
- it('should commit SET_ENDPOINTS mutation', done => {
+ it('should commit SET_ENDPOINTS mutation', (done) => {
const endpoint = 'endpoint.json';
testAction(
@@ -46,7 +46,7 @@ describe('Accessibility Reports actions', () => {
});
describe('success', () => {
- it('should commit REQUEST_REPORT mutation and dispatch receiveReportSuccess', done => {
+ it('should commit REQUEST_REPORT mutation and dispatch receiveReportSuccess', (done) => {
const data = { report: { summary: {} } };
mock.onGet(`${TEST_HOST}/endpoint.json`).reply(200, data);
@@ -67,7 +67,7 @@ describe('Accessibility Reports actions', () => {
});
describe('error', () => {
- it('should commit REQUEST_REPORT and RECEIVE_REPORT_ERROR mutations', done => {
+ it('should commit REQUEST_REPORT and RECEIVE_REPORT_ERROR mutations', (done) => {
mock.onGet(`${TEST_HOST}/endpoint.json`).reply(500);
testAction(
@@ -83,7 +83,7 @@ describe('Accessibility Reports actions', () => {
});
describe('receiveReportSuccess', () => {
- it('should commit RECEIVE_REPORT_SUCCESS mutation with 200', done => {
+ it('should commit RECEIVE_REPORT_SUCCESS mutation with 200', (done) => {
testAction(
actions.receiveReportSuccess,
{ status: 200, data: mockReport },
@@ -94,7 +94,7 @@ describe('Accessibility Reports actions', () => {
);
});
- it('should not commit RECEIVE_REPORTS_SUCCESS mutation with 204', done => {
+ it('should not commit RECEIVE_REPORTS_SUCCESS mutation with 204', (done) => {
testAction(
actions.receiveReportSuccess,
{ status: 204, data: mockReport },
@@ -107,7 +107,7 @@ describe('Accessibility Reports actions', () => {
});
describe('receiveReportError', () => {
- it('should commit RECEIVE_REPORT_ERROR mutation', done => {
+ it('should commit RECEIVE_REPORT_ERROR mutation', (done) => {
testAction(
actions.receiveReportError,
null,
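
The `(done)` callbacks above all feed the shared Vuex action helper; its positional signature can be read off the fuller calls later in this diff (see the reports store actions spec). A minimal sketch of the pattern, with module paths inferred from the spec path and the payload shape used as an illustrative stand-in:

    import testAction from 'helpers/vuex_action_helper';
    import * as actions from '~/reports/accessibility_report/store/actions'; // path assumed from the spec location
    import * as types from '~/reports/accessibility_report/store/mutation_types'; // path assumed

    // testAction(action, payload, state, expectedMutations, expectedActions, done)
    it('commits the expected mutation', (done) => {
      testAction(
        actions.setEndpoints,                                       // action under test
        'endpoint.json',                                            // payload (illustrative)
        {},                                                         // initial state
        [{ type: types.SET_ENDPOINTS, payload: 'endpoint.json' }],  // expected mutations
        [],                                                         // expected dispatched actions
        done,                                                       // invoked once all expectations pass
      );
    });
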
diff --git a/spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js b/spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js
index 77d7c6f8678..ecb657af6f1 100644
--- a/spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js
+++ b/spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js
@@ -139,11 +139,7 @@ describe('Grouped code quality reports app', () => {
});
it('renders a help icon with more information', () => {
- expect(
- findWidget()
- .find('[data-testid="question-icon"]')
- .exists(),
- ).toBe(true);
+ expect(findWidget().find('[data-testid="question-icon"]').exists()).toBe(true);
});
});
@@ -157,11 +153,7 @@ describe('Grouped code quality reports app', () => {
});
it('does not render a help icon', () => {
- expect(
- findWidget()
- .find('[data-testid="question-icon"]')
- .exists(),
- ).toBe(false);
+ expect(findWidget().find('[data-testid="question-icon"]').exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/reports/codequality_report/store/actions_spec.js b/spec/frontend/reports/codequality_report/store/actions_spec.js
index 7d9e4bbbe9f..321785cb85a 100644
--- a/spec/frontend/reports/codequality_report/store/actions_spec.js
+++ b/spec/frontend/reports/codequality_report/store/actions_spec.js
@@ -33,7 +33,7 @@ describe('Codequality Reports actions', () => {
});
describe('setPaths', () => {
- it('should commit SET_PATHS mutation', done => {
+ it('should commit SET_PATHS mutation', (done) => {
const paths = {
basePath: 'basePath',
headPath: 'headPath',
@@ -67,7 +67,7 @@ describe('Codequality Reports actions', () => {
});
describe('on success', () => {
- it('commits REQUEST_REPORTS and dispatches receiveReportsSuccess', done => {
+ it('commits REQUEST_REPORTS and dispatches receiveReportsSuccess', (done) => {
mock.onGet(`${TEST_HOST}/head.json`).reply(200, headIssues);
mock.onGet(`${TEST_HOST}/base.json`).reply(200, baseIssues);
@@ -91,7 +91,7 @@ describe('Codequality Reports actions', () => {
});
describe('on error', () => {
- it('commits REQUEST_REPORTS and dispatches receiveReportsError', done => {
+ it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => {
mock.onGet(`${TEST_HOST}/head.json`).reply(500);
testAction(
@@ -106,7 +106,7 @@ describe('Codequality Reports actions', () => {
});
describe('with no base path', () => {
- it('commits REQUEST_REPORTS and dispatches receiveReportsError', done => {
+ it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => {
localState.basePath = null;
testAction(
@@ -122,7 +122,7 @@ describe('Codequality Reports actions', () => {
});
describe('receiveReportsSuccess', () => {
- it('commits RECEIVE_REPORTS_SUCCESS', done => {
+ it('commits RECEIVE_REPORTS_SUCCESS', (done) => {
const data = { issues: [] };
testAction(
@@ -137,7 +137,7 @@ describe('Codequality Reports actions', () => {
});
describe('receiveReportsError', () => {
- it('commits RECEIVE_REPORTS_ERROR', done => {
+ it('commits RECEIVE_REPORTS_ERROR', (done) => {
testAction(
actions.receiveReportsError,
null,
diff --git a/spec/frontend/reports/codequality_report/store/utils/codequality_comparison_spec.js b/spec/frontend/reports/codequality_report/store/utils/codequality_comparison_spec.js
index 5dd69d3c4d4..085d697672d 100644
--- a/spec/frontend/reports/codequality_report/store/utils/codequality_comparison_spec.js
+++ b/spec/frontend/reports/codequality_report/store/utils/codequality_comparison_spec.js
@@ -11,17 +11,17 @@ jest.mock('~/reports/codequality_report/workers/codequality_comparison_worker',
addEventListener: (_, callback) => {
mockPostMessageCallback = callback;
},
- postMessage: data => {
+ postMessage: (data) => {
if (!data.headIssues) return mockPostMessageCallback({ data: {} });
if (!data.baseIssues) throw new Error();
const key = 'fingerprint';
return mockPostMessageCallback({
data: {
newIssues: data.headIssues.filter(
- item => !data.baseIssues.find(el => el[key] === item[key]),
+ (item) => !data.baseIssues.find((el) => el[key] === item[key]),
),
resolvedIssues: data.baseIssues.filter(
- item => !data.headIssues.find(el => el[key] === item[key]),
+ (item) => !data.headIssues.find((el) => el[key] === item[key]),
),
},
});
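
The worker mock above diffs the head and base issue lists by fingerprint. A standalone sketch of the same comparison without the worker plumbing (this is not the production worker code, which sits outside this diff):

    // Issues present only in HEAD are "new"; issues present only in BASE are "resolved".
    const compareByFingerprint = (headIssues, baseIssues) => {
      const key = 'fingerprint';
      const baseKeys = new Set(baseIssues.map((issue) => issue[key]));
      const headKeys = new Set(headIssues.map((issue) => issue[key]));

      return {
        newIssues: headIssues.filter((issue) => !baseKeys.has(issue[key])),
        resolvedIssues: baseIssues.filter((issue) => !headKeys.has(issue[key])),
      };
    };
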
diff --git a/spec/frontend/reports/components/grouped_issues_list_spec.js b/spec/frontend/reports/components/grouped_issues_list_spec.js
index 1172e514707..c6eebf05dd7 100644
--- a/spec/frontend/reports/components/grouped_issues_list_spec.js
+++ b/spec/frontend/reports/components/grouped_issues_list_spec.js
@@ -13,7 +13,7 @@ describe('Grouped Issues List', () => {
});
};
- const findHeading = groupName => wrapper.find(`[data-testid="${groupName}Heading"`);
+ const findHeading = (groupName) => wrapper.find(`[data-testid="${groupName}Heading"`);
afterEach(() => {
wrapper.destroy();
@@ -37,7 +37,7 @@ describe('Grouped Issues List', () => {
describe('without data', () => {
beforeEach(createComponent);
- it.each(['unresolved', 'resolved'])('does not a render a header for %s issues', issueName => {
+ it.each(['unresolved', 'resolved'])('does not a render a header for %s issues', (issueName) => {
expect(findHeading(issueName).exists()).toBe(false);
});
@@ -59,7 +59,7 @@ describe('Grouped Issues List', () => {
expect(findHeading(groupName).text()).toBe(givenHeading);
});
- it.each(['resolved', 'unresolved'])('renders all %s issues', issueName => {
+ it.each(['resolved', 'unresolved'])('renders all %s issues', (issueName) => {
const issues = [{ name: 'foo' }, { name: 'bar' }];
createComponent({
diff --git a/spec/frontend/reports/components/grouped_test_reports_app_spec.js b/spec/frontend/reports/components/grouped_test_reports_app_spec.js
index 66d429017b2..492192988fb 100644
--- a/spec/frontend/reports/components/grouped_test_reports_app_spec.js
+++ b/spec/frontend/reports/components/grouped_test_reports_app_spec.js
@@ -22,7 +22,7 @@ describe('Grouped test reports app', () => {
let wrapper;
let mockStore;
- const mountComponent = ({ props = { pipelinePath }, testFailureHistory = false } = {}) => {
+ const mountComponent = ({ props = { pipelinePath } } = {}) => {
wrapper = mount(Component, {
store: mockStore,
localVue,
@@ -31,15 +31,10 @@ describe('Grouped test reports app', () => {
pipelinePath,
...props,
},
- provide: {
- glFeatures: {
- testFailureHistory,
- },
- },
});
};
- const setReports = reports => {
+ const setReports = (reports) => {
mockStore.state.status = reports.status;
mockStore.state.summary = reports.summary;
mockStore.state.reports = reports.suites;
@@ -232,11 +227,9 @@ describe('Grouped test reports app', () => {
});
it('renders resolved errors', () => {
- expect(
- findAllIssueDescriptions()
- .at(2)
- .text(),
- ).toContain(resolvedFailures.suites[0].resolved_errors[0].name);
+ expect(findAllIssueDescriptions().at(2).text()).toContain(
+ resolvedFailures.suites[0].resolved_errors[0].name,
+ );
});
});
@@ -244,50 +237,25 @@ describe('Grouped test reports app', () => {
describe('with recent failures counts', () => {
beforeEach(() => {
setReports(recentFailuresTestReports);
+ mountComponent();
});
- describe('with feature flag enabled', () => {
- beforeEach(() => {
- mountComponent({ testFailureHistory: true });
- });
-
- it('renders the recently failed tests summary', () => {
- expect(findHeader().text()).toContain(
- '2 out of 3 failed tests have failed more than once in the last 14 days',
- );
- });
-
- it('renders the recently failed count on the test suite', () => {
- expect(findSummaryDescription().text()).toContain(
- '1 out of 2 failed tests has failed more than once in the last 14 days',
- );
- });
-
- it('renders the recent failures count on the test case', () => {
- expect(findIssueDescription().text()).toContain(
- 'Failed 8 times in master in the last 14 days',
- );
- });
+ it('renders the recently failed tests summary', () => {
+ expect(findHeader().text()).toContain(
+ '2 out of 3 failed tests have failed more than once in the last 14 days',
+ );
});
- describe('with feature flag disabled', () => {
- beforeEach(() => {
- mountComponent({ testFailureHistory: false });
- });
-
- it('does not render the recently failed tests summary', () => {
- expect(findHeader().text()).not.toContain('failed more than once in the last 14 days');
- });
-
- it('does not render the recently failed count on the test suite', () => {
- expect(findSummaryDescription().text()).not.toContain(
- 'failed more than once in the last 14 days',
- );
- });
+ it('renders the recently failed count on the test suite', () => {
+ expect(findSummaryDescription().text()).toContain(
+ '1 out of 2 failed tests has failed more than once in the last 14 days',
+ );
+ });
- it('renders the recent failures count on the test case', () => {
- expect(findIssueDescription().text()).not.toContain('in the last 14 days');
- });
+ it('renders the recent failures count on the test case', () => {
+ expect(findIssueDescription().text()).toContain(
+ 'Failed 8 times in master in the last 14 days',
+ );
});
});
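
With the testFailureHistory flag removed, the recent-failures expectations run unconditionally and mountComponent no longer needs to inject feature flags. For reference, the shape a spec takes while a behaviour is still flag-gated (flag name illustrative; other names as in the spec above):

    wrapper = mount(Component, {
      store: mockStore,
      localVue,
      propsData: { pipelinePath },
      provide: {
        // Feature flags reach components through provide/inject in these specs.
        glFeatures: { someGatedFeature: true }, // illustrative flag name
      },
    });
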
diff --git a/spec/frontend/reports/components/issue_status_icon_spec.js b/spec/frontend/reports/components/issue_status_icon_spec.js
index 3a55ff0a9e3..8706f2f8d83 100644
--- a/spec/frontend/reports/components/issue_status_icon_spec.js
+++ b/spec/frontend/reports/components/issue_status_icon_spec.js
@@ -20,7 +20,7 @@ describe('IssueStatusIcon', () => {
it.each([STATUS_SUCCESS, STATUS_NEUTRAL, STATUS_FAILED])(
'renders "%s" state correctly',
- status => {
+ (status) => {
createComponent({ status });
expect(wrapper.element).toMatchSnapshot();
diff --git a/spec/frontend/reports/components/modal_open_name_spec.js b/spec/frontend/reports/components/modal_open_name_spec.js
deleted file mode 100644
index d59f3571c4b..00000000000
--- a/spec/frontend/reports/components/modal_open_name_spec.js
+++ /dev/null
@@ -1,47 +0,0 @@
-import Vue from 'vue';
-import Vuex from 'vuex';
-import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
-import component from '~/reports/components/modal_open_name.vue';
-
-Vue.use(Vuex);
-
-describe('Modal open name', () => {
- const Component = Vue.extend(component);
- let vm;
-
- const store = new Vuex.Store({
- actions: {
- openModal: () => {},
- },
- state: {},
- mutations: {},
- });
-
- beforeEach(() => {
- vm = mountComponentWithStore(Component, {
- store,
- props: {
- issue: {
- title: 'Issue',
- },
- status: 'failed',
- },
- });
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- it('renders the issue name', () => {
- expect(vm.$el.textContent.trim()).toEqual('Issue');
- });
-
- it('calls openModal actions when button is clicked', () => {
- jest.spyOn(vm, 'openModal').mockImplementation(() => {});
-
- vm.$el.click();
-
- expect(vm.openModal).toHaveBeenCalled();
- });
-});
diff --git a/spec/frontend/reports/components/modal_spec.js b/spec/frontend/reports/components/modal_spec.js
index ff046e64b6e..39b84d1ee05 100644
--- a/spec/frontend/reports/components/modal_spec.js
+++ b/spec/frontend/reports/components/modal_spec.js
@@ -1,54 +1,63 @@
-import Vue from 'vue';
-import component from '~/reports/components/modal.vue';
+import { GlLink, GlSprintf } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+
+import CodeBlock from '~/vue_shared/components/code_block.vue';
+import ReportsModal from '~/reports/components/modal.vue';
import state from '~/reports/store/state';
-import mountComponent from '../../helpers/vue_mount_component_helper';
-import { trimText } from '../../helpers/text_helper';
+
+const StubbedGlModal = { template: '<div><slot></slot></div>', name: 'GlModal', props: ['title'] };
describe('Grouped Test Reports Modal', () => {
- const Component = Vue.extend(component);
const modalDataStructure = state().modal.data;
+ const title = 'Test#sum when a is 1 and b is 2 returns summary';
// populate data
modalDataStructure.execution_time.value = 0.009411;
modalDataStructure.system_output.value = 'Failure/Error: is_expected.to eq(3)\n\n';
modalDataStructure.class.value = 'link';
- let vm;
+ let wrapper;
beforeEach(() => {
- vm = mountComponent(Component, {
- title: 'Test#sum when a is 1 and b is 2 returns summary',
- modalData: modalDataStructure,
- });
+ wrapper = extendedWrapper(
+ shallowMount(ReportsModal, {
+ propsData: {
+ title,
+ modalData: modalDataStructure,
+ visible: true,
+ },
+ stubs: { GlModal: StubbedGlModal, GlSprintf },
+ }),
+ );
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
it('renders code block', () => {
- expect(vm.$el.querySelector('code').textContent).toEqual(
- modalDataStructure.system_output.value,
- );
+ expect(wrapper.find(CodeBlock).props().code).toEqual(modalDataStructure.system_output.value);
});
it('renders link', () => {
- expect(vm.$el.querySelector('.js-modal-link').getAttribute('href')).toEqual(
- modalDataStructure.class.value,
- );
+ const link = wrapper.findComponent(GlLink);
- expect(trimText(vm.$el.querySelector('.js-modal-link').textContent)).toEqual(
- modalDataStructure.class.value,
- );
+ expect(link.attributes().href).toEqual(modalDataStructure.class.value);
+
+ expect(link.text()).toEqual(modalDataStructure.class.value);
});
it('renders seconds', () => {
- expect(vm.$el.textContent).toContain(`${modalDataStructure.execution_time.value} s`);
+ expect(wrapper.text()).toContain(`${modalDataStructure.execution_time.value} s`);
});
it('render title', () => {
- expect(trimText(vm.$el.querySelector('.modal-title').textContent)).toEqual(
- 'Test#sum when a is 1 and b is 2 returns summary',
- );
+ expect(wrapper.findComponent(StubbedGlModal).props().title).toEqual(title);
+ });
+
+ it('re-emits hide event', () => {
+ wrapper.findComponent(StubbedGlModal).vm.$emit('hide');
+ expect(wrapper.emitted().hide).toEqual([[]]);
});
});
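
The rewrite above trades mountComponent for shallowMount plus a local, named GlModal stub. Declaring the stub's props keeps the title assertable, and giving it a name lets findComponent resolve it; a condensed sketch of that design choice (names as in the hunk):

    // A named stub with declared props: findComponent(StubbedGlModal) resolves it,
    // and props().title stays assertable even though the real GlModal never mounts.
    const StubbedGlModal = { name: 'GlModal', props: ['title'], template: '<div><slot></slot></div>' };

    const wrapper = shallowMount(ReportsModal, {
      propsData: { title, modalData: modalDataStructure, visible: true },
      stubs: { GlModal: StubbedGlModal, GlSprintf },
    });

    wrapper.findComponent(StubbedGlModal).vm.$emit('hide'); // the component re-emits this upward
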
diff --git a/spec/frontend/reports/components/report_link_spec.js b/spec/frontend/reports/components/report_link_spec.js
index f879899e9c5..fc21515ded6 100644
--- a/spec/frontend/reports/components/report_link_spec.js
+++ b/spec/frontend/reports/components/report_link_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
+import mountComponent from 'helpers/vue_mount_component_helper';
import component from '~/reports/components/report_link.vue';
-import mountComponent from '../../helpers/vue_mount_component_helper';
describe('report link', () => {
let vm;
diff --git a/spec/frontend/reports/components/report_section_spec.js b/spec/frontend/reports/components/report_section_spec.js
index 2d228313a9b..c3219b34057 100644
--- a/spec/frontend/reports/components/report_section_spec.js
+++ b/spec/frontend/reports/components/report_section_spec.js
@@ -29,7 +29,7 @@ describe('Report section', () => {
alwaysOpen: false,
};
- const createComponent = props => {
+ const createComponent = (props) => {
wrapper = shallowMount(reportSection, {
propsData: {
...defaultProps,
@@ -67,7 +67,7 @@ describe('Report section', () => {
const issues = hasIssues ? 'has issues' : 'has no issues';
const open = alwaysOpen ? 'is always open' : 'is not always open';
- it(`is ${isCollapsible}, if the report ${issues} and ${open}`, done => {
+ it(`is ${isCollapsible}, if the report ${issues} and ${open}`, (done) => {
vm.hasIssues = hasIssues;
vm.alwaysOpen = alwaysOpen;
@@ -93,7 +93,7 @@ describe('Report section', () => {
const issues = isCollapsed ? 'is collapsed' : 'is not collapsed';
const open = alwaysOpen ? 'is always open' : 'is not always open';
- it(`is ${isExpanded}, if the report ${issues} and ${open}`, done => {
+ it(`is ${isExpanded}, if the report ${issues} and ${open}`, (done) => {
vm.isCollapsed = isCollapsed;
vm.alwaysOpen = alwaysOpen;
@@ -144,7 +144,7 @@ describe('Report section', () => {
describe('toggleCollapsed', () => {
const hiddenCss = { display: 'none' };
- it('toggles issues', done => {
+ it('toggles issues', (done) => {
vm.$el.querySelector('button').click();
Vue.nextTick()
@@ -163,7 +163,7 @@ describe('Report section', () => {
.catch(done.fail);
});
- it('is always expanded, if always-open is set to true', done => {
+ it('is always expanded, if always-open is set to true', (done) => {
vm.alwaysOpen = true;
Vue.nextTick()
.then(() => {
@@ -177,7 +177,7 @@ describe('Report section', () => {
});
describe('snowplow events', () => {
- it('does emit an event on issue toggle if the shouldEmitToggleEvent prop does exist', done => {
+ it('does emit an event on issue toggle if the shouldEmitToggleEvent prop does exist', (done) => {
createComponent({ hasIssues: true, shouldEmitToggleEvent: true });
expect(wrapper.emitted().toggleEvent).toBeUndefined();
@@ -192,7 +192,7 @@ describe('Report section', () => {
.catch(done.fail);
});
- it('does not emit an event on issue toggle if the shouldEmitToggleEvent prop does not exist', done => {
+ it('does not emit an event on issue toggle if the shouldEmitToggleEvent prop does not exist', (done) => {
createComponent({ hasIssues: true });
expect(wrapper.emitted().toggleEvent).toBeUndefined();
@@ -207,7 +207,7 @@ describe('Report section', () => {
.catch(done.fail);
});
- it('does not emit an event if always-open is set to true', done => {
+ it('does not emit an event if always-open is set to true', (done) => {
createComponent({ alwaysOpen: true, hasIssues: true, shouldEmitToggleEvent: true });
wrapper.vm
@@ -259,7 +259,7 @@ describe('Report section', () => {
});
describe('Success and Error slots', () => {
- const createComponentWithSlots = status => {
+ const createComponentWithSlots = (status) => {
vm = mountComponentWithSlots(ReportSection, {
props: {
status,
diff --git a/spec/frontend/reports/components/test_issue_body_spec.js b/spec/frontend/reports/components/test_issue_body_spec.js
index ff81020a4eb..c13a3599fef 100644
--- a/spec/frontend/reports/components/test_issue_body_spec.js
+++ b/spec/frontend/reports/components/test_issue_body_spec.js
@@ -1,8 +1,8 @@
import Vue from 'vue';
+import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
+import { trimText } from 'helpers/text_helper';
import component from '~/reports/components/test_issue_body.vue';
import createStore from '~/reports/store';
-import { mountComponentWithStore } from '../../helpers/vue_mount_component_helper';
-import { trimText } from '../../helpers/text_helper';
import { issue } from '../mock_data/mock_data';
describe('Test Issue body', () => {
diff --git a/spec/frontend/reports/store/actions_spec.js b/spec/frontend/reports/store/actions_spec.js
index 3f189736922..b7c4a31b1c8 100644
--- a/spec/frontend/reports/store/actions_spec.js
+++ b/spec/frontend/reports/store/actions_spec.js
@@ -11,7 +11,7 @@ import {
receiveReportsSuccess,
receiveReportsError,
openModal,
- setModalData,
+ closeModal,
} from '~/reports/store/actions';
import state from '~/reports/store/state';
import * as types from '~/reports/store/mutation_types';
@@ -24,7 +24,7 @@ describe('Reports Store Actions', () => {
});
describe('setEndpoint', () => {
- it('should commit SET_ENDPOINT mutation', done => {
+ it('should commit SET_ENDPOINT mutation', (done) => {
testAction(
setEndpoint,
'endpoint.json',
@@ -37,7 +37,7 @@ describe('Reports Store Actions', () => {
});
describe('requestReports', () => {
- it('should commit REQUEST_REPORTS mutation', done => {
+ it('should commit REQUEST_REPORTS mutation', (done) => {
testAction(requestReports, null, mockedState, [{ type: types.REQUEST_REPORTS }], [], done);
});
});
@@ -57,7 +57,7 @@ describe('Reports Store Actions', () => {
});
describe('success', () => {
- it('dispatches requestReports and receiveReportsSuccess ', done => {
+ it('dispatches requestReports and receiveReportsSuccess ', (done) => {
mock
.onGet(`${TEST_HOST}/endpoint.json`)
.replyOnce(200, { summary: {}, suites: [{ name: 'rspec' }] });
@@ -86,7 +86,7 @@ describe('Reports Store Actions', () => {
mock.onGet(`${TEST_HOST}/endpoint.json`).reply(500);
});
- it('dispatches requestReports and receiveReportsError ', done => {
+ it('dispatches requestReports and receiveReportsError ', (done) => {
testAction(
fetchReports,
null,
@@ -107,7 +107,7 @@ describe('Reports Store Actions', () => {
});
describe('receiveReportsSuccess', () => {
- it('should commit RECEIVE_REPORTS_SUCCESS mutation with 200', done => {
+ it('should commit RECEIVE_REPORTS_SUCCESS mutation with 200', (done) => {
testAction(
receiveReportsSuccess,
{ data: { summary: {} }, status: 200 },
@@ -118,7 +118,7 @@ describe('Reports Store Actions', () => {
);
});
- it('should not commit RECEIVE_REPORTS_SUCCESS mutation with 204', done => {
+ it('should not commit RECEIVE_REPORTS_SUCCESS mutation with 204', (done) => {
testAction(
receiveReportsSuccess,
{ data: { summary: {} }, status: 204 },
@@ -131,7 +131,7 @@ describe('Reports Store Actions', () => {
});
describe('receiveReportsError', () => {
- it('should commit RECEIVE_REPORTS_ERROR mutation', done => {
+ it('should commit RECEIVE_REPORTS_ERROR mutation', (done) => {
testAction(
receiveReportsError,
null,
@@ -144,25 +144,25 @@ describe('Reports Store Actions', () => {
});
describe('openModal', () => {
- it('should dispatch setModalData', done => {
+ it('should commit SET_ISSUE_MODAL_DATA', (done) => {
testAction(
openModal,
{ name: 'foo' },
mockedState,
+ [{ type: types.SET_ISSUE_MODAL_DATA, payload: { name: 'foo' } }],
[],
- [{ type: 'setModalData', payload: { name: 'foo' } }],
done,
);
});
});
- describe('setModalData', () => {
- it('should commit SET_ISSUE_MODAL_DATA', done => {
+ describe('closeModal', () => {
+ it('should commit RESET_ISSUE_MODAL_DATA', (done) => {
testAction(
- setModalData,
- { name: 'foo' },
+ closeModal,
+ {},
mockedState,
- [{ type: types.SET_ISSUE_MODAL_DATA, payload: { name: 'foo' } }],
+ [{ type: types.RESET_ISSUE_MODAL_DATA, payload: {} }],
[],
done,
);
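
These expectations describe a flattened store flow: openModal now commits the modal data itself, and a new closeModal action resets it, replacing the old openModal → setModalData dispatch chain. The action implementations are not part of this diff; a presumed sketch consistent with the expectations above:

    // Presumed shape only — inferred from the mutations the spec expects to be committed.
    import * as types from '~/reports/store/mutation_types';

    export const openModal = ({ commit }, payload) => {
      commit(types.SET_ISSUE_MODAL_DATA, payload);
    };

    export const closeModal = ({ commit }, payload = {}) => {
      commit(types.RESET_ISSUE_MODAL_DATA, payload);
    };
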
diff --git a/spec/frontend/reports/store/mutations_spec.js b/spec/frontend/reports/store/mutations_spec.js
index c1c5862a37c..59065d1151f 100644
--- a/spec/frontend/reports/store/mutations_spec.js
+++ b/spec/frontend/reports/store/mutations_spec.js
@@ -127,5 +127,32 @@ describe('Reports Store Mutations', () => {
expect(stateCopy.modal.data.execution_time.value).toEqual(issue.execution_time);
expect(stateCopy.modal.data.system_output.value).toEqual(issue.system_output);
});
+
+ it('should open modal', () => {
+ expect(stateCopy.modal.open).toEqual(true);
+ });
+ });
+
+ describe('RESET_ISSUE_MODAL_DATA', () => {
+ beforeEach(() => {
+ mutations[types.SET_ISSUE_MODAL_DATA](stateCopy, {
+ issue,
+ });
+
+ mutations[types.RESET_ISSUE_MODAL_DATA](stateCopy);
+ });
+
+ it('should reset modal title', () => {
+ expect(stateCopy.modal.title).toEqual(null);
+ });
+
+ it('should reset modal data', () => {
+ expect(stateCopy.modal.data.execution_time.value).toEqual(null);
+ expect(stateCopy.modal.data.system_output.value).toEqual(null);
+ });
+
+ it('should close modal', () => {
+ expect(stateCopy.modal.open).toEqual(false);
+ });
});
});
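
The new RESET_ISSUE_MODAL_DATA block asserts the inverse of SET_ISSUE_MODAL_DATA: the title is nulled, every data field's value is nulled, and the modal closes. The mutation itself is outside this diff; a presumed sketch that would satisfy those assertions:

    // Presumed shape only — the mutations module is not part of this patch.
    import * as types from '~/reports/store/mutation_types';

    export default {
      [types.RESET_ISSUE_MODAL_DATA](state) {
        state.modal.open = false;
        state.modal.title = null;
        Object.keys(state.modal.data).forEach((field) => {
          state.modal.data[field].value = null;
        });
      },
    };
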
diff --git a/spec/frontend/repository/components/breadcrumbs_spec.js b/spec/frontend/repository/components/breadcrumbs_spec.js
index 38e5c9aaca5..a0dc608ddc9 100644
--- a/spec/frontend/repository/components/breadcrumbs_spec.js
+++ b/spec/frontend/repository/components/breadcrumbs_spec.js
@@ -36,23 +36,15 @@ describe('Repository breadcrumbs component', () => {
it('escapes hash in directory path', () => {
factory('app/assets/javascripts#');
- expect(
- vm
- .findAll(RouterLinkStub)
- .at(3)
- .props('to'),
- ).toEqual('/-/tree/app/assets/javascripts%23');
+ expect(vm.findAll(RouterLinkStub).at(3).props('to')).toEqual(
+ '/-/tree/app/assets/javascripts%23',
+ );
});
it('renders last link as active', () => {
factory('app/assets');
- expect(
- vm
- .findAll(RouterLinkStub)
- .at(2)
- .attributes('aria-current'),
- ).toEqual('page');
+ expect(vm.findAll(RouterLinkStub).at(2).attributes('aria-current')).toEqual('page');
});
it('does not render add to tree dropdown when permissions are false', () => {
diff --git a/spec/frontend/repository/components/directory_download_links_spec.js b/spec/frontend/repository/components/directory_download_links_spec.js
index 4d70b44de08..72c4165c2e9 100644
--- a/spec/frontend/repository/components/directory_download_links_spec.js
+++ b/spec/frontend/repository/components/directory_download_links_spec.js
@@ -7,7 +7,10 @@ function factory(currentPath) {
vm = shallowMount(DirectoryDownloadLinks, {
propsData: {
currentPath,
- links: [{ text: 'zip', path: 'http://test.com/' }, { text: 'tar', path: 'http://test.com/' }],
+ links: [
+ { text: 'zip', path: 'http://test.com/' },
+ { text: 'tar', path: 'http://test.com/' },
+ ],
},
});
}
diff --git a/spec/frontend/repository/components/last_commit_spec.js b/spec/frontend/repository/components/last_commit_spec.js
index ccba0982c26..fe77057c3d4 100644
--- a/spec/frontend/repository/components/last_commit_spec.js
+++ b/spec/frontend/repository/components/last_commit_spec.js
@@ -131,6 +131,16 @@ describe('Repository last commit component', () => {
});
});
+ it('strips the first newline of the description', async () => {
+ factory(createCommitData({ descriptionHtml: '&#x000A;Update ADOPTERS.md' }));
+
+ await vm.vm.$nextTick();
+
+ expect(vm.find('.commit-row-description').html()).toBe(
+ '<pre class="commit-row-description gl-mb-3">Update ADOPTERS.md</pre>',
+ );
+ });
+
it('renders the signature HTML as returned by the backend', () => {
factory(createCommitData({ signatureHtml: '<button>Verified</button>' }));
diff --git a/spec/frontend/repository/utils/dom_spec.js b/spec/frontend/repository/utils/dom_spec.js
index 26ed57f0392..f9e619a82d1 100644
--- a/spec/frontend/repository/utils/dom_spec.js
+++ b/spec/frontend/repository/utils/dom_spec.js
@@ -1,5 +1,5 @@
import { TEST_HOST } from 'helpers/test_constants';
-import { setHTMLFixture } from '../../helpers/fixtures';
+import { setHTMLFixture } from 'helpers/fixtures';
import { updateElementsVisibility, updateFormAction } from '~/repository/utils/dom';
describe('updateElementsVisibility', () => {
diff --git a/spec/frontend/right_sidebar_spec.js b/spec/frontend/right_sidebar_spec.js
index 3490a99afb4..1b31030cb92 100644
--- a/spec/frontend/right_sidebar_spec.js
+++ b/spec/frontend/right_sidebar_spec.js
@@ -12,7 +12,7 @@ let $collapseIcon = null;
let $page = null;
let $labelsIcon = null;
-const assertSidebarState = state => {
+const assertSidebarState = (state) => {
const shouldBeExpanded = state === 'expanded';
const shouldBeCollapsed = state === 'collapsed';
expect($aside.hasClass('right-sidebar-expanded')).toBe(shouldBeExpanded);
@@ -67,7 +67,7 @@ describe('RightSidebar', () => {
assertSidebarState('collapsed');
});
- it('should broadcast todo:toggle event when add todo clicked', done => {
+ it('should broadcast todo:toggle event when add todo clicked', (done) => {
const todos = getJSONFixture('todos/todos.json');
mock.onPost(/(.*)\/todos$/).reply(200, todos);
@@ -84,7 +84,7 @@ describe('RightSidebar', () => {
});
it('should not hide collapsed icons', () => {
- [].forEach.call(document.querySelectorAll('.sidebar-collapsed-icon'), el => {
+ [].forEach.call(document.querySelectorAll('.sidebar-collapsed-icon'), (el) => {
expect(el.querySelector('.fa, svg').classList.contains('hidden')).toBeFalsy();
});
});
diff --git a/spec/frontend/search/index_spec.js b/spec/frontend/search/index_spec.js
index 31b5aa3686b..023cd341345 100644
--- a/spec/frontend/search/index_spec.js
+++ b/spec/frontend/search/index_spec.js
@@ -8,7 +8,7 @@ jest.mock('~/search/sidebar');
describe('initSearchApp', () => {
let defaultLocation;
- const setUrl = query => {
+ const setUrl = (query) => {
window.location.href = `https://localhost:3000/search${query}`;
window.location.search = query;
};
diff --git a/spec/frontend/search/sidebar/components/app_spec.js b/spec/frontend/search/sidebar/components/app_spec.js
index d2c0081080c..94a39b90d02 100644
--- a/spec/frontend/search/sidebar/components/app_spec.js
+++ b/spec/frontend/search/sidebar/components/app_spec.js
@@ -17,7 +17,7 @@ describe('GlobalSearchSidebar', () => {
resetQuery: jest.fn(),
};
- const createComponent = initialState => {
+ const createComponent = (initialState) => {
const store = new Vuex.Store({
state: {
query: MOCK_QUERY,
diff --git a/spec/frontend/search/sidebar/components/confidentiality_filter_spec.js b/spec/frontend/search/sidebar/components/confidentiality_filter_spec.js
index 68d20b2480e..42fcc859308 100644
--- a/spec/frontend/search/sidebar/components/confidentiality_filter_spec.js
+++ b/spec/frontend/search/sidebar/components/confidentiality_filter_spec.js
@@ -15,7 +15,7 @@ describe('ConfidentialityFilter', () => {
resetQuery: jest.fn(),
};
- const createComponent = initialState => {
+ const createComponent = (initialState) => {
const store = new Vuex.Store({
state: {
query: MOCK_QUERY,
diff --git a/spec/frontend/search/sidebar/components/radio_filter_spec.js b/spec/frontend/search/sidebar/components/radio_filter_spec.js
index 31a4a8859ee..9918af54cfe 100644
--- a/spec/frontend/search/sidebar/components/radio_filter_spec.js
+++ b/spec/frontend/search/sidebar/components/radio_filter_spec.js
@@ -46,7 +46,7 @@ describe('RadioFilter', () => {
const findGlRadioButtonGroup = () => wrapper.find(GlFormRadioGroup);
const findGlRadioButtons = () => findGlRadioButtonGroup().findAll(GlFormRadio);
- const findGlRadioButtonsText = () => findGlRadioButtons().wrappers.map(w => w.text());
+ const findGlRadioButtonsText = () => findGlRadioButtons().wrappers.map((w) => w.text());
describe('template', () => {
beforeEach(() => {
@@ -61,7 +61,7 @@ describe('RadioFilter', () => {
describe('Status Filter', () => {
it('renders a radio button for each filterOption', () => {
expect(findGlRadioButtonsText()).toStrictEqual(
- stateFilterData.filterByScope[stateFilterData.scopes.ISSUES].map(f => {
+ stateFilterData.filterByScope[stateFilterData.scopes.ISSUES].map((f) => {
return f.value === stateFilterData.filters.ANY.value
? `Any ${stateFilterData.header.toLowerCase()}`
: f.label;
@@ -87,7 +87,7 @@ describe('RadioFilter', () => {
it('renders a radio button for each filterOption', () => {
expect(findGlRadioButtonsText()).toStrictEqual(
- confidentialFilterData.filterByScope[confidentialFilterData.scopes.ISSUES].map(f => {
+ confidentialFilterData.filterByScope[confidentialFilterData.scopes.ISSUES].map((f) => {
return f.value === confidentialFilterData.filters.ANY.value
? `Any ${confidentialFilterData.header.toLowerCase()}`
: f.label;
diff --git a/spec/frontend/search/sidebar/components/status_filter_spec.js b/spec/frontend/search/sidebar/components/status_filter_spec.js
index 188d47b38cd..21fc663397e 100644
--- a/spec/frontend/search/sidebar/components/status_filter_spec.js
+++ b/spec/frontend/search/sidebar/components/status_filter_spec.js
@@ -15,7 +15,7 @@ describe('StatusFilter', () => {
resetQuery: jest.fn(),
};
- const createComponent = initialState => {
+ const createComponent = (initialState) => {
const store = new Vuex.Store({
state: {
query: MOCK_QUERY,
diff --git a/spec/frontend/search/topbar/components/searchable_dropdown_spec.js b/spec/frontend/search/topbar/components/searchable_dropdown_spec.js
index c4ebaabbf96..86e29571d0f 100644
--- a/spec/frontend/search/topbar/components/searchable_dropdown_spec.js
+++ b/spec/frontend/search/topbar/components/searchable_dropdown_spec.js
@@ -47,7 +47,7 @@ describe('Global Search Searchable Dropdown', () => {
const findGlDropdownSearch = () => findGlDropdown().find(GlSearchBoxByType);
const findDropdownText = () => findGlDropdown().find('.dropdown-toggle-text');
const findDropdownItems = () => findGlDropdown().findAll(GlDropdownItem);
- const findDropdownItemsText = () => findDropdownItems().wrappers.map(w => w.text());
+ const findDropdownItemsText = () => findDropdownItems().wrappers.map((w) => w.text());
const findAnyDropdownItem = () => findDropdownItems().at(0);
const findFirstGroupDropdownItem = () => findDropdownItems().at(1);
const findLoader = () => wrapper.find(GlSkeletonLoader);
@@ -94,7 +94,7 @@ describe('Global Search Searchable Dropdown', () => {
});
it('renders an instance for each namespace', () => {
- const resultsIncludeAny = ['Any'].concat(MOCK_GROUPS.map(n => n.full_name));
+ const resultsIncludeAny = ['Any'].concat(MOCK_GROUPS.map((n) => n.full_name));
expect(findDropdownItemsText()).toStrictEqual(resultsIncludeAny);
});
});
diff --git a/spec/frontend/search_autocomplete_spec.js b/spec/frontend/search_autocomplete_spec.js
index 3240664f5aa..e844bc96e05 100644
--- a/spec/frontend/search_autocomplete_spec.js
+++ b/spec/frontend/search_autocomplete_spec.js
@@ -32,7 +32,7 @@ describe('Search autocomplete dropdown', () => {
// Add required attributes to body before starting the test.
// section would be dashboard|group|project
- const addBodyAttributes = section => {
+ const addBodyAttributes = (section) => {
if (section == null) {
section = 'dashboard';
}
@@ -213,10 +213,10 @@ describe('Search autocomplete dropdown', () => {
});
function triggerAutocomplete() {
- return new Promise(resolve => {
+ return new Promise((resolve) => {
const dropdown = widget.searchInput.data('deprecatedJQueryDropdown');
const filterCallback = dropdown.filter.options.callback;
- dropdown.filter.options.callback = jest.fn(data => {
+ dropdown.filter.options.callback = jest.fn((data) => {
filterCallback(data);
resolve();
@@ -227,7 +227,7 @@ describe('Search autocomplete dropdown', () => {
});
}
- it('suggest Projects', done => {
+ it('suggest Projects', (done) => {
// eslint-disable-next-line promise/catch-or-return
triggerAutocomplete().finally(() => {
const list = widget.wrap.find('.dropdown-menu').find('ul');
@@ -242,7 +242,7 @@ describe('Search autocomplete dropdown', () => {
jest.runOnlyPendingTimers();
});
- it('suggest Groups', done => {
+ it('suggest Groups', (done) => {
// eslint-disable-next-line promise/catch-or-return
triggerAutocomplete().finally(() => {
const list = widget.wrap.find('.dropdown-menu').find('ul');
diff --git a/spec/frontend/search_settings/components/search_settings_spec.js b/spec/frontend/search_settings/components/search_settings_spec.js
new file mode 100644
index 00000000000..b80f9b15abf
--- /dev/null
+++ b/spec/frontend/search_settings/components/search_settings_spec.js
@@ -0,0 +1,106 @@
+import { GlSearchBoxByType } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import SearchSettings from '~/search_settings/components/search_settings.vue';
+import { HIGHLIGHT_CLASS, HIDE_CLASS } from '~/search_settings/constants';
+
+describe('search_settings/components/search_settings.vue', () => {
+ const ROOT_ID = 'content-body';
+ const SECTION_SELECTOR = 'section.settings';
+ const SEARCH_TERM = 'Delete project';
+ const GENERAL_SETTINGS_ID = 'js-general-settings';
+ const ADVANCED_SETTINGS_ID = 'js-advanced-settings';
+ let wrapper;
+
+ const buildWrapper = () => {
+ wrapper = shallowMount(SearchSettings, {
+ propsData: {
+ searchRoot: document.querySelector(`#${ROOT_ID}`),
+ sectionSelector: SECTION_SELECTOR,
+ },
+ });
+ };
+
+ const sections = () => Array.from(document.querySelectorAll(SECTION_SELECTOR));
+ const sectionsCount = () => sections().length;
+ const visibleSectionsCount = () =>
+ document.querySelectorAll(`${SECTION_SELECTOR}:not(.${HIDE_CLASS})`).length;
+ const highlightedElementsCount = () => document.querySelectorAll(`.${HIGHLIGHT_CLASS}`).length;
+ const findSearchBox = () => wrapper.find(GlSearchBoxByType);
+ const search = (term) => {
+ findSearchBox().vm.$emit('input', term);
+ };
+ const clearSearch = () => search('');
+
+ beforeEach(() => {
+ setFixtures(`
+ <div>
+ <div class="js-search-app"></div>
+ <div id="${ROOT_ID}">
+ <section id="${GENERAL_SETTINGS_ID}" class="settings">
+ <span>General</span>
+ </section>
+ <section id="${ADVANCED_SETTINGS_ID}" class="settings">
+ <span>${SEARCH_TERM}</span>
+ </section>
+ </div>
+ </div>
+ `);
+ buildWrapper();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('expands first section and collapses the rest', () => {
+ clearSearch();
+
+ const [firstSection, ...otherSections] = sections();
+
+ expect(wrapper.emitted()).toEqual({
+ expand: [[firstSection]],
+ collapse: otherSections.map((x) => [x]),
+ });
+ });
+
+ it('hides sections that do not match the search term', () => {
+ const hiddenSection = document.querySelector(`#${GENERAL_SETTINGS_ID}`);
+ search(SEARCH_TERM);
+
+ expect(visibleSectionsCount()).toBe(1);
+ expect(hiddenSection.classList).toContain(HIDE_CLASS);
+ });
+
+ it('expands section that matches the search term', () => {
+ const section = document.querySelector(`#${ADVANCED_SETTINGS_ID}`);
+
+ search(SEARCH_TERM);
+
+ // Last called because expand is always called once to reset the page state
+ expect(wrapper.emitted().expand[1][0]).toBe(section);
+ });
+
+ it('highlight elements that match the search term', () => {
+ search(SEARCH_TERM);
+
+ expect(highlightedElementsCount()).toBe(1);
+ });
+
+ describe('when search term is cleared', () => {
+ beforeEach(() => {
+ search(SEARCH_TERM);
+ });
+
+ it('displays all sections', () => {
+ expect(visibleSectionsCount()).toBe(1);
+ clearSearch();
+ expect(visibleSectionsCount()).toBe(sectionsCount());
+ });
+
+ it('removes the highlight from all elements', () => {
+ expect(highlightedElementsCount()).toBe(1);
+ clearSearch();
+ expect(highlightedElementsCount()).toBe(0);
+ });
+ });
+});
diff --git a/spec/frontend/search_settings/index_spec.js b/spec/frontend/search_settings/index_spec.js
new file mode 100644
index 00000000000..122ee1251bb
--- /dev/null
+++ b/spec/frontend/search_settings/index_spec.js
@@ -0,0 +1,36 @@
+import $ from 'jquery';
+import { setHTMLFixture } from 'helpers/fixtures';
+import initSearch from '~/search_settings';
+import { expandSection, closeSection } from '~/settings_panels';
+
+jest.mock('~/settings_panels');
+
+describe('search_settings/index', () => {
+ let app;
+
+ beforeEach(() => {
+ const el = document.createElement('div');
+
+ setHTMLFixture('<div id="content-body"></div>');
+
+ app = initSearch({ el });
+ });
+
+ afterEach(() => {
+ app.$destroy();
+ });
+
+ it('calls settings_panel.onExpand when expand event is emitted', () => {
+ const section = { name: 'section' };
+ app.$refs.searchSettings.$emit('expand', section);
+
+ expect(expandSection).toHaveBeenCalledWith($(section));
+ });
+
+ it('calls settings_panel.closeSection when collapse event is emitted', () => {
+ const section = { name: 'section' };
+ app.$refs.searchSettings.$emit('collapse', section);
+
+ expect(closeSection).toHaveBeenCalledWith($(section));
+ });
+});
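
This new spec pins down the entry point's contract: mount the search component and translate its expand/collapse events into the jQuery-based settings_panels helpers. The entry point itself is not in this diff; a presumed sketch of wiring that would satisfy both assertions (prop values taken from the component spec above):

    // Presumed shape only — inferred from the events and helpers the spec exercises.
    import Vue from 'vue';
    import $ from 'jquery';
    import SearchSettings from '~/search_settings/components/search_settings.vue';
    import { expandSection, closeSection } from '~/settings_panels';

    const initSearch = ({ el }) =>
      new Vue({
        el,
        render(h) {
          return h(SearchSettings, {
            ref: 'searchSettings',
            props: {
              searchRoot: document.querySelector('#content-body'),
              sectionSelector: 'section.settings',
            },
            on: {
              expand: (section) => expandSection($(section)),
              collapse: (section) => closeSection($(section)),
            },
          });
        },
      });

    export default initSearch;
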
diff --git a/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap b/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap
index 02d5ca6bdb3..bd05eb69080 100644
--- a/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap
+++ b/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap
@@ -60,6 +60,7 @@ exports[`self monitor component When the self monitor project has not been creat
<gl-modal-stub
cancel-title="Cancel"
category="primary"
+ dismisslabel="Close"
modalclass=""
modalid="delete-self-monitor-modal"
ok-title="Delete project"
diff --git a/spec/frontend/self_monitor/components/self_monitor_form_spec.js b/spec/frontend/self_monitor/components/self_monitor_form_spec.js
index 618cc16cdf4..dfa961c5115 100644
--- a/spec/frontend/self_monitor/components/self_monitor_form_spec.js
+++ b/spec/frontend/self_monitor/components/self_monitor_form_spec.js
@@ -79,10 +79,7 @@ describe('self monitor component', () => {
wrapper = shallowMount(SelfMonitor, { store });
expect(
- wrapper
- .find({ ref: 'selfMonitoringFormText' })
- .find('a')
- .attributes('href'),
+ wrapper.find({ ref: 'selfMonitoringFormText' }).find('a').attributes('href'),
).toEqual(`${TEST_HOST}/instance-administrators-random/gitlab-self-monitoring`);
});
});
diff --git a/spec/frontend/self_monitor/store/actions_spec.js b/spec/frontend/self_monitor/store/actions_spec.js
index 0326ca6f415..29181e15680 100644
--- a/spec/frontend/self_monitor/store/actions_spec.js
+++ b/spec/frontend/self_monitor/store/actions_spec.js
@@ -16,7 +16,7 @@ describe('self monitor actions', () => {
});
describe('setSelfMonitor', () => {
- it('commits the SET_ENABLED mutation', done => {
+ it('commits the SET_ENABLED mutation', (done) => {
testAction(
actions.setSelfMonitor,
null,
@@ -29,7 +29,7 @@ describe('self monitor actions', () => {
});
describe('resetAlert', () => {
- it('commits the SET_ENABLED mutation', done => {
+ it('commits the SET_ENABLED mutation', (done) => {
testAction(
actions.resetAlert,
null,
@@ -54,7 +54,7 @@ describe('self monitor actions', () => {
});
});
- it('dispatches status request with job data', done => {
+ it('dispatches status request with job data', (done) => {
testAction(
actions.requestCreateProject,
null,
@@ -75,7 +75,7 @@ describe('self monitor actions', () => {
);
});
- it('dispatches success with project path', done => {
+ it('dispatches success with project path', (done) => {
testAction(
actions.requestCreateProjectStatus,
null,
@@ -98,7 +98,7 @@ describe('self monitor actions', () => {
mock.onPost(state.createProjectEndpoint).reply(500);
});
- it('dispatches error', done => {
+ it('dispatches error', (done) => {
testAction(
actions.requestCreateProject,
null,
@@ -121,7 +121,7 @@ describe('self monitor actions', () => {
});
describe('requestCreateProjectSuccess', () => {
- it('should commit the received data', done => {
+ it('should commit the received data', (done) => {
testAction(
actions.requestCreateProjectSuccess,
{ project_full_path: '/self-monitor-url' },
@@ -165,7 +165,7 @@ describe('self monitor actions', () => {
});
});
- it('dispatches status request with job data', done => {
+ it('dispatches status request with job data', (done) => {
testAction(
actions.requestDeleteProject,
null,
@@ -186,7 +186,7 @@ describe('self monitor actions', () => {
);
});
- it('dispatches success with status', done => {
+ it('dispatches success with status', (done) => {
testAction(
actions.requestDeleteProjectStatus,
null,
@@ -209,7 +209,7 @@ describe('self monitor actions', () => {
mock.onDelete(state.deleteProjectEndpoint).reply(500);
});
- it('dispatches error', done => {
+ it('dispatches error', (done) => {
testAction(
actions.requestDeleteProject,
null,
@@ -232,7 +232,7 @@ describe('self monitor actions', () => {
});
describe('requestDeleteProjectSuccess', () => {
- it('should commit mutations to remove previously set data', done => {
+ it('should commit mutations to remove previously set data', (done) => {
testAction(
actions.requestDeleteProjectSuccess,
null,
diff --git a/spec/frontend/sentry/sentry_config_spec.js b/spec/frontend/sentry/sentry_config_spec.js
index ed30e4774d9..5ee261f480a 100644
--- a/spec/frontend/sentry/sentry_config_spec.js
+++ b/spec/frontend/sentry/sentry_config_spec.js
@@ -4,7 +4,7 @@ import SentryConfig from '~/sentry/sentry_config';
describe('SentryConfig', () => {
describe('IGNORE_ERRORS', () => {
it('should be an array of strings', () => {
- const areStrings = SentryConfig.IGNORE_ERRORS.every(error => typeof error === 'string');
+ const areStrings = SentryConfig.IGNORE_ERRORS.every((error) => typeof error === 'string');
expect(areStrings).toBe(true);
});
@@ -12,7 +12,7 @@ describe('SentryConfig', () => {
describe('BLACKLIST_URLS', () => {
it('should be an array of regexps', () => {
- const areRegExps = SentryConfig.BLACKLIST_URLS.every(url => url instanceof RegExp);
+ const areRegExps = SentryConfig.BLACKLIST_URLS.every((url) => url instanceof RegExp);
expect(areRegExps).toBe(true);
});
diff --git a/spec/frontend/sentry_error_stack_trace/components/sentry_error_stack_trace_spec.js b/spec/frontend/sentry_error_stack_trace/components/sentry_error_stack_trace_spec.js
index e5f83b6fa49..135a3844e78 100644
--- a/spec/frontend/sentry_error_stack_trace/components/sentry_error_stack_trace_spec.js
+++ b/spec/frontend/sentry_error_stack_trace/components/sentry_error_stack_trace_spec.js
@@ -78,10 +78,5 @@ describe('Sentry Error Stack Trace', () => {
expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
expect(wrapper.find(Stacktrace).exists()).toBe(true);
});
-
- it('should not show stacktrace if it does not exist', () => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
- expect(wrapper.find(Stacktrace).exists()).toBe(false);
- });
});
});
diff --git a/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap b/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap
index 6b3d65ff037..4b827301943 100644
--- a/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap
+++ b/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap
@@ -3,7 +3,7 @@
exports[`EmptyStateComponent should render content 1`] = `
"<section class=\\"row empty-state text-center\\">
<div class=\\"col-12\\">
- <div class=\\"svg-250 svg-content\\"><img src=\\"/image.svg\\" alt=\\"Getting started with serverless\\" class=\\"gl-max-w-full\\"></div>
+ <div class=\\"svg-250 svg-content\\"><img src=\\"/image.svg\\" alt=\\"\\" class=\\"gl-max-w-full\\"></div>
</div>
<div class=\\"col-12\\">
<div class=\\"text-content gl-mx-auto gl-my-0 gl-p-5\\">
diff --git a/spec/frontend/serverless/components/area_spec.js b/spec/frontend/serverless/components/area_spec.js
index 8b6f664ae25..05c9ee44307 100644
--- a/spec/frontend/serverless/components/area_spec.js
+++ b/spec/frontend/serverless/components/area_spec.js
@@ -34,7 +34,7 @@ describe('Area component', () => {
describe('methods', () => {
describe('formatTooltipText', () => {
const mockDate = mockNormalizedMetrics.queries[0].result[0].values[0].time;
- const generateSeriesData = type => ({
+ const generateSeriesData = (type) => ({
seriesData: [
{
componentSubType: type,
@@ -91,7 +91,7 @@ describe('Area component', () => {
expect(
data.filter(
- datum => new Date(datum.time).getTime() > 0 && typeof datum.value === 'number',
+ (datum) => new Date(datum.time).getTime() > 0 && typeof datum.value === 'number',
).length,
).toBe(data.length);
});
diff --git a/spec/frontend/serverless/components/function_row_spec.js b/spec/frontend/serverless/components/function_row_spec.js
index 76a9e149302..081edd33b3b 100644
--- a/spec/frontend/serverless/components/function_row_spec.js
+++ b/spec/frontend/serverless/components/function_row_spec.js
@@ -7,7 +7,7 @@ import { mockServerlessFunction } from '../mock_data';
describe('functionRowComponent', () => {
let wrapper;
- const createComponent = func => {
+ const createComponent = (func) => {
wrapper = shallowMount(functionRowComponent, {
propsData: { func },
});
diff --git a/spec/frontend/serverless/components/missing_prometheus_spec.js b/spec/frontend/serverless/components/missing_prometheus_spec.js
index 0bd2e96a068..ffdb1f13111 100644
--- a/spec/frontend/serverless/components/missing_prometheus_spec.js
+++ b/spec/frontend/serverless/components/missing_prometheus_spec.js
@@ -6,7 +6,7 @@ import missingPrometheusComponent from '~/serverless/components/missing_promethe
describe('missingPrometheusComponent', () => {
let wrapper;
- const createComponent = missingData => {
+ const createComponent = (missingData) => {
const store = createStore({ clustersPath: '/clusters', helpPath: '/help' });
wrapper = shallowMount(missingPrometheusComponent, { store, propsData: { missingData } });
diff --git a/spec/frontend/serverless/components/pod_box_spec.js b/spec/frontend/serverless/components/pod_box_spec.js
index 495d11bd9ec..cf0c14a2cac 100644
--- a/spec/frontend/serverless/components/pod_box_spec.js
+++ b/spec/frontend/serverless/components/pod_box_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import podBoxComponent from '~/serverless/components/pod_box.vue';
-const createComponent = count =>
+const createComponent = (count) =>
shallowMount(podBoxComponent, {
propsData: {
count,
diff --git a/spec/frontend/serverless/components/url_spec.js b/spec/frontend/serverless/components/url_spec.js
index 92e4938c2cd..c43933e5b94 100644
--- a/spec/frontend/serverless/components/url_spec.js
+++ b/spec/frontend/serverless/components/url_spec.js
@@ -3,7 +3,7 @@ import { shallowMount } from '@vue/test-utils';
import urlComponent from '~/serverless/components/url.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
-const createComponent = uri =>
+const createComponent = (uri) =>
shallowMount(Vue.extend(urlComponent), {
propsData: {
uri,
diff --git a/spec/frontend/serverless/mock_data.js b/spec/frontend/serverless/mock_data.js
index ef616ceb37f..1816ad62a04 100644
--- a/spec/frontend/serverless/mock_data.js
+++ b/spec/frontend/serverless/mock_data.js
@@ -102,7 +102,10 @@ export const mockMetrics = {
result: [
{
metric: {},
- values: [[1551352298.756, '0'], [1551352358.756, '0']],
+ values: [
+ [1551352298.756, '0'],
+ [1551352358.756, '0'],
+ ],
},
],
},
diff --git a/spec/frontend/serverless/store/actions_spec.js b/spec/frontend/serverless/store/actions_spec.js
index aac57c75a4f..32e30a57d4b 100644
--- a/spec/frontend/serverless/store/actions_spec.js
+++ b/spec/frontend/serverless/store/actions_spec.js
@@ -1,14 +1,14 @@
import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
import statusCodes from '~/lib/utils/http_status';
import { fetchFunctions, fetchMetrics } from '~/serverless/store/actions';
import { mockServerlessFunctions, mockMetrics } from '../mock_data';
import axios from '~/lib/utils/axios_utils';
-import testAction from '../../helpers/vuex_action_helper';
import { adjustMetricQuery } from '../utils';
describe('ServerlessActions', () => {
describe('fetchFunctions', () => {
- it('should successfully fetch functions', done => {
+ it('should successfully fetch functions', (done) => {
const endpoint = '/functions';
const mock = new MockAdapter(axios);
mock.onGet(endpoint).reply(statusCodes.OK, JSON.stringify(mockServerlessFunctions));
@@ -29,12 +29,12 @@ describe('ServerlessActions', () => {
);
});
- it('should successfully retry', done => {
+ it('should successfully retry', (done) => {
const endpoint = '/functions';
const mock = new MockAdapter(axios);
mock
.onGet(endpoint)
- .reply(() => new Promise(resolve => setTimeout(() => resolve(200), Infinity)));
+ .reply(() => new Promise((resolve) => setTimeout(() => resolve(200), Infinity)));
testAction(
fetchFunctions,
@@ -51,7 +51,7 @@ describe('ServerlessActions', () => {
});
describe('fetchMetrics', () => {
- it('should return no prometheus', done => {
+ it('should return no prometheus', (done) => {
const endpoint = '/metrics';
const mock = new MockAdapter(axios);
mock.onGet(endpoint).reply(statusCodes.NO_CONTENT);
@@ -69,7 +69,7 @@ describe('ServerlessActions', () => {
);
});
- it('should successfully fetch metrics', done => {
+ it('should successfully fetch metrics', (done) => {
const endpoint = '/metrics';
const mock = new MockAdapter(axios);
mock.onGet(endpoint).reply(statusCodes.OK, JSON.stringify(mockMetrics));
diff --git a/spec/frontend/serverless/utils.js b/spec/frontend/serverless/utils.js
index 4af3eda1ffb..7caf7da231e 100644
--- a/spec/frontend/serverless/utils.js
+++ b/spec/frontend/serverless/utils.js
@@ -1,9 +1,9 @@
-export const adjustMetricQuery = data => {
+export const adjustMetricQuery = (data) => {
const updatedMetric = data.metrics;
- const queries = data.metrics.queries.map(query => ({
+ const queries = data.metrics.queries.map((query) => ({
...query,
- result: query.result.map(result => ({
+ result: query.result.map((result) => ({
...result,
values: result.values.map(([timestamp, value]) => ({
time: new Date(timestamp * 1000).toISOString(),
diff --git a/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js b/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
index fad23aa05a4..f3085fb7ffb 100644
--- a/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
+++ b/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
@@ -1,13 +1,12 @@
import { shallowMount } from '@vue/test-utils';
import { GlModal, GlFormCheckbox } from '@gitlab/ui';
import { initEmojiMock } from 'helpers/emoji';
-import Api from '~/api';
+import * as UserApi from '~/api/user_api';
import { deprecatedCreateFlash as createFlash } from '~/flash';
import SetStatusModalWrapper, {
AVAILABILITY_STATUS,
} from '~/set_status_modal/set_status_modal_wrapper.vue';
-jest.mock('~/api');
jest.mock('~/flash');
describe('SetStatusModalWrapper', () => {
@@ -40,7 +39,7 @@ describe('SetStatusModalWrapper', () => {
};
const findModal = () => wrapper.find(GlModal);
- const findFormField = field => wrapper.find(`[name="user[status][${field}]"]`);
+ const findFormField = (field) => wrapper.find(`[name="user[status][${field}]"]`);
const findClearStatusButton = () => wrapper.find('.js-clear-user-status-button');
const findNoEmojiPlaceholder = () => wrapper.find('.js-no-emoji-placeholder');
const findToggleEmojiButton = () => wrapper.find('.js-toggle-emoji-menu');
@@ -150,7 +149,7 @@ describe('SetStatusModalWrapper', () => {
describe('update status', () => {
describe('succeeds', () => {
beforeEach(() => {
- jest.spyOn(Api, 'postUserStatus').mockResolvedValue();
+ jest.spyOn(UserApi, 'updateUserStatus').mockResolvedValue();
});
it('clicking "removeStatus" clears the emoji and message fields', async () => {
@@ -173,12 +172,12 @@ describe('SetStatusModalWrapper', () => {
const commonParams = { emoji: defaultEmoji, message: defaultMessage };
- expect(Api.postUserStatus).toHaveBeenCalledTimes(2);
- expect(Api.postUserStatus).toHaveBeenNthCalledWith(1, {
+ expect(UserApi.updateUserStatus).toHaveBeenCalledTimes(2);
+ expect(UserApi.updateUserStatus).toHaveBeenNthCalledWith(1, {
availability: AVAILABILITY_STATUS.NOT_SET,
...commonParams,
});
- expect(Api.postUserStatus).toHaveBeenNthCalledWith(2, {
+ expect(UserApi.updateUserStatus).toHaveBeenNthCalledWith(2, {
availability: AVAILABILITY_STATUS.BUSY,
...commonParams,
});
@@ -196,7 +195,7 @@ describe('SetStatusModalWrapper', () => {
beforeEach(async () => {
mockEmoji = await initEmojiMock();
wrapper = createComponent({ currentEmoji: '', currentMessage: '' });
- jest.spyOn(Api, 'postUserStatus').mockResolvedValue();
+ jest.spyOn(UserApi, 'updateUserStatus').mockResolvedValue();
return initModal({ mockOnUpdateSuccess: false });
});
@@ -204,16 +203,13 @@ describe('SetStatusModalWrapper', () => {
findModal().vm.$emit('ok');
await wrapper.vm.$nextTick();
- expect($toast.show).toHaveBeenCalledWith('Status updated', {
- position: 'top-center',
- type: 'success',
- });
+ expect($toast.show).toHaveBeenCalledWith('Status updated');
});
});
describe('with errors', () => {
beforeEach(() => {
- jest.spyOn(Api, 'postUserStatus').mockRejectedValue();
+ jest.spyOn(UserApi, 'updateUserStatus').mockRejectedValue();
});
it('calls the "onUpdateFail" handler', async () => {
@@ -228,7 +224,7 @@ describe('SetStatusModalWrapper', () => {
beforeEach(async () => {
mockEmoji = await initEmojiMock();
wrapper = createComponent({ currentEmoji: '', currentMessage: '' });
- jest.spyOn(Api, 'postUserStatus').mockRejectedValue();
+ jest.spyOn(UserApi, 'updateUserStatus').mockRejectedValue();
return initModal({ mockOnUpdateFailure: false });
});
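
The status-update expectations now spy on a named export of ~/api/user_api instead of auto-mocking the whole ~/api object, so the rest of the API surface stays real and each block chooses its own outcome. The pattern in isolation (block names as in the hunks above):

    import * as UserApi from '~/api/user_api';

    describe('succeeds', () => {
      beforeEach(() => {
        // Spying on the named export replaces jest.mock('~/api'); only this call is faked.
        jest.spyOn(UserApi, 'updateUserStatus').mockResolvedValue();
      });
    });

    describe('with errors', () => {
      beforeEach(() => {
        jest.spyOn(UserApi, 'updateUserStatus').mockRejectedValue();
      });
    });
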
diff --git a/spec/frontend/shared/popover_spec.js b/spec/frontend/shared/popover_spec.js
index bbde936185e..59b0b3b006c 100644
--- a/spec/frontend/shared/popover_spec.js
+++ b/spec/frontend/shared/popover_spec.js
@@ -22,14 +22,14 @@ describe('popover', () => {
expect(togglePopover.call(context, true)).toEqual(false);
});
- it('shows popover', done => {
+ it('shows popover', (done) => {
const context = {
hasClass: () => false,
popover: () => {},
toggleClass: () => {},
};
- jest.spyOn(context, 'popover').mockImplementation(method => {
+ jest.spyOn(context, 'popover').mockImplementation((method) => {
expect(method).toEqual('show');
done();
});
@@ -37,7 +37,7 @@ describe('popover', () => {
togglePopover.call(context, true);
});
- it('adds disable-animation and js-popover-show class', done => {
+ it('adds disable-animation and js-popover-show class', (done) => {
const context = {
hasClass: () => false,
popover: () => {},
@@ -73,14 +73,14 @@ describe('popover', () => {
expect(togglePopover.call(context, false)).toEqual(false);
});
- it('hides popover', done => {
+ it('hides popover', (done) => {
const context = {
hasClass: () => true,
popover: () => {},
toggleClass: () => {},
};
- jest.spyOn(context, 'popover').mockImplementation(method => {
+ jest.spyOn(context, 'popover').mockImplementation((method) => {
expect(method).toEqual('hide');
done();
});
@@ -88,7 +88,7 @@ describe('popover', () => {
togglePopover.call(context, false);
});
- it('removes disable-animation and js-popover-show class', done => {
+ it('removes disable-animation and js-popover-show class', (done) => {
const context = {
hasClass: () => true,
popover: () => {},
@@ -114,7 +114,7 @@ describe('popover', () => {
jest
.spyOn($.fn, 'init')
- .mockImplementation(selector => (selector === '.popover:hover' ? fakeJquery : $.fn));
+ .mockImplementation((selector) => (selector === '.popover:hover' ? fakeJquery : $.fn));
jest.spyOn(togglePopover, 'call').mockImplementation(() => {});
mouseleave();
@@ -128,7 +128,7 @@ describe('popover', () => {
jest
.spyOn($.fn, 'init')
- .mockImplementation(selector => (selector === '.popover:hover' ? fakeJquery : $.fn));
+ .mockImplementation((selector) => (selector === '.popover:hover' ? fakeJquery : $.fn));
jest.spyOn(togglePopover, 'call').mockImplementation(() => {});
mouseleave();
@@ -146,9 +146,9 @@ describe('popover', () => {
expect(togglePopover.call).toHaveBeenCalledWith(expect.any(Object), true);
});
- it('registers mouseleave event if popover is showed', done => {
+ it('registers mouseleave event if popover is shown', (done) => {
jest.spyOn(togglePopover, 'call').mockReturnValue(true);
- jest.spyOn($.fn, 'on').mockImplementation(eventName => {
+ jest.spyOn($.fn, 'on').mockImplementation((eventName) => {
expect(eventName).toEqual('mouseleave');
done();
});
diff --git a/spec/frontend/shortcuts_spec.js b/spec/frontend/shortcuts_spec.js
index 538b3afa50f..1650dd2c1ca 100644
--- a/spec/frontend/shortcuts_spec.js
+++ b/spec/frontend/shortcuts_spec.js
@@ -85,13 +85,13 @@ describe('Shortcuts', () => {
});
it('attaches a Mousetrap handler for every markdown shortcut specified with md-shortcuts', () => {
- const expectedCalls = shortcuts.map(s => [s, expect.any(Function)]);
+ const expectedCalls = shortcuts.map((s) => [s, expect.any(Function)]);
expect(mockMousetrap.bind.mock.calls).toEqual(expectedCalls);
});
it('attaches a stopCallback that allows each markdown shortcut specified with md-shortcuts', () => {
- flatten(shortcuts).forEach(s => {
+ flatten(shortcuts).forEach((s) => {
expect(mockMousetrap.stopCallback(null, null, s)).toBe(false);
});
});
@@ -108,7 +108,7 @@ describe('Shortcuts', () => {
Shortcuts.initMarkdownEditorShortcuts($('.edit-note textarea'));
Shortcuts.removeMarkdownEditorShortcuts($('.edit-note textarea'));
- const expectedCalls = shortcuts.map(s => [s]);
+ const expectedCalls = shortcuts.map((s) => [s]);
expect(mockMousetrap.unbind.mock.calls).toEqual(expectedCalls);
});
diff --git a/spec/frontend/sidebar/assignee_title_spec.js b/spec/frontend/sidebar/assignee_title_spec.js
index b5d1e5216f8..9f5d51c7795 100644
--- a/spec/frontend/sidebar/assignee_title_spec.js
+++ b/spec/frontend/sidebar/assignee_title_spec.js
@@ -6,7 +6,7 @@ import Component from '~/sidebar/components/assignees/assignee_title.vue';
describe('AssigneeTitle component', () => {
let wrapper;
- const createComponent = props => {
+ const createComponent = (props) => {
return shallowMount(Component, {
propsData: {
numberOfAssignees: 0,
diff --git a/spec/frontend/sidebar/assignees_spec.js b/spec/frontend/sidebar/assignees_spec.js
index d1810ada97a..23e82171fe9 100644
--- a/spec/frontend/sidebar/assignees_spec.js
+++ b/spec/frontend/sidebar/assignees_spec.js
@@ -1,9 +1,9 @@
import { mount } from '@vue/test-utils';
import { trimText } from 'helpers/text_helper';
import { GlIcon } from '@gitlab/ui';
+import UsersMockHelper from 'helpers/user_mock_data_helper';
import Assignee from '~/sidebar/components/assignees/assignees.vue';
import UsersMock from './mock_data';
-import UsersMockHelper from '../helpers/user_mock_data_helper';
describe('Assignee component', () => {
const getDefaultProps = () => ({
diff --git a/spec/frontend/sidebar/components/assignees/collapsed_assignee_list_spec.js b/spec/frontend/sidebar/components/assignees/collapsed_assignee_list_spec.js
index 907d6144415..8e158c99971 100644
--- a/spec/frontend/sidebar/components/assignees/collapsed_assignee_list_spec.js
+++ b/spec/frontend/sidebar/components/assignees/collapsed_assignee_list_spec.js
@@ -101,7 +101,7 @@ describe('CollapsedAssigneeList component', () => {
beforeEach(() => {
users = UsersMockHelper.createNumberRandomUsers(3);
- userNames = users.map(x => x.name).join(', ');
+ userNames = users.map((x) => x.name).join(', ');
});
describe('default', () => {
diff --git a/spec/frontend/sidebar/components/assignees/collapsed_assignee_spec.js b/spec/frontend/sidebar/components/assignees/collapsed_assignee_spec.js
index 49a6d9e8ae6..ee1f8ed8d2b 100644
--- a/spec/frontend/sidebar/components/assignees/collapsed_assignee_spec.js
+++ b/spec/frontend/sidebar/components/assignees/collapsed_assignee_spec.js
@@ -28,12 +28,7 @@ describe('CollapsedAssignee assignee component', () => {
it('has author name', () => {
createComponent();
- expect(
- wrapper
- .find('.author')
- .text()
- .trim(),
- ).toEqual(TEST_USER.name);
+ expect(wrapper.find('.author').text().trim()).toEqual(TEST_USER.name);
});
it('has assignee avatar', () => {
diff --git a/spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js b/spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js
index 1cf0af48bef..da69f56d442 100644
--- a/spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js
+++ b/spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js
@@ -1,9 +1,9 @@
import { mount } from '@vue/test-utils';
import { TEST_HOST } from 'helpers/test_constants';
+import UsersMockHelper from 'helpers/user_mock_data_helper';
import UncollapsedAssigneeList from '~/sidebar/components/assignees/uncollapsed_assignee_list.vue';
import AssigneeAvatarLink from '~/sidebar/components/assignees/assignee_avatar_link.vue';
import userDataMock from '../../user_data_mock';
-import UsersMockHelper from '../../../helpers/user_mock_data_helper';
const DEFAULT_RENDER_COUNT = 5;
diff --git a/spec/frontend/sidebar/components/copy_email_to_clipboard_spec.js b/spec/frontend/sidebar/components/copy_email_to_clipboard_spec.js
new file mode 100644
index 00000000000..b9132fa4450
--- /dev/null
+++ b/spec/frontend/sidebar/components/copy_email_to_clipboard_spec.js
@@ -0,0 +1,22 @@
+import { mount } from '@vue/test-utils';
+import { getByText } from '@testing-library/dom';
+import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
+import CopyEmailToClipboard from '~/sidebar/components/copy_email_to_clipboard.vue';
+
+describe('CopyEmailToClipboard component', () => {
+ const sampleEmail = 'sample+email@test.com';
+
+ const wrapper = mount(CopyEmailToClipboard, {
+ propsData: {
+ copyText: sampleEmail,
+ },
+ });
+
+ it('renders the Issue email text with the forwardable email', () => {
+ expect(getByText(wrapper.element, `Issue email: ${sampleEmail}`)).not.toBeNull();
+ });
+
+ it('finds ClipboardButton with the correct props', () => {
+ expect(wrapper.find(ClipboardButton).props('text')).toBe(sampleEmail);
+ });
+});
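
The new spec mixes two querying styles: @vue/test-utils lookups on the wrapper for component-level assertions, and @testing-library/dom's `getByText` run against the raw `wrapper.element` for user-visible text. A minimal, self-contained sketch of that interop (the component here is a stand-in, not the real one):

import { mount } from '@vue/test-utils';
import { getByText } from '@testing-library/dom';

// Stand-in component used only for this sketch.
const Greeting = { template: '<p>Hello, GitLab</p>' };

it('can be queried through both APIs', () => {
  const wrapper = mount(Greeting);

  // Vue Test Utils queries operate on the wrapper...
  expect(wrapper.find('p').exists()).toBe(true);

  // ...while Testing Library queries operate on the underlying DOM node.
  expect(getByText(wrapper.element, 'Hello, GitLab')).not.toBeNull();
});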
diff --git a/spec/frontend/sidebar/components/severity/severity_spec.js b/spec/frontend/sidebar/components/severity/severity_spec.js
index b6690f11d6b..ba7cea0919c 100644
--- a/spec/frontend/sidebar/components/severity/severity_spec.js
+++ b/spec/frontend/sidebar/components/severity/severity_spec.js
@@ -24,7 +24,7 @@ describe('SeverityToken', () => {
const findIcon = () => wrapper.find(GlIcon);
it('renders severity token for each severity type', () => {
- Object.values(INCIDENT_SEVERITY).forEach(severity => {
+ Object.values(INCIDENT_SEVERITY).forEach((severity) => {
createComponent({ severity });
expect(findIcon().classes()).toContain(`icon-${severity.icon}`);
expect(findIcon().attributes('name')).toBe(`severity-${severity.icon}`);
diff --git a/spec/frontend/sidebar/components/severity/sidebar_severity_spec.js b/spec/frontend/sidebar/components/severity/sidebar_severity_spec.js
index 638d3706d12..fa40d75d4e9 100644
--- a/spec/frontend/sidebar/components/severity/sidebar_severity_spec.js
+++ b/spec/frontend/sidebar/components/severity/sidebar_severity_spec.js
@@ -94,7 +94,7 @@ describe('SidebarSeverity', () => {
let resolvePromise;
wrapper.vm.$apollo.mutate = jest.fn(
() =>
- new Promise(resolve => {
+ new Promise((resolve) => {
resolvePromise = resolve;
}),
);
@@ -119,21 +119,11 @@ describe('SidebarSeverity', () => {
});
it('should display only icon with a tooltip', () => {
- expect(
- findSeverityToken()
- .at(0)
- .attributes('icononly'),
- ).toBe('true');
- expect(
- findSeverityToken()
- .at(0)
- .attributes('iconsize'),
- ).toBe('14');
- expect(
- findTooltip()
- .text()
- .replace(/\s+/g, ' '),
- ).toContain(`Severity: ${INCIDENT_SEVERITY[severity].label}`);
+ expect(findSeverityToken().at(0).attributes('icononly')).toBe('true');
+ expect(findSeverityToken().at(0).attributes('iconsize')).toBe('14');
+ expect(findTooltip().text().replace(/\s+/g, ' ')).toContain(
+ `Severity: ${INCIDENT_SEVERITY[severity].label}`,
+ );
});
it('should expand the dropdown on collapsed icon click', async () => {
diff --git a/spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js b/spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js
index bcd2c14f2fa..0b6a2e6ceb9 100644
--- a/spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js
+++ b/spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js
@@ -1,11 +1,12 @@
import { createMockDirective } from 'helpers/vue_mock_directive';
import { mount } from '@vue/test-utils';
+import { stubTransition } from 'helpers/stub_transition';
import TimeTracker from '~/sidebar/components/time_tracking/time_tracker.vue';
describe('Issuable Time Tracker', () => {
let wrapper;
- const findByTestId = testId => wrapper.find(`[data-testid=${testId}]`);
+ const findByTestId = (testId) => wrapper.find(`[data-testid=${testId}]`);
const findComparisonMeter = () => findByTestId('compareMeter').attributes('title');
const findCollapsedState = () => findByTestId('collapsedState');
const findTimeRemainingProgress = () => findByTestId('timeRemainingProgress');
@@ -22,6 +23,9 @@ describe('Issuable Time Tracker', () => {
mount(TimeTracker, {
propsData: { ...defaultProps, ...props },
directives: { GlTooltip: createMockDirective() },
+ stubs: {
+ transition: stubTransition(),
+ },
});
afterEach(() => {
@@ -213,14 +217,12 @@ describe('Issuable Time Tracker', () => {
findHelpButton().trigger('click');
await wrapper.vm.$nextTick();
- expect(findByTestId('helpPane').classes('help-state-toggle-enter')).toBe(true);
- expect(findByTestId('helpPane').classes('help-state-toggle-leave')).toBe(false);
+ expect(findByTestId('helpPane').exists()).toBe(true);
findCloseHelpButton().trigger('click');
await wrapper.vm.$nextTick();
- expect(findByTestId('helpPane').classes('help-state-toggle-leave')).toBe(true);
- expect(findByTestId('helpPane').classes('help-state-toggle-enter')).toBe(false);
+ expect(findByTestId('helpPane').exists()).toBe(false);
});
});
});
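
Stubbing `<transition>` is what allows the assertions above to drop the `help-state-toggle-*` class checks in favour of a plain `exists()` check: with the stub in place, toggled content simply appears in or disappears from the DOM with no enter/leave phases. The `helpers/stub_transition` helper imported above is conceptually just a pass-through component, roughly along these lines (a sketch; the real helper may differ):

// Pass-through stand-in for <transition> (sketch; the real stub_transition helper may differ).
const stubTransition = () => ({
  functional: true,
  // Render the single wrapped child directly, skipping transition classes and timing.
  render: (h, { children }) => children && children[0],
});

// Used as in the spec above:
// mount(TimeTracker, { stubs: { transition: stubTransition() } });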
diff --git a/spec/frontend/sidebar/confidential/edit_form_spec.js b/spec/frontend/sidebar/confidential/edit_form_spec.js
index 56f163eecd1..6b571df10ae 100644
--- a/spec/frontend/sidebar/confidential/edit_form_spec.js
+++ b/spec/frontend/sidebar/confidential/edit_form_spec.js
@@ -6,7 +6,7 @@ describe('Edit Form Dropdown', () => {
const toggleForm = () => {};
const updateConfidentialAttribute = () => {};
- const createComponent = props => {
+ const createComponent = (props) => {
wrapper = shallowMount(EditForm, {
propsData: {
...props,
diff --git a/spec/frontend/sidebar/lock/edit_form_buttons_spec.js b/spec/frontend/sidebar/lock/edit_form_buttons_spec.js
index 913646c8f8d..a14ca711204 100644
--- a/spec/frontend/sidebar/lock/edit_form_buttons_spec.js
+++ b/spec/frontend/sidebar/lock/edit_form_buttons_spec.js
@@ -15,7 +15,7 @@ describe('EditFormButtons', () => {
let issuableType;
let issuableDisplayName;
- const setIssuableType = pageType => {
+ const setIssuableType = (pageType) => {
issuableType = pageType;
issuableDisplayName = issuableType.replace(/_/g, ' ');
};
diff --git a/spec/frontend/sidebar/lock/edit_form_spec.js b/spec/frontend/sidebar/lock/edit_form_spec.js
index b1c3bfe3ef5..4ae9025ee39 100644
--- a/spec/frontend/sidebar/lock/edit_form_spec.js
+++ b/spec/frontend/sidebar/lock/edit_form_spec.js
@@ -7,7 +7,7 @@ describe('Edit Form Dropdown', () => {
let issuableType; // Either ISSUABLE_TYPE_ISSUE or ISSUABLE_TYPE_MR
let issuableDisplayName;
- const setIssuableType = pageType => {
+ const setIssuableType = (pageType) => {
issuableType = pageType;
issuableDisplayName = issuableType.replace(/_/g, ' ');
};
diff --git a/spec/frontend/sidebar/lock/issuable_lock_form_spec.js b/spec/frontend/sidebar/lock/issuable_lock_form_spec.js
index e8091dcb51d..92cdba4f1f2 100644
--- a/spec/frontend/sidebar/lock/issuable_lock_form_spec.js
+++ b/spec/frontend/sidebar/lock/issuable_lock_form_spec.js
@@ -12,7 +12,7 @@ describe('IssuableLockForm', () => {
let store;
let issuableType; // Either ISSUABLE_TYPE_ISSUE or ISSUABLE_TYPE_MR
- const setIssuableType = pageType => {
+ const setIssuableType = (pageType) => {
issuableType = pageType;
};
@@ -23,7 +23,7 @@ describe('IssuableLockForm', () => {
const findSidebarLockStatusTooltip = () =>
getBinding(findSidebarCollapseIcon().element, 'gl-tooltip');
- const initStore = isLocked => {
+ const initStore = (isLocked) => {
if (issuableType === ISSUABLE_TYPE_ISSUE) {
store = createStore();
store.getters.getNoteableData.targetType = 'issue';
diff --git a/spec/frontend/sidebar/participants_spec.js b/spec/frontend/sidebar/participants_spec.js
index 93c9b3b84c3..1210f7c9531 100644
--- a/spec/frontend/sidebar/participants_spec.js
+++ b/spec/frontend/sidebar/participants_spec.js
@@ -21,7 +21,7 @@ describe('Participants', () => {
const getCollapsedParticipantsCount = () => wrapper.find('[data-testid="collapsed-count"]');
- const mountComponent = propsData =>
+ const mountComponent = (propsData) =>
shallowMount(Participants, {
propsData,
});
diff --git a/spec/frontend/sidebar/reviewer_title_spec.js b/spec/frontend/sidebar/reviewer_title_spec.js
index eae266688d5..cbd36040579 100644
--- a/spec/frontend/sidebar/reviewer_title_spec.js
+++ b/spec/frontend/sidebar/reviewer_title_spec.js
@@ -6,7 +6,7 @@ import Component from '~/sidebar/components/reviewers/reviewer_title.vue';
describe('ReviewerTitle component', () => {
let wrapper;
- const createComponent = props => {
+ const createComponent = (props) => {
return shallowMount(Component, {
propsData: {
numberOfReviewers: 0,
@@ -41,27 +41,6 @@ describe('ReviewerTitle component', () => {
});
});
- describe('gutter toggle', () => {
- it('does not show toggle by default', () => {
- wrapper = createComponent({
- numberOfReviewers: 2,
- editable: false,
- });
-
- expect(wrapper.vm.$el.querySelector('.gutter-toggle')).toBeNull();
- });
-
- it('shows toggle when showToggle is true', () => {
- wrapper = createComponent({
- numberOfReviewers: 2,
- editable: false,
- showToggle: true,
- });
-
- expect(wrapper.vm.$el.querySelector('.gutter-toggle')).toEqual(expect.any(Object));
- });
- });
-
it('does not render spinner by default', () => {
wrapper = createComponent({
numberOfReviewers: 0,
diff --git a/spec/frontend/sidebar/reviewers_spec.js b/spec/frontend/sidebar/reviewers_spec.js
index effcac266f0..91f28e85f3b 100644
--- a/spec/frontend/sidebar/reviewers_spec.js
+++ b/spec/frontend/sidebar/reviewers_spec.js
@@ -1,9 +1,9 @@
import { mount } from '@vue/test-utils';
import { trimText } from 'helpers/text_helper';
import { GlIcon } from '@gitlab/ui';
+import UsersMockHelper from 'helpers/user_mock_data_helper';
import Reviewer from '~/sidebar/components/reviewers/reviewers.vue';
import UsersMock from './mock_data';
-import UsersMockHelper from '../helpers/user_mock_data_helper';
describe('Reviewer component', () => {
const getDefaultProps = () => ({
diff --git a/spec/frontend/sidebar/sidebar_assignees_spec.js b/spec/frontend/sidebar/sidebar_assignees_spec.js
index dc4560d2ae8..f1c13a5f818 100644
--- a/spec/frontend/sidebar/sidebar_assignees_spec.js
+++ b/spec/frontend/sidebar/sidebar_assignees_spec.js
@@ -29,7 +29,7 @@ describe('sidebar assignees', () => {
},
},
// Attaching to document is required because this component emits something from the parent element :/
- attachToDocument: true,
+ attachTo: document.body,
});
};
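
`attachToDocument` is deprecated in @vue/test-utils in favour of `attachTo`, which takes a DOM element (or selector) as above. A minimal sketch of the replacement pattern, including the cleanup that keeps attached nodes from leaking between tests (the component here is a stand-in):

import { mount } from '@vue/test-utils';

// Trivial stand-in component to keep the sketch self-contained.
const SomeComponent = { template: '<button>click</button>' };

let wrapper;

beforeEach(() => {
  // Attaching is only needed when the component depends on being in the document,
  // e.g. when events are observed on a parent element.
  wrapper = mount(SomeComponent, { attachTo: document.body });
});

afterEach(() => {
  // destroy() also detaches the rendered element, keeping tests isolated.
  wrapper.destroy();
});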
diff --git a/spec/frontend/sidebar/sidebar_mediator_spec.js b/spec/frontend/sidebar/sidebar_mediator_spec.js
index 0892d452966..019ded87093 100644
--- a/spec/frontend/sidebar/sidebar_mediator_spec.js
+++ b/spec/frontend/sidebar/sidebar_mediator_spec.js
@@ -33,7 +33,7 @@ describe('Sidebar mediator', () => {
it('saves assignees', () => {
mock.onPut(mediatorMockData.endpoint).reply(200, {});
- return mediator.saveAssignees('issue[assignee_ids]').then(resp => {
+ return mediator.saveAssignees('issue[assignee_ids]').then((resp) => {
expect(resp.status).toEqual(200);
});
});
diff --git a/spec/frontend/sidebar/sidebar_move_issue_spec.js b/spec/frontend/sidebar/sidebar_move_issue_spec.js
index ad919f69546..24bb5a8e916 100644
--- a/spec/frontend/sidebar/sidebar_move_issue_spec.js
+++ b/spec/frontend/sidebar/sidebar_move_issue_spec.js
@@ -74,7 +74,7 @@ describe('SidebarMoveIssue', () => {
expect(test.sidebarMoveIssue.$dropdownToggle.data('deprecatedJQueryDropdown')).toBeTruthy();
});
- it('escapes html from project name', done => {
+ it('escapes html from project name', (done) => {
test.$toggleButton.dropdown('toggle');
setImmediate(() => {
@@ -98,7 +98,7 @@ describe('SidebarMoveIssue', () => {
expect(test.$confirmButton.hasClass('is-loading')).toBe(true);
});
- it('should remove loading state from confirm button on failure', done => {
+ it('should remove loading state from confirm button on failure', (done) => {
jest.spyOn(window, 'Flash').mockImplementation(() => {});
jest.spyOn(test.mediator, 'moveIssue').mockReturnValue(Promise.reject());
test.mediator.setMoveToProjectId(7);
@@ -125,7 +125,7 @@ describe('SidebarMoveIssue', () => {
});
});
- it('should set moveToProjectId on dropdown item "No project" click', done => {
+ it('should set moveToProjectId on dropdown item "No project" click', (done) => {
jest.spyOn(test.mediator, 'setMoveToProjectId').mockImplementation(() => {});
// Open the dropdown
@@ -133,10 +133,7 @@ describe('SidebarMoveIssue', () => {
// Wait for the autocomplete request to finish
setImmediate(() => {
- test.$content
- .find('.js-move-issue-dropdown-item')
- .eq(0)
- .trigger('click');
+ test.$content.find('.js-move-issue-dropdown-item').eq(0).trigger('click');
expect(test.mediator.setMoveToProjectId).toHaveBeenCalledWith(0);
expect(test.$confirmButton.prop('disabled')).toBeTruthy();
@@ -144,7 +141,7 @@ describe('SidebarMoveIssue', () => {
});
});
- it('should set moveToProjectId on dropdown item click', done => {
+ it('should set moveToProjectId on dropdown item click', (done) => {
jest.spyOn(test.mediator, 'setMoveToProjectId').mockImplementation(() => {});
// Open the dropdown
@@ -152,10 +149,7 @@ describe('SidebarMoveIssue', () => {
// Wait for the autocomplete request to finish
setImmediate(() => {
- test.$content
- .find('.js-move-issue-dropdown-item')
- .eq(1)
- .trigger('click');
+ test.$content.find('.js-move-issue-dropdown-item').eq(1).trigger('click');
expect(test.mediator.setMoveToProjectId).toHaveBeenCalledWith(20);
expect(test.$confirmButton.attr('disabled')).toBe(undefined);
diff --git a/spec/frontend/sidebar/sidebar_store_spec.js b/spec/frontend/sidebar/sidebar_store_spec.js
index 7c18222f300..7b73dc868b7 100644
--- a/spec/frontend/sidebar/sidebar_store_spec.js
+++ b/spec/frontend/sidebar/sidebar_store_spec.js
@@ -1,6 +1,6 @@
+import UsersMockHelper from 'helpers/user_mock_data_helper';
import SidebarStore from '~/sidebar/stores/sidebar_store';
import Mock from './mock_data';
-import UsersMockHelper from '../helpers/user_mock_data_helper';
const ASSIGNEE = {
id: 2,
diff --git a/spec/frontend/sidebar/subscriptions_spec.js b/spec/frontend/sidebar/subscriptions_spec.js
index 428441656b3..043ffd972da 100644
--- a/spec/frontend/sidebar/subscriptions_spec.js
+++ b/spec/frontend/sidebar/subscriptions_spec.js
@@ -8,7 +8,7 @@ describe('Subscriptions', () => {
const findToggleButton = () => wrapper.find(ToggleButton);
- const mountComponent = propsData =>
+ const mountComponent = (propsData) =>
shallowMount(Subscriptions, {
propsData,
});
diff --git a/spec/frontend/sidebar/todo_spec.js b/spec/frontend/sidebar/todo_spec.js
index b0e94f16dd7..4adfaf7ad7b 100644
--- a/spec/frontend/sidebar/todo_spec.js
+++ b/spec/frontend/sidebar/todo_spec.js
@@ -42,12 +42,7 @@ describe('SidebarTodo', () => {
({ isTodo, iconClass, label, icon }) => {
createComponent({ isTodo });
- expect(
- wrapper
- .find(GlIcon)
- .classes()
- .join(' '),
- ).toStrictEqual(iconClass);
+ expect(wrapper.find(GlIcon).classes().join(' ')).toStrictEqual(iconClass);
expect(wrapper.find(GlIcon).props('name')).toStrictEqual(icon);
expect(wrapper.find('button').text()).toBe(label);
},
diff --git a/spec/frontend/snippet/collapsible_input_spec.js b/spec/frontend/snippet/collapsible_input_spec.js
index aa017964437..3f14a9cd1a1 100644
--- a/spec/frontend/snippet/collapsible_input_spec.js
+++ b/spec/frontend/snippet/collapsible_input_spec.js
@@ -38,12 +38,12 @@ describe('~/snippet/collapsible_input', () => {
setupCollapsibleInputs();
});
- const findInput = el => el.querySelector('textarea,input');
- const findCollapsed = el => el.querySelector('.js-collapsed');
- const findExpanded = el => el.querySelector('.js-expanded');
- const findCollapsedInput = el => findInput(findCollapsed(el));
- const findExpandedInput = el => findInput(findExpanded(el));
- const focusIn = target => target.dispatchEvent(new Event('focusin', { bubbles: true }));
+ const findInput = (el) => el.querySelector('textarea,input');
+ const findCollapsed = (el) => el.querySelector('.js-collapsed');
+ const findExpanded = (el) => el.querySelector('.js-expanded');
+ const findCollapsedInput = (el) => findInput(findCollapsed(el));
+ const findExpandedInput = (el) => findInput(findExpanded(el));
+ const focusIn = (target) => target.dispatchEvent(new Event('focusin', { bubbles: true }));
const expectIsCollapsed = (el, isCollapsed) => {
expect(findCollapsed(el).classList.contains('d-none')).toEqual(!isCollapsed);
expect(findExpanded(el).classList.contains('d-none')).toEqual(isCollapsed);
diff --git a/spec/frontend/snippets/components/edit_spec.js b/spec/frontend/snippets/components/edit_spec.js
index 3521733ee5e..b818f98efb1 100644
--- a/spec/frontend/snippets/components/edit_spec.js
+++ b/spec/frontend/snippets/components/edit_spec.js
@@ -2,7 +2,7 @@ import VueApollo, { ApolloMutation } from 'vue-apollo';
import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import waitForPromises from 'helpers/wait_for_promises';
-import createMockApollo from 'jest/helpers/mock_apollo_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
import GetSnippetQuery from 'shared_queries/snippet/snippet.query.graphql';
import { deprecatedCreateFlash as Flash } from '~/flash';
import * as urlUtils from '~/lib/utils/url_utility';
@@ -149,9 +149,11 @@ describe('Snippet Edit app', () => {
const hasDisabledSubmit = () => Boolean(findSubmitButton().attributes('disabled'));
const clickSubmitBtn = () => wrapper.find('[data-testid="snippet-edit-form"]').trigger('submit');
- const triggerBlobActions = actions => findBlobActions().vm.$emit('actions', actions);
- const setUploadFilesHtml = paths => {
- wrapper.vm.$el.innerHTML = paths.map(path => `<input name="files[]" value="${path}">`).join('');
+ const triggerBlobActions = (actions) => findBlobActions().vm.$emit('actions', actions);
+ const setUploadFilesHtml = (paths) => {
+ wrapper.vm.$el.innerHTML = paths
+ .map((path) => `<input name="files[]" value="${path}">`)
+ .join('');
};
const getApiData = ({
id,
@@ -189,7 +191,7 @@ describe('Snippet Edit app', () => {
it.each([[{}], [{ snippetGid: '' }]])(
'should render all required components with %s',
- props => {
+ (props) => {
createComponent(props);
expect(wrapper.find(TitleField).exists()).toBe(true);
@@ -257,7 +259,7 @@ describe('Snippet Edit app', () => {
describe('default visibility', () => {
it.each([SNIPPET_VISIBILITY_PRIVATE, SNIPPET_VISIBILITY_INTERNAL, SNIPPET_VISIBILITY_PUBLIC])(
'marks %s visibility by default',
- async visibility => {
+ async (visibility) => {
createComponent({
props: { snippetGid: '' },
selectedLevel: visibility,
diff --git a/spec/frontend/snippets/components/embed_dropdown_spec.js b/spec/frontend/snippets/components/embed_dropdown_spec.js
index 8eb44965692..f1eb7d43409 100644
--- a/spec/frontend/snippets/components/embed_dropdown_spec.js
+++ b/spec/frontend/snippets/components/embed_dropdown_spec.js
@@ -26,7 +26,7 @@ describe('snippets/components/embed_dropdown', () => {
const sections = [];
let current = {};
- wrapper.findAll('[data-testid="header"],[data-testid="input"]').wrappers.forEach(x => {
+ wrapper.findAll('[data-testid="header"],[data-testid="input"]').wrappers.forEach((x) => {
const type = x.attributes('data-testid');
if (type === 'header') {
diff --git a/spec/frontend/snippets/components/snippet_blob_actions_edit_spec.js b/spec/frontend/snippets/components/snippet_blob_actions_edit_spec.js
index 055168a1711..08056e788de 100644
--- a/spec/frontend/snippets/components/snippet_blob_actions_edit_spec.js
+++ b/spec/frontend/snippets/components/snippet_blob_actions_edit_spec.js
@@ -14,7 +14,7 @@ const TEST_BLOBS = [
createBlobFromTestEntry(testEntries.deleted),
];
-const TEST_BLOBS_UNLOADED = TEST_BLOBS.map(blob => ({ ...blob, content: '', isLoaded: false }));
+const TEST_BLOBS_UNLOADED = TEST_BLOBS.map((blob) => ({ ...blob, content: '', isLoaded: false }));
describe('snippets/components/snippet_blob_actions_edit', () => {
let wrapper;
@@ -31,7 +31,7 @@ describe('snippets/components/snippet_blob_actions_edit', () => {
const findLabel = () => wrapper.find('label');
const findBlobEdits = () => wrapper.findAll(SnippetBlobEdit);
const findBlobsData = () =>
- findBlobEdits().wrappers.map(x => ({
+ findBlobEdits().wrappers.map((x) => ({
blob: x.props('blob'),
classes: x.classes(),
}));
@@ -42,7 +42,7 @@ describe('snippets/components/snippet_blob_actions_edit', () => {
return events[events.length - 1]?.[0];
};
- const buildBlobsDataExpectation = blobs =>
+ const buildBlobsDataExpectation = (blobs) =>
blobs.map((blob, index) => ({
blob: {
...blob,
@@ -50,14 +50,8 @@ describe('snippets/components/snippet_blob_actions_edit', () => {
},
classes: index > 0 ? ['gl-mt-3'] : [],
}));
- const triggerBlobDelete = idx =>
- findBlobEdits()
- .at(idx)
- .vm.$emit('delete');
- const triggerBlobUpdate = (idx, props) =>
- findBlobEdits()
- .at(idx)
- .vm.$emit('blob-updated', props);
+ const triggerBlobDelete = (idx) => findBlobEdits().at(idx).vm.$emit('delete');
+ const triggerBlobUpdate = (idx, props) => findBlobEdits().at(idx).vm.$emit('blob-updated', props);
afterEach(() => {
wrapper.destroy();
@@ -213,11 +207,7 @@ describe('snippets/components/snippet_blob_actions_edit', () => {
it('disables delete button', () => {
expect(findBlobEdits()).toHaveLength(1);
- expect(
- findBlobEdits()
- .at(0)
- .props(),
- ).toMatchObject({
+ expect(findBlobEdits().at(0).props()).toMatchObject({
showDelete: true,
canDelete: false,
});
diff --git a/spec/frontend/snippets/components/snippet_header_spec.js b/spec/frontend/snippets/components/snippet_header_spec.js
index 5836de1fdbe..93a66db32c6 100644
--- a/spec/frontend/snippets/components/snippet_header_spec.js
+++ b/spec/frontend/snippets/components/snippet_header_spec.js
@@ -17,6 +17,8 @@ describe('Snippet header component', () => {
let err;
const originalRelativeUrlRoot = gon.relative_url_root;
+ const GlEmoji = { template: '<img/>' };
+
function createComponent({
loading = false,
permissions = {},
@@ -47,10 +49,15 @@ describe('Snippet header component', () => {
},
stubs: {
ApolloMutation,
+ GlEmoji,
},
});
}
+ const findAuthorEmoji = () => wrapper.find(GlEmoji);
+ const findAuthoredMessage = () => wrapper.find('[data-testid="authored-message"]').text();
+ const buttonCount = () => wrapper.findAll(GlButton).length;
+
beforeEach(() => {
gon.relative_url_root = '/foo/';
snippet = {
@@ -66,6 +73,7 @@ describe('Snippet header component', () => {
project: null,
author: {
name: 'Thor Odinson',
+ status: null,
},
blobs: [Blob],
createdAt: new Date(differenceInMilliseconds(32 * 24 * 3600 * 1000)).toISOString(),
@@ -100,17 +108,36 @@ describe('Snippet header component', () => {
it('renders a message showing snippet creation date and author', () => {
createComponent();
- const text = wrapper.find('[data-testid="authored-message"]').text();
+ const text = findAuthoredMessage();
expect(text).toContain('Authored 1 month ago by');
expect(text).toContain('Thor Odinson');
});
+ describe('author status', () => {
+ it('is rendered when it is set', () => {
+ snippet.author.status = {
+ message: 'At work',
+ emoji: 'hammer',
+ };
+ createComponent();
+
+ expect(findAuthorEmoji().attributes('title')).toBe(snippet.author.status.message);
+ expect(findAuthorEmoji().attributes('data-name')).toBe(snippet.author.status.emoji);
+ });
+
+ it('is not rendered when the user has no status', () => {
+ createComponent();
+
+ expect(findAuthorEmoji().exists()).toBe(false);
+ });
+ });
+
it('renders a message showing only snippet creation date if author is null', () => {
snippet.author = null;
createComponent();
- const text = wrapper.find('[data-testid="authored-message"]').text();
+ const text = findAuthoredMessage();
expect(text).toBe('Authored 1 month ago');
});
@@ -121,7 +148,7 @@ describe('Snippet header component', () => {
updateSnippet: false,
},
});
- expect(wrapper.findAll(GlButton).length).toEqual(0);
+ expect(buttonCount()).toEqual(0);
createComponent({
permissions: {
@@ -129,7 +156,7 @@ describe('Snippet header component', () => {
updateSnippet: false,
},
});
- expect(wrapper.findAll(GlButton).length).toEqual(1);
+ expect(buttonCount()).toEqual(1);
createComponent({
permissions: {
@@ -137,7 +164,7 @@ describe('Snippet header component', () => {
updateSnippet: true,
},
});
- expect(wrapper.findAll(GlButton).length).toEqual(2);
+ expect(buttonCount()).toEqual(2);
createComponent({
permissions: {
@@ -149,7 +176,7 @@ describe('Snippet header component', () => {
canCreateSnippet: true,
});
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.findAll(GlButton).length).toEqual(3);
+ expect(buttonCount()).toEqual(3);
});
});
diff --git a/spec/frontend/snippets/components/snippet_visibility_edit_spec.js b/spec/frontend/snippets/components/snippet_visibility_edit_spec.js
index 3151090f388..62d1ac9b476 100644
--- a/spec/frontend/snippets/components/snippet_visibility_edit_spec.js
+++ b/spec/frontend/snippets/components/snippet_visibility_edit_spec.js
@@ -40,7 +40,7 @@ describe('Snippet Visibility Edit component', () => {
const findLink = () => wrapper.find('label').find(GlLink);
const findRadios = () => wrapper.find(GlFormRadioGroup).findAll(GlFormRadio);
const findRadiosData = () =>
- findRadios().wrappers.map(x => {
+ findRadios().wrappers.map((x) => {
return {
value: x.find('input').attributes('value'),
icon: x.find(GlIcon).props('name'),
diff --git a/spec/frontend/snippets/utils/blob_spec.js b/spec/frontend/snippets/utils/blob_spec.js
index c20cf2e6102..4c6cee32614 100644
--- a/spec/frontend/snippets/utils/blob_spec.js
+++ b/spec/frontend/snippets/utils/blob_spec.js
@@ -2,7 +2,7 @@ import { cloneDeep } from 'lodash';
import { decorateBlob, createBlob, diffAll } from '~/snippets/utils/blob';
import { testEntries, createBlobsFromTestEntries } from '../test_utils';
-jest.mock('lodash/uniqueId', () => arg => `${arg}fakeUniqueId`);
+jest.mock('lodash/uniqueId', () => (arg) => `${arg}fakeUniqueId`);
const TEST_RAW_BLOB = {
rawPath: '/test/blob/7/raw',
diff --git a/spec/frontend/static_site_editor/components/edit_area_spec.js b/spec/frontend/static_site_editor/components/edit_area_spec.js
index ed33f93ec51..07097d00cc5 100644
--- a/spec/frontend/static_site_editor/components/edit_area_spec.js
+++ b/spec/frontend/static_site_editor/components/edit_area_spec.js
@@ -1,4 +1,6 @@
+import { nextTick } from 'vue';
import { shallowMount } from '@vue/test-utils';
+import { stubComponent } from 'helpers/stub_component';
import RichContentEditor from '~/vue_shared/components/rich_content_editor/rich_content_editor.vue';
import { EDITOR_TYPES } from '~/vue_shared/components/rich_content_editor/constants';
@@ -22,7 +24,7 @@ import {
imageRoot,
} from '../mock_data';
-jest.mock('~/static_site_editor/services/formatter', () => jest.fn(str => `${str} format-pass`));
+jest.mock('~/static_site_editor/services/formatter', () => jest.fn((str) => `${str} format-pass`));
describe('~/static_site_editor/components/edit_area.vue', () => {
let wrapper;
@@ -30,6 +32,12 @@ describe('~/static_site_editor/components/edit_area.vue', () => {
const savingChanges = true;
const newBody = `new ${body}`;
+ const RichContentEditorStub = stubComponent(RichContentEditor, {
+ methods: {
+ resetInitialValue: jest.fn(),
+ },
+ });
+
const buildWrapper = (propsData = {}) => {
wrapper = shallowMount(EditArea, {
propsData: {
@@ -44,6 +52,7 @@ describe('~/static_site_editor/components/edit_area.vue', () => {
savingChanges,
...propsData,
},
+ stubs: { RichContentEditor: RichContentEditorStub },
});
};
@@ -94,7 +103,7 @@ describe('~/static_site_editor/components/edit_area.vue', () => {
beforeEach(() => {
findRichContentEditor().vm.$emit('input', newBody);
- return wrapper.vm.$nextTick();
+ return nextTick();
});
it('updates parsedSource with new content', () => {
@@ -114,30 +123,21 @@ describe('~/static_site_editor/components/edit_area.vue', () => {
expect(findUnsavedChangesConfirmDialog().props('modified')).toBe(true);
});
- it('sets publish toolbar as not saveable when content changes are rollback', () => {
+ it('sets publish toolbar as not saveable when content changes are rollback', async () => {
findRichContentEditor().vm.$emit('input', formattedBody);
- return wrapper.vm.$nextTick().then(() => {
- expect(findPublishToolbar().props('saveable')).toBe(false);
- });
+ await nextTick();
+ expect(findPublishToolbar().props('saveable')).toBe(false);
});
});
describe('when the mode changes', () => {
- let resetInitialValue;
-
- const setInitialMode = mode => {
+ const setInitialMode = (mode) => {
wrapper.setData({ editorMode: mode });
};
- const buildResetInitialValue = () => {
- resetInitialValue = jest.fn();
- findRichContentEditor().setMethods({ resetInitialValue });
- };
-
afterEach(() => {
setInitialMode(EDITOR_TYPES.wysiwyg);
- resetInitialValue = null;
});
it.each`
@@ -148,21 +148,20 @@ describe('~/static_site_editor/components/edit_area.vue', () => {
'sets editorMode from $initialMode to $targetMode',
({ initialMode, targetMode, resetValue }) => {
setInitialMode(initialMode);
- buildResetInitialValue();
findRichContentEditor().vm.$emit('modeChange', targetMode);
- expect(resetInitialValue).toHaveBeenCalledWith(resetValue);
+ expect(RichContentEditorStub.methods.resetInitialValue).toHaveBeenCalledWith(resetValue);
expect(wrapper.vm.editorMode).toBe(targetMode);
},
);
it('should format the content', () => {
- buildResetInitialValue();
-
findRichContentEditor().vm.$emit('modeChange', EDITOR_TYPES.markdown);
- expect(resetInitialValue).toHaveBeenCalledWith(`${content} format-pass format-pass`);
+ expect(RichContentEditorStub.methods.resetInitialValue).toHaveBeenCalledWith(
+ `${content} format-pass format-pass`,
+ );
});
});
@@ -172,30 +171,27 @@ describe('~/static_site_editor/components/edit_area.vue', () => {
expect(findEditDrawer().props('isOpen')).toBe(false);
});
- it('opens the edit drawer', () => {
+ it('opens the edit drawer', async () => {
findPublishToolbar().vm.$emit('editSettings');
- return wrapper.vm.$nextTick().then(() => {
- expect(findEditDrawer().props('isOpen')).toBe(true);
- });
+ await nextTick();
+ expect(findEditDrawer().props('isOpen')).toBe(true);
});
- it('closes the edit drawer', () => {
+ it('closes the edit drawer', async () => {
findEditDrawer().vm.$emit('close');
- return wrapper.vm.$nextTick().then(() => {
- expect(findEditDrawer().props('isOpen')).toBe(false);
- });
+ await nextTick();
+ expect(findEditDrawer().props('isOpen')).toBe(false);
});
- it('forwards the matter settings when the drawer is open', () => {
+ it('forwards the matter settings when the drawer is open', async () => {
findPublishToolbar().vm.$emit('editSettings');
jest.spyOn(wrapper.vm.parsedSource, 'matter').mockReturnValueOnce(headerSettings);
- return wrapper.vm.$nextTick().then(() => {
- expect(findEditDrawer().props('settings')).toEqual(headerSettings);
- });
+ await nextTick();
+ expect(findEditDrawer().props('settings')).toEqual(headerSettings);
});
it('enables toolbar submit button', () => {
@@ -211,16 +207,15 @@ describe('~/static_site_editor/components/edit_area.vue', () => {
expect(spySyncParsedSource).toHaveBeenCalledWith(newSettings);
});
- it('syncs matter changes to content in markdown mode', () => {
+ it('syncs matter changes to content in markdown mode', async () => {
wrapper.setData({ editorMode: EDITOR_TYPES.markdown });
const newSettings = { title: 'test' };
findEditDrawer().vm.$emit('updateSettings', newSettings);
- return wrapper.vm.$nextTick().then(() => {
- expect(findRichContentEditor().props('content')).toContain('title: test');
- });
+ await nextTick();
+ expect(findRichContentEditor().props('content')).toContain('title: test');
});
});
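
Replacing the deprecated `setMethods` call with `stubComponent(RichContentEditor, { methods: { resetInitialValue: jest.fn() } })` moves the spy onto the stub definition itself, so assertions read straight from `RichContentEditorStub.methods`. A minimal sketch of the same pattern with a hypothetical child component (names assumed for illustration):

import { shallowMount } from '@vue/test-utils';
import { stubComponent } from 'helpers/stub_component'; // same helper the spec imports

// Hypothetical child used only for this sketch.
const ChildEditor = {
  name: 'ChildEditor',
  template: '<div />',
  methods: { reset() {} },
};

const ChildEditorStub = stubComponent(ChildEditor, {
  methods: { reset: jest.fn() },
});

const Parent = {
  components: { ChildEditor },
  template: '<child-editor ref="editor" />',
  methods: {
    resetChild() {
      this.$refs.editor.reset();
    },
  },
};

it('calls reset on the stubbed child', () => {
  const wrapper = shallowMount(Parent, { stubs: { ChildEditor: ChildEditorStub } });

  wrapper.vm.resetChild();

  // The jest.fn() lives on the stub definition, so no setMethods call is needed.
  expect(ChildEditorStub.methods.reset).toHaveBeenCalled();
});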
diff --git a/spec/frontend/static_site_editor/components/edit_meta_controls_spec.js b/spec/frontend/static_site_editor/components/edit_meta_controls_spec.js
index b887570e947..cf82efc3d0b 100644
--- a/spec/frontend/static_site_editor/components/edit_meta_controls_spec.js
+++ b/spec/frontend/static_site_editor/components/edit_meta_controls_spec.js
@@ -35,7 +35,7 @@ describe('~/static_site_editor/components/edit_meta_controls.vue', () => {
const findGlFormInputTitle = () => wrapper.find(GlFormInput);
const findGlDropdownDescriptionTemplate = () => wrapper.find(GlDropdown);
const findAllDropdownItems = () => wrapper.findAll(GlDropdownItem);
- const findDropdownItemByIndex = index => findAllDropdownItems().at(index);
+ const findDropdownItemByIndex = (index) => findAllDropdownItems().at(index);
const findGlFormTextAreaDescription = () => wrapper.find(GlFormTextarea);
diff --git a/spec/frontend/static_site_editor/graphql/resolvers/file_spec.js b/spec/frontend/static_site_editor/graphql/resolvers/file_spec.js
index 24651543650..83ad23f7dcf 100644
--- a/spec/frontend/static_site_editor/graphql/resolvers/file_spec.js
+++ b/spec/frontend/static_site_editor/graphql/resolvers/file_spec.js
@@ -14,7 +14,7 @@ describe('static_site_editor/graphql/resolvers/file', () => {
it('returns file content and title when fetching file successfully', () => {
loadSourceContent.mockResolvedValueOnce({ title, content });
- return fileResolver({ fullPath: projectId }, { path: sourcePath }).then(file => {
+ return fileResolver({ fullPath: projectId }, { path: sourcePath }).then((file) => {
expect(file).toEqual({
__typename: 'File',
title,
diff --git a/spec/frontend/static_site_editor/services/load_source_content_spec.js b/spec/frontend/static_site_editor/services/load_source_content_spec.js
index 54061b7a503..98d437698c4 100644
--- a/spec/frontend/static_site_editor/services/load_source_content_spec.js
+++ b/spec/frontend/static_site_editor/services/load_source_content_spec.js
@@ -16,7 +16,7 @@ describe('loadSourceContent', () => {
beforeEach(() => {
jest.spyOn(Api, 'getRawFile').mockResolvedValue({ data: sourceContent });
- return loadSourceContent({ projectId, sourcePath }).then(_result => {
+ return loadSourceContent({ projectId, sourcePath }).then((_result) => {
result = _result;
});
});
diff --git a/spec/frontend/static_site_editor/services/submit_content_changes_spec.js b/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
index 6c2bff6740a..d4cbc5d235e 100644
--- a/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
+++ b/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
@@ -188,7 +188,7 @@ describe('submitContentChanges', () => {
let result;
beforeEach(() => {
- return submitContentChanges(buildPayload()).then(_result => {
+ return submitContentChanges(buildPayload()).then((_result) => {
result = _result;
});
});
diff --git a/spec/frontend/syntax_highlight_spec.js b/spec/frontend/syntax_highlight_spec.js
index d2fb5983f7b..418679e7d18 100644
--- a/spec/frontend/syntax_highlight_spec.js
+++ b/spec/frontend/syntax_highlight_spec.js
@@ -4,7 +4,7 @@ import $ from 'jquery';
import syntaxHighlight from '~/syntax_highlight';
describe('Syntax Highlighter', () => {
- const stubUserColorScheme = value => {
+ const stubUserColorScheme = (value) => {
if (window.gon == null) {
window.gon = {};
}
diff --git a/spec/frontend/task_list_spec.js b/spec/frontend/task_list_spec.js
index 1261833e3ec..a8bdc506102 100644
--- a/spec/frontend/task_list_spec.js
+++ b/spec/frontend/task_list_spec.js
@@ -93,7 +93,7 @@ describe('TaskList', () => {
});
describe('update', () => {
- it('should disable task list items and make a patch request then enable them again', done => {
+ it('should disable task list items and make a patch request then enable them again', (done) => {
const response = { data: { lock_version: 3 } };
jest.spyOn(taskList, 'enableTaskListItems').mockImplementation(() => {});
jest.spyOn(taskList, 'disableTaskListItems').mockImplementation(() => {});
@@ -137,7 +137,7 @@ describe('TaskList', () => {
});
});
- it('should handle request error and enable task list items', done => {
+ it('should handle request error and enable task list items', (done) => {
const response = { data: { error: 1 } };
jest.spyOn(taskList, 'enableTaskListItems').mockImplementation(() => {});
jest.spyOn(taskList, 'onError').mockImplementation(() => {});
diff --git a/spec/frontend/terraform/components/states_table_actions_spec.js b/spec/frontend/terraform/components/states_table_actions_spec.js
index 264f4b7939a..3f5df8a96f8 100644
--- a/spec/frontend/terraform/components/states_table_actions_spec.js
+++ b/spec/frontend/terraform/components/states_table_actions_spec.js
@@ -1,6 +1,6 @@
import { GlDropdown, GlModal, GlSprintf } from '@gitlab/ui';
import { createLocalVue, shallowMount } from '@vue/test-utils';
-import createMockApollo from 'jest/helpers/mock_apollo_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
import VueApollo from 'vue-apollo';
import StateActions from '~/terraform/components/states_table_actions.vue';
import lockStateMutation from '~/terraform/graphql/mutations/lock_state.mutation.graphql';
diff --git a/spec/frontend/terraform/components/terraform_list_spec.js b/spec/frontend/terraform/components/terraform_list_spec.js
index b31afecc816..fb56a7135a3 100644
--- a/spec/frontend/terraform/components/terraform_list_spec.js
+++ b/spec/frontend/terraform/components/terraform_list_spec.js
@@ -1,6 +1,6 @@
import { GlAlert, GlBadge, GlKeysetPagination, GlLoadingIcon, GlTab } from '@gitlab/ui';
import { createLocalVue, shallowMount } from '@vue/test-utils';
-import createMockApollo from 'jest/helpers/mock_apollo_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
import VueApollo from 'vue-apollo';
import EmptyState from '~/terraform/components/empty_state.vue';
import StatesTable from '~/terraform/components/states_table.vue';
diff --git a/spec/frontend/test_setup.js b/spec/frontend/test_setup.js
index 2a3eddf7b4e..a122b06fdda 100644
--- a/spec/frontend/test_setup.js
+++ b/spec/frontend/test_setup.js
@@ -4,15 +4,18 @@ import 'jquery';
import * as jqueryMatchers from 'custom-jquery-matchers';
import { config as testUtilsConfig } from '@vue/test-utils';
import Translate from '~/vue_shared/translate';
-import { initializeTestTimeout } from './helpers/timeout';
-import { getJSONFixture, loadHTMLFixture, setHTMLFixture } from './helpers/fixtures';
+import { initializeTestTimeout } from './__helpers__/timeout';
+import { getJSONFixture, loadHTMLFixture, setHTMLFixture } from './__helpers__/fixtures';
import { setupManualMocks } from './mocks/mocks_helper';
import customMatchers from './matchers';
-import './helpers/dom_shims';
-import './helpers/jquery';
+import './__helpers__/dom_shims';
+import './__helpers__/jquery';
import '~/commons/bootstrap';
+// This module has some fairly decent visual test coverage in its own repository.
+jest.mock('@gitlab/favicon-overlay');
+
process.on('unhandledRejection', global.promiseRejectionHandler);
setupManualMocks();
@@ -25,7 +28,7 @@ afterEach(() =>
}),
);
-initializeTestTimeout(process.env.CI ? 6000 : 5000);
+initializeTestTimeout(process.env.CI ? 6000 : 500);
Vue.config.devtools = false;
Vue.config.productionTip = false;
@@ -58,8 +61,18 @@ Object.entries(jqueryMatchers).forEach(([matcherName, matcherFactory]) => {
expect.extend(customMatchers);
-// Tech debt issue TBD
-testUtilsConfig.logModifiedComponents = false;
+testUtilsConfig.deprecationWarningHandler = (method, message) => {
+ const ALLOWED_DEPRECATED_METHODS = [
+ // https://gitlab.com/gitlab-org/gitlab/-/issues/295679
+ 'finding components with `find` or `get`',
+
+ // https://gitlab.com/gitlab-org/gitlab/-/issues/295680
+ 'finding components with `findAll`',
+ ];
+ if (!ALLOWED_DEPRECATED_METHODS.includes(method)) {
+ global.console.error(message);
+ }
+};
Object.assign(global, {
requestIdleCallback(cb) {
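
The switch from relative `./helpers/...` imports to bare `helpers/...` specifiers (and the `__helpers__` directory seen above) relies on Jest resolving that prefix to the shared frontend helpers directory. A sketch of what such a mapping looks like in a Jest config; the exact paths are assumed for illustration, not taken from the repository:

// jest.config.js (fragment; paths assumed for illustration)
module.exports = {
  moduleNameMapper: {
    // Bare "helpers/..." imports resolve to the shared frontend test helpers.
    '^helpers/(.*)$': '<rootDir>/spec/frontend/__helpers__/$1',
    // A "jest/..." alias for spec/frontend also appears in the removed imports above.
    '^jest/(.*)$': '<rootDir>/spec/frontend/$1',
  },
};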
diff --git a/spec/frontend/toggle_buttons_spec.js b/spec/frontend/toggle_buttons_spec.js
index 09a4bd53c09..435fd35744f 100644
--- a/spec/frontend/toggle_buttons_spec.js
+++ b/spec/frontend/toggle_buttons_spec.js
@@ -1,6 +1,6 @@
import $ from 'jquery';
+import waitForPromises from 'helpers/wait_for_promises';
import setupToggleButtons from '~/toggle_buttons';
-import waitForPromises from './helpers/wait_for_promises';
function generateMarkup(isChecked = true) {
return `
diff --git a/spec/frontend/tooltips/components/tooltips_spec.js b/spec/frontend/tooltips/components/tooltips_spec.js
index 50848ca2978..3a894427643 100644
--- a/spec/frontend/tooltips/components/tooltips_spec.js
+++ b/spec/frontend/tooltips/components/tooltips_spec.js
@@ -18,7 +18,7 @@ describe('tooltips/components/tooltips.vue', () => {
...attributes,
};
- Object.keys(defaults).forEach(name => {
+ Object.keys(defaults).forEach((name) => {
target.setAttribute(name, defaults[name]);
});
@@ -51,6 +51,16 @@ describe('tooltips/components/tooltips.vue', () => {
expect(wrapper.find(GlTooltip).props('target')).toBe(target);
});
+ it('does not attach a tooltip to a target with empty title', async () => {
+ target.setAttribute('title', '');
+
+ wrapper.vm.addTooltips([target]);
+
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find(GlTooltip).exists()).toBe(false);
+ });
+
it('does not attach a tooltip twice to the same element', async () => {
wrapper.vm.addTooltips([target]);
wrapper.vm.addTooltips([target]);
diff --git a/spec/frontend/tooltips/index_spec.js b/spec/frontend/tooltips/index_spec.js
index 511003fdb8f..bff9ee0c7f2 100644
--- a/spec/frontend/tooltips/index_spec.js
+++ b/spec/frontend/tooltips/index_spec.js
@@ -20,7 +20,7 @@ describe('tooltips/index.js', () => {
title: 'default title',
};
- Object.keys(attributes).forEach(name => {
+ Object.keys(attributes).forEach((name) => {
target.setAttribute(name, attributes[name]);
});
@@ -42,11 +42,11 @@ describe('tooltips/index.js', () => {
};
beforeEach(() => {
- window.gon.glTooltipsEnabled = true;
+ window.gon.features = { glTooltips: true };
});
afterEach(() => {
- document.body.childNodes.forEach(node => node.remove());
+ document.body.childNodes.forEach((node) => node.remove());
destroy();
});
@@ -149,7 +149,7 @@ describe('tooltips/index.js', () => {
describe('when glTooltipsEnabled feature flag is disabled', () => {
beforeEach(() => {
- window.gon.glTooltipsEnabled = false;
+ window.gon.features.glTooltips = false;
});
it.each`
diff --git a/spec/frontend/tracking_spec.js b/spec/frontend/tracking_spec.js
index d4b97532cdd..a516a4a8269 100644
--- a/spec/frontend/tracking_spec.js
+++ b/spec/frontend/tracking_spec.js
@@ -1,4 +1,4 @@
-import { setHTMLFixture } from './helpers/fixtures';
+import { setHTMLFixture } from 'helpers/fixtures';
import Tracking, { initUserTracking, initDefaultTrackers } from '~/tracking';
describe('Tracking', () => {
diff --git a/spec/frontend/transfer_edit_spec.js b/spec/frontend/transfer_edit_spec.js
index dc9455a569d..ad8c9c68f37 100644
--- a/spec/frontend/transfer_edit_spec.js
+++ b/spec/frontend/transfer_edit_spec.js
@@ -13,41 +13,19 @@ describe('setupTransferEdit', () => {
});
it('disables submit button on load', () => {
- expect(
- $(formSelector)
- .find(':submit')
- .prop('disabled'),
- ).toBe(true);
+ expect($(formSelector).find(':submit').prop('disabled')).toBe(true);
});
it('enables submit button when selection changes to non-empty value', () => {
- const nonEmptyValue = $(formSelector)
- .find(targetSelector)
- .find('option')
- .not(':empty')
- .val();
- $(formSelector)
- .find(targetSelector)
- .val(nonEmptyValue)
- .trigger('change');
-
- expect(
- $(formSelector)
- .find(':submit')
- .prop('disabled'),
- ).toBeFalsy();
+ const nonEmptyValue = $(formSelector).find(targetSelector).find('option').not(':empty').val();
+ $(formSelector).find(targetSelector).val(nonEmptyValue).trigger('change');
+
+ expect($(formSelector).find(':submit').prop('disabled')).toBeFalsy();
});
it('disables submit button when selection changes to empty value', () => {
- $(formSelector)
- .find(targetSelector)
- .val('')
- .trigger('change');
-
- expect(
- $(formSelector)
- .find(':submit')
- .prop('disabled'),
- ).toBe(true);
+ $(formSelector).find(targetSelector).val('').trigger('change');
+
+ expect($(formSelector).find(':submit').prop('disabled')).toBe(true);
});
});
diff --git a/spec/frontend/user_lists/components/add_user_modal_spec.js b/spec/frontend/user_lists/components/add_user_modal_spec.js
index 82ce195d7cd..c9ad40ed228 100644
--- a/spec/frontend/user_lists/components/add_user_modal_spec.js
+++ b/spec/frontend/user_lists/components/add_user_modal_spec.js
@@ -4,7 +4,7 @@ import AddUserModal from '~/user_lists/components/add_user_modal.vue';
describe('Add User Modal', () => {
let wrapper;
- const click = testId => wrapper.find(`[data-testid="${testId}"]`).trigger('click');
+ const click = (testId) => wrapper.find(`[data-testid="${testId}"]`).trigger('click');
beforeEach(() => {
wrapper = mount(AddUserModal, {
diff --git a/spec/frontend/user_lists/components/edit_user_list_spec.js b/spec/frontend/user_lists/components/edit_user_list_spec.js
index 51a38e12916..958e86ac050 100644
--- a/spec/frontend/user_lists/components/edit_user_list_spec.js
+++ b/spec/frontend/user_lists/components/edit_user_list_spec.js
@@ -19,9 +19,9 @@ localVue.use(Vuex);
describe('user_lists/components/edit_user_list', () => {
let wrapper;
- const setInputValue = value => wrapper.find('[data-testid="user-list-name"]').setValue(value);
+ const setInputValue = (value) => wrapper.find('[data-testid="user-list-name"]').setValue(value);
- const click = button => wrapper.find(`[data-testid="${button}"]`).trigger('click');
+ const click = (button) => wrapper.find(`[data-testid="${button}"]`).trigger('click');
const clickSave = () => click('save-user-list');
const destroy = () => wrapper?.destroy();
diff --git a/spec/frontend/user_lists/components/new_user_list_spec.js b/spec/frontend/user_lists/components/new_user_list_spec.js
index 62fb0ca0859..421b49f40e9 100644
--- a/spec/frontend/user_lists/components/new_user_list_spec.js
+++ b/spec/frontend/user_lists/components/new_user_list_spec.js
@@ -18,9 +18,9 @@ localVue.use(Vuex);
describe('user_lists/components/new_user_list', () => {
let wrapper;
- const setInputValue = value => wrapper.find('[data-testid="user-list-name"]').setValue(value);
+ const setInputValue = (value) => wrapper.find('[data-testid="user-list-name"]').setValue(value);
- const click = button => wrapper.find(`[data-testid="${button}"]`).trigger('click');
+ const click = (button) => wrapper.find(`[data-testid="${button}"]`).trigger('click');
beforeEach(() => {
wrapper = mount(NewUserList, {
diff --git a/spec/frontend/user_lists/components/user_list_spec.js b/spec/frontend/user_lists/components/user_list_spec.js
index 5f9b7967846..cd00c421154 100644
--- a/spec/frontend/user_lists/components/user_list_spec.js
+++ b/spec/frontend/user_lists/components/user_list_spec.js
@@ -16,7 +16,7 @@ Vue.use(Vuex);
describe('User List', () => {
let wrapper;
- const click = testId => wrapper.find(`[data-testid="${testId}"]`).trigger('click');
+ const click = (testId) => wrapper.find(`[data-testid="${testId}"]`).trigger('click');
const findUserIds = () => wrapper.findAll('[data-testid="user-id"]');
@@ -38,7 +38,7 @@ describe('User List', () => {
beforeEach(() => {
Api.fetchFeatureFlagUserList.mockReturnValue(
- new Promise(resolve => {
+ new Promise((resolve) => {
resolveFn = resolve;
}),
);
@@ -110,15 +110,15 @@ describe('User List', () => {
});
it('should add user IDs to the user list', () => {
- newIds.forEach(id => expect(receivedUserIds).toContain(id));
+ newIds.forEach((id) => expect(receivedUserIds).toContain(id));
});
it('should not remove existing user ids', () => {
- userIds.forEach(id => expect(receivedUserIds).toContain(id));
+ userIds.forEach((id) => expect(receivedUserIds).toContain(id));
});
it('should not submit empty IDs', () => {
- parsedReceivedUserIds.forEach(id => expect(id).not.toBe(''));
+ parsedReceivedUserIds.forEach((id) => expect(id).not.toBe(''));
});
it('should not create duplicate entries', () => {
@@ -127,8 +127,8 @@ describe('User List', () => {
it('should display the new IDs', () => {
const userIdWrappers = findUserIds();
- newIds.forEach(id => {
- const userIdWrapper = userIdWrappers.wrappers.find(w => w.text() === id);
+ newIds.forEach((id) => {
+ const userIdWrapper = userIdWrappers.wrappers.find((w) => w.text() === id);
expect(userIdWrapper.exists()).toBe(true);
});
});
@@ -150,7 +150,7 @@ describe('User List', () => {
it('should not display the deleted user', () => {
const userIdWrappers = findUserIds();
- const userIdWrapper = userIdWrappers.wrappers.find(w => w.text() === userIds[0]);
+ const userIdWrapper = userIdWrappers.wrappers.find((w) => w.text() === userIds[0]);
expect(userIdWrapper).toBeUndefined();
});
});
diff --git a/spec/frontend/user_lists/store/show/mutations_spec.js b/spec/frontend/user_lists/store/show/mutations_spec.js
index 364cc6a0225..cd379641ee1 100644
--- a/spec/frontend/user_lists/store/show/mutations_spec.js
+++ b/spec/frontend/user_lists/store/show/mutations_spec.js
@@ -57,7 +57,7 @@ describe('User Lists Show Mutations', () => {
});
it('adds the new IDs to the state unless empty', () => {
- newIds.filter(id => id).forEach(id => expect(mockState.userIds).toContain(id));
+ newIds.filter((id) => id).forEach((id) => expect(mockState.userIds).toContain(id));
});
it('does not add duplicate IDs to the state', () => {
@@ -80,7 +80,9 @@ describe('User Lists Show Mutations', () => {
});
it('should leave the rest of the IDs alone', () => {
- userIds.filter(id => id !== removedId).forEach(id => expect(mockState.userIds).toContain(id));
+ userIds
+ .filter((id) => id !== removedId)
+ .forEach((id) => expect(mockState.userIds).toContain(id));
});
});
});
diff --git a/spec/frontend/user_popovers_spec.js b/spec/frontend/user_popovers_spec.js
index 0367b9cc924..1e0c4dd29ee 100644
--- a/spec/frontend/user_popovers_spec.js
+++ b/spec/frontend/user_popovers_spec.js
@@ -26,12 +26,12 @@ describe('User Popovers', () => {
loadFixtures(fixtureTemplate);
const usersCacheSpy = () => Promise.resolve(dummyUser);
- jest.spyOn(UsersCache, 'retrieveById').mockImplementation(userId => usersCacheSpy(userId));
+ jest.spyOn(UsersCache, 'retrieveById').mockImplementation((userId) => usersCacheSpy(userId));
const userStatusCacheSpy = () => Promise.resolve(dummyUserStatus);
jest
.spyOn(UsersCache, 'retrieveStatusById')
- .mockImplementation(userId => userStatusCacheSpy(userId));
+ .mockImplementation((userId) => userStatusCacheSpy(userId));
popovers = initUserPopovers(document.querySelectorAll(selector));
});
diff --git a/spec/frontend/version_check_image_spec.js b/spec/frontend/version_check_image_spec.js
index 2ab157105a1..13bd104a91c 100644
--- a/spec/frontend/version_check_image_spec.js
+++ b/spec/frontend/version_check_image_spec.js
@@ -1,6 +1,6 @@
import $ from 'jquery';
+import ClassSpecHelper from 'helpers/class_spec_helper';
import VersionCheckImage from '~/version_check_image';
-import ClassSpecHelper from './helpers/class_spec_helper';
describe('VersionCheckImage', () => {
let testContext;
@@ -19,7 +19,7 @@ describe('VersionCheckImage', () => {
it('registers an error event', () => {
jest.spyOn($.prototype, 'on').mockImplementation(() => {});
// eslint-disable-next-line func-names
- jest.spyOn($.prototype, 'off').mockImplementation(function() {
+ jest.spyOn($.prototype, 'off').mockImplementation(function () {
return this;
});
diff --git a/spec/frontend/vue_alerts_spec.js b/spec/frontend/vue_alerts_spec.js
index b52737e6106..16eb2d44e4d 100644
--- a/spec/frontend/vue_alerts_spec.js
+++ b/spec/frontend/vue_alerts_spec.js
@@ -27,7 +27,7 @@ describe('VueAlerts', () => {
setHTMLFixture(
alerts
.map(
- x => `
+ (x) => `
<div class="js-vue-alert"
data-dismissible="${x.dismissible}"
data-title="${x.title}"
@@ -42,15 +42,15 @@ describe('VueAlerts', () => {
const findJsHooks = () => document.querySelectorAll('.js-vue-alert');
const findAlerts = () => document.querySelectorAll('.gl-alert');
- const findAlertDismiss = alert => alert.querySelector('.gl-alert-dismiss');
+ const findAlertDismiss = (alert) => alert.querySelector('.gl-alert-dismiss');
- const serializeAlert = alert => ({
+ const serializeAlert = (alert) => ({
title: alert.querySelector('.gl-alert-title').textContent.trim(),
html: alert.querySelector('.gl-alert-body div').innerHTML,
dismissible: Boolean(alert.querySelector('.gl-alert-dismiss')),
primaryButtonText: alert.querySelector('.gl-alert-action').textContent.trim(),
primaryButtonLink: alert.querySelector('.gl-alert-action').href,
- variant: [...alert.classList].find(x => x.match('gl-alert-')).replace('gl-alert-', ''),
+ variant: [...alert.classList].find((x) => x.match('gl-alert-')).replace('gl-alert-', ''),
});
it('starts with only JsHooks', () => {
diff --git a/spec/frontend/vue_mr_widget/components/approvals/approvals_spec.js b/spec/frontend/vue_mr_widget/components/approvals/approvals_spec.js
index 65ca3639dcc..fd8b0dddc61 100644
--- a/spec/frontend/vue_mr_widget/components/approvals/approvals_spec.js
+++ b/spec/frontend/vue_mr_widget/components/approvals/approvals_spec.js
@@ -15,10 +15,10 @@ import eventHub from '~/vue_merge_request_widget/event_hub';
jest.mock('~/flash');
const TEST_HELP_PATH = 'help/path';
-const testApprovedBy = () => [1, 7, 10].map(id => ({ id }));
+const testApprovedBy = () => [1, 7, 10].map((id) => ({ id }));
const testApprovals = () => ({
approved: false,
- approved_by: testApprovedBy().map(user => ({ user })),
+ approved_by: testApprovedBy().map((user) => ({ user })),
approval_rules_left: [],
approvals_left: 4,
suggested_approvers: [],
@@ -31,10 +31,7 @@ const testApprovalRulesResponse = () => ({ rules: [{ id: 2 }] });
// For some reason, the `Promise.resolve()` needs to be deferred
// or the timing doesn't work.
const tick = () => Promise.resolve();
-const waitForTick = done =>
- tick()
- .then(done)
- .catch(done.fail);
+const waitForTick = (done) => tick().then(done).catch(done.fail);
describe('MRWidget approvals', () => {
let wrapper;
@@ -117,7 +114,7 @@ describe('MRWidget approvals', () => {
});
describe('when fetch approvals error', () => {
- beforeEach(done => {
+ beforeEach((done) => {
jest.spyOn(service, 'fetchApprovals').mockReturnValue(Promise.reject());
createComponent();
waitForTick(done);
@@ -134,7 +131,7 @@ describe('MRWidget approvals', () => {
describe('action button', () => {
describe('when mr is closed', () => {
- beforeEach(done => {
+ beforeEach((done) => {
mr.isOpen = false;
mr.approvals.user_has_approved = false;
mr.approvals.user_can_approve = true;
@@ -149,7 +146,7 @@ describe('MRWidget approvals', () => {
});
describe('when user cannot approve', () => {
- beforeEach(done => {
+ beforeEach((done) => {
mr.approvals.user_has_approved = false;
mr.approvals.user_can_approve = false;
@@ -169,7 +166,7 @@ describe('MRWidget approvals', () => {
});
describe('and MR is unapproved', () => {
- beforeEach(done => {
+ beforeEach((done) => {
createComponent();
waitForTick(done);
});
@@ -189,7 +186,7 @@ describe('MRWidget approvals', () => {
});
describe('with no approvers', () => {
- beforeEach(done => {
+ beforeEach((done) => {
mr.approvals.approved_by = [];
createComponent();
waitForTick(done);
@@ -205,7 +202,7 @@ describe('MRWidget approvals', () => {
});
describe('with approvers', () => {
- beforeEach(done => {
+ beforeEach((done) => {
mr.approvals.approved_by = [{ user: { id: 7 } }];
createComponent();
waitForTick(done);
@@ -222,7 +219,7 @@ describe('MRWidget approvals', () => {
});
describe('when approve action is clicked', () => {
- beforeEach(done => {
+ beforeEach((done) => {
createComponent();
waitForTick(done);
});
@@ -241,7 +238,7 @@ describe('MRWidget approvals', () => {
});
describe('and after loading', () => {
- beforeEach(done => {
+ beforeEach((done) => {
findAction().vm.$emit('click');
waitForTick(done);
});
@@ -260,7 +257,7 @@ describe('MRWidget approvals', () => {
});
describe('and error', () => {
- beforeEach(done => {
+ beforeEach((done) => {
jest.spyOn(service, 'approveMergeRequest').mockReturnValue(Promise.reject());
findAction().vm.$emit('click');
waitForTick(done);
@@ -274,7 +271,7 @@ describe('MRWidget approvals', () => {
});
describe('when user has approved', () => {
- beforeEach(done => {
+ beforeEach((done) => {
mr.approvals.user_has_approved = true;
mr.approvals.user_can_approve = false;
@@ -292,7 +289,7 @@ describe('MRWidget approvals', () => {
describe('when revoke action is clicked', () => {
describe('and successful', () => {
- beforeEach(done => {
+ beforeEach((done) => {
findAction().vm.$emit('click');
waitForTick(done);
});
@@ -311,7 +308,7 @@ describe('MRWidget approvals', () => {
});
describe('and error', () => {
- beforeEach(done => {
+ beforeEach((done) => {
jest.spyOn(service, 'unapproveMergeRequest').mockReturnValue(Promise.reject());
findAction().vm.$emit('click');
waitForTick(done);
@@ -334,7 +331,7 @@ describe('MRWidget approvals', () => {
});
describe('and can approve', () => {
- beforeEach(done => {
+ beforeEach((done) => {
mr.approvals.user_can_approve = true;
createComponent();
@@ -351,7 +348,7 @@ describe('MRWidget approvals', () => {
});
describe('and cannot approve', () => {
- beforeEach(done => {
+ beforeEach((done) => {
mr.approvals.user_can_approve = false;
createComponent();
@@ -370,7 +367,7 @@ describe('MRWidget approvals', () => {
});
describe('approvals summary', () => {
- beforeEach(done => {
+ beforeEach((done) => {
createComponent();
waitForTick(done);
});
diff --git a/spec/frontend/vue_mr_widget/components/approvals/approvals_summary_spec.js b/spec/frontend/vue_mr_widget/components/approvals/approvals_summary_spec.js
index 822d075f28f..b8ba619fbb1 100644
--- a/spec/frontend/vue_mr_widget/components/approvals/approvals_summary_spec.js
+++ b/spec/frontend/vue_mr_widget/components/approvals/approvals_summary_spec.js
@@ -4,7 +4,7 @@ import ApprovalsSummary from '~/vue_merge_request_widget/components/approvals/ap
import { toNounSeriesText } from '~/lib/utils/grammar';
import UserAvatarList from '~/vue_shared/components/user_avatar/user_avatar_list.vue';
-const testApprovers = () => Array.from({ length: 5 }, (_, i) => i).map(id => ({ id }));
+const testApprovers = () => Array.from({ length: 5 }, (_, i) => i).map((id) => ({ id }));
const testRulesLeft = () => ['Lorem', 'Ipsum', 'dolar & sit'];
const TEST_APPROVALS_LEFT = 3;
diff --git a/spec/frontend/vue_mr_widget/components/artifacts_list_app_spec.js b/spec/frontend/vue_mr_widget/components/artifacts_list_app_spec.js
index 1401308f7f0..2e1e21299b3 100644
--- a/spec/frontend/vue_mr_widget/components/artifacts_list_app_spec.js
+++ b/spec/frontend/vue_mr_widget/components/artifacts_list_app_spec.js
@@ -1,19 +1,20 @@
-import { mount, createLocalVue } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import Vuex from 'vuex';
+import Vue, { nextTick } from 'vue';
import MockAdapter from 'axios-mock-adapter';
import { GlLoadingIcon } from '@gitlab/ui';
-import { TEST_HOST } from 'helpers/test_constants';
+import { TEST_HOST as FAKE_ENDPOINT } from 'helpers/test_constants';
import axios from '~/lib/utils/axios_utils';
import ArtifactsListApp from '~/vue_merge_request_widget/components/artifacts_list_app.vue';
-import createStore from '~/vue_merge_request_widget/stores/artifacts_list';
-import { artifactsList } from './mock_data';
+import { getStoreConfig } from '~/vue_merge_request_widget/stores/artifacts_list';
+import { artifacts } from '../mock_data';
+
+Vue.use(Vuex);
describe('Merge Requests Artifacts list app', () => {
let wrapper;
+ let store;
let mock;
- const store = createStore();
- const localVue = createLocalVue();
- localVue.use(Vuex);
const actionSpies = {
fetchArtifacts: jest.fn(),
@@ -29,15 +30,20 @@ describe('Merge Requests Artifacts list app', () => {
});
const createComponent = () => {
+ const storeConfig = getStoreConfig();
+ store = new Vuex.Store({
+ ...storeConfig,
+ actions: {
+ ...storeConfig.actions,
+ ...actionSpies,
+ },
+ });
+
wrapper = mount(ArtifactsListApp, {
propsData: {
- endpoint: TEST_HOST,
+ endpoint: FAKE_ENDPOINT,
},
store,
- methods: {
- ...actionSpies,
- },
- localVue,
});
};
@@ -50,7 +56,7 @@ describe('Merge Requests Artifacts list app', () => {
beforeEach(() => {
createComponent();
store.dispatch('requestArtifacts');
- return wrapper.vm.$nextTick();
+ return nextTick();
});
it('renders a loading icon', () => {
@@ -72,12 +78,12 @@ describe('Merge Requests Artifacts list app', () => {
describe('with results', () => {
beforeEach(() => {
createComponent();
- mock.onGet(wrapper.vm.$store.state.endpoint).reply(200, artifactsList, {});
+ mock.onGet(FAKE_ENDPOINT).reply(200, artifacts, {});
store.dispatch('receiveArtifactsSuccess', {
- data: artifactsList,
+ data: artifacts,
status: 200,
});
- return wrapper.vm.$nextTick();
+ return nextTick();
});
it('renders a title with the number of artifacts', () => {
@@ -91,11 +97,11 @@ describe('Merge Requests Artifacts list app', () => {
});
describe('on click', () => {
- it('renders the list of artifacts', () => {
+ it('renders the list of artifacts', async () => {
findTitle().trigger('click');
- wrapper.vm.$nextTick(() => {
- expect(findTableRows().length).toEqual(2);
- });
+ await nextTick();
+
+ expect(findTableRows().length).toEqual(2);
});
});
});
@@ -103,9 +109,9 @@ describe('Merge Requests Artifacts list app', () => {
describe('with error', () => {
beforeEach(() => {
createComponent();
- mock.onGet(wrapper.vm.$store.state.endpoint).reply(500, {}, {});
+ mock.onGet(FAKE_ENDPOINT).reply(500, {}, {});
store.dispatch('receiveArtifactsError');
- return wrapper.vm.$nextTick();
+ return nextTick();
});
it('renders the error state', () => {
diff --git a/spec/frontend/vue_mr_widget/components/artifacts_list_spec.js b/spec/frontend/vue_mr_widget/components/artifacts_list_spec.js
index 1b1624e3e8f..fd432381512 100644
--- a/spec/frontend/vue_mr_widget/components/artifacts_list_spec.js
+++ b/spec/frontend/vue_mr_widget/components/artifacts_list_spec.js
@@ -1,16 +1,16 @@
import { shallowMount } from '@vue/test-utils';
import { GlLink } from '@gitlab/ui';
import ArtifactsList from '~/vue_merge_request_widget/components/artifacts_list.vue';
-import { artifactsList } from './mock_data';
+import { artifacts } from '../mock_data';
describe('Artifacts List', () => {
let wrapper;
const data = {
- artifacts: artifactsList,
+ artifacts,
};
- const mountComponent = props => {
+ const mountComponent = (props) => {
wrapper = shallowMount(ArtifactsList, {
propsData: {
...props,
@@ -39,20 +39,10 @@ describe('Artifacts List', () => {
});
it('renders job url', () => {
- expect(
- wrapper
- .findAll(GlLink)
- .at(1)
- .attributes('href'),
- ).toEqual(data.artifacts[0].job_path);
+ expect(wrapper.findAll(GlLink).at(1).attributes('href')).toEqual(data.artifacts[0].job_path);
});
it('renders job name', () => {
- expect(
- wrapper
- .findAll(GlLink)
- .at(1)
- .text(),
- ).toEqual(data.artifacts[0].job_name);
+ expect(wrapper.findAll(GlLink).at(1).text()).toEqual(data.artifacts[0].job_name);
});
});
diff --git a/spec/frontend/vue_mr_widget/components/mock_data.js b/spec/frontend/vue_mr_widget/components/mock_data.js
deleted file mode 100644
index 73e254f2b1a..00000000000
--- a/spec/frontend/vue_mr_widget/components/mock_data.js
+++ /dev/null
@@ -1,14 +0,0 @@
-export const artifactsList = [
- {
- text: 'result.txt',
- url: 'bar',
- job_name: 'generate-artifact',
- job_path: 'bar',
- },
- {
- text: 'foo.txt',
- url: 'foo',
- job_name: 'foo-artifact',
- job_path: 'foo',
- },
-];
diff --git a/spec/frontend/vue_mr_widget/components/mr_collapsible_extension_spec.js b/spec/frontend/vue_mr_widget/components/mr_collapsible_extension_spec.js
index 8a604355625..ba2a8ee0a41 100644
--- a/spec/frontend/vue_mr_widget/components/mr_collapsible_extension_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_collapsible_extension_spec.js
@@ -8,7 +8,7 @@ describe('Merge Request Collapsible Extension', () => {
title: 'View artifacts',
};
- const mountComponent = props => {
+ const mountComponent = (props) => {
wrapper = mount(MrCollapsibleSection, {
propsData: {
...props,
@@ -65,18 +65,8 @@ describe('Merge Request Collapsible Extension', () => {
});
it('renders the buttons disabled', () => {
- expect(
- wrapper
- .findAll('button')
- .at(0)
- .attributes('disabled'),
- ).toEqual('disabled');
- expect(
- wrapper
- .findAll('button')
- .at(1)
- .attributes('disabled'),
- ).toEqual('disabled');
+ expect(wrapper.findAll('button').at(0).attributes('disabled')).toEqual('disabled');
+ expect(wrapper.findAll('button').at(1).attributes('disabled')).toEqual('disabled');
});
it('renders loading spinner', () => {
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_alert_message_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_alert_message_spec.js
index f78fcfb52b4..720ce613b85 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_alert_message_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_alert_message_spec.js
@@ -19,7 +19,7 @@ describe('MrWidgetAlertMessage', () => {
});
describe('when type is not provided', () => {
- it('should render a red message', done => {
+ it('should render a red message', (done) => {
wrapper.vm.$nextTick(() => {
expect(wrapper.classes()).toContain('danger_message');
expect(wrapper.classes()).not.toContain('warning_message');
@@ -29,7 +29,7 @@ describe('MrWidgetAlertMessage', () => {
});
describe('when type === "danger"', () => {
- it('should render a red message', done => {
+ it('should render a red message', (done) => {
wrapper.setProps({ type: 'danger' });
wrapper.vm.$nextTick(() => {
expect(wrapper.classes()).toContain('danger_message');
@@ -40,7 +40,7 @@ describe('MrWidgetAlertMessage', () => {
});
describe('when type === "warning"', () => {
- it('should render a red message', done => {
+ it('should render a red message', (done) => {
wrapper.setProps({ type: 'warning' });
wrapper.vm.$nextTick(() => {
expect(wrapper.classes()).toContain('warning_message');
@@ -51,7 +51,7 @@ describe('MrWidgetAlertMessage', () => {
});
describe('when helpPath is not provided', () => {
- it('should not render a help icon/link', done => {
+ it('should not render a help icon/link', (done) => {
wrapper.vm.$nextTick(() => {
const link = wrapper.find(GlLink);
@@ -62,7 +62,7 @@ describe('MrWidgetAlertMessage', () => {
});
describe('when helpPath is provided', () => {
- it('should render a help icon/link', done => {
+ it('should render a help icon/link', (done) => {
wrapper.setProps({ helpPath: '/path/to/help/docs' });
wrapper.vm.$nextTick(() => {
const link = wrapper.find(GlLink);
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js
index f9b6ac721d2..db884dfe015 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js
@@ -187,7 +187,7 @@ describe('MRWidgetHeader', () => {
expect(link.getAttribute('href')).toBeNull();
});
- it('renders web ide button with blank query string if target & source project branch', done => {
+ it('renders web ide button with blank query string if target & source project branch', (done) => {
vm.mr.targetProjectFullPath = 'root/gitlab-ce';
vm.$nextTick(() => {
@@ -202,7 +202,7 @@ describe('MRWidgetHeader', () => {
});
});
- it('renders web ide button with relative URL', done => {
+ it('renders web ide button with relative URL', (done) => {
gon.relative_url_root = '/gitlab';
vm.mr.iid = 2;
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_icon_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_icon_spec.js
index ea8b33495ab..4bcae904ddc 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_icon_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_icon_spec.js
@@ -20,7 +20,7 @@ describe('MrWidgetIcon', () => {
});
it('renders icon and container', () => {
- expect(wrapper.is('.circle-icon-container')).toBe(true);
+ expect(wrapper.element.className).toContain('circle-icon-container');
expect(wrapper.find(GlIcon).props('name')).toEqual(TEST_ICON);
});
});
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_memory_usage_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_memory_usage_spec.js
index 7a932feb3a7..79a0dd1e760 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_memory_usage_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_memory_usage_spec.js
@@ -153,13 +153,13 @@ describe('MemoryUsage', () => {
describe('loadMetrics', () => {
const returnServicePromise = () =>
- new Promise(resolve => {
+ new Promise((resolve) => {
resolve({
data: metricsMockData,
});
});
- it('should load metrics data using MRWidgetService', done => {
+ it('should load metrics data using MRWidgetService', (done) => {
jest.spyOn(MRWidgetService, 'fetchMetrics').mockReturnValue(returnServicePromise(true));
jest.spyOn(vm, 'computeGraphData').mockImplementation(() => {});
@@ -179,7 +179,7 @@ describe('MemoryUsage', () => {
expect(el.querySelector('.js-usage-info')).toBeDefined();
});
- it('should show loading metrics message while metrics are being loaded', done => {
+ it('should show loading metrics message while metrics are being loaded', (done) => {
vm.loadingMetrics = true;
vm.hasMetrics = false;
vm.loadFailed = false;
@@ -194,7 +194,7 @@ describe('MemoryUsage', () => {
});
});
- it('should show deployment memory usage when metrics are loaded', done => {
+ it('should show deployment memory usage when metrics are loaded', (done) => {
// ignore BoostrapVue warnings
jest.spyOn(console, 'warn').mockImplementation();
@@ -210,7 +210,7 @@ describe('MemoryUsage', () => {
});
});
- it('should show failure message when metrics loading failed', done => {
+ it('should show failure message when metrics loading failed', (done) => {
vm.loadingMetrics = false;
vm.hasMetrics = false;
vm.loadFailed = true;
@@ -223,7 +223,7 @@ describe('MemoryUsage', () => {
});
});
- it('should show metrics unavailable message when metrics loading failed', done => {
+ it('should show metrics unavailable message when metrics loading failed', (done) => {
vm.loadingMetrics = false;
vm.hasMetrics = false;
vm.loadFailed = false;
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_container_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_container_spec.js
index d67f1adadf2..85468c5b0db 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_container_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_container_spec.js
@@ -46,7 +46,7 @@ describe('MrWidgetPipelineContainer', () => {
});
it('renders deployments', () => {
- const expectedProps = mockStore.deployments.map(dep =>
+ const expectedProps = mockStore.deployments.map((dep) =>
expect.objectContaining({
deployment: dep,
showMetrics: false,
@@ -55,7 +55,7 @@ describe('MrWidgetPipelineContainer', () => {
const deployments = wrapper.findAll('.mr-widget-extension .js-pre-deployment');
- expect(deployments.wrappers.map(x => x.props())).toEqual(expectedProps);
+ expect(deployments.wrappers.map((x) => x.props())).toEqual(expectedProps);
});
});
@@ -79,7 +79,7 @@ describe('MrWidgetPipelineContainer', () => {
});
it('renders deployments', () => {
- const expectedProps = mockStore.postMergeDeployments.map(dep =>
+ const expectedProps = mockStore.postMergeDeployments.map((dep) =>
expect.objectContaining({
deployment: dep,
showMetrics: true,
@@ -88,12 +88,14 @@ describe('MrWidgetPipelineContainer', () => {
const deployments = wrapper.findAll('.mr-widget-extension .js-post-deployment');
- expect(deployments.wrappers.map(x => x.props())).toEqual(expectedProps);
+ expect(deployments.wrappers.map((x) => x.props())).toEqual(expectedProps);
});
});
describe('with artifacts path', () => {
it('renders the artifacts app', () => {
+ factory();
+
expect(wrapper.find(ArtifactsApp).isVisible()).toBe(true);
});
});
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_spec.js
index 7ecd8629607..3e5ab5cd32d 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_spec.js
@@ -72,21 +72,13 @@ describe('MRWidgetPipeline', () => {
});
it('should render pipeline ID', () => {
- expect(
- findPipelineID()
- .text()
- .trim(),
- ).toBe(`#${mockData.pipeline.id}`);
+ expect(findPipelineID().text().trim()).toBe(`#${mockData.pipeline.id}`);
});
it('should render pipeline status and commit id', () => {
expect(findPipelineInfoContainer().text()).toMatch(mockData.pipeline.details.status.label);
- expect(
- findCommitLink()
- .text()
- .trim(),
- ).toBe(mockData.pipeline.commit.short_id);
+ expect(findCommitLink().text().trim()).toBe(mockData.pipeline.commit.short_id);
expect(findCommitLink().attributes('href')).toBe(mockData.pipeline.commit.commit_path);
});
@@ -130,7 +122,7 @@ describe('MRWidgetPipeline', () => {
it.each(mockData.buildsWithCoverage)(
'should have name and coverage for build %s listed in tooltip',
- build => {
+ (build) => {
const tooltipText = findPipelineCoverageTooltipText();
expect(tooltipText).toContain(`${build.name} (${build.coverage}%)`);
@@ -148,11 +140,7 @@ describe('MRWidgetPipeline', () => {
});
it('should render pipeline ID', () => {
- expect(
- findPipelineID()
- .text()
- .trim(),
- ).toBe(`#${mockData.pipeline.id}`);
+ expect(findPipelineID().text().trim()).toBe(`#${mockData.pipeline.id}`);
});
it('should render pipeline status', () => {
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js
index 9923434a7dd..bdd038edd71 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js
@@ -1,135 +1,181 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
+import { nextTick } from 'vue';
+import { shallowMount } from '@vue/test-utils';
import eventHub from '~/vue_merge_request_widget/event_hub';
-import component from '~/vue_merge_request_widget/components/states/mr_widget_rebase.vue';
+import WidgetRebase from '~/vue_merge_request_widget/components/states/mr_widget_rebase.vue';
+
+let wrapper;
+
+function factory(propsData, mergeRequestWidgetGraphql) {
+ wrapper = shallowMount(WidgetRebase, {
+ propsData,
+ data() {
+ return {
+ state: {
+ rebaseInProgress: propsData.mr.rebaseInProgress,
+ targetBranch: propsData.mr.targetBranch,
+ userPermissions: {
+ pushToSourceBranch: propsData.mr.canPushToSourceBranch,
+ },
+ },
+ };
+ },
+ provide: { glFeatures: { mergeRequestWidgetGraphql } },
+ mocks: {
+ $apollo: {
+ queries: {
+ state: { loading: false },
+ },
+ },
+ },
+ });
+}
describe('Merge request widget rebase component', () => {
- let Component;
- let vm;
-
- const findRebaseMessageEl = () => vm.$el.querySelector('[data-testid="rebase-message"]');
- const findRebaseMessageElText = () => findRebaseMessageEl().textContent.trim();
-
- beforeEach(() => {
- Component = Vue.extend(component);
- });
+ const findRebaseMessageEl = () => wrapper.find('[data-testid="rebase-message"]');
+ const findRebaseMessageElText = () => findRebaseMessageEl().text();
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
+ wrapper = null;
});
- describe('While rebasing', () => {
- it('should show progress message', () => {
- vm = mountComponent(Component, {
- mr: { rebaseInProgress: true },
- service: {},
+ [true, false].forEach((mergeRequestWidgetGraphql) => {
+ describe(`widget graphql is ${mergeRequestWidgetGraphql ? 'enabled' : 'disabled'}`, () => {
+ describe('While rebasing', () => {
+ it('should show progress message', () => {
+ factory(
+ {
+ mr: { rebaseInProgress: true },
+ service: {},
+ },
+ mergeRequestWidgetGraphql,
+ );
+
+ expect(findRebaseMessageElText()).toContain('Rebase in progress');
+ });
});
- expect(findRebaseMessageElText()).toContain('Rebase in progress');
- });
- });
-
- describe('With permissions', () => {
- beforeEach(() => {
- vm = mountComponent(Component, {
- mr: {
- rebaseInProgress: false,
- canPushToSourceBranch: true,
- },
- service: {},
- });
- });
-
- it('it should render rebase button and warning message', () => {
- const text = findRebaseMessageElText();
-
- expect(text).toContain('Fast-forward merge is not possible.');
- expect(text.replace(/\s\s+/g, ' ')).toContain(
- 'Rebase the source branch onto the target branch.',
- );
- });
+ describe('With permissions', () => {
+ it('should render rebase button and warning message', () => {
+ factory(
+ {
+ mr: {
+ rebaseInProgress: false,
+ canPushToSourceBranch: true,
+ },
+ service: {},
+ },
+ mergeRequestWidgetGraphql,
+ );
+
+ const text = findRebaseMessageElText();
+
+ expect(text).toContain('Fast-forward merge is not possible.');
+ expect(text.replace(/\s\s+/g, ' ')).toContain(
+ 'Rebase the source branch onto the target branch.',
+ );
+ });
+
+ it('should render error message when it fails', async () => {
+ factory(
+ {
+ mr: {
+ rebaseInProgress: false,
+ canPushToSourceBranch: true,
+ },
+ service: {},
+ },
+ mergeRequestWidgetGraphql,
+ );
- it('it should render error message when it fails', done => {
- vm.rebasingError = 'Something went wrong!';
+ wrapper.setData({ rebasingError: 'Something went wrong!' });
- Vue.nextTick(() => {
- expect(findRebaseMessageElText()).toContain('Something went wrong!');
- done();
+ await nextTick();
+ expect(findRebaseMessageElText()).toContain('Something went wrong!');
+ });
});
- });
- });
- describe('Without permissions', () => {
- it('should render a message explaining user does not have permissions', () => {
- vm = mountComponent(Component, {
- mr: {
- rebaseInProgress: false,
- canPushToSourceBranch: false,
- targetBranch: 'foo',
- },
- service: {},
- });
-
- const text = findRebaseMessageElText();
+ describe('Without permissions', () => {
+ it('should render a message explaining user does not have permissions', () => {
+ factory(
+ {
+ mr: {
+ rebaseInProgress: false,
+ canPushToSourceBranch: false,
+ targetBranch: 'foo',
+ },
+ service: {},
+ },
+ mergeRequestWidgetGraphql,
+ );
+
+ const text = findRebaseMessageElText();
+
+ expect(text).toContain('Fast-forward merge is not possible.');
+ expect(text).toContain('Rebase the source branch onto');
+ expect(text).toContain('foo');
+ expect(text.replace(/\s\s+/g, ' ')).toContain(
+ 'to allow this merge request to be merged.',
+ );
+ });
+
+ it('should render the correct target branch name', () => {
+ const targetBranch = 'fake-branch-to-test-with';
+ factory(
+ {
+ mr: {
+ rebaseInProgress: false,
+ canPushToSourceBranch: false,
+ targetBranch,
+ },
+ service: {},
+ },
+ mergeRequestWidgetGraphql,
+ );
- expect(text).toContain('Fast-forward merge is not possible.');
- expect(text).toContain('Rebase the source branch onto');
- expect(text).toContain('foo');
- expect(text.replace(/\s\s+/g, ' ')).toContain('to allow this merge request to be merged.');
- });
+ const elem = findRebaseMessageEl();
- it('should render the correct target branch name', () => {
- const targetBranch = 'fake-branch-to-test-with';
- vm = mountComponent(Component, {
- mr: {
- rebaseInProgress: false,
- canPushToSourceBranch: false,
- targetBranch,
- },
- service: {},
+ expect(elem.text()).toContain(
+ `Fast-forward merge is not possible. Rebase the source branch onto ${targetBranch} to allow this merge request to be merged.`,
+ );
+ });
});
- const elem = findRebaseMessageEl();
-
- expect(elem.innerHTML).toContain(
- `Fast-forward merge is not possible. Rebase the source branch onto <span class="label-branch">${targetBranch}</span> to allow this merge request to be merged.`,
- );
- });
- });
-
- describe('methods', () => {
- it('checkRebaseStatus', done => {
- jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
- vm = mountComponent(Component, {
- mr: {},
- service: {
- rebase() {
- return Promise.resolve();
- },
- poll() {
- return Promise.resolve({
- data: {
- rebase_in_progress: false,
- merge_error: null,
+ describe('methods', () => {
+ it('checkRebaseStatus', async () => {
+ jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
+ factory(
+ {
+ mr: {},
+ service: {
+ rebase() {
+ return Promise.resolve();
+ },
+ poll() {
+ return Promise.resolve({
+ data: {
+ rebase_in_progress: false,
+ merge_error: null,
+ },
+ });
+ },
},
- });
- },
- },
- });
+ },
+ mergeRequestWidgetGraphql,
+ );
- vm.rebase();
+ wrapper.vm.rebase();
+
+ // Wait for the rebase request
+ await nextTick();
+ // Wait for the polling request
+ await nextTick();
+ // Wait for the eventHub to be called
+ await nextTick();
- // Wait for the rebase request
- vm.$nextTick()
- // Wait for the polling request
- .then(vm.$nextTick())
- // Wait for the eventHub to be called
- .then(vm.$nextTick())
- .then(() => {
expect(eventHub.$emit).toHaveBeenCalledWith('MRWidgetRebaseSuccess');
- })
- .then(done)
- .catch(done.fail);
+ });
+ });
});
});
});
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_related_links_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_related_links_spec.js
index 0c4ec7ed99b..a33401c5ba9 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_related_links_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_related_links_spec.js
@@ -5,7 +5,7 @@ import relatedLinksComponent from '~/vue_merge_request_widget/components/mr_widg
describe('MRWidgetRelatedLinks', () => {
let vm;
- const createComponent = data => {
+ const createComponent = (data) => {
const Component = Vue.extend(relatedLinksComponent);
return mountComponent(Component, data);
diff --git a/spec/frontend/vue_mr_widget/components/review_app_link_spec.js b/spec/frontend/vue_mr_widget/components/review_app_link_spec.js
index 7d47621c64a..81a52890db7 100644
--- a/spec/frontend/vue_mr_widget/components/review_app_link_spec.js
+++ b/spec/frontend/vue_mr_widget/components/review_app_link_spec.js
@@ -1,7 +1,7 @@
import Vue from 'vue';
import { mockTracking, triggerEvent } from 'helpers/tracking_helper';
+import mountComponent from 'helpers/vue_mount_component_helper';
import component from '~/vue_merge_request_widget/components/review_app_link.vue';
-import mountComponent from '../../helpers/vue_mount_component_helper';
describe('review app link', () => {
const Component = Vue.extend(component);
diff --git a/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap b/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap
new file mode 100644
index 00000000000..c425a3a86a9
--- /dev/null
+++ b/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap
@@ -0,0 +1,183 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`MRWidgetAutoMergeEnabled when graphql is disabled template should have correct elements 1`] = `
+<div
+ class="mr-widget-body media"
+>
+ <status-icon-stub
+ status="success"
+ />
+
+ <div
+ class="media-body"
+ >
+ <h4
+ class="gl-display-flex"
+ >
+ <span
+ class="gl-mr-3"
+ >
+ <span
+ class="js-status-text-before-author"
+ data-testid="beforeStatusText"
+ >
+ Set by
+ </span>
+
+ <mr-widget-author-stub
+ author="[object Object]"
+ showauthorname="true"
+ />
+
+ <span
+ class="js-status-text-after-author"
+ data-testid="afterStatusText"
+ >
+ to be merged automatically when the pipeline succeeds
+ </span>
+ </span>
+
+ <a
+ class="btn btn-sm btn-default js-cancel-auto-merge"
+ data-testid="cancelAutomaticMergeButton"
+ href="#"
+ role="button"
+ >
+ <!---->
+
+ Cancel automatic merge
+
+ </a>
+ </h4>
+
+ <section
+ class="mr-info-list"
+ >
+ <p>
+
+ The changes will be merged into
+
+ <a
+ class="label-branch"
+ href="/foo/bar"
+ >
+ foo
+ </a>
+ </p>
+
+ <p
+ class="gl-display-flex"
+ >
+ <span
+ class="gl-mr-3"
+ >
+ The source branch will not be deleted
+ </span>
+
+ <a
+ class="btn btn-sm btn-default js-remove-source-branch"
+ data-testid="removeSourceBranchButton"
+ href="#"
+ role="button"
+ >
+ <!---->
+
+ Delete source branch
+
+ </a>
+ </p>
+ </section>
+ </div>
+</div>
+`;
+
+exports[`MRWidgetAutoMergeEnabled when graphql is enabled template should have correct elements 1`] = `
+<div
+ class="mr-widget-body media"
+>
+ <status-icon-stub
+ status="success"
+ />
+
+ <div
+ class="media-body"
+ >
+ <h4
+ class="gl-display-flex"
+ >
+ <span
+ class="gl-mr-3"
+ >
+ <span
+ class="js-status-text-before-author"
+ data-testid="beforeStatusText"
+ >
+ Set by
+ </span>
+
+ <mr-widget-author-stub
+ author="[object Object]"
+ showauthorname="true"
+ />
+
+ <span
+ class="js-status-text-after-author"
+ data-testid="afterStatusText"
+ >
+ to be merged automatically when the pipeline succeeds
+ </span>
+ </span>
+
+ <a
+ class="btn btn-sm btn-default js-cancel-auto-merge"
+ data-testid="cancelAutomaticMergeButton"
+ href="#"
+ role="button"
+ >
+ <!---->
+
+ Cancel automatic merge
+
+ </a>
+ </h4>
+
+ <section
+ class="mr-info-list"
+ >
+ <p>
+
+ The changes will be merged into
+
+ <a
+ class="label-branch"
+ href="/foo/bar"
+ >
+ foo
+ </a>
+ </p>
+
+ <p
+ class="gl-display-flex"
+ >
+ <span
+ class="gl-mr-3"
+ >
+ The source branch will not be deleted
+ </span>
+
+ <a
+ class="btn btn-sm btn-default js-remove-source-branch"
+ data-testid="removeSourceBranchButton"
+ href="#"
+ role="button"
+ >
+ <!---->
+
+ Delete source branch
+
+ </a>
+ </p>
+ </section>
+ </div>
+</div>
+`;
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js
index ae0f605c419..850bbd93df5 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js
@@ -1,20 +1,81 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
+import { nextTick } from 'vue';
+import { shallowMount } from '@vue/test-utils';
import { trimText } from 'helpers/text_helper';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import autoMergeEnabledComponent from '~/vue_merge_request_widget/components/states/mr_widget_auto_merge_enabled.vue';
import MRWidgetService from '~/vue_merge_request_widget/services/mr_widget_service';
import eventHub from '~/vue_merge_request_widget/event_hub';
import { MWPS_MERGE_STRATEGY } from '~/vue_merge_request_widget/constants';
+let wrapper;
+let mergeRequestWidgetGraphqlEnabled = false;
+
+function convertPropsToGraphqlState(props) {
+ return {
+ autoMergeStrategy: props.autoMergeStrategy,
+ cancelAutoMergePath: 'http://text.com',
+ mergeUser: {
+ id: props.mergeUserId,
+ ...props.setToAutoMergeBy,
+ },
+ targetBranch: props.targetBranch,
+ targetBranchCommitsPath: props.targetBranchPath,
+ shouldRemoveSourceBranch: props.shouldRemoveSourceBranch,
+ forceRemoveSourceBranch: props.shouldRemoveSourceBranch,
+ userPermissions: {
+ removeSourceBranch: props.canRemoveSourceBranch,
+ },
+ };
+}
+
+function factory(propsData) {
+ let state = {};
+
+ if (mergeRequestWidgetGraphqlEnabled) {
+ state = convertPropsToGraphqlState(propsData);
+ }
+
+ wrapper = extendedWrapper(
+ shallowMount(autoMergeEnabledComponent, {
+ propsData: {
+ mr: propsData,
+ service: new MRWidgetService({}),
+ },
+ data() {
+ return { state };
+ },
+ provide: { glFeatures: { mergeRequestWidgetGraphql: mergeRequestWidgetGraphqlEnabled } },
+ mocks: {
+ $apollo: {
+ queries: {
+ state: { loading: false },
+ },
+ },
+ },
+ }),
+ );
+}
+
+const targetBranchPath = '/foo/bar';
+const targetBranch = 'foo';
+const sha = '1EA2EZ34';
+const defaultMrProps = () => ({
+ shouldRemoveSourceBranch: false,
+ canRemoveSourceBranch: true,
+ canCancelAutomaticMerge: true,
+ mergeUserId: 1,
+ currentUserId: 1,
+ setToAutoMergeBy: {},
+ sha,
+ targetBranchPath,
+ targetBranch,
+ autoMergeStrategy: MWPS_MERGE_STRATEGY,
+});
+
describe('MRWidgetAutoMergeEnabled', () => {
- let vm;
let oldWindowGl;
- const targetBranchPath = '/foo/bar';
- const targetBranch = 'foo';
- const sha = '1EA2EZ34';
beforeEach(() => {
- const Component = Vue.extend(autoMergeEnabledComponent);
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
oldWindowGl = window.gl;
@@ -23,216 +84,234 @@ describe('MRWidgetAutoMergeEnabled', () => {
defaultAvatarUrl: 'no_avatar.png',
},
};
-
- vm = mountComponent(Component, {
- mr: {
- shouldRemoveSourceBranch: false,
- canRemoveSourceBranch: true,
- canCancelAutomaticMerge: true,
- mergeUserId: 1,
- currentUserId: 1,
- setToAutoMergeBy: {},
- sha,
- targetBranchPath,
- targetBranch,
- autoMergeStrategy: MWPS_MERGE_STRATEGY,
- },
- service: new MRWidgetService({}),
- });
});
afterEach(() => {
- vm.$destroy();
window.gl = oldWindowGl;
+ wrapper.destroy();
+ wrapper = null;
});
- describe('computed', () => {
- describe('canRemoveSourceBranch', () => {
- it('should return true when user is able to remove source branch', () => {
- expect(vm.canRemoveSourceBranch).toBeTruthy();
+ [true, false].forEach((mergeRequestWidgetGraphql) => {
+ describe(`when graphql is ${mergeRequestWidgetGraphql ? 'enabled' : 'disabled'}`, () => {
+ beforeEach(() => {
+ mergeRequestWidgetGraphqlEnabled = mergeRequestWidgetGraphql;
});
- it('should return false when user id is not the same with who set the MWPS', () => {
- vm.mr.mergeUserId = 2;
-
- expect(vm.canRemoveSourceBranch).toBeFalsy();
-
- vm.mr.currentUserId = 2;
-
- expect(vm.canRemoveSourceBranch).toBeTruthy();
+ describe('computed', () => {
+ describe('canRemoveSourceBranch', () => {
+ it('should return true when user is able to remove source branch', () => {
+ factory({
+ ...defaultMrProps(),
+ });
- vm.mr.currentUserId = 3;
+ expect(wrapper.findByTestId('removeSourceBranchButton').exists()).toBe(true);
+ });
- expect(vm.canRemoveSourceBranch).toBeFalsy();
- });
+ it.each`
+ mergeUserId | currentUserId
+ ${2} | ${1}
+ ${1} | ${2}
+ `(
+ 'should return false when user id is not the same with who set the MWPS',
+ ({ mergeUserId, currentUserId }) => {
+ factory({
+ ...defaultMrProps(),
+ mergeUserId,
+ currentUserId,
+ });
+
+ expect(wrapper.findByTestId('removeSourceBranchButton').exists()).toBe(false);
+ },
+ );
- it('should return false when shouldRemoveSourceBranch set to false', () => {
- vm.mr.shouldRemoveSourceBranch = true;
+ it('should return false when shouldRemoveSourceBranch set to false', () => {
+ factory({
+ ...defaultMrProps(),
+ shouldRemoveSourceBranch: true,
+ });
- expect(vm.canRemoveSourceBranch).toBeFalsy();
- });
+ expect(wrapper.findByTestId('removeSourceBranchButton').exists()).toBe(false);
+ });
- it('should return false if user is not able to remove the source branch', () => {
- vm.mr.canRemoveSourceBranch = false;
+ it('should return false if user is not able to remove the source branch', () => {
+ factory({
+ ...defaultMrProps(),
+ canRemoveSourceBranch: false,
+ });
- expect(vm.canRemoveSourceBranch).toBeFalsy();
- });
- });
+ expect(wrapper.findByTestId('removeSourceBranchButton').exists()).toBe(false);
+ });
+ });
- describe('statusTextBeforeAuthor', () => {
- it('should return "Set by" if the MWPS is selected', () => {
- Vue.set(vm.mr, 'autoMergeStrategy', MWPS_MERGE_STRATEGY);
+ describe('statusTextBeforeAuthor', () => {
+ it('should return "Set by" if the MWPS is selected', () => {
+ factory({
+ ...defaultMrProps(),
+ autoMergeStrategy: MWPS_MERGE_STRATEGY,
+ });
- expect(vm.statusTextBeforeAuthor).toBe('Set by');
- });
- });
+ expect(wrapper.findByTestId('beforeStatusText').text()).toBe('Set by');
+ });
+ });
- describe('statusTextAfterAuthor', () => {
- it('should return "to be merged automatically..." if MWPS is selected', () => {
- Vue.set(vm.mr, 'autoMergeStrategy', MWPS_MERGE_STRATEGY);
+ describe('statusTextAfterAuthor', () => {
+ it('should return "to be merged automatically..." if MWPS is selected', () => {
+ factory({
+ ...defaultMrProps(),
+ autoMergeStrategy: MWPS_MERGE_STRATEGY,
+ });
- expect(vm.statusTextAfterAuthor).toBe(
- 'to be merged automatically when the pipeline succeeds',
- );
- });
- });
+ expect(wrapper.findByTestId('afterStatusText').text()).toBe(
+ 'to be merged automatically when the pipeline succeeds',
+ );
+ });
+ });
- describe('cancelButtonText', () => {
- it('should return "Cancel automatic merge" if MWPS is selected', () => {
- Vue.set(vm.mr, 'autoMergeStrategy', MWPS_MERGE_STRATEGY);
+ describe('cancelButtonText', () => {
+ it('should return "Cancel automatic merge" if MWPS is selected', () => {
+ factory({
+ ...defaultMrProps(),
+ autoMergeStrategy: MWPS_MERGE_STRATEGY,
+ });
- expect(vm.cancelButtonText).toBe('Cancel automatic merge');
+ expect(wrapper.findByTestId('cancelAutomaticMergeButton').text()).toBe(
+ 'Cancel automatic merge',
+ );
+ });
+ });
});
- });
- });
- describe('methods', () => {
- describe('cancelAutomaticMerge', () => {
- it('should set flag and call service then tell main component to update the widget with data', done => {
- const mrObj = {
- is_new_mr_data: true,
- };
- jest.spyOn(vm.service, 'cancelAutomaticMerge').mockReturnValue(
- new Promise(resolve => {
- resolve({
- data: mrObj,
+ describe('methods', () => {
+ describe('cancelAutomaticMerge', () => {
+ it('should set flag and call service then tell main component to update the widget with data', (done) => {
+ factory({
+ ...defaultMrProps(),
});
- }),
- );
-
- vm.cancelAutomaticMerge();
- setImmediate(() => {
- expect(vm.isCancellingAutoMerge).toBeTruthy();
- expect(eventHub.$emit).toHaveBeenCalledWith('UpdateWidgetData', mrObj);
- done();
+ const mrObj = {
+ is_new_mr_data: true,
+ };
+ jest.spyOn(wrapper.vm.service, 'cancelAutomaticMerge').mockReturnValue(
+ new Promise((resolve) => {
+ resolve({
+ data: mrObj,
+ });
+ }),
+ );
+
+ wrapper.vm.cancelAutomaticMerge();
+ setImmediate(() => {
+ expect(wrapper.vm.isCancellingAutoMerge).toBeTruthy();
+ expect(eventHub.$emit).toHaveBeenCalledWith('UpdateWidgetData', mrObj);
+ done();
+ });
+ });
});
- });
- });
- describe('removeSourceBranch', () => {
- it('should set flag and call service then request main component to update the widget', done => {
- jest.spyOn(vm.service, 'merge').mockReturnValue(
- Promise.resolve({
- data: {
- status: MWPS_MERGE_STRATEGY,
- },
- }),
- );
-
- vm.removeSourceBranch();
- setImmediate(() => {
- expect(eventHub.$emit).toHaveBeenCalledWith('MRWidgetUpdateRequested');
- expect(vm.service.merge).toHaveBeenCalledWith({
- sha,
- auto_merge_strategy: MWPS_MERGE_STRATEGY,
- should_remove_source_branch: true,
- });
- done();
+ describe('removeSourceBranch', () => {
+ it('should set flag and call service then request main component to update the widget', (done) => {
+ factory({
+ ...defaultMrProps(),
+ });
+ jest.spyOn(wrapper.vm.service, 'merge').mockReturnValue(
+ Promise.resolve({
+ data: {
+ status: MWPS_MERGE_STRATEGY,
+ },
+ }),
+ );
+
+ wrapper.vm.removeSourceBranch();
+ setImmediate(() => {
+ expect(eventHub.$emit).toHaveBeenCalledWith('MRWidgetUpdateRequested');
+ expect(wrapper.vm.service.merge).toHaveBeenCalledWith({
+ sha,
+ auto_merge_strategy: MWPS_MERGE_STRATEGY,
+ should_remove_source_branch: true,
+ });
+ done();
+ });
+ });
});
});
- });
- });
- describe('template', () => {
- it('should have correct elements', () => {
- expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy();
- expect(vm.$el.innerText).toContain('to be merged automatically when the pipeline succeeds');
+ describe('template', () => {
+ it('should have correct elements', () => {
+ factory({
+ ...defaultMrProps(),
+ });
- expect(vm.$el.innerText).toContain('The changes will be merged into');
- expect(vm.$el.innerText).toContain(targetBranch);
- expect(vm.$el.innerText).toContain('The source branch will not be deleted');
- expect(vm.$el.querySelector('.js-cancel-auto-merge').innerText).toContain(
- 'Cancel automatic merge',
- );
+ expect(wrapper.element).toMatchSnapshot();
+ });
- expect(vm.$el.querySelector('.js-cancel-auto-merge').getAttribute('disabled')).toBeFalsy();
- expect(vm.$el.querySelector('.js-remove-source-branch').innerText).toContain(
- 'Delete source branch',
- );
+ it('should disable cancel auto merge button when the action is in progress', async () => {
+ factory({
+ ...defaultMrProps(),
+ });
+ wrapper.setData({
+ isCancellingAutoMerge: true,
+ });
- expect(vm.$el.querySelector('.js-remove-source-branch').getAttribute('disabled')).toBeFalsy();
- });
+ await nextTick();
- it('should disable cancel auto merge button when the action is in progress', done => {
- vm.isCancellingAutoMerge = true;
+ expect(wrapper.find('.js-cancel-auto-merge').attributes('disabled')).toBe('disabled');
+ });
- Vue.nextTick(() => {
- expect(vm.$el.querySelector('.js-cancel-auto-merge').getAttribute('disabled')).toBeTruthy();
- done();
- });
- });
+ it('should show source branch will be deleted text when the source branch is set to be removed', () => {
+ factory({
+ ...defaultMrProps(),
+ shouldRemoveSourceBranch: true,
+ });
- it('should show source branch will be deleted text when it source branch set to remove', done => {
- vm.mr.shouldRemoveSourceBranch = true;
+ const normalizedText = wrapper.text().replace(/\s+/g, ' ');
- Vue.nextTick(() => {
- const normalizedText = vm.$el.innerText.replace(/\s+/g, ' ');
+ expect(normalizedText).toContain('The source branch will be deleted');
+ expect(normalizedText).not.toContain('The source branch will not be deleted');
+ });
- expect(normalizedText).toContain('The source branch will be deleted');
- expect(normalizedText).not.toContain('The source branch will not be deleted');
- done();
- });
- });
+ it('should not show delete source branch button when user not able to delete source branch', () => {
+ factory({
+ ...defaultMrProps(),
+ currentUserId: 4,
+ });
- it('should not show delete source branch button when user not able to delete source branch', done => {
- vm.mr.currentUserId = 4;
+ expect(wrapper.find('.js-remove-source-branch').exists()).toBe(false);
+ });
- Vue.nextTick(() => {
- expect(vm.$el.querySelector('.js-remove-source-branch')).toEqual(null);
- done();
- });
- });
+ it('should disable delete source branch button when the action is in progress', async () => {
+ factory({
+ ...defaultMrProps(),
+ });
+ wrapper.setData({
+ isRemovingSourceBranch: true,
+ });
- it('should disable delete source branch button when the action is in progress', done => {
- vm.isRemovingSourceBranch = true;
+ await nextTick();
- Vue.nextTick(() => {
- expect(
- vm.$el.querySelector('.js-remove-source-branch').getAttribute('disabled'),
- ).toBeTruthy();
- done();
- });
- });
+ expect(wrapper.find('.js-remove-source-branch').attributes('disabled')).toBe('disabled');
+ });
- it('should render the status text as "...to merged automatically" if MWPS is selected', done => {
- Vue.set(vm.mr, 'autoMergeStrategy', MWPS_MERGE_STRATEGY);
+ it('should render the status text as "...to be merged automatically" if MWPS is selected', () => {
+ factory({
+ ...defaultMrProps(),
+ autoMergeStrategy: MWPS_MERGE_STRATEGY,
+ });
- Vue.nextTick(() => {
- const statusText = trimText(vm.$el.querySelector('.js-status-text-after-author').innerText);
+ const statusText = trimText(wrapper.find('.js-status-text-after-author').text());
- expect(statusText).toBe('to be merged automatically when the pipeline succeeds');
- done();
- });
- });
+ expect(statusText).toBe('to be merged automatically when the pipeline succeeds');
+ });
- it('should render the cancel button as "Cancel automatic merge" if MWPS is selected', done => {
- Vue.set(vm.mr, 'autoMergeStrategy', MWPS_MERGE_STRATEGY);
+ it('should render the cancel button as "Cancel automatic merge" if MWPS is selected', () => {
+ factory({
+ ...defaultMrProps(),
+ autoMergeStrategy: MWPS_MERGE_STRATEGY,
+ });
- Vue.nextTick(() => {
- const cancelButtonText = trimText(vm.$el.querySelector('.js-cancel-auto-merge').innerText);
+ const cancelButtonText = trimText(wrapper.find('.js-cancel-auto-merge').text());
- expect(cancelButtonText).toBe('Cancel automatic merge');
- done();
+ expect(cancelButtonText).toBe('Cancel automatic merge');
+ });
});
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js
index aae9b8660e2..dca3798f7ea 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js
@@ -1,3 +1,4 @@
+import { nextTick } from 'vue';
import { shallowMount } from '@vue/test-utils';
import { GlLoadingIcon, GlButton } from '@gitlab/ui';
import AutoMergeFailedComponent from '~/vue_merge_request_widget/components/states/mr_widget_auto_merge_failed.vue';
@@ -8,43 +9,60 @@ describe('MRWidgetAutoMergeFailed', () => {
const mergeError = 'This is the merge error';
const findButton = () => wrapper.find(GlButton);
- const createComponent = (props = {}) => {
+ const createComponent = (props = {}, mergeRequestWidgetGraphql = false) => {
wrapper = shallowMount(AutoMergeFailedComponent, {
propsData: { ...props },
- });
- };
+ data() {
+ if (mergeRequestWidgetGraphql) {
+ return { mergeError: props.mr?.mergeError };
+ }
- beforeEach(() => {
- createComponent({
- mr: { mergeError },
+ return {};
+ },
+ provide: {
+ glFeatures: { mergeRequestWidgetGraphql },
+ },
});
- });
+ };
afterEach(() => {
wrapper.destroy();
});
- it('renders failed message', () => {
- expect(wrapper.text()).toContain('This merge request failed to be merged automatically');
- });
+ [true, false].forEach((mergeRequestWidgetGraphql) => {
+ describe(`when graphql is ${mergeRequestWidgetGraphql ? 'enabled' : 'disabled'}`, () => {
+ beforeEach(() => {
+ createComponent(
+ {
+ mr: { mergeError },
+ },
+ mergeRequestWidgetGraphql,
+ );
+ });
- it('renders merge error provided', () => {
- expect(wrapper.text()).toContain(mergeError);
- });
+ it('renders failed message', () => {
+ expect(wrapper.text()).toContain('This merge request failed to be merged automatically');
+ });
- it('render refresh button', () => {
- expect(findButton().text()).toEqual('Refresh');
- });
+ it('renders merge error provided', () => {
+ expect(wrapper.text()).toContain(mergeError);
+ });
+
+ it('render refresh button', () => {
+ expect(findButton().text()).toBe('Refresh');
+ });
+
+ it('emits event and shows loading icon when button is clicked', async () => {
+ jest.spyOn(eventHub, '$emit');
+ findButton().vm.$emit('click');
- it('emits event and shows loading icon when button is clicked', () => {
- jest.spyOn(eventHub, '$emit');
- findButton().vm.$emit('click');
+ expect(eventHub.$emit.mock.calls[0][0]).toBe('MRWidgetUpdateRequested');
- expect(eventHub.$emit.mock.calls[0][0]).toBe('MRWidgetUpdateRequested');
+ await nextTick();
- return wrapper.vm.$nextTick(() => {
- expect(findButton().attributes('disabled')).toBe('true');
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(findButton().attributes('disabled')).toBe('true');
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ });
});
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_closed_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_closed_spec.js
index 322f440763c..55d7e2391b2 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_closed_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_closed_spec.js
@@ -39,10 +39,7 @@ describe('MRWidgetClosed', () => {
it('renders closed by information with author and time', () => {
expect(
- vm.$el
- .querySelector('.js-mr-widget-author')
- .textContent.trim()
- .replace(/\s\s+/g, ' '),
+ vm.$el.querySelector('.js-mr-widget-author').textContent.trim().replace(/\s\s+/g, ' '),
).toContain('Closed by Administrator less than a minute ago');
});
@@ -54,10 +51,7 @@ describe('MRWidgetClosed', () => {
it('renders information about the changes not being merged', () => {
expect(
- vm.$el
- .querySelector('.mr-info-list')
- .textContent.trim()
- .replace(/\s\s+/g, ' '),
+ vm.$el.querySelector('.mr-info-list').textContent.trim().replace(/\s\s+/g, ' '),
).toContain('The changes were not merged into so_long_jquery');
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_commit_message_dropdown_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_commit_message_dropdown_spec.js
index 56832f82b05..706d60368b5 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_commit_message_dropdown_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_commit_message_dropdown_spec.js
@@ -47,7 +47,8 @@ describe('Commits message dropdown component', () => {
});
it('should have correct message for the first dropdown list element', () => {
- expect(findFirstDropdownElement().text()).toBe('78d5b7 Commit 1');
+ expect(findFirstDropdownElement().text()).toContain('78d5b7');
+ expect(findFirstDropdownElement().text()).toContain('Commit 1');
});
it('should emit a commit title on selecting commit', () => {
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js
index 62fc3330444..e4123b2ca83 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js
@@ -4,7 +4,7 @@ import CommitsHeader from '~/vue_merge_request_widget/components/states/commits_
describe('Commits header component', () => {
let wrapper;
- const createComponent = props => {
+ const createComponent = (props) => {
wrapper = shallowMount(CommitsHeader, {
propsData: {
isSquashEnabled: false,
@@ -98,6 +98,8 @@ describe('Commits header component', () => {
});
it('does has merge commit part of the message', () => {
+ createComponent();
+
expect(findHeaderWrapper().text()).toContain('1 merge commit');
});
});
@@ -108,21 +110,21 @@ describe('Commits header component', () => {
wrapper.setData({ expanded: true });
});
- it('toggle has aria-label equal to collapse', done => {
+ it('toggle has aria-label equal to collapse', (done) => {
wrapper.vm.$nextTick(() => {
expect(findCommitToggle().attributes('aria-label')).toBe('Collapse');
done();
});
});
- it('has a chevron-down icon', done => {
+ it('has a chevron-down icon', (done) => {
wrapper.vm.$nextTick(() => {
expect(findCommitToggle().props('icon')).toBe('chevron-down');
done();
});
});
- it('has a collapse text', done => {
+ it('has a collapse text', (done) => {
wrapper.vm.$nextTick(() => {
expect(findHeaderWrapper().text()).toBe('Collapse');
done();
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js
index ad21e6e6f4f..f5a059698b6 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js
@@ -54,7 +54,7 @@ describe('MRWidgetConflicts', () => {
vm.destroy();
});
- [false, true].forEach(featureEnabled => {
+ [false, true].forEach((featureEnabled) => {
describe(`with GraphQL feature flag ${featureEnabled ? 'enabled' : 'disabled'}`, () => {
beforeEach(() => {
mergeRequestWidgetGraphql = featureEnabled;
@@ -167,12 +167,9 @@ describe('MRWidgetConflicts', () => {
},
});
- expect(
- vm
- .text()
- .trim()
- .replace(/\s\s+/g, ' '),
- ).toContain('ask someone with write access');
+ expect(vm.text().trim().replace(/\s\s+/g, ' ')).toContain(
+ 'ask someone with write access',
+ );
});
it('should not have action buttons', async () => {
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js
index 6778a8f4a1f..48c1a9eedf9 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js
@@ -49,7 +49,7 @@ describe('MRWidgetFailedToMerge', () => {
});
describe('mergeError', () => {
- it('removes forced line breaks', done => {
+ it('removes forced line breaks', (done) => {
mr.mergeError = 'contains<br />line breaks<br />';
Vue.nextTick()
@@ -98,7 +98,7 @@ describe('MRWidgetFailedToMerge', () => {
});
describe('while it is refreshing', () => {
- it('renders Refreshing now', done => {
+ it('renders Refreshing now', (done) => {
vm.isRefreshing = true;
Vue.nextTick(() => {
@@ -139,7 +139,7 @@ describe('MRWidgetFailedToMerge', () => {
});
});
- it('should just show a generic merge failed message if merge_error is not available', done => {
+ it('should just show a generic merge failed message if merge_error is not available', (done) => {
vm.mr.mergeError = null;
Vue.nextTick(() => {
@@ -149,7 +149,7 @@ describe('MRWidgetFailedToMerge', () => {
});
});
- it('should show refresh label when refresh requested', done => {
+ it('should show refresh label when refresh requested', (done) => {
vm.refresh();
Vue.nextTick(() => {
expect(vm.$el.innerText).not.toContain('Merge failed. Refreshing');
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js
index 9b51e8583ba..36c4174c03d 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js
@@ -123,9 +123,9 @@ describe('MRWidgetMerged', () => {
describe('methods', () => {
describe('removeSourceBranch', () => {
- it('should set flag and call service then request main component to update the widget', done => {
+ it('should set flag and call service then request main component to update the widget', (done) => {
jest.spyOn(vm.service, 'removeSourceBranch').mockReturnValue(
- new Promise(resolve => {
+ new Promise((resolve) => {
resolve({
data: {
message: 'Branch was deleted',
@@ -173,7 +173,7 @@ describe('MRWidgetMerged', () => {
);
});
- it('hides button to copy commit SHA if SHA does not exist', done => {
+ it('hides button to copy commit SHA if SHA does not exist', (done) => {
vm.mr.mergeCommitSha = null;
Vue.nextTick(() => {
@@ -189,7 +189,7 @@ describe('MRWidgetMerged', () => {
expect(selectors.mergeCommitShaLink.href).toBe(vm.mr.mergeCommitPath);
});
- it('should not show source branch deleted text', done => {
+ it('should not show source branch deleted text', (done) => {
vm.mr.sourceBranchRemoved = false;
Vue.nextTick(() => {
@@ -199,7 +199,7 @@ describe('MRWidgetMerged', () => {
});
});
- it('should show source branch deleting text', done => {
+ it('should show source branch deleting text', (done) => {
vm.mr.isRemovingSourceBranch = true;
vm.mr.sourceBranchRemoved = false;
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_missing_branch_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_missing_branch_spec.js
index f45368bf443..848677bf4d2 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_missing_branch_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_missing_branch_spec.js
@@ -25,7 +25,7 @@ describe('MRWidgetMissingBranch', () => {
wrapper.destroy();
});
- [true, false].forEach(mergeRequestWidgetGraphql => {
+ [true, false].forEach((mergeRequestWidgetGraphql) => {
describe(`widget GraphQL feature flag is ${
mergeRequestWidgetGraphql ? 'enabled' : 'disabled'
}`, () => {
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
index 9057ffaea45..8eddf59820c 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
@@ -20,7 +20,7 @@ jest.mock('~/commons/nav/user_merge_requests', () => ({
const commitMessage = 'This is the commit message';
const squashCommitMessage = 'This is the squash commit message';
const commitMessageWithDescription = 'This is the commit message description';
-const createTestMr = customConfig => {
+const createTestMr = (customConfig) => {
const mr = {
isPipelineActive: false,
pipeline: null,
@@ -346,8 +346,8 @@ describe('ReadyToMerge', () => {
});
describe('handleMergeButtonClick', () => {
- const returnPromise = status =>
- new Promise(resolve => {
+ const returnPromise = (status) =>
+ new Promise((resolve) => {
resolve({
data: {
status,
@@ -355,7 +355,7 @@ describe('ReadyToMerge', () => {
});
});
- it('should handle merge when pipeline succeeds', done => {
+ it('should handle merge when pipeline succeeds', (done) => {
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
jest
.spyOn(vm.service, 'merge')
@@ -381,7 +381,7 @@ describe('ReadyToMerge', () => {
});
});
- it('should handle merge failed', done => {
+ it('should handle merge failed', (done) => {
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
jest.spyOn(vm.service, 'merge').mockReturnValue(returnPromise('failed'));
vm.handleMergeButtonClick(false, true);
@@ -398,7 +398,7 @@ describe('ReadyToMerge', () => {
});
});
- it('should handle merge action accepted case', done => {
+ it('should handle merge action accepted case', (done) => {
jest.spyOn(vm.service, 'merge').mockReturnValue(returnPromise('success'));
jest.spyOn(vm, 'initiateMergePolling').mockImplementation(() => {});
vm.handleMergeButtonClick();
@@ -433,8 +433,8 @@ describe('ReadyToMerge', () => {
});
describe('handleMergePolling', () => {
- const returnPromise = state =>
- new Promise(resolve => {
+ const returnPromise = (state) =>
+ new Promise((resolve) => {
resolve({
data: {
state,
@@ -447,7 +447,7 @@ describe('ReadyToMerge', () => {
loadFixtures('merge_requests/merge_request_of_current_user.html');
});
- it('should call start and stop polling when MR merged', done => {
+ it('should call start and stop polling when MR merged', (done) => {
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
jest.spyOn(vm.service, 'poll').mockReturnValue(returnPromise('merged'));
jest.spyOn(vm, 'initiateRemoveSourceBranchPolling').mockImplementation(() => {});
@@ -476,11 +476,14 @@ describe('ReadyToMerge', () => {
});
});
- it('updates status box', done => {
+ it('updates status box', (done) => {
jest.spyOn(vm.service, 'poll').mockReturnValue(returnPromise('merged'));
jest.spyOn(vm, 'initiateRemoveSourceBranchPolling').mockImplementation(() => {});
- vm.handleMergePolling(() => {}, () => {});
+ vm.handleMergePolling(
+ () => {},
+ () => {},
+ );
setImmediate(() => {
const statusBox = document.querySelector('.status-box');
@@ -492,11 +495,14 @@ describe('ReadyToMerge', () => {
});
});
- it('updates merge request count badge', done => {
+ it('updates merge request count badge', (done) => {
jest.spyOn(vm.service, 'poll').mockReturnValue(returnPromise('merged'));
jest.spyOn(vm, 'initiateRemoveSourceBranchPolling').mockImplementation(() => {});
- vm.handleMergePolling(() => {}, () => {});
+ vm.handleMergePolling(
+ () => {},
+ () => {},
+ );
setImmediate(() => {
expect(document.querySelector('.js-merge-counter').textContent).toBe('0');
@@ -505,7 +511,7 @@ describe('ReadyToMerge', () => {
});
});
- it('should continue polling until MR is merged', done => {
+ it('should continue polling until MR is merged', (done) => {
jest.spyOn(vm.service, 'poll').mockReturnValue(returnPromise('some_other_state'));
jest.spyOn(vm, 'initiateRemoveSourceBranchPolling').mockImplementation(() => {});
@@ -541,8 +547,8 @@ describe('ReadyToMerge', () => {
});
describe('handleRemoveBranchPolling', () => {
- const returnPromise = state =>
- new Promise(resolve => {
+ const returnPromise = (state) =>
+ new Promise((resolve) => {
resolve({
data: {
source_branch_exists: state,
@@ -550,7 +556,7 @@ describe('ReadyToMerge', () => {
});
});
- it('should call start and stop polling when MR merged', done => {
+ it('should call start and stop polling when MR merged', (done) => {
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
jest.spyOn(vm.service, 'poll').mockReturnValue(returnPromise(false));
@@ -583,7 +589,7 @@ describe('ReadyToMerge', () => {
});
});
- it('should continue polling until MR is merged', done => {
+ it('should continue polling until MR is merged', (done) => {
jest.spyOn(vm.service, 'poll').mockReturnValue(returnPromise(true));
let cpc = false; // continuePollingCalled
@@ -657,10 +663,7 @@ describe('ReadyToMerge', () => {
const findCommitsHeaderElement = () => wrapper.find(CommitsHeader);
const findCommitEditElements = () => wrapper.findAll(CommitEdit);
const findCommitDropdownElement = () => wrapper.find(CommitMessageDropdown);
- const findFirstCommitEditLabel = () =>
- findCommitEditElements()
- .at(0)
- .props('label');
+ const findFirstCommitEditLabel = () => findCommitEditElements().at(0).props('label');
describe('squash checkbox', () => {
it('should be rendered when squash before merge is enabled and there is more than 1 commit', () => {
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_squash_before_merge_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_squash_before_merge_spec.js
index f9490ac77ff..cc160f6182d 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_squash_before_merge_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_squash_before_merge_spec.js
@@ -8,7 +8,7 @@ const localVue = createLocalVue();
describe('Squash before merge component', () => {
let wrapper;
- const createComponent = props => {
+ const createComponent = (props) => {
wrapper = shallowMount(localVue.extend(SquashBeforeMerge), {
localVue,
propsData: {
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_wip_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_wip_spec.js
index 907906ebe98..6d63d4b1be3 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_wip_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_wip_spec.js
@@ -47,12 +47,12 @@ describe('Wip', () => {
};
describe('handleRemoveWIP', () => {
- it('should make a request to service and handle response', done => {
+ it('should make a request to service and handle response', (done) => {
const vm = createComponent();
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
jest.spyOn(vm.service, 'removeWIP').mockReturnValue(
- new Promise(resolve => {
+ new Promise((resolve) => {
resolve({
data: mrObj,
});
@@ -92,7 +92,7 @@ describe('Wip', () => {
);
});
- it('should not show removeWIP button if user cannot update MR', done => {
+ it('should not show removeWIP button if user cannot update MR', (done) => {
vm.mr.removeWIPPath = '';
Vue.nextTick(() => {
diff --git a/spec/frontend/vue_mr_widget/components/terraform/mr_widget_terraform_container_spec.js b/spec/frontend/vue_mr_widget/components/terraform/mr_widget_terraform_container_spec.js
index 7fe6b44ecc7..8da0d0f16d6 100644
--- a/spec/frontend/vue_mr_widget/components/terraform/mr_widget_terraform_container_spec.js
+++ b/spec/frontend/vue_mr_widget/components/terraform/mr_widget_terraform_container_spec.js
@@ -15,7 +15,7 @@ describe('MrWidgetTerraformConainer', () => {
const propsData = { endpoint: '/path/to/terraform/report.json' };
const findHeader = () => wrapper.find('[data-testid="terraform-header-text"]');
- const findPlans = () => wrapper.findAll(TerraformPlan).wrappers.map(x => x.props('plan'));
+ const findPlans = () => wrapper.findAll(TerraformPlan).wrappers.map((x) => x.props('plan'));
const mockPollingApi = (response, body, header) => {
mock.onGet(propsData.endpoint).reply(response, body, header);
diff --git a/spec/frontend/vue_mr_widget/components/terraform/terraform_plan_spec.js b/spec/frontend/vue_mr_widget/components/terraform/terraform_plan_spec.js
index cc68ba0d9df..ea4eb44ebfe 100644
--- a/spec/frontend/vue_mr_widget/components/terraform/terraform_plan_spec.js
+++ b/spec/frontend/vue_mr_widget/components/terraform/terraform_plan_spec.js
@@ -14,7 +14,7 @@ describe('TerraformPlan', () => {
const findIcon = () => wrapper.find('[data-testid="change-type-icon"]');
const findLogButton = () => wrapper.find('[data-testid="terraform-report-link"]');
- const mountWrapper = propsData => {
+ const mountWrapper = (propsData) => {
wrapper = shallowMount(TerraformPlan, { stubs: { GlLink, GlSprintf }, propsData });
};
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_spec.js b/spec/frontend/vue_mr_widget/deployment/deployment_spec.js
index 19a5566c3b1..87bf32351bd 100644
--- a/spec/frontend/vue_mr_widget/deployment/deployment_spec.js
+++ b/spec/frontend/vue_mr_widget/deployment/deployment_spec.js
@@ -116,7 +116,7 @@ describe('Deployment component', () => {
if (actionButtons.length > 0) {
describe('renders the expected button group', () => {
- actionButtons.forEach(button => {
+ actionButtons.forEach((button) => {
it(`renders ${button}`, () => {
expect(wrapper.find(button).exists()).toBe(true);
});
@@ -126,7 +126,7 @@ describe('Deployment component', () => {
if (actionButtons.length === 0) {
describe('does not render the button group', () => {
- defaultGroup.forEach(button => {
+ defaultGroup.forEach((button) => {
it(`does not render ${button}`, () => {
expect(wrapper.find(button).exists()).toBe(false);
});
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_view_button_spec.js b/spec/frontend/vue_mr_widget/deployment/deployment_view_button_spec.js
index a12757d4cce..a5d91468ef2 100644
--- a/spec/frontend/vue_mr_widget/deployment/deployment_view_button_spec.js
+++ b/spec/frontend/vue_mr_widget/deployment/deployment_view_button_spec.js
@@ -90,7 +90,7 @@ describe('Deployment View App button', () => {
it('renders all the links to the review apps', () => {
const allUrls = wrapper.findAll('.js-deploy-url-menu-item').wrappers;
- const expectedUrls = deploymentMockData.changes.map(change => change.external_url);
+ const expectedUrls = deploymentMockData.changes.map((change) => change.external_url);
expectedUrls.forEach((expectedUrl, idx) => {
const deployUrl = allUrls[idx];
diff --git a/spec/frontend/vue_mr_widget/mock_data.js b/spec/frontend/vue_mr_widget/mock_data.js
index 8ee920f06a1..aa2345abccf 100644
--- a/spec/frontend/vue_mr_widget/mock_data.js
+++ b/spec/frontend/vue_mr_widget/mock_data.js
@@ -1,5 +1,20 @@
import { SUCCESS } from '~/vue_merge_request_widget/components/deployment/constants';
+export const artifacts = [
+ {
+ text: 'result.txt',
+ url: 'bar',
+ job_name: 'generate-artifact',
+ job_path: 'bar',
+ },
+ {
+ text: 'foo.txt',
+ url: 'foo',
+ job_name: 'foo-artifact',
+ job_path: 'foo',
+ },
+];
+
export default {
id: 132,
iid: 22,
@@ -84,6 +99,7 @@ export default {
coverage: '92.16',
path: '/root/acets-app/pipelines/172',
details: {
+ artifacts,
status: {
icon: 'status_success',
favicon: 'favicon_status_success',
@@ -127,7 +143,6 @@ export default {
dropdown_path: '/root/acets-app/pipelines/172/stage.json?stage=review',
},
],
- artifacts: [],
manual_actions: [
{
name: 'stop_review',
@@ -194,7 +209,10 @@ export default {
updated_at: '2017-04-07T15:28:44.800Z',
},
pipelineCoverageDelta: '15.25',
- buildsWithCoverage: [{ name: 'karma', coverage: '40.2' }, { name: 'rspec', coverage: '80.4' }],
+ buildsWithCoverage: [
+ { name: 'karma', coverage: '40.2' },
+ { name: 'rspec', coverage: '80.4' },
+ ],
work_in_progress: false,
source_branch_exists: false,
mergeable_discussions_state: true,
@@ -272,6 +290,7 @@ export const mockStore = {
pipeline: {
id: 0,
details: {
+ artifacts,
status: {
details_path: '/root/review-app-tester/pipelines/66',
favicon:
@@ -291,6 +310,7 @@ export const mockStore = {
mergePipeline: {
id: 1,
details: {
+ artifacts,
status: {
details_path: '/root/review-app-tester/pipelines/66',
favicon:
diff --git a/spec/frontend/vue_mr_widget/mr_widget_how_to_merge_modal_spec.js b/spec/frontend/vue_mr_widget/mr_widget_how_to_merge_modal_spec.js
index aaaee3327a8..bd22183cbea 100644
--- a/spec/frontend/vue_mr_widget/mr_widget_how_to_merge_modal_spec.js
+++ b/spec/frontend/vue_mr_widget/mr_widget_how_to_merge_modal_spec.js
@@ -50,19 +50,11 @@ describe('MRWidgetHowToMerge', () => {
it('should render different instructions based on if the user can merge', () => {
mountComponent({ props: { canMerge: true } });
- expect(
- findInstructionsFields()
- .at(2)
- .text(),
- ).toContain('git push origin');
+ expect(findInstructionsFields().at(2).text()).toContain('git push origin');
});
it('should render different instructions based on if the merge is based off a fork', () => {
mountComponent({ props: { isFork: true } });
- expect(
- findInstructionsFields()
- .at(0)
- .text(),
- ).toContain('FETCH_HEAD');
+ expect(findInstructionsFields().at(0).text()).toContain('FETCH_HEAD');
});
});
diff --git a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
index a20cd5b4400..1ea7fe1fbfe 100644
--- a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
+++ b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
@@ -1,12 +1,13 @@
-import Vue from 'vue';
+import { mount } from '@vue/test-utils';
+import Vue, { nextTick } from 'vue';
import MockAdapter from 'axios-mock-adapter';
-import mountComponent from 'helpers/vue_mount_component_helper';
import Api from '~/api';
import axios from '~/lib/utils/axios_utils';
-import mrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options.vue';
+import MrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options.vue';
import eventHub from '~/vue_merge_request_widget/event_hub';
import notify from '~/lib/utils/notify';
import SmartInterval from '~/smart_interval';
+import { setFaviconOverlay } from '~/lib/utils/favicon';
import { stateKey } from '~/vue_merge_request_widget/stores/state_maps';
import mockData from './mock_data';
import { faviconDataUrl, overlayDataUrl } from '../lib/utils/mock_data';
@@ -14,58 +15,56 @@ import { SUCCESS } from '~/vue_merge_request_widget/components/deployment/consta
jest.mock('~/smart_interval');
-const returnPromise = data =>
- new Promise(resolve => {
+jest.mock('~/lib/utils/favicon');
+
+const returnPromise = (data) =>
+ new Promise((resolve) => {
resolve({
data,
});
});
-describe('mrWidgetOptions', () => {
- let vm;
+describe('MrWidgetOptions', () => {
+ let wrapper;
let mock;
- let MrWidgetOptions;
const COLLABORATION_MESSAGE = 'Allows commits from members who can merge to the target branch';
beforeEach(() => {
- // Prevent component mounting
- delete mrWidgetOptions.el;
-
gl.mrWidgetData = { ...mockData };
gon.features = { asyncMrWidget: true };
mock = new MockAdapter(axios);
mock.onGet(mockData.merge_request_widget_path).reply(() => [200, { ...mockData }]);
mock.onGet(mockData.merge_request_cached_widget_path).reply(() => [200, { ...mockData }]);
-
- MrWidgetOptions = Vue.extend(mrWidgetOptions);
});
afterEach(() => {
mock.restore();
- vm.$destroy();
- vm = null;
+ wrapper.destroy();
+ wrapper = null;
gl.mrWidgetData = {};
gon.features = {};
});
const createComponent = (mrData = mockData) => {
- if (vm) {
- vm.$destroy();
+ if (wrapper) {
+ wrapper.destroy();
}
- vm = mountComponent(MrWidgetOptions, {
- mrData: { ...mrData },
+ wrapper = mount(MrWidgetOptions, {
+ propsData: {
+ mrData: { ...mrData },
+ },
});
return axios.waitForAll();
};
- const findSuggestPipeline = () => vm.$el.querySelector('[data-testid="mr-suggest-pipeline"]');
- const findSuggestPipelineButton = () => findSuggestPipeline().querySelector('button');
- const findSecurityMrWidget = () => vm.$el.querySelector('[data-testid="security-mr-widget"]');
+ const findSuggestPipeline = () => wrapper.find('[data-testid="mr-suggest-pipeline"]');
+ const findSuggestPipelineButton = () => findSuggestPipeline().find('button');
+ const findSecurityMrWidget = () => wrapper.find('[data-testid="security-mr-widget"]');
describe('default', () => {
beforeEach(() => {
@@ -74,147 +73,147 @@ describe('mrWidgetOptions', () => {
describe('data', () => {
it('should instantiate Store and Service', () => {
- expect(vm.mr).toBeDefined();
- expect(vm.service).toBeDefined();
+ expect(wrapper.vm.mr).toBeDefined();
+ expect(wrapper.vm.service).toBeDefined();
});
});
describe('computed', () => {
describe('componentName', () => {
it('should return merged component', () => {
- expect(vm.componentName).toEqual('mr-widget-merged');
+ expect(wrapper.vm.componentName).toEqual('mr-widget-merged');
});
it('should return conflicts component', () => {
- vm.mr.state = 'conflicts';
+ wrapper.vm.mr.state = 'conflicts';
- expect(vm.componentName).toEqual('mr-widget-conflicts');
+ expect(wrapper.vm.componentName).toEqual('mr-widget-conflicts');
});
});
describe('shouldRenderMergeHelp', () => {
it('should return false for the initial merged state', () => {
- expect(vm.shouldRenderMergeHelp).toBeFalsy();
+ expect(wrapper.vm.shouldRenderMergeHelp).toBeFalsy();
});
it('should return true for a state which requires help widget', () => {
- vm.mr.state = 'conflicts';
+ wrapper.vm.mr.state = 'conflicts';
- expect(vm.shouldRenderMergeHelp).toBeTruthy();
+ expect(wrapper.vm.shouldRenderMergeHelp).toBeTruthy();
});
});
describe('shouldRenderPipelines', () => {
it('should return true when hasCI is true', () => {
- vm.mr.hasCI = true;
+ wrapper.vm.mr.hasCI = true;
- expect(vm.shouldRenderPipelines).toBeTruthy();
+ expect(wrapper.vm.shouldRenderPipelines).toBeTruthy();
});
it('should return false when hasCI is false', () => {
- vm.mr.hasCI = false;
+ wrapper.vm.mr.hasCI = false;
- expect(vm.shouldRenderPipelines).toBeFalsy();
+ expect(wrapper.vm.shouldRenderPipelines).toBeFalsy();
});
});
describe('shouldRenderRelatedLinks', () => {
it('should return false for the initial data', () => {
- expect(vm.shouldRenderRelatedLinks).toBeFalsy();
+ expect(wrapper.vm.shouldRenderRelatedLinks).toBeFalsy();
});
it('should return true if there is relatedLinks in MR', () => {
- Vue.set(vm.mr, 'relatedLinks', {});
+ Vue.set(wrapper.vm.mr, 'relatedLinks', {});
- expect(vm.shouldRenderRelatedLinks).toBeTruthy();
+ expect(wrapper.vm.shouldRenderRelatedLinks).toBeTruthy();
});
});
describe('shouldRenderSourceBranchRemovalStatus', () => {
beforeEach(() => {
- vm.mr.state = 'readyToMerge';
+ wrapper.vm.mr.state = 'readyToMerge';
});
it('should return true when cannot remove source branch and branch will be removed', () => {
- vm.mr.canRemoveSourceBranch = false;
- vm.mr.shouldRemoveSourceBranch = true;
+ wrapper.vm.mr.canRemoveSourceBranch = false;
+ wrapper.vm.mr.shouldRemoveSourceBranch = true;
- expect(vm.shouldRenderSourceBranchRemovalStatus).toEqual(true);
+ expect(wrapper.vm.shouldRenderSourceBranchRemovalStatus).toEqual(true);
});
it('should return false when can remove source branch and branch will be removed', () => {
- vm.mr.canRemoveSourceBranch = true;
- vm.mr.shouldRemoveSourceBranch = true;
+ wrapper.vm.mr.canRemoveSourceBranch = true;
+ wrapper.vm.mr.shouldRemoveSourceBranch = true;
- expect(vm.shouldRenderSourceBranchRemovalStatus).toEqual(false);
+ expect(wrapper.vm.shouldRenderSourceBranchRemovalStatus).toEqual(false);
});
it('should return false when cannot remove source branch and branch will not be removed', () => {
- vm.mr.canRemoveSourceBranch = false;
- vm.mr.shouldRemoveSourceBranch = false;
+ wrapper.vm.mr.canRemoveSourceBranch = false;
+ wrapper.vm.mr.shouldRemoveSourceBranch = false;
- expect(vm.shouldRenderSourceBranchRemovalStatus).toEqual(false);
+ expect(wrapper.vm.shouldRenderSourceBranchRemovalStatus).toEqual(false);
});
it('should return false when in merged state', () => {
- vm.mr.canRemoveSourceBranch = false;
- vm.mr.shouldRemoveSourceBranch = true;
- vm.mr.state = 'merged';
+ wrapper.vm.mr.canRemoveSourceBranch = false;
+ wrapper.vm.mr.shouldRemoveSourceBranch = true;
+ wrapper.vm.mr.state = 'merged';
- expect(vm.shouldRenderSourceBranchRemovalStatus).toEqual(false);
+ expect(wrapper.vm.shouldRenderSourceBranchRemovalStatus).toEqual(false);
});
it('should return false when in nothing to merge state', () => {
- vm.mr.canRemoveSourceBranch = false;
- vm.mr.shouldRemoveSourceBranch = true;
- vm.mr.state = 'nothingToMerge';
+ wrapper.vm.mr.canRemoveSourceBranch = false;
+ wrapper.vm.mr.shouldRemoveSourceBranch = true;
+ wrapper.vm.mr.state = 'nothingToMerge';
- expect(vm.shouldRenderSourceBranchRemovalStatus).toEqual(false);
+ expect(wrapper.vm.shouldRenderSourceBranchRemovalStatus).toEqual(false);
});
});
describe('shouldRenderCollaborationStatus', () => {
describe('when collaboration is allowed', () => {
beforeEach(() => {
- vm.mr.allowCollaboration = true;
+ wrapper.vm.mr.allowCollaboration = true;
});
describe('when merge request is opened', () => {
- beforeEach(done => {
- vm.mr.isOpen = true;
- vm.$nextTick(done);
+ beforeEach((done) => {
+ wrapper.vm.mr.isOpen = true;
+ nextTick(done);
});
it('should render collaboration status', () => {
- expect(vm.$el.textContent).toContain(COLLABORATION_MESSAGE);
+ expect(wrapper.text()).toContain(COLLABORATION_MESSAGE);
});
});
describe('when merge request is not opened', () => {
- beforeEach(done => {
- vm.mr.isOpen = false;
- vm.$nextTick(done);
+ beforeEach((done) => {
+ wrapper.vm.mr.isOpen = false;
+ nextTick(done);
});
it('should not render collaboration status', () => {
- expect(vm.$el.textContent).not.toContain(COLLABORATION_MESSAGE);
+ expect(wrapper.text()).not.toContain(COLLABORATION_MESSAGE);
});
});
});
describe('when collaboration is not allowed', () => {
beforeEach(() => {
- vm.mr.allowCollaboration = false;
+ wrapper.vm.mr.allowCollaboration = false;
});
describe('when merge request is opened', () => {
- beforeEach(done => {
- vm.mr.isOpen = true;
- vm.$nextTick(done);
+ beforeEach((done) => {
+ wrapper.vm.mr.isOpen = true;
+ nextTick(done);
});
it('should not render collaboration status', () => {
- expect(vm.$el.textContent).not.toContain(COLLABORATION_MESSAGE);
+ expect(wrapper.text()).not.toContain(COLLABORATION_MESSAGE);
});
});
});
@@ -222,56 +221,56 @@ describe('mrWidgetOptions', () => {
describe('showMergePipelineForkWarning', () => {
describe('when the source project and target project are the same', () => {
- beforeEach(done => {
- Vue.set(vm.mr, 'mergePipelinesEnabled', true);
- Vue.set(vm.mr, 'sourceProjectId', 1);
- Vue.set(vm.mr, 'targetProjectId', 1);
- vm.$nextTick(done);
+ beforeEach((done) => {
+ Vue.set(wrapper.vm.mr, 'mergePipelinesEnabled', true);
+ Vue.set(wrapper.vm.mr, 'sourceProjectId', 1);
+ Vue.set(wrapper.vm.mr, 'targetProjectId', 1);
+ nextTick(done);
});
it('should be false', () => {
- expect(vm.showMergePipelineForkWarning).toEqual(false);
+ expect(wrapper.vm.showMergePipelineForkWarning).toEqual(false);
});
});
describe('when merge pipelines are not enabled', () => {
- beforeEach(done => {
- Vue.set(vm.mr, 'mergePipelinesEnabled', false);
- Vue.set(vm.mr, 'sourceProjectId', 1);
- Vue.set(vm.mr, 'targetProjectId', 2);
- vm.$nextTick(done);
+ beforeEach((done) => {
+ Vue.set(wrapper.vm.mr, 'mergePipelinesEnabled', false);
+ Vue.set(wrapper.vm.mr, 'sourceProjectId', 1);
+ Vue.set(wrapper.vm.mr, 'targetProjectId', 2);
+ nextTick(done);
});
it('should be false', () => {
- expect(vm.showMergePipelineForkWarning).toEqual(false);
+ expect(wrapper.vm.showMergePipelineForkWarning).toEqual(false);
});
});
describe('when merge pipelines are enabled _and_ the source project and target project are different', () => {
- beforeEach(done => {
- Vue.set(vm.mr, 'mergePipelinesEnabled', true);
- Vue.set(vm.mr, 'sourceProjectId', 1);
- Vue.set(vm.mr, 'targetProjectId', 2);
- vm.$nextTick(done);
+ beforeEach((done) => {
+ Vue.set(wrapper.vm.mr, 'mergePipelinesEnabled', true);
+ Vue.set(wrapper.vm.mr, 'sourceProjectId', 1);
+ Vue.set(wrapper.vm.mr, 'targetProjectId', 2);
+ nextTick(done);
});
it('should be true', () => {
- expect(vm.showMergePipelineForkWarning).toEqual(true);
+ expect(wrapper.vm.showMergePipelineForkWarning).toEqual(true);
});
});
});
describe('formattedHumanAccess', () => {
it('when user is a tool admin but not a member of project', () => {
- vm.mr.humanAccess = null;
+ wrapper.vm.mr.humanAccess = null;
- expect(vm.formattedHumanAccess).toEqual('');
+ expect(wrapper.vm.formattedHumanAccess).toEqual('');
});
it('when user is a member of the project', () => {
- vm.mr.humanAccess = 'Owner';
+ wrapper.vm.mr.humanAccess = 'Owner';
- expect(vm.formattedHumanAccess).toEqual('owner');
+ expect(wrapper.vm.formattedHumanAccess).toEqual('owner');
});
});
});
@@ -282,9 +281,9 @@ describe('mrWidgetOptions', () => {
let isCbExecuted;
beforeEach(() => {
- jest.spyOn(vm.service, 'checkStatus').mockReturnValue(returnPromise(mockData));
- jest.spyOn(vm.mr, 'setData').mockImplementation(() => {});
- jest.spyOn(vm, 'handleNotification').mockImplementation(() => {});
+ jest.spyOn(wrapper.vm.service, 'checkStatus').mockReturnValue(returnPromise(mockData));
+ jest.spyOn(wrapper.vm.mr, 'setData').mockImplementation(() => {});
+ jest.spyOn(wrapper.vm, 'handleNotification').mockImplementation(() => {});
isCbExecuted = false;
cb = () => {
@@ -293,12 +292,12 @@ describe('mrWidgetOptions', () => {
});
it('should tell service to check status if document is visible', () => {
- vm.checkStatus(cb);
+ wrapper.vm.checkStatus(cb);
- return vm.$nextTick().then(() => {
- expect(vm.service.checkStatus).toHaveBeenCalled();
- expect(vm.mr.setData).toHaveBeenCalled();
- expect(vm.handleNotification).toHaveBeenCalledWith(mockData);
+ return nextTick().then(() => {
+ expect(wrapper.vm.service.checkStatus).toHaveBeenCalled();
+ expect(wrapper.vm.mr.setData).toHaveBeenCalled();
+ expect(wrapper.vm.handleNotification).toHaveBeenCalledWith(mockData);
expect(isCbExecuted).toBeTruthy();
});
});
@@ -306,11 +305,11 @@ describe('mrWidgetOptions', () => {
describe('initPolling', () => {
it('should call SmartInterval', () => {
- vm.initPolling();
+ wrapper.vm.initPolling();
expect(SmartInterval).toHaveBeenCalledWith(
expect.objectContaining({
- callback: vm.checkStatus,
+ callback: wrapper.vm.checkStatus,
}),
);
});
@@ -318,11 +317,11 @@ describe('mrWidgetOptions', () => {
describe('initDeploymentsPolling', () => {
it('should call SmartInterval', () => {
- vm.initDeploymentsPolling();
+ wrapper.vm.initDeploymentsPolling();
expect(SmartInterval).toHaveBeenCalledWith(
expect.objectContaining({
- callback: vm.fetchPreMergeDeployments,
+ callback: wrapper.vm.fetchPreMergeDeployments,
}),
);
});
@@ -331,15 +330,15 @@ describe('mrWidgetOptions', () => {
describe('fetchDeployments', () => {
it('should fetch deployments', () => {
jest
- .spyOn(vm.service, 'fetchDeployments')
+ .spyOn(wrapper.vm.service, 'fetchDeployments')
.mockReturnValue(returnPromise([{ id: 1, status: SUCCESS }]));
- vm.fetchPreMergeDeployments();
+ wrapper.vm.fetchPreMergeDeployments();
- return vm.$nextTick().then(() => {
- expect(vm.service.fetchDeployments).toHaveBeenCalled();
- expect(vm.mr.deployments.length).toEqual(1);
- expect(vm.mr.deployments[0].id).toBe(1);
+ return nextTick().then(() => {
+ expect(wrapper.vm.service.fetchDeployments).toHaveBeenCalled();
+ expect(wrapper.vm.mr.deployments.length).toEqual(1);
+ expect(wrapper.vm.mr.deployments[0].id).toBe(1);
});
});
});
@@ -347,13 +346,13 @@ describe('mrWidgetOptions', () => {
describe('fetchActionsContent', () => {
it('should fetch content of Cherry Pick and Revert modals', () => {
jest
- .spyOn(vm.service, 'fetchMergeActionsContent')
+ .spyOn(wrapper.vm.service, 'fetchMergeActionsContent')
.mockReturnValue(returnPromise('hello world'));
- vm.fetchActionsContent();
+ wrapper.vm.fetchActionsContent();
- return vm.$nextTick().then(() => {
- expect(vm.service.fetchMergeActionsContent).toHaveBeenCalled();
+ return nextTick().then(() => {
+ expect(wrapper.vm.service.fetchMergeActionsContent).toHaveBeenCalled();
expect(document.body.textContent).toContain('hello world');
});
});
@@ -362,46 +361,46 @@ describe('mrWidgetOptions', () => {
describe('bindEventHubListeners', () => {
it.each`
event | method | methodArgs
- ${'MRWidgetUpdateRequested'} | ${'checkStatus'} | ${x => [x]}
- ${'MRWidgetRebaseSuccess'} | ${'checkStatus'} | ${x => [x, true]}
+ ${'MRWidgetUpdateRequested'} | ${'checkStatus'} | ${(x) => [x]}
+ ${'MRWidgetRebaseSuccess'} | ${'checkStatus'} | ${(x) => [x, true]}
${'FetchActionsContent'} | ${'fetchActionsContent'} | ${() => []}
${'EnablePolling'} | ${'resumePolling'} | ${() => []}
${'DisablePolling'} | ${'stopPolling'} | ${() => []}
`('should bind to $event', ({ event, method, methodArgs }) => {
- jest.spyOn(vm, method).mockImplementation();
+ jest.spyOn(wrapper.vm, method).mockImplementation();
const eventArg = {};
eventHub.$emit(event, eventArg);
- expect(vm[method]).toHaveBeenCalledWith(...methodArgs(eventArg));
+ expect(wrapper.vm[method]).toHaveBeenCalledWith(...methodArgs(eventArg));
});
it('should bind to SetBranchRemoveFlag', () => {
- expect(vm.mr.isRemovingSourceBranch).toBe(false);
+ expect(wrapper.vm.mr.isRemovingSourceBranch).toBe(false);
eventHub.$emit('SetBranchRemoveFlag', [true]);
- expect(vm.mr.isRemovingSourceBranch).toBe(true);
+ expect(wrapper.vm.mr.isRemovingSourceBranch).toBe(true);
});
it('should bind to FailedToMerge', () => {
- vm.mr.state = '';
- vm.mr.mergeError = '';
+ wrapper.vm.mr.state = '';
+ wrapper.vm.mr.mergeError = '';
const mergeError = 'Something bad happened!';
eventHub.$emit('FailedToMerge', mergeError);
- expect(vm.mr.state).toBe('failedToMerge');
- expect(vm.mr.mergeError).toBe(mergeError);
+ expect(wrapper.vm.mr.state).toBe('failedToMerge');
+ expect(wrapper.vm.mr.mergeError).toBe(mergeError);
});
it('should bind to UpdateWidgetData', () => {
- jest.spyOn(vm.mr, 'setData').mockImplementation();
+ jest.spyOn(wrapper.vm.mr, 'setData').mockImplementation();
const data = { ...mockData };
eventHub.$emit('UpdateWidgetData', data);
- expect(vm.mr.setData).toHaveBeenCalledWith(data);
+ expect(wrapper.vm.mr.setData).toHaveBeenCalledWith(data);
});
});
@@ -421,26 +420,18 @@ describe('mrWidgetOptions', () => {
document.body.removeChild(document.getElementById('favicon'));
});
- it('should call setFavicon method', done => {
- vm.mr.ciStatusFaviconPath = overlayDataUrl;
- vm.setFaviconHelper()
- .then(() => {
- /*
- It would be better if we'd could mock commonUtils.setFaviconURL
- with a spy and test that it was called. We are doing the following
- tests as a proxy to show that the function has been called
- */
- expect(faviconElement.getAttribute('href')).not.toEqual(null);
- expect(faviconElement.getAttribute('href')).not.toEqual(overlayDataUrl);
- expect(faviconElement.getAttribute('href')).not.toEqual(faviconDataUrl);
- })
- .then(done)
- .catch(done.fail);
+ it('should call setFavicon method', async () => {
+ wrapper.vm.mr.ciStatusFaviconPath = overlayDataUrl;
+
+ await wrapper.vm.setFaviconHelper();
+
+ expect(setFaviconOverlay).toHaveBeenCalledWith(overlayDataUrl);
});
- it('should not call setFavicon when there is no ciStatusFaviconPath', done => {
- vm.mr.ciStatusFaviconPath = null;
- vm.setFaviconHelper()
+ it('should not call setFavicon when there is no ciStatusFaviconPath', (done) => {
+ wrapper.vm.mr.ciStatusFaviconPath = null;
+ wrapper.vm
+ .setFaviconHelper()
.then(() => {
expect(faviconElement.getAttribute('href')).toEqual(null);
done();
@@ -459,12 +450,12 @@ describe('mrWidgetOptions', () => {
beforeEach(() => {
jest.spyOn(notify, 'notifyMe').mockImplementation(() => {});
- vm.mr.ciStatus = 'failed';
- vm.mr.gitlabLogo = 'logo.png';
+ wrapper.vm.mr.ciStatus = 'failed';
+ wrapper.vm.mr.gitlabLogo = 'logo.png';
});
it('should call notifyMe', () => {
- vm.handleNotification(data);
+ wrapper.vm.handleNotification(data);
expect(notify.notifyMe).toHaveBeenCalledWith(
'Pipeline running-label',
@@ -474,15 +465,15 @@ describe('mrWidgetOptions', () => {
});
it('should not call notifyMe if the status has not changed', () => {
- vm.mr.ciStatus = data.ci_status;
+ wrapper.vm.mr.ciStatus = data.ci_status;
- vm.handleNotification(data);
+ wrapper.vm.handleNotification(data);
expect(notify.notifyMe).not.toHaveBeenCalled();
});
it('should not notify if no pipeline provided', () => {
- vm.handleNotification({
+ wrapper.vm.handleNotification({
...data,
pipeline: undefined,
});
@@ -493,63 +484,65 @@ describe('mrWidgetOptions', () => {
describe('resumePolling', () => {
it('should call resume on pollingInterval', () => {
- jest.spyOn(vm.pollingInterval, 'resume').mockImplementation(() => {});
+ jest.spyOn(wrapper.vm.pollingInterval, 'resume').mockImplementation(() => {});
- vm.resumePolling();
+ wrapper.vm.resumePolling();
- expect(vm.pollingInterval.resume).toHaveBeenCalled();
+ expect(wrapper.vm.pollingInterval.resume).toHaveBeenCalled();
});
});
describe('stopPolling', () => {
it('should call stopTimer on pollingInterval', () => {
- jest.spyOn(vm.pollingInterval, 'stopTimer').mockImplementation(() => {});
+ jest.spyOn(wrapper.vm.pollingInterval, 'stopTimer').mockImplementation(() => {});
- vm.stopPolling();
+ wrapper.vm.stopPolling();
- expect(vm.pollingInterval.stopTimer).toHaveBeenCalled();
+ expect(wrapper.vm.pollingInterval.stopTimer).toHaveBeenCalled();
});
});
});
describe('rendering relatedLinks', () => {
- beforeEach(done => {
- vm.mr.relatedLinks = {
- assignToMe: null,
- closing: `
- <a class="close-related-link" href="#">
- Close
- </a>
- `,
- mentioned: '',
- };
- Vue.nextTick(done);
+ beforeEach(() => {
+ createComponent({
+ ...mockData,
+ issues_links: {
+ closing: `
+ <a class="close-related-link" href="#">
+ Close
+ </a>
+ `,
+ },
+ });
+
+ return nextTick();
});
it('renders if there are relatedLinks', () => {
- expect(vm.$el.querySelector('.close-related-link')).toBeDefined();
+ expect(wrapper.find('.close-related-link').exists()).toBe(true);
});
- it('does not render if state is nothingToMerge', done => {
- vm.mr.state = stateKey.nothingToMerge;
- Vue.nextTick(() => {
- expect(vm.$el.querySelector('.close-related-link')).toBeNull();
+ it('does not render if state is nothingToMerge', (done) => {
+ wrapper.vm.mr.state = stateKey.nothingToMerge;
+ nextTick(() => {
+ expect(wrapper.find('.close-related-link').exists()).toBe(false);
done();
});
});
});
describe('rendering source branch removal status', () => {
- it('renders when user cannot remove branch and branch should be removed', done => {
- vm.mr.canRemoveSourceBranch = false;
- vm.mr.shouldRemoveSourceBranch = true;
- vm.mr.state = 'readyToMerge';
+ it('renders when user cannot remove branch and branch should be removed', (done) => {
+ wrapper.vm.mr.canRemoveSourceBranch = false;
+ wrapper.vm.mr.shouldRemoveSourceBranch = true;
+ wrapper.vm.mr.state = 'readyToMerge';
- vm.$nextTick(() => {
- const tooltip = vm.$el.querySelector('[data-testid="question-o-icon"]');
+ nextTick(() => {
+ const tooltip = wrapper.find('[data-testid="question-o-icon"]');
- expect(vm.$el.textContent).toContain('Deletes source branch');
- expect(tooltip.getAttribute('title')).toBe(
+ expect(wrapper.text()).toContain('Deletes source branch');
+ expect(tooltip.attributes('title')).toBe(
'A user with write access to the source branch selected this option',
);
@@ -557,14 +550,14 @@ describe('mrWidgetOptions', () => {
});
});
- it('does not render in merged state', done => {
- vm.mr.canRemoveSourceBranch = false;
- vm.mr.shouldRemoveSourceBranch = true;
- vm.mr.state = 'merged';
+ it('does not render in merged state', (done) => {
+ wrapper.vm.mr.canRemoveSourceBranch = false;
+ wrapper.vm.mr.shouldRemoveSourceBranch = true;
+ wrapper.vm.mr.state = 'merged';
- vm.$nextTick(() => {
- expect(vm.$el.textContent).toContain('The source branch has been deleted');
- expect(vm.$el.textContent).not.toContain('Deletes source branch');
+ nextTick(() => {
+ expect(wrapper.text()).toContain('The source branch has been deleted');
+ expect(wrapper.text()).not.toContain('Deletes source branch');
done();
});
@@ -601,8 +594,8 @@ describe('mrWidgetOptions', () => {
status: SUCCESS,
};
- beforeEach(done => {
- vm.mr.deployments.push(
+ beforeEach((done) => {
+ wrapper.vm.mr.deployments.push(
{
...deploymentMockData,
},
@@ -612,33 +605,32 @@ describe('mrWidgetOptions', () => {
},
);
- vm.$nextTick(done);
+ nextTick(done);
});
it('renders multiple deployments', () => {
- expect(vm.$el.querySelectorAll('.deploy-heading').length).toBe(2);
+ expect(wrapper.findAll('.deploy-heading').length).toBe(2);
});
it('renders dropdown with multiple file changes', () => {
expect(
- vm.$el
- .querySelector('.js-mr-wigdet-deployment-dropdown')
- .querySelectorAll('.js-filtered-dropdown-result').length,
+ wrapper.find('.js-mr-wigdet-deployment-dropdown').findAll('.js-filtered-dropdown-result')
+ .length,
).toEqual(changes.length);
});
});
describe('code quality widget', () => {
it('renders the component', () => {
- expect(vm.$el.querySelector('.js-codequality-widget')).toExist();
+ expect(wrapper.find('.js-codequality-widget').exists()).toBe(true);
});
});
describe('pipeline for target branch after merge', () => {
describe('with information for target branch pipeline', () => {
- beforeEach(done => {
- vm.mr.state = 'merged';
- vm.mr.mergePipeline = {
+ beforeEach((done) => {
+ wrapper.vm.mr.state = 'merged';
+ wrapper.vm.mr.mergePipeline = {
id: 127,
user: {
id: 1,
@@ -744,16 +736,16 @@ describe('mrWidgetOptions', () => {
},
cancel_path: '/root/ci-web-terminal/pipelines/127/cancel',
};
- vm.$nextTick(done);
+ nextTick(done);
});
it('renders pipeline block', () => {
- expect(vm.$el.querySelector('.js-post-merge-pipeline')).not.toBeNull();
+ expect(wrapper.find('.js-post-merge-pipeline').exists()).toBe(true);
});
describe('with post merge deployments', () => {
- beforeEach(done => {
- vm.mr.postMergeDeployments = [
+ beforeEach((done) => {
+ wrapper.vm.mr.postMergeDeployments = [
{
id: 15,
name: 'review/diplo',
@@ -785,46 +777,46 @@ describe('mrWidgetOptions', () => {
},
];
- vm.$nextTick(done);
+ nextTick(done);
});
it('renders post deployment information', () => {
- expect(vm.$el.querySelector('.js-post-deployment')).not.toBeNull();
+ expect(wrapper.find('.js-post-deployment').exists()).toBe(true);
});
});
});
describe('without information for target branch pipeline', () => {
- beforeEach(done => {
- vm.mr.state = 'merged';
+ beforeEach((done) => {
+ wrapper.vm.mr.state = 'merged';
- vm.$nextTick(done);
+ nextTick(done);
});
it('does not render pipeline block', () => {
- expect(vm.$el.querySelector('.js-post-merge-pipeline')).toBeNull();
+ expect(wrapper.find('.js-post-merge-pipeline').exists()).toBe(false);
});
});
describe('when state is not merged', () => {
- beforeEach(done => {
- vm.mr.state = 'archived';
+ beforeEach((done) => {
+ wrapper.vm.mr.state = 'archived';
- vm.$nextTick(done);
+ nextTick(done);
});
it('does not render pipeline block', () => {
- expect(vm.$el.querySelector('.js-post-merge-pipeline')).toBeNull();
+ expect(wrapper.find('.js-post-merge-pipeline').exists()).toBe(false);
});
it('does not render post deployment information', () => {
- expect(vm.$el.querySelector('.js-post-deployment')).toBeNull();
+ expect(wrapper.find('.js-post-deployment').exists()).toBe(false);
});
});
});
it('should not suggest pipelines when feature flag is not present', () => {
- expect(findSuggestPipeline()).toBeNull();
+ expect(findSuggestPipeline().exists()).toBe(false);
});
});
@@ -853,11 +845,11 @@ describe('mrWidgetOptions', () => {
if (shouldRender) {
it('renders', () => {
- expect(findSecurityMrWidget()).toEqual(expect.any(HTMLElement));
+ expect(findSecurityMrWidget().exists()).toBe(true);
});
} else {
it('does not render', () => {
- expect(findSecurityMrWidget()).toBeNull();
+ expect(findSecurityMrWidget().exists()).toBe(false);
});
}
});
@@ -866,41 +858,35 @@ describe('mrWidgetOptions', () => {
describe('suggestPipeline', () => {
beforeEach(() => {
mock.onAny().reply(200);
-
- // This is needed because some grandchildren Bootstrap components throw warnings
- // https://gitlab.com/gitlab-org/gitlab/issues/208458
- jest.spyOn(console, 'warn').mockImplementation();
});
describe('given feature flag is enabled', () => {
beforeEach(() => {
createComponent();
- vm.mr.hasCI = false;
+ wrapper.vm.mr.hasCI = false;
});
it('should suggest pipelines when none exist', () => {
- expect(findSuggestPipeline()).toEqual(expect.any(Element));
+ expect(findSuggestPipeline().exists()).toBe(true);
});
it.each([
{ isDismissedSuggestPipeline: true },
{ mergeRequestAddCiConfigPath: null },
{ hasCI: true },
- ])('with %s, should not suggest pipeline', async obj => {
- Object.assign(vm.mr, obj);
+ ])('with %s, should not suggest pipeline', async (obj) => {
+ Object.assign(wrapper.vm.mr, obj);
- await vm.$nextTick();
+ await nextTick();
- expect(findSuggestPipeline()).toBeNull();
+ expect(findSuggestPipeline().exists()).toBe(false);
});
it('should allow dismiss of the suggest pipeline message', async () => {
- findSuggestPipelineButton().click();
-
- await vm.$nextTick();
+ await findSuggestPipelineButton().trigger('click');
- expect(findSuggestPipeline()).toBeNull();
+ expect(findSuggestPipeline().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/vue_mr_widget/stores/artifacts_list/actions_spec.js b/spec/frontend/vue_mr_widget/stores/artifacts_list/actions_spec.js
index 026ea0e4d0a..1dfa7564535 100644
--- a/spec/frontend/vue_mr_widget/stores/artifacts_list/actions_spec.js
+++ b/spec/frontend/vue_mr_widget/stores/artifacts_list/actions_spec.js
@@ -22,7 +22,7 @@ describe('Artifacts App Store Actions', () => {
});
describe('setEndpoint', () => {
- it('should commit SET_ENDPOINT mutation', done => {
+ it('should commit SET_ENDPOINT mutation', (done) => {
testAction(
setEndpoint,
'endpoint.json',
@@ -35,7 +35,7 @@ describe('Artifacts App Store Actions', () => {
});
describe('requestArtifacts', () => {
- it('should commit REQUEST_ARTIFACTS mutation', done => {
+ it('should commit REQUEST_ARTIFACTS mutation', (done) => {
testAction(
requestArtifacts,
null,
@@ -62,7 +62,7 @@ describe('Artifacts App Store Actions', () => {
});
describe('success', () => {
- it('dispatches requestArtifacts and receiveArtifactsSuccess ', done => {
+ it('dispatches requestArtifacts and receiveArtifactsSuccess ', (done) => {
mock.onGet(`${TEST_HOST}/endpoint.json`).replyOnce(200, [
{
text: 'result.txt',
@@ -106,7 +106,7 @@ describe('Artifacts App Store Actions', () => {
mock.onGet(`${TEST_HOST}/endpoint.json`).reply(500);
});
- it('dispatches requestArtifacts and receiveArtifactsError ', done => {
+ it('dispatches requestArtifacts and receiveArtifactsError ', (done) => {
testAction(
fetchArtifacts,
null,
@@ -127,7 +127,7 @@ describe('Artifacts App Store Actions', () => {
});
describe('receiveArtifactsSuccess', () => {
- it('should commit RECEIVE_ARTIFACTS_SUCCESS mutation with 200', done => {
+ it('should commit RECEIVE_ARTIFACTS_SUCCESS mutation with 200', (done) => {
testAction(
receiveArtifactsSuccess,
{ data: { summary: {} }, status: 200 },
@@ -138,7 +138,7 @@ describe('Artifacts App Store Actions', () => {
);
});
- it('should not commit RECEIVE_ARTIFACTS_SUCCESS mutation with 204', done => {
+ it('should not commit RECEIVE_ARTIFACTS_SUCCESS mutation with 204', (done) => {
testAction(
receiveArtifactsSuccess,
{ data: { summary: {} }, status: 204 },
@@ -151,7 +151,7 @@ describe('Artifacts App Store Actions', () => {
});
describe('receiveArtifactsError', () => {
- it('should commit RECEIVE_ARTIFACTS_ERROR mutation', done => {
+ it('should commit RECEIVE_ARTIFACTS_ERROR mutation', (done) => {
testAction(
receiveArtifactsError,
null,
diff --git a/spec/frontend/vue_mr_widget/stores/artifacts_list/getters_spec.js b/spec/frontend/vue_mr_widget/stores/artifacts_list/getters_spec.js
index 62ee6f5f189..dc90fef63c6 100644
--- a/spec/frontend/vue_mr_widget/stores/artifacts_list/getters_spec.js
+++ b/spec/frontend/vue_mr_widget/stores/artifacts_list/getters_spec.js
@@ -1,6 +1,6 @@
import { title } from '~/vue_merge_request_widget/stores/artifacts_list/getters';
import state from '~/vue_merge_request_widget/stores/artifacts_list/state';
-import { artifactsList } from '../../components/mock_data';
+import { artifacts } from '../../mock_data';
describe('Artifacts Store Getters', () => {
let localState;
@@ -24,7 +24,7 @@ describe('Artifacts Store Getters', () => {
});
describe('when it has artifacts', () => {
it('returns artifacts message', () => {
- localState.artifacts = artifactsList;
+ localState.artifacts = artifacts;
expect(title(localState)).toBe('View 2 exposed artifacts');
});
});
diff --git a/spec/frontend/vue_mr_widget/stores/mr_widget_store_spec.js b/spec/frontend/vue_mr_widget/stores/mr_widget_store_spec.js
index 8b2c10ec50a..cfc846075ea 100644
--- a/spec/frontend/vue_mr_widget/stores/mr_widget_store_spec.js
+++ b/spec/frontend/vue_mr_widget/stores/mr_widget_store_spec.js
@@ -156,7 +156,7 @@ describe('MergeRequestStore', () => {
it.each(['sast_comparison_path', 'secret_scanning_comparison_path'])(
'should set %s path',
- property => {
+ (property) => {
// Ensure something is set in the mock data
expect(property in mockData).toBe(true);
const expectedValue = mockData[property];
diff --git a/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
index 63d38e7587a..1bf757ea312 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
@@ -4,6 +4,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
<gl-dropdown-stub
category="primary"
headertext=""
+ hideheaderborder="true"
right="true"
size="medium"
text="Clone"
diff --git a/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap
index dd88ba9a6fb..c4f351eb58d 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap
@@ -4,6 +4,7 @@ exports[`SplitButton renders actionItems 1`] = `
<gl-dropdown-stub
category="primary"
headertext=""
+ hideheaderborder="true"
menu-class=""
size="medium"
split="true"
diff --git a/spec/frontend/vue_shared/components/actions_button_spec.js b/spec/frontend/vue_shared/components/actions_button_spec.js
index 6e7ed9d612b..2ac4bfda29a 100644
--- a/spec/frontend/vue_shared/components/actions_button_spec.js
+++ b/spec/frontend/vue_shared/components/actions_button_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlDropdown, GlButton } from '@gitlab/ui';
+import { GlDropdown, GlDropdownDivider, GlButton } from '@gitlab/ui';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import ActionsButton from '~/vue_shared/components/actions_button.vue';
@@ -40,7 +40,7 @@ describe('Actions button component', () => {
wrapper.destroy();
});
- const getTooltip = child => {
+ const getTooltip = (child) => {
const directiveBinding = getBinding(child.element, 'gl-tooltip');
return directiveBinding.value;
@@ -52,8 +52,8 @@ describe('Actions button component', () => {
const parseDropdownItems = () =>
findDropdown()
.findAll('gl-dropdown-item-stub,gl-dropdown-divider-stub')
- .wrappers.map(x => {
- if (x.is('gl-dropdown-divider-stub')) {
+ .wrappers.map((x) => {
+ if (x.is(GlDropdownDivider)) {
return { type: 'divider' };
}
diff --git a/spec/frontend/vue_shared/components/alert_details_table_spec.js b/spec/frontend/vue_shared/components/alert_details_table_spec.js
index ef7815f9e9e..49b82cb4d4e 100644
--- a/spec/frontend/vue_shared/components/alert_details_table_spec.js
+++ b/spec/frontend/vue_shared/components/alert_details_table_spec.js
@@ -45,13 +45,13 @@ describe('AlertDetails', () => {
const findTableComponent = () => wrapper.find(GlTable);
const findTableKeys = () => findTableComponent().findAll('tbody td:first-child');
- const findTableFieldValueByKey = fieldKey =>
+ const findTableFieldValueByKey = (fieldKey) =>
findTableComponent()
.findAll('tbody tr')
- .filter(row => row.text().includes(fieldKey))
+ .filter((row) => row.text().includes(fieldKey))
.at(0)
.find('td:nth-child(2)');
- const findTableField = (fields, fieldName) => fields.filter(row => row.text() === fieldName);
+ const findTableField = (fields, fieldName) => fields.filter((row) => row.text() === fieldName);
describe('Alert details', () => {
describe('empty state', () => {
diff --git a/spec/frontend/vue_shared/components/awards_list_spec.js b/spec/frontend/vue_shared/components/awards_list_spec.js
index d20de81c446..550ac4a9d38 100644
--- a/spec/frontend/vue_shared/components/awards_list_spec.js
+++ b/spec/frontend/vue_shared/components/awards_list_spec.js
@@ -58,10 +58,10 @@ describe('vue_shared/components/awards_list', () => {
wrapper = mount(AwardsList, { propsData: props });
};
- const matchingEmojiTag = name => expect.stringMatching(`gl-emoji data-name="${name}"`);
+ const matchingEmojiTag = (name) => expect.stringMatching(`gl-emoji data-name="${name}"`);
const findAwardButtons = () => wrapper.findAll('[data-testid="award-button"]');
const findAwardsData = () =>
- findAwardButtons().wrappers.map(x => {
+ findAwardButtons().wrappers.map((x) => {
return {
classes: x.classes(),
title: x.attributes('title'),
@@ -135,9 +135,7 @@ describe('vue_shared/components/awards_list', () => {
it('with award clicked, it emits award', () => {
expect(wrapper.emitted().award).toBeUndefined();
- findAwardButtons()
- .at(2)
- .vm.$emit('click');
+ findAwardButtons().at(2).vm.$emit('click');
expect(wrapper.emitted().award).toEqual([[EMOJI_SMILE]]);
});
@@ -162,9 +160,7 @@ describe('vue_shared/components/awards_list', () => {
it('when clicked, it emits award as number', () => {
expect(wrapper.emitted().award).toBeUndefined();
- findAwardButtons()
- .at(0)
- .vm.$emit('click');
+ findAwardButtons().at(0).vm.$emit('click');
expect(wrapper.emitted().award).toEqual([[Number(EMOJI_100)]]);
});
@@ -209,7 +205,7 @@ describe('vue_shared/components/awards_list', () => {
const buttons = findAwardButtons();
expect(buttons.length).toBe(7);
- expect(buttons.wrappers.every(x => x.classes('disabled'))).toBe(true);
+ expect(buttons.wrappers.every((x) => x.classes('disabled'))).toBe(true);
});
});
diff --git a/spec/frontend/vue_shared/components/ci_badge_link_spec.js b/spec/frontend/vue_shared/components/ci_badge_link_spec.js
index f656bb0b60d..a633ef65aa4 100644
--- a/spec/frontend/vue_shared/components/ci_badge_link_spec.js
+++ b/spec/frontend/vue_shared/components/ci_badge_link_spec.js
@@ -81,7 +81,7 @@ describe('CI Badge Link Component', () => {
});
it('should render each status badge', () => {
- Object.keys(statuses).map(status => {
+ Object.keys(statuses).map((status) => {
vm = mountComponent(CIBadge, { status: statuses[status] });
expect(vm.$el.getAttribute('href')).toEqual(statuses[status].details_path);
diff --git a/spec/frontend/vue_shared/components/clipboard_button_spec.js b/spec/frontend/vue_shared/components/clipboard_button_spec.js
index ac0be1537b7..0d4266ce82f 100644
--- a/spec/frontend/vue_shared/components/clipboard_button_spec.js
+++ b/spec/frontend/vue_shared/components/clipboard_button_spec.js
@@ -1,6 +1,7 @@
import { mount } from '@vue/test-utils';
import { GlButton } from '@gitlab/ui';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
+import initCopyToClipboard from '~/behaviors/copy_to_clipboard';
describe('clipboard button', () => {
let wrapper;
@@ -87,4 +88,25 @@ describe('clipboard button', () => {
expect(onClick).toHaveBeenCalled();
});
+
+ describe('integration', () => {
+ it('actually copies to clipboard', () => {
+ initCopyToClipboard();
+
+ document.execCommand = () => {};
+ jest.spyOn(document, 'execCommand').mockImplementation(() => true);
+
+ createWrapper(
+ {
+ text: 'copy me',
+ title: 'Copy this value',
+ },
+ { attachTo: document.body },
+ );
+
+ findButton().trigger('click');
+
+ expect(document.execCommand).toHaveBeenCalledWith('copy');
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/color_picker/color_picker_spec.js b/spec/frontend/vue_shared/components/color_picker/color_picker_spec.js
index a50a4b742b3..c8fe6c3131c 100644
--- a/spec/frontend/vue_shared/components/color_picker/color_picker_spec.js
+++ b/spec/frontend/vue_shared/components/color_picker/color_picker_spec.js
@@ -130,9 +130,7 @@ describe('ColorPicker', () => {
it('has preset color selected', async () => {
createComponent();
- await presetColors()
- .at(0)
- .trigger('click');
+ await presetColors().at(0).trigger('click');
expect(wrapper.vm.$data.selectedColor).toBe(setColor);
});
diff --git a/spec/frontend/vue_shared/components/commit_spec.js b/spec/frontend/vue_shared/components/commit_spec.js
index 9b5c0941a0d..6f3c97f7194 100644
--- a/spec/frontend/vue_shared/components/commit_spec.js
+++ b/spec/frontend/vue_shared/components/commit_spec.js
@@ -7,14 +7,14 @@ describe('Commit component', () => {
let props;
let wrapper;
- const findIcon = name => {
- const icons = wrapper.findAll(GlIcon).filter(c => c.attributes('name') === name);
+ const findIcon = (name) => {
+ const icons = wrapper.findAll(GlIcon).filter((c) => c.attributes('name') === name);
return icons.length ? icons.at(0) : icons;
};
const findUserAvatar = () => wrapper.find(UserAvatarLink);
- const createComponent = propsData => {
+ const createComponent = (propsData) => {
wrapper = shallowMount(CommitComponent, {
propsData,
});
@@ -43,12 +43,7 @@ describe('Commit component', () => {
},
});
- expect(
- wrapper
- .find('.icon-container')
- .find(GlIcon)
- .exists(),
- ).toBe(true);
+ expect(wrapper.find('.icon-container').find(GlIcon).exists()).toBe(true);
});
describe('Given all the props', () => {
diff --git a/spec/frontend/vue_shared/components/confirm_modal_spec.js b/spec/frontend/vue_shared/components/confirm_modal_spec.js
index 96ccf56cbc6..db8d0674121 100644
--- a/spec/frontend/vue_shared/components/confirm_modal_spec.js
+++ b/spec/frontend/vue_shared/components/confirm_modal_spec.js
@@ -53,7 +53,7 @@ describe('vue_shared/components/confirm_modal', () => {
const findFormData = () =>
findForm()
.findAll('input')
- .wrappers.map(x => ({ name: x.attributes('name'), value: x.attributes('value') }));
+ .wrappers.map((x) => ({ name: x.attributes('name'), value: x.attributes('value') }));
describe('template', () => {
describe('when modal data is set', () => {
diff --git a/spec/frontend/vue_shared/components/content_viewer/viewers/markdown_viewer_spec.js b/spec/frontend/vue_shared/components/content_viewer/viewers/markdown_viewer_spec.js
index c75891c9ed3..22ee6acfed8 100644
--- a/spec/frontend/vue_shared/components/content_viewer/viewers/markdown_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/content_viewer/viewers/markdown_viewer_spec.js
@@ -9,7 +9,7 @@ describe('MarkdownViewer', () => {
let wrapper;
let mock;
- const createComponent = props => {
+ const createComponent = (props) => {
wrapper = mount(MarkdownViewer, {
propsData: {
...props,
diff --git a/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_lib_spec.js b/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_lib_spec.js
index e46c63a1a32..10eacff630d 100644
--- a/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_lib_spec.js
+++ b/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_lib_spec.js
@@ -71,7 +71,7 @@ describe('date time picker lib', () => {
'2019-19-23',
'2019-09-23 x',
'2019-09-29 24:24:24',
- ].forEach(input => {
+ ].forEach((input) => {
it(`throws error for invalid input like ${input}`, () => {
expect(() => inputStringToIsoDate(input)).toThrow();
});
diff --git a/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_spec.js b/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_spec.js
index afd1f1a3123..33667a1bb71 100644
--- a/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_spec.js
+++ b/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_spec.js
@@ -17,7 +17,7 @@ describe('DateTimePicker', () => {
const applyButtonElement = () => wrapper.find('button.btn-success').element;
const findQuickRangeItems = () => wrapper.findAll('.dropdown-item');
- const createComponent = props => {
+ const createComponent = (props) => {
wrapper = mount(DateTimePicker, {
propsData: {
...props,
@@ -235,7 +235,7 @@ describe('DateTimePicker', () => {
it('unchecks quick range when text is input is clicked', () => {
const findActiveItems = () =>
- findQuickRangeItems().filter(w => w.classes().includes('active'));
+ findQuickRangeItems().filter((w) => w.classes().includes('active'));
expect(findActiveItems().length).toBe(1);
diff --git a/spec/frontend/vue_shared/components/deployment_instance/deployment_instance_spec.js b/spec/frontend/vue_shared/components/deployment_instance/deployment_instance_spec.js
new file mode 100644
index 00000000000..b812ced72c9
--- /dev/null
+++ b/spec/frontend/vue_shared/components/deployment_instance/deployment_instance_spec.js
@@ -0,0 +1,103 @@
+import { shallowMount } from '@vue/test-utils';
+import DeployBoardInstance from '~/vue_shared/components/deployment_instance.vue';
+import { folder } from './mock_data';
+
+describe('Deploy Board Instance', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) =>
+ shallowMount(DeployBoardInstance, {
+ propsData: {
+ status: 'succeeded',
+ ...props,
+ },
+ });
+
+ describe('as a non-canary deployment', () => {
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('should render a div with the correct css status and tooltip data', () => {
+ wrapper = createComponent({
+ logsPath: folder.logs_path,
+ tooltipText: 'This is a pod',
+ });
+
+ expect(wrapper.classes('deployment-instance-succeeded')).toBe(true);
+ expect(wrapper.attributes('title')).toEqual('This is a pod');
+ });
+
+ it('should render a div without tooltip data', (done) => {
+ wrapper = createComponent({
+ status: 'deploying',
+ tooltipText: '',
+ });
+
+ wrapper.vm.$nextTick(() => {
+ expect(wrapper.classes('deployment-instance-deploying')).toBe(true);
+ expect(wrapper.attributes('title')).toEqual('');
+ done();
+ });
+ });
+
+ it('should have a log path computed with a pod name as a parameter', () => {
+ wrapper = createComponent({
+ logsPath: folder.logs_path,
+ podName: 'tanuki-1',
+ });
+
+ expect(wrapper.vm.computedLogPath).toEqual(
+ '/root/review-app/-/logs?environment_name=foo&pod_name=tanuki-1',
+ );
+ });
+ });
+
+ describe('as a canary deployment', () => {
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('should render a div with canary class when stable prop is provided as false', (done) => {
+ wrapper = createComponent({
+ stable: false,
+ });
+
+ wrapper.vm.$nextTick(() => {
+ expect(wrapper.classes('deployment-instance-canary')).toBe(true);
+ done();
+ });
+ });
+ });
+
+ describe('as a legend item', () => {
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('should not be a link without a logsPath prop', (done) => {
+ wrapper = createComponent({
+ stable: false,
+ logsPath: '',
+ });
+
+ wrapper.vm.$nextTick(() => {
+ expect(wrapper.vm.computedLogPath).toBeNull();
+ expect(wrapper.vm.isLink).toBeFalsy();
+ done();
+ });
+ });
+
+ it('should render a link without href if path is not passed', () => {
+ wrapper = createComponent();
+
+ expect(wrapper.attributes('href')).toBeUndefined();
+ });
+
+ it('should not have a tooltip', () => {
+ wrapper = createComponent();
+
+ expect(wrapper.attributes('title')).toEqual('');
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/deployment_instance/mock_data.js b/spec/frontend/vue_shared/components/deployment_instance/mock_data.js
new file mode 100644
index 00000000000..6618c57948c
--- /dev/null
+++ b/spec/frontend/vue_shared/components/deployment_instance/mock_data.js
@@ -0,0 +1,144 @@
+export const environmentsList = [
+ {
+ name: 'DEV',
+ size: 1,
+ id: 7,
+ state: 'available',
+ external_url: null,
+ environment_type: null,
+ last_deployment: null,
+ has_stop_action: false,
+ environment_path: '/root/review-app/environments/7',
+ stop_path: '/root/review-app/environments/7/stop',
+ created_at: '2017-01-31T10:53:46.894Z',
+ updated_at: '2017-01-31T10:53:46.894Z',
+ project_path: '/root/review-app',
+ rollout_status: {},
+ },
+ {
+ folderName: 'build',
+ size: 5,
+ id: 12,
+ name: 'build/update-README',
+ state: 'available',
+ external_url: null,
+ environment_type: 'build',
+ last_deployment: null,
+ has_stop_action: false,
+ environment_path: '/root/review-app/environments/12',
+ stop_path: '/root/review-app/environments/12/stop',
+ created_at: '2017-02-01T19:42:18.400Z',
+ updated_at: '2017-02-01T19:42:18.400Z',
+ project_path: '/root/review-app',
+ rollout_status: {},
+ },
+];
+
+export const serverData = [
+ {
+ name: 'DEV',
+ size: 1,
+ latest: {
+ id: 7,
+ name: 'DEV',
+ state: 'available',
+ external_url: null,
+ environment_type: null,
+ last_deployment: null,
+ has_stop_action: false,
+ environment_path: '/root/review-app/environments/7',
+ stop_path: '/root/review-app/environments/7/stop',
+ created_at: '2017-01-31T10:53:46.894Z',
+ updated_at: '2017-01-31T10:53:46.894Z',
+ rollout_status: {},
+ },
+ },
+ {
+ name: 'build',
+ size: 5,
+ latest: {
+ id: 12,
+ name: 'build/update-README',
+ state: 'available',
+ external_url: null,
+ environment_type: 'build',
+ last_deployment: null,
+ has_stop_action: false,
+ environment_path: '/root/review-app/environments/12',
+ stop_path: '/root/review-app/environments/12/stop',
+ created_at: '2017-02-01T19:42:18.400Z',
+ updated_at: '2017-02-01T19:42:18.400Z',
+ },
+ },
+ {
+ name: 'build',
+ size: 1,
+ latest: {
+ id: 12,
+ name: 'build/update-README',
+ state: 'available',
+ external_url: null,
+ environment_type: 'build',
+ last_deployment: null,
+ has_stop_action: false,
+ environment_path: '/root/review-app/environments/12',
+ stop_path: '/root/review-app/environments/12/stop',
+ created_at: '2017-02-01T19:42:18.400Z',
+ updated_at: '2017-02-01T19:42:18.400Z',
+ },
+ },
+];
+
+export const deployBoardMockData = {
+ instances: [
+ { status: 'finished', tooltip: 'tanuki-2334 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2335 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2336 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2337 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2338 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2339 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2340 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2334 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2335 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2336 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2337 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2338 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2339 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'finished', tooltip: 'tanuki-2340 Finished', pod_name: 'production-tanuki-1' },
+ { status: 'deploying', tooltip: 'tanuki-2341 Deploying', pod_name: 'production-tanuki-1' },
+ { status: 'deploying', tooltip: 'tanuki-2342 Deploying', pod_name: 'production-tanuki-1' },
+ { status: 'deploying', tooltip: 'tanuki-2343 Deploying', pod_name: 'production-tanuki-1' },
+ { status: 'failed', tooltip: 'tanuki-2344 Failed', pod_name: 'production-tanuki-1' },
+ { status: 'ready', tooltip: 'tanuki-2345 Ready', pod_name: 'production-tanuki-1' },
+ { status: 'ready', tooltip: 'tanuki-2346 Ready', pod_name: 'production-tanuki-1' },
+ { status: 'preparing', tooltip: 'tanuki-2348 Preparing', pod_name: 'production-tanuki-1' },
+ { status: 'preparing', tooltip: 'tanuki-2349 Preparing', pod_name: 'production-tanuki-1' },
+ { status: 'preparing', tooltip: 'tanuki-2350 Preparing', pod_name: 'production-tanuki-1' },
+ { status: 'preparing', tooltip: 'tanuki-2353 Preparing', pod_name: 'production-tanuki-1' },
+ { status: 'waiting', tooltip: 'tanuki-2354 Waiting', pod_name: 'production-tanuki-1' },
+ { status: 'waiting', tooltip: 'tanuki-2355 Waiting', pod_name: 'production-tanuki-1' },
+ { status: 'waiting', tooltip: 'tanuki-2356 Waiting', pod_name: 'production-tanuki-1' },
+ ],
+ abort_url: 'url',
+ rollback_url: 'url',
+ completion: 100,
+ status: 'found',
+};
+
+export const folder = {
+ folderName: 'build',
+ size: 5,
+ id: 12,
+ name: 'build/update-README',
+ state: 'available',
+ external_url: null,
+ environment_type: 'build',
+ last_deployment: null,
+ has_stop_action: false,
+ environment_path: '/root/review-app/environments/12',
+ stop_path: '/root/review-app/environments/12/stop',
+ created_at: '2017-02-01T19:42:18.400Z',
+ updated_at: '2017-02-01T19:42:18.400Z',
+ rollout_status: {},
+ logs_path: '/root/review-app/-/logs?environment_name=foo',
+};
diff --git a/spec/frontend/vue_shared/components/deprecated_modal_2_spec.js b/spec/frontend/vue_shared/components/deprecated_modal_2_spec.js
deleted file mode 100644
index c37a44df6f8..00000000000
--- a/spec/frontend/vue_shared/components/deprecated_modal_2_spec.js
+++ /dev/null
@@ -1,258 +0,0 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import DeprecatedModal2 from '~/vue_shared/components/deprecated_modal_2.vue';
-
-const modalComponent = Vue.extend(DeprecatedModal2);
-
-describe('DeprecatedModal2', () => {
- let vm;
-
- afterEach(() => {
- vm.$destroy();
- });
-
- describe('props', () => {
- describe('with id', () => {
- const props = {
- id: 'my-modal',
- };
-
- beforeEach(() => {
- vm = mountComponent(modalComponent, props);
- });
-
- it('assigns the id to the modal', () => {
- expect(vm.$el.id).toBe(props.id);
- });
- });
-
- describe('without id', () => {
- beforeEach(() => {
- vm = mountComponent(modalComponent, {});
- });
-
- it('does not add an id attribute to the modal', () => {
- expect(vm.$el.hasAttribute('id')).toBe(false);
- });
- });
-
- describe('with headerTitleText', () => {
- const props = {
- headerTitleText: 'my title text',
- };
-
- beforeEach(() => {
- vm = mountComponent(modalComponent, props);
- });
-
- it('sets the modal title', () => {
- const modalTitle = vm.$el.querySelector('.modal-title');
-
- expect(modalTitle.innerHTML.trim()).toBe(props.headerTitleText);
- });
- });
-
- describe('with footerPrimaryButtonVariant', () => {
- const props = {
- footerPrimaryButtonVariant: 'danger',
- };
-
- beforeEach(() => {
- vm = mountComponent(modalComponent, props);
- });
-
- it('sets the primary button class', () => {
- const primaryButton = vm.$el.querySelector('.modal-footer button:last-of-type');
-
- expect(primaryButton).toHaveClass(`btn-${props.footerPrimaryButtonVariant}`);
- });
- });
-
- describe('with footerPrimaryButtonText', () => {
- const props = {
- footerPrimaryButtonText: 'my button text',
- };
-
- beforeEach(() => {
- vm = mountComponent(modalComponent, props);
- });
-
- it('sets the primary button text', () => {
- const primaryButton = vm.$el.querySelector('.js-modal-primary-action .gl-button-text');
-
- expect(primaryButton.innerHTML.trim()).toBe(props.footerPrimaryButtonText);
- });
- });
- });
-
- it('works with data-toggle="modal"', () => {
- setFixtures(`
- <button id="modal-button" data-toggle="modal" data-target="#my-modal"></button>
- <div id="modal-container"></div>
- `);
-
- const modalContainer = document.getElementById('modal-container');
- const modalButton = document.getElementById('modal-button');
- vm = mountComponent(
- modalComponent,
- {
- id: 'my-modal',
- },
- modalContainer,
- );
- const modalElement = document.getElementById('my-modal');
-
- modalButton.click();
-
- expect(modalElement).not.toHaveClass('show');
-
- // let the modal fade in
- jest.runOnlyPendingTimers();
-
- expect(modalElement).toHaveClass('show');
- });
-
- describe('methods', () => {
- const dummyEvent = 'not really an event';
-
- beforeEach(() => {
- vm = mountComponent(modalComponent, {});
- jest.spyOn(vm, '$emit').mockImplementation(() => {});
- });
-
- describe('emitCancel', () => {
- it('emits a cancel event', () => {
- vm.emitCancel(dummyEvent);
-
- expect(vm.$emit).toHaveBeenCalledWith('cancel', dummyEvent);
- });
- });
-
- describe('emitSubmit', () => {
- it('emits a submit event', () => {
- vm.emitSubmit(dummyEvent);
-
- expect(vm.$emit).toHaveBeenCalledWith('submit', dummyEvent);
- });
- });
-
- describe('opened', () => {
- it('emits a open event', () => {
- vm.opened();
-
- expect(vm.$emit).toHaveBeenCalledWith('open');
- });
- });
-
- describe('closed', () => {
- it('emits a closed event', () => {
- vm.closed();
-
- expect(vm.$emit).toHaveBeenCalledWith('closed');
- });
- });
- });
-
- describe('slots', () => {
- const slotContent = 'this should go into the slot';
-
- const modalWithSlot = slot => {
- return Vue.extend({
- components: {
- DeprecatedModal2,
- },
- render: h =>
- h('deprecated-modal-2', [slot ? h('template', { slot }, slotContent) : slotContent]),
- });
- };
-
- describe('default slot', () => {
- beforeEach(() => {
- vm = mountComponent(modalWithSlot());
- });
-
- it('sets the modal body', () => {
- const modalBody = vm.$el.querySelector('.modal-body');
-
- expect(modalBody.innerHTML).toBe(slotContent);
- });
- });
-
- describe('header slot', () => {
- beforeEach(() => {
- vm = mountComponent(modalWithSlot('header'));
- });
-
- it('sets the modal header', () => {
- const modalHeader = vm.$el.querySelector('.modal-header');
-
- expect(modalHeader.innerHTML).toBe(slotContent);
- });
- });
-
- describe('title slot', () => {
- beforeEach(() => {
- vm = mountComponent(modalWithSlot('title'));
- });
-
- it('sets the modal title', () => {
- const modalTitle = vm.$el.querySelector('.modal-title');
-
- expect(modalTitle.innerHTML).toBe(slotContent);
- });
- });
-
- describe('footer slot', () => {
- beforeEach(() => {
- vm = mountComponent(modalWithSlot('footer'));
- });
-
- it('sets the modal footer', () => {
- const modalFooter = vm.$el.querySelector('.modal-footer');
-
- expect(modalFooter.innerHTML).toBe(slotContent);
- });
- });
- });
-
- describe('handling sizes', () => {
- it('should render modal-sm', () => {
- vm = mountComponent(modalComponent, {
- modalSize: 'sm',
- });
-
- expect(vm.$el.querySelector('.modal-dialog').classList.contains('modal-sm')).toEqual(true);
- });
-
- it('should render modal-lg', () => {
- vm = mountComponent(modalComponent, {
- modalSize: 'lg',
- });
-
- expect(vm.$el.querySelector('.modal-dialog').classList.contains('modal-lg')).toEqual(true);
- });
-
- it('should render modal-xl', () => {
- vm = mountComponent(modalComponent, {
- modalSize: 'xl',
- });
-
- expect(vm.$el.querySelector('.modal-dialog').classList.contains('modal-xl')).toEqual(true);
- });
-
- it('should not add modal size classes when md size is passed', () => {
- vm = mountComponent(modalComponent, {
- modalSize: 'md',
- });
-
- expect(vm.$el.querySelector('.modal-dialog').classList.contains('modal-md')).toEqual(false);
- });
-
- it('should not add modal size classes by default', () => {
- vm = mountComponent(modalComponent, {});
-
- expect(vm.$el.querySelector('.modal-dialog').classList.contains('modal-sm')).toEqual(false);
- expect(vm.$el.querySelector('.modal-dialog').classList.contains('modal-lg')).toEqual(false);
- });
- });
-});
diff --git a/spec/frontend/vue_shared/components/diff_viewer/diff_viewer_spec.js b/spec/frontend/vue_shared/components/diff_viewer/diff_viewer_spec.js
index a6e4d812c3c..68e3ee11a0d 100644
--- a/spec/frontend/vue_shared/components/diff_viewer/diff_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/diff_viewer/diff_viewer_spec.js
@@ -26,7 +26,7 @@ describe('DiffViewer', () => {
vm.$destroy();
});
- it('renders image diff', done => {
+ it('renders image diff', (done) => {
window.gon = {
relative_url_root: '',
};
@@ -46,7 +46,7 @@ describe('DiffViewer', () => {
});
});
- it('renders fallback download diff display', done => {
+ it('renders fallback download diff display', (done) => {
createComponent({
...requiredProps,
diffViewerMode: 'added',
diff --git a/spec/frontend/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js b/spec/frontend/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js
index f364f374887..b6bffbcc6f3 100644
--- a/spec/frontend/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js
@@ -51,7 +51,7 @@ describe('ImageDiffViewer', () => {
wrapper.destroy();
});
- it('renders image diff for replaced', done => {
+ it('renders image diff for replaced', (done) => {
createComponent({ ...allProps });
vm.$nextTick(() => {
@@ -78,7 +78,7 @@ describe('ImageDiffViewer', () => {
});
});
- it('renders image diff for new', done => {
+ it('renders image diff for new', (done) => {
createComponent({ ...allProps, diffMode: 'new', oldPath: '' });
setImmediate(() => {
@@ -91,7 +91,7 @@ describe('ImageDiffViewer', () => {
});
});
- it('renders image diff for deleted', done => {
+ it('renders image diff for deleted', (done) => {
createComponent({ ...allProps, diffMode: 'deleted', newPath: '' });
setImmediate(() => {
@@ -104,7 +104,7 @@ describe('ImageDiffViewer', () => {
});
});
- it('renders image diff for renamed', done => {
+ it('renders image diff for renamed', (done) => {
vm = new Vue({
components: {
imageDiffViewer,
@@ -139,7 +139,7 @@ describe('ImageDiffViewer', () => {
});
describe('swipeMode', () => {
- beforeEach(done => {
+ beforeEach((done) => {
createComponent({ ...requiredProps });
setImmediate(() => {
@@ -147,7 +147,7 @@ describe('ImageDiffViewer', () => {
});
});
- it('switches to Swipe Mode', done => {
+ it('switches to Swipe Mode', (done) => {
vm.$el.querySelector('.view-modes-menu li:nth-child(2)').click();
vm.$nextTick(() => {
@@ -158,7 +158,7 @@ describe('ImageDiffViewer', () => {
});
describe('onionSkin', () => {
- beforeEach(done => {
+ beforeEach((done) => {
createComponent({ ...requiredProps });
setImmediate(() => {
@@ -166,7 +166,7 @@ describe('ImageDiffViewer', () => {
});
});
- it('switches to Onion Skin Mode', done => {
+ it('switches to Onion Skin Mode', (done) => {
vm.$el.querySelector('.view-modes-menu li:nth-child(3)').click();
vm.$nextTick(() => {
@@ -177,7 +177,7 @@ describe('ImageDiffViewer', () => {
});
});
- it('has working drag handler', done => {
+ it('has working drag handler', (done) => {
vm.$el.querySelector('.view-modes-menu li:nth-child(3)').click();
vm.$nextTick(() => {
diff --git a/spec/frontend/vue_shared/components/dismissible_container_spec.js b/spec/frontend/vue_shared/components/dismissible_container_spec.js
index e49ca1e2285..2c0e363fa0e 100644
--- a/spec/frontend/vue_shared/components/dismissible_container_spec.js
+++ b/spec/frontend/vue_shared/components/dismissible_container_spec.js
@@ -43,7 +43,7 @@ describe('DismissibleContainer', () => {
default: 'default slot',
};
- it.each(Object.keys(slots))('renders the %s slot', slot => {
+ it.each(Object.keys(slots))('renders the %s slot', (slot) => {
const slotContent = slots[slot];
wrapper = shallowMount(dismissibleContainer, {
propsData,
diff --git a/spec/frontend/vue_shared/components/editor_lite_spec.js b/spec/frontend/vue_shared/components/editor_lite_spec.js
index 52502fcf64f..70fdd8e24a5 100644
--- a/spec/frontend/vue_shared/components/editor_lite_spec.js
+++ b/spec/frontend/vue_shared/components/editor_lite_spec.js
@@ -7,20 +7,22 @@ jest.mock('~/editor/editor_lite');
describe('Editor Lite component', () => {
let wrapper;
- const onDidChangeModelContent = jest.fn();
- const updateModelLanguage = jest.fn();
- const getValue = jest.fn();
- const setValue = jest.fn();
+ let mockInstance;
+
const value = 'Lorem ipsum dolor sit amet, consectetur adipiscing elit.';
const fileName = 'lorem.txt';
const fileGlobalId = 'snippet_777';
- const createInstanceMock = jest.fn().mockImplementation(() => ({
- onDidChangeModelContent,
- updateModelLanguage,
- getValue,
- setValue,
- dispose: jest.fn(),
- }));
+ const createInstanceMock = jest.fn().mockImplementation(() => {
+ mockInstance = {
+ onDidChangeModelContent: jest.fn(),
+ updateModelLanguage: jest.fn(),
+ getValue: jest.fn(),
+ setValue: jest.fn(),
+ dispose: jest.fn(),
+ };
+ return mockInstance;
+ });
+
Editor.mockImplementation(() => {
return {
createInstance: createInstanceMock,
@@ -45,9 +47,9 @@ describe('Editor Lite component', () => {
wrapper.destroy();
});
- const triggerChangeContent = val => {
- getValue.mockReturnValue(val);
- const [cb] = onDidChangeModelContent.mock.calls[0];
+ const triggerChangeContent = (val) => {
+ mockInstance.getValue.mockReturnValue(val);
+ const [cb] = mockInstance.onDidChangeModelContent.mock.calls[0];
cb();
@@ -92,12 +94,12 @@ describe('Editor Lite component', () => {
});
return nextTick().then(() => {
- expect(updateModelLanguage).toHaveBeenCalledWith(newFileName);
+ expect(mockInstance.updateModelLanguage).toHaveBeenCalledWith(newFileName);
});
});
it('registers callback with editor onChangeContent', () => {
- expect(onDidChangeModelContent).toHaveBeenCalledWith(expect.any(Function));
+ expect(mockInstance.onDidChangeModelContent).toHaveBeenCalledWith(expect.any(Function));
});
it('emits input event when the blob content is changed', () => {
@@ -117,6 +119,10 @@ describe('Editor Lite component', () => {
expect(wrapper.emitted()['editor-ready']).toBeDefined();
});
+ it('component API `getEditor()` returns the editor instance', () => {
+ expect(wrapper.vm.getEditor()).toBe(mockInstance);
+ });
+
describe('reaction to the value update', () => {
it('reacts to the changes in the passed value', async () => {
const newValue = 'New Value';
@@ -126,7 +132,7 @@ describe('Editor Lite component', () => {
});
await nextTick();
- expect(setValue).toHaveBeenCalledWith(newValue);
+ expect(mockInstance.setValue).toHaveBeenCalledWith(newValue);
});
it("does not update value if the passed one is exactly the same as the editor's content", async () => {
@@ -137,7 +143,7 @@ describe('Editor Lite component', () => {
});
await nextTick();
- expect(setValue).not.toHaveBeenCalled();
+ expect(mockInstance.setValue).not.toHaveBeenCalled();
});
});
});
diff --git a/spec/frontend/vue_shared/components/expand_button_spec.js b/spec/frontend/vue_shared/components/expand_button_spec.js
index aea90e5b31f..724405a109f 100644
--- a/spec/frontend/vue_shared/components/expand_button_spec.js
+++ b/spec/frontend/vue_shared/components/expand_button_spec.js
@@ -41,12 +41,7 @@ describe('Expand button', () => {
});
it('does not render expanded text', () => {
- expect(
- wrapper
- .find(ExpandButton)
- .text()
- .trim(),
- ).not.toBe(text.short);
+ expect(wrapper.find(ExpandButton).text().trim()).not.toBe(text.short);
});
describe('when short text is provided', () => {
@@ -60,12 +55,7 @@ describe('Expand button', () => {
});
it('renders short text', () => {
- expect(
- wrapper
- .find(ExpandButton)
- .text()
- .trim(),
- ).toBe(text.short);
+ expect(wrapper.find(ExpandButton).text().trim()).toBe(text.short);
});
it('renders button before text', () => {
@@ -76,7 +66,7 @@ describe('Expand button', () => {
});
describe('on click', () => {
- beforeEach(done => {
+ beforeEach((done) => {
expanderPrependEl().trigger('click');
Vue.nextTick(done);
});
@@ -95,7 +85,7 @@ describe('Expand button', () => {
});
describe('when short text is provided', () => {
- beforeEach(done => {
+ beforeEach((done) => {
factory({
slots: {
expanded: `<p>${text.expanded}</p>`,
@@ -108,12 +98,7 @@ describe('Expand button', () => {
});
it('only renders expanded text', () => {
- expect(
- wrapper
- .find(ExpandButton)
- .text()
- .trim(),
- ).toBe(text.expanded);
+ expect(wrapper.find(ExpandButton).text().trim()).toBe(text.expanded);
});
it('renders button after text', () => {
@@ -125,7 +110,7 @@ describe('Expand button', () => {
});
describe('append button', () => {
- beforeEach(done => {
+ beforeEach((done) => {
expanderPrependEl().trigger('click');
Vue.nextTick(done);
});
@@ -140,26 +125,16 @@ describe('Expand button', () => {
});
it('clicking hides expanded text', () => {
- expect(
- wrapper
- .find(ExpandButton)
- .text()
- .trim(),
- ).toBe(text.expanded);
+ expect(wrapper.find(ExpandButton).text().trim()).toBe(text.expanded);
expanderAppendEl().trigger('click');
return wrapper.vm.$nextTick().then(() => {
- expect(
- wrapper
- .find(ExpandButton)
- .text()
- .trim(),
- ).not.toBe(text.expanded);
+ expect(wrapper.find(ExpandButton).text().trim()).not.toBe(text.expanded);
});
});
describe('when short text is provided', () => {
- beforeEach(done => {
+ beforeEach((done) => {
factory({
slots: {
expanded: `<p>${text.expanded}</p>`,
@@ -172,21 +147,11 @@ describe('Expand button', () => {
});
it('clicking reveals short text', () => {
- expect(
- wrapper
- .find(ExpandButton)
- .text()
- .trim(),
- ).toBe(text.expanded);
+ expect(wrapper.find(ExpandButton).text().trim()).toBe(text.expanded);
expanderAppendEl().trigger('click');
return wrapper.vm.$nextTick().then(() => {
- expect(
- wrapper
- .find(ExpandButton)
- .text()
- .trim(),
- ).toBe(text.short);
+ expect(wrapper.find(ExpandButton).text().trim()).toBe(text.short);
});
});
});
diff --git a/spec/frontend/vue_shared/components/file_finder/index_spec.js b/spec/frontend/vue_shared/components/file_finder/index_spec.js
index 40026021777..238a5440664 100644
--- a/spec/frontend/vue_shared/components/file_finder/index_spec.js
+++ b/spec/frontend/vue_shared/components/file_finder/index_spec.js
@@ -31,7 +31,7 @@ describe('File finder item spec', () => {
});
describe('with entries', () => {
- beforeEach(done => {
+ beforeEach((done) => {
createComponent({
files: [
{
@@ -57,7 +57,7 @@ describe('File finder item spec', () => {
expect(vm.$el.textContent).not.toContain('folder');
});
- it('filters entries', done => {
+ it('filters entries', (done) => {
vm.searchText = 'index';
setImmediate(() => {
@@ -68,7 +68,7 @@ describe('File finder item spec', () => {
});
});
- it('shows clear button when searchText is not empty', done => {
+ it('shows clear button when searchText is not empty', (done) => {
vm.searchText = 'index';
setImmediate(() => {
@@ -79,7 +79,7 @@ describe('File finder item spec', () => {
});
});
- it('clear button resets searchText', done => {
+ it('clear button resets searchText', (done) => {
vm.searchText = 'index';
waitForPromises()
@@ -94,7 +94,7 @@ describe('File finder item spec', () => {
.catch(done.fail);
});
- it('clear button focuses search input', done => {
+ it('clear button focuses search input', (done) => {
jest.spyOn(vm.$refs.searchInput, 'focus').mockImplementation(() => {});
vm.searchText = 'index';
@@ -111,7 +111,7 @@ describe('File finder item spec', () => {
});
describe('listShowCount', () => {
- it('returns 1 when no filtered entries exist', done => {
+ it('returns 1 when no filtered entries exist', (done) => {
vm.searchText = 'testing 123';
setImmediate(() => {
@@ -131,7 +131,7 @@ describe('File finder item spec', () => {
expect(vm.listHeight).toBe(55);
});
- it('returns 33 when entries dont exist', done => {
+ it('returns 33 when entries dont exist', (done) => {
vm.searchText = 'testing 123';
setImmediate(() => {
@@ -143,7 +143,7 @@ describe('File finder item spec', () => {
});
describe('filteredBlobsLength', () => {
- it('returns length of filtered blobs', done => {
+ it('returns length of filtered blobs', (done) => {
vm.searchText = 'index';
setImmediate(() => {
@@ -156,7 +156,7 @@ describe('File finder item spec', () => {
describe('watches', () => {
describe('searchText', () => {
- it('resets focusedIndex when updated', done => {
+ it('resets focusedIndex when updated', (done) => {
vm.focusedIndex = 1;
vm.searchText = 'test';
@@ -169,7 +169,7 @@ describe('File finder item spec', () => {
});
describe('visible', () => {
- it('returns searchText when false', done => {
+ it('returns searchText when false', (done) => {
vm.searchText = 'test';
vm.visible = true;
@@ -206,7 +206,7 @@ describe('File finder item spec', () => {
});
describe('onKeyup', () => {
- it('opens file on enter key', done => {
+ it('opens file on enter key', (done) => {
const event = new CustomEvent('keyup');
event.keyCode = ENTER_KEY_CODE;
@@ -221,7 +221,7 @@ describe('File finder item spec', () => {
});
});
- it('closes file finder on esc key', done => {
+ it('closes file finder on esc key', (done) => {
const event = new CustomEvent('keyup');
event.keyCode = ESC_KEY_CODE;
@@ -300,7 +300,7 @@ describe('File finder item spec', () => {
});
describe('keyboard shortcuts', () => {
- beforeEach(done => {
+ beforeEach((done) => {
createComponent();
jest.spyOn(vm, 'toggle').mockImplementation(() => {});
@@ -308,7 +308,7 @@ describe('File finder item spec', () => {
vm.$nextTick(done);
});
- it('calls toggle on `t` key press', done => {
+ it('calls toggle on `t` key press', (done) => {
Mousetrap.trigger('t');
vm.$nextTick()
@@ -319,7 +319,7 @@ describe('File finder item spec', () => {
.catch(done.fail);
});
- it('calls toggle on `mod+p` key press', done => {
+ it('calls toggle on `mod+p` key press', (done) => {
Mousetrap.trigger('mod+p');
vm.$nextTick()
diff --git a/spec/frontend/vue_shared/components/file_finder/item_spec.js b/spec/frontend/vue_shared/components/file_finder/item_spec.js
index 5a45a5dbba1..c60e6335389 100644
--- a/spec/frontend/vue_shared/components/file_finder/item_spec.js
+++ b/spec/frontend/vue_shared/components/file_finder/item_spec.js
@@ -37,7 +37,7 @@ describe('File finder item spec', () => {
expect(vm.$el.classList).toContain('is-focused');
});
- it('does not have is-focused class when not focused', done => {
+ it('does not have is-focused class when not focused', (done) => {
vm.focused = false;
vm.$nextTick(() => {
@@ -53,7 +53,7 @@ describe('File finder item spec', () => {
expect(vm.$el.querySelector('.diff-changed-stats')).toBe(null);
});
- it('renders when a changed file', done => {
+ it('renders when a changed file', (done) => {
vm.file.changed = true;
vm.$nextTick(() => {
@@ -63,7 +63,7 @@ describe('File finder item spec', () => {
});
});
- it('renders when a temp file', done => {
+ it('renders when a temp file', (done) => {
vm.file.tempFile = true;
vm.$nextTick(() => {
@@ -85,7 +85,7 @@ describe('File finder item spec', () => {
describe('path', () => {
let el;
- beforeEach(done => {
+ beforeEach((done) => {
vm.searchText = 'file';
el = vm.$el.querySelector('.diff-changed-file-path');
@@ -97,7 +97,7 @@ describe('File finder item spec', () => {
expect(el.querySelectorAll('.highlighted').length).toBe(4);
});
- it('adds ellipsis to long text', done => {
+ it('adds ellipsis to long text', (done) => {
vm.file.path = new Array(70)
.fill()
.map((_, i) => `${i}-`)
@@ -113,7 +113,7 @@ describe('File finder item spec', () => {
describe('name', () => {
let el;
- beforeEach(done => {
+ beforeEach((done) => {
vm.searchText = 'file';
el = vm.$el.querySelector('.diff-changed-file-name');
@@ -125,7 +125,7 @@ describe('File finder item spec', () => {
expect(el.querySelectorAll('.highlighted').length).toBe(4);
});
- it('does not add ellipsis to long text', done => {
+ it('does not add ellipsis to long text', (done) => {
vm.file.name = new Array(70)
.fill()
.map((_, i) => `${i}-`)
diff --git a/spec/frontend/vue_shared/components/file_tree_spec.js b/spec/frontend/vue_shared/components/file_tree_spec.js
index 38979d9d844..7a4982fd29b 100644
--- a/spec/frontend/vue_shared/components/file_tree_spec.js
+++ b/spec/frontend/vue_shared/components/file_tree_spec.js
@@ -28,7 +28,7 @@ describe('File Tree component', () => {
const findFileRow = () => wrapper.find(MockFileRow);
const findChildrenTrees = () => wrapper.findAll(FileTree).wrappers.slice(1);
const findChildrenTreeProps = () =>
- findChildrenTrees().map(x => ({
+ findChildrenTrees().map((x) => ({
...x.props(),
...pick(x.attributes(), Object.keys(TEST_EXTA_ARGS)),
}));
@@ -61,7 +61,7 @@ describe('File Tree component', () => {
describe('file tree', () => {
const createChildren = () => [{ id: 1 }, { id: 2 }];
const createChildrenExpectation = (props = {}) =>
- createChildren().map(file => ({
+ createChildren().map((file) => ({
fileRowComponent: MockFileRow,
file,
...TEST_EXTA_ARGS,
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js
index 8cc5d6775a7..b58ce0083c0 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js
@@ -25,10 +25,11 @@ import {
tokenValueLabel,
tokenValueMilestone,
tokenValueMembership,
+ tokenValueConfidential,
} from './mock_data';
jest.mock('~/vue_shared/components/filtered_search_bar/filtered_search_utils', () => ({
- uniqueTokens: jest.fn().mockImplementation(tokens => tokens),
+ uniqueTokens: jest.fn().mockImplementation((tokens) => tokens),
stripQuotes: jest.requireActual(
'~/vue_shared/components/filtered_search_bar/filtered_search_utils',
).stripQuotes,
@@ -227,12 +228,13 @@ describe('FilteredSearchBarRoot', () => {
});
describe('removeQuotesEnclosure', () => {
- const mockFilters = [tokenValueAuthor, tokenValueLabel, 'foo'];
+ const mockFilters = [tokenValueAuthor, tokenValueLabel, tokenValueConfidential, 'foo'];
it('returns filter array with unescaped strings for values which have spaces', () => {
expect(wrapper.vm.removeQuotesEnclosure(mockFilters)).toEqual([
tokenValueAuthor,
tokenValueLabel,
+ tokenValueConfidential,
'foo',
]);
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js
index 4869e75a2f3..9e96c154546 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js
@@ -82,7 +82,10 @@ describe('prepareTokens', () => {
],
[
'assignees',
- [{ value: 'krillin', operator: '=' }, { value: 'piccolo', operator: '!=' }],
+ [
+ { value: 'krillin', operator: '=' },
+ { value: 'piccolo', operator: '!=' },
+ ],
[
{ type: 'assignees', value: { data: 'krillin', operator: '=' } },
{ type: 'assignees', value: { data: 'piccolo', operator: '!=' } },
@@ -90,7 +93,10 @@ describe('prepareTokens', () => {
],
[
'foo',
- [{ value: 'bar', operator: '!=' }, { value: 'baz', operator: '!=' }],
+ [
+ { value: 'bar', operator: '!=' },
+ { value: 'baz', operator: '!=' },
+ ],
[
{ type: 'foo', value: { data: 'bar', operator: '!=' } },
{ type: 'foo', value: { data: 'baz', operator: '!=' } },
@@ -112,7 +118,10 @@ describe('processFilters', () => {
expect(result).toStrictEqual({
foo: [{ value: 'foo', operator: '=' }],
- bar: [{ value: 'bar1', operator: '=' }, { value: 'bar2', operator: '!=' }],
+ bar: [
+ { value: 'bar1', operator: '=' },
+ { value: 'bar2', operator: '!=' },
+ ],
});
});
@@ -164,17 +173,26 @@ describe('filterToQueryObject', () => {
],
[
'foo',
- [{ value: 'bar', operator: '=' }, { value: 'baz', operator: '=' }],
+ [
+ { value: 'bar', operator: '=' },
+ { value: 'baz', operator: '=' },
+ ],
{ foo: ['bar', 'baz'], 'not[foo]': null },
],
[
'foo',
- [{ value: 'bar', operator: '!=' }, { value: 'baz', operator: '!=' }],
+ [
+ { value: 'bar', operator: '!=' },
+ { value: 'baz', operator: '!=' },
+ ],
{ foo: null, 'not[foo]': ['bar', 'baz'] },
],
[
'foo',
- [{ value: 'bar', operator: '!=' }, { value: 'baz', operator: '=' }],
+ [
+ { value: 'bar', operator: '!=' },
+ { value: 'baz', operator: '=' },
+ ],
{ foo: ['baz'], 'not[foo]': ['bar'] },
],
])('gathers filter values %s=%j into query object=%j', (token, value, result) => {
@@ -200,15 +218,30 @@ describe('urlQueryToFilter', () => {
['not[foo]=bar&foo=baz', { foo: { value: 'baz', operator: '=' } }],
[
'foo[]=bar&foo[]=baz&not[foo]=',
- { foo: [{ value: 'bar', operator: '=' }, { value: 'baz', operator: '=' }] },
+ {
+ foo: [
+ { value: 'bar', operator: '=' },
+ { value: 'baz', operator: '=' },
+ ],
+ },
],
[
'foo[]=&not[foo][]=bar&not[foo][]=baz',
- { foo: [{ value: 'bar', operator: '!=' }, { value: 'baz', operator: '!=' }] },
+ {
+ foo: [
+ { value: 'bar', operator: '!=' },
+ { value: 'baz', operator: '!=' },
+ ],
+ },
],
[
'foo[]=baz&not[foo][]=bar',
- { foo: [{ value: 'baz', operator: '=' }, { value: 'bar', operator: '!=' }] },
+ {
+ foo: [
+ { value: 'baz', operator: '=' },
+ { value: 'bar', operator: '!=' },
+ ],
+ },
],
['not[foo][]=bar', { foo: [{ value: 'bar', operator: '!=' }] }],
])('gathers filter values %s into query object=%j', (query, result) => {
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js b/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
index 64fbe70696d..7606b3bd91c 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
@@ -110,7 +110,10 @@ export const mockMembershipToken = {
token: GlFilteredSearchToken,
unique: true,
operators: [{ value: '=', description: 'is' }],
- options: [{ value: 'exclude', title: 'Direct' }, { value: 'only', title: 'Inherited' }],
+ options: [
+ { value: 'exclude', title: 'Direct' },
+ { value: 'only', title: 'Inherited' },
+ ],
};
export const mockMembershipTokenOptionsWithoutTitles = {
@@ -152,6 +155,14 @@ export const tokenValueMembership = {
},
};
+export const tokenValueConfidential = {
+ type: 'confidential',
+ value: {
+ operator: '=',
+ data: true,
+ },
+};
+
export const tokenValuePlain = {
type: 'filtered-search-term',
value: { data: 'foo' },
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/test_helper.js b/spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/test_helper.js
index 1b7c80a5252..84297923df3 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/test_helper.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/test_helper.js
@@ -1,11 +1,11 @@
export function getFilterParams(tokens, options = {}) {
const { key = 'value', operator = '=', prop = 'title' } = options;
- return tokens.map(token => {
+ return tokens.map((token) => {
return { [key]: token[prop], operator };
});
}
export function getFilterValues(tokens, options = {}) {
const { prop = 'title' } = options;
- return tokens.map(token => token[prop]);
+ return tokens.map((token) => token[prop]);
}
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js
index 3fd1d8b7f42..3997d6a99a6 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js
@@ -189,8 +189,8 @@ describe('AuthorToken', () => {
suggestionsSegment.vm.$emit('activate');
await wrapper.vm.$nextTick();
- expect(wrapper.contains(GlFilteredSearchSuggestion)).toBe(false);
- expect(wrapper.contains(GlDropdownDivider)).toBe(false);
+ expect(wrapper.find(GlFilteredSearchSuggestion).exists()).toBe(false);
+ expect(wrapper.find(GlDropdownDivider).exists()).toBe(false);
});
it('renders `DEFAULT_LABEL_ANY` as default suggestions', async () => {
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js
index 5b7f7d242e9..35f487330be 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js
@@ -189,8 +189,8 @@ describe('BranchToken', () => {
});
await showSuggestions();
- expect(wrapper.contains(GlFilteredSearchSuggestion)).toBe(false);
- expect(wrapper.contains(GlDropdownDivider)).toBe(false);
+ expect(wrapper.find(GlFilteredSearchSuggestion).exists()).toBe(false);
+ expect(wrapper.find(GlDropdownDivider).exists()).toBe(false);
});
it('renders no suggestions as default', async () => {
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
index 74172db81c2..dda0ad39bbc 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
@@ -181,12 +181,9 @@ describe('LabelToken', () => {
expect(tokenSegments).toHaveLength(3); // Label, =, "Foo Label"
expect(tokenSegments.at(2).text()).toBe(`~${mockRegularLabel.title}`); // "Foo Label"
- expect(
- tokenSegments
- .at(2)
- .find('.gl-token')
- .attributes('style'),
- ).toBe('background-color: rgb(186, 218, 85); color: rgb(255, 255, 255);');
+ expect(tokenSegments.at(2).find('.gl-token').attributes('style')).toBe(
+ 'background-color: rgb(186, 218, 85); color: rgb(255, 255, 255);',
+ );
});
it('renders provided defaultLabels as suggestions', async () => {
@@ -219,8 +216,8 @@ describe('LabelToken', () => {
suggestionsSegment.vm.$emit('activate');
await wrapper.vm.$nextTick();
- expect(wrapper.contains(GlFilteredSearchSuggestion)).toBe(false);
- expect(wrapper.contains(GlDropdownDivider)).toBe(false);
+ expect(wrapper.find(GlFilteredSearchSuggestion).exists()).toBe(false);
+ expect(wrapper.find(GlDropdownDivider).exists()).toBe(false);
});
it('renders `DEFAULT_LABELS` as default suggestions', async () => {
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
index 67f9a9c70cc..164561f6244 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
@@ -140,7 +140,10 @@ describe('MilestoneToken', () => {
});
describe('template', () => {
- const defaultMilestones = [{ text: 'foo', value: 'foo' }, { text: 'bar', value: 'baz' }];
+ const defaultMilestones = [
+ { text: 'foo', value: 'foo' },
+ { text: 'bar', value: 'baz' },
+ ];
beforeEach(async () => {
wrapper = createComponent({ value: { data: `"${mockRegularMilestone.title}"` } });
@@ -193,8 +196,8 @@ describe('MilestoneToken', () => {
suggestionsSegment.vm.$emit('activate');
await wrapper.vm.$nextTick();
- expect(wrapper.contains(GlFilteredSearchSuggestion)).toBe(false);
- expect(wrapper.contains(GlDropdownDivider)).toBe(false);
+ expect(wrapper.find(GlFilteredSearchSuggestion).exists()).toBe(false);
+ expect(wrapper.find(GlDropdownDivider).exists()).toBe(false);
});
it('renders `DEFAULT_MILESTONES` as default suggestions', async () => {
diff --git a/spec/frontend/vue_shared/components/gfm_autocomplete/__snapshots__/utils_spec.js.snap b/spec/frontend/vue_shared/components/gfm_autocomplete/__snapshots__/utils_spec.js.snap
index d0fa2086fdc..d8e6e37bb89 100644
--- a/spec/frontend/vue_shared/components/gfm_autocomplete/__snapshots__/utils_spec.js.snap
+++ b/spec/frontend/vue_shared/components/gfm_autocomplete/__snapshots__/utils_spec.js.snap
@@ -1,5 +1,13 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
+exports[`gfm_autocomplete/utils emojis config shows the emoji name and icon in the menu item 1`] = `
+"raised_hands
+ <gl-emoji
+
+ data-name=\\"raised_hands\\"></gl-emoji>
+ "
+`;
+
exports[`gfm_autocomplete/utils issues config shows the iid and title in the menu item within a project context 1`] = `"<small>123456</small> Project context issue title &lt;script&gt;alert(&#39;hi&#39;)&lt;/script&gt;"`;
exports[`gfm_autocomplete/utils issues config shows the reference and title in the menu item within a group context 1`] = `"<small>gitlab#987654</small> Group context issue title &lt;script&gt;alert(&#39;hi&#39;)&lt;/script&gt;"`;
@@ -44,4 +52,9 @@ exports[`gfm_autocomplete/utils merge requests config shows the reference and ti
exports[`gfm_autocomplete/utils milestones config shows the title in the menu item 1`] = `"13.2 &lt;script&gt;alert(&#39;hi&#39;)&lt;/script&gt;"`;
+exports[`gfm_autocomplete/utils quick actions config shows the name, aliases, params and description in the menu item 1`] = `
+"<div>/unlabel <small>(or /remove_label)</small> <small>~label1 ~\\"label 2\\"</small></div>
+ <div><small><em>Remove all or specific label(s)</em></small></div>"
+`;
+
exports[`gfm_autocomplete/utils snippets config shows the id and title in the menu item 1`] = `"<small>123456</small> Snippet title &lt;script&gt;alert(&#39;hi&#39;)&lt;/script&gt;"`;
diff --git a/spec/frontend/vue_shared/components/gfm_autocomplete/utils_spec.js b/spec/frontend/vue_shared/components/gfm_autocomplete/utils_spec.js
index 647f8c6e000..7ec3fbd4e3b 100644
--- a/spec/frontend/vue_shared/components/gfm_autocomplete/utils_spec.js
+++ b/spec/frontend/vue_shared/components/gfm_autocomplete/utils_spec.js
@@ -2,6 +2,31 @@ import { escape, last } from 'lodash';
import { GfmAutocompleteType, tributeConfig } from '~/vue_shared/components/gfm_autocomplete/utils';
describe('gfm_autocomplete/utils', () => {
+ describe('emojis config', () => {
+ const emojisConfig = tributeConfig[GfmAutocompleteType.Emojis].config;
+ const emoji = 'raised_hands';
+
+ it('uses : as the trigger', () => {
+ expect(emojisConfig.trigger).toBe(':');
+ });
+
+ it('searches using the emoji name', () => {
+ expect(emojisConfig.lookup(emoji)).toBe(emoji);
+ });
+
+ it('limits the number of rendered items to 100', () => {
+ expect(emojisConfig.menuItemLimit).toBe(100);
+ });
+
+ it('shows the emoji name and icon in the menu item', () => {
+ expect(emojisConfig.menuItemTemplate({ original: emoji })).toMatchSnapshot();
+ });
+
+ it('inserts the emoji name on autocomplete selection', () => {
+ expect(emojisConfig.selectTemplate({ original: emoji })).toBe(`:${emoji}:`);
+ });
+ });
+
describe('issues config', () => {
const issuesConfig = tributeConfig[GfmAutocompleteType.Issues].config;
const groupContextIssue = {
@@ -26,6 +51,10 @@ describe('gfm_autocomplete/utils', () => {
);
});
+ it('limits the number of rendered items to 100', () => {
+ expect(issuesConfig.menuItemLimit).toBe(100);
+ });
+
it('shows the reference and title in the menu item within a group context', () => {
expect(issuesConfig.menuItemTemplate({ original: groupContextIssue })).toMatchSnapshot();
});
@@ -77,6 +106,10 @@ describe('gfm_autocomplete/utils', () => {
expect(labelsConfig.lookup).toBe('title');
});
+ it('limits the number of rendered items to 100', () => {
+ expect(labelsConfig.menuItemLimit).toBe(100);
+ });
+
it('shows the title in the menu item', () => {
expect(labelsConfig.menuItemTemplate({ original: label })).toMatchSnapshot();
});
@@ -180,6 +213,10 @@ describe('gfm_autocomplete/utils', () => {
expect(membersConfig.lookup(groupMember)).toBe(last(groupMember.name.split(' / ')));
});
+ it('limits the items in the autocomplete menu to 10', () => {
+ expect(membersConfig.menuItemLimit).toBe(10);
+ });
+
it('shows the avatar, name and username in the menu item for a user', () => {
expect(membersConfig.menuItemTemplate({ original: userMember })).toMatchSnapshot();
});
@@ -266,6 +303,10 @@ describe('gfm_autocomplete/utils', () => {
);
});
+ it('limits the number of rendered items to 100', () => {
+ expect(mergeRequestsConfig.menuItemLimit).toBe(100);
+ });
+
it('shows the reference and title in the menu item within a group context', () => {
expect(
mergeRequestsConfig.menuItemTemplate({ original: groupContextMergeRequest }),
@@ -307,6 +348,10 @@ describe('gfm_autocomplete/utils', () => {
expect(milestonesConfig.lookup).toBe('title');
});
+ it('limits the number of rendered items to 100', () => {
+ expect(milestonesConfig.menuItemLimit).toBe(100);
+ });
+
it('shows the title in the menu item', () => {
expect(milestonesConfig.menuItemTemplate({ original: milestone })).toMatchSnapshot();
});
@@ -318,6 +363,40 @@ describe('gfm_autocomplete/utils', () => {
});
});
+ describe('quick actions config', () => {
+ const quickActionsConfig = tributeConfig[GfmAutocompleteType.QuickActions].config;
+ const quickAction = {
+ name: 'unlabel',
+ aliases: ['remove_label'],
+ description: 'Remove all or specific label(s)',
+ warning: '',
+ icon: '',
+ params: ['~label1 ~"label 2"'],
+ };
+
+ it('uses / as the trigger', () => {
+ expect(quickActionsConfig.trigger).toBe('/');
+ });
+
+ it('inserts the name on autocomplete selection', () => {
+ expect(quickActionsConfig.fillAttr).toBe('name');
+ });
+
+ it('searches using both the name and aliases', () => {
+ expect(quickActionsConfig.lookup(quickAction)).toBe(
+ `${quickAction.name}${quickAction.aliases.join(', /')}`,
+ );
+ });
+
+ it('limits the number of rendered items to 100', () => {
+ expect(quickActionsConfig.menuItemLimit).toBe(100);
+ });
+
+ it('shows the name, aliases, params and description in the menu item', () => {
+ expect(quickActionsConfig.menuItemTemplate({ original: quickAction })).toMatchSnapshot();
+ });
+ });
+
describe('snippets config', () => {
const snippetsConfig = tributeConfig[GfmAutocompleteType.Snippets].config;
const snippet = {
@@ -337,6 +416,10 @@ describe('gfm_autocomplete/utils', () => {
expect(snippetsConfig.lookup(snippet)).toBe(`${snippet.id}${snippet.title}`);
});
+ it('limits the number of rendered items to 100', () => {
+ expect(snippetsConfig.menuItemLimit).toBe(100);
+ });
+
it('shows the id and title in the menu item', () => {
expect(snippetsConfig.menuItemTemplate({ original: snippet })).toMatchSnapshot();
});
diff --git a/spec/frontend/vue_shared/components/gl_countdown_spec.js b/spec/frontend/vue_shared/components/gl_countdown_spec.js
index 365c9fad478..fcc5c0cd310 100644
--- a/spec/frontend/vue_shared/components/gl_countdown_spec.js
+++ b/spec/frontend/vue_shared/components/gl_countdown_spec.js
@@ -17,21 +17,19 @@ describe('GlCountdown', () => {
});
describe('when there is time remaining', () => {
- beforeEach(done => {
+ beforeEach((done) => {
vm = mountComponent(Component, {
endDateString: '2000-01-01T01:02:03Z',
});
- Vue.nextTick()
- .then(done)
- .catch(done.fail);
+ Vue.nextTick().then(done).catch(done.fail);
});
it('displays remaining time', () => {
expect(vm.$el.textContent).toContain('01:02:03');
});
- it('updates remaining time', done => {
+ it('updates remaining time', (done) => {
now = '2000-01-01T00:00:01Z';
jest.advanceTimersByTime(1000);
@@ -45,14 +43,12 @@ describe('GlCountdown', () => {
});
describe('when there is no time remaining', () => {
- beforeEach(done => {
+ beforeEach((done) => {
vm = mountComponent(Component, {
endDateString: '1900-01-01T00:00:00Z',
});
- Vue.nextTick()
- .then(done)
- .catch(done.fail);
+ Vue.nextTick().then(done).catch(done.fail);
});
it('displays 00:00:00', () => {
diff --git a/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js b/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js
index 93f4db5df18..6802499ed52 100644
--- a/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js
+++ b/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js
@@ -118,7 +118,7 @@ describe('GlModalVuex', () => {
expect(actions.hide).toHaveBeenCalledTimes(1);
});
- it('calls bootstrap show when isVisible changes', done => {
+ it('calls bootstrap show when isVisible changes', (done) => {
state.isVisible = false;
factory();
@@ -135,7 +135,7 @@ describe('GlModalVuex', () => {
.catch(done.fail);
});
- it('calls bootstrap hide when isVisible changes', done => {
+ it('calls bootstrap hide when isVisible changes', (done) => {
state.isVisible = true;
factory();
@@ -154,7 +154,7 @@ describe('GlModalVuex', () => {
it.each(['ok', 'cancel'])(
'passes an "%s" handler to the "modal-footer" slot scope',
- handlerName => {
+ (handlerName) => {
state.isVisible = true;
const modalFooterSlotContent = jest.fn();
diff --git a/spec/frontend/vue_shared/components/issuable/issuable_header_warnings_spec.js b/spec/frontend/vue_shared/components/issuable/issuable_header_warnings_spec.js
index 2f910a10bc6..a03a3915e1b 100644
--- a/spec/frontend/vue_shared/components/issuable/issuable_header_warnings_spec.js
+++ b/spec/frontend/vue_shared/components/issuable/issuable_header_warnings_spec.js
@@ -17,13 +17,13 @@ describe('IssuableHeaderWarnings', () => {
const findConfidentialIcon = () => wrapper.find('[data-testid="confidential"]');
const findLockedIcon = () => wrapper.find('[data-testid="locked"]');
- const renderTestMessage = renders => (renders ? 'renders' : 'does not render');
+ const renderTestMessage = (renders) => (renders ? 'renders' : 'does not render');
- const setLock = locked => {
+ const setLock = (locked) => {
store.getters.getNoteableData.discussion_locked = locked;
};
- const setConfidential = confidential => {
+ const setConfidential = (confidential) => {
store.getters.getNoteableData.confidential = confidential;
};
diff --git a/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js b/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js
index 192e33d8b00..5f614bfc751 100644
--- a/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js
+++ b/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js
@@ -11,7 +11,7 @@ describe('IssueAssigneesComponent', () => {
let wrapper;
let vm;
- const factory = props => {
+ const factory = (props) => {
wrapper = shallowMount(IssueAssignees, {
propsData: {
assignees: mockAssigneesList,
@@ -99,11 +99,11 @@ describe('IssueAssigneesComponent', () => {
});
it('renders assignee', () => {
- const data = findAvatars().wrappers.map(x => ({
+ const data = findAvatars().wrappers.map((x) => ({
...x.props(),
}));
- const expected = mockAssigneesList.slice(0, TEST_MAX_VISIBLE - 1).map(x =>
+ const expected = mockAssigneesList.slice(0, TEST_MAX_VISIBLE - 1).map((x) =>
expect.objectContaining({
linkHref: x.web_url,
imgAlt: `Avatar for ${x.name}`,
diff --git a/spec/frontend/vue_shared/components/issue/issue_milestone_spec.js b/spec/frontend/vue_shared/components/issue/issue_milestone_spec.js
index d1bfc180082..ffcb891c4fc 100644
--- a/spec/frontend/vue_shared/components/issue/issue_milestone_spec.js
+++ b/spec/frontend/vue_shared/components/issue/issue_milestone_spec.js
@@ -19,7 +19,7 @@ describe('IssueMilestoneComponent', () => {
let wrapper;
let vm;
- beforeEach(done => {
+ beforeEach((done) => {
wrapper = createComponent();
({ vm } = wrapper);
diff --git a/spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js b/spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js
index 2319bf61482..3dc34583118 100644
--- a/spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js
+++ b/spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js
@@ -1,5 +1,5 @@
import { mount } from '@vue/test-utils';
-import { TEST_HOST } from 'jest/helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
import { formatDate } from '~/lib/utils/datetime_utility';
import RelatedIssuableItem from '~/vue_shared/components/issue/related_issuable_item.vue';
import IssueDueDate from '~/boards/components/issue_due_date.vue';
@@ -115,9 +115,7 @@ describe('RelatedIssuableItem', () => {
const tokenMetadata = () => wrapper.find('.item-meta');
it('renders item path and ID', () => {
- const pathAndID = tokenMetadata()
- .find('.item-path-id')
- .text();
+ const pathAndID = tokenMetadata().find('.item-path-id').text();
expect(pathAndID).toContain('gitlab-org/gitlab-test');
expect(pathAndID).toContain('#1');
diff --git a/spec/frontend/vue_shared/components/issue/related_issuable_mock_data.js b/spec/frontend/vue_shared/components/issue/related_issuable_mock_data.js
index 17813f2833d..6cdb945ec20 100644
--- a/spec/frontend/vue_shared/components/issue/related_issuable_mock_data.js
+++ b/spec/frontend/vue_shared/components/issue/related_issuable_mock_data.js
@@ -1,4 +1,4 @@
-import { TEST_HOST } from 'jest/helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
export const defaultProps = {
endpoint: '/foo/bar/issues/1/related_issues',
diff --git a/spec/frontend/vue_shared/components/local_storage_sync_spec.js b/spec/frontend/vue_shared/components/local_storage_sync_spec.js
index 464fe3411dd..4c5a0c1e601 100644
--- a/spec/frontend/vue_shared/components/local_storage_sync_spec.js
+++ b/spec/frontend/vue_shared/components/local_storage_sync_spec.js
@@ -49,7 +49,7 @@ describe('Local Storage Sync', () => {
it.each('foo', 3, true, ['foo', 'bar'], { foo: 'bar' })(
'saves updated value to localStorage',
- newValue => {
+ (newValue) => {
createComponent({
props: {
storageKey,
diff --git a/spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap b/spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap
index b8a9143bc79..c454166e30b 100644
--- a/spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap
+++ b/spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap
@@ -7,6 +7,7 @@ exports[`Suggestion Diff component matches snapshot 1`] = `
<suggestion-diff-header-stub
batchsuggestionscount="1"
class="qa-suggestion-diff-header js-suggestion-diff-header"
+ defaultcommitmessage="Apply suggestion"
helppagepath="path_to_docs"
isapplyingbatch="true"
isbatched="true"
diff --git a/spec/frontend/vue_shared/components/markdown/apply_suggestion_spec.js b/spec/frontend/vue_shared/components/markdown/apply_suggestion_spec.js
index 0598506891b..b9f0d88548d 100644
--- a/spec/frontend/vue_shared/components/markdown/apply_suggestion_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/apply_suggestion_spec.js
@@ -3,10 +3,10 @@ import { GlDropdown, GlFormTextarea, GlButton } from '@gitlab/ui';
import ApplySuggestionComponent from '~/vue_shared/components/markdown/apply_suggestion.vue';
describe('Apply Suggestion component', () => {
- const propsData = { fileName: 'test.js', disabled: false };
+ const propsData = { defaultCommitMessage: 'Apply suggestion', disabled: false };
let wrapper;
- const createWrapper = props => {
+ const createWrapper = (props) => {
wrapper = shallowMount(ApplySuggestionComponent, { propsData: { ...propsData, ...props } });
};
@@ -27,7 +27,6 @@ describe('Apply Suggestion component', () => {
expect(dropdown.exists()).toBe(true);
expect(dropdown.props('text')).toBe('Apply suggestion');
- expect(dropdown.props('headerText')).toBe('Apply suggestion commit message');
expect(dropdown.props('disabled')).toBe(false);
});
@@ -35,7 +34,7 @@ describe('Apply Suggestion component', () => {
const textArea = findTextArea();
expect(textArea.exists()).toBe(true);
- expect(textArea.attributes('placeholder')).toBe('Apply suggestion on test.js');
+ expect(textArea.attributes('placeholder')).toBe('Apply suggestion');
});
it('renders an apply button', () => {
@@ -55,11 +54,11 @@ describe('Apply Suggestion component', () => {
});
describe('apply suggestion', () => {
- it('emits an apply event with a default message if no message was added', () => {
+ it('emits an apply event with no message if no message was added', () => {
findTextArea().vm.$emit('input', null);
findApplyButton().vm.$emit('click');
- expect(wrapper.emitted('apply')).toEqual([['Apply suggestion on test.js']]);
+ expect(wrapper.emitted('apply')).toEqual([[null]]);
});
it('emits an apply event with a user-defined message', () => {
diff --git a/spec/frontend/vue_shared/components/markdown/header_spec.js b/spec/frontend/vue_shared/components/markdown/header_spec.js
index 82bc9b9fe08..077c2174571 100644
--- a/spec/frontend/vue_shared/components/markdown/header_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/header_spec.js
@@ -6,7 +6,7 @@ import ToolbarButton from '~/vue_shared/components/markdown/toolbar_button.vue';
describe('Markdown field header component', () => {
let wrapper;
- const createWrapper = props => {
+ const createWrapper = (props) => {
wrapper = shallowMount(HeaderComponent, {
propsData: {
previewMarkdown: false,
@@ -18,7 +18,7 @@ describe('Markdown field header component', () => {
const findToolbarButtons = () => wrapper.findAll(ToolbarButton);
const findToolbarButtonByProp = (prop, value) =>
findToolbarButtons()
- .filter(button => button.props(prop) === value)
+ .filter((button) => button.props(prop) === value)
.at(0);
beforeEach(() => {
diff --git a/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js b/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
index c0a000690f8..bf65adc866d 100644
--- a/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
@@ -9,6 +9,7 @@ const DEFAULT_PROPS = {
isBatched: false,
isApplyingBatch: false,
helpPagePath: 'path_to_docs',
+ defaultCommitMessage: 'Apply suggestion',
};
describe('Suggestion Diff component', () => {
@@ -91,7 +92,7 @@ describe('Suggestion Diff component', () => {
});
it('emits apply', () => {
- expect(wrapper.emitted().apply).toEqual([[expect.any(Function)]]);
+ expect(wrapper.emitted().apply).toEqual([[expect.any(Function), undefined]]);
});
it('does not render apply suggestion and add to batch buttons', () => {
diff --git a/spec/frontend/vue_shared/components/markdown/suggestion_diff_row_spec.js b/spec/frontend/vue_shared/components/markdown/suggestion_diff_row_spec.js
index b67f4cf12bf..f9a8b64f89b 100644
--- a/spec/frontend/vue_shared/components/markdown/suggestion_diff_row_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/suggestion_diff_row_spec.js
@@ -61,11 +61,7 @@ describe('SuggestionDiffRow', () => {
});
expect(wrapper.classes()).toContain('line_holder');
- expect(
- findSuggestionContent()
- .find('span')
- .classes(),
- ).toContain('line');
+ expect(findSuggestionContent().find('span').classes()).toContain('line');
});
it('renders the rich text when it is available', () => {
diff --git a/spec/frontend/vue_shared/components/markdown/suggestion_diff_spec.js b/spec/frontend/vue_shared/components/markdown/suggestion_diff_spec.js
index 232feb126dc..5bd6bda2d2c 100644
--- a/spec/frontend/vue_shared/components/markdown/suggestion_diff_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/suggestion_diff_spec.js
@@ -42,6 +42,7 @@ const MOCK_DATA = {
is_applying_batch: true,
},
helpPagePath: 'path_to_docs',
+ defaultCommitMessage: 'Apply suggestion',
batchSuggestionsInfo: [{ suggestionId }],
};
diff --git a/spec/frontend/vue_shared/components/markdown/suggestions_spec.js b/spec/frontend/vue_shared/components/markdown/suggestions_spec.js
index 34ccdf38b00..6fcac2df0b6 100644
--- a/spec/frontend/vue_shared/components/markdown/suggestions_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/suggestions_spec.js
@@ -44,13 +44,14 @@ const MOCK_DATA = {
`,
isApplied: false,
helpPagePath: 'path_to_docs',
+ defaultCommitMessage: 'Apply suggestion',
};
describe('Suggestion component', () => {
let vm;
let diffTable;
- beforeEach(done => {
+ beforeEach((done) => {
const Component = Vue.extend(SuggestionsComponent);
vm = new Component({
@@ -86,7 +87,7 @@ describe('Suggestion component', () => {
});
it('generates a diff table that contains contents the suggested lines', () => {
- MOCK_DATA.suggestions[0].diff_lines.forEach(line => {
+ MOCK_DATA.suggestions[0].diff_lines.forEach((line) => {
const text = line.text.substring(1);
expect(diffTable.innerHTML.includes(text)).toBe(true);
diff --git a/spec/frontend/vue_shared/components/markdown/toolbar_button_spec.js b/spec/frontend/vue_shared/components/markdown/toolbar_button_spec.js
index 8a7946fd7b1..786dfabb990 100644
--- a/spec/frontend/vue_shared/components/markdown/toolbar_button_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/toolbar_button_spec.js
@@ -10,7 +10,7 @@ describe('toolbar_button', () => {
tag: 'test tag',
};
- const createComponent = propUpdates => {
+ const createComponent = (propUpdates) => {
wrapper = shallowMount(ToolbarButton, {
propsData: {
...defaultProps,
diff --git a/spec/frontend/vue_shared/components/navigation_tabs_spec.js b/spec/frontend/vue_shared/components/navigation_tabs_spec.js
index 561456d614e..b1119bfb150 100644
--- a/spec/frontend/vue_shared/components/navigation_tabs_spec.js
+++ b/spec/frontend/vue_shared/components/navigation_tabs_spec.js
@@ -1,64 +1,68 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import navigationTabs from '~/vue_shared/components/navigation_tabs.vue';
+import { mount } from '@vue/test-utils';
+import { GlTab } from '@gitlab/ui';
+import NavigationTabs from '~/vue_shared/components/navigation_tabs.vue';
describe('navigation tabs component', () => {
- let vm;
- let Component;
- let data;
+ let wrapper;
- beforeEach(() => {
- data = [
- {
- name: 'All',
- scope: 'all',
- count: 1,
- isActive: true,
- },
- {
- name: 'Pending',
- scope: 'pending',
- count: 0,
- isActive: false,
- },
- {
- name: 'Running',
- scope: 'running',
- isActive: false,
+ const data = [
+ {
+ name: 'All',
+ scope: 'all',
+ count: 1,
+ isActive: true,
+ },
+ {
+ name: 'Pending',
+ scope: 'pending',
+ count: 0,
+ isActive: false,
+ },
+ {
+ name: 'Running',
+ scope: 'running',
+ isActive: false,
+ },
+ ];
+
+ const createComponent = () => {
+ wrapper = mount(NavigationTabs, {
+ propsData: {
+ tabs: data,
+ scope: 'pipelines',
},
- ];
+ });
+ };
- Component = Vue.extend(navigationTabs);
- vm = mountComponent(Component, { tabs: data, scope: 'pipelines' });
+ beforeEach(() => {
+ createComponent();
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
+ wrapper = null;
});
it('should render tabs', () => {
- expect(vm.$el.querySelectorAll('li').length).toEqual(data.length);
+ expect(wrapper.findAll(GlTab)).toHaveLength(data.length);
});
it('should render active tab', () => {
- expect(vm.$el.querySelector('.active .js-pipelines-tab-all')).toBeDefined();
+ expect(wrapper.find('.js-pipelines-tab-all').classes('active')).toBe(true);
});
it('should render badge', () => {
- expect(vm.$el.querySelector('.js-pipelines-tab-all .badge').textContent.trim()).toEqual('1');
- expect(vm.$el.querySelector('.js-pipelines-tab-pending .badge').textContent.trim()).toEqual(
- '0',
- );
+ expect(wrapper.find('.js-pipelines-tab-all').text()).toContain('1');
+ expect(wrapper.find('.js-pipelines-tab-pending').text()).toContain('0');
});
it('should not render badge', () => {
- expect(vm.$el.querySelector('.js-pipelines-tab-running .badge')).toEqual(null);
+ expect(wrapper.find('.js-pipelines-tab-running .badge').exists()).toBe(false);
});
- it('should trigger onTabClick', () => {
- jest.spyOn(vm, '$emit').mockImplementation(() => {});
- vm.$el.querySelector('.js-pipelines-tab-pending').click();
+ it('should trigger onTabClick', async () => {
+ await wrapper.find('.js-pipelines-tab-pending').trigger('click');
- expect(vm.$emit).toHaveBeenCalledWith('onChangeTab', 'pending');
+ expect(wrapper.emitted('onChangeTab')).toEqual([['pending']]);
});
});
diff --git a/spec/frontend/vue_shared/components/notes/noteable_warning_spec.js b/spec/frontend/vue_shared/components/notes/noteable_warning_spec.js
index 61660f79b71..cc9f05beb06 100644
--- a/spec/frontend/vue_shared/components/notes/noteable_warning_spec.js
+++ b/spec/frontend/vue_shared/components/notes/noteable_warning_spec.js
@@ -10,7 +10,7 @@ describe('Issue Warning Component', () => {
const findConfidentialBlock = (w = wrapper) => w.find({ ref: 'confidential' });
const findLockedAndConfidentialBlock = (w = wrapper) => w.find({ ref: 'lockedAndConfidential' });
- const createComponent = props =>
+ const createComponent = (props) =>
shallowMount(NoteableWarning, {
propsData: {
...props,
diff --git a/spec/frontend/vue_shared/components/ordered_layout_spec.js b/spec/frontend/vue_shared/components/ordered_layout_spec.js
index eec153c3792..21588569d6a 100644
--- a/spec/frontend/vue_shared/components/ordered_layout_spec.js
+++ b/spec/frontend/vue_shared/components/ordered_layout_spec.js
@@ -29,7 +29,7 @@ describe('Ordered Layout', () => {
const verifyOrder = () =>
wrapper
.findAll('footer,header')
- .wrappers.map(x => (x.element.tagName === 'FOOTER' ? 'footer' : 'header'));
+ .wrappers.map((x) => (x.element.tagName === 'FOOTER' ? 'footer' : 'header'));
const createComponent = (props = {}) => {
wrapper = mount(TestComponent, {
diff --git a/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js b/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
index 0f7c8e97635..491f783622a 100644
--- a/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
+++ b/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
@@ -124,7 +124,10 @@ describe('AlertManagementEmptyState', () => {
it('renders the tabs selection with valid tabs', () => {
mountComponent({
props: {
- statusTabs: [{ status: 'opened', title: 'Open' }, { status: 'closed', title: 'Closed' }],
+ statusTabs: [
+ { status: 'opened', title: 'Open' },
+ { status: 'closed', title: 'Closed' },
+ ],
},
});
@@ -216,12 +219,7 @@ describe('AlertManagementEmptyState', () => {
findPagination().vm.$emit('input', 3);
await wrapper.vm.$nextTick();
- expect(
- findPagination()
- .findAll('.page-item')
- .at(0)
- .text(),
- ).toBe('Prev');
+ expect(findPagination().findAll('.page-item').at(0).text()).toBe('Prev');
});
it('returns prevPage number', async () => {
@@ -244,12 +242,7 @@ describe('AlertManagementEmptyState', () => {
findPagination().vm.$emit('input', 3);
await wrapper.vm.$nextTick();
- expect(
- findPagination()
- .findAll('.page-item')
- .at(1)
- .text(),
- ).toBe('Next');
+ expect(findPagination().findAll('.page-item').at(1).text()).toBe('Next');
});
it('returns nextPage number', async () => {
diff --git a/spec/frontend/vue_shared/components/pagination_links_spec.js b/spec/frontend/vue_shared/components/pagination_links_spec.js
index bf004c83c4f..ad82aee0098 100644
--- a/spec/frontend/vue_shared/components/pagination_links_spec.js
+++ b/spec/frontend/vue_shared/components/pagination_links_spec.js
@@ -49,7 +49,7 @@ describe('Pagination links component', () => {
});
it('should provide translated text to GitLab UI pagination', () => {
- Object.entries(translations).forEach(entry => {
+ Object.entries(translations).forEach((entry) => {
expect(glPagination.vm[entry[0]]).toBe(entry[1]);
});
});
diff --git a/spec/frontend/vue_shared/components/pikaday_spec.js b/spec/frontend/vue_shared/components/pikaday_spec.js
index 639b4828a09..1c6876c282c 100644
--- a/spec/frontend/vue_shared/components/pikaday_spec.js
+++ b/spec/frontend/vue_shared/components/pikaday_spec.js
@@ -1,42 +1,41 @@
import { shallowMount } from '@vue/test-utils';
+import { GlDatepicker } from '@gitlab/ui';
import datePicker from '~/vue_shared/components/pikaday.vue';
describe('datePicker', () => {
let wrapper;
- beforeEach(() => {
+
+ const buildWrapper = (propsData = {}) => {
wrapper = shallowMount(datePicker, {
- propsData: {
- label: 'label',
- },
- attachToDocument: true,
+ propsData,
});
- });
+ };
afterEach(() => {
wrapper.destroy();
wrapper = null;
});
+ it('should emit newDateSelected when GlDatePicker emits the input event', () => {
+ const minDate = new Date();
+ const maxDate = new Date();
+ const selectedDate = new Date();
+ const theDate = selectedDate.toISOString().slice(0, 10);
- it('should render label text', () => {
- expect(
- wrapper
- .find('.dropdown-toggle-text')
- .text()
- .trim(),
- ).toEqual('label');
- });
+ buildWrapper({ minDate, maxDate, selectedDate });
- it('should show calendar', () => {
- expect(wrapper.find('.pika-single').element).toBeDefined();
+ expect(wrapper.find(GlDatepicker).props()).toMatchObject({
+ minDate,
+ maxDate,
+ value: selectedDate,
+ });
+ wrapper.find(GlDatepicker).vm.$emit('input', selectedDate);
+ expect(wrapper.emitted('newDateSelected')[0][0]).toBe(theDate);
});
+ it('should emit the hidePicker event when GlDatePicker emits the close event', () => {
+ buildWrapper();
- it('should emit hidePicker event when dropdown is clicked', () => {
- // Removing the bootstrap data-toggle property,
- // because it interfers with our click event
- delete wrapper.find('.dropdown-menu-toggle').element.dataset.toggle;
-
- wrapper.find('.dropdown-menu-toggle').trigger('click');
+ wrapper.find(GlDatepicker).vm.$emit('close');
- expect(wrapper.emitted('hidePicker')).toEqual([[]]);
+ expect(wrapper.emitted('hidePicker')).toHaveLength(1);
});
});
diff --git a/spec/frontend/vue_shared/components/project_avatar/default_spec.js b/spec/frontend/vue_shared/components/project_avatar/default_spec.js
index 090f8b69213..0daadeebc20 100644
--- a/spec/frontend/vue_shared/components/project_avatar/default_spec.js
+++ b/spec/frontend/vue_shared/components/project_avatar/default_spec.js
@@ -19,7 +19,7 @@ describe('ProjectAvatarDefault component', () => {
vm.$destroy();
});
- it('renders identicon if project has no avatar_url', done => {
+ it('renders identicon if project has no avatar_url', (done) => {
const expectedText = getFirstCharacterCapitalized(projectData.name);
vm.project = {
@@ -38,7 +38,7 @@ describe('ProjectAvatarDefault component', () => {
.catch(done.fail);
});
- it('renders avatar image if project has avatar_url', done => {
+ it('renders avatar image if project has avatar_url', (done) => {
const avatarUrl = `${TEST_HOST}/images/home/nasa.svg`;
vm.project = {
diff --git a/spec/frontend/vue_shared/components/project_selector/project_selector_spec.js b/spec/frontend/vue_shared/components/project_selector/project_selector_spec.js
index 6d1ebe85aa0..016622fd0bb 100644
--- a/spec/frontend/vue_shared/components/project_selector/project_selector_spec.js
+++ b/spec/frontend/vue_shared/components/project_selector/project_selector_spec.js
@@ -18,6 +18,13 @@ describe('ProjectSelector component', () => {
selected = selected.concat(allProjects.slice(0, 3)).concat(allProjects.slice(5, 8));
const findSearchInput = () => wrapper.find(GlSearchBoxByType).find('input');
+ const findLegendText = () => wrapper.find('[data-testid="legend-text"]').text();
+ const search = (query) => {
+ const searchInput = findSearchInput();
+
+ searchInput.setValue(query);
+ searchInput.trigger('input');
+ };
beforeEach(() => {
wrapper = mount(Vue.extend(ProjectSelector), {
@@ -31,7 +38,7 @@ describe('ProjectSelector component', () => {
showSearchErrorMessage: false,
totalResults: searchResults.length,
},
- attachToDocument: true,
+ attachTo: document.body,
});
({ vm } = wrapper);
@@ -48,10 +55,7 @@ describe('ProjectSelector component', () => {
it(`triggers a search when the search input value changes`, () => {
jest.spyOn(vm, '$emit').mockImplementation(() => {});
const query = 'my test query!';
- const searchInput = findSearchInput();
-
- searchInput.setValue(query);
- searchInput.trigger('input');
+ search(query);
expect(vm.$emit).toHaveBeenCalledWith('searched', query);
});
@@ -121,15 +125,21 @@ describe('ProjectSelector component', () => {
`(
'is "$expected" given $count results are showing out of $total',
({ count, total, expected }) => {
+ search('gitlab ui');
+
wrapper.setProps({
projectSearchResults: searchResults.slice(0, count),
totalResults: total,
});
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.text()).toContain(expected);
+ expect(findLegendText()).toBe(expected);
});
},
);
+
+ it('is not rendered without searching', () => {
+ expect(findLegendText()).toBe('');
+ });
});
});
diff --git a/spec/frontend/vue_shared/components/registry/details_row_spec.js b/spec/frontend/vue_shared/components/registry/details_row_spec.js
index 16a55b84787..09dacfae363 100644
--- a/spec/frontend/vue_shared/components/registry/details_row_spec.js
+++ b/spec/frontend/vue_shared/components/registry/details_row_spec.js
@@ -8,7 +8,7 @@ describe('DetailsRow', () => {
const findIcon = () => wrapper.find(GlIcon);
const findDefaultSlot = () => wrapper.find('[data-testid="default-slot"]');
- const mountComponent = props => {
+ const mountComponent = (props) => {
wrapper = shallowMount(component, {
propsData: {
icon: 'clock',
diff --git a/spec/frontend/vue_shared/components/registry/list_item_spec.js b/spec/frontend/vue_shared/components/registry/list_item_spec.js
index 2a48bf4f2d6..33c9c808dc3 100644
--- a/spec/frontend/vue_shared/components/registry/list_item_spec.js
+++ b/spec/frontend/vue_shared/components/registry/list_item_spec.js
@@ -11,7 +11,7 @@ describe('list item', () => {
const findRightPrimarySlot = () => wrapper.find('[data-testid="right-primary"]');
const findRightSecondarySlot = () => wrapper.find('[data-testid="right-secondary"]');
const findRightActionSlot = () => wrapper.find('[data-testid="right-action"]');
- const findDetailsSlot = name => wrapper.find(`[data-testid="${name}"]`);
+ const findDetailsSlot = (name) => wrapper.find(`[data-testid="${name}"]`);
const findToggleDetailsButton = () => wrapper.find(GlButton);
const mountComponent = (propsData, slots) => {
@@ -74,14 +74,14 @@ describe('list item', () => {
findToggleDetailsButton().vm.$emit('click');
await wrapper.vm.$nextTick();
- slotNames.forEach(name => {
+ slotNames.forEach((name) => {
expect(findDetailsSlot(name).exists()).toBe(true);
});
});
it('are not visible when details are not shown', () => {
mountComponent({}, slotMocks);
- slotNames.forEach(name => {
+ slotNames.forEach((name) => {
expect(findDetailsSlot(name).exists()).toBe(false);
});
});
diff --git a/spec/frontend/vue_shared/components/registry/metadata_item_spec.js b/spec/frontend/vue_shared/components/registry/metadata_item_spec.js
index ff968ff1831..3d3cfbe13e3 100644
--- a/spec/frontend/vue_shared/components/registry/metadata_item_spec.js
+++ b/spec/frontend/vue_shared/components/registry/metadata_item_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import { GlIcon, GlLink } from '@gitlab/ui';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import component from '~/vue_shared/components/registry/metadata_item.vue';
import TooltipOnTruncate from '~/vue_shared/components/tooltip_on_truncate.vue';
@@ -12,6 +13,9 @@ describe('Metadata Item', () => {
const mountComponent = (propsData = defaultProps) => {
wrapper = shallowMount(component, {
propsData,
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
});
};
@@ -24,8 +28,9 @@ describe('Metadata Item', () => {
const findLink = (w = wrapper) => w.find(GlLink);
const findText = () => wrapper.find('[data-testid="metadata-item-text"]');
const findTooltipOnTruncate = (w = wrapper) => w.find(TooltipOnTruncate);
+ const findTextTooltip = () => wrapper.find('[data-testid="text-tooltip-container"]');
- describe.each(['xs', 's', 'm', 'l', 'xl'])('size class', size => {
+ describe.each(['xs', 's', 'm', 'l', 'xl'])('size class', (size) => {
const className = `mw-${size}`;
it(`${size} is assigned correctly to text`, () => {
@@ -55,6 +60,22 @@ describe('Metadata Item', () => {
expect(tooltip.exists()).toBe(true);
expect(tooltip.attributes('title')).toBe(defaultProps.text);
});
+
+ describe('with tooltip prop set to something', () => {
+ const textTooltip = 'foo';
+ it('hides tooltip_on_truncate', () => {
+ mountComponent({ ...defaultProps, textTooltip });
+
+ expect(findTooltipOnTruncate(findText()).exists()).toBe(false);
+ });
+
+ it('sets the tooltip on the text', () => {
+ mountComponent({ ...defaultProps, textTooltip });
+
+ const tooltip = getBinding(findTextTooltip().element, 'gl-tooltip');
+ expect(tooltip.value.title).toBe(textTooltip);
+ });
+ });
});
describe('link', () => {
diff --git a/spec/frontend/vue_shared/components/registry/title_area_spec.js b/spec/frontend/vue_shared/components/registry/title_area_spec.js
index b743a663f06..fb0009ebb8d 100644
--- a/spec/frontend/vue_shared/components/registry/title_area_spec.js
+++ b/spec/frontend/vue_shared/components/registry/title_area_spec.js
@@ -1,4 +1,4 @@
-import { GlAvatar, GlSprintf, GlLink } from '@gitlab/ui';
+import { GlAvatar, GlSprintf, GlLink, GlSkeletonLoader } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import component from '~/vue_shared/components/registry/title_area.vue';
@@ -9,12 +9,13 @@ describe('title area', () => {
const findSubHeaderSlot = () => wrapper.find('[data-testid="sub-header"]');
const findRightActionsSlot = () => wrapper.find('[data-testid="right-actions"]');
- const findMetadataSlot = name => wrapper.find(`[data-testid="${name}"]`);
+ const findMetadataSlot = (name) => wrapper.find(`[data-testid="${name}"]`);
const findTitle = () => wrapper.find('[data-testid="title"]');
const findAvatar = () => wrapper.find(GlAvatar);
const findInfoMessages = () => wrapper.findAll('[data-testid="info-message"]');
const findDynamicSlot = () => wrapper.find(`[data-testid="${DYNAMIC_SLOT}`);
const findSlotOrderElements = () => wrapper.findAll('[slot-test]');
+ const findSkeletonLoader = () => wrapper.find(GlSkeletonLoader);
const mountComponent = ({ propsData = { title: 'foo' }, slots } = {}) => {
wrapper = shallowMount(component, {
@@ -96,10 +97,33 @@ describe('title area', () => {
mountComponent({ slots: slotMocks });
await wrapper.vm.$nextTick();
- slotNames.forEach(name => {
+ slotNames.forEach((name) => {
expect(findMetadataSlot(name).exists()).toBe(true);
});
});
+
+ it('are hidden when metadata-loading is true', async () => {
+ mountComponent({ slots: slotMocks, propsData: { title: 'foo', metadataLoading: true } });
+
+ await wrapper.vm.$nextTick();
+ slotNames.forEach((name) => {
+ expect(findMetadataSlot(name).exists()).toBe(false);
+ });
+ });
+ });
+
+ describe('metadata skeleton loader', () => {
+ it('is hidden when metadata loading is false', () => {
+ mountComponent();
+
+ expect(findSkeletonLoader().exists()).toBe(false);
+ });
+
+ it('is shown when metadata loading is true', () => {
+ mountComponent({ propsData: { metadataLoading: true } });
+
+ expect(findSkeletonLoader().exists()).toBe(true);
+ });
});
describe('dynamic slots', () => {
@@ -142,16 +166,8 @@ describe('title area', () => {
await wrapper.vm.$nextTick();
- expect(
- findSlotOrderElements()
- .at(0)
- .attributes('data-testid'),
- ).toBe(DYNAMIC_SLOT);
- expect(
- findSlotOrderElements()
- .at(1)
- .attributes('data-testid'),
- ).toBe('metadata-foo');
+ expect(findSlotOrderElements().at(0).attributes('data-testid')).toBe(DYNAMIC_SLOT);
+ expect(findSlotOrderElements().at(1).attributes('data-testid')).toBe('metadata-foo');
});
});
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/editor_service_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/editor_service_spec.js
index d79df4d0557..51619cd9578 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/editor_service_spec.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/editor_service_spec.js
@@ -20,7 +20,7 @@ describe('Editor Service', () => {
let mockInstance;
let event;
let handler;
- const parseHtml = str => {
+ const parseHtml = (str) => {
const wrapper = document.createElement('div');
wrapper.innerHTML = str;
return wrapper.firstChild;
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/modals/add_image/upload_image_tab_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/modals/add_image/upload_image_tab_spec.js
index ded490b2568..81fd059ce4f 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/modals/add_image/upload_image_tab_spec.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/modals/add_image/upload_image_tab_spec.js
@@ -10,7 +10,7 @@ describe('Upload Image Tab', () => {
afterEach(() => wrapper.destroy());
- const triggerInputEvent = size => {
+ const triggerInputEvent = (size) => {
const file = { size, name: 'file-name.png' };
const mockEvent = new Event('input');
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/modals/insert_video_modal_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/modals/insert_video_modal_spec.js
index be3a4030b1d..d59d4cc1de9 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/modals/insert_video_modal_spec.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/modals/insert_video_modal_spec.js
@@ -8,7 +8,7 @@ describe('Insert Video Modal', () => {
const findModal = () => wrapper.find(GlModal);
const findUrlInput = () => wrapper.find({ ref: 'urlInput' });
- const triggerInsertVideo = url => {
+ const triggerInsertVideo = (url) => {
const preventDefault = jest.fn();
findUrlInput().vm.$emit('input', url);
findModal().vm.$emit('primary', { preventDefault });
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/rich_content_editor_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/rich_content_editor_spec.js
index cd1157a1c2e..2eb353a1801 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/rich_content_editor_spec.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/rich_content_editor_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { mockEditorApi } from '@toast-ui/vue-editor';
+import { Editor, mockEditorApi } from '@toast-ui/vue-editor';
import RichContentEditor from '~/vue_shared/components/rich_content_editor/rich_content_editor.vue';
import AddImageModal from '~/vue_shared/components/rich_content_editor/modals/add_image/add_image_modal.vue';
import InsertVideoModal from '~/vue_shared/components/rich_content_editor/modals/insert_video_modal.vue';
@@ -17,16 +17,17 @@ import {
insertVideo,
registerHTMLToMarkdownRenderer,
getEditorOptions,
+ getMarkdown,
} from '~/vue_shared/components/rich_content_editor/services/editor_service';
jest.mock('~/vue_shared/components/rich_content_editor/services/editor_service', () => ({
- ...jest.requireActual('~/vue_shared/components/rich_content_editor/services/editor_service'),
addCustomEventListener: jest.fn(),
removeCustomEventListener: jest.fn(),
addImage: jest.fn(),
insertVideo: jest.fn(),
registerHTMLToMarkdownRenderer: jest.fn(),
getEditorOptions: jest.fn(),
+ getMarkdown: jest.fn(),
}));
describe('Rich Content Editor', () => {
@@ -38,9 +39,12 @@ describe('Rich Content Editor', () => {
const findAddImageModal = () => wrapper.find(AddImageModal);
const findInsertVideoModal = () => wrapper.find(InsertVideoModal);
- const buildWrapper = () => {
+ const buildWrapper = async () => {
wrapper = shallowMount(RichContentEditor, {
propsData: { content, imageRoot },
+ stubs: {
+ ToastEditor: Editor,
+ },
});
};
@@ -89,9 +93,8 @@ describe('Rich Content Editor', () => {
it('emits an input event with the changed content', () => {
const changedMarkdown = '## Changed Markdown';
- const getMarkdownMock = jest.fn().mockReturnValueOnce(changedMarkdown);
+ getMarkdown.mockReturnValueOnce(changedMarkdown);
- findEditor().setMethods({ invoke: getMarkdownMock });
findEditor().vm.$emit('change');
expect(wrapper.emitted().input[0][0]).toBe(changedMarkdown);
@@ -147,6 +150,7 @@ describe('Rich Content Editor', () => {
});
it('emits load event with the markdown formatted by Toast UI', () => {
+ mockEditorApi.getMarkdown.mockReturnValueOnce(formattedMarkdown);
expect(mockEditorApi.getMarkdown).toHaveBeenCalled();
expect(wrapper.emitted('load')[0]).toEqual([{ formattedMarkdown }]);
});
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/services/build_html_to_markdown_renderer_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/services/build_html_to_markdown_renderer_spec.js
index 85516eae4cf..3caf03dabba 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/services/build_html_to_markdown_renderer_spec.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/services/build_html_to_markdown_renderer_spec.js
@@ -8,9 +8,9 @@ describe('rich_content_editor/services/html_to_markdown_renderer', () => {
beforeEach(() => {
baseRenderer = {
- trim: jest.fn(input => `trimmed ${input}`),
- getSpaceCollapsedText: jest.fn(input => `space collapsed ${input}`),
- getSpaceControlled: jest.fn(input => `space controlled ${input}`),
+ trim: jest.fn((input) => `trimmed ${input}`),
+ getSpaceCollapsedText: jest.fn((input) => `space collapsed ${input}`),
+ getSpaceControlled: jest.fn((input) => `space controlled ${input}`),
convert: jest.fn(),
};
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/mock_data.js b/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/mock_data.js
index 5cf3961819e..407072fb596 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/mock_data.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/mock_data.js
@@ -1,12 +1,12 @@
// Node spec helpers
-export const buildMockTextNode = literal => ({ literal, type: 'text' });
+export const buildMockTextNode = (literal) => ({ literal, type: 'text' });
export const normalTextNode = buildMockTextNode('This is just normal text.');
// Token spec helpers
-const buildMockUneditableOpenToken = type => {
+const buildMockUneditableOpenToken = (type) => {
return {
type: 'openTag',
tagName: type,
@@ -17,7 +17,7 @@ const buildMockUneditableOpenToken = type => {
};
};
-const buildMockTextToken = content => {
+const buildMockTextToken = (content) => {
return {
type: 'text',
tagName: null,
@@ -25,7 +25,7 @@ const buildMockTextToken = content => {
};
};
-const buildMockUneditableCloseToken = type => ({ type: 'closeTag', tagName: type });
+const buildMockUneditableCloseToken = (type) => ({ type: 'closeTag', tagName: type });
export const originToken = buildMockTextToken('{:.no_toc .hidden-md .hidden-lg}');
const uneditableOpenToken = buildMockUneditableOpenToken('div');
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_identifier_instance_text_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_identifier_instance_text_spec.js
index 2897929f1bf..521885f5687 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_identifier_instance_text_spec.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_identifier_instance_text_spec.js
@@ -36,7 +36,7 @@ describe('Render Identifier Instance Text renderer', () => {
`(
'should return inline editable, uneditable, and editable tokens in sequence',
({ start, middle, end }) => {
- const buildMockTextToken = content => ({ type: 'text', tagName: null, content });
+ const buildMockTextToken = (content) => ({ type: 'text', tagName: null, content });
const startToken = buildMockTextToken(start);
const middleToken = buildMockTextToken(middle);
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_identifier_paragraph_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_identifier_paragraph_spec.js
index b3d9576f38b..470cf9bddaa 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_identifier_paragraph_spec.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_identifier_paragraph_spec.js
@@ -2,7 +2,7 @@ import renderer from '~/vue_shared/components/rich_content_editor/services/rende
import { buildMockTextNode } from './mock_data';
-const buildMockParagraphNode = literal => {
+const buildMockParagraphNode = (literal) => {
return {
firstChild: buildMockTextNode(literal),
type: 'paragraph',
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/toolbar_item_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/toolbar_item_spec.js
index 2db15a71215..0e6f951bd53 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/toolbar_item_spec.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/toolbar_item_spec.js
@@ -9,7 +9,7 @@ describe('Toolbar Item', () => {
const findIcon = () => wrapper.find(GlIcon);
const findButton = () => wrapper.find('button');
- const buildWrapper = propsData => {
+ const buildWrapper = (propsData) => {
wrapper = shallowMount(ToolbarItem, {
propsData,
directives: {
diff --git a/spec/frontend/vue_shared/components/security_reports/help_icon_spec.js b/spec/frontend/vue_shared/components/security_reports/help_icon_spec.js
index 60203493cbd..ae86106d86e 100644
--- a/spec/frontend/vue_shared/components/security_reports/help_icon_spec.js
+++ b/spec/frontend/vue_shared/components/security_reports/help_icon_spec.js
@@ -8,7 +8,7 @@ const discoverProjectSecurityPath = '/discoverProjectSecurityPath';
describe('HelpIcon component', () => {
let wrapper;
- const createWrapper = props => {
+ const createWrapper = (props) => {
wrapper = shallowMount(HelpIcon, {
propsData: {
helpPath,
diff --git a/spec/frontend/vue_shared/components/security_reports/security_summary_spec.js b/spec/frontend/vue_shared/components/security_reports/security_summary_spec.js
index e57152c3cbf..f186eb848f2 100644
--- a/spec/frontend/vue_shared/components/security_reports/security_summary_spec.js
+++ b/spec/frontend/vue_shared/components/security_reports/security_summary_spec.js
@@ -6,7 +6,7 @@ import { groupedTextBuilder } from '~/vue_shared/security_reports/store/utils';
describe('SecuritySummary component', () => {
let wrapper;
- const createWrapper = message => {
+ const createWrapper = (message) => {
wrapper = shallowMount(SecuritySummary, {
propsData: { message },
stubs: {
@@ -26,7 +26,7 @@ describe('SecuritySummary component', () => {
groupedTextBuilder({ reportType: 'Security scanning', critical: 1, high: 0, total: 1 }),
groupedTextBuilder({ reportType: 'Security scanning', critical: 0, high: 1, total: 1 }),
groupedTextBuilder({ reportType: 'Security scanning', critical: 1, high: 2, total: 3 }),
- ])('given the message %p', message => {
+ ])('given the message %p', (message) => {
beforeEach(() => {
createWrapper(message);
});
diff --git a/spec/frontend/vue_shared/components/sidebar/collapsed_grouped_date_picker_spec.js b/spec/frontend/vue_shared/components/sidebar/collapsed_grouped_date_picker_spec.js
index 08fc822577e..230442ec547 100644
--- a/spec/frontend/vue_shared/components/sidebar/collapsed_grouped_date_picker_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/collapsed_grouped_date_picker_spec.js
@@ -12,7 +12,7 @@ describe('collapsedGroupedDatePicker', () => {
});
describe('toggleCollapse events', () => {
- beforeEach(done => {
+ beforeEach((done) => {
jest.spyOn(vm, 'toggleSidebar').mockImplementation(() => {});
vm.minDate = new Date('07/17/2016');
Vue.nextTick(done);
@@ -26,7 +26,7 @@ describe('collapsedGroupedDatePicker', () => {
});
describe('minDate and maxDate', () => {
- beforeEach(done => {
+ beforeEach((done) => {
vm.minDate = new Date('07/17/2016');
vm.maxDate = new Date('07/17/2017');
Vue.nextTick(done);
@@ -42,7 +42,7 @@ describe('collapsedGroupedDatePicker', () => {
});
describe('minDate', () => {
- beforeEach(done => {
+ beforeEach((done) => {
vm.minDate = new Date('07/17/2016');
Vue.nextTick(done);
});
@@ -56,7 +56,7 @@ describe('collapsedGroupedDatePicker', () => {
});
describe('maxDate', () => {
- beforeEach(done => {
+ beforeEach((done) => {
vm.maxDate = new Date('07/17/2017');
Vue.nextTick(done);
});
diff --git a/spec/frontend/vue_shared/components/sidebar/date_picker_spec.js b/spec/frontend/vue_shared/components/sidebar/date_picker_spec.js
index 47edfbe3115..fc1fa3fc1c1 100644
--- a/spec/frontend/vue_shared/components/sidebar/date_picker_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/date_picker_spec.js
@@ -46,12 +46,7 @@ describe('SidebarDatePicker', () => {
it('should render None if there is no selectedDate', () => {
mountComponent();
- expect(
- wrapper
- .find('.value-content span')
- .text()
- .trim(),
- ).toEqual('None');
+ expect(wrapper.find('.value-content span').text().trim()).toEqual('None');
});
it('should render date-picker when editing', () => {
@@ -63,12 +58,7 @@ describe('SidebarDatePicker', () => {
it('should render label', () => {
const label = 'label';
mountComponent({ label });
- expect(
- wrapper
- .find('.title')
- .text()
- .trim(),
- ).toEqual(label);
+ expect(wrapper.find('.title').text().trim()).toEqual(label);
});
it('should render loading-icon when isLoading', () => {
@@ -82,12 +72,7 @@ describe('SidebarDatePicker', () => {
});
it('should render edit button', () => {
- expect(
- wrapper
- .find('.title .btn-blank')
- .text()
- .trim(),
- ).toEqual('Edit');
+ expect(wrapper.find('.title .btn-blank').text().trim()).toEqual('Edit');
});
it('should enable editing when edit button is clicked', async () => {
@@ -102,12 +87,7 @@ describe('SidebarDatePicker', () => {
it('should render date if selectedDate', () => {
mountComponent({ selectedDate: new Date('07/07/2017') });
- expect(
- wrapper
- .find('.value-content strong')
- .text()
- .trim(),
- ).toEqual('Jul 7, 2017');
+ expect(wrapper.find('.value-content strong').text().trim()).toEqual('Jul 7, 2017');
});
describe('selectedDate and editable', () => {
@@ -116,12 +96,7 @@ describe('SidebarDatePicker', () => {
});
it('should render remove button if selectedDate and editable', () => {
- expect(
- wrapper
- .find('.value-content .btn-blank')
- .text()
- .trim(),
- ).toEqual('remove');
+ expect(wrapper.find('.value-content .btn-blank').text().trim()).toEqual('remove');
});
it('should emit saveDate with null when remove button is clicked', () => {
diff --git a/spec/frontend/vue_shared/components/sidebar/issuable_move_dropdown_spec.js b/spec/frontend/vue_shared/components/sidebar/issuable_move_dropdown_spec.js
index a97e26caf53..256b3cff525 100644
--- a/spec/frontend/vue_shared/components/sidebar/issuable_move_dropdown_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/issuable_move_dropdown_spec.js
@@ -184,11 +184,7 @@ describe('IssuableMoveDropdown', () => {
});
it('renders gl-dropdown-form component', () => {
- expect(
- findDropdownEl()
- .find(GlDropdownForm)
- .exists(),
- ).toBe(true);
+ expect(findDropdownEl().find(GlDropdownForm).exists()).toBe(true);
});
it('renders header element', () => {
@@ -216,11 +212,7 @@ describe('IssuableMoveDropdown', () => {
await wrapper.vm.$nextTick();
- expect(
- findDropdownEl()
- .find(GlLoadingIcon)
- .exists(),
- ).toBe(true);
+ expect(findDropdownEl().find(GlLoadingIcon).exists()).toBe(true);
});
it('renders gl-dropdown-item components for available projects', async () => {
@@ -288,10 +280,7 @@ describe('IssuableMoveDropdown', () => {
await wrapper.vm.$nextTick();
expect(
- wrapper
- .find('[data-testid="footer"]')
- .find(GlButton)
- .attributes('disabled'),
+ wrapper.find('[data-testid="footer"]').find(GlButton).attributes('disabled'),
).not.toBeDefined();
});
});
@@ -331,10 +320,7 @@ describe('IssuableMoveDropdown', () => {
});
it('close icon in dropdown header closes the dropdown when clicked', () => {
- wrapper
- .find('[data-testid="header"]')
- .find(GlButton)
- .vm.$emit('click', mockEvent);
+ wrapper.find('[data-testid="header"]').find(GlButton).vm.$emit('click', mockEvent);
expect(wrapper.vm.$refs.dropdown.hide).toHaveBeenCalled();
});
@@ -346,10 +332,7 @@ describe('IssuableMoveDropdown', () => {
await wrapper.vm.$nextTick();
- wrapper
- .findAll(GlDropdownItem)
- .at(0)
- .vm.$emit('click', mockEvent);
+ wrapper.findAll(GlDropdownItem).at(0).vm.$emit('click', mockEvent);
expect(wrapper.vm.selectedProject).toBe(mockProjects[0]);
});
@@ -361,10 +344,7 @@ describe('IssuableMoveDropdown', () => {
await wrapper.vm.$nextTick();
- wrapper
- .find('[data-testid="footer"]')
- .find(GlButton)
- .vm.$emit('click');
+ wrapper.find('[data-testid="footer"]').find(GlButton).vm.$emit('click');
expect(wrapper.vm.$refs.dropdown.hide).toHaveBeenCalled();
expect(wrapper.emitted('move-issuable')).toBeTruthy();
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select/base_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select/base_spec.js
index 596cb22fca5..a55ad37c498 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select/base_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select/base_spec.js
@@ -15,7 +15,7 @@ describe('BaseComponent', () => {
let wrapper;
let vm;
- beforeEach(done => {
+ beforeEach((done) => {
wrapper = createComponent();
({ vm } = wrapper);
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_create_label_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_create_label_spec.js
index c2091a681f2..4b4d265800b 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_create_label_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_create_label_spec.js
@@ -5,7 +5,7 @@ import dropdownCreateLabelComponent from '~/vue_shared/components/sidebar/labels
import { mockSuggestedColors } from './mock_data';
-const createComponent = headerTitle => {
+const createComponent = (headerTitle) => {
const Component = Vue.extend(dropdownCreateLabelComponent);
return mountComponent(Component, {
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_collapsed_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_collapsed_spec.js
index 71c040c6633..003f3d2b4e6 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_collapsed_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_collapsed_spec.js
@@ -37,7 +37,7 @@ describe('DropdownValueCollapsedComponent', () => {
const labels = mockLabels.concat(mockLabels);
const vmMoreLabels = createComponent(labels);
- const expectedText = labels.map(label => label.title).join(', ');
+ const expectedText = labels.map((label) => label.title).join(', ');
expect(vmMoreLabels.labelsList).toBe(expectedText);
vmMoreLabels.$destroy();
@@ -53,7 +53,7 @@ describe('DropdownValueCollapsedComponent', () => {
const expectedText = `${mockMoreLabels
.slice(0, 5)
- .map(label => label.title)
+ .map((label) => label.title)
.join(', ')}, and ${mockMoreLabels.length - 5} more`;
expect(vmMoreLabels.labelsList).toBe(expectedText);
@@ -61,7 +61,7 @@ describe('DropdownValueCollapsedComponent', () => {
});
it('returns first label name when `labels` prop has only one item present', () => {
- const text = mockLabels.map(label => label.title).join(', ');
+ const text = mockLabels.map((label) => label.title).join(', ');
expect(vm.labelsList).toBe(text);
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_spec.js
index 53e8a0e1278..ecb3c3a42c8 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_spec.js
@@ -71,12 +71,7 @@ describe('DropdownValueComponent', () => {
it('render slot content inside component when `labels` prop is empty', () => {
const vmEmptyLabels = createComponent([]);
- expect(
- vmEmptyLabels
- .find('.text-secondary')
- .text()
- .trim(),
- ).toBe(mockConfig.emptyValueText);
+ expect(vmEmptyLabels.find('.text-secondary').text().trim()).toBe(mockConfig.emptyValueText);
vmEmptyLabels.destroy();
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_create_view_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_create_view_spec.js
index 04320a72be6..0f49fe4fc5b 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_create_view_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_create_view_spec.js
@@ -24,7 +24,7 @@ const createComponent = (initialState = mockConfig) => {
describe('DropdownContentsCreateView', () => {
let wrapper;
- const colors = Object.keys(mockSuggestedColors).map(color => ({
+ const colors = Object.keys(mockSuggestedColors).map((color) => ({
[color]: mockSuggestedColors[color],
}));
@@ -125,10 +125,7 @@ describe('DropdownContentsCreateView', () => {
});
it('renders dropdown back button element', () => {
- const backBtnEl = wrapper
- .find('.dropdown-title')
- .findAll(GlButton)
- .at(0);
+ const backBtnEl = wrapper.find('.dropdown-title').findAll(GlButton).at(0);
expect(backBtnEl.exists()).toBe(true);
expect(backBtnEl.attributes('aria-label')).toBe('Go back');
@@ -143,10 +140,7 @@ describe('DropdownContentsCreateView', () => {
});
it('renders dropdown close button element', () => {
- const closeBtnEl = wrapper
- .find('.dropdown-title')
- .findAll(GlButton)
- .at(1);
+ const closeBtnEl = wrapper.find('.dropdown-title').findAll(GlButton).at(1);
expect(closeBtnEl.exists()).toBe(true);
expect(closeBtnEl.attributes('aria-label')).toBe('Close');
@@ -190,10 +184,7 @@ describe('DropdownContentsCreateView', () => {
});
it('renders create button element', () => {
- const createBtnEl = wrapper
- .find('.dropdown-actions')
- .findAll(GlButton)
- .at(0);
+ const createBtnEl = wrapper.find('.dropdown-actions').findAll(GlButton).at(0);
expect(createBtnEl.exists()).toBe(true);
expect(createBtnEl.text()).toContain('Create');
@@ -211,10 +202,7 @@ describe('DropdownContentsCreateView', () => {
});
it('renders cancel button element', () => {
- const cancelBtnEl = wrapper
- .find('.dropdown-actions')
- .findAll(GlButton)
- .at(1);
+ const cancelBtnEl = wrapper.find('.dropdown-actions').findAll(GlButton).at(1);
expect(cancelBtnEl.exists()).toBe(true);
expect(cancelBtnEl.text()).toContain('Cancel');
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
index 78367b3a5b4..989cd256e26 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
@@ -371,9 +371,7 @@ describe('DropdownContentsLabelsView', () => {
wrapper.vm.$store.state.allowLabelCreate = false;
return wrapper.vm.$nextTick(() => {
- const createLabelLink = findDropdownFooter()
- .findAll(GlLink)
- .at(0);
+ const createLabelLink = findDropdownFooter().findAll(GlLink).at(0);
expect(createLabelLink.text()).not.toBe('Create label');
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js
index c742220ba8a..4909c43bc96 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js
@@ -19,7 +19,7 @@ describe('LabelsSelect Actions', () => {
});
describe('setInitialState', () => {
- it('sets initial store state', done => {
+ it('sets initial store state', (done) => {
testAction(
actions.setInitialState,
mockInitialState,
@@ -32,7 +32,7 @@ describe('LabelsSelect Actions', () => {
});
describe('toggleDropdownButton', () => {
- it('toggles dropdown button', done => {
+ it('toggles dropdown button', (done) => {
testAction(
actions.toggleDropdownButton,
{},
@@ -45,7 +45,7 @@ describe('LabelsSelect Actions', () => {
});
describe('toggleDropdownContents', () => {
- it('toggles dropdown contents', done => {
+ it('toggles dropdown contents', (done) => {
testAction(
actions.toggleDropdownContents,
{},
@@ -58,7 +58,7 @@ describe('LabelsSelect Actions', () => {
});
describe('toggleDropdownContentsCreateView', () => {
- it('toggles dropdown create view', done => {
+ it('toggles dropdown create view', (done) => {
testAction(
actions.toggleDropdownContentsCreateView,
{},
@@ -71,13 +71,13 @@ describe('LabelsSelect Actions', () => {
});
describe('requestLabels', () => {
- it('sets value of `state.labelsFetchInProgress` to `true`', done => {
+ it('sets value of `state.labelsFetchInProgress` to `true`', (done) => {
testAction(actions.requestLabels, {}, state, [{ type: types.REQUEST_LABELS }], [], done);
});
});
describe('receiveLabelsSuccess', () => {
- it('sets provided labels to `state.labels`', done => {
+ it('sets provided labels to `state.labels`', (done) => {
const labels = [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }];
testAction(
@@ -96,7 +96,7 @@ describe('LabelsSelect Actions', () => {
setFixtures('<div class="flash-container"></div>');
});
- it('sets value `state.labelsFetchInProgress` to `false`', done => {
+ it('sets value `state.labelsFetchInProgress` to `false`', (done) => {
testAction(
actions.receiveLabelsFailure,
{},
@@ -129,7 +129,7 @@ describe('LabelsSelect Actions', () => {
});
describe('on success', () => {
- it('dispatches `requestLabels` & `receiveLabelsSuccess` actions', done => {
+ it('dispatches `requestLabels` & `receiveLabelsSuccess` actions', (done) => {
const labels = [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }];
mock.onGet(/labels.json/).replyOnce(200, labels);
@@ -145,7 +145,7 @@ describe('LabelsSelect Actions', () => {
});
describe('on failure', () => {
- it('dispatches `requestLabels` & `receiveLabelsFailure` actions', done => {
+ it('dispatches `requestLabels` & `receiveLabelsFailure` actions', (done) => {
mock.onGet(/labels.json/).replyOnce(500, {});
testAction(
@@ -161,7 +161,7 @@ describe('LabelsSelect Actions', () => {
});
describe('requestCreateLabel', () => {
- it('sets value `state.labelCreateInProgress` to `true`', done => {
+ it('sets value `state.labelCreateInProgress` to `true`', (done) => {
testAction(
actions.requestCreateLabel,
{},
@@ -174,7 +174,7 @@ describe('LabelsSelect Actions', () => {
});
describe('receiveCreateLabelSuccess', () => {
- it('sets value `state.labelCreateInProgress` to `false`', done => {
+ it('sets value `state.labelCreateInProgress` to `false`', (done) => {
testAction(
actions.receiveCreateLabelSuccess,
{},
@@ -191,7 +191,7 @@ describe('LabelsSelect Actions', () => {
setFixtures('<div class="flash-container"></div>');
});
- it('sets value `state.labelCreateInProgress` to `false`', done => {
+ it('sets value `state.labelCreateInProgress` to `false`', (done) => {
testAction(
actions.receiveCreateLabelFailure,
{},
@@ -224,7 +224,7 @@ describe('LabelsSelect Actions', () => {
});
describe('on success', () => {
- it('dispatches `requestCreateLabel`, `receiveCreateLabelSuccess` & `toggleDropdownContentsCreateView` actions', done => {
+ it('dispatches `requestCreateLabel`, `receiveCreateLabelSuccess` & `toggleDropdownContentsCreateView` actions', (done) => {
const label = { id: 1 };
mock.onPost(/labels.json/).replyOnce(200, label);
@@ -244,7 +244,7 @@ describe('LabelsSelect Actions', () => {
});
describe('on failure', () => {
- it('dispatches `requestCreateLabel` & `receiveCreateLabelFailure` actions', done => {
+ it('dispatches `requestCreateLabel` & `receiveCreateLabelFailure` actions', (done) => {
mock.onPost(/labels.json/).replyOnce(500, {});
testAction(
@@ -260,7 +260,7 @@ describe('LabelsSelect Actions', () => {
});
describe('updateSelectedLabels', () => {
- it('updates `state.labels` based on provided `labels` param', done => {
+ it('updates `state.labels` based on provided `labels` param', (done) => {
const labels = [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }];
testAction(
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/getters_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/getters_spec.js
index 52116f757c5..1f899e84897 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/getters_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/getters_spec.js
@@ -26,7 +26,10 @@ describe('LabelsSelect Getters', () => {
});
it('returns first label title and remaining labels count when state.labels has more than 1 label', () => {
- const labels = [{ id: 1, title: 'Foo', set: true }, { id: 2, title: 'Bar', set: true }];
+ const labels = [
+ { id: 1, title: 'Foo', set: true },
+ { id: 2, title: 'Bar', set: true },
+ ];
expect(getters.dropdownButtonText({ labels }, { isDropdownVariantSidebar: true })).toBe(
'Foo +1 more',
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js
index 8081806e314..208f2f2d42d 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js
@@ -93,14 +93,14 @@ describe('LabelsSelect Mutations', () => {
});
it('sets provided `labels` to `state.labels` along with `set` prop based on `state.selectedLabels`', () => {
- const selectedLabelIds = selectedLabels.map(label => label.id);
+ const selectedLabelIds = selectedLabels.map((label) => label.id);
const state = {
selectedLabels,
labelsFetchInProgress: true,
};
mutations[types.RECEIVE_SET_LABELS_SUCCESS](state, labels);
- state.labels.forEach(label => {
+ state.labels.forEach((label) => {
if (selectedLabelIds.includes(label.id)) {
expect(label.set).toBe(true);
}
@@ -162,7 +162,7 @@ describe('LabelsSelect Mutations', () => {
};
mutations[types.UPDATE_SELECTED_LABELS](state, { labels: [{ id: 2 }] });
- state.labels.forEach(label => {
+ state.labels.forEach((label) => {
if (updatedLabelIds.includes(label.id)) {
expect(label.touched).toBe(true);
expect(label.set).toBe(true);
diff --git a/spec/frontend/vue_shared/components/slot_switch_spec.js b/spec/frontend/vue_shared/components/slot_switch_spec.js
index 73307b5573f..f25b9877aba 100644
--- a/spec/frontend/vue_shared/components/slot_switch_spec.js
+++ b/spec/frontend/vue_shared/components/slot_switch_spec.js
@@ -10,14 +10,14 @@ describe('SlotSwitch', () => {
let wrapper;
- const createComponent = propsData => {
+ const createComponent = (propsData) => {
wrapper = shallowMount(SlotSwitch, {
propsData,
slots,
});
};
- const getChildrenHtml = () => wrapper.findAll('* *').wrappers.map(c => c.html());
+ const getChildrenHtml = () => wrapper.findAll('* *').wrappers.map((c) => c.html());
afterEach(() => {
if (wrapper) {
diff --git a/spec/frontend/vue_shared/components/split_button_spec.js b/spec/frontend/vue_shared/components/split_button_spec.js
index e09bc073042..ad11e6519c4 100644
--- a/spec/frontend/vue_shared/components/split_button_spec.js
+++ b/spec/frontend/vue_shared/components/split_button_spec.js
@@ -19,18 +19,15 @@ const mockActionItems = [
describe('SplitButton', () => {
let wrapper;
- const createComponent = propsData => {
+ const createComponent = (propsData) => {
wrapper = shallowMount(SplitButton, {
propsData,
});
};
const findDropdown = () => wrapper.find(GlDropdown);
- const findDropdownItem = (index = 0) =>
- findDropdown()
- .findAll(GlDropdownItem)
- .at(index);
- const selectItem = index => {
+ const findDropdownItem = (index = 0) => findDropdown().findAll(GlDropdownItem).at(index);
+ const selectItem = (index) => {
findDropdownItem(index).vm.$emit('click');
return wrapper.vm.$nextTick();
@@ -87,7 +84,7 @@ describe('SplitButton', () => {
const addChangeEventHandler = () => {
changeEventHandler = jest.fn();
- wrapper.vm.$once('change', item => changeEventHandler(item));
+ wrapper.vm.$once('change', (item) => changeEventHandler(item));
};
it('defaults to first actionItems event', () => {
diff --git a/spec/frontend/vue_shared/components/stacked_progress_bar_spec.js b/spec/frontend/vue_shared/components/stacked_progress_bar_spec.js
index 0786882f527..c6f01efa71a 100644
--- a/spec/frontend/vue_shared/components/stacked_progress_bar_spec.js
+++ b/spec/frontend/vue_shared/components/stacked_progress_bar_spec.js
@@ -3,7 +3,7 @@ import Vue from 'vue';
import mountComponent from 'helpers/vue_mount_component_helper';
import stackedProgressBarComponent from '~/vue_shared/components/stacked_progress_bar.vue';
-const createComponent = config => {
+const createComponent = (config) => {
const Component = Vue.extend(stackedProgressBarComponent);
const defaultConfig = {
successLabel: 'Synced',
@@ -29,11 +29,12 @@ describe('StackedProgressBarComponent', () => {
vm.$destroy();
});
- const findSuccessBarText = wrapper => wrapper.$el.querySelector('.status-green').innerText.trim();
- const findNeutralBarText = wrapper =>
+ const findSuccessBarText = (wrapper) =>
+ wrapper.$el.querySelector('.status-green').innerText.trim();
+ const findNeutralBarText = (wrapper) =>
wrapper.$el.querySelector('.status-neutral').innerText.trim();
- const findFailureBarText = wrapper => wrapper.$el.querySelector('.status-red').innerText.trim();
- const findUnavailableBarText = wrapper =>
+ const findFailureBarText = (wrapper) => wrapper.$el.querySelector('.status-red').innerText.trim();
+ const findUnavailableBarText = (wrapper) =>
wrapper.$el.querySelector('.status-unavailable').innerText.trim();
describe('computed', () => {
diff --git a/spec/frontend/vue_shared/components/table_pagination_spec.js b/spec/frontend/vue_shared/components/table_pagination_spec.js
index 058dfcdbde2..12c47637358 100644
--- a/spec/frontend/vue_shared/components/table_pagination_spec.js
+++ b/spec/frontend/vue_shared/components/table_pagination_spec.js
@@ -6,7 +6,7 @@ describe('Pagination component', () => {
let wrapper;
let spy;
- const mountComponent = props => {
+ const mountComponent = (props) => {
wrapper = shallowMount(TablePagination, {
propsData: props,
});
diff --git a/spec/frontend/vue_shared/components/tabs/tab_spec.js b/spec/frontend/vue_shared/components/tabs/tab_spec.js
index 8cf07a9177c..ee0c983c764 100644
--- a/spec/frontend/vue_shared/components/tabs/tab_spec.js
+++ b/spec/frontend/vue_shared/components/tabs/tab_spec.js
@@ -10,7 +10,7 @@ describe('Tab component', () => {
vm = mountComponent(Component);
});
- it('sets localActive to equal active', done => {
+ it('sets localActive to equal active', (done) => {
vm.active = true;
vm.$nextTick(() => {
@@ -20,7 +20,7 @@ describe('Tab component', () => {
});
});
- it('sets active class', done => {
+ it('sets active class', (done) => {
vm.active = true;
vm.$nextTick(() => {
diff --git a/spec/frontend/vue_shared/components/tooltip_on_truncate_spec.js b/spec/frontend/vue_shared/components/tooltip_on_truncate_spec.js
index 175abf5aae0..27c9b099306 100644
--- a/spec/frontend/vue_shared/components/tooltip_on_truncate_spec.js
+++ b/spec/frontend/vue_shared/components/tooltip_on_truncate_spec.js
@@ -18,7 +18,7 @@ describe('TooltipOnTruncate component', () => {
const createComponent = ({ propsData, ...options } = {}) => {
wrapper = shallowMount(TooltipOnTruncate, {
- attachToDocument: true,
+ attachTo: document.body,
propsData: {
...propsData,
},
@@ -44,7 +44,7 @@ describe('TooltipOnTruncate component', () => {
},
{
propsData: { ...propsData },
- attachToDocument: true,
+ attachTo: document.body,
...options,
},
);
@@ -139,7 +139,7 @@ describe('TooltipOnTruncate component', () => {
createComponent({
propsData: {
title: DUMMY_TEXT,
- truncateTarget: el => el.childNodes[1],
+ truncateTarget: (el) => el.childNodes[1],
},
slots: {
default: [createChildElement(), createChildElement()],
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js
index 663d0af4cc4..2c3fc70e116 100644
--- a/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js
@@ -91,11 +91,11 @@ describe('User Avatar Image Component', () => {
});
it('renders the tooltip slot', () => {
- expect(wrapper.find('.js-user-avatar-image-toolip').exists()).toBe(true);
+ expect(wrapper.find('.js-user-avatar-image-tooltip').exists()).toBe(true);
});
it('renders the tooltip content', () => {
- expect(wrapper.find('.js-user-avatar-image-toolip').text()).toContain(slots.default[0]);
+ expect(wrapper.find('.js-user-avatar-image-tooltip').text()).toContain(slots.default[0]);
});
it('does not render tooltip data attributes for on avatar image', () => {
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js
index 84e7a6a162e..d151cd15bc4 100644
--- a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js
@@ -20,7 +20,7 @@ describe('User Avatar Link Component', () => {
username: 'username',
};
- const createWrapper = props => {
+ const createWrapper = (props) => {
wrapper = shallowMount(UserAvatarLink, {
propsData: {
...defaultProps,
@@ -83,7 +83,7 @@ describe('User Avatar Link Component', () => {
describe('username', () => {
it('should not render avatar image tooltip', () => {
- expect(wrapper.find('.js-user-avatar-image-toolip').exists()).toBe(false);
+ expect(wrapper.find('.js-user-avatar-image-tooltip').exists()).toBe(false);
});
it('should render username prop in <span>', () => {
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_list_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_list_spec.js
index 6f66d1cafb9..e3cd2bb9aaa 100644
--- a/spec/frontend/vue_shared/components/user_avatar/user_avatar_list_spec.js
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_list_spec.js
@@ -9,13 +9,13 @@ const TEST_BREAKPOINT = 5;
const TEST_EMPTY_MESSAGE = 'Lorem ipsum empty';
const DEFAULT_EMPTY_MESSAGE = 'None';
-const createUser = id => ({
+const createUser = (id) => ({
id,
name: 'Lorem',
web_url: `${TEST_HOST}/${id}`,
avatar_url: `${TEST_HOST}/${id}/avatar`,
});
-const createList = n =>
+const createList = (n) =>
Array(n)
.fill(1)
.map((x, id) => createUser(id));
@@ -79,10 +79,10 @@ describe('UserAvatarList', () => {
factory({ propsData: { items } });
const links = wrapper.findAll(UserAvatarLink);
- const linkProps = links.wrappers.map(x => x.props());
+ const linkProps = links.wrappers.map((x) => x.props());
expect(linkProps).toEqual(
- items.map(x =>
+ items.map((x) =>
expect.objectContaining({
linkHref: x.web_url,
imgSrc: x.avatar_url,
diff --git a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
index 7d58a865ba3..435c3a5406e 100644
--- a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
+++ b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
@@ -33,7 +33,7 @@ describe('User Popover Component', () => {
wrapper.destroy();
});
- const findByTestId = testid => wrapper.find(`[data-testid="${testid}"]`);
+ const findByTestId = (testid) => wrapper.find(`[data-testid="${testid}"]`);
const findUserStatus = () => wrapper.find('.js-user-status');
const findTarget = () => document.querySelector('.js-user-link');
const findAvailabilityStatus = () => wrapper.find(UserAvailabilityStatus);
@@ -81,6 +81,7 @@ describe('User Popover Component', () => {
});
it('shows icon for location', () => {
+ createWrapper();
const iconEl = wrapper.find(GlIcon);
expect(iconEl.props('name')).toEqual('location');
@@ -147,7 +148,7 @@ describe('User Popover Component', () => {
createWrapper({ user });
expect(
- wrapper.findAll(GlIcon).filter(icon => icon.props('name') === 'profile').length,
+ wrapper.findAll(GlIcon).filter((icon) => icon.props('name') === 'profile').length,
).toEqual(1);
});
@@ -159,9 +160,9 @@ describe('User Popover Component', () => {
createWrapper({ user });
- expect(wrapper.findAll(GlIcon).filter(icon => icon.props('name') === 'work').length).toEqual(
- 1,
- );
+ expect(
+ wrapper.findAll(GlIcon).filter((icon) => icon.props('name') === 'work').length,
+ ).toEqual(1);
});
});
diff --git a/spec/frontend/vue_shared/directives/tooltip_spec.js b/spec/frontend/vue_shared/directives/tooltip_spec.js
index 4217b8d3c02..28ec23ad4c1 100644
--- a/spec/frontend/vue_shared/directives/tooltip_spec.js
+++ b/spec/frontend/vue_shared/directives/tooltip_spec.js
@@ -19,7 +19,7 @@ describe('Tooltip directive', () => {
data: () => ({ tooltip: text }),
template,
},
- { attachToDocument: true },
+ { attachTo: document.body },
);
}
@@ -151,11 +151,7 @@ describe('Tooltip directive', () => {
});
it('should have tooltip plugin applied to all instances', () => {
- expect(
- $(wrapper.vm.$el)
- .find('.js-look-for-tooltip')
- .data('bs.tooltip'),
- ).toBeDefined();
+ expect($(wrapper.vm.$el).find('.js-look-for-tooltip').data('bs.tooltip')).toBeDefined();
});
});
});
diff --git a/spec/frontend/vue_shared/directives/validation_spec.js b/spec/frontend/vue_shared/directives/validation_spec.js
index 814d6f43589..2764a71d204 100644
--- a/spec/frontend/vue_shared/directives/validation_spec.js
+++ b/spec/frontend/vue_shared/directives/validation_spec.js
@@ -36,7 +36,7 @@ describe('validation directive', () => {
`,
};
- wrapper = shallowMount(component, { attachToDocument: true });
+ wrapper = shallowMount(component, { attachTo: document.body });
};
afterEach(() => {
@@ -50,7 +50,7 @@ describe('validation directive', () => {
describe.each([true, false])(
'with fields untouched and "showValidation" set to "%s"',
- showValidation => {
+ (showValidation) => {
beforeEach(() => {
createComponent({ showValidation });
});
@@ -78,7 +78,7 @@ describe('validation directive', () => {
`(
'with input-attributes set to $inputAttributes',
({ inputAttributes, validValue, invalidValue }) => {
- const setValueAndTriggerValidation = value => {
+ const setValueAndTriggerValidation = (value) => {
const input = findInput();
input.setValue(value);
input.trigger('blur');
diff --git a/spec/frontend/vue_shared/security_reports/components/security_report_download_dropdown_spec.js b/spec/frontend/vue_shared/security_reports/components/security_report_download_dropdown_spec.js
index 7e70407655a..9138d2d3f4c 100644
--- a/spec/frontend/vue_shared/security_reports/components/security_report_download_dropdown_spec.js
+++ b/spec/frontend/vue_shared/security_reports/components/security_report_download_dropdown_spec.js
@@ -6,7 +6,7 @@ describe('SecurityReportDownloadDropdown component', () => {
let wrapper;
let artifacts;
- const createComponent = props => {
+ const createComponent = (props) => {
wrapper = shallowMount(SecurityReportDownloadDropdown, {
propsData: { ...props },
});
diff --git a/spec/frontend/vue_shared/security_reports/mock_data.js b/spec/frontend/vue_shared/security_reports/mock_data.js
index e93ca8329e7..b3ff7daef2b 100644
--- a/spec/frontend/vue_shared/security_reports/mock_data.js
+++ b/spec/frontend/vue_shared/security_reports/mock_data.js
@@ -392,6 +392,33 @@ export const securityReportDownloadPathsQueryResponse = {
},
__typename: 'CiJob',
},
+ {
+ name: 'all_artifacts',
+ artifacts: {
+ nodes: [
+ {
+ downloadPath:
+ '/gitlab-org/secrets-detection-test/-/jobs/1402/artifacts/download?file_type=archive',
+ fileType: 'ARCHIVE',
+ __typename: 'CiJobArtifact',
+ },
+ {
+ downloadPath:
+ '/gitlab-org/secrets-detection-test/-/jobs/1402/artifacts/download?file_type=trace',
+ fileType: 'TRACE',
+ __typename: 'CiJobArtifact',
+ },
+ {
+ downloadPath:
+ '/gitlab-org/secrets-detection-test/-/jobs/1402/artifacts/download?file_type=metadata',
+ fileType: 'METADATA',
+ __typename: 'CiJobArtifact',
+ },
+ ],
+ __typename: 'CiJobArtifactConnection',
+ },
+ __typename: 'CiJob',
+ },
],
__typename: 'CiJobConnection',
},
@@ -435,3 +462,51 @@ export const expectedDownloadDropdownProps = {
loading: false,
artifacts: [...secretDetectionArtifacts, ...sastArtifacts],
};
+
+/**
+ * These correspond to any jobs with zip archives in the securityReportDownloadPathsQueryResponse above.
+ */
+export const archiveArtifacts = [
+ {
+ name: 'all_artifacts Archive',
+ path: '/gitlab-org/secrets-detection-test/-/jobs/1402/artifacts/download?file_type=archive',
+ reportType: 'ARCHIVE',
+ },
+];
+
+/**
+ * These correspond to any jobs with trace data in the securityReportDownloadPathsQueryResponse above.
+ */
+export const traceArtifacts = [
+ {
+ name: 'secret_detection Trace',
+ path: '/gitlab-org/secrets-detection-test/-/jobs/1399/artifacts/download?file_type=trace',
+ reportType: 'TRACE',
+ },
+ {
+ name: 'bandit-sast Trace',
+ path: '/gitlab-org/secrets-detection-test/-/jobs/1400/artifacts/download?file_type=trace',
+ reportType: 'TRACE',
+ },
+ {
+ name: 'eslint-sast Trace',
+ path: '/gitlab-org/secrets-detection-test/-/jobs/1401/artifacts/download?file_type=trace',
+ reportType: 'TRACE',
+ },
+ {
+ name: 'all_artifacts Trace',
+ path: '/gitlab-org/secrets-detection-test/-/jobs/1402/artifacts/download?file_type=trace',
+ reportType: 'TRACE',
+ },
+];
+
+/**
+ * These correspond to any jobs with metadata artifacts in the securityReportDownloadPathsQueryResponse above.
+ */
+export const metadataArtifacts = [
+ {
+ name: 'all_artifacts Metadata',
+ path: '/gitlab-org/secrets-detection-test/-/jobs/1402/artifacts/download?file_type=metadata',
+ reportType: 'METADATA',
+ },
+];
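
For orientation, the archive, trace, and metadata fixtures above mirror the all_artifacts job added to securityReportDownloadPathsQueryResponse. A minimal sketch of how job nodes shaped like the ones in this mock data could be flattened into { name, path, reportType } entries follows; the helper names and exact name formatting are assumptions for illustration, not the actual ~/vue_shared/security_reports/utils implementation (which is exercised by utils_spec.js further down in this change).

// Illustrative sketch only: assumes job/artifact nodes shaped like the mock data above.
// The real extraction lives in ~/vue_shared/security_reports/utils.
const humanizeFileType = (fileType) => fileType.charAt(0) + fileType.slice(1).toLowerCase();

const collectArtifacts = (jobNodes, fileTypes) =>
  jobNodes.flatMap((job) =>
    job.artifacts.nodes
      .filter(({ fileType }) => fileTypes.includes(fileType))
      .map(({ fileType, downloadPath }) => ({
        // e.g. 'all_artifacts' + 'ARCHIVE' => 'all_artifacts Archive'
        name: `${job.name} ${humanizeFileType(fileType)}`,
        path: downloadPath,
        reportType: fileType,
      })),
  );

// collectArtifacts(jobNodes, ['TRACE']) would yield entries like traceArtifacts above.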
diff --git a/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js b/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
index c440081a0c4..50d1d130675 100644
--- a/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
+++ b/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
@@ -3,7 +3,7 @@ import MockAdapter from 'axios-mock-adapter';
import { merge } from 'lodash';
import VueApollo from 'vue-apollo';
import Vuex from 'vuex';
-import createMockApollo from 'jest/helpers/mock_apollo_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
import { trimText } from 'helpers/text_helper';
import waitForPromises from 'helpers/wait_for_promises';
import {
@@ -42,7 +42,7 @@ describe('Security reports app', () => {
discoverProjectSecurityPath: '/discoverProjectSecurityPath',
};
- const createComponent = options => {
+ const createComponent = (options) => {
wrapper = mount(
SecurityReportsApp,
merge(
@@ -61,7 +61,7 @@ describe('Security reports app', () => {
const pendingHandler = () => new Promise(() => {});
const successHandler = () => Promise.resolve({ data: securityReportDownloadPathsQueryResponse });
const failureHandler = () => Promise.resolve({ errors: [{ message: 'some error' }] });
- const createMockApolloProvider = handler => {
+ const createMockApolloProvider = (handler) => {
localVue.use(VueApollo);
const requestHandlers = [[securityReportDownloadPathsQuery, handler]];
@@ -74,7 +74,7 @@ describe('Security reports app', () => {
const findDownloadDropdown = () => wrapper.find(SecurityReportDownloadDropdown);
const findPipelinesTabAnchor = () => wrapper.find('[data-testid="show-pipelines"]');
const findHelpIconComponent = () => wrapper.find(HelpIcon);
- const setupMockJobArtifact = reportType => {
+ const setupMockJobArtifact = (reportType) => {
jest
.spyOn(Api, 'pipelineJobs')
.mockResolvedValue({ data: [{ artifacts: [{ file_type: reportType }] }] });
@@ -93,8 +93,8 @@ describe('Security reports app', () => {
describe.each([false, true])(
'given the coreSecurityMrWidgetCounts feature flag is %p',
- coreSecurityMrWidgetCounts => {
- const createComponentWithFlag = options =>
+ (coreSecurityMrWidgetCounts) => {
+ const createComponentWithFlag = (options) =>
createComponent(
merge(
{
@@ -108,7 +108,7 @@ describe('Security reports app', () => {
),
);
- describe.each(SecurityReportsApp.reportTypes)('given a report type %p', reportType => {
+ describe.each(SecurityReportsApp.reportTypes)('given a report type %p', (reportType) => {
beforeEach(() => {
window.mrTabs = { tabShown: jest.fn() };
setupMockJobArtifact(reportType);
@@ -245,7 +245,7 @@ describe('Security reports app', () => {
describe('given the coreSecurityMrWidgetCounts feature flag is enabled', () => {
let mock;
- const createComponentWithFlagEnabled = options =>
+ const createComponentWithFlagEnabled = (options) =>
createComponent(
merge(options, {
provide: {
@@ -350,7 +350,7 @@ describe('Security reports app', () => {
});
describe('given coreSecurityMrWidgetDownloads feature flag is enabled', () => {
- const createComponentWithFlagEnabled = options =>
+ const createComponentWithFlagEnabled = (options) =>
createComponent(
merge(options, {
provide: {
@@ -371,7 +371,7 @@ describe('Security reports app', () => {
// TODO: Remove this assertion as part of
// https://gitlab.com/gitlab-org/gitlab/-/issues/273431
it('initially renders nothing', () => {
- expect(wrapper.isEmpty()).toBe(true);
+ expect(wrapper.html()).toBe('');
});
});
@@ -415,7 +415,7 @@ describe('Security reports app', () => {
// TODO: Remove this assertion as part of
// https://gitlab.com/gitlab-org/gitlab/-/issues/273431
it('renders nothing', () => {
- expect(wrapper.isEmpty()).toBe(true);
+ expect(wrapper.html()).toBe('');
});
});
});
diff --git a/spec/frontend/vue_shared/security_reports/store/getters_spec.js b/spec/frontend/vue_shared/security_reports/store/getters_spec.js
index 8de704be455..b146a281d7b 100644
--- a/spec/frontend/vue_shared/security_reports/store/getters_spec.js
+++ b/spec/frontend/vue_shared/security_reports/store/getters_spec.js
@@ -13,7 +13,7 @@ import {
} from '~/vue_shared/security_reports/store/getters';
import { CRITICAL, HIGH, LOW } from '~/vulnerabilities/constants';
-const generateVuln = severity => ({ severity });
+const generateVuln = (severity) => ({ severity });
describe('Security reports getters', () => {
let state;
diff --git a/spec/frontend/vue_shared/security_reports/store/modules/sast/actions_spec.js b/spec/frontend/vue_shared/security_reports/store/modules/sast/actions_spec.js
index a11f4e05913..c9d1db8a504 100644
--- a/spec/frontend/vue_shared/security_reports/store/modules/sast/actions_spec.js
+++ b/spec/frontend/vue_shared/security_reports/store/modules/sast/actions_spec.js
@@ -26,7 +26,7 @@ describe('sast report actions', () => {
});
describe('setDiffEndpoint', () => {
- it(`should commit ${types.SET_DIFF_ENDPOINT} with the correct path`, done => {
+ it(`should commit ${types.SET_DIFF_ENDPOINT} with the correct path`, (done) => {
testAction(
actions.setDiffEndpoint,
diffEndpoint,
@@ -44,13 +44,13 @@ describe('sast report actions', () => {
});
describe('requestDiff', () => {
- it(`should commit ${types.REQUEST_DIFF}`, done => {
+ it(`should commit ${types.REQUEST_DIFF}`, (done) => {
testAction(actions.requestDiff, {}, state, [{ type: types.REQUEST_DIFF }], [], done);
});
});
describe('receiveDiffSuccess', () => {
- it(`should commit ${types.RECEIVE_DIFF_SUCCESS} with the correct response`, done => {
+ it(`should commit ${types.RECEIVE_DIFF_SUCCESS} with the correct response`, (done) => {
testAction(
actions.receiveDiffSuccess,
reports,
@@ -68,7 +68,7 @@ describe('sast report actions', () => {
});
describe('receiveDiffError', () => {
- it(`should commit ${types.RECEIVE_DIFF_ERROR} with the correct response`, done => {
+ it(`should commit ${types.RECEIVE_DIFF_ERROR} with the correct response`, (done) => {
testAction(
actions.receiveDiffError,
error,
@@ -107,7 +107,7 @@ describe('sast report actions', () => {
.replyOnce(200, reports.enrichData);
});
- it('should dispatch the `receiveDiffSuccess` action', done => {
+ it('should dispatch the `receiveDiffSuccess` action', (done) => {
const { diff, enrichData } = reports;
testAction(
actions.fetchDiff,
@@ -135,7 +135,7 @@ describe('sast report actions', () => {
mock.onGet(diffEndpoint).replyOnce(200, reports.diff);
});
- it('should dispatch the `receiveDiffSuccess` action with empty enrich data', done => {
+ it('should dispatch the `receiveDiffSuccess` action with empty enrich data', (done) => {
const { diff } = reports;
const enrichData = [];
testAction(
@@ -167,7 +167,7 @@ describe('sast report actions', () => {
.replyOnce(404);
});
- it('should dispatch the `receiveError` action', done => {
+ it('should dispatch the `receiveError` action', (done) => {
testAction(
actions.fetchDiff,
{},
@@ -188,7 +188,7 @@ describe('sast report actions', () => {
.replyOnce(200, reports.enrichData);
});
- it('should dispatch the `receiveDiffError` action', done => {
+ it('should dispatch the `receiveDiffError` action', (done) => {
testAction(
actions.fetchDiff,
{},
diff --git a/spec/frontend/vue_shared/security_reports/store/modules/secret_detection/actions_spec.js b/spec/frontend/vue_shared/security_reports/store/modules/secret_detection/actions_spec.js
index bbcdfb5cd99..beda1a55438 100644
--- a/spec/frontend/vue_shared/security_reports/store/modules/secret_detection/actions_spec.js
+++ b/spec/frontend/vue_shared/security_reports/store/modules/secret_detection/actions_spec.js
@@ -26,7 +26,7 @@ describe('secret detection report actions', () => {
});
describe('setDiffEndpoint', () => {
- it(`should commit ${types.SET_DIFF_ENDPOINT} with the correct path`, done => {
+ it(`should commit ${types.SET_DIFF_ENDPOINT} with the correct path`, (done) => {
testAction(
actions.setDiffEndpoint,
diffEndpoint,
@@ -44,13 +44,13 @@ describe('secret detection report actions', () => {
});
describe('requestDiff', () => {
- it(`should commit ${types.REQUEST_DIFF}`, done => {
+ it(`should commit ${types.REQUEST_DIFF}`, (done) => {
testAction(actions.requestDiff, {}, state, [{ type: types.REQUEST_DIFF }], [], done);
});
});
describe('receiveDiffSuccess', () => {
- it(`should commit ${types.RECEIVE_DIFF_SUCCESS} with the correct response`, done => {
+ it(`should commit ${types.RECEIVE_DIFF_SUCCESS} with the correct response`, (done) => {
testAction(
actions.receiveDiffSuccess,
reports,
@@ -68,7 +68,7 @@ describe('secret detection report actions', () => {
});
describe('receiveDiffError', () => {
- it(`should commit ${types.RECEIVE_DIFF_ERROR} with the correct response`, done => {
+ it(`should commit ${types.RECEIVE_DIFF_ERROR} with the correct response`, (done) => {
testAction(
actions.receiveDiffError,
error,
@@ -107,7 +107,7 @@ describe('secret detection report actions', () => {
.replyOnce(200, reports.enrichData);
});
- it('should dispatch the `receiveDiffSuccess` action', done => {
+ it('should dispatch the `receiveDiffSuccess` action', (done) => {
const { diff, enrichData } = reports;
testAction(
actions.fetchDiff,
@@ -135,7 +135,7 @@ describe('secret detection report actions', () => {
mock.onGet(diffEndpoint).replyOnce(200, reports.diff);
});
- it('should dispatch the `receiveDiffSuccess` action with empty enrich data', done => {
+ it('should dispatch the `receiveDiffSuccess` action with empty enrich data', (done) => {
const { diff } = reports;
const enrichData = [];
testAction(
@@ -167,7 +167,7 @@ describe('secret detection report actions', () => {
.replyOnce(404);
});
- it('should dispatch the `receiveDiffError` action', done => {
+ it('should dispatch the `receiveDiffError` action', (done) => {
testAction(
actions.fetchDiff,
{},
@@ -188,7 +188,7 @@ describe('secret detection report actions', () => {
.replyOnce(200, reports.enrichData);
});
- it('should dispatch the `receiveDiffError` action', done => {
+ it('should dispatch the `receiveDiffError` action', (done) => {
testAction(
actions.fetchDiff,
{},
diff --git a/spec/frontend/vue_shared/security_reports/utils_spec.js b/spec/frontend/vue_shared/security_reports/utils_spec.js
index ea54644796a..7e5a27694ef 100644
--- a/spec/frontend/vue_shared/security_reports/utils_spec.js
+++ b/spec/frontend/vue_shared/security_reports/utils_spec.js
@@ -2,11 +2,15 @@ import { extractSecurityReportArtifacts } from '~/vue_shared/security_reports/ut
import {
REPORT_TYPE_SAST,
REPORT_TYPE_SECRET_DETECTION,
+ REPORT_FILE_TYPES,
} from '~/vue_shared/security_reports/constants';
import {
securityReportDownloadPathsQueryResponse,
sastArtifacts,
secretDetectionArtifacts,
+ archiveArtifacts,
+ traceArtifacts,
+ metadataArtifacts,
} from './mock_data';
describe('extractSecurityReportArtifacts', () => {
@@ -17,6 +21,9 @@ describe('extractSecurityReportArtifacts', () => {
${[REPORT_TYPE_SAST]} | ${sastArtifacts}
${[REPORT_TYPE_SECRET_DETECTION]} | ${secretDetectionArtifacts}
${[REPORT_TYPE_SAST, REPORT_TYPE_SECRET_DETECTION]} | ${[...secretDetectionArtifacts, ...sastArtifacts]}
+ ${[REPORT_FILE_TYPES.ARCHIVE]} | ${archiveArtifacts}
+ ${[REPORT_FILE_TYPES.TRACE]} | ${traceArtifacts}
+ ${[REPORT_FILE_TYPES.METADATA]} | ${metadataArtifacts}
`(
'returns the expected artifacts given report types $reportTypes',
({ reportTypes, expectedArtifacts }) => {
diff --git a/spec/frontend/vuex_shared/modules/modal/actions_spec.js b/spec/frontend/vuex_shared/modules/modal/actions_spec.js
index 353dbcb522f..a8269194c0b 100644
--- a/spec/frontend/vuex_shared/modules/modal/actions_spec.js
+++ b/spec/frontend/vuex_shared/modules/modal/actions_spec.js
@@ -4,7 +4,7 @@ import * as actions from '~/vuex_shared/modules/modal/actions';
describe('Vuex ModalModule actions', () => {
describe('open', () => {
- it('works', done => {
+ it('works', (done) => {
const data = { id: 7 };
testAction(actions.open, data, {}, [{ type: types.OPEN, payload: data }], [], done);
@@ -12,19 +12,19 @@ describe('Vuex ModalModule actions', () => {
});
describe('close', () => {
- it('works', done => {
+ it('works', (done) => {
testAction(actions.close, null, {}, [{ type: types.CLOSE }], [], done);
});
});
describe('show', () => {
- it('works', done => {
+ it('works', (done) => {
testAction(actions.show, null, {}, [{ type: types.SHOW }], [], done);
});
});
describe('hide', () => {
- it('works', done => {
+ it('works', (done) => {
testAction(actions.hide, null, {}, [{ type: types.HIDE }], [], done);
});
});
diff --git a/spec/frontend/whats_new/components/app_spec.js b/spec/frontend/whats_new/components/app_spec.js
index 7a9340da87a..13e6bec47ab 100644
--- a/spec/frontend/whats_new/components/app_spec.js
+++ b/spec/frontend/whats_new/components/app_spec.js
@@ -103,7 +103,7 @@ describe('App', () => {
expect(actions.closeDrawer).toHaveBeenCalled();
});
- it.each([true, false])('passes open property', async openState => {
+ it.each([true, false])('passes open property', async (openState) => {
wrapper.vm.$store.state.open = openState;
await wrapper.vm.$nextTick();
@@ -183,7 +183,7 @@ describe('App', () => {
const clickSecondTab = async () => {
const secondTab = wrapper.findAll('.nav-link').at(1);
await secondTab.trigger('click');
- await new Promise(resolve => requestAnimationFrame(resolve));
+ await new Promise((resolve) => requestAnimationFrame(resolve));
};
beforeEach(() => {
diff --git a/spec/frontend/wikis_spec.js b/spec/frontend/wikis_spec.js
index cf1ea972697..c4a2bf1a69a 100644
--- a/spec/frontend/wikis_spec.js
+++ b/spec/frontend/wikis_spec.js
@@ -1,128 +1,160 @@
import { escape } from 'lodash';
+import { setHTMLFixture } from 'helpers/fixtures';
import Wikis from '~/pages/shared/wikis/wikis';
import Tracking from '~/tracking';
-import { setHTMLFixture } from './helpers/fixtures';
describe('Wikis', () => {
- describe('setting the commit message when the title changes', () => {
- const editFormHtmlFixture = args => `<form class="wiki-form ${
- args.newPage ? 'js-new-wiki-page' : ''
- }">
- <input type="text" id="wiki_title" value="My title" />
- <input type="text" id="wiki_message" />
- <select class="form-control select-control" name="wiki[format]" id="wiki_format">
- <option value="markdown">Markdown</option>
- <option selected="selected" value="rdoc">RDoc</option>
- <option value="asciidoc">AsciiDoc</option>
- <option value="org">Org</option>
- </select>
- <textarea id="wiki_content"></textarea>
- <code class="js-markup-link-example">{Link title}[link:page-slug]</code>
- </form>
- `;
-
- let wikis;
- let titleInput;
- let messageInput;
- let changeFormatSelect;
- let linkExample;
-
- const findBeforeUnloadWarning = () => window.onbeforeunload?.();
- const findContent = () => document.getElementById('wiki_content');
- const findForm = () => document.querySelector('.wiki-form');
-
- describe('when the wiki page is being created', () => {
- const formHtmlFixture = editFormHtmlFixture({ newPage: true });
+ const editFormHtmlFixture = (args) => `<form class="wiki-form ${
+ args.newPage ? 'js-new-wiki-page' : ''
+ }">
+ <input type="text" id="wiki_title" value="My title" />
+ <input type="text" id="wiki_message" />
+ <select class="form-control select-control" name="wiki[format]" id="wiki_format">
+ <option value="markdown">Markdown</option>
+ <option selected="selected" value="rdoc">RDoc</option>
+ <option value="asciidoc">AsciiDoc</option>
+ <option value="org">Org</option>
+ </select>
+ <textarea id="wiki_content"></textarea>
+ <code class="js-markup-link-example">{Link title}[link:page-slug]</code>
+ <input type="submit" class="js-wiki-btn-submit">
+ </input>
+ </form>
+ `;
+
+ let wikis;
+ let titleInput;
+ let contentInput;
+ let messageInput;
+ let changeFormatSelect;
+ let linkExample;
+
+ const findBeforeUnloadWarning = () => window.onbeforeunload?.();
+ const findForm = () => document.querySelector('.wiki-form');
+ const findSubmitButton = () => document.querySelector('.js-wiki-btn-submit');
+
+ describe('when the wiki page is being created', () => {
+ const formHtmlFixture = editFormHtmlFixture({ newPage: true });
- beforeEach(() => {
- setHTMLFixture(formHtmlFixture);
+ beforeEach(() => {
+ setHTMLFixture(formHtmlFixture);
- titleInput = document.getElementById('wiki_title');
- messageInput = document.getElementById('wiki_message');
- changeFormatSelect = document.querySelector('#wiki_format');
- linkExample = document.querySelector('.js-markup-link-example');
- wikis = new Wikis();
- });
+ titleInput = document.getElementById('wiki_title');
+ messageInput = document.getElementById('wiki_message');
+ changeFormatSelect = document.querySelector('#wiki_format');
+ linkExample = document.querySelector('.js-markup-link-example');
+ wikis = new Wikis();
+ });
- it('binds an event listener to the title input', () => {
- wikis.handleWikiTitleChange = jest.fn();
+ it('binds an event listener to the title input', () => {
+ wikis.handleWikiTitleChange = jest.fn();
- titleInput.dispatchEvent(new Event('keyup'));
+ titleInput.dispatchEvent(new Event('keyup'));
- expect(wikis.handleWikiTitleChange).toHaveBeenCalled();
- });
+ expect(wikis.handleWikiTitleChange).toHaveBeenCalled();
+ });
- it('sets the commit message when title changes', () => {
- titleInput.value = 'My title';
- messageInput.value = '';
+ it('sets the commit message when title changes', () => {
+ titleInput.value = 'My title';
+ messageInput.value = '';
- titleInput.dispatchEvent(new Event('keyup'));
+ titleInput.dispatchEvent(new Event('keyup'));
- expect(messageInput.value).toEqual('Create My title');
- });
+ expect(messageInput.value).toEqual('Create My title');
+ });
- it('replaces hyphens with spaces', () => {
- titleInput.value = 'my-hyphenated-title';
- titleInput.dispatchEvent(new Event('keyup'));
+ it('replaces hyphens with spaces', () => {
+ titleInput.value = 'my-hyphenated-title';
+ titleInput.dispatchEvent(new Event('keyup'));
- expect(messageInput.value).toEqual('Create my hyphenated title');
- });
+ expect(messageInput.value).toEqual('Create my hyphenated title');
});
+ });
- describe('when the wiki page is being updated', () => {
- const formHtmlFixture = editFormHtmlFixture({ newPage: false });
+ describe('when the wiki page is being updated', () => {
+ const formHtmlFixture = editFormHtmlFixture({ newPage: false });
- beforeEach(() => {
- setHTMLFixture(formHtmlFixture);
+ beforeEach(() => {
+ setHTMLFixture(formHtmlFixture);
- titleInput = document.getElementById('wiki_title');
- messageInput = document.getElementById('wiki_message');
- wikis = new Wikis();
- });
+ titleInput = document.getElementById('wiki_title');
+ messageInput = document.getElementById('wiki_message');
+ wikis = new Wikis();
+ });
- it('sets the commit message when title changes, prefixing with "Update"', () => {
- titleInput.value = 'My title';
- messageInput.value = '';
+ it('sets the commit message when title changes, prefixing with "Update"', () => {
+ titleInput.value = 'My title';
+ messageInput.value = '';
- titleInput.dispatchEvent(new Event('keyup'));
+ titleInput.dispatchEvent(new Event('keyup'));
- expect(messageInput.value).toEqual('Update My title');
+ expect(messageInput.value).toEqual('Update My title');
+ });
+
+ it.each`
+ value | text
+ ${'markdown'} | ${'[Link Title](page-slug)'}
+ ${'rdoc'} | ${'{Link title}[link:page-slug]'}
+ ${'asciidoc'} | ${'link:page-slug[Link title]'}
+ ${'org'} | ${'[[page-slug]]'}
+ `('updates a message when value=$value is selected', ({ value, text }) => {
+ changeFormatSelect.value = value;
+ changeFormatSelect.dispatchEvent(new Event('change'));
+
+ expect(linkExample.innerHTML).toBe(text);
+ });
+
+ it('starts with no unload warning', () => {
+ expect(findBeforeUnloadWarning()).toBeUndefined();
+ });
+
+ describe('when wiki content is updated', () => {
+ beforeEach(() => {
+ contentInput = document.getElementById('wiki_content');
+ contentInput.value = 'Lorem ipsum dolar sit!';
+ contentInput.dispatchEvent(new Event('input'));
});
- it.each`
- value | text
- ${'markdown'} | ${'[Link Title](page-slug)'}
- ${'rdoc'} | ${'{Link title}[link:page-slug]'}
- ${'asciidoc'} | ${'link:page-slug[Link title]'}
- ${'org'} | ${'[[page-slug]]'}
- `('updates a message when value=$value is selected', ({ value, text }) => {
- changeFormatSelect.value = value;
- changeFormatSelect.dispatchEvent(new Event('change'));
-
- expect(linkExample.innerHTML).toBe(text);
+ it('sets before unload warning', () => {
+ expect(findBeforeUnloadWarning()).toBe('');
});
- it('starts with no unload warning', () => {
+ it('when form submitted, unsets before unload warning', () => {
+ findForm().dispatchEvent(new Event('submit'));
expect(findBeforeUnloadWarning()).toBeUndefined();
});
+ });
+ });
- describe('when wiki content is updated', () => {
- beforeEach(() => {
- const content = findContent();
- content.value = 'Lorem ipsum dolar sit!';
- content.dispatchEvent(new Event('input'));
- });
-
- it('sets before unload warning', () => {
- expect(findBeforeUnloadWarning()).toBe('');
- });
-
- it('when form submitted, unsets before unload warning', () => {
- findForm().dispatchEvent(new Event('submit'));
- expect(findBeforeUnloadWarning()).toBeUndefined();
- });
- });
+ describe('submit button state', () => {
+ beforeEach(() => {
+ setHTMLFixture(editFormHtmlFixture({ newPage: true }));
+
+ titleInput = document.getElementById('wiki_title');
+ contentInput = document.getElementById('wiki_content');
+
+ wikis = new Wikis();
});
+
+ it.each`
+ title | text | buttonState | disabledAttr
+ ${'something'} | ${'something'} | ${'enabled'} | ${null}
+ ${''} | ${'something'} | ${'disabled'} | ${'true'}
+ ${'something'} | ${''} | ${'disabled'} | ${'true'}
+ ${''} | ${''} | ${'disabled'} | ${'true'}
+ ${' '} | ${' '} | ${'disabled'} | ${'true'}
+ `(
+ "when title='$title', content='$content', then, buttonState='$buttonState'",
+ ({ title, text, disabledAttr }) => {
+ titleInput.value = title;
+ titleInput.dispatchEvent(new Event('keyup'));
+
+ contentInput.value = text;
+ contentInput.dispatchEvent(new Event('input'));
+
+ expect(findSubmitButton().getAttribute('disabled')).toBe(disabledAttr);
+ },
+ );
});
describe('trackPageView', () => {
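
The it.each table in the submit-button block above pins down the enablement rule the new behaviour is expected to follow: the button is enabled only when both the title and the content contain non-whitespace text. A one-line sketch of that rule, as an illustration rather than the actual ~/pages/shared/wikis/wikis implementation:

// Sketch of the rule exercised by the submit-button table above; not the actual implementation.
const isWikiSubmitEnabled = (title, content) => Boolean(title.trim()) && Boolean(content.trim());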
diff --git a/spec/frontend/zen_mode_spec.js b/spec/frontend/zen_mode_spec.js
index 8e0d170289b..e874d018e92 100644
--- a/spec/frontend/zen_mode_spec.js
+++ b/spec/frontend/zen_mode_spec.js
@@ -3,6 +3,7 @@ import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import Dropzone from 'dropzone';
import Mousetrap from 'mousetrap';
+import * as utils from '~/lib/utils/common_utils';
import ZenMode from '~/zen_mode';
import initNotes from '~/init_notes';
@@ -103,10 +104,10 @@ describe('ZenMode', () => {
});
it('restores the scroll position', () => {
- jest.spyOn(zen, 'scrollTo').mockImplementation(() => {});
+ jest.spyOn(utils, 'scrollToElement');
exitZen();
- expect(zen.scrollTo).toHaveBeenCalled();
+ expect(utils.scrollToElement).toHaveBeenCalled();
});
});
});
diff --git a/spec/frontend_integration/ide/helpers/ide_helper.js b/spec/frontend_integration/ide/helpers/ide_helper.js
index fe8d5f93794..8d5d047b146 100644
--- a/spec/frontend_integration/ide/helpers/ide_helper.js
+++ b/spec/frontend_integration/ide/helpers/ide_helper.js
@@ -8,11 +8,11 @@ import {
findByText,
} from '@testing-library/dom';
-const isFolderRowOpen = row => row.matches('.folder.is-open');
+const isFolderRowOpen = (row) => row.matches('.folder.is-open');
const getLeftSidebar = () => screen.getByTestId('left-sidebar');
-export const switchLeftSidebarTab = name => {
+export const switchLeftSidebarTab = (name) => {
const sidebar = getLeftSidebar();
const button = getByLabelText(sidebar, name);
@@ -23,7 +23,7 @@ export const switchLeftSidebarTab = name => {
export const getStatusBar = () => document.querySelector('.ide-status-bar');
export const waitForMonacoEditor = () =>
- new Promise(resolve => window.monaco.editor.onDidCreateEditor(resolve));
+ new Promise((resolve) => window.monaco.editor.onDidCreateEditor(resolve));
export const findMonacoEditor = () =>
screen.findAllByLabelText(/Editor content;/).then(([x]) => x.closest('.monaco-editor'));
@@ -31,7 +31,7 @@ export const findMonacoEditor = () =>
export const findMonacoDiffEditor = () =>
screen.findAllByLabelText(/Editor content;/).then(([x]) => x.closest('.monaco-diff-editor'));
-export const findAndSetEditorValue = async value => {
+export const findAndSetEditorValue = async (value) => {
const editor = await findMonacoEditor();
const uri = editor.getAttribute('data-uri');
@@ -45,9 +45,9 @@ export const getEditorValue = async () => {
return window.monaco.editor.getModel(uri).getValue();
};
-const findTreeBody = () => screen.findByTestId('ide-tree-body', {}, { timeout: 5000 });
+const findTreeBody = () => screen.findByTestId('ide-tree-body');
-const findRootActions = () => screen.findByTestId('ide-root-actions', {}, { timeout: 7000 });
+const findRootActions = () => screen.findByTestId('ide-root-actions');
const findFileRowContainer = (row = null) =>
row ? Promise.resolve(row.parentElement) : findTreeBody();
@@ -56,10 +56,12 @@ const findFileChild = async (row, name, index = 0) => {
const container = await findFileRowContainer(row);
const children = await findAllByText(container, name, { selector: '.file-row-name' });
- return children.map(x => x.closest('.file-row')).find(x => x.dataset.level === index.toString());
+ return children
+ .map((x) => x.closest('.file-row'))
+ .find((x) => x.dataset.level === index.toString());
};
-const openFileRow = row => {
+const openFileRow = (row) => {
if (!row || isFolderRowOpen(row)) {
return;
}
@@ -101,7 +103,7 @@ const fillFileNameModal = async (value, submitText = 'Create file') => {
createButton.click();
};
-const findAndClickRootAction = async name => {
+const findAndClickRootAction = async (name) => {
const container = await findRootActions();
const button = getByLabelText(container, name);
@@ -112,20 +114,17 @@ export const clickPreviewMarkdown = () => {
screen.getByText('Preview Markdown').click();
};
-export const openFile = async path => {
+export const openFile = async (path) => {
const row = await findAndTraverseToPath(path);
openFileRow(row);
};
-export const waitForTabToOpen = fileName =>
+export const waitForTabToOpen = (fileName) =>
findByText(document.querySelector('.multi-file-edit-pane'), fileName);
export const createFile = async (path, content) => {
- const parentPath = path
- .split('/')
- .slice(0, -1)
- .join('/');
+ const parentPath = path.split('/').slice(0, -1).join('/');
const parentRow = await findAndTraverseToPath(parentPath);
@@ -139,11 +138,16 @@ export const createFile = async (path, content) => {
await findAndSetEditorValue(content);
};
+export const updateFile = async (path, content) => {
+ await openFile(path);
+ await findAndSetEditorValue(content);
+};
+
export const getFilesList = () => {
- return screen.getAllByTestId('file-row-name-container').map(e => e.textContent.trim());
+ return screen.getAllByTestId('file-row-name-container').map((e) => e.textContent.trim());
};
-export const deleteFile = async path => {
+export const deleteFile = async (path) => {
const row = await findAndTraverseToPath(path);
clickFileRowAction(row, 'Delete');
};
@@ -155,7 +159,7 @@ export const renameFile = async (path, newPath) => {
await fillFileNameModal(newPath, 'Rename file');
};
-export const closeFile = async path => {
+export const closeFile = async (path) => {
const button = await screen.getByLabelText(`Close ${path}`, {
selector: '.multi-file-tabs button',
});
@@ -163,11 +167,33 @@ export const closeFile = async path => {
button.click();
};
-export const commit = async () => {
+/**
+ * Fill out and submit the commit form in the Web IDE
+ *
+ * @param {Object} options - Used to fill out the commit form in the IDE
+ * @param {Boolean} options.newBranch - Flag for the "Create a new branch" radio.
+ * @param {Boolean} options.newMR - Flag for the "Start a new merge request" checkbox.
+ * @param {String} options.newBranchName - Value to put in the new branch name input field. The Web IDE supports leaving this field blank.
+ */
+export const commit = async ({ newBranch = false, newMR = false, newBranchName = '' } = {}) => {
switchLeftSidebarTab('Commit');
screen.getByTestId('begin-commit-button').click();
- await screen.findByLabelText(/Commit to .+ branch/).then(x => x.click());
+ if (!newBranch) {
+ const option = await screen.findByLabelText(/Commit to .+ branch/);
+ option.click();
+ } else {
+ const option = await screen.findByLabelText('Create a new branch');
+ option.click();
+
+ const branchNameInput = await screen.findByTestId('ide-new-branch-name');
+ fireEvent.input(branchNameInput, { target: { value: newBranchName } });
+
+ const mrCheck = await screen.findByLabelText('Start a new merge request');
+ if (Boolean(mrCheck.checked) !== newMR) {
+ mrCheck.click();
+ }
+ }
screen.getByText('Commit').click();
};
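
As a usage sketch, the updated helpers above are meant to be driven from integration specs roughly like this; the file content and branch name are arbitrary examples, mirroring the new-branch flow described in the commit() docblock and exercised by the integration spec added further down in this change.

// Usage sketch for updateFile() and commit() above; values are illustrative.
import * as ideHelper from './helpers/ide_helper';

it('commits an edit to a new branch', async () => {
  await ideHelper.updateFile('README.md', 'Updated content\n');
  await ideHelper.commit({ newBranch: true, newMR: false, newBranchName: 'my-feature-branch' });
});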
diff --git a/spec/frontend_integration/ide/helpers/start.js b/spec/frontend_integration/ide/helpers/start.js
index 9dc9649e1bf..43a996286e7 100644
--- a/spec/frontend_integration/ide/helpers/start.js
+++ b/spec/frontend_integration/ide/helpers/start.js
@@ -2,6 +2,7 @@ import { TEST_HOST } from 'helpers/test_constants';
import extendStore from '~/ide/stores/extend';
import { IDE_DATASET } from './mock_data';
import { initIde } from '~/ide';
+import Editor from '~/ide/lib/editor';
export default (container, { isRepoEmpty = false, path = '' } = {}) => {
global.jsdom.reconfigure({
@@ -13,5 +14,16 @@ export default (container, { isRepoEmpty = false, path = '' } = {}) => {
const el = document.createElement('div');
Object.assign(el.dataset, IDE_DATASET);
container.appendChild(el);
- return initIde(el, { extendStore });
+ const vm = initIde(el, { extendStore });
+
+  // We need to dispose of the editor singleton or tests will bump into each other
+ vm.$on('destroy', () => {
+ if (Editor.editorInstance) {
+ Editor.editorInstance.modelManager.dispose();
+ Editor.editorInstance.dispose();
+ Editor.editorInstance = null;
+ }
+ });
+
+ return vm;
};
diff --git a/spec/frontend_integration/ide/ide_integration_spec.js b/spec/frontend_integration/ide/ide_integration_spec.js
index dacc538d5ba..00a73661d14 100644
--- a/spec/frontend_integration/ide/ide_integration_spec.js
+++ b/spec/frontend_integration/ide/ide_integration_spec.js
@@ -1,5 +1,6 @@
import { waitForText } from 'helpers/wait_for_text';
import waitForPromises from 'helpers/wait_for_promises';
+import { setTestTimeout } from 'helpers/timeout';
import { useOverclockTimers } from 'test_helpers/utils/overclock_timers';
import { createCommitId } from 'test_helpers/factories/commit_id';
import * as ideHelper from './helpers/ide_helper';
@@ -12,6 +13,9 @@ describe('WebIDE', () => {
let container;
beforeEach(() => {
+ // For some reason these tests were timing out in CI.
+ // We will investigate in https://gitlab.com/gitlab-org/gitlab/-/issues/298714
+ setTestTimeout(20000);
setFixtures('<div class="webide-container"></div>');
container = document.querySelector('.webide-container');
});
@@ -55,6 +59,25 @@ describe('WebIDE', () => {
});
});
+ it('user commits changes to new branch', async () => {
+ vm = startWebIDE(container);
+
+ expect(window.location.pathname).toBe('/-/ide/project/gitlab-test/lorem-ipsum/tree/master/-/');
+
+ await ideHelper.updateFile('README.md', 'Lorem dolar si amit\n');
+ await ideHelper.commit({ newBranch: true, newMR: false, newBranchName: 'test-hello-world' });
+
+ await waitForText('All changes are committed');
+
+ // Wait for IDE to load new commit
+ await waitForText('10000000', document.querySelector('.ide-status-bar'));
+
+ // It's important that the new branch is now in the route
+ expect(window.location.pathname).toBe(
+ '/-/ide/project/gitlab-test/lorem-ipsum/blob/test-hello-world/-/README.md',
+ );
+ });
+
it('user adds file that starts with +', async () => {
vm = startWebIDE(container);
@@ -66,7 +89,7 @@ describe('WebIDE', () => {
// Assert that +test is the only open tab
const tabs = Array.from(document.querySelectorAll('.multi-file-tab'));
- expect(tabs.map(x => x.textContent.trim())).toEqual(['+test']);
+ expect(tabs.map((x) => x.textContent.trim())).toEqual(['+test']);
});
describe('editor info', () => {
diff --git a/spec/frontend_integration/ide/user_opens_file_spec.js b/spec/frontend_integration/ide/user_opens_file_spec.js
index 98a73c7a029..7fa6dcecc9e 100644
--- a/spec/frontend_integration/ide/user_opens_file_spec.js
+++ b/spec/frontend_integration/ide/user_opens_file_spec.js
@@ -3,9 +3,7 @@ import { screen } from '@testing-library/dom';
import * as ideHelper from './helpers/ide_helper';
import startWebIDE from './helpers/start';
-// https://gitlab.com/gitlab-org/gitlab/-/issues/293654#note_466432769
-// eslint-disable-next-line jest/no-disabled-tests
-describe.skip('IDE: User opens a file in the Web IDE', () => {
+describe('IDE: User opens a file in the Web IDE', () => {
useOverclockTimers();
let vm;
diff --git a/spec/frontend_integration/test_helpers/factories/commit_id.js b/spec/frontend_integration/test_helpers/factories/commit_id.js
index 9fa278c9dde..815da19a2d9 100644
--- a/spec/frontend_integration/test_helpers/factories/commit_id.js
+++ b/spec/frontend_integration/test_helpers/factories/commit_id.js
@@ -1,7 +1,5 @@
const COMMIT_ID_LENGTH = 40;
-const DEFAULT_COMMIT_ID = Array(COMMIT_ID_LENGTH)
- .fill('0')
- .join('');
+const DEFAULT_COMMIT_ID = Array(COMMIT_ID_LENGTH).fill('0').join('');
export const createCommitId = (index = 0) =>
`${index}${DEFAULT_COMMIT_ID}`.substr(0, COMMIT_ID_LENGTH);
diff --git a/spec/frontend_integration/test_helpers/fixtures.js b/spec/frontend_integration/test_helpers/fixtures.js
index 46946ed71f2..fde3fd8cb63 100644
--- a/spec/frontend_integration/test_helpers/fixtures.js
+++ b/spec/frontend_integration/test_helpers/fixtures.js
@@ -10,9 +10,9 @@ const createFactoryWithDefault = (fn, defaultValue) => () => {
};
const factory = {
- json: fn => createFactoryWithDefault(fn, { error: 'fixture not found' }),
- text: fn => createFactoryWithDefault(fn, 'Hello world\nHow are you today?\n'),
- binary: fn => createFactoryWithDefault(fn, ''),
+ json: (fn) => createFactoryWithDefault(fn, { error: 'fixture not found' }),
+ text: (fn) => createFactoryWithDefault(fn, 'Hello world\nHow are you today?\n'),
+ binary: (fn) => createFactoryWithDefault(fn, ''),
};
export const getProject = factory.json(() => require('test_fixtures/api/projects/get.json'));
diff --git a/spec/frontend_integration/test_helpers/mock_server/index.js b/spec/frontend_integration/test_helpers/mock_server/index.js
index 6f090565635..2aebdefaafb 100644
--- a/spec/frontend_integration/test_helpers/mock_server/index.js
+++ b/spec/frontend_integration/test_helpers/mock_server/index.js
@@ -26,7 +26,7 @@ export const createMockServerOptions = () => ({
},
seeds(schema) {
schema.db.loadData({
- files: getRepositoryFiles().map(path => ({ path })),
+ files: getRepositoryFiles().map((path) => ({ path })),
projects: [getProject(), getEmptyProject()],
branches: [getBranch()],
mergeRequests: getMergeRequests(),
diff --git a/spec/frontend_integration/test_helpers/mock_server/routes/404.js b/spec/frontend_integration/test_helpers/mock_server/routes/404.js
index 9e08016577b..bc8edba927e 100644
--- a/spec/frontend_integration/test_helpers/mock_server/routes/404.js
+++ b/spec/frontend_integration/test_helpers/mock_server/routes/404.js
@@ -1,5 +1,5 @@
-export default server => {
- ['get', 'post', 'put', 'delete', 'patch'].forEach(method => {
+export default (server) => {
+ ['get', 'post', 'put', 'delete', 'patch'].forEach((method) => {
server[method]('*', () => {
return new Response(404);
});
diff --git a/spec/frontend_integration/test_helpers/mock_server/routes/ci.js b/spec/frontend_integration/test_helpers/mock_server/routes/ci.js
index 83951f09c56..6f1593a2cf9 100644
--- a/spec/frontend_integration/test_helpers/mock_server/routes/ci.js
+++ b/spec/frontend_integration/test_helpers/mock_server/routes/ci.js
@@ -1,6 +1,6 @@
import { getPipelinesEmptyResponse } from 'test_helpers/fixtures';
-export default server => {
+export default (server) => {
server.get('*/commit/:id/pipelines', () => {
return getPipelinesEmptyResponse();
});
diff --git a/spec/frontend_integration/test_helpers/mock_server/routes/graphql.js b/spec/frontend_integration/test_helpers/mock_server/routes/graphql.js
index ebb5415ba97..a22763dcb45 100644
--- a/spec/frontend_integration/test_helpers/mock_server/routes/graphql.js
+++ b/spec/frontend_integration/test_helpers/mock_server/routes/graphql.js
@@ -1,6 +1,6 @@
import { graphqlQuery } from '../graphql';
-export default server => {
+export default (server) => {
server.post('/api/graphql', (schema, request) => {
const batches = JSON.parse(request.requestBody);
diff --git a/spec/frontend_integration/test_helpers/mock_server/routes/index.js b/spec/frontend_integration/test_helpers/mock_server/routes/index.js
index eea196b5158..e30fecf2f06 100644
--- a/spec/frontend_integration/test_helpers/mock_server/routes/index.js
+++ b/spec/frontend_integration/test_helpers/mock_server/routes/index.js
@@ -1,5 +1,5 @@
/* eslint-disable global-require */
-export default server => {
+export default (server) => {
[
require('./graphql'),
require('./projects'),
diff --git a/spec/frontend_integration/test_helpers/mock_server/routes/projects.js b/spec/frontend_integration/test_helpers/mock_server/routes/projects.js
index f4d8ce4b23d..de37aa98eee 100644
--- a/spec/frontend_integration/test_helpers/mock_server/routes/projects.js
+++ b/spec/frontend_integration/test_helpers/mock_server/routes/projects.js
@@ -1,6 +1,6 @@
import { withKeys } from 'test_helpers/utils/obj';
-export default server => {
+export default (server) => {
server.get('/api/v4/projects/:id', (schema, request) => {
const { id } = request.params;
diff --git a/spec/frontend_integration/test_helpers/mock_server/routes/repository.js b/spec/frontend_integration/test_helpers/mock_server/routes/repository.js
index 166c0cc32db..ba36463cad8 100644
--- a/spec/frontend_integration/test_helpers/mock_server/routes/repository.js
+++ b/spec/frontend_integration/test_helpers/mock_server/routes/repository.js
@@ -1,9 +1,9 @@
import { createNewCommit, createCommitIdGenerator } from 'test_helpers/factories';
-export default server => {
+export default (server) => {
const commitIdGenerator = createCommitIdGenerator();
- server.get('/api/v4/projects/:id/repository/branches', schema => {
+ server.get('/api/v4/projects/:id/repository/branches', (schema) => {
return schema.db.branches;
});
@@ -15,7 +15,7 @@ export default server => {
return branch.attrs;
});
- server.get('*/-/files/:id', schema => {
+ server.get('*/-/files/:id', (schema) => {
return schema.db.files.map(({ path }) => path);
});
@@ -37,13 +37,23 @@ export default server => {
);
const branch = schema.branches.findBy({ name: branchName });
+ const prevCommit = branch
+ ? branch.attrs.commit
+ : schema.branches.findBy({ name: 'master' }).attrs.commit;
const commit = {
- ...createNewCommit({ id: commitIdGenerator.next(), message }, branch.attrs.commit),
+ ...createNewCommit({ id: commitIdGenerator.next(), message }, prevCommit),
__actions: actions,
};
- branch.update({ commit });
+ if (branch) {
+ branch.update({ commit });
+ } else {
+ schema.branches.create({
+ name: branchName,
+ commit,
+ });
+ }
return commit;
});
diff --git a/spec/frontend_integration/test_helpers/setup/index.js b/spec/frontend_integration/test_helpers/setup/index.js
index ba1d256e16e..946ccbec00c 100644
--- a/spec/frontend_integration/test_helpers/setup/index.js
+++ b/spec/frontend_integration/test_helpers/setup/index.js
@@ -3,3 +3,4 @@ import './setup_globals';
import './setup_axios';
import './setup_serializers';
import './setup_mock_server';
+import './setup_testing_library';
diff --git a/spec/frontend_integration/test_helpers/setup/setup_testing_library.js b/spec/frontend_integration/test_helpers/setup/setup_testing_library.js
new file mode 100644
index 00000000000..5081b1c3b62
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/setup/setup_testing_library.js
@@ -0,0 +1,3 @@
+import { configure } from '@testing-library/dom';
+
+configure({ asyncUtilTimeout: 10000 });
diff --git a/spec/frontend_integration/test_helpers/snapshot_serializer.js b/spec/frontend_integration/test_helpers/snapshot_serializer.js
index 8c4f95a9156..aacd144217f 100644
--- a/spec/frontend_integration/test_helpers/snapshot_serializer.js
+++ b/spec/frontend_integration/test_helpers/snapshot_serializer.js
@@ -4,7 +4,7 @@ export default {
},
print(element, serialize) {
element.$_hit = true;
- element.querySelectorAll('[style]').forEach(el => {
+ element.querySelectorAll('[style]').forEach((el) => {
el.$_hit = true;
if (el.style.display === 'none') {
el.textContent = '(jest: contents hidden)';
diff --git a/spec/frontend_integration/test_helpers/utils/obj_spec.js b/spec/frontend_integration/test_helpers/utils/obj_spec.js
index 0ad7b4a1a4c..614250ccddc 100644
--- a/spec/frontend_integration/test_helpers/utils/obj_spec.js
+++ b/spec/frontend_integration/test_helpers/utils/obj_spec.js
@@ -3,9 +3,9 @@ import { withKeys, withValues } from './obj';
describe('frontend_integration/test_helpers/utils/obj', () => {
describe('withKeys', () => {
it('picks and maps keys', () => {
- expect(withKeys({ a: '123', b: 456, c: 'd' }, { b: 'lorem', c: 'ipsum', z: 'zed ' })).toEqual(
- { lorem: 456, ipsum: 'd' },
- );
+ expect(
+ withKeys({ a: '123', b: 456, c: 'd' }, { b: 'lorem', c: 'ipsum', z: 'zed ' }),
+ ).toEqual({ lorem: 456, ipsum: 'd' });
});
});
diff --git a/spec/graphql/mutations/namespace/package_settings/update_spec.rb b/spec/graphql/mutations/namespace/package_settings/update_spec.rb
new file mode 100644
index 00000000000..bd0d38cb49f
--- /dev/null
+++ b/spec/graphql/mutations/namespace/package_settings/update_spec.rb
@@ -0,0 +1,98 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Namespace::PackageSettings::Update do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be_with_reload(:namespace) { create(:group) }
+ let_it_be(:user) { create(:user) }
+
+ let(:params) { { namespace_path: namespace.full_path } }
+
+ specify { expect(described_class).to require_graphql_authorizations(:create_package_settings) }
+
+ describe '#resolve' do
+ subject { described_class.new(object: namespace, context: { current_user: user }, field: nil).resolve(**params) }
+
+ RSpec.shared_examples 'returning a success' do
+ it 'returns the namespace package setting with no errors' do
+ expect(subject).to eq(
+ package_settings: package_settings,
+ errors: []
+ )
+ end
+ end
+
+ RSpec.shared_examples 'updating the namespace package setting' do
+ it_behaves_like 'updating the namespace package setting attributes', from: { maven_duplicates_allowed: true, maven_duplicate_exception_regex: 'SNAPSHOT' }, to: { maven_duplicates_allowed: false, maven_duplicate_exception_regex: 'RELEASE' }
+
+ it_behaves_like 'returning a success'
+
+ context 'with invalid params' do
+ let_it_be(:params) { { namespace_path: namespace.full_path, maven_duplicate_exception_regex: '[' } }
+
+ it_behaves_like 'not creating the namespace package setting'
+
+ it 'does not update maven_duplicates_allowed' do
+ expect { subject }
+ .not_to change { package_settings.reload.maven_duplicates_allowed }
+ end
+
+ it 'returns an error' do
+ expect(subject).to eq(
+ package_settings: nil,
+ errors: ['Maven duplicate exception regex not valid RE2 syntax: missing ]: [']
+ )
+ end
+ end
+ end
+
+ RSpec.shared_examples 'denying access to namespace package setting' do
+ it 'raises Gitlab::Graphql::Errors::ResourceNotAvailable' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+
+ context 'with existing namespace package setting' do
+ let_it_be(:package_settings) { create(:namespace_package_setting, namespace: namespace) }
+ let_it_be(:params) { { namespace_path: namespace.full_path, maven_duplicates_allowed: false, maven_duplicate_exception_regex: 'RELEASE' } }
+
+ where(:user_role, :shared_examples_name) do
+ :maintainer | 'updating the namespace package setting'
+ :developer | 'updating the namespace package setting'
+ :reporter | 'denying access to namespace package setting'
+ :guest | 'denying access to namespace package setting'
+ :anonymous | 'denying access to namespace package setting'
+ end
+
+ with_them do
+ before do
+ namespace.send("add_#{user_role}", user) unless user_role == :anonymous
+ end
+
+ it_behaves_like params[:shared_examples_name]
+ end
+ end
+
+ context 'without existing namespace package setting' do
+ let_it_be(:package_settings) { namespace.package_settings }
+
+ where(:user_role, :shared_examples_name) do
+ :maintainer | 'creating the namespace package setting'
+ :developer | 'creating the namespace package setting'
+ :reporter | 'denying access to namespace package setting'
+ :guest | 'denying access to namespace package setting'
+ :anonymous | 'denying access to namespace package setting'
+ end
+
+ with_them do
+ before do
+ namespace.send("add_#{user_role}", user) unless user_role == :anonymous
+ end
+
+ it_behaves_like params[:shared_examples_name]
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/releases/create_spec.rb b/spec/graphql/mutations/releases/create_spec.rb
index d6305691dac..7776f968346 100644
--- a/spec/graphql/mutations/releases/create_spec.rb
+++ b/spec/graphql/mutations/releases/create_spec.rb
@@ -88,12 +88,9 @@ RSpec.describe Mutations::Releases::Create do
it 'creates the release with the correct milestone associations' do
expected_milestone_titles = [milestone_12_3.title, milestone_12_4.title]
- actual_milestone_titles = new_release.milestones.map { |m| m.title }
+ actual_milestone_titles = new_release.milestones.order_by_dates_and_title.map { |m| m.title }
- # Right now the milestones are returned in a non-deterministic order.
- # `match_array` should be updated to `eq` once
- # https://gitlab.com/gitlab-org/gitlab/-/issues/259012 is addressed.
- expect(actual_milestone_titles).to match_array(expected_milestone_titles)
+ expect(actual_milestone_titles).to eq(expected_milestone_titles)
end
describe 'asset links' do
diff --git a/spec/graphql/mutations/releases/update_spec.rb b/spec/graphql/mutations/releases/update_spec.rb
index 0406e9c96f3..c541afd53a1 100644
--- a/spec/graphql/mutations/releases/update_spec.rb
+++ b/spec/graphql/mutations/releases/update_spec.rb
@@ -48,12 +48,7 @@ RSpec.describe Mutations::Releases::Update do
expect(updated_release.name).to eq(name) unless except_for == :name
expect(updated_release.description).to eq(description) unless except_for == :description
expect(updated_release.released_at).to eq(released_at) unless except_for == :released_at
-
- # Right now the milestones are returned in a non-deterministic order.
- # Because of this, we need to allow for milestones to be returned in any order.
- # Once https://gitlab.com/gitlab-org/gitlab/-/issues/259012 has been
- # fixed, this can be updated to expect a specific order.
- expect(updated_release.milestones).to match_array([milestone_12_3, milestone_12_4]) unless except_for == :milestones
+ expect(updated_release.milestones.order_by_dates_and_title).to eq([milestone_12_3, milestone_12_4]) unless except_for == :milestones
end
end
diff --git a/spec/graphql/resolvers/ci/config_resolver_spec.rb b/spec/graphql/resolvers/ci/config_resolver_spec.rb
index 6911acdb4ec..ca7ae73fef8 100644
--- a/spec/graphql/resolvers/ci/config_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/config_resolver_spec.rb
@@ -7,16 +7,26 @@ RSpec.describe Resolvers::Ci::ConfigResolver do
describe '#resolve' do
before do
- yaml_processor_double = instance_double(::Gitlab::Ci::YamlProcessor)
- allow(yaml_processor_double).to receive(:execute).and_return(fake_result)
+ ci_lint_double = instance_double(::Gitlab::Ci::Lint)
+ allow(ci_lint_double).to receive(:validate).and_return(fake_result)
- allow(::Gitlab::Ci::YamlProcessor).to receive(:new).and_return(yaml_processor_double)
+ allow(::Gitlab::Ci::Lint).to receive(:new).and_return(ci_lint_double)
+ end
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository, creator: user, namespace: user.namespace) }
+
+ subject(:response) do
+ resolve(described_class,
+ args: { project_path: project.full_path, content: content },
+ ctx: { current_user: user })
end
context 'with a valid .gitlab-ci.yml' do
let(:fake_result) do
- ::Gitlab::Ci::YamlProcessor::Result.new(
- ci_config: ::Gitlab::Ci::Config.new(content),
+ ::Gitlab::Ci::Lint::Result.new(
+ merged_yaml: content,
+ jobs: [],
errors: [],
warnings: []
)
@@ -27,8 +37,6 @@ RSpec.describe Resolvers::Ci::ConfigResolver do
end
it 'lints the ci config file' do
- response = resolve(described_class, args: { content: content }, ctx: {})
-
expect(response[:status]).to eq(:valid)
expect(response[:errors]).to be_empty
end
@@ -38,16 +46,15 @@ RSpec.describe Resolvers::Ci::ConfigResolver do
let(:content) { 'invalid' }
let(:fake_result) do
- Gitlab::Ci::YamlProcessor::Result.new(
- ci_config: nil,
+ Gitlab::Ci::Lint::Result.new(
+ jobs: [],
+ merged_yaml: content,
errors: ['Invalid configuration format'],
warnings: []
)
end
it 'responds with errors about invalid syntax' do
- response = resolve(described_class, args: { content: content }, ctx: {})
-
expect(response[:status]).to eq(:invalid)
expect(response[:errors]).to eq(['Invalid configuration format'])
end
diff --git a/spec/graphql/resolvers/ci/jobs_resolver_spec.rb b/spec/graphql/resolvers/ci/jobs_resolver_spec.rb
index 46ee74a5f7e..c44f6b623d7 100644
--- a/spec/graphql/resolvers/ci/jobs_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/jobs_resolver_spec.rb
@@ -5,7 +5,8 @@ require 'spec_helper'
RSpec.describe Resolvers::Ci::JobsResolver do
include GraphqlHelpers
- let_it_be(:pipeline) { create(:ci_pipeline) }
+ let_it_be(:project) { create(:project, :repository, :public) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
before_all do
create(:ci_build, name: 'Normal job', pipeline: pipeline)
diff --git a/spec/graphql/resolvers/issues_resolver_spec.rb b/spec/graphql/resolvers/issues_resolver_spec.rb
index f6f746a8572..269ee9eabf9 100644
--- a/spec/graphql/resolvers/issues_resolver_spec.rb
+++ b/spec/graphql/resolvers/issues_resolver_spec.rb
@@ -264,13 +264,13 @@ RSpec.describe Resolvers::IssuesResolver do
end
it 'finds a specific issue with iid', :request_store do
- result = batch_sync(max_queries: 2) { resolve_issues(iid: issue1.iid) }
+ result = batch_sync(max_queries: 4) { resolve_issues(iid: issue1.iid) }
expect(result).to contain_exactly(issue1)
end
it 'batches queries that only include IIDs', :request_store do
- result = batch_sync(max_queries: 2) do
+ result = batch_sync(max_queries: 4) do
[issue1, issue2]
.map { |issue| resolve_issues(iid: issue.iid.to_s) }
.flat_map(&:to_a)
@@ -280,7 +280,7 @@ RSpec.describe Resolvers::IssuesResolver do
end
it 'finds a specific issue with iids', :request_store do
- result = batch_sync(max_queries: 2) do
+ result = batch_sync(max_queries: 4) do
resolve_issues(iids: [issue1.iid])
end
diff --git a/spec/graphql/resolvers/merge_requests_resolver_spec.rb b/spec/graphql/resolvers/merge_requests_resolver_spec.rb
index 63fbd04848d..50b9243efa5 100644
--- a/spec/graphql/resolvers/merge_requests_resolver_spec.rb
+++ b/spec/graphql/resolvers/merge_requests_resolver_spec.rb
@@ -231,6 +231,16 @@ RSpec.describe Resolvers::MergeRequestsResolver do
it 'sorts merge requests descending' do
expect(resolve_mr(project, sort: :merged_at_desc)).to eq [merge_request_3, merge_request_1, merge_request_with_milestone, merge_request_6, merge_request_5, merge_request_4, merge_request_2]
end
+
+ context 'when label filter is given and the optimized_issuable_label_filter feature flag is off' do
+ before do
+ stub_feature_flags(optimized_issuable_label_filter: false)
+ end
+
+ it 'does not raise PG::GroupingError' do
+ expect { resolve_mr(project, sort: :merged_at_desc, labels: %w[a b]) }.not_to raise_error
+ end
+ end
end
end
end
diff --git a/spec/graphql/resolvers/package_details_resolver_spec.rb b/spec/graphql/resolvers/package_details_resolver_spec.rb
new file mode 100644
index 00000000000..825b2aed40a
--- /dev/null
+++ b/spec/graphql/resolvers/package_details_resolver_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::PackageDetailsResolver do
+ include GraphqlHelpers
+
+ let_it_be_with_reload(:project) { create(:project) }
+ let_it_be(:user) { project.owner }
+ let_it_be(:package) { create(:composer_package, project: project) }
+
+ describe '#resolve' do
+ let(:args) do
+ { id: package.to_global_id.to_s }
+ end
+
+ subject { resolve(described_class, ctx: { current_user: user }, args: args).sync }
+
+ it { is_expected.to eq(package) }
+ end
+end
diff --git a/spec/graphql/resolvers/projects_resolver_spec.rb b/spec/graphql/resolvers/projects_resolver_spec.rb
index 3de54c7e410..34ddc9cd8cb 100644
--- a/spec/graphql/resolvers/projects_resolver_spec.rb
+++ b/spec/graphql/resolvers/projects_resolver_spec.rb
@@ -129,7 +129,7 @@ RSpec.describe Resolvers::ProjectsResolver do
let(:filters) { { search: 'projA', sort: 'similarity' } }
it 'returns projects in order of similarity to search' do
- stub_feature_flags(project_finder_similarity_sort: true)
+ stub_feature_flags(project_finder_similarity_sort: current_user)
is_expected.to eq([named_project3, named_project1, named_project2])
end
diff --git a/spec/graphql/resolvers/release_milestones_resolver_spec.rb b/spec/graphql/resolvers/release_milestones_resolver_spec.rb
new file mode 100644
index 00000000000..5f66cba859d
--- /dev/null
+++ b/spec/graphql/resolvers/release_milestones_resolver_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::ReleaseMilestonesResolver do
+ include GraphqlHelpers
+
+ let_it_be(:release) { create(:release, :with_milestones, milestones_count: 2) }
+
+ let(:resolved) do
+ resolve(described_class, obj: release)
+ end
+
+ describe '#resolve' do
+ it "returns an OffsetActiveRecordRelationConnection" do
+ expect(resolved).to be_a(::Gitlab::Graphql::Pagination::OffsetActiveRecordRelationConnection)
+ end
+
+ it "includes the release's milestones in the returned OffsetActiveRecordRelationConnection" do
+ expect(resolved.items).to eq(release.milestones.order_by_dates_and_title)
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/user_notes_count_resolver_spec.rb b/spec/graphql/resolvers/user_notes_count_resolver_spec.rb
index 3cb0810c698..6cf23a2f57f 100644
--- a/spec/graphql/resolvers/user_notes_count_resolver_spec.rb
+++ b/spec/graphql/resolvers/user_notes_count_resolver_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe Resolvers::UserNotesCountResolver do
context 'when a user does not have permission to view notes' do
subject { batch_sync { resolve_user_notes_count(private_issue) } }
- it 'returns no notes' do
+ it 'raises an error' do
expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
end
end
@@ -80,7 +80,7 @@ RSpec.describe Resolvers::UserNotesCountResolver do
context 'when a user does not have permission to view notes' do
subject { batch_sync { resolve_user_notes_count(private_merge_request) } }
- it 'returns no notes' do
+ it 'raises an error' do
expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
end
end
diff --git a/spec/graphql/resolvers/users_resolver_spec.rb b/spec/graphql/resolvers/users_resolver_spec.rb
index 1aa24055a89..031d7c99eef 100644
--- a/spec/graphql/resolvers/users_resolver_spec.rb
+++ b/spec/graphql/resolvers/users_resolver_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe Resolvers::UsersResolver do
context 'when both ids and usernames are passed ' do
it 'raises an error' do
- expect { resolve_users(ids: [user1.to_global_id.to_s], usernames: [user1.username]) }
+ expect { resolve_users(args: { ids: [user1.to_global_id.to_s], usernames: [user1.username] }) }
.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
end
end
@@ -35,7 +35,7 @@ RSpec.describe Resolvers::UsersResolver do
context 'when a set of IDs is passed' do
it 'returns those users' do
expect(
- resolve_users(ids: [user1.to_global_id.to_s, user2.to_global_id.to_s])
+ resolve_users(args: { ids: [user1.to_global_id.to_s, user2.to_global_id.to_s] })
).to contain_exactly(user1, user2)
end
end
@@ -43,21 +43,31 @@ RSpec.describe Resolvers::UsersResolver do
context 'when a set of usernames is passed' do
it 'returns those users' do
expect(
- resolve_users(usernames: [user1.username, user2.username])
+ resolve_users(args: { usernames: [user1.username, user2.username] })
).to contain_exactly(user1, user2)
end
end
+ context 'when admins is true', :enable_admin_mode do
+ let(:admin_user) { create(:user, :admin) }
+
+ it 'returns only admins' do
+ expect(
+ resolve_users(args: { admins: true }, ctx: { current_user: admin_user })
+ ).to contain_exactly(admin_user)
+ end
+ end
+
context 'when a search term is passed' do
it 'returns all users who match', :aggregate_failures do
- expect(resolve_users(search: "some")).to contain_exactly(user1, user2)
- expect(resolve_users(search: "123784")).to contain_exactly(user2)
- expect(resolve_users(search: "someperson")).to contain_exactly(user1)
+ expect(resolve_users( args: { search: "some" } )).to contain_exactly(user1, user2)
+ expect(resolve_users( args: { search: "123784" } )).to contain_exactly(user2)
+ expect(resolve_users( args: { search: "someperson" } )).to contain_exactly(user1)
end
end
end
- def resolve_users(args = {})
- resolve(described_class, args: args)
+ def resolve_users(args: {}, ctx: {})
+ resolve(described_class, args: args, ctx: ctx)
end
end
diff --git a/spec/graphql/types/admin/analytics/instance_statistics/measurement_type_spec.rb b/spec/graphql/types/admin/analytics/instance_statistics/measurement_type_spec.rb
index de8143a5466..ffb1a0f30c9 100644
--- a/spec/graphql/types/admin/analytics/instance_statistics/measurement_type_spec.rb
+++ b/spec/graphql/types/admin/analytics/instance_statistics/measurement_type_spec.rb
@@ -8,4 +8,48 @@ RSpec.describe GitlabSchema.types['InstanceStatisticsMeasurement'] do
it { is_expected.to have_graphql_field(:recorded_at) }
it { is_expected.to have_graphql_field(:identifier) }
it { is_expected.to have_graphql_field(:count) }
+
+ describe 'authorization' do
+ let_it_be(:measurement) { create(:instance_statistics_measurement, :project_count) }
+ let(:user) { create(:user) }
+
+ let(:query) do
+ <<~GRAPHQL
+ query instanceStatisticsMeasurements($identifier: MeasurementIdentifier!) {
+ instanceStatisticsMeasurements(identifier: $identifier) {
+ nodes {
+ count
+ identifier
+ }
+ }
+ }
+ GRAPHQL
+ end
+
+ subject do
+ GitlabSchema.execute(
+ query,
+ variables: { identifier: 'PROJECTS' },
+ context: { current_user: user }
+ ).to_h
+ end
+
+ context 'when the user is not an admin' do
+ it 'returns no data' do
+ expect(subject.dig('data', 'instanceStatisticsMeasurements')).to be_nil
+ end
+ end
+
+ context 'when the user is an admin' do
+ let(:user) { create(:user, :admin) }
+
+ before do
+ stub_feature_flags(user_mode_in_session: false)
+ end
+
+ it 'returns data' do
+ expect(subject.dig('data', 'instanceStatisticsMeasurements', 'nodes')).not_to be_empty
+ end
+ end
+ end
end
diff --git a/spec/graphql/types/base_enum_spec.rb b/spec/graphql/types/base_enum_spec.rb
index b7adcf217f6..744aee40044 100644
--- a/spec/graphql/types/base_enum_spec.rb
+++ b/spec/graphql/types/base_enum_spec.rb
@@ -3,7 +3,75 @@
require 'spec_helper'
RSpec.describe Types::BaseEnum do
- describe '#enum' do
+ describe '.declarative_enum' do
+ let(:use_name) { true }
+ let(:use_description) { true }
+ let(:enum_type) do
+ Class.new(described_class) do
+ graphql_name 'OriginalName'
+ description 'Original description'
+ end
+ end
+
+ let(:enum_module) do
+ Module.new do
+ extend DeclarativeEnum
+
+ name 'Name'
+ description 'Description'
+
+ define do
+ foo value: 0, description: 'description of foo'
+ end
+ end
+ end
+
+ subject(:set_declarative_enum) { enum_type.declarative_enum(enum_module, use_name: use_name, use_description: use_description) }
+
+ describe '#graphql_name' do
+ context 'when the use_name is `true`' do
+ it 'changes the graphql_name' do
+ expect { set_declarative_enum }.to change { enum_type.graphql_name }.from('OriginalName').to('Name')
+ end
+ end
+
+ context 'when the use_name is `false`' do
+ let(:use_name) { false }
+
+ it 'does not change the graphql_name' do
+ expect { set_declarative_enum }.not_to change { enum_type.graphql_name }.from('OriginalName')
+ end
+ end
+ end
+
+ describe '#description' do
+ context 'when the use_description is `true`' do
+ it 'changes the description' do
+ expect { set_declarative_enum }.to change { enum_type.description }.from('Original description').to('Description')
+ end
+ end
+
+ context 'when the use_description is `false`' do
+ let(:use_description) { false }
+
+ it 'does not change the description' do
+ expect { set_declarative_enum }.not_to change { enum_type.description }.from('Original description')
+ end
+ end
+ end
+
+ describe '#values' do
+ it 'sets the values defined by the declarative enum' do
+ set_declarative_enum
+
+ expect(enum_type.values.keys).to eq(['FOO'])
+ expect(enum_type.values.values.map(&:description)).to eq(['description of foo'])
+ expect(enum_type.values.values.map(&:value)).to eq([0])
+ end
+ end
+ end
+
+ describe '.enum' do
let(:enum) do
Class.new(described_class) do
value 'TEST', value: 3
diff --git a/spec/graphql/types/board_type_spec.rb b/spec/graphql/types/board_type_spec.rb
index b02b342390d..5ea87d5f473 100644
--- a/spec/graphql/types/board_type_spec.rb
+++ b/spec/graphql/types/board_type_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe GitlabSchema.types['Board'] do
specify { expect(described_class).to require_graphql_authorizations(:read_board) }
it 'has specific fields' do
- expected_fields = %w[id name]
+ expected_fields = %w[id name web_url web_path]
expect(described_class).to include_graphql_fields(*expected_fields)
end
diff --git a/spec/graphql/types/ci/config/job_restriction_type_spec.rb b/spec/graphql/types/ci/config/job_restriction_type_spec.rb
new file mode 100644
index 00000000000..dd46a38b7c2
--- /dev/null
+++ b/spec/graphql/types/ci/config/job_restriction_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::Config::JobRestrictionType do
+ specify { expect(described_class.graphql_name).to eq('CiConfigJobRestriction') }
+
+ it 'exposes the expected fields' do
+ expected_fields = %i[refs]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/ci/config/job_type_spec.rb b/spec/graphql/types/ci/config/job_type_spec.rb
index 600d665a84b..de4e167f69c 100644
--- a/spec/graphql/types/ci/config/job_type_spec.rb
+++ b/spec/graphql/types/ci/config/job_type_spec.rb
@@ -7,10 +7,19 @@ RSpec.describe Types::Ci::Config::JobType do
it 'exposes the expected fields' do
expected_fields = %i[
+ afterScript
+ allowFailure
+ beforeScript
+ environment
+ except
+ script
name
+ only
group_name
stage
+ tags
needs
+ when
]
expect(described_class).to have_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/ci/job_type_spec.rb b/spec/graphql/types/ci/job_type_spec.rb
index 441a719df8c..e277916f5cb 100644
--- a/spec/graphql/types/ci/job_type_spec.rb
+++ b/spec/graphql/types/ci/job_type_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Types::Ci::JobType do
specify { expect(described_class.graphql_name).to eq('CiJob') }
+ specify { expect(described_class).to require_graphql_authorizations(:read_commit_status) }
it 'exposes the expected fields' do
expected_fields = %i[
diff --git a/spec/graphql/types/issue_type_spec.rb b/spec/graphql/types/issue_type_spec.rb
index 558fc479af1..21fc530149c 100644
--- a/spec/graphql/types/issue_type_spec.rb
+++ b/spec/graphql/types/issue_type_spec.rb
@@ -17,7 +17,8 @@ RSpec.describe GitlabSchema.types['Issue'] do
fields = %i[id iid title description state reference author assignees updated_by participants labels milestone due_date
confidential discussion_locked upvotes downvotes user_notes_count user_discussions_count web_path web_url relative_position
emails_disabled subscribed time_estimate total_time_spent human_time_estimate human_total_time_spent closed_at created_at updated_at task_completion_status
- design_collection alert_management_alert severity current_user_todos moved moved_to]
+ design_collection alert_management_alert severity current_user_todos moved moved_to
+ create_note_email]
fields.each do |field_name|
expect(described_class).to have_graphql_field(field_name)
diff --git a/spec/graphql/types/merge_request_type_spec.rb b/spec/graphql/types/merge_request_type_spec.rb
index 51e7b4029d5..63d288934e5 100644
--- a/spec/graphql/types/merge_request_type_spec.rb
+++ b/spec/graphql/types/merge_request_type_spec.rb
@@ -25,21 +25,15 @@ RSpec.describe GitlabSchema.types['MergeRequest'] do
merge_ongoing mergeable_discussions_state web_url
source_branch_exists target_branch_exists
upvotes downvotes head_pipeline pipelines task_completion_status
- milestone assignees participants subscribed labels discussion_locked time_estimate
+ milestone assignees reviewers participants subscribed labels discussion_locked time_estimate
total_time_spent reference author merged_at commit_count current_user_todos
conflicts auto_merge_enabled approved_by source_branch_protected
default_merge_commit_message_with_description squash_on_merge available_auto_merge_strategies
- has_ci mergeable commits_without_merge_commits security_auto_fix
+ has_ci mergeable commits_without_merge_commits squash security_auto_fix default_squash_commit_message
+ auto_merge_strategy merge_user
]
- if Gitlab.ee?
- expected_fields << 'approved'
- expected_fields << 'approvals_left'
- expected_fields << 'approvals_required'
- expected_fields << 'merge_trains_count'
- end
-
- expect(described_class).to have_graphql_fields(*expected_fields)
+ expect(described_class).to have_graphql_fields(*expected_fields).at_least
end
describe '#pipelines' do
diff --git a/spec/graphql/types/namespace/package_settings_type_spec.rb b/spec/graphql/types/namespace/package_settings_type_spec.rb
new file mode 100644
index 00000000000..b9592d230ca
--- /dev/null
+++ b/spec/graphql/types/namespace/package_settings_type_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['PackageSettings'] do
+ specify { expect(described_class.graphql_name).to eq('PackageSettings') }
+
+ specify { expect(described_class.description).to eq('Namespace-level Package Registry settings') }
+
+ specify { expect(described_class).to require_graphql_authorizations(:read_package_settings) }
+
+ describe 'maven_duplicate_exception_regex field' do
+ subject { described_class.fields['mavenDuplicateExceptionRegex'] }
+
+ it { is_expected.to have_graphql_type(Types::UntrustedRegexp) }
+ end
+end
diff --git a/spec/graphql/types/notes/note_type_spec.rb b/spec/graphql/types/notes/note_type_spec.rb
index 180d13d35d2..03ff7828cf5 100644
--- a/spec/graphql/types/notes/note_type_spec.rb
+++ b/spec/graphql/types/notes/note_type_spec.rb
@@ -22,6 +22,7 @@ RSpec.describe GitlabSchema.types['Note'] do
system_note_icon_name
updated_at
user_permissions
+ url
]
expect(described_class).to have_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/packages/composer/details_type_spec.rb b/spec/graphql/types/packages/composer/details_type_spec.rb
new file mode 100644
index 00000000000..2e4cb965ded
--- /dev/null
+++ b/spec/graphql/types/packages/composer/details_type_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['PackageComposerDetails'] do
+ it { expect(described_class.graphql_name).to eq('PackageComposerDetails') }
+
+ it 'includes all the package fields' do
+ expected_fields = %w[
+ id name version created_at updated_at package_type tags project pipelines versions
+ ]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+
+ it 'includes composer specific fields' do
+ expected_fields = %w[
+ composer_metadatum
+ ]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/packages/composer/json_type_spec.rb b/spec/graphql/types/packages/composer/json_type_spec.rb
new file mode 100644
index 00000000000..af5194ffb49
--- /dev/null
+++ b/spec/graphql/types/packages/composer/json_type_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['PackageComposerJsonType'] do
+ it { expect(described_class.graphql_name).to eq('PackageComposerJsonType') }
+
+ it 'includes composer json fields' do
+ expected_fields = %w[
+ name type license version
+ ]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/packages/composer/metadatum_type_spec.rb b/spec/graphql/types/packages/composer/metadatum_type_spec.rb
new file mode 100644
index 00000000000..0f47d8f1812
--- /dev/null
+++ b/spec/graphql/types/packages/composer/metadatum_type_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['PackageComposerMetadatumType'] do
+ it { expect(described_class.graphql_name).to eq('PackageComposerMetadatumType') }
+
+ it 'includes composer metadatum fields' do
+ expected_fields = %w[
+ target_sha composer_json
+ ]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/package_type_enum_spec.rb b/spec/graphql/types/packages/package_type_enum_spec.rb
index 407d5786f65..407d5786f65 100644
--- a/spec/graphql/types/package_type_enum_spec.rb
+++ b/spec/graphql/types/packages/package_type_enum_spec.rb
diff --git a/spec/graphql/types/package_type_spec.rb b/spec/graphql/types/packages/package_type_spec.rb
index 22048e7a693..7003a4d4d07 100644
--- a/spec/graphql/types/package_type_spec.rb
+++ b/spec/graphql/types/packages/package_type_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe GitlabSchema.types['Package'] do
it 'includes all the package fields' do
expected_fields = %w[
- id name version created_at updated_at package_type
+ id name version created_at updated_at package_type tags project pipelines versions
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/packages/tag_type_spec.rb b/spec/graphql/types/packages/tag_type_spec.rb
new file mode 100644
index 00000000000..83b705157d8
--- /dev/null
+++ b/spec/graphql/types/packages/tag_type_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['PackageTag'] do
+ it { expect(described_class.graphql_name).to eq('PackageTag') }
+
+ it 'includes all the package tag fields' do
+ expected_fields = %w[
+ id name created_at updated_at
+ ]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb
index b3028e034cc..9d0d7a3918a 100644
--- a/spec/graphql/types/project_type_spec.rb
+++ b/spec/graphql/types/project_type_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe GitlabSchema.types['Project'] do
container_expiration_policy service_desk_enabled service_desk_address
issue_status_counts terraform_states alert_management_integrations
container_repositories container_repositories_count
- pipeline_analytics total_pipeline_duration squash_read_only
+ pipeline_analytics squash_read_only
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/projects/service_type_spec.rb b/spec/graphql/types/projects/service_type_spec.rb
index f110322ac89..cca7c49e132 100644
--- a/spec/graphql/types/projects/service_type_spec.rb
+++ b/spec/graphql/types/projects/service_type_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Types::Projects::ServiceType do
it 'resolves the corresponding type for objects' do
expect(described_class.resolve_type(build(:jira_service), {})).to eq(Types::Projects::Services::JiraServiceType)
expect(described_class.resolve_type(build(:service), {})).to eq(Types::Projects::Services::BaseServiceType)
- expect(described_class.resolve_type(build(:alerts_service), {})).to eq(Types::Projects::Services::BaseServiceType)
+ expect(described_class.resolve_type(build(:drone_ci_service), {})).to eq(Types::Projects::Services::BaseServiceType)
expect(described_class.resolve_type(build(:custom_issue_tracker_service), {})).to eq(Types::Projects::Services::BaseServiceType)
end
end
diff --git a/spec/graphql/types/query_type_spec.rb b/spec/graphql/types/query_type_spec.rb
index 7a0b3035607..3e716865e56 100644
--- a/spec/graphql/types/query_type_spec.rb
+++ b/spec/graphql/types/query_type_spec.rb
@@ -94,4 +94,10 @@ RSpec.describe GitlabSchema.types['Query'] do
it { is_expected.to have_graphql_type(Types::ContainerRepositoryDetailsType) }
end
+
+ describe 'package_composer_details field' do
+ subject { described_class.fields['packageComposerDetails'] }
+
+ it { is_expected.to have_graphql_type(Types::Packages::Composer::DetailsType) }
+ end
end
diff --git a/spec/graphql/types/repository_type_spec.rb b/spec/graphql/types/repository_type_spec.rb
index 27780476421..e9199bd286e 100644
--- a/spec/graphql/types/repository_type_spec.rb
+++ b/spec/graphql/types/repository_type_spec.rb
@@ -10,4 +10,6 @@ RSpec.describe GitlabSchema.types['Repository'] do
specify { expect(described_class).to have_graphql_field(:root_ref) }
specify { expect(described_class).to have_graphql_field(:tree) }
+
+ specify { expect(described_class).to have_graphql_field(:exists, calls_gitaly?: true, complexity: 2) }
end
diff --git a/spec/helpers/blob_helper_spec.rb b/spec/helpers/blob_helper_spec.rb
index 764c582e987..b584a906565 100644
--- a/spec/helpers/blob_helper_spec.rb
+++ b/spec/helpers/blob_helper_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe BlobHelper do
end
end
- describe "#edit_blob_link" do
+ describe "#edit_blob_button" do
let(:namespace) { create(:namespace, name: 'gitlab') }
let(:project) { create(:project, :repository, namespace: namespace) }
@@ -28,12 +28,13 @@ RSpec.describe BlobHelper do
allow(helper).to receive(:can_collaborate_with_project?).and_return(true)
end
- it 'verifies blob is text' do
+ it 'does not render edit button when blob is not text' do
expect(helper).not_to receive(:blob_text_viewable?)
- button = helper.edit_blob_button(project, 'refs/heads/master', 'README.md')
+ # RADME.md is intentionally misspelled so the blob does not exist.
+ button = helper.edit_blob_button(project, 'refs/heads/master', 'RADME.md')
- expect(button).to start_with('<button')
+ expect(button).to eq(nil)
end
it 'uses the passed blob instead retrieve from repository' do
@@ -94,7 +95,7 @@ RSpec.describe BlobHelper do
context 'viewer related' do
include FakeBlobHelpers
- let(:project) { build(:project, lfs_enabled: true) }
+ let_it_be(:project) { create(:project, lfs_enabled: true) }
before do
allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
diff --git a/spec/helpers/ci/pipelines_helper_spec.rb b/spec/helpers/ci/pipelines_helper_spec.rb
index a96d6e7711f..94b5e707d73 100644
--- a/spec/helpers/ci/pipelines_helper_spec.rb
+++ b/spec/helpers/ci/pipelines_helper_spec.rb
@@ -52,4 +52,23 @@ RSpec.describe Ci::PipelinesHelper do
end
end
end
+
+ describe 'has_gitlab_ci?' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject(:has_gitlab_ci?) { helper.has_gitlab_ci?(project) }
+
+ let(:project) { double(:project, has_ci?: has_ci?, builds_enabled?: builds_enabled?) }
+
+ where(:builds_enabled?, :has_ci?, :result) do
+ true | true | true
+ true | false | false
+ false | true | false
+ false | false | false
+ end
+
+ with_them do
+ it { expect(has_gitlab_ci?).to eq(result) }
+ end
+ end
end
diff --git a/spec/helpers/ci/triggers_helper_spec.rb b/spec/helpers/ci/triggers_helper_spec.rb
new file mode 100644
index 00000000000..5e43dbfdd5c
--- /dev/null
+++ b/spec/helpers/ci/triggers_helper_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::TriggersHelper do
+ let(:project_id) { 1 }
+
+ describe '.builds_trigger_url' do
+ subject { helper.builds_trigger_url(project_id, ref: ref) }
+
+ context 'with no ref' do
+ let(:ref) { nil }
+
+ specify { expect(subject).to eq "#{Settings.gitlab.url}/api/v4/projects/1/trigger/pipeline" }
+ end
+
+ context 'with ref' do
+ let(:ref) { 'master' }
+
+ specify { expect(subject).to eq "#{Settings.gitlab.url}/api/v4/projects/1/ref/master/trigger/pipeline" }
+ end
+ end
+
+ describe '.service_trigger_url' do
+ subject { helper.service_trigger_url(service) }
+
+ let(:service) { double(project_id: 1, to_param: 'param') }
+
+ specify { expect(subject).to eq "#{Settings.gitlab.url}/api/v4/projects/1/services/param/trigger" }
+ end
+end
diff --git a/spec/helpers/commits_helper_spec.rb b/spec/helpers/commits_helper_spec.rb
index cc318a21fc9..8a570bf9a90 100644
--- a/spec/helpers/commits_helper_spec.rb
+++ b/spec/helpers/commits_helper_spec.rb
@@ -3,6 +3,39 @@
require 'spec_helper'
RSpec.describe CommitsHelper do
+ describe '#revert_commit_link' do
+ context 'when current_user exists' do
+ before do
+ allow(helper).to receive(:current_user).and_return(double('User'))
+ allow(helper).to receive(:can_collaborate_with_project?).and_return(true)
+ end
+
+ it 'renders a div for Vue' do
+ result = helper.revert_commit_link('_commit_', '_path_', pajamas: true)
+
+ expect(result).to include('js-revert-commit-trigger')
+ end
+
+ it 'does not render a div for Vue' do
+ result = helper.revert_commit_link('_commit_', '_path_')
+
+ expect(result).not_to include('js-revert-commit-trigger')
+ end
+ end
+
+ context 'when current_user does not exist' do
+ before do
+ allow(helper).to receive(:current_user).and_return(nil)
+ end
+
+ it 'does not render anything' do
+ result = helper.revert_commit_link(double('Commit'), '_path_')
+
+ expect(result).to be_nil
+ end
+ end
+ end
+
describe 'commit_author_link' do
it 'escapes the author email' do
commit = double(
diff --git a/spec/helpers/dashboard_helper_spec.rb b/spec/helpers/dashboard_helper_spec.rb
index 65182dcb729..8a76771be0a 100644
--- a/spec/helpers/dashboard_helper_spec.rb
+++ b/spec/helpers/dashboard_helper_spec.rb
@@ -89,4 +89,10 @@ RSpec.describe DashboardHelper do
it { is_expected.to eq(false) }
end
+
+ describe '#reviewer_mrs_dashboard_path' do
+ subject { helper.reviewer_mrs_dashboard_path }
+
+ it { is_expected.to eq(merge_requests_dashboard_path(reviewer_username: user.username)) }
+ end
end
diff --git a/spec/helpers/graph_helper_spec.rb b/spec/helpers/graph_helper_spec.rb
index 3c7e4f970c3..682f6365481 100644
--- a/spec/helpers/graph_helper_spec.rb
+++ b/spec/helpers/graph_helper_spec.rb
@@ -15,4 +15,16 @@ RSpec.describe GraphHelper do
expect(refs).to match('master')
end
end
+
+ describe '#should_render_deployment_frequency_charts' do
+ let(:project) { create(:project, :private) }
+
+ before do
+ self.instance_variable_set(:@project, project)
+ end
+
+ it 'always returns false' do
+ expect(should_render_deployment_frequency_charts).to be(false)
+ end
+ end
end
diff --git a/spec/helpers/groups/group_members_helper_spec.rb b/spec/helpers/groups/group_members_helper_spec.rb
index 222cca43860..d75124b6da7 100644
--- a/spec/helpers/groups/group_members_helper_spec.rb
+++ b/spec/helpers/groups/group_members_helper_spec.rb
@@ -34,38 +34,38 @@ RSpec.describe Groups::GroupMembersHelper do
end
describe '#members_data_json' do
- shared_examples 'group_members.json' do
+ shared_examples 'members.json' do
it 'matches json schema' do
json = helper.members_data_json(group, present_members([group_member]))
- expect(json).to match_schema('group_members')
+ expect(json).to match_schema('members')
end
end
context 'for a group member' do
let(:group_member) { create(:group_member, group: group, created_by: current_user) }
- it_behaves_like 'group_members.json'
+ it_behaves_like 'members.json'
context 'with user status set' do
let(:user) { create(:user) }
let!(:status) { create(:user_status, user: user) }
let(:group_member) { create(:group_member, group: group, user: user, created_by: current_user) }
- it_behaves_like 'group_members.json'
+ it_behaves_like 'members.json'
end
end
context 'for an invited group member' do
let(:group_member) { create(:group_member, :invited, group: group, created_by: current_user) }
- it_behaves_like 'group_members.json'
+ it_behaves_like 'members.json'
end
context 'for an access request' do
let(:group_member) { create(:group_member, :access_request, group: group, created_by: current_user) }
- it_behaves_like 'group_members.json'
+ it_behaves_like 'members.json'
end
end
diff --git a/spec/helpers/invite_members_helper_spec.rb b/spec/helpers/invite_members_helper_spec.rb
index d75b3c9f2e3..914d0931476 100644
--- a/spec/helpers/invite_members_helper_spec.rb
+++ b/spec/helpers/invite_members_helper_spec.rb
@@ -114,4 +114,69 @@ RSpec.describe InviteMembersHelper do
end
end
end
+
+ describe '#dropdown_invite_members_link' do
+ shared_examples_for 'dropdown invite members link' do
+ let(:link_regex) do
+ /data-track-event="click_link".*data-track-property="_track_property_".*Invite members/
+ end
+
+ before do
+ allow(helper).to receive(:experiment_tracking_category_and_group) { '_track_property_' }
+ allow(helper).to receive(:tracking_label).with(owner)
+ allow(helper).to receive(:current_user) { owner }
+ end
+
+ it 'records the experiment' do
+ allow(helper).to receive(:experiment_enabled?)
+
+ helper.dropdown_invite_members_link(form_model)
+
+ expect(helper).to have_received(:experiment_tracking_category_and_group)
+ .with(:invite_members_new_dropdown, subject: owner)
+ end
+
+ context 'with experiment enabled' do
+ before do
+ allow(helper).to receive(:experiment_enabled?).with(:invite_members_new_dropdown) { true }
+ end
+
+ it 'returns link' do
+ link = helper.dropdown_invite_members_link(form_model)
+
+ expect(link).to match(link_regex)
+ expect(link).to include(link_href)
+ expect(link).to include('gl-emoji')
+ end
+ end
+
+ context 'with no experiment enabled' do
+ before do
+ allow(helper).to receive(:experiment_enabled?).with(:invite_members_new_dropdown) { false }
+ end
+
+ it 'returns link' do
+ link = helper.dropdown_invite_members_link(form_model)
+
+ expect(link).to match(link_regex)
+ expect(link).to include(link_href)
+ expect(link).not_to include('gl-emoji')
+ end
+ end
+ end
+
+ context 'with a project' do
+ let_it_be(:form_model) { project }
+ let(:link_href) { "href=\"#{project_project_members_path(form_model)}\"" }
+
+ it_behaves_like 'dropdown invite members link'
+ end
+
+ context 'with a group' do
+ let_it_be(:form_model) { create(:group) }
+ let(:link_href) { "href=\"#{group_group_members_path(form_model)}\"" }
+
+ it_behaves_like 'dropdown invite members link'
+ end
+ end
end
diff --git a/spec/helpers/jira_connect_helper_spec.rb b/spec/helpers/jira_connect_helper_spec.rb
new file mode 100644
index 00000000000..a99072527c8
--- /dev/null
+++ b/spec/helpers/jira_connect_helper_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe JiraConnectHelper do
+ describe '#jira_connect_app_data' do
+ subject { helper.jira_connect_app_data }
+
+ it 'includes Jira Connect app attributes' do
+ is_expected.to include(
+ :groups_path
+ )
+ end
+ end
+end
diff --git a/spec/helpers/merge_requests_helper_spec.rb b/spec/helpers/merge_requests_helper_spec.rb
index 377e2c43a72..821faaab194 100644
--- a/spec/helpers/merge_requests_helper_spec.rb
+++ b/spec/helpers/merge_requests_helper_spec.rb
@@ -67,4 +67,37 @@ RSpec.describe MergeRequestsHelper do
end
end
end
+
+ describe '#user_merge_requests_counts' do
+ let(:user) do
+ double(
+ assigned_open_merge_requests_count: 1,
+ review_requested_open_merge_requests_count: 2
+ )
+ end
+
+ subject { helper.user_merge_requests_counts }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+ end
+
+ it "returns assigned, review requested and total merge request counts" do
+ expect(subject).to eq(
+ assigned: user.assigned_open_merge_requests_count,
+ review_requested: user.review_requested_open_merge_requests_count,
+ total: user.assigned_open_merge_requests_count + user.review_requested_open_merge_requests_count
+ )
+ end
+
+ context 'when merge_request_reviewers is disabled' do
+ before do
+ stub_feature_flags(merge_request_reviewers: false)
+ end
+
+ it 'returns review_requested as 0' do
+ expect(subject[:review_requested]).to eq(0)
+ end
+ end
+ end
end
diff --git a/spec/helpers/projects/project_members_helper_spec.rb b/spec/helpers/projects/project_members_helper_spec.rb
new file mode 100644
index 00000000000..cc290367e34
--- /dev/null
+++ b/spec/helpers/projects/project_members_helper_spec.rb
@@ -0,0 +1,145 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::ProjectMembersHelper do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+
+ let(:allow_admin_project) { nil }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(current_user)
+ allow(helper).to receive(:can?).with(current_user, :admin_project_member, project).and_return(allow_admin_project)
+ end
+
+ shared_examples 'when `current_user` does not have `admin_project_member` permissions' do
+ let(:allow_admin_project) { false }
+
+ it { is_expected.to be(false) }
+ end
+
+ describe '#can_manage_project_members?' do
+ subject { helper.can_manage_project_members?(project) }
+
+ context 'when `current_user` has `admin_project_member` permissions' do
+ let(:allow_admin_project) { true }
+
+ it { is_expected.to be(true) }
+ end
+
+ include_examples 'when `current_user` does not have `admin_project_member` permissions'
+ end
+
+ describe '#show_groups?' do
+ subject { helper.show_groups?(project.project_group_links) }
+
+ context 'when group links exist' do
+ let!(:project_group_link) { create(:project_group_link, project: project) }
+
+ it { is_expected.to be(true) }
+ end
+
+ context 'when `search_groups` param is set' do
+ before do
+ allow(helper).to receive(:params).and_return({ search_groups: 'foo' })
+ end
+
+ it { is_expected.to be(true) }
+ end
+
+ context 'when `search_groups` param is not set and group links do not exist' do
+ it { is_expected.to be(false) }
+ end
+ end
+
+ describe '#show_invited_members?' do
+ subject { helper.show_invited_members?(project, project.project_members.invite) }
+
+ context 'when `current_user` has `admin_project_member` permissions' do
+ let(:allow_admin_project) { true }
+
+ context 'when invited members exist' do
+ let!(:invite) { create(:project_member, :invited, project: project) }
+
+ it { is_expected.to be(true) }
+ end
+
+ context 'when invited members do not exist' do
+ it { is_expected.to be(false) }
+ end
+ end
+
+ include_examples 'when `current_user` does not have `admin_project_member` permissions'
+ end
+
+ describe '#show_access_requests?' do
+ subject { helper.show_access_requests?(project, project.requesters) }
+
+ context 'when `current_user` has `admin_project_member` permissions' do
+ let(:allow_admin_project) { true }
+
+ context 'when access requests exist' do
+ let!(:access_request) { create(:project_member, :access_request, project: project) }
+
+ it { is_expected.to be(true) }
+ end
+
+ context 'when access requests do not exist' do
+ it { is_expected.to be(false) }
+ end
+ end
+
+ include_examples 'when `current_user` does not have `admin_project_member` permissions'
+ end
+
+ describe '#groups_tab_active?' do
+ subject { helper.groups_tab_active? }
+
+ context 'when `search_groups` param is set' do
+ before do
+ allow(helper).to receive(:params).and_return({ search_groups: 'foo' })
+ end
+
+ it { is_expected.to be(true) }
+ end
+
+ context 'when `search_groups` param is not set' do
+ it { is_expected.to be(false) }
+ end
+ end
+
+ describe '#current_user_is_group_owner?' do
+ let(:group) { create(:group) }
+
+ subject { helper.current_user_is_group_owner?(project2) }
+
+ describe "when current user is the owner of the project's parent group" do
+ let(:project2) { create(:project, namespace: group) }
+
+ before do
+ group.add_owner(current_user)
+ end
+
+ it { is_expected.to be(true) }
+ end
+
+ describe "when current user is not the owner of the project's parent group" do
+ let_it_be(:user) { create(:user) }
+ let(:project2) { create(:project, namespace: group) }
+
+ before do
+ group.add_owner(user)
+ end
+
+ it { is_expected.to be(false) }
+ end
+
+ describe "when project does not have a parent group" do
+ let(:user) { create(:user) }
+ let(:project2) { create(:project, namespace: user.namespace) }
+
+ it { is_expected.to be(false) }
+ end
+ end
+end
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index d28d5ecda1b..b920e2e5600 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -92,37 +92,11 @@ RSpec.describe ProjectsHelper do
expect(helper.can_change_visibility_level?(project, user)).to be_falsey
end
- it "returns true if there are permissions and it is not fork" do
+ it "returns true if there are permissions" do
allow(helper).to receive(:can?) { true }
expect(helper.can_change_visibility_level?(project, user)).to be_truthy
end
-
- it 'allows visibility level to be changed if the project is forked' do
- allow(helper).to receive(:can?).with(user, :change_visibility_level, project) { true }
- project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
- fork_project(project)
-
- expect(helper.can_change_visibility_level?(project, user)).to be_truthy
- end
-
- context "forks" do
- it "returns false if there are permissions and origin project is PRIVATE" do
- allow(helper).to receive(:can?) { true }
-
- project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
-
- expect(helper.can_change_visibility_level?(forked_project, user)).to be_falsey
- end
-
- it "returns true if there are permissions and origin project is INTERNAL" do
- allow(helper).to receive(:can?) { true }
-
- project.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
-
- expect(helper.can_change_visibility_level?(forked_project, user)).to be_truthy
- end
- end
end
describe '#can_disable_emails?' do
@@ -459,6 +433,7 @@ RSpec.describe ProjectsHelper do
context 'when project has external wiki' do
it 'includes external wiki tab' do
project.create_external_wiki_service(active: true, properties: { 'external_wiki_url' => 'https://gitlab.com' })
+ project.reload
is_expected.to include(:external_wiki)
end
diff --git a/spec/helpers/services_helper_spec.rb b/spec/helpers/services_helper_spec.rb
index 650642f8982..534f33d9b5a 100644
--- a/spec/helpers/services_helper_spec.rb
+++ b/spec/helpers/services_helper_spec.rb
@@ -25,6 +25,10 @@ RSpec.describe ServicesHelper do
:integration_level
)
end
+
+ specify do
+ expect(subject[:reset_path]).to eq(helper.scoped_reset_integration_path(integration))
+ end
end
end
@@ -47,52 +51,12 @@ RSpec.describe ServicesHelper do
is_expected.to eq(reset_group_settings_integration_path(group, integration))
end
end
- end
-
- describe '#reset_integration?' do
- let(:group) { nil }
-
- subject { helper.reset_integration?(integration, group: group) }
-
- context 'when integration is existing record' do
- let_it_be(:integration) { create(:jira_service) }
-
- context 'when `reset_integrations` is not enabled' do
- it 'returns false' do
- stub_feature_flags(reset_integrations: false)
-
- is_expected.to eq(false)
- end
- end
- context 'when `reset_integrations` is enabled' do
- it 'returns true' do
- stub_feature_flags(reset_integrations: true)
-
- is_expected.to eq(true)
- end
- end
-
- context 'when `reset_integrations` is enabled for a group' do
- let(:group) { build_stubbed(:group) }
-
- it 'returns true' do
- stub_feature_flags(reset_integrations: group)
-
- is_expected.to eq(true)
- end
- end
- end
-
- context 'when integration is a new record' do
+ context 'when a new integration is not persisted' do
let_it_be(:integration) { build(:jira_service) }
- context 'when `reset_integrations` is enabled' do
- it 'returns false' do
- stub_feature_flags(reset_integrations: true)
-
- is_expected.to eq(false)
- end
+ it 'returns an empty string' do
+ is_expected.to eq('')
end
end
end
diff --git a/spec/helpers/tree_helper_spec.rb b/spec/helpers/tree_helper_spec.rb
index 136ec07e73d..6cb9894e306 100644
--- a/spec/helpers/tree_helper_spec.rb
+++ b/spec/helpers/tree_helper_spec.rb
@@ -330,9 +330,8 @@ RSpec.describe TreeHelper do
end
end
- context 'gitpod feature is enabled' do
+ context 'gitpod setting is enabled' do
before do
- stub_feature_flags(gitpod: true)
allow(Gitlab::CurrentSettings)
.to receive(:gitpod_enabled)
.and_return(true)
diff --git a/spec/helpers/users_helper_spec.rb b/spec/helpers/users_helper_spec.rb
index c92c6e6e78e..f0f09408249 100644
--- a/spec/helpers/users_helper_spec.rb
+++ b/spec/helpers/users_helper_spec.rb
@@ -337,10 +337,14 @@ RSpec.describe UsersHelper do
describe '#admin_users_data_attributes' do
subject(:data) { helper.admin_users_data_attributes([user]) }
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+ end
+
it 'users matches the serialized json' do
entity = double
expect_next_instance_of(Admin::UserSerializer) do |instance|
- expect(instance).to receive(:represent).with([user]).and_return(entity)
+ expect(instance).to receive(:represent).with([user], current_user: user).and_return(entity)
end
expect(entity).to receive(:to_json).and_return("{\"username\":\"admin\"}")
expect(data[:users]).to eq "{\"username\":\"admin\"}"
diff --git a/spec/helpers/visibility_level_helper_spec.rb b/spec/helpers/visibility_level_helper_spec.rb
index 86b0693af92..10e0815918f 100644
--- a/spec/helpers/visibility_level_helper_spec.rb
+++ b/spec/helpers/visibility_level_helper_spec.rb
@@ -35,29 +35,33 @@ RSpec.describe VisibilityLevelHelper do
describe 'visibility_level_description' do
context 'used with a Project' do
- it 'delegates projects to #project_visibility_level_description' do
- expect(visibility_level_description(Gitlab::VisibilityLevel::PRIVATE, project))
- .to match /project/i
+ let(:descriptions) do
+ [
+ visibility_level_description(Gitlab::VisibilityLevel::PRIVATE, project),
+ visibility_level_description(Gitlab::VisibilityLevel::INTERNAL, project),
+ visibility_level_description(Gitlab::VisibilityLevel::PUBLIC, project)
+ ]
end
- end
- context 'used with a Group' do
- it 'delegates groups to #group_visibility_level_description' do
- expect(visibility_level_description(Gitlab::VisibilityLevel::PRIVATE, group))
- .to match /group/i
+ it 'returns different project related descriptions depending on visibility level' do
+ expect(descriptions.uniq.size).to eq(descriptions.size)
+ expect(descriptions).to all match /project/i
end
end
- end
- describe "#project_visibility_level_description" do
- it "describes private projects" do
- expect(project_visibility_level_description(Gitlab::VisibilityLevel::PRIVATE))
- .to eq _('Project access must be granted explicitly to each user. If this project is part of a group, access will be granted to members of the group.')
- end
+ context 'used with a Group' do
+ let(:descriptions) do
+ [
+ visibility_level_description(Gitlab::VisibilityLevel::PRIVATE, group),
+ visibility_level_description(Gitlab::VisibilityLevel::INTERNAL, group),
+ visibility_level_description(Gitlab::VisibilityLevel::PUBLIC, group)
+ ]
+ end
- it "describes public projects" do
- expect(project_visibility_level_description(Gitlab::VisibilityLevel::PUBLIC))
- .to eq _('The project can be accessed without any authentication.')
+ it 'returns different group related descriptions depending on visibility level' do
+ expect(descriptions.uniq.size).to eq(descriptions.size)
+ expect(descriptions).to all match /group/i
+ end
end
end
diff --git a/spec/initializers/carrierwave_patch_spec.rb b/spec/initializers/carrierwave_patch_spec.rb
index c4a7bfa59c6..cbdad4aa9ac 100644
--- a/spec/initializers/carrierwave_patch_spec.rb
+++ b/spec/initializers/carrierwave_patch_spec.rb
@@ -6,38 +6,95 @@ RSpec.describe 'CarrierWave::Storage::Fog::File' do
let(:uploader_class) { Class.new(CarrierWave::Uploader::Base) }
let(:uploader) { uploader_class.new }
let(:storage) { CarrierWave::Storage::Fog.new(uploader) }
- let(:azure_options) do
- {
- azure_storage_account_name: 'AZURE_ACCOUNT_NAME',
- azure_storage_access_key: 'AZURE_ACCESS_KEY',
- provider: 'AzureRM'
- }
- end
+ let(:bucket_name) { 'some-bucket' }
+ let(:connection) { ::Fog::Storage.new(connection_options) }
+ let(:bucket) { connection.directories.new(key: bucket_name) }
+ let(:test_filename) { 'test' }
+ let(:test_data) { File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')) }
- subject { CarrierWave::Storage::Fog::File.new(uploader, storage, 'test') }
+ subject { CarrierWave::Storage::Fog::File.new(uploader, storage, test_filename) }
before do
require 'fog/azurerm'
- allow(uploader).to receive(:fog_credentials).and_return(azure_options)
- Fog.mock!
+ require 'fog/aws'
+
+ stub_object_storage(connection_params: connection_options, remote_directory: bucket_name)
+
+ allow(uploader).to receive(:fog_directory).and_return(bucket_name)
+ allow(uploader).to receive(:fog_credentials).and_return(connection_options)
+
+ bucket.files.create(key: test_filename, body: test_data) # rubocop:disable Rails/SaveBang
end
- describe '#authenticated_url' do
- context 'with Azure' do
+ context 'AWS' do
+ let(:connection_options) do
+ {
+ provider: 'AWS',
+ aws_access_key_id: 'AWS_ACCESS_KEY',
+ aws_secret_access_key: 'AWS_SECRET_KEY'
+ }
+ end
+
+ describe '#copy_to' do
+ let(:dest_filename) { 'copied.txt' }
+
+ it 'copies the file' do
+ fog_file = subject.send(:file)
+
+ expect(fog_file).to receive(:concurrency=).with(10).and_call_original
+ # multipart_chunk_size must be explicitly set in order to leverage
+ # multithreaded, multipart transfers for files below 5GB.
+ expect(fog_file).to receive(:multipart_chunk_size=).with(10.megabytes).and_call_original
+ expect(fog_file).to receive(:copy).with(bucket_name, dest_filename, anything).and_call_original
+
+ result = subject.copy_to(dest_filename)
+
+ expect(result.exists?).to be true
+ expect(result.read).to eq(test_data)
+
+ # Sanity check that the file actually is there
+ copied = bucket.files.get(dest_filename)
+ expect(copied).to be_present
+ expect(copied.body).to eq(test_data)
+ end
+ end
+ end
+
+ context 'Azure' do
+ let(:connection_options) do
+ {
+ provider: 'AzureRM',
+ azure_storage_account_name: 'AZURE_ACCOUNT_NAME',
+ azure_storage_access_key: 'AZURE_ACCESS_KEY'
+ }
+ end
+
+ describe '#copy_to' do
+ let(:dest_filename) { 'copied.txt' }
+
+ it 'copies the file' do
+ result = subject.copy_to(dest_filename)
+
+ # Fog Azure provider doesn't mock the actual copied data
+ expect(result.exists?).to be true
+ end
+ end
+
+ describe '#authenticated_url' do
it 'has an authenticated URL' do
expect(subject.authenticated_url).to eq("https://sa.blob.core.windows.net/test_container/test_blob?token")
end
- end
- context 'with custom expire_at' do
- it 'properly sets expires param' do
- expire_at = 24.hours.from_now
+ context 'with custom expire_at' do
+ it 'properly sets expires param' do
+ expire_at = 24.hours.from_now
- expect_next_instance_of(Fog::Storage::AzureRM::File) do |file|
- expect(file).to receive(:url).with(expire_at).and_call_original
- end
+ expect_next_instance_of(Fog::Storage::AzureRM::File) do |file|
+ expect(file).to receive(:url).with(expire_at).and_call_original
+ end
- expect(subject.authenticated_url(expire_at: expire_at)).to eq("https://sa.blob.core.windows.net/test_container/test_blob?token")
+ expect(subject.authenticated_url(expire_at: expire_at)).to eq("https://sa.blob.core.windows.net/test_container/test_blob?token")
+ end
end
end
end
diff --git a/spec/javascripts/fly_out_nav_browser_spec.js b/spec/javascripts/fly_out_nav_browser_spec.js
index f84cee72042..f92994594a9 100644
--- a/spec/javascripts/fly_out_nav_browser_spec.js
+++ b/spec/javascripts/fly_out_nav_browser_spec.js
@@ -157,7 +157,7 @@ describe('Fly out sidebar navigation', () => {
'<div class="sidebar-sub-level-items" style="position: absolute; top: 0; left: 100px; height: 200px;"></div>';
});
- it('shows sub-items after 0ms if no menu is open', done => {
+ it('shows sub-items after 0ms if no menu is open', (done) => {
mouseEnterTopItems(el);
expect(getHideSubItemsInterval()).toBe(0);
@@ -169,7 +169,7 @@ describe('Fly out sidebar navigation', () => {
});
});
- it('shows sub-items after 300ms if a menu is currently open', done => {
+ it('shows sub-items after 300ms if a menu is currently open', (done) => {
documentMouseMove({
clientX: el.getBoundingClientRect().left,
clientY: el.getBoundingClientRect().top,
@@ -255,8 +255,9 @@ describe('Fly out sidebar navigation', () => {
showSubLevelItems(el);
expect(subItems.style.transform).toBe(
- `translate3d(200px, ${Math.floor(el.getBoundingClientRect().top) -
- getHeaderHeight()}px, 0px)`,
+ `translate3d(200px, ${
+ Math.floor(el.getBoundingClientRect().top) - getHeaderHeight()
+ }px, 0px)`,
);
});
diff --git a/spec/javascripts/lib/utils/browser_spec.js b/spec/javascripts/lib/utils/browser_spec.js
index 6b1074a3b4f..f41fa2503b1 100644
--- a/spec/javascripts/lib/utils/browser_spec.js
+++ b/spec/javascripts/lib/utils/browser_spec.js
@@ -5,30 +5,8 @@
* https://gitlab.com/groups/gitlab-org/-/epics/895#what-if-theres-a-karma-spec-which-is-simply-unmovable-to-jest-ie-it-is-dependent-on-a-running-browser-environment
*/
-import MockAdapter from 'axios-mock-adapter';
import { GlBreakpointInstance as breakpointInstance } from '@gitlab/ui/dist/utils';
-import axios from '~/lib/utils/axios_utils';
import * as commonUtils from '~/lib/utils/common_utils';
-import { faviconDataUrl, overlayDataUrl, faviconWithOverlayDataUrl } from './mock_data';
-
-const PIXEL_TOLERANCE = 0.2;
-
-/**
- * Loads a data URL as the src of an
- * {@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement/Image|Image}
- * and resolves to that Image once loaded.
- *
- * @param url
- * @returns {Promise}
- */
-const urlToImage = url =>
- new Promise(resolve => {
- const img = new Image();
- img.onload = function() {
- resolve(img);
- };
- img.src = url;
- });
describe('common_utils browser specific specs', () => {
describe('contentTop', () => {
@@ -63,90 +41,6 @@ describe('common_utils browser specific specs', () => {
});
});
- describe('createOverlayIcon', () => {
- it('should return the favicon with the overlay', done => {
- commonUtils
- .createOverlayIcon(faviconDataUrl, overlayDataUrl)
- .then(url => Promise.all([urlToImage(url), urlToImage(faviconWithOverlayDataUrl)]))
- .then(([actual, expected]) => {
- expect(actual).toImageDiffEqual(expected, PIXEL_TOLERANCE);
- done();
- })
- .catch(done.fail);
- });
- });
-
- describe('setFaviconOverlay', () => {
- beforeEach(() => {
- const favicon = document.createElement('link');
- favicon.setAttribute('id', 'favicon');
- favicon.setAttribute('data-original-href', faviconDataUrl);
- document.body.appendChild(favicon);
- });
-
- afterEach(() => {
- document.body.removeChild(document.getElementById('favicon'));
- });
-
- it('should set page favicon to provided favicon overlay', done => {
- commonUtils
- .setFaviconOverlay(overlayDataUrl)
- .then(() => document.getElementById('favicon').getAttribute('href'))
- .then(url => Promise.all([urlToImage(url), urlToImage(faviconWithOverlayDataUrl)]))
- .then(([actual, expected]) => {
- expect(actual).toImageDiffEqual(expected, PIXEL_TOLERANCE);
- done();
- })
- .catch(done.fail);
- });
- });
-
- describe('setCiStatusFavicon', () => {
- const BUILD_URL = `${gl.TEST_HOST}/frontend-fixtures/builds-project/-/jobs/1/status.json`;
- let mock;
-
- beforeEach(() => {
- const favicon = document.createElement('link');
- favicon.setAttribute('id', 'favicon');
- favicon.setAttribute('href', 'null');
- favicon.setAttribute('data-original-href', faviconDataUrl);
- document.body.appendChild(favicon);
- mock = new MockAdapter(axios);
- });
-
- afterEach(() => {
- mock.restore();
- document.body.removeChild(document.getElementById('favicon'));
- });
-
- it('should reset favicon in case of error', done => {
- mock.onGet(BUILD_URL).replyOnce(500);
-
- commonUtils.setCiStatusFavicon(BUILD_URL).catch(() => {
- const favicon = document.getElementById('favicon');
-
- expect(favicon.getAttribute('href')).toEqual(faviconDataUrl);
- done();
- });
- });
-
- it('should set page favicon to CI status favicon based on provided status', done => {
- mock.onGet(BUILD_URL).reply(200, {
- favicon: overlayDataUrl,
- });
-
- commonUtils
- .setCiStatusFavicon(BUILD_URL)
- .then(() => document.getElementById('favicon').getAttribute('href'))
- .then(url => Promise.all([urlToImage(url), urlToImage(faviconWithOverlayDataUrl)]))
- .then(([actual, expected]) => {
- expect(actual).toImageDiffEqual(expected, PIXEL_TOLERANCE);
- done();
- })
- .catch(done.fail);
- });
- });
-
describe('isInViewport', () => {
let el;
diff --git a/spec/javascripts/matchers.js b/spec/javascripts/matchers.js
index 5b7b7dc78b1..ae005e152ed 100644
--- a/spec/javascripts/matchers.js
+++ b/spec/javascripts/matchers.js
@@ -2,7 +2,7 @@ import pixelmatch from 'pixelmatch';
export default {
toImageDiffEqual: () => {
- const getImageData = img => {
+ const getImageData = (img) => {
const canvas = document.createElement('canvas');
canvas.width = img.width;
canvas.height = img.height;
@@ -32,8 +32,9 @@ export default {
return {
pass: differentPixels < 20,
- message: `${differentPixels} pixels differ more than ${threshold *
- 100} percent between input and output.`,
+ message: `${differentPixels} pixels differ more than ${
+ threshold * 100
+ } percent between input and output.`,
};
},
};
diff --git a/spec/javascripts/test_bundle.js b/spec/javascripts/test_bundle.js
index b37a53515a6..59136de0b0d 100644
--- a/spec/javascripts/test_bundle.js
+++ b/spec/javascripts/test_bundle.js
@@ -30,7 +30,7 @@ Vue.config.warnHandler = (msg, vm, trace) => {
const currentStack = new Error().stack;
const isInVueTestUtils = currentStack
.split('\n')
- .some(line => line.startsWith(' at VueWrapper.setProps ('));
+ .some((line) => line.startsWith(' at VueWrapper.setProps ('));
if (isInVueTestUtils) {
return;
}
@@ -40,7 +40,7 @@ Vue.config.warnHandler = (msg, vm, trace) => {
};
let hasVueErrors = false;
-Vue.config.errorHandler = function(err) {
+Vue.config.errorHandler = function (err) {
hasVueErrors = true;
fail(err);
};
@@ -75,7 +75,7 @@ gon.relative_url_root = '';
let hasUnhandledPromiseRejections = false;
-window.addEventListener('unhandledrejection', event => {
+window.addEventListener('unhandledrejection', (event) => {
hasUnhandledPromiseRejections = true;
console.error('Unhandled promise rejection:');
console.error(event.reason.stack || event.reason);
@@ -87,11 +87,11 @@ window.addEventListener('unhandledrejection', event => {
// enough for the socket to continue to communicate.
// The downside is that it creates a minor performance penalty in the time it takes
// to run our unit tests.
-beforeEach(done => done());
+beforeEach((done) => done());
let longRunningTestTimeoutHandle;
-beforeEach(done => {
+beforeEach((done) => {
longRunningTestTimeoutHandle = setTimeout(() => {
done.fail('Test is running too long!');
}, 4000);
@@ -111,15 +111,15 @@ if (process.env.IS_EE) {
testContexts.push(require.context('ee_spec', true, /_spec$/));
}
-testContexts.forEach(context => {
- context.keys().forEach(path => {
+testContexts.forEach((context) => {
+ context.keys().forEach((path) => {
try {
context(path);
} catch (err) {
console.log(err);
console.error('[GL SPEC RUNNER ERROR] Unable to load spec: ', path);
- describe('Test bundle', function() {
- it(`includes '${path}'`, function() {
+ describe('Test bundle', function () {
+ it(`includes '${path}'`, function () {
expect(err).toBeNull();
});
});
@@ -128,7 +128,7 @@ testContexts.forEach(context => {
});
describe('test errors', () => {
- beforeAll(done => {
+ beforeAll((done) => {
if (hasUnhandledPromiseRejections || hasVueWarnings || hasVueErrors) {
setTimeout(done, 1000);
} else {
diff --git a/spec/javascripts/test_constants.js b/spec/javascripts/test_constants.js
index 51c0716b99d..de7b3a0e80c 100644
--- a/spec/javascripts/test_constants.js
+++ b/spec/javascripts/test_constants.js
@@ -1 +1 @@
-export * from '../frontend/helpers/test_constants';
+export * from '../frontend/__helpers__/test_constants';
diff --git a/spec/lib/api/entities/snippet_repository_storage_move_spec.rb b/spec/lib/api/entities/snippet_repository_storage_move_spec.rb
new file mode 100644
index 00000000000..8086be3ffa7
--- /dev/null
+++ b/spec/lib/api/entities/snippet_repository_storage_move_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::SnippetRepositoryStorageMove do
+ describe '#as_json' do
+ subject { entity.as_json }
+
+ let(:default_storage) { 'default' }
+ let(:second_storage) { 'test_second_storage' }
+ let(:storage_move) { create(:snippet_repository_storage_move, :scheduled, destination_storage_name: second_storage) }
+ let(:entity) { described_class.new(storage_move) }
+
+ it 'includes basic fields' do
+ allow(Gitlab.config.repositories.storages).to receive(:keys).and_return(%W[#{default_storage} #{second_storage}])
+
+ is_expected.to include(
+ state: 'scheduled',
+ source_storage_name: default_storage,
+ destination_storage_name: second_storage,
+ snippet: a_kind_of(Hash)
+ )
+ end
+ end
+end
diff --git a/spec/lib/api/helpers/authentication_spec.rb b/spec/lib/api/helpers/authentication_spec.rb
new file mode 100644
index 00000000000..461b0d2f6f9
--- /dev/null
+++ b/spec/lib/api/helpers/authentication_spec.rb
@@ -0,0 +1,207 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Helpers::Authentication do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project, :public) }
+ let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
+ let_it_be(:deploy_token) { create(:deploy_token, read_package_registry: true, write_package_registry: true) }
+
+ describe 'class methods' do
+ subject { Class.new.include(described_class::ClassMethods).new }
+
+ describe '.authenticate_with' do
+ it 'sets namespace_inheritable :authentication correctly when body is empty' do
+ expect(subject).to receive(:namespace_inheritable).with(:authentication, {})
+
+ subject.authenticate_with { |allow| }
+ end
+
+ it 'sets namespace_inheritable :authentication correctly when body is not empty' do
+ expect(subject).to receive(:namespace_inheritable).with(:authentication, { basic: [:pat, :job], oauth: [:pat, :job] })
+
+ subject.authenticate_with { |allow| allow.token_type(:pat, :job).sent_through(:basic, :oauth) }
+ end
+ end
+ end
+
+ describe 'helper methods' do
+ let(:object) do
+ cls = Class.new
+
+ class << cls
+ def helpers(*modules, &block)
+ modules.each { |m| include m }
+ include Module.new.tap { |m| m.class_eval(&block) } if block_given?
+ end
+ end
+
+ cls.define_method(:unauthorized!) { raise '401' }
+ cls.define_method(:bad_request!) { |m| raise "400 - #{m}" }
+
+ # Include the helper class methods, as instance methods
+ cls.include described_class::ClassMethods
+
+ # Include the methods under test
+ cls.include described_class
+
+ cls.new
+ end
+
+ describe '#token_from_namespace_inheritable' do
+ let(:object) do
+ o = super()
+
+ o.instance_eval do
+ # It doesn't matter what this returns as long as the method is defined
+ def current_request
+ nil
+ end
+
+ # Spoof Grape's namespace inheritable system
+ def namespace_inheritable(key, value = nil)
+ return unless key == :authentication
+
+ if value
+ @authentication = value
+ else
+ @authentication
+ end
+ end
+ end
+
+ o
+ end
+
+ let(:authentication) do
+ object.authenticate_with { |allow| allow.token_types(*resolvers).sent_through(*locators) }
+ end
+
+ subject { object.token_from_namespace_inheritable }
+
+ before do
+ # Skip validation of token transports and types to simplify testing
+ allow(Gitlab::APIAuthentication::TokenLocator).to receive(:new) { |type| type }
+ allow(Gitlab::APIAuthentication::TokenResolver).to receive(:new) { |type| type }
+
+ authentication
+ end
+
+ shared_examples 'stops early' do |response_method|
+ it "calls ##{response_method}" do
+ errcls = Class.new(StandardError)
+ expect(object).to receive(response_method).and_raise(errcls)
+ expect { subject }.to raise_error(errcls)
+ end
+ end
+
+ shared_examples 'an anonymous request' do
+ it 'returns nil' do
+ expect(subject).to be(nil)
+ end
+ end
+
+ shared_examples 'an authenticated request' do
+ it 'returns the token' do
+ expect(subject).to be(token)
+ end
+ end
+
+ shared_examples 'an unauthorized request' do
+ it_behaves_like 'stops early', :unauthorized!
+ end
+
+ context 'with no allowed authentication strategies' do
+ let(:authentication) { nil }
+
+ it_behaves_like 'an anonymous request'
+ end
+
+ context 'with no located credentials' do
+ let(:locators) { [double(extract: nil)] }
+ let(:resolvers) { [] }
+
+ it_behaves_like 'an anonymous request'
+ end
+
+ context 'with one set of located credentials' do
+ let(:locators) { [double(extract: true)] }
+
+ context 'when the credentials contain a valid token' do
+ let(:token) { double }
+ let(:resolvers) { [double(resolve: token)] }
+
+ it_behaves_like 'an authenticated request'
+ end
+
+ context 'when the credentials do not contain a valid token' do
+ let(:resolvers) { [double(resolve: nil)] }
+
+ it_behaves_like 'an unauthorized request'
+ end
+ end
+
+ context 'with multiple located credentials' do
+ let(:locators) { [double(extract: true), double(extract: true)] }
+ let(:resolvers) { [] }
+
+ it_behaves_like 'stops early', :bad_request!
+ end
+
+ context 'when a resolver raises UnauthorizedError' do
+ let(:locators) { [double(extract: true)] }
+ let(:resolvers) do
+ r = double
+ expect(r).to receive(:resolve).and_raise(Gitlab::Auth::UnauthorizedError)
+ r
+ end
+
+ it_behaves_like 'an unauthorized request'
+ end
+ end
+
+ describe '#access_token_from_namespace_inheritable' do
+ subject { object.access_token_from_namespace_inheritable }
+
+ it 'returns #token_from_namespace_inheritable if it is a personal access token' do
+ expect(object).to receive(:token_from_namespace_inheritable).and_return(personal_access_token)
+ expect(subject).to be(personal_access_token)
+ end
+
+ it 'returns nil if #token_from_namespace_inheritable is not a personal access token' do
+ token = double
+ expect(object).to receive(:token_from_namespace_inheritable).and_return(token)
+ expect(subject).to be(nil)
+ end
+ end
+
+ describe '#user_from_namespace_inheritable' do
+ subject { object.user_from_namespace_inheritable }
+
+ it 'returns #token_from_namespace_inheritable if it is a deploy token' do
+ expect(object).to receive(:token_from_namespace_inheritable).and_return(deploy_token)
+ expect(subject).to be(deploy_token)
+ end
+
+ it 'returns #token_from_namespace_inheritable.user if the token is not a deploy token' do
+ user = double
+ token = double(user: user)
+ expect(object).to receive(:token_from_namespace_inheritable).and_return(token)
+
+ expect(subject).to be(user)
+ end
+
+ it 'falls back to #find_user_from_warden if #token_from_namespace_inheritable.user is nil' do
+ token = double(user: nil)
+ expect(object).to receive(:token_from_namespace_inheritable).and_return(token)
+ subject
+ end
+
+ it 'falls back to #find_user_from_warden if #token_from_namespace_inheritable is nil' do
+ expect(object).to receive(:token_from_namespace_inheritable).and_return(nil)
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
index be5f0cc9f9a..bdf04fafaae 100644
--- a/spec/lib/api/helpers_spec.rb
+++ b/spec/lib/api/helpers_spec.rb
@@ -205,7 +205,7 @@ RSpec.describe API::Helpers do
end
it 'tracks redis hll event' do
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(value, event_name)
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(event_name, values: value)
subject.increment_unique_values(event_name, value)
end
@@ -363,4 +363,49 @@ RSpec.describe API::Helpers do
end
end
end
+
+ describe '#present_disk_file!' do
+ let_it_be(:dummy_class) do
+ Class.new do
+ attr_reader :headers
+ alias_method :header, :headers
+
+ def initialize
+ @headers = {}
+ end
+ end
+ end
+
+ let(:dummy_instance) { dummy_class.include(described_class).new }
+ let(:path) { '/tmp/file.txt' }
+ let(:filename) { 'file.txt' }
+
+ subject { dummy_instance.present_disk_file!(path, filename) }
+
+ before do
+ expect(dummy_instance).to receive(:content_type).with('application/octet-stream')
+ end
+
+ context 'with X-Sendfile supported' do
+ before do
+ dummy_instance.headers['X-Sendfile-Type'] = 'X-Sendfile'
+ end
+
+ it 'sends the file using X-Sendfile' do
+ expect(dummy_instance).to receive(:body).with('')
+
+ subject
+
+ expect(dummy_instance.headers['X-Sendfile']).to eq(path)
+ end
+ end
+
+ context 'without X-Sendfile supported' do
+ it 'sends the file' do
+ expect(dummy_instance).to receive(:sendfile).with(path)
+
+ subject
+ end
+ end
+ end
end
diff --git a/spec/lib/atlassian/jira_connect/client_spec.rb b/spec/lib/atlassian/jira_connect/client_spec.rb
index 6a161854dfb..21ee40f22fe 100644
--- a/spec/lib/atlassian/jira_connect/client_spec.rb
+++ b/spec/lib/atlassian/jira_connect/client_spec.rb
@@ -8,6 +8,15 @@ RSpec.describe Atlassian::JiraConnect::Client do
subject { described_class.new('https://gitlab-test.atlassian.net', 'sample_secret') }
let_it_be(:project) { create_default(:project, :repository) }
+ let_it_be(:mrs_by_title) { create_list(:merge_request, 4, :unique_branches, :jira_title) }
+ let_it_be(:mrs_by_branch) { create_list(:merge_request, 2, :jira_branch) }
+ let_it_be(:red_herrings) { create_list(:merge_request, 1, :unique_branches) }
+
+ let_it_be(:pipelines) do
+ (red_herrings + mrs_by_branch + mrs_by_title).map do |mr|
+ create(:ci_pipeline, merge_request: mr)
+ end
+ end
around do |example|
freeze_time { example.run }
@@ -22,13 +31,25 @@ RSpec.describe Atlassian::JiraConnect::Client do
end
describe '#send_info' do
- it 'calls store_build_info and store_dev_info as appropriate' do
+ it 'calls more specific methods as appropriate' do
+ expect(subject).to receive(:store_ff_info).with(
+ project: project,
+ update_sequence_id: :x,
+ feature_flags: :r
+ ).and_return(:ff_stored)
+
expect(subject).to receive(:store_build_info).with(
project: project,
update_sequence_id: :x,
pipelines: :y
).and_return(:build_stored)
+ expect(subject).to receive(:store_deploy_info).with(
+ project: project,
+ update_sequence_id: :x,
+ deployments: :q
+ ).and_return(:deploys_stored)
+
expect(subject).to receive(:store_dev_info).with(
project: project,
update_sequence_id: :x,
@@ -43,10 +64,13 @@ RSpec.describe Atlassian::JiraConnect::Client do
commits: :a,
branches: :b,
merge_requests: :c,
- pipelines: :y
+ pipelines: :y,
+ deployments: :q,
+ feature_flags: :r
}
- expect(subject.send_info(**args)).to contain_exactly(:dev_stored, :build_stored)
+ expect(subject.send_info(**args))
+ .to contain_exactly(:dev_stored, :build_stored, :deploys_stored, :ff_stored)
end
it 'only calls methods that we need to call' do
@@ -83,31 +107,263 @@ RSpec.describe Atlassian::JiraConnect::Client do
}
end
- describe '#store_build_info' do
- let_it_be(:mrs_by_title) { create_list(:merge_request, 4, :unique_branches, :jira_title) }
- let_it_be(:mrs_by_branch) { create_list(:merge_request, 2, :jira_branch) }
- let_it_be(:red_herrings) { create_list(:merge_request, 1, :unique_branches) }
+ describe '#handle_response' do
+ let(:errors) { [{ 'message' => 'X' }, { 'message' => 'Y' }] }
+ let(:processed) { subject.send(:handle_response, response, 'foo') { |x| [:data, x] } }
+
+ context 'the response is 200 OK' do
+ let(:response) { double(code: 200, parsed_response: :foo) }
+
+ it 'yields to the block' do
+ expect(processed).to eq [:data, :foo]
+ end
+ end
+
+ context 'the response is 400 bad request' do
+ let(:response) { double(code: 400, parsed_response: errors) }
+
+ it 'extracts the error messages' do
+ expect(processed).to eq('errorMessages' => %w(X Y))
+ end
+ end
+
+ context 'the response is 401 unauthorized' do
+ let(:response) { double(code: 401, parsed_response: nil) }
+
+ it 'reports that our JWT is wrong' do
+ expect(processed).to eq('errorMessages' => ['Invalid JWT'])
+ end
+ end
+
+ context 'the response is 403' do
+ let(:response) { double(code: 403, parsed_response: nil) }
+
+ it 'reports that the App is misconfigured' do
+ expect(processed).to eq('errorMessages' => ['App does not support foo'])
+ end
+ end
+
+ context 'the response is 413' do
+ let(:response) { double(code: 413, parsed_response: errors) }
+
+ it 'extracts the error messages' do
+ expect(processed).to eq('errorMessages' => ['Data too large', 'X', 'Y'])
+ end
+ end
+
+ context 'the response is 429' do
+ let(:response) { double(code: 429, parsed_response: nil) }
+
+ it 'reports that we exceeded the rate limit' do
+ expect(processed).to eq('errorMessages' => ['Rate limit exceeded'])
+ end
+ end
+
+ context 'the response is 503' do
+ let(:response) { double(code: 503, parsed_response: nil) }
- let_it_be(:pipelines) do
- (red_herrings + mrs_by_branch + mrs_by_title).map do |mr|
- create(:ci_pipeline, merge_request: mr)
+ it 'reports that the service is unavailable' do
+ expect(processed).to eq('errorMessages' => ['Service unavailable'])
end
end
+ context 'the response is anything else' do
+ let(:response) { double(code: 1000, parsed_response: :something) }
+
+ it 'reports that this was unanticipated' do
+ expect(processed).to eq('errorMessages' => ['Unknown error'], 'response' => :something)
+ end
+ end
+ end
+
+ describe '#store_deploy_info' do
+ let_it_be(:environment) { create(:environment, name: 'DEV', project: project) }
+ let_it_be(:deployments) do
+ pipelines.map do |p|
+ build = create(:ci_build, environment: environment.name, pipeline: p, project: project)
+ create(:deployment, deployable: build, environment: environment)
+ end
+ end
+
+ let(:schema) do
+ Atlassian::Schemata.deploy_info_payload
+ end
+
+ let(:body) do
+ matcher = be_valid_json.and match_schema(schema)
+
+ ->(text) { matcher.matches?(text) }
+ end
+
+ let(:rejections) { [] }
+ let(:response_body) do
+ {
+ acceptedDeployments: [],
+ rejectedDeployments: rejections,
+ unknownIssueKeys: []
+ }.to_json
+ end
+
+ before do
+ path = '/rest/deployments/0.1/bulk'
+ stub_full_request('https://gitlab-test.atlassian.net' + path, method: :post)
+ .with(body: body, headers: expected_headers(path))
+ .to_return(body: response_body, headers: { 'Content-Type': 'application/json' })
+ end
+
+ it "calls the API with auth headers" do
+ subject.send(:store_deploy_info, project: project, deployments: deployments)
+ end
+
+ it 'only sends information about relevant deployments' do
+ expect(subject).to receive(:post).with('/rest/deployments/0.1/bulk', { deployments: have_attributes(size: 6) }).and_call_original
+
+ subject.send(:store_deploy_info, project: project, deployments: deployments)
+ end
+
+ it 'does not call the API if there is nothing to report' do
+ expect(subject).not_to receive(:post)
+
+ subject.send(:store_deploy_info, project: project, deployments: deployments.take(1))
+ end
+
+ context 'there are errors' do
+ let(:rejections) do
+ [{ errors: [{ message: 'X' }, { message: 'Y' }] }, { errors: [{ message: 'Z' }] }]
+ end
+
+ it 'reports the errors' do
+ response = subject.send(:store_deploy_info, project: project, deployments: deployments)
+
+ expect(response['errorMessages']).to eq(%w(X Y Z))
+ end
+ end
+
+ it 'does not call the API if the feature flag is not enabled' do
+ stub_feature_flags(jira_sync_deployments: false)
+
+ expect(subject).not_to receive(:post)
+
+ subject.send(:store_deploy_info, project: project, deployments: deployments)
+ end
+
+ it 'does call the API if the feature flag is enabled for the project' do
+ stub_feature_flags(jira_sync_deployments: project)
+
+ expect(subject).to receive(:post).with('/rest/deployments/0.1/bulk', { deployments: Array }).and_call_original
+
+ subject.send(:store_deploy_info, project: project, deployments: deployments)
+ end
+ end
+
+ describe '#store_ff_info' do
+ let_it_be(:feature_flags) { create_list(:operations_feature_flag, 3, project: project) }
+
+ let(:schema) do
+ Atlassian::Schemata.ff_info_payload
+ end
+
+ let(:body) do
+ matcher = be_valid_json.and match_schema(schema)
+
+ ->(text) { matcher.matches?(text) }
+ end
+
+ let(:failures) { {} }
+ let(:response_body) do
+ {
+ acceptedFeatureFlags: [],
+ failedFeatureFlags: failures,
+ unknownIssueKeys: []
+ }.to_json
+ end
+
+ before do
+ feature_flags.first.update!(description: 'RELEVANT-123')
+ feature_flags.second.update!(description: 'RELEVANT-123')
+ path = '/rest/featureflags/0.1/bulk'
+ stub_full_request('https://gitlab-test.atlassian.net' + path, method: :post)
+ .with(body: body, headers: expected_headers(path))
+ .to_return(body: response_body, headers: { 'Content-Type': 'application/json' })
+ end
+
+ it "calls the API with auth headers" do
+ subject.send(:store_ff_info, project: project, feature_flags: feature_flags)
+ end
+
+ it 'only sends information about relevant feature flags' do
+ expect(subject).to receive(:post).with('/rest/featureflags/0.1/bulk', {
+ flags: have_attributes(size: 2), properties: Hash
+ }).and_call_original
+
+ subject.send(:store_ff_info, project: project, feature_flags: feature_flags)
+ end
+
+ it 'does not call the API if there is nothing to report' do
+ expect(subject).not_to receive(:post)
+
+ subject.send(:store_ff_info, project: project, feature_flags: [feature_flags.last])
+ end
+
+ context 'there are errors' do
+ let(:failures) do
+ {
+ a: [{ message: 'X' }, { message: 'Y' }],
+ b: [{ message: 'Z' }]
+ }
+ end
+
+ it 'reports the errors' do
+ response = subject.send(:store_ff_info, project: project, feature_flags: feature_flags)
+
+ expect(response['errorMessages']).to eq(['a: X', 'a: Y', 'b: Z'])
+ end
+ end
+
+ it 'does not call the API if the feature flag is not enabled' do
+ stub_feature_flags(jira_sync_feature_flags: false)
+
+ expect(subject).not_to receive(:post)
+
+ subject.send(:store_ff_info, project: project, feature_flags: feature_flags)
+ end
+
+ it 'does call the API if the feature flag is enabled for the project' do
+ stub_feature_flags(jira_sync_feature_flags: project)
+
+ expect(subject).to receive(:post).with('/rest/featureflags/0.1/bulk', {
+ flags: Array, properties: Hash
+ }).and_call_original
+
+ subject.send(:store_ff_info, project: project, feature_flags: feature_flags)
+ end
+ end
+
+ describe '#store_build_info' do
let(:build_info_payload_schema) do
Atlassian::Schemata.build_info_payload
end
let(:body) do
- matcher = be_valid_json.according_to_schema(build_info_payload_schema)
+ matcher = be_valid_json.and match_schema(build_info_payload_schema)
->(text) { matcher.matches?(text) }
end
+ let(:failures) { [] }
+ let(:response_body) do
+ {
+ acceptedBuilds: [],
+ rejectedBuilds: failures,
+ unknownIssueKeys: []
+ }.to_json
+ end
+
before do
path = '/rest/builds/0.1/bulk'
stub_full_request('https://gitlab-test.atlassian.net' + path, method: :post)
.with(body: body, headers: expected_headers(path))
+ .to_return(body: response_body, headers: { 'Content-Type': 'application/json' })
end
it "calls the API with auth headers" do
@@ -115,7 +371,9 @@ RSpec.describe Atlassian::JiraConnect::Client do
end
it 'only sends information about relevant MRs' do
- expect(subject).to receive(:post).with('/rest/builds/0.1/bulk', { builds: have_attributes(size: 6) })
+ expect(subject).to receive(:post)
+ .with('/rest/builds/0.1/bulk', { builds: have_attributes(size: 6) })
+ .and_call_original
subject.send(:store_build_info, project: project, pipelines: pipelines)
end
@@ -137,12 +395,28 @@ RSpec.describe Atlassian::JiraConnect::Client do
it 'does call the API if the feature flag enabled for the project' do
stub_feature_flags(jira_sync_builds: project)
- expect(subject).to receive(:post).with('/rest/builds/0.1/bulk', { builds: Array })
+ expect(subject).to receive(:post)
+ .with('/rest/builds/0.1/bulk', { builds: Array })
+ .and_call_original
subject.send(:store_build_info, project: project, pipelines: pipelines)
end
+ context 'there are errors' do
+ let(:failures) do
+ [{ errors: [{ message: 'X' }, { message: 'Y' }] }, { errors: [{ message: 'Z' }] }]
+ end
+
+ it 'reports the errors' do
+ response = subject.send(:store_build_info, project: project, pipelines: pipelines)
+
+ expect(response['errorMessages']).to eq(%w(X Y Z))
+ end
+ end
+
it 'avoids N+1 database queries' do
+ pending 'https://gitlab.com/gitlab-org/gitlab/-/issues/292818'
+
baseline = ActiveRecord::QueryRecorder.new do
subject.send(:store_build_info, project: project, pipelines: pipelines)
end
diff --git a/spec/lib/atlassian/jira_connect/serializers/build_entity_spec.rb b/spec/lib/atlassian/jira_connect/serializers/build_entity_spec.rb
index 52e475d20ca..4bbd654655d 100644
--- a/spec/lib/atlassian/jira_connect/serializers/build_entity_spec.rb
+++ b/spec/lib/atlassian/jira_connect/serializers/build_entity_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe Atlassian::JiraConnect::Serializers::BuildEntity do
end
it 'is invalid, since it has no issue keys' do
- expect(subject.to_json).not_to be_valid_json.according_to_schema(Atlassian::Schemata.build_info)
+ expect(subject.to_json).not_to match_schema(Atlassian::Schemata.build_info)
end
end
end
@@ -43,7 +43,7 @@ RSpec.describe Atlassian::JiraConnect::Serializers::BuildEntity do
describe '#to_json' do
it 'is valid according to the build info schema' do
- expect(subject.to_json).to be_valid_json.according_to_schema(Atlassian::Schemata.build_info)
+ expect(subject.to_json).to be_valid_json.and match_schema(Atlassian::Schemata.build_info)
end
end
end
diff --git a/spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb b/spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb
new file mode 100644
index 00000000000..82bcbdc4561
--- /dev/null
+++ b/spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Atlassian::JiraConnect::Serializers::DeploymentEntity do
+ let_it_be(:user) { create_default(:user) }
+ let_it_be(:project) { create_default(:project, :repository) }
+ let_it_be(:environment) { create(:environment, name: 'prod', project: project) }
+ let_it_be_with_reload(:deployment) { create(:deployment, environment: environment) }
+
+ subject { described_class.represent(deployment) }
+
+ context 'when the deployment does not belong to any Jira issue' do
+ describe '#issue_keys' do
+ it 'is empty' do
+ expect(subject.issue_keys).to be_empty
+ end
+ end
+
+ describe '#to_json' do
+ it 'can encode the object' do
+ expect(subject.to_json).to be_valid_json
+ end
+
+ it 'is invalid, since it has no issue keys' do
+ expect(subject.to_json).not_to match_schema(Atlassian::Schemata.deployment_info)
+ end
+ end
+ end
+
+ context 'this is an external deployment' do
+ before do
+ deployment.update!(deployable: nil)
+ end
+
+ it 'does not raise errors when serializing' do
+ expect { subject.to_json }.not_to raise_error
+ end
+
+ it 'returns an empty list of issue keys' do
+ expect(subject.issue_keys).to be_empty
+ end
+ end
+
+ describe 'environment type' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:env_name, :env_type) do
+ 'prod' | 'production'
+ 'test' | 'testing'
+ 'staging' | 'staging'
+ 'dev' | 'development'
+ 'review/app' | 'development'
+ 'something-else' | 'unmapped'
+ end
+
+ with_them do
+ before do
+ environment.update!(name: env_name)
+ end
+
+ let(:exposed_type) { subject.send(:environment_entity).send(:type) }
+
+ it 'has the correct environment type' do
+ expect(exposed_type).to eq(env_type)
+ end
+ end
+ end
+
+ context 'when the deployment can be linked to a Jira issue' do
+ let(:pipeline) { create(:ci_pipeline, merge_request: merge_request) }
+
+ before do
+ subject.deployable.update!(pipeline: pipeline)
+ end
+
+ %i[jira_branch jira_title].each do |trait|
+ context "because it belongs to an MR with a #{trait}" do
+ let(:merge_request) { create(:merge_request, trait) }
+
+ describe '#issue_keys' do
+ it 'is not empty' do
+ expect(subject.issue_keys).not_to be_empty
+ end
+ end
+
+ describe '#to_json' do
+ it 'is valid according to the deployment info schema' do
+ expect(subject.to_json).to be_valid_json.and match_schema(Atlassian::Schemata.deployment_info)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/atlassian/jira_connect/serializers/feature_flag_entity_spec.rb b/spec/lib/atlassian/jira_connect/serializers/feature_flag_entity_spec.rb
new file mode 100644
index 00000000000..964801338cf
--- /dev/null
+++ b/spec/lib/atlassian/jira_connect/serializers/feature_flag_entity_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Atlassian::JiraConnect::Serializers::FeatureFlagEntity do
+ let_it_be(:user) { create_default(:user) }
+ let_it_be(:project) { create_default(:project) }
+
+ subject { described_class.represent(feature_flag) }
+
+ context 'when the feature flag does not belong to any Jira issue' do
+ let_it_be(:feature_flag) { create(:operations_feature_flag) }
+
+ describe '#issue_keys' do
+ it 'is empty' do
+ expect(subject.issue_keys).to be_empty
+ end
+ end
+
+ describe '#to_json' do
+ it 'can encode the object' do
+ expect(subject.to_json).to be_valid_json
+ end
+
+ it 'is invalid, since it has no issue keys' do
+ expect(subject.to_json).not_to match_schema(Atlassian::Schemata.feature_flag_info)
+ end
+ end
+ end
+
+ context 'when the feature flag does belong to a Jira issue' do
+ let(:feature_flag) do
+ create(:operations_feature_flag, description: 'THING-123')
+ end
+
+ describe '#issue_keys' do
+ it 'is not empty' do
+ expect(subject.issue_keys).not_to be_empty
+ end
+ end
+
+ describe '#to_json' do
+ it 'is valid according to the feature flag info schema' do
+ expect(subject.to_json).to be_valid_json.and match_schema(Atlassian::Schemata.feature_flag_info)
+ end
+ end
+
+ context 'it has a percentage strategy' do
+ let!(:scopes) do
+ strat = create(:operations_strategy,
+ feature_flag: feature_flag,
+ name: ::Operations::FeatureFlags::Strategy::STRATEGY_GRADUALROLLOUTUSERID,
+ parameters: { 'percentage' => '50', 'groupId' => 'abcde' })
+
+ [
+ create(:operations_scope, strategy: strat, environment_scope: 'production in live'),
+ create(:operations_scope, strategy: strat, environment_scope: 'staging'),
+ create(:operations_scope, strategy: strat)
+ ]
+ end
+
+ let(:entity) { Gitlab::Json.parse(subject.to_json) }
+
+ it 'is valid according to the feature flag info schema' do
+ expect(subject.to_json).to be_valid_json.and match_schema(Atlassian::Schemata.feature_flag_info)
+ end
+
+ it 'has the correct summary' do
+ expect(entity.dig('summary', 'status')).to eq(
+ 'enabled' => true,
+ 'defaultValue' => '',
+ 'rollout' => { 'percentage' => 50.0, 'text' => 'Percent of users' }
+ )
+ end
+
+ it 'includes the correct environments' do
+ expect(entity['details']).to contain_exactly(
+ include('environment' => { 'name' => 'production in live', 'type' => 'production' }),
+ include('environment' => { 'name' => 'staging', 'type' => 'staging' }),
+ include('environment' => { 'name' => scopes.last.environment_scope })
+ )
+ end
+ end
+ end
+end
diff --git a/spec/lib/banzai/filter/asset_proxy_filter_spec.rb b/spec/lib/banzai/filter/asset_proxy_filter_spec.rb
index 2a4ee28130b..1f886059bf6 100644
--- a/spec/lib/banzai/filter/asset_proxy_filter_spec.rb
+++ b/spec/lib/banzai/filter/asset_proxy_filter_spec.rb
@@ -35,8 +35,8 @@ RSpec.describe Banzai::Filter::AssetProxyFilter do
expect(Gitlab.config.asset_proxy.enabled).to be_truthy
expect(Gitlab.config.asset_proxy.secret_key).to eq 'shared-secret'
expect(Gitlab.config.asset_proxy.url).to eq 'https://assets.example.com'
- expect(Gitlab.config.asset_proxy.whitelist).to eq %w(gitlab.com *.mydomain.com)
- expect(Gitlab.config.asset_proxy.domain_regexp).to eq /^(gitlab\.com|.*?\.mydomain\.com)$/i
+ expect(Gitlab.config.asset_proxy.allowlist).to eq %w(gitlab.com *.mydomain.com)
+ expect(Gitlab.config.asset_proxy.domain_regexp).to eq(/^(gitlab\.com|.*?\.mydomain\.com)$/i)
end
context 'when whitelist is empty' do
@@ -46,7 +46,7 @@ RSpec.describe Banzai::Filter::AssetProxyFilter do
described_class.initialize_settings
- expect(Gitlab.config.asset_proxy.whitelist).to eq [Gitlab.config.gitlab.host]
+ expect(Gitlab.config.asset_proxy.allowlist).to eq [Gitlab.config.gitlab.host]
end
end
end
@@ -56,8 +56,8 @@ RSpec.describe Banzai::Filter::AssetProxyFilter do
stub_asset_proxy_setting(enabled: true)
stub_asset_proxy_setting(secret_key: 'shared-secret')
stub_asset_proxy_setting(url: 'https://assets.example.com')
- stub_asset_proxy_setting(whitelist: %W(gitlab.com *.mydomain.com #{Gitlab.config.gitlab.host}))
- stub_asset_proxy_setting(domain_regexp: described_class.compile_whitelist(Gitlab.config.asset_proxy.whitelist))
+ stub_asset_proxy_setting(allowlist: %W(gitlab.com *.mydomain.com #{Gitlab.config.gitlab.host}))
+ stub_asset_proxy_setting(domain_regexp: described_class.compile_allowlist(Gitlab.config.asset_proxy.allowlist))
@context = described_class.transform_context({})
end
diff --git a/spec/lib/banzai/filter/broadcast_message_sanitization_filter_spec.rb b/spec/lib/banzai/filter/broadcast_message_sanitization_filter_spec.rb
index 1f65268bd3c..67b480f8973 100644
--- a/spec/lib/banzai/filter/broadcast_message_sanitization_filter_spec.rb
+++ b/spec/lib/banzai/filter/broadcast_message_sanitization_filter_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
RSpec.describe Banzai::Filter::BroadcastMessageSanitizationFilter do
include FilterSpecHelper
- it_behaves_like 'default whitelist'
+ it_behaves_like 'default allowlist'
- describe 'custom whitelist' do
+ describe 'custom allowlist' do
it_behaves_like 'XSS prevention'
it_behaves_like 'sanitize link'
@@ -26,19 +26,19 @@ RSpec.describe Banzai::Filter::BroadcastMessageSanitizationFilter do
end
context 'when `a` elements have `style` attribute' do
- let(:whitelisted_style) { 'color: red; border: blue; background: green; padding: 10px; margin: 10px; text-decoration: underline;' }
+ let(:allowed_style) { 'color: red; border: blue; background: green; padding: 10px; margin: 10px; text-decoration: underline;' }
context 'allows specific properties' do
- let(:exp) { %{<a href="#" style="#{whitelisted_style}">Stylish Link</a>} }
+ let(:exp) { %{<a href="#" style="#{allowed_style}">Stylish Link</a>} }
it { is_expected.to eq(exp) }
end
it 'disallows other properties in `style` attribute on `a` elements' do
- style = [whitelisted_style, 'position: fixed'].join(';')
+ style = [allowed_style, 'position: fixed'].join(';')
doc = filter(%{<a href="#" style="#{style}">Stylish Link</a>})
- expect(doc.at_css('a')['style']).to eq(whitelisted_style)
+ expect(doc.at_css('a')['style']).to eq(allowed_style)
end
end
diff --git a/spec/lib/banzai/filter/reference_redactor_filter_spec.rb b/spec/lib/banzai/filter/reference_redactor_filter_spec.rb
index ac1cabb34cc..d0336e9e059 100644
--- a/spec/lib/banzai/filter/reference_redactor_filter_spec.rb
+++ b/spec/lib/banzai/filter/reference_redactor_filter_spec.rb
@@ -143,15 +143,32 @@ RSpec.describe Banzai::Filter::ReferenceRedactorFilter do
expect(doc.css('a').length).to eq 1
end
- it 'allows references for admin' do
- admin = create(:admin)
- project = create(:project, :public)
- issue = create(:issue, :confidential, project: project)
- link = reference_link(project: project.id, issue: issue.id, reference_type: 'issue')
+ context 'for admin' do
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'allows references' do
+ admin = create(:admin)
+ project = create(:project, :public)
+ issue = create(:issue, :confidential, project: project)
+ link = reference_link(project: project.id, issue: issue.id, reference_type: 'issue')
+
+ doc = filter(link, current_user: admin)
+
+ expect(doc.css('a').length).to eq 1
+ end
+ end
- doc = filter(link, current_user: admin)
+ context 'when admin mode is disabled' do
+ it 'removes references' do
+ admin = create(:admin)
+ project = create(:project, :public)
+ issue = create(:issue, :confidential, project: project)
+ link = reference_link(project: project.id, issue: issue.id, reference_type: 'issue')
- expect(doc.css('a').length).to eq 1
+ doc = filter(link, current_user: admin)
+
+ expect(doc.css('a').length).to eq 0
+ end
+ end
end
context "when a confidential issue is moved from a public project to a private one" do
diff --git a/spec/lib/banzai/filter/sanitization_filter_spec.rb b/spec/lib/banzai/filter/sanitization_filter_spec.rb
index 09dcd5518ff..bc4b60dfe60 100644
--- a/spec/lib/banzai/filter/sanitization_filter_spec.rb
+++ b/spec/lib/banzai/filter/sanitization_filter_spec.rb
@@ -5,31 +5,31 @@ require 'spec_helper'
RSpec.describe Banzai::Filter::SanitizationFilter do
include FilterSpecHelper
- it_behaves_like 'default whitelist'
+ it_behaves_like 'default allowlist'
- describe 'custom whitelist' do
+ describe 'custom allowlist' do
it_behaves_like 'XSS prevention'
it_behaves_like 'sanitize link'
- it 'customizes the whitelist only once' do
+ it 'customizes the allowlist only once' do
instance = described_class.new('Foo')
- control_count = instance.whitelist[:transformers].size
+ control_count = instance.allowlist[:transformers].size
- 3.times { instance.whitelist }
+ 3.times { instance.allowlist }
- expect(instance.whitelist[:transformers].size).to eq control_count
+ expect(instance.allowlist[:transformers].size).to eq control_count
end
- it 'customizes the whitelist only once for different instances' do
+ it 'customizes the allowlist only once for different instances' do
instance1 = described_class.new('Foo1')
instance2 = described_class.new('Foo2')
- control_count = instance1.whitelist[:transformers].size
+ control_count = instance1.allowlist[:transformers].size
- instance1.whitelist
- instance2.whitelist
+ instance1.allowlist
+ instance2.allowlist
- expect(instance1.whitelist[:transformers].size).to eq control_count
- expect(instance2.whitelist[:transformers].size).to eq control_count
+ expect(instance1.allowlist[:transformers].size).to eq control_count
+ expect(instance2.allowlist[:transformers].size).to eq control_count
end
it 'sanitizes `class` attribute from all elements' do
diff --git a/spec/lib/banzai/filter/truncate_source_filter_spec.rb b/spec/lib/banzai/filter/truncate_source_filter_spec.rb
new file mode 100644
index 00000000000..b0c6d91daa8
--- /dev/null
+++ b/spec/lib/banzai/filter/truncate_source_filter_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Banzai::Filter::TruncateSourceFilter do
+ include FilterSpecHelper
+
+ let(:short_text) { 'foo' * 10 }
+ let(:long_text) { ([short_text] * 10).join(' ') }
+
+ it 'does nothing when limit is unspecified' do
+ output = filter(long_text)
+
+ expect(output).to eq(long_text)
+ end
+
+ it 'does nothing to a short-enough text' do
+ output = filter(short_text, limit: short_text.bytesize)
+
+ expect(output).to eq(short_text)
+ end
+
+ it 'truncates UTF-8 text by bytes, on a character boundary' do
+ utf8_text = '日本語の文字が大きい'
+ truncated = '日…'
+
+ expect(filter(utf8_text, limit: truncated.bytesize)).to eq(truncated)
+ expect(filter(utf8_text, limit: utf8_text.bytesize)).to eq(utf8_text)
+ expect(filter(utf8_text, limit: utf8_text.mb_chars.size)).not_to eq(utf8_text)
+ end
+end
diff --git a/spec/lib/banzai/pipeline/description_pipeline_spec.rb b/spec/lib/banzai/pipeline/description_pipeline_spec.rb
index 82d4f883e0d..be553433e9e 100644
--- a/spec/lib/banzai/pipeline/description_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/description_pipeline_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe Banzai::Pipeline::DescriptionPipeline do
stub_commonmark_sourcepos_disabled
end
- it 'uses a limited whitelist' do
+ it 'uses a limited allowlist' do
doc = parse('# Description')
expect(doc.strip).to eq 'Description'
diff --git a/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb b/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
index 247f4591632..31047b9494a 100644
--- a/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
@@ -176,8 +176,8 @@ RSpec.describe Banzai::Pipeline::GfmPipeline do
stub_asset_proxy_setting(enabled: true)
stub_asset_proxy_setting(secret_key: 'shared-secret')
stub_asset_proxy_setting(url: 'https://assets.example.com')
- stub_asset_proxy_setting(whitelist: %W(gitlab.com *.mydomain.com #{Gitlab.config.gitlab.host}))
- stub_asset_proxy_setting(domain_regexp: Banzai::Filter::AssetProxyFilter.compile_whitelist(Gitlab.config.asset_proxy.whitelist))
+ stub_asset_proxy_setting(allowlist: %W(gitlab.com *.mydomain.com #{Gitlab.config.gitlab.host}))
+ stub_asset_proxy_setting(domain_regexp: Banzai::Filter::AssetProxyFilter.compile_allowlist(Gitlab.config.asset_proxy.allowlist))
end
it 'replaces a lazy loaded img src' do
diff --git a/spec/lib/banzai/pipeline/pre_process_pipeline_spec.rb b/spec/lib/banzai/pipeline/pre_process_pipeline_spec.rb
index fc74c592867..f0498f41b61 100644
--- a/spec/lib/banzai/pipeline/pre_process_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/pre_process_pipeline_spec.rb
@@ -24,4 +24,12 @@ RSpec.describe Banzai::Pipeline::PreProcessPipeline do
expect(result[:output]).to include "> blockquote\n"
end
end
+
+ it 'truncates the text if requested' do
+ text = (['foo'] * 10).join(' ')
+
+ result = described_class.call(text, limit: 12)
+
+ expect(result[:output]).to eq('foo foo f…')
+ end
end
diff --git a/spec/lib/bulk_imports/common/extractors/graphql_extractor_spec.rb b/spec/lib/bulk_imports/common/extractors/graphql_extractor_spec.rb
index a7a19fb73fc..2abd3df20fd 100644
--- a/spec/lib/bulk_imports/common/extractors/graphql_extractor_spec.rb
+++ b/spec/lib/bulk_imports/common/extractors/graphql_extractor_spec.rb
@@ -27,11 +27,8 @@ RSpec.describe BulkImports::Common::Extractors::GraphqlExtractor do
allow(graphql_client).to receive(:execute).and_return(response)
end
- it 'returns an enumerator with fetched results' do
- response = subject.extract(context)
-
- expect(response).to be_instance_of(Enumerator)
- expect(response.first).to eq({ foo: :bar })
+ it 'returns original hash' do
+ expect(subject.extract(context)).to eq({ foo: :bar })
end
end
diff --git a/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb
index c9b481388c3..1a91f3d7a78 100644
--- a/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb
@@ -75,13 +75,11 @@ RSpec.describe BulkImports::Groups::Pipelines::GroupPipeline do
it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
it 'has extractors' do
- expect(described_class.extractors)
- .to contain_exactly(
- {
- klass: BulkImports::Common::Extractors::GraphqlExtractor,
- options: {
- query: BulkImports::Groups::Graphql::GetGroupQuery
- }
+ expect(described_class.get_extractor)
+ .to eq(
+ klass: BulkImports::Common::Extractors::GraphqlExtractor,
+ options: {
+ query: BulkImports::Groups::Graphql::GetGroupQuery
}
)
end
@@ -97,9 +95,7 @@ RSpec.describe BulkImports::Groups::Pipelines::GroupPipeline do
end
it 'has loaders' do
- expect(described_class.loaders).to contain_exactly({
- klass: BulkImports::Groups::Loaders::GroupLoader, options: nil
- })
+ expect(described_class.get_loader).to eq(klass: BulkImports::Groups::Loaders::GroupLoader, options: nil)
end
end
end
diff --git a/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb
index 788a6e98c45..e5a8ed7f47d 100644
--- a/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb
@@ -58,10 +58,7 @@ RSpec.describe BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline do
it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
it 'has extractors' do
- expect(described_class.extractors).to contain_exactly(
- klass: BulkImports::Groups::Extractors::SubgroupsExtractor,
- options: nil
- )
+ expect(described_class.get_extractor).to eq(klass: BulkImports::Groups::Extractors::SubgroupsExtractor, options: nil)
end
it 'has transformers' do
@@ -72,10 +69,7 @@ RSpec.describe BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline do
end
it 'has loaders' do
- expect(described_class.loaders).to contain_exactly(
- klass: BulkImports::Common::Loaders::EntityLoader,
- options: nil
- )
+ expect(described_class.get_loader).to eq(klass: BulkImports::Common::Loaders::EntityLoader, options: nil)
end
end
end
diff --git a/spec/lib/bulk_imports/importers/group_importer_spec.rb b/spec/lib/bulk_imports/importers/group_importer_spec.rb
index 95dca7fc486..87baf1b8026 100644
--- a/spec/lib/bulk_imports/importers/group_importer_spec.rb
+++ b/spec/lib/bulk_imports/importers/group_importer_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe BulkImports::Importers::GroupImporter do
let(:user) { create(:user) }
let(:bulk_import) { create(:bulk_import) }
- let(:bulk_import_entity) { create(:bulk_import_entity, bulk_import: bulk_import) }
+ let(:bulk_import_entity) { create(:bulk_import_entity, :started, bulk_import: bulk_import) }
let(:bulk_import_configuration) { create(:bulk_import_configuration, bulk_import: bulk_import) }
let(:context) do
BulkImports::Pipeline::Context.new(
@@ -18,14 +18,13 @@ RSpec.describe BulkImports::Importers::GroupImporter do
subject { described_class.new(bulk_import_entity) }
before do
- allow(Gitlab).to receive(:ee?).and_return(false)
allow(BulkImports::Pipeline::Context).to receive(:new).and_return(context)
end
describe '#execute' do
it 'starts the entity and run its pipelines' do
- expect(bulk_import_entity).to receive(:start).and_call_original
expect_to_run_pipeline BulkImports::Groups::Pipelines::GroupPipeline, context: context
+ expect_to_run_pipeline('EE::BulkImports::Groups::Pipelines::EpicsPipeline'.constantize, context: context) if Gitlab.ee?
expect_to_run_pipeline BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline, context: context
subject.execute
diff --git a/spec/lib/bulk_imports/importers/groups_importer_spec.rb b/spec/lib/bulk_imports/importers/groups_importer_spec.rb
deleted file mode 100644
index 4865034b0cd..00000000000
--- a/spec/lib/bulk_imports/importers/groups_importer_spec.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Importers::GroupsImporter do
- let_it_be(:bulk_import) { create(:bulk_import) }
-
- subject { described_class.new(bulk_import.id) }
-
- describe '#execute' do
- context "when there is entities to be imported" do
- let!(:bulk_import_entity) { create(:bulk_import_entity, bulk_import: bulk_import) }
-
- it "starts the bulk_import and imports its entities" do
- expect(BulkImports::Importers::GroupImporter).to receive(:new)
- .with(bulk_import_entity).and_return(double(execute: true))
- expect(BulkImportWorker).to receive(:perform_async).with(bulk_import.id)
-
- subject.execute
-
- expect(bulk_import.reload).to be_started
- end
- end
-
- context "when there is no entities to be imported" do
- it "starts the bulk_import and imports its entities" do
- expect(BulkImports::Importers::GroupImporter).not_to receive(:new)
- expect(BulkImportWorker).not_to receive(:perform_async)
-
- subject.execute
-
- expect(bulk_import.reload).to be_finished
- end
- end
- end
-end
diff --git a/spec/lib/bulk_imports/pipeline_spec.rb b/spec/lib/bulk_imports/pipeline_spec.rb
index 94052be7df2..3811a02a7fd 100644
--- a/spec/lib/bulk_imports/pipeline_spec.rb
+++ b/spec/lib/bulk_imports/pipeline_spec.rb
@@ -24,9 +24,9 @@ RSpec.describe BulkImports::Pipeline do
describe 'getters' do
it 'retrieves class attributes' do
- expect(BulkImports::MyPipeline.extractors).to contain_exactly({ klass: BulkImports::Extractor, options: { foo: :bar } })
+ expect(BulkImports::MyPipeline.get_extractor).to eq({ klass: BulkImports::Extractor, options: { foo: :bar } })
expect(BulkImports::MyPipeline.transformers).to contain_exactly({ klass: BulkImports::Transformer, options: { foo: :bar } })
- expect(BulkImports::MyPipeline.loaders).to contain_exactly({ klass: BulkImports::Loader, options: { foo: :bar } })
+ expect(BulkImports::MyPipeline.get_loader).to eq({ klass: BulkImports::Loader, options: { foo: :bar } })
expect(BulkImports::MyPipeline.abort_on_failure?).to eq(true)
end
end
@@ -41,20 +41,14 @@ RSpec.describe BulkImports::Pipeline do
BulkImports::MyPipeline.loader(klass, options)
BulkImports::MyPipeline.abort_on_failure!
- expect(BulkImports::MyPipeline.extractors)
- .to contain_exactly(
- { klass: BulkImports::Extractor, options: { foo: :bar } },
- { klass: klass, options: options })
+ expect(BulkImports::MyPipeline.get_extractor).to eq({ klass: klass, options: options })
expect(BulkImports::MyPipeline.transformers)
.to contain_exactly(
{ klass: BulkImports::Transformer, options: { foo: :bar } },
{ klass: klass, options: options })
- expect(BulkImports::MyPipeline.loaders)
- .to contain_exactly(
- { klass: BulkImports::Loader, options: { foo: :bar } },
- { klass: klass, options: options })
+ expect(BulkImports::MyPipeline.get_loader).to eq({ klass: klass, options: options })
expect(BulkImports::MyPipeline.abort_on_failure?).to eq(true)
end
diff --git a/spec/lib/constraints/admin_constrainer_spec.rb b/spec/lib/constraints/admin_constrainer_spec.rb
index 3efe683177c..ac6ad31120e 100644
--- a/spec/lib/constraints/admin_constrainer_spec.rb
+++ b/spec/lib/constraints/admin_constrainer_spec.rb
@@ -2,7 +2,7 @@
#
require 'spec_helper'
-RSpec.describe Constraints::AdminConstrainer, :do_not_mock_admin_mode do
+RSpec.describe Constraints::AdminConstrainer do
let(:user) { create(:user) }
let(:session) { {} }
diff --git a/spec/lib/container_registry/client_spec.rb b/spec/lib/container_registry/client_spec.rb
index 2c08fdc1e75..9d6f4db537d 100644
--- a/spec/lib/container_registry/client_spec.rb
+++ b/spec/lib/container_registry/client_spec.rb
@@ -26,7 +26,54 @@ RSpec.describe ContainerRegistry::Client do
}
end
- shared_examples '#repository_manifest' do |manifest_type|
+ let(:expected_faraday_headers) { { user_agent: "GitLab/#{Gitlab::VERSION}" } }
+ let(:expected_faraday_request_options) { Gitlab::HTTP::DEFAULT_TIMEOUT_OPTIONS }
+
+ shared_examples 'handling timeouts' do
+ let(:retry_options) do
+ ContainerRegistry::Client::RETRY_OPTIONS.merge(
+ interval: 0.1,
+ interval_randomness: 0,
+ backoff_factor: 0
+ )
+ end
+
+ before do
+ stub_request(method, url).to_timeout
+ end
+
+ it 'handles network timeouts' do
+ actual_retries = 0
+ retry_options_with_block = retry_options.merge(
+ retry_block: -> (_, _, _, _) { actual_retries += 1 }
+ )
+
+ stub_const('ContainerRegistry::Client::RETRY_OPTIONS', retry_options_with_block)
+
+ expect { subject }.to raise_error(Faraday::ConnectionFailed)
+ expect(actual_retries).to eq(retry_options_with_block[:max])
+ end
+
+ it 'logs the error' do
+ stub_const('ContainerRegistry::Client::RETRY_OPTIONS', retry_options)
+
+ expect(Gitlab::ErrorTracking)
+ .to receive(:log_exception)
+ .exactly(retry_options[:max] + 1)
+ .times
+ .with(
+ an_instance_of(Faraday::ConnectionFailed),
+ class: described_class.name,
+ url: URI(url)
+ )
+
+ expect { subject }.to raise_error(Faraday::ConnectionFailed)
+ end
+ end
+
+ shared_examples 'handling repository manifest' do |manifest_type|
+ let(:method) { :get }
+ let(:url) { 'http://container-registry/v2/group/test/manifests/mytag' }
let(:manifest) do
{
"schemaVersion" => 2,
@@ -48,7 +95,7 @@ RSpec.describe ContainerRegistry::Client do
end
it 'GET /v2/:name/manifests/mytag' do
- stub_request(:get, "http://container-registry/v2/group/test/manifests/mytag")
+ stub_request(method, url)
.with(headers: {
'Accept' => 'application/vnd.docker.distribution.manifest.v2+json, application/vnd.oci.image.manifest.v1+json',
'Authorization' => "bearer #{token}",
@@ -56,14 +103,24 @@ RSpec.describe ContainerRegistry::Client do
})
.to_return(status: 200, body: manifest.to_json, headers: { content_type: manifest_type })
- expect(client.repository_manifest('group/test', 'mytag')).to eq(manifest)
+ expect_new_faraday
+
+ expect(subject).to eq(manifest)
end
+
+ it_behaves_like 'handling timeouts'
end
- it_behaves_like '#repository_manifest', described_class::DOCKER_DISTRIBUTION_MANIFEST_V2_TYPE
- it_behaves_like '#repository_manifest', described_class::OCI_MANIFEST_V1_TYPE
+ describe '#repository_manifest' do
+ subject { client.repository_manifest('group/test', 'mytag') }
+
+ it_behaves_like 'handling repository manifest', described_class::DOCKER_DISTRIBUTION_MANIFEST_V2_TYPE
+ it_behaves_like 'handling repository manifest', described_class::OCI_MANIFEST_V1_TYPE
+ end
describe '#blob' do
+ let(:method) { :get }
+ let(:url) { 'http://container-registry/v2/group/test/blobs/sha256:0123456789012345' }
let(:blob_headers) do
{
'Accept' => 'application/octet-stream',
@@ -78,16 +135,20 @@ RSpec.describe ContainerRegistry::Client do
}
end
+ subject { client.blob('group/test', 'sha256:0123456789012345') }
+
it 'GET /v2/:name/blobs/:digest' do
- stub_request(:get, "http://container-registry/v2/group/test/blobs/sha256:0123456789012345")
+ stub_request(method, url)
.with(headers: blob_headers)
.to_return(status: 200, body: "Blob")
- expect(client.blob('group/test', 'sha256:0123456789012345')).to eq('Blob')
+ expect_new_faraday
+
+ expect(subject).to eq('Blob')
end
it 'follows 307 redirect for GET /v2/:name/blobs/:digest' do
- stub_request(:get, "http://container-registry/v2/group/test/blobs/sha256:0123456789012345")
+ stub_request(method, url)
.with(headers: blob_headers)
.to_return(status: 307, body: '', headers: { Location: 'http://redirected' })
# We should probably use hash_excluding here, but that requires an update to WebMock:
@@ -98,10 +159,12 @@ RSpec.describe ContainerRegistry::Client do
end
.to_return(status: 200, body: "Successfully redirected")
- response = client.blob('group/test', 'sha256:0123456789012345')
+ expect_new_faraday(times: 2)
- expect(response).to eq('Successfully redirected')
+ expect(subject).to eq('Successfully redirected')
end
+
+ it_behaves_like 'handling timeouts'
end
describe '#upload_blob' do
@@ -111,6 +174,8 @@ RSpec.describe ContainerRegistry::Client do
it 'starts the upload and posts the blob' do
stub_upload('path', 'content', 'sha256:123')
+ expect_new_faraday(timeout: false)
+
expect(subject).to be_success
end
end
@@ -173,6 +238,8 @@ RSpec.describe ContainerRegistry::Client do
.with(body: "{\n \"foo\": \"bar\"\n}", headers: manifest_headers)
.to_return(status: 200, body: "", headers: { 'docker-content-digest' => 'sha256:123' })
+ expect_new_faraday(timeout: false)
+
expect(subject).to eq 'sha256:123'
end
end
@@ -375,4 +442,17 @@ RSpec.describe ContainerRegistry::Client do
headers: { 'Allow' => 'DELETE' }
)
end
+
+ def expect_new_faraday(times: 1, timeout: true)
+ request_options = timeout ? expected_faraday_request_options : nil
+ expect(Faraday)
+ .to receive(:new)
+ .with(
+ 'http://container-registry',
+ headers: expected_faraday_headers,
+ request: request_options
+ ).and_call_original
+ .exactly(times)
+ .times
+ end
end
diff --git a/spec/lib/declarative_enum_spec.rb b/spec/lib/declarative_enum_spec.rb
new file mode 100644
index 00000000000..66cda9fc3a8
--- /dev/null
+++ b/spec/lib/declarative_enum_spec.rb
@@ -0,0 +1,147 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe DeclarativeEnum do
+ let(:enum_module) do
+ Module.new do
+ extend DeclarativeEnum
+
+ key :my_enum
+ name 'MyEnumName'
+
+ description "Enum description"
+
+ define do
+ foo value: 0, description: 'description of foo'
+ bar value: 1, description: 'description of bar'
+ end
+ end
+ end
+
+ let(:original_definition) do
+ {
+ foo: { description: 'description of foo', value: 0 },
+ bar: { description: 'description of bar', value: 1 }
+ }
+ end
+
+ describe '.key' do
+ subject(:key) { enum_module.key(new_key) }
+
+ context 'when the argument is set' do
+ let(:new_key) { :new_enum_key }
+
+ it 'changes the key' do
+ expect { key }.to change { enum_module.key }.from(:my_enum).to(:new_enum_key)
+ end
+ end
+
+ context 'when the argument is `nil`' do
+ let(:new_key) { nil }
+
+ it { is_expected.to eq(:my_enum) }
+ end
+ end
+
+ describe '.name' do
+ subject(:name) { enum_module.name(new_name) }
+
+ context 'when the argument is set' do
+ let(:new_name) { 'NewMyEnumName' }
+
+ it 'changes the name' do
+ expect { name }.to change { enum_module.name }.from('MyEnumName').to('NewMyEnumName')
+ end
+ end
+
+ context 'when the argument is `nil`' do
+ let(:new_name) { nil }
+
+ it { is_expected.to eq('MyEnumName') }
+ end
+ end
+
+ describe '.description' do
+ subject(:description) { enum_module.description(new_description) }
+
+ context 'when the argument is set' do
+ let(:new_description) { 'New enum description' }
+
+ it 'changes the description' do
+ expect { description }.to change { enum_module.description }.from('Enum description').to('New enum description')
+ end
+ end
+
+ context 'when the argument is `nil`' do
+ let(:new_description) { nil }
+
+ it { is_expected.to eq('Enum description') }
+ end
+ end
+
+ describe '.define' do
+ subject(:define) { enum_module.define(&block) }
+
+ context 'when there is a block given' do
+ context 'when the given block tries to register the same key' do
+ let(:block) do
+ proc do
+ foo value: 2, description: 'description of foo'
+ end
+ end
+
+ it 'raises a `KeyCollisionError`' do
+ expect { define }.to raise_error(DeclarativeEnum::Builder::KeyCollisionError)
+ end
+ end
+
+ context 'when the given block does not try to register the same key' do
+ let(:expected_new_definition) { original_definition.merge(zoo: { description: 'description of zoo', value: 0 }) }
+ let(:block) do
+ proc do
+ zoo value: 0, description: 'description of zoo'
+ end
+ end
+
+ it 'appends the new definition' do
+ expect { define }.to change { enum_module.definition }.from(original_definition).to(expected_new_definition)
+ end
+ end
+ end
+
+ context 'when there is no block given' do
+ let(:block) { nil }
+
+ it 'raises a LocalJumpError' do
+ expect { define }.to raise_error(LocalJumpError)
+ end
+ end
+ end
+
+ describe '.definition' do
+ subject { enum_module.definition }
+
+ it { is_expected.to eq(original_definition) }
+ end
+
+ describe 'extending the enum module' do
+ let(:extended_definition) { original_definition.merge(zoo: { value: 2, description: 'description of zoo' }) }
+ let(:new_enum_module) do
+ Module.new do
+ extend DeclarativeEnum
+
+ define do
+ zoo value: 2, description: 'description of zoo'
+ end
+ end
+ end
+
+ subject(:prepend_new_enum_module) { enum_module.prepend(new_enum_module) }
+
+ it 'extends the values of the base enum module' do
+ expect { prepend_new_enum_module }.to change { enum_module.definition }.from(original_definition)
+ .to(extended_definition)
+ end
+ end
+end
diff --git a/spec/lib/expand_variables_spec.rb b/spec/lib/expand_variables_spec.rb
index a994b4b92a6..b603325cdb8 100644
--- a/spec/lib/expand_variables_spec.rb
+++ b/spec/lib/expand_variables_spec.rb
@@ -224,4 +224,41 @@ RSpec.describe ExpandVariables do
end
end
end
+
+ describe '#possible_var_reference?' do
+ context 'table tests' do
+ using RSpec::Parameterized::TableSyntax
+
+ where do
+ {
+ "empty value": {
+ value: '',
+ result: false
+ },
+ "normal value": {
+ value: 'some value',
+ result: false
+ },
+ "simple expansions": {
+ value: 'key$variable',
+ result: true
+ },
+ "complex expansions": {
+ value: 'key${variable}${variable2}',
+ result: true
+ },
+ "complex expansions for Windows": {
+ value: 'key%variable%%variable2%',
+ result: true
+ }
+ }
+ end
+
+ with_them do
+ subject { ExpandVariables.possible_var_reference?(value) }
+
+ it { is_expected.to eq(result) }
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_deployed_to_production_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_deployed_to_production_spec.rb
new file mode 100644
index 00000000000..93e588675d3
--- /dev/null
+++ b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_deployed_to_production_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::IssueDeployedToProduction do
+ it_behaves_like 'value stream analytics event'
+end
diff --git a/spec/lib/gitlab/api_authentication/builder_spec.rb b/spec/lib/gitlab/api_authentication/builder_spec.rb
new file mode 100644
index 00000000000..e241aa77805
--- /dev/null
+++ b/spec/lib/gitlab/api_authentication/builder_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::APIAuthentication::Builder do
+ describe '#build' do
+ shared_examples 'builds the correct result' do |token_type:, sent_through:, builds:|
+ context "with #{token_type.size} token type(s) and #{sent_through.size} sent through(s)" do
+ it 'works when passed together' do
+ strategies = described_class.new.build { |allow| allow.token_types(*token_type).sent_through(*sent_through) }
+
+ expect(strategies).to eq(builds)
+ end
+
+ it 'works when token types are passed separately' do
+ strategies = described_class.new.build { |allow| token_type.each { |t| allow.token_types(t).sent_through(*sent_through) } }
+
+ expect(strategies).to eq(builds)
+ end
+
+ it 'works when sent throughs are passed separately' do
+ strategies = described_class.new.build { |allow| sent_through.each { |s| allow.token_types(*token_type).sent_through(s) } }
+
+ expect(strategies).to eq(builds)
+ end
+
+ it 'works when token types and sent throughs are passed separately' do
+ strategies = described_class.new.build { |allow| token_type.each { |t| sent_through.each { |s| allow.token_types(t).sent_through(s) } } }
+
+ expect(strategies).to eq(builds)
+ end
+ end
+ end
+
+ it_behaves_like 'builds the correct result',
+ token_type: [:pat],
+ sent_through: [:basic],
+ builds: { basic: [:pat] }
+
+ it_behaves_like 'builds the correct result',
+ token_type: [:pat],
+ sent_through: [:basic, :oauth],
+ builds: { basic: [:pat], oauth: [:pat] }
+
+ it_behaves_like 'builds the correct result',
+ token_type: [:pat, :job],
+ sent_through: [:basic],
+ builds: { basic: [:pat, :job] }
+
+ it_behaves_like 'builds the correct result',
+ token_type: [:pat, :job],
+ sent_through: [:basic, :oauth],
+ builds: { basic: [:pat, :job], oauth: [:pat, :job] }
+
+ context 'with a complex auth strategy' do
+ it 'builds the correct result' do
+ strategies = described_class.new.build do |allow|
+ allow.token_types(:pat, :job, :deploy).sent_through(:http_basic, :oauth)
+ allow.token_types(:pat).sent_through(:http_private, :query_private)
+ allow.token_types(:oauth2).sent_through(:http_bearer, :query_access)
+ end
+
+ expect(strategies).to eq({
+ http_basic: [:pat, :job, :deploy],
+ oauth: [:pat, :job, :deploy],
+
+ http_private: [:pat],
+ query_private: [:pat],
+
+ http_bearer: [:oauth2],
+ query_access: [:oauth2]
+ })
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/api_authentication/sent_through_builder_spec.rb b/spec/lib/gitlab/api_authentication/sent_through_builder_spec.rb
new file mode 100644
index 00000000000..845e317f3aa
--- /dev/null
+++ b/spec/lib/gitlab/api_authentication/sent_through_builder_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::APIAuthentication::SentThroughBuilder do
+ describe '#sent_through' do
+ let(:resolvers) { Array.new(3) { double } }
+ let(:locators) { Array.new(3) { double } }
+
+ it 'adds a strategy for each of locators x resolvers' do
+ strategies = locators.to_h { |l| [l, []] }
+ described_class.new(strategies, resolvers).sent_through(*locators)
+
+ expect(strategies).to eq(locators.to_h { |l| [l, resolvers] })
+ end
+ end
+end
diff --git a/spec/lib/gitlab/api_authentication/token_locator_spec.rb b/spec/lib/gitlab/api_authentication/token_locator_spec.rb
new file mode 100644
index 00000000000..68ce48a70ea
--- /dev/null
+++ b/spec/lib/gitlab/api_authentication/token_locator_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::APIAuthentication::TokenLocator do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project, :public) }
+ let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
+ let_it_be(:ci_job) { create(:ci_build, project: project, user: user, status: :running) }
+ let_it_be(:ci_job_done) { create(:ci_build, project: project, user: user, status: :success) }
+ let_it_be(:deploy_token) { create(:deploy_token, read_package_registry: true, write_package_registry: true) }
+
+ describe '.new' do
+ context 'with a valid type' do
+ it 'creates a new instance' do
+ expect(described_class.new(:http_basic_auth)).to be_a(described_class)
+ end
+ end
+
+ context 'with an invalid type' do
+ it 'raises ActiveModel::ValidationError' do
+ expect { described_class.new(:not_a_real_locator) }.to raise_error(ActiveModel::ValidationError)
+ end
+ end
+ end
+
+ describe '#extract' do
+ let(:locator) { described_class.new(type) }
+
+ subject { locator.extract(request) }
+
+ context 'with :http_basic_auth' do
+ let(:type) { :http_basic_auth }
+
+ context 'without credentials' do
+ let(:request) { double(authorization: nil) }
+
+ it 'returns nil' do
+ expect(subject).to be(nil)
+ end
+ end
+
+ context 'with credentials' do
+ let(:username) { 'foo' }
+ let(:password) { 'bar' }
+ let(:request) { double(authorization: "Basic #{::Base64.strict_encode64("#{username}:#{password}")}") }
+
+ it 'returns the credentials' do
+ expect(subject.username).to eq(username)
+ expect(subject.password).to eq(password)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/api_authentication/token_resolver_spec.rb b/spec/lib/gitlab/api_authentication/token_resolver_spec.rb
new file mode 100644
index 00000000000..0028fb080ac
--- /dev/null
+++ b/spec/lib/gitlab/api_authentication/token_resolver_spec.rb
@@ -0,0 +1,117 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::APIAuthentication::TokenResolver do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project, :public) }
+ let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
+ let_it_be(:ci_job) { create(:ci_build, project: project, user: user, status: :running) }
+ let_it_be(:ci_job_done) { create(:ci_build, project: project, user: user, status: :success) }
+ let_it_be(:deploy_token) { create(:deploy_token, read_package_registry: true, write_package_registry: true) }
+
+ shared_examples 'an authorized request' do
+ it 'returns the correct token' do
+ expect(subject).to eq(token)
+ end
+ end
+
+ shared_examples 'an unauthorized request' do
+ it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::Auth::UnauthorizedError)
+ end
+ end
+
+ shared_examples 'an anonymous request' do
+ it 'returns nil' do
+ expect(subject).to eq(nil)
+ end
+ end
+
+ describe '.new' do
+ context 'with a valid type' do
+ it 'creates a new instance' do
+ expect(described_class.new(:personal_access_token)).to be_a(described_class)
+ end
+ end
+
+ context 'with an invalid type' do
+ it 'raises a validation error' do
+ expect { described_class.new(:not_a_real_locator) }.to raise_error(ActiveModel::ValidationError)
+ end
+ end
+ end
+
+ describe '#resolve' do
+ let(:resolver) { described_class.new(type) }
+
+ subject { resolver.resolve(raw) }
+
+ context 'with :personal_access_token' do
+ let(:type) { :personal_access_token }
+ let(:token) { personal_access_token }
+
+ context 'with valid credentials' do
+ let(:raw) { username_and_password(user.username, token.token) }
+
+ it_behaves_like 'an authorized request'
+ end
+
+ context 'with an invalid username' do
+ let(:raw) { username_and_password("not-my-#{user.username}", token.token) }
+
+ it_behaves_like 'an unauthorized request'
+ end
+ end
+
+ context 'with :job_token' do
+ let(:type) { :job_token }
+ let(:token) { ci_job }
+
+ context 'with valid credentials' do
+ let(:raw) { username_and_password(Gitlab::Auth::CI_JOB_USER, token.token) }
+
+ it_behaves_like 'an authorized request'
+ end
+
+ context 'when the job is not running' do
+ let(:raw) { username_and_password(Gitlab::Auth::CI_JOB_USER, ci_job_done.token) }
+
+ it_behaves_like 'an unauthorized request'
+ end
+
+ context 'with the wrong username' do
+ let(:raw) { username_and_password("not-#{Gitlab::Auth::CI_JOB_USER}", nil) }
+
+ it_behaves_like 'an anonymous request'
+ end
+
+ context 'with an invalid job token' do
+ let(:raw) { username_and_password(Gitlab::Auth::CI_JOB_USER, "not a valid CI job token") }
+
+ it_behaves_like 'an unauthorized request'
+ end
+ end
+
+ context 'with :deploy_token' do
+ let(:type) { :deploy_token }
+ let(:token) { deploy_token }
+
+ context 'with a valid deploy token' do
+ let(:raw) { username_and_password(token.username, token.token) }
+
+ it_behaves_like 'an authorized request'
+ end
+
+ context 'with an invalid username' do
+ let(:raw) { username_and_password("not-my-#{token.username}", token.token) }
+
+ it_behaves_like 'an unauthorized request'
+ end
+ end
+ end
+
+ def username_and_password(username, password)
+ ::Gitlab::APIAuthentication::TokenLocator::UsernameAndPassword.new(username, password)
+ end
+end
diff --git a/spec/lib/gitlab/api_authentication/token_type_builder_spec.rb b/spec/lib/gitlab/api_authentication/token_type_builder_spec.rb
new file mode 100644
index 00000000000..fbca62c9a42
--- /dev/null
+++ b/spec/lib/gitlab/api_authentication/token_type_builder_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::APIAuthentication::TokenTypeBuilder do
+ describe '#token_types' do
+ it 'passes strategies and resolvers to SentThroughBuilder' do
+ strategies = double
+ resolvers = Array.new(3) { double }
+ retval = double
+ expect(Gitlab::APIAuthentication::SentThroughBuilder).to receive(:new).with(strategies, resolvers).and_return(retval)
+
+ expect(described_class.new(strategies).token_types(*resolvers)).to be(retval)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/asset_proxy_spec.rb b/spec/lib/gitlab/asset_proxy_spec.rb
index 73b101c0dd8..7d7952d5741 100644
--- a/spec/lib/gitlab/asset_proxy_spec.rb
+++ b/spec/lib/gitlab/asset_proxy_spec.rb
@@ -17,12 +17,12 @@ RSpec.describe Gitlab::AssetProxy do
context 'when asset proxy is enabled' do
before do
- stub_asset_proxy_setting(whitelist: %w(gitlab.com *.mydomain.com))
+ stub_asset_proxy_setting(allowlist: %w(gitlab.com *.mydomain.com))
stub_asset_proxy_setting(
enabled: true,
url: 'https://assets.example.com',
secret_key: 'shared-secret',
- domain_regexp: Banzai::Filter::AssetProxyFilter.compile_whitelist(Gitlab.config.asset_proxy.whitelist)
+ domain_regexp: Banzai::Filter::AssetProxyFilter.compile_allowlist(Gitlab.config.asset_proxy.allowlist)
)
end
diff --git a/spec/lib/gitlab/auth/auth_finders_spec.rb b/spec/lib/gitlab/auth/auth_finders_spec.rb
index f927d5912bb..775f8f056b5 100644
--- a/spec/lib/gitlab/auth/auth_finders_spec.rb
+++ b/spec/lib/gitlab/auth/auth_finders_spec.rb
@@ -6,7 +6,8 @@ RSpec.describe Gitlab::Auth::AuthFinders do
include described_class
include HttpBasicAuthHelpers
- let(:user) { create(:user) }
+ # Create the feed_token and static_object_token for the user
+ let_it_be(:user) { create(:user).tap(&:feed_token).tap(&:static_object_token) }
let(:env) do
{
'rack.input' => ''
@@ -65,7 +66,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
end
describe '#find_user_from_bearer_token' do
- let(:job) { create(:ci_build, user: user) }
+ let_it_be_with_reload(:job) { create(:ci_build, user: user) }
subject { find_user_from_bearer_token }
@@ -91,7 +92,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
end
context 'with a personal access token' do
- let(:pat) { create(:personal_access_token, user: user) }
+ let_it_be(:pat) { create(:personal_access_token, user: user) }
let(:token) { pat.token }
before do
@@ -148,7 +149,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
end
it 'returns nil if valid feed_token and disabled' do
- allow(Gitlab::CurrentSettings).to receive(:disable_feed_token).and_return(true)
+ stub_application_setting(disable_feed_token: true)
set_param(:feed_token, user.feed_token)
expect(find_user_from_feed_token(:rss)).to be_nil
@@ -166,7 +167,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
end
context 'when rss_token param is provided' do
- it 'returns user if valid rssd_token' do
+ it 'returns user if valid rss_token' do
set_param(:rss_token, user.feed_token)
expect(find_user_from_feed_token(:rss)).to eq user
@@ -347,7 +348,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
end
describe '#find_user_from_access_token' do
- let(:personal_access_token) { create(:personal_access_token, user: user) }
+ let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
before do
set_header('SCRIPT_NAME', 'url.atom')
@@ -386,7 +387,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
end
context 'when using a non-prefixed access token' do
- let(:personal_access_token) { create(:personal_access_token, :no_prefix, user: user) }
+ let_it_be(:personal_access_token) { create(:personal_access_token, :no_prefix, user: user) }
it 'returns user' do
set_header('HTTP_AUTHORIZATION', "Bearer #{personal_access_token.token}")
@@ -398,7 +399,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
end
describe '#find_user_from_web_access_token' do
- let(:personal_access_token) { create(:personal_access_token, user: user) }
+ let_it_be_with_reload(:personal_access_token) { create(:personal_access_token, user: user) }
before do
set_header(described_class::PRIVATE_TOKEN_HEADER, personal_access_token.token)
@@ -449,6 +450,22 @@ RSpec.describe Gitlab::Auth::AuthFinders do
expect(find_user_from_web_access_token(:api)).to be_nil
end
+ context 'when the token has read_api scope' do
+ before do
+ personal_access_token.update!(scopes: ['read_api'])
+
+ set_header('SCRIPT_NAME', '/api/endpoint')
+ end
+
+ it 'raises InsufficientScopeError by default' do
+ expect { find_user_from_web_access_token(:api) }.to raise_error(Gitlab::Auth::InsufficientScopeError)
+ end
+
+ it 'finds the user when the read_api scope is passed' do
+ expect(find_user_from_web_access_token(:api, scopes: [:api, :read_api])).to eq(user)
+ end
+ end
+
context 'when relative_url_root is set' do
before do
stub_config_setting(relative_url_root: '/relative_root')
@@ -464,7 +481,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
end
describe '#find_personal_access_token' do
- let(:personal_access_token) { create(:personal_access_token, user: user) }
+ let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
before do
set_header('SCRIPT_NAME', 'url.atom')
@@ -534,7 +551,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
end
context 'access token is valid' do
- let(:personal_access_token) { create(:personal_access_token, user: user) }
+ let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
let(:route_authentication_setting) { { basic_auth_personal_access_token: true } }
it 'finds the token from basic auth' do
@@ -555,7 +572,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
end
context 'route_setting is not set' do
- let(:personal_access_token) { create(:personal_access_token, user: user) }
+ let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
it 'returns nil' do
auth_header_with(personal_access_token.token)
@@ -565,7 +582,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
end
context 'route_setting is not correct' do
- let(:personal_access_token) { create(:personal_access_token, user: user) }
+ let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
let(:route_authentication_setting) { { basic_auth_personal_access_token: false } }
it 'returns nil' do
@@ -611,8 +628,9 @@ RSpec.describe Gitlab::Auth::AuthFinders do
context 'with CI username' do
let(:username) { ::Gitlab::Auth::CI_JOB_USER }
- let(:user) { create(:user) }
- let(:build) { create(:ci_build, user: user, status: :running) }
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:build) { create(:ci_build, user: user, status: :running) }
it 'returns nil without password' do
set_basic_auth_header(username, nil)
@@ -645,11 +663,11 @@ RSpec.describe Gitlab::Auth::AuthFinders do
describe '#validate_access_token!' do
subject { validate_access_token! }
- let(:personal_access_token) { create(:personal_access_token, user: user) }
+ let_it_be_with_reload(:personal_access_token) { create(:personal_access_token, user: user) }
context 'with a job token' do
+ let_it_be(:job) { create(:ci_build, user: user, status: :running) }
let(:route_authentication_setting) { { job_token_allowed: true } }
- let(:job) { create(:ci_build, user: user, status: :running) }
before do
env['HTTP_AUTHORIZATION'] = "Bearer #{job.token}"
@@ -671,7 +689,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
end
it 'returns Gitlab::Auth::ExpiredError if token expired' do
- personal_access_token.expires_at = 1.day.ago
+ personal_access_token.update!(expires_at: 1.day.ago)
expect { validate_access_token! }.to raise_error(Gitlab::Auth::ExpiredError)
end
@@ -688,7 +706,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
end
context 'with impersonation token' do
- let(:personal_access_token) { create(:personal_access_token, :impersonation, user: user) }
+ let_it_be(:personal_access_token) { create(:personal_access_token, :impersonation, user: user) }
context 'when impersonation is disabled' do
before do
@@ -704,7 +722,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
end
describe '#find_user_from_job_token' do
- let(:job) { create(:ci_build, user: user, status: :running) }
+ let_it_be(:job) { create(:ci_build, user: user, status: :running) }
let(:route_authentication_setting) { { job_token_allowed: true } }
subject { find_user_from_job_token }
@@ -866,7 +884,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
end
describe '#find_runner_from_token' do
- let(:runner) { create(:ci_runner) }
+ let_it_be(:runner) { create(:ci_runner) }
context 'with API requests' do
before do
diff --git a/spec/lib/gitlab/auth/current_user_mode_spec.rb b/spec/lib/gitlab/auth/current_user_mode_spec.rb
index ffd7813190a..a21f0931b78 100644
--- a/spec/lib/gitlab/auth/current_user_mode_spec.rb
+++ b/spec/lib/gitlab/auth/current_user_mode_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Auth::CurrentUserMode, :do_not_mock_admin_mode, :request_store do
+RSpec.describe Gitlab::Auth::CurrentUserMode, :request_store do
let(:user) { build_stubbed(:user) }
subject { described_class.new(user) }
diff --git a/spec/lib/gitlab/auth/ldap/config_spec.rb b/spec/lib/gitlab/auth/ldap/config_spec.rb
index e4c87a54365..7a657cce597 100644
--- a/spec/lib/gitlab/auth/ldap/config_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/config_spec.rb
@@ -5,6 +5,10 @@ require 'spec_helper'
RSpec.describe Gitlab::Auth::Ldap::Config do
include LdapHelpers
+ before do
+ stub_ldap_setting(enabled: true)
+ end
+
let(:config) { described_class.new('ldapmain') }
def raw_cert
@@ -68,12 +72,28 @@ AtlErSqafbECNDSwS5BX8yDpu5yRBJ4xegO/rNlmb8ICRYkuJapD1xXicFOsmfUK
describe '.servers' do
it 'returns empty array if no server information is available' do
- allow(Gitlab.config).to receive(:ldap).and_return('enabled' => false)
+ stub_ldap_setting(servers: {})
expect(described_class.servers).to eq []
end
end
+ describe '.available_providers' do
+ before do
+ stub_licensed_features(multiple_ldap_servers: false)
+ stub_ldap_setting(
+ 'servers' => {
+ 'main' => { 'provider_name' => 'ldapmain' },
+ 'secondary' => { 'provider_name' => 'ldapsecondary' }
+ }
+ )
+ end
+
+ it 'returns one provider' do
+ expect(described_class.available_providers).to match_array(%w(ldapmain))
+ end
+ end
+
describe '#initialize' do
it 'requires a provider' do
expect { described_class.new }.to raise_error ArgumentError
diff --git a/spec/lib/gitlab/auth/request_authenticator_spec.rb b/spec/lib/gitlab/auth/request_authenticator_spec.rb
index ef6b1d72712..93e9cb06786 100644
--- a/spec/lib/gitlab/auth/request_authenticator_spec.rb
+++ b/spec/lib/gitlab/auth/request_authenticator_spec.rb
@@ -47,7 +47,10 @@ RSpec.describe Gitlab::Auth::RequestAuthenticator do
let!(:job_token_user) { build(:user) }
it 'returns access_token user first' do
- allow_any_instance_of(described_class).to receive(:find_user_from_web_access_token).and_return(access_token_user)
+ allow_any_instance_of(described_class).to receive(:find_user_from_web_access_token)
+ .with(anything, scopes: [:api, :read_api])
+ .and_return(access_token_user)
+
allow_any_instance_of(described_class).to receive(:find_user_from_feed_token).and_return(feed_token_user)
expect(subject.find_sessionless_user(:api)).to eq access_token_user
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index dfd21983682..4e4bbd1bb60 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -372,6 +372,11 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
expect(gl_auth.find_for_git_client(project_bot_user.username, project_access_token.token, project: project, ip: 'ip'))
.to eq(Gitlab::Auth::Result.new(project_bot_user, nil, :personal_access_token, described_class.full_authentication_abilities))
end
+
+ it 'successfully authenticates the project bot with a nil project' do
+ expect(gl_auth.find_for_git_client(project_bot_user.username, project_access_token.token, project: nil, ip: 'ip'))
+ .to eq(Gitlab::Auth::Result.new(project_bot_user, nil, :personal_access_token, described_class.full_authentication_abilities))
+ end
end
context 'with invalid project access token' do
diff --git a/spec/lib/gitlab/background_migration/backfill_artifact_expiry_date_spec.rb b/spec/lib/gitlab/background_migration/backfill_artifact_expiry_date_spec.rb
new file mode 100644
index 00000000000..49fa7b41916
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_artifact_expiry_date_spec.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillArtifactExpiryDate, :migration, schema: 20201111152859 do
+ subject(:perform) { migration.perform(1, 99) }
+
+ let(:migration) { described_class.new }
+ let(:artifact_outside_id_range) { create_artifact!(id: 100, created_at: 1.year.ago, expire_at: nil) }
+ let(:artifact_outside_date_range) { create_artifact!(id: 40, created_at: Time.current, expire_at: nil) }
+ let(:old_artifact) { create_artifact!(id: 10, created_at: 16.months.ago, expire_at: nil) }
+ let(:recent_artifact) { create_artifact!(id: 20, created_at: 1.year.ago, expire_at: nil) }
+ let(:artifact_with_expiry) { create_artifact!(id: 30, created_at: 1.year.ago, expire_at: Time.current + 1.day) }
+
+ before do
+ table(:namespaces).create!(id: 1, name: 'the-namespace', path: 'the-path')
+ table(:projects).create!(id: 1, name: 'the-project', namespace_id: 1)
+ table(:ci_builds).create!(id: 1, allow_failure: false)
+ end
+
+ context 'when current date is before the 22nd' do
+ before do
+ travel_to(Time.zone.local(2020, 1, 1, 0, 0, 0))
+ end
+
+ it 'backfills the expiry date for old artifacts' do
+ expect(old_artifact.reload.expire_at).to eq(nil)
+
+ perform
+
+ expect(old_artifact.reload.expire_at).to be_within(1.minute).of(Time.zone.local(2020, 4, 22, 0, 0, 0))
+ end
+
+ it 'backfills the expiry date for recent artifacts' do
+ expect(recent_artifact.reload.expire_at).to eq(nil)
+
+ perform
+
+ expect(recent_artifact.reload.expire_at).to be_within(1.minute).of(Time.zone.local(2021, 1, 22, 0, 0, 0))
+ end
+ end
+
+ context 'when current date is after the 22nd' do
+ before do
+ travel_to(Time.zone.local(2020, 1, 23, 0, 0, 0))
+ end
+
+ it 'backfills the expiry date for old artifacts' do
+ expect(old_artifact.reload.expire_at).to eq(nil)
+
+ perform
+
+ expect(old_artifact.reload.expire_at).to be_within(1.minute).of(Time.zone.local(2020, 5, 22, 0, 0, 0))
+ end
+
+ it 'backfills the expiry date for recent artifacts' do
+ expect(recent_artifact.reload.expire_at).to eq(nil)
+
+ perform
+
+ expect(recent_artifact.reload.expire_at).to be_within(1.minute).of(Time.zone.local(2021, 2, 22, 0, 0, 0))
+ end
+ end
+
+ it 'does not touch artifacts with expiry date' do
+ expect { perform }.not_to change { artifact_with_expiry.reload.expire_at }
+ end
+
+ it 'does not touch artifacts outside id range' do
+ expect { perform }.not_to change { artifact_outside_id_range.reload.expire_at }
+ end
+
+ it 'does not touch artifacts outside date range' do
+ expect { perform }.not_to change { artifact_outside_date_range.reload.expire_at }
+ end
+
+ private
+
+ def create_artifact!(**args)
+ table(:ci_job_artifacts).create!(**args, project_id: 1, job_id: 1, file_type: 1)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb b/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb
new file mode 100644
index 00000000000..110a1ff8a08
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJob do
+ let(:table_name) { :copy_primary_key_test }
+ let(:test_table) { table(table_name) }
+ let(:sub_batch_size) { 1000 }
+
+ before do
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ CREATE TABLE #{table_name}
+ (
+ id integer NOT NULL,
+ name character varying,
+ fk integer NOT NULL,
+ id_convert_to_bigint bigint DEFAULT 0 NOT NULL,
+ fk_convert_to_bigint bigint DEFAULT 0 NOT NULL,
+ name_convert_to_text text DEFAULT 'no name'
+ );
+ SQL
+
+ # Insert some data; the specific values don't matter
+ test_table.create!(id: 11, name: 'test1', fk: 1)
+ test_table.create!(id: 12, name: 'test2', fk: 2)
+ test_table.create!(id: 15, name: nil, fk: 3)
+ test_table.create!(id: 19, name: 'test4', fk: 4)
+ end
+
+ after do
+ # Make sure that the temp table we created is dropped (it is not removed by the database_cleaner)
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ DROP TABLE IF EXISTS #{table_name};
+ SQL
+ end
+
+ subject { described_class.new }
+
+ describe '#perform' do
+ let(:migration_class) { described_class.name }
+ let!(:job1) do
+ table(:background_migration_jobs).create!(
+ class_name: migration_class,
+ arguments: [1, 10, table_name, 'id', 'id', 'id_convert_to_bigint', sub_batch_size]
+ )
+ end
+
+ let!(:job2) do
+ table(:background_migration_jobs).create!(
+ class_name: migration_class,
+ arguments: [11, 20, table_name, 'id', 'id', 'id_convert_to_bigint', sub_batch_size]
+ )
+ end
+
+ it 'copies all primary keys in range' do
+ subject.perform(12, 15, table_name, 'id', 'id', 'id_convert_to_bigint', sub_batch_size)
+
+ expect(test_table.where('id = id_convert_to_bigint').pluck(:id)).to contain_exactly(12, 15)
+ expect(test_table.where(id_convert_to_bigint: 0).pluck(:id)).to contain_exactly(11, 19)
+ expect(test_table.all.count).to eq(4)
+ end
+
+ it 'copies all foreign keys in range' do
+ subject.perform(10, 14, table_name, 'id', 'fk', 'fk_convert_to_bigint', sub_batch_size)
+
+ expect(test_table.where('fk = fk_convert_to_bigint').pluck(:id)).to contain_exactly(11, 12)
+ expect(test_table.where(fk_convert_to_bigint: 0).pluck(:id)).to contain_exactly(15, 19)
+ expect(test_table.all.count).to eq(4)
+ end
+
+ it 'copies columns with NULLs' do
+ expect(test_table.where("name_convert_to_text = 'no name'").count).to eq(4)
+
+ subject.perform(10, 20, table_name, 'id', 'name', 'name_convert_to_text', sub_batch_size)
+
+ expect(test_table.where('name = name_convert_to_text').pluck(:id)).to contain_exactly(11, 12, 19)
+ expect(test_table.where('name is NULL and name_convert_to_text is NULL').pluck(:id)).to contain_exactly(15)
+ expect(test_table.where("name_convert_to_text = 'no name'").count).to eq(0)
+ end
+
+ it 'tracks completion with BackgroundMigrationJob' do
+ expect do
+ subject.perform(11, 20, table_name, 'id', 'id', 'id_convert_to_bigint', sub_batch_size)
+ end.to change { Gitlab::Database::BackgroundMigrationJob.succeeded.count }.from(0).to(1)
+
+ expect(job1.reload.status).to eq(0)
+ expect(job2.reload.status).to eq(1)
+ expect(test_table.where('id = id_convert_to_bigint').count).to eq(4)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/populate_finding_uuid_for_vulnerability_feedback_spec.rb b/spec/lib/gitlab/background_migration/populate_finding_uuid_for_vulnerability_feedback_spec.rb
new file mode 100644
index 00000000000..8e74935e127
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/populate_finding_uuid_for_vulnerability_feedback_spec.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::PopulateFindingUuidForVulnerabilityFeedback, schema: 20201211090634 do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:users) { table(:users) }
+ let(:scanners) { table(:vulnerability_scanners) }
+ let(:identifiers) { table(:vulnerability_identifiers) }
+ let(:findings) { table(:vulnerability_occurrences) }
+ let(:vulnerability_feedback) { table(:vulnerability_feedback) }
+
+ let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
+ let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') }
+ let(:user) { users.create!(username: 'john.doe', projects_limit: 5) }
+ let(:scanner) { scanners.create!(project_id: project.id, external_id: 'foo', name: 'bar') }
+ let(:identifier) { identifiers.create!(project_id: project.id, fingerprint: 'foo', external_type: 'bar', external_id: 'zoo', name: 'baz') }
+ let(:sast_report) { 0 }
+ let(:dependency_scanning_report) { 1 }
+ let(:dast_report) { 3 }
+ let(:secret_detection_report) { 4 }
+ let(:project_fingerprint) { Digest::SHA1.hexdigest(SecureRandom.uuid) }
+ let(:location_fingerprint_1) { Digest::SHA1.hexdigest(SecureRandom.uuid) }
+ let(:location_fingerprint_2) { Digest::SHA1.hexdigest(SecureRandom.uuid) }
+ let(:location_fingerprint_3) { Digest::SHA1.hexdigest(SecureRandom.uuid) }
+ let(:finding_1) { finding_creator.call(sast_report, location_fingerprint_1) }
+ let(:finding_2) { finding_creator.call(dast_report, location_fingerprint_2) }
+ let(:finding_3) { finding_creator.call(secret_detection_report, location_fingerprint_3) }
+ let(:uuid_1_components) { ['sast', identifier.fingerprint, location_fingerprint_1, project.id].join('-') }
+ let(:uuid_2_components) { ['dast', identifier.fingerprint, location_fingerprint_2, project.id].join('-') }
+ let(:uuid_3_components) { ['secret_detection', identifier.fingerprint, location_fingerprint_3, project.id].join('-') }
+ let(:expected_uuid_1) { Gitlab::UUID.v5(uuid_1_components) }
+ let(:expected_uuid_2) { Gitlab::UUID.v5(uuid_2_components) }
+ let(:expected_uuid_3) { Gitlab::UUID.v5(uuid_3_components) }
+ let(:finding_creator) do
+ -> (report_type, location_fingerprint) do
+ findings.create!(
+ project_id: project.id,
+ primary_identifier_id: identifier.id,
+ scanner_id: scanner.id,
+ report_type: report_type,
+ uuid: SecureRandom.uuid,
+ name: 'Foo',
+ location_fingerprint: Gitlab::Database::ShaAttribute.serialize(location_fingerprint),
+ project_fingerprint: Gitlab::Database::ShaAttribute.serialize(project_fingerprint),
+ metadata_version: '1',
+ severity: 0,
+ confidence: 5,
+ raw_metadata: '{}'
+ )
+ end
+ end
+
+ let(:feedback_creator) do
+ -> (category, project_fingerprint) do
+ vulnerability_feedback.create!(
+ project_id: project.id,
+ author_id: user.id,
+ feedback_type: 0,
+ category: category,
+ project_fingerprint: project_fingerprint
+ )
+ end
+ end
+
+ let!(:feedback_1) { feedback_creator.call(finding_1.report_type, project_fingerprint) }
+ let!(:feedback_2) { feedback_creator.call(finding_2.report_type, project_fingerprint) }
+ let!(:feedback_3) { feedback_creator.call(finding_3.report_type, project_fingerprint) }
+ let!(:feedback_4) { feedback_creator.call(finding_1.report_type, 'foo') }
+ let!(:feedback_5) { feedback_creator.call(dependency_scanning_report, project_fingerprint) }
+
+ subject(:populate_finding_uuids) { described_class.new.perform(feedback_1.id, feedback_5.id) }
+
+ before do
+ allow(Gitlab::BackgroundMigration::Logger).to receive(:info)
+ end
+
+ describe '#perform' do
+ it 'updates the `finding_uuid` attributes of the feedback records' do
+ expect { populate_finding_uuids }.to change { feedback_1.reload.finding_uuid }.from(nil).to(expected_uuid_1)
+ .and change { feedback_2.reload.finding_uuid }.from(nil).to(expected_uuid_2)
+ .and change { feedback_3.reload.finding_uuid }.from(nil).to(expected_uuid_3)
+ .and not_change { feedback_4.reload.finding_uuid }
+ .and not_change { feedback_5.reload.finding_uuid }
+
+ expect(Gitlab::BackgroundMigration::Logger).to have_received(:info).once
+ end
+
+ it 'preloads the finding and identifier records to prevent N+1 queries' do
+ # Load feedback records (1), load findings (2), load identifiers (3) and finally update feedback records one by one (6)
+ expect { populate_finding_uuids }.not_to exceed_query_limit(6)
+ end
+
+ context 'when setting the `finding_uuid` attribute of a feedback record fails' do
+ let(:expected_error) { RuntimeError.new }
+
+ before do
+ allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+
+ allow_next_found_instance_of(described_class::VulnerabilityFeedback) do |feedback|
+ allow(feedback).to receive(:update_column).and_raise(expected_error)
+ end
+ end
+
+ it 'captures the errors and does not crash entirely' do
+ expect { populate_finding_uuids }.not_to raise_error
+
+ expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_for_dev_exception).with(expected_error).exactly(3).times
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/remove_duplicate_services_spec.rb b/spec/lib/gitlab/background_migration/remove_duplicate_services_spec.rb
new file mode 100644
index 00000000000..391b27b28e6
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/remove_duplicate_services_spec.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::RemoveDuplicateServices, :migration, schema: 20201207165956 do
+ let_it_be(:users) { table(:users) }
+ let_it_be(:namespaces) { table(:namespaces) }
+ let_it_be(:projects) { table(:projects) }
+ let_it_be(:services) { table(:services) }
+
+ let_it_be(:alerts_service_data) { table(:alerts_service_data) }
+ let_it_be(:chat_names) { table(:chat_names) }
+ let_it_be(:issue_tracker_data) { table(:issue_tracker_data) }
+ let_it_be(:jira_tracker_data) { table(:jira_tracker_data) }
+ let_it_be(:open_project_tracker_data) { table(:open_project_tracker_data) }
+ let_it_be(:slack_integrations) { table(:slack_integrations) }
+ let_it_be(:web_hooks) { table(:web_hooks) }
+
+ let_it_be(:data_tables) do
+ [alerts_service_data, chat_names, issue_tracker_data, jira_tracker_data, open_project_tracker_data, slack_integrations, web_hooks]
+ end
+
+ let!(:user) { users.create!(id: 1, projects_limit: 100) }
+ let!(:namespace) { namespaces.create!(id: 1, name: 'group', path: 'group') }
+
+ # project without duplicate services
+ let!(:project1) { projects.create!(id: 1, namespace_id: namespace.id) }
+ let!(:service1) { services.create!(id: 1, project_id: project1.id, type: 'AsanaService') }
+ let!(:service2) { services.create!(id: 2, project_id: project1.id, type: 'JiraService') }
+ let!(:service3) { services.create!(id: 3, project_id: project1.id, type: 'SlackService') }
+
+ # project with duplicate services
+ let!(:project2) { projects.create!(id: 2, namespace_id: namespace.id) }
+ let!(:service4) { services.create!(id: 4, project_id: project2.id, type: 'AsanaService') }
+ let!(:service5) { services.create!(id: 5, project_id: project2.id, type: 'JiraService') }
+ let!(:service6) { services.create!(id: 6, project_id: project2.id, type: 'JiraService') }
+ let!(:service7) { services.create!(id: 7, project_id: project2.id, type: 'SlackService') }
+ let!(:service8) { services.create!(id: 8, project_id: project2.id, type: 'SlackService') }
+ let!(:service9) { services.create!(id: 9, project_id: project2.id, type: 'SlackService') }
+
+ # project with duplicate services and dependent records
+ let!(:project3) { projects.create!(id: 3, namespace_id: namespace.id) }
+ let!(:service10) { services.create!(id: 10, project_id: project3.id, type: 'AlertsService') }
+ let!(:service11) { services.create!(id: 11, project_id: project3.id, type: 'AlertsService') }
+ let!(:service12) { services.create!(id: 12, project_id: project3.id, type: 'SlashCommandsService') }
+ let!(:service13) { services.create!(id: 13, project_id: project3.id, type: 'SlashCommandsService') }
+ let!(:service14) { services.create!(id: 14, project_id: project3.id, type: 'IssueTrackerService') }
+ let!(:service15) { services.create!(id: 15, project_id: project3.id, type: 'IssueTrackerService') }
+ let!(:service16) { services.create!(id: 16, project_id: project3.id, type: 'JiraService') }
+ let!(:service17) { services.create!(id: 17, project_id: project3.id, type: 'JiraService') }
+ let!(:service18) { services.create!(id: 18, project_id: project3.id, type: 'OpenProjectService') }
+ let!(:service19) { services.create!(id: 19, project_id: project3.id, type: 'OpenProjectService') }
+ let!(:service20) { services.create!(id: 20, project_id: project3.id, type: 'SlackService') }
+ let!(:service21) { services.create!(id: 21, project_id: project3.id, type: 'SlackService') }
+ let!(:dependant_records) do
+ alerts_service_data.create!(id: 1, service_id: service10.id)
+ alerts_service_data.create!(id: 2, service_id: service11.id)
+ chat_names.create!(id: 1, service_id: service12.id, user_id: user.id, team_id: 'team1', chat_id: 'chat1')
+ chat_names.create!(id: 2, service_id: service13.id, user_id: user.id, team_id: 'team2', chat_id: 'chat2')
+ issue_tracker_data.create!(id: 1, service_id: service14.id)
+ issue_tracker_data.create!(id: 2, service_id: service15.id)
+ jira_tracker_data.create!(id: 1, service_id: service16.id)
+ jira_tracker_data.create!(id: 2, service_id: service17.id)
+ open_project_tracker_data.create!(id: 1, service_id: service18.id)
+ open_project_tracker_data.create!(id: 2, service_id: service19.id)
+ slack_integrations.create!(id: 1, service_id: service20.id, user_id: user.id, team_id: 'team1', team_name: 'team1', alias: 'alias1')
+ slack_integrations.create!(id: 2, service_id: service21.id, user_id: user.id, team_id: 'team2', team_name: 'team2', alias: 'alias2')
+ web_hooks.create!(id: 1, service_id: service20.id)
+ web_hooks.create!(id: 2, service_id: service21.id)
+ end
+
+ # project without services
+ let!(:project4) { projects.create!(id: 4, namespace_id: namespace.id) }
+
+ it 'removes duplicate services and dependent records' do
+ # Determine which services we expect to keep
+ expected_services = projects.pluck(:id).each_with_object({}) do |project_id, map|
+ project_services = services.where(project_id: project_id)
+ types = project_services.distinct.pluck(:type)
+
+ map[project_id] = types.map { |type| project_services.where(type: type).take!.id }
+ end
+
+ expect do
+ subject.perform(project2.id, project3.id)
+ end.to change { services.count }.from(21).to(12)
+
+ services1 = services.where(project_id: project1.id)
+ expect(services1.count).to be(3)
+ expect(services1.pluck(:type)).to contain_exactly('AsanaService', 'JiraService', 'SlackService')
+ expect(services1.pluck(:id)).to contain_exactly(*expected_services[project1.id])
+
+ services2 = services.where(project_id: project2.id)
+ expect(services2.count).to be(3)
+ expect(services2.pluck(:type)).to contain_exactly('AsanaService', 'JiraService', 'SlackService')
+ expect(services2.pluck(:id)).to contain_exactly(*expected_services[project2.id])
+
+ services3 = services.where(project_id: project3.id)
+ expect(services3.count).to be(6)
+ expect(services3.pluck(:type)).to contain_exactly('AlertsService', 'SlashCommandsService', 'IssueTrackerService', 'JiraService', 'OpenProjectService', 'SlackService')
+ expect(services3.pluck(:id)).to contain_exactly(*expected_services[project3.id])
+
+ kept_services = expected_services.values.flatten
+ data_tables.each do |table|
+ expect(table.count).to be(1)
+ expect(kept_services).to include(table.pluck(:service_id).first)
+ end
+ end
+
+ it 'does not delete services without duplicates' do
+ expect do
+ subject.perform(project1.id, project4.id)
+ end.not_to change { services.count }
+ end
+
+ it 'only deletes duplicate services for the current batch' do
+ expect do
+ subject.perform(project2.id)
+ end.to change { services.count }.by(-3)
+ end
+end
diff --git a/spec/lib/gitlab/checks/diff_check_spec.rb b/spec/lib/gitlab/checks/diff_check_spec.rb
index f4daafb1d0e..6b45b8d4628 100644
--- a/spec/lib/gitlab/checks/diff_check_spec.rb
+++ b/spec/lib/gitlab/checks/diff_check_spec.rb
@@ -6,96 +6,63 @@ RSpec.describe Gitlab::Checks::DiffCheck do
include_context 'change access checks context'
describe '#validate!' do
- let(:owner) { create(:user) }
-
- before do
- allow(project.repository).to receive(:new_commits).and_return(
- project.repository.commits_between('be93687618e4b132087f430a4d8fc3a609c9b77c', '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51')
- )
- end
-
- context 'with LFS not enabled' do
- before do
- allow(project).to receive(:lfs_enabled?).and_return(false)
- end
-
- it 'does not invoke :lfs_file_locks_validation' do
- expect(subject).not_to receive(:lfs_file_locks_validation)
+ context 'when commits is empty' do
+ it 'does not call find_changed_paths' do
+ expect(project.repository).not_to receive(:find_changed_paths)
subject.validate!
end
end
- context 'with LFS enabled' do
- let!(:lock) { create(:lfs_file_lock, user: owner, project: project, path: 'README') }
-
+ context 'when commits is not empty' do
before do
- allow(project).to receive(:lfs_enabled?).and_return(true)
+ allow(project.repository).to receive(:new_commits).and_return(
+ project.repository.commits_between('be93687618e4b132087f430a4d8fc3a609c9b77c', '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51')
+ )
end
- context 'when change is sent by a different user' do
- context 'when diff check with paths rpc feature flag is true' do
- it 'raises an error if the user is not allowed to update the file' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, "The path 'README' is locked in Git LFS by #{lock.user.name}")
- end
- end
+ context 'when deletion is true' do
+ let(:newrev) { Gitlab::Git::BLANK_SHA }
- context 'when diff check with paths rpc feature flag is false' do
- before do
- stub_feature_flags(diff_check_with_paths_changed_rpc: false)
- end
+ it 'does not call find_changed_paths' do
+ expect(project.repository).not_to receive(:find_changed_paths)
- it 'raises an error if the user is not allowed to update the file' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, "The path 'README' is locked in Git LFS by #{lock.user.name}")
- end
+ subject.validate!
end
end
- context 'when change is sent by the author of the lock' do
- let(:user) { owner }
-
- it "doesn't raise any error" do
- expect { subject.validate! }.not_to raise_error
+ context 'with LFS not enabled' do
+ before do
+ allow(project).to receive(:lfs_enabled?).and_return(false)
end
- end
- end
-
- context 'commit diff validations' do
- before do
- allow(subject).to receive(:validations_for_diff).and_return([lambda { |diff| return }])
-
- expect_any_instance_of(Commit).to receive(:raw_deltas).and_call_original
-
- stub_feature_flags(diff_check_with_paths_changed_rpc: false)
-
- subject.validate!
- end
- context 'when request store is inactive' do
- it 'are run for every commit' do
- expect_any_instance_of(Commit).to receive(:raw_deltas).and_call_original
+ it 'does not invoke :lfs_file_locks_validation' do
+ expect(subject).not_to receive(:lfs_file_locks_validation)
subject.validate!
end
end
- context 'when request store is active', :request_store do
- it 'are cached for every commit' do
- expect_any_instance_of(Commit).not_to receive(:raw_deltas)
+ context 'with LFS enabled' do
+ let(:owner) { create(:user) }
+ let!(:lock) { create(:lfs_file_lock, user: owner, project: project, path: 'README') }
- subject.validate!
+ before do
+ allow(project).to receive(:lfs_enabled?).and_return(true)
end
- it 'are run for not cached commits' do
- allow(project.repository).to receive(:new_commits).and_return(
- project.repository.commits_between('be93687618e4b132087f430a4d8fc3a609c9b77c', 'a5391128b0ef5d21df5dd23d98557f4ef12fae20')
- )
- change_access.instance_variable_set(:@commits, project.repository.new_commits)
+ context 'when change is sent by a different user' do
+ it 'raises an error if the user is not allowed to update the file' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, "The path 'README' is locked in Git LFS by #{lock.user.name}")
+ end
+ end
- expect(project.repository.new_commits.first).not_to receive(:raw_deltas).and_call_original
- expect(project.repository.new_commits.last).to receive(:raw_deltas).and_call_original
+ context 'when change is sent by the author of the lock' do
+ let(:user) { owner }
- subject.validate!
+ it "doesn't raise any error" do
+ expect { subject.validate! }.not_to raise_error
+ end
end
end
end
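
[Editor's note] The restructured examples above hinge on two early exits before any path validation runs. A minimal sketch of that guard, assuming the semantics the specs describe (this is not the DiffCheck implementation; BLANK_SHA stands in for Gitlab::Git::BLANK_SHA):

  BLANK_SHA = ('0' * 40) # stand-in for Gitlab::Git::BLANK_SHA

  # Skip path-based checks (including LFS lock validation) when there is
  # nothing to diff: no new commits, or a branch deletion.
  def run_path_validations?(commits, newrev)
    return false if commits.empty?
    return false if newrev == BLANK_SHA

    true
  end
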
diff --git a/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb b/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb
index 028dcd3e1e6..0e6d5b6c311 100644
--- a/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb
@@ -36,6 +36,14 @@ RSpec.describe Gitlab::Ci::Config::Entry::Artifacts do
expect(entry.value).to eq config
end
end
+
+ context "when value includes 'public' keyword" do
+ let(:config) { { paths: %w[results.txt], public: false } }
+
+ it 'returns general artifact and report-type artifacts configuration' do
+ expect(entry.value).to eq config
+ end
+ end
end
context 'when entry value is not correct' do
@@ -67,6 +75,15 @@ RSpec.describe Gitlab::Ci::Config::Entry::Artifacts do
end
end
+ context "when 'public' is not a boolean" do
+ let(:config) { { paths: %w[results.txt], public: 'false' } }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include 'artifacts public should be a boolean value'
+ end
+ end
+
context "when 'expose_as' is not a string" do
let(:config) { { paths: %w[results.txt], expose_as: 1 } }
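
[Editor's note] The new examples cover the artifacts `public` keyword and its boolean validation. A hedged illustration of where the valid hash comes from: YAML's bare `false` parses to a Ruby boolean, while a quoted 'false' yields a String and would trip the "should be a boolean value" error (job name and script are made up):

  require 'yaml'

  yaml = <<~YAML
    job:
      script: echo
      artifacts:
        paths: [results.txt]
        public: false
  YAML

  YAML.safe_load(yaml).dig('job', 'artifacts', 'public') # => false (a boolean, not a String)
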
diff --git a/spec/lib/gitlab/ci/config/entry/variables_spec.rb b/spec/lib/gitlab/ci/config/entry/variables_spec.rb
index 426a38e2ef7..78d37e228df 100644
--- a/spec/lib/gitlab/ci/config/entry/variables_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/variables_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::Entry::Variables do
let(:metadata) { {} }
- subject { described_class.new(config, metadata) }
+ subject { described_class.new(config, **metadata) }
shared_examples 'valid config' do
describe '#value' do
diff --git a/spec/lib/gitlab/ci/config/external/file/local_spec.rb b/spec/lib/gitlab/ci/config/external/file/local_spec.rb
index fdd29afe2d6..7e39fae7b9b 100644
--- a/spec/lib/gitlab/ci/config/external/file/local_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/local_spec.rb
@@ -16,7 +16,8 @@ RSpec.describe Gitlab::Ci::Config::External::File::Local do
project: project,
sha: sha,
user: user,
- parent_pipeline: parent_pipeline
+ parent_pipeline: parent_pipeline,
+ variables: project.predefined_variables.to_runner_variables
}
end
@@ -131,7 +132,8 @@ RSpec.describe Gitlab::Ci::Config::External::File::Local do
user: user,
project: project,
sha: sha,
- parent_pipeline: parent_pipeline)
+ parent_pipeline: parent_pipeline,
+ variables: project.predefined_variables.to_runner_variables)
end
end
diff --git a/spec/lib/gitlab/ci/config/external/file/project_spec.rb b/spec/lib/gitlab/ci/config/external/file/project_spec.rb
index a5e4e27df6f..0e8851ba915 100644
--- a/spec/lib/gitlab/ci/config/external/file/project_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/project_spec.rb
@@ -16,7 +16,8 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do
project: context_project,
sha: '12345',
user: context_user,
- parent_pipeline: parent_pipeline
+ parent_pipeline: parent_pipeline,
+ variables: project.predefined_variables.to_runner_variables
}
end
@@ -165,7 +166,8 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do
user: user,
project: project,
sha: project.commit('master').id,
- parent_pipeline: parent_pipeline)
+ parent_pipeline: parent_pipeline,
+ variables: project.predefined_variables.to_runner_variables)
end
end
diff --git a/spec/lib/gitlab/ci/config/external/mapper_spec.rb b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
index 7ad57827e30..4fdaaca8316 100644
--- a/spec/lib/gitlab/ci/config/external/mapper_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
let(:local_file) { '/lib/gitlab/ci/templates/non-existent-file.yml' }
let(:remote_url) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.gitlab-ci-1.yml' }
let(:template_file) { 'Auto-DevOps.gitlab-ci.yml' }
- let(:context_params) { { project: project, sha: '123456', user: user } }
+ let(:context_params) { { project: project, sha: '123456', user: user, variables: project.predefined_variables.to_runner_variables } }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
let(:file_content) do
@@ -124,17 +124,6 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
an_instance_of(Gitlab::Ci::Config::External::File::Project),
an_instance_of(Gitlab::Ci::Config::External::File::Project))
end
-
- context 'when FF ci_include_multiple_files_from_project is disabled' do
- before do
- stub_feature_flags(ci_include_multiple_files_from_project: false)
- end
-
- it 'returns a File instance' do
- expect(subject).to contain_exactly(
- an_instance_of(Gitlab::Ci::Config::External::File::Project))
- end
- end
end
end
@@ -236,5 +225,118 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
end
end
end
+
+ context "when 'include' section uses project variable" do
+ let(:full_local_file_path) { '$CI_PROJECT_PATH' + local_file }
+
+ context 'when local file is included as a single string' do
+ let(:values) do
+ { include: full_local_file_path }
+ end
+
+ it 'expands the variable', :aggregate_failures do
+ expect(subject[0].location).to eq(project.full_path + local_file)
+ expect(subject).to contain_exactly(an_instance_of(Gitlab::Ci::Config::External::File::Local))
+ end
+ end
+
+ context 'when remote file is included as a single string' do
+ let(:remote_url) { "#{Gitlab.config.gitlab.url}/radio/.gitlab-ci.yml" }
+
+ let(:values) do
+ { include: '$CI_SERVER_URL/radio/.gitlab-ci.yml' }
+ end
+
+ it 'expands the variable', :aggregate_failures do
+ expect(subject[0].location).to eq(remote_url)
+ expect(subject).to contain_exactly(an_instance_of(Gitlab::Ci::Config::External::File::Remote))
+ end
+ end
+
+ context 'defined as an array' do
+ let(:values) do
+ { include: [full_local_file_path, remote_url],
+ image: 'ruby:2.7' }
+ end
+
+ it 'expands the variable' do
+ expect(subject[0].location).to eq(project.full_path + local_file)
+ expect(subject[1].location).to eq(remote_url)
+ end
+ end
+
+ context 'defined as an array of hashes' do
+ let(:values) do
+ { include: [{ local: full_local_file_path }, { remote: remote_url }],
+ image: 'ruby:2.7' }
+ end
+
+ it 'expands the variable' do
+ expect(subject[0].location).to eq(project.full_path + local_file)
+ expect(subject[1].location).to eq(remote_url)
+ end
+ end
+
+ context 'local file hash' do
+ let(:values) do
+ { include: { 'local' => full_local_file_path } }
+ end
+
+ it 'expands the variable' do
+ expect(subject[0].location).to eq(project.full_path + local_file)
+ end
+ end
+
+ context 'project name' do
+ let(:values) do
+ { include: { project: '$CI_PROJECT_PATH', file: local_file },
+ image: 'ruby:2.7' }
+ end
+
+ it 'expands the variable', :aggregate_failures do
+ expect(subject[0].project_name).to eq(project.full_path)
+ expect(subject).to contain_exactly(an_instance_of(Gitlab::Ci::Config::External::File::Project))
+ end
+ end
+
+ context 'with multiple files' do
+ let(:values) do
+ { include: { project: project.full_path, file: [full_local_file_path, 'another_file_path.yml'] },
+ image: 'ruby:2.7' }
+ end
+
+ it 'expands the variable' do
+ expect(subject[0].location).to eq(project.full_path + local_file)
+ expect(subject[1].location).to eq('another_file_path.yml')
+ end
+ end
+
+ context 'when include variable has an unsupported type for variable expansion' do
+ let(:values) do
+ { include: { project: project.id, file: local_file },
+ image: 'ruby:2.7' }
+ end
+
+ it 'does not invoke expansion for the variable', :aggregate_failures do
+ expect(ExpandVariables).not_to receive(:expand).with(project.id, context_params[:variables])
+
+ expect { subject }.to raise_error(described_class::AmbigiousSpecificationError)
+ end
+ end
+
+ context 'when feature flag is turned off' do
+ let(:values) do
+ { include: full_local_file_path }
+ end
+
+ before do
+ stub_feature_flags(variables_in_include_section_ci: false)
+ end
+
+ it 'does not expand the variables' do
+ expect(subject[0].location).to eq('$CI_PROJECT_PATH' + local_file)
+ end
+ end
+ end
end
end
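
[Editor's note] The added mapper examples exercise expanding predefined variables inside include: locations. A minimal standalone sketch of that expansion, assuming an ExpandVariables-style expand(value, variables) helper; the regexp and variable list here are illustrative, not GitLab's implementation:

  variables = [
    { key: 'CI_PROJECT_PATH', value: 'group/project' },
    { key: 'CI_SERVER_URL',   value: 'https://gitlab.example.com' }
  ]

  # Replace ${VAR} and $VAR references with their values; unknown variables
  # collapse to an empty string in this simplified version.
  def expand(value, variables)
    value.gsub(/\$\{(\w+)\}|\$(\w+)/) do
      name = Regexp.last_match(1) || Regexp.last_match(2)
      variables.find { |v| v[:key] == name }&.fetch(:value).to_s
    end
  end

  expand('$CI_PROJECT_PATH/lib/gitlab/ci/templates/non-existent-file.yml', variables)
  # => "group/project/lib/gitlab/ci/templates/non-existent-file.yml"
  expand('$CI_SERVER_URL/radio/.gitlab-ci.yml', variables)
  # => "https://gitlab.example.com/radio/.gitlab-ci.yml"
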
diff --git a/spec/lib/gitlab/ci/config/external/processor_spec.rb b/spec/lib/gitlab/ci/config/external/processor_spec.rb
index 150a2ec2929..d2d7116bb12 100644
--- a/spec/lib/gitlab/ci/config/external/processor_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/processor_spec.rb
@@ -365,19 +365,6 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
output = processor.perform
expect(output.keys).to match_array([:image, :my_build, :my_test])
end
-
- context 'when FF ci_include_multiple_files_from_project is disabled' do
- before do
- stub_feature_flags(ci_include_multiple_files_from_project: false)
- end
-
- it 'raises an error' do
- expect { processor.perform }.to raise_error(
- described_class::IncludeError,
- 'Included file `["/templates/my-build.yml", "/templates/my-test.yml"]` needs to be a string'
- )
- end
- end
end
end
end
diff --git a/spec/lib/gitlab/ci/config_spec.rb b/spec/lib/gitlab/ci/config_spec.rb
index b5a0f0e3fd7..dc03d2f80fe 100644
--- a/spec/lib/gitlab/ci/config_spec.rb
+++ b/spec/lib/gitlab/ci/config_spec.rb
@@ -82,6 +82,30 @@ RSpec.describe Gitlab::Ci::Config do
end
end
+ describe '#included_templates' do
+ let(:yml) do
+ <<-EOS
+ include:
+ - template: Jobs/Deploy.gitlab-ci.yml
+ - template: Jobs/Build.gitlab-ci.yml
+ - remote: https://example.com/gitlab-ci.yml
+ EOS
+ end
+
+ before do
+ stub_request(:get, 'https://example.com/gitlab-ci.yml').to_return(status: 200, body: <<-EOS)
+ test:
+ script: [ 'echo hello world' ]
+ EOS
+ end
+
+ subject(:included_templates) do
+ config.included_templates
+ end
+
+ it { is_expected.to contain_exactly('Jobs/Deploy.gitlab-ci.yml', 'Jobs/Build.gitlab-ci.yml') }
+ end
+
context 'when using extendable hash' do
let(:yml) do
<<-EOS
diff --git a/spec/lib/gitlab/ci/lint_spec.rb b/spec/lib/gitlab/ci/lint_spec.rb
index c67f8464123..67324c09d86 100644
--- a/spec/lib/gitlab/ci/lint_spec.rb
+++ b/spec/lib/gitlab/ci/lint_spec.rb
@@ -247,7 +247,7 @@ RSpec.describe Gitlab::Ci::Lint do
include_context 'advanced validations' do
it 'runs advanced logical validations' do
expect(subject).not_to be_valid
- expect(subject.errors).to eq(["test: needs 'build'"])
+ expect(subject.errors).to eq(["'test' job needs 'build' job, but it was not added to the pipeline"])
end
end
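
[Editor's note] For reference, a configuration that produces the reworded needs error asserted above might look like the following. This is a sketch only; the lint call is the API already used in this spec and is left commented out because it needs a project and user:

  config = <<~YAML
    test:
      script: echo ok
      needs: [build]   # 'build' is never defined in this pipeline
  YAML

  # Gitlab::Ci::Lint.new(project: project, current_user: user).validate(config).errors
  # => ["'test' job needs 'build' job, but it was not added to the pipeline"]
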
diff --git a/spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb b/spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb
index 2313378d1e9..546de2bee5c 100644
--- a/spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb
@@ -224,6 +224,12 @@ RSpec.describe Gitlab::Ci::Parsers::Coverage::Cobertura do
it_behaves_like 'ignoring sources, project_path, and worktree_paths'
end
+ context 'when there is an empty <sources>' do
+ let(:sources_xml) { '<sources />' }
+
+ it_behaves_like 'ignoring sources, project_path, and worktree_paths'
+ end
+
context 'when there is a <sources>' do
context 'and has a single source with a pattern for Go projects' do
let(:project_path) { 'local/go' } # Make sure we're not making false positives
diff --git a/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
index 6da565a2bf6..20406acb658 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Chain::Build do
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:project, reload: true) { create(:project, :repository) }
let_it_be(:user) { create(:user, developer_projects: [project]) }
let(:pipeline) { Ci::Pipeline.new }
@@ -29,29 +29,96 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Build do
let(:step) { described_class.new(pipeline, command) }
- before do
- stub_ci_pipeline_yaml_file(gitlab_ci_yaml)
+ shared_examples 'builds pipeline' do
+ it 'builds a pipeline with the expected attributes' do
+ step.perform!
+
+ expect(pipeline.sha).not_to be_empty
+ expect(pipeline.sha).to eq project.commit.id
+ expect(pipeline.ref).to eq 'master'
+ expect(pipeline.tag).to be false
+ expect(pipeline.user).to eq user
+ expect(pipeline.project).to eq project
+ end
end
- it 'never breaks the chain' do
- step.perform!
+ shared_examples 'breaks the chain' do
+ it 'returns true' do
+ step.perform!
+
+ expect(step.break?).to be true
+ end
+ end
+
+ shared_examples 'does not break the chain' do
+ it 'returns false' do
+ step.perform!
+
+ expect(step.break?).to be false
+ end
+ end
- expect(step.break?).to be false
+ before do
+ stub_ci_pipeline_yaml_file(gitlab_ci_yaml)
end
- it 'fills pipeline object with data' do
+ it_behaves_like 'does not break the chain'
+ it_behaves_like 'builds pipeline'
+
+ it 'sets pipeline variables' do
step.perform!
- expect(pipeline.sha).not_to be_empty
- expect(pipeline.sha).to eq project.commit.id
- expect(pipeline.ref).to eq 'master'
- expect(pipeline.tag).to be false
- expect(pipeline.user).to eq user
- expect(pipeline.project).to eq project
expect(pipeline.variables.map { |var| var.slice(:key, :secret_value) })
.to eq variables_attributes.map(&:with_indifferent_access)
end
+ context 'when project setting restrict_user_defined_variables is enabled' do
+ before do
+ project.update!(restrict_user_defined_variables: true)
+ end
+
+ context 'when user is developer' do
+ it_behaves_like 'breaks the chain'
+ it_behaves_like 'builds pipeline'
+
+ it 'returns an error on variables_attributes', :aggregate_failures do
+ step.perform!
+
+ expect(pipeline.errors.full_messages).to eq(['Insufficient permissions to set pipeline variables'])
+ expect(pipeline.variables).to be_empty
+ end
+
+ context 'when variables_attributes is not specified' do
+ let(:variables_attributes) { nil }
+
+ it_behaves_like 'does not break the chain'
+ it_behaves_like 'builds pipeline'
+
+ it 'assigns empty variables' do
+ step.perform!
+
+ expect(pipeline.variables).to be_empty
+ end
+ end
+ end
+
+ context 'when user is maintainer' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it_behaves_like 'does not break the chain'
+ it_behaves_like 'builds pipeline'
+
+ it 'assigns variables_attributes' do
+ step.perform!
+
+ expect(pipeline.variables.map { |var| var.slice(:key, :secret_value) })
+ .to eq variables_attributes.map(&:with_indifferent_access)
+ end
+ end
+ end
+
it 'returns a valid pipeline' do
step.perform!
@@ -157,4 +224,25 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Build do
expect(pipeline.target_sha).to eq(external_pull_request.target_sha)
end
end
+
+ context 'when keep_latest_artifact is set' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:keep_latest_artifact, :locking_result) do
+ true | 'artifacts_locked'
+ false | 'unlocked'
+ end
+
+ with_them do
+ before do
+ project.update!(ci_keep_latest_artifact: keep_latest_artifact)
+ end
+
+ it 'builds a pipeline with appropriate locked value' do
+ step.perform!
+
+ expect(pipeline.locked).to eq(locking_result)
+ end
+ end
+ end
end
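
[Editor's note] The new examples revolve around the restrict_user_defined_variables project setting. A hedged, self-contained sketch of the decision they exercise; all inputs are plain values and the maintainer flag stands in for the real permission model:

  def pipeline_variable_error(restrict_user_defined_variables:, maintainer:, variables_attributes:)
    return nil if variables_attributes.to_a.empty?   # nothing user-supplied
    return nil unless restrict_user_defined_variables
    return nil if maintainer                         # maintainers may set variables

    'Insufficient permissions to set pipeline variables'
  end

  pipeline_variable_error(restrict_user_defined_variables: true, maintainer: false,
                          variables_attributes: [{ key: 'VAR', secret_value: 'x' }])
  # => "Insufficient permissions to set pipeline variables"
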
diff --git a/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
index bc2012e83bd..9ca5aeeea58 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
@@ -295,4 +295,30 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Command do
it { is_expected.to eq(false) }
end
end
+
+ describe '#creates_child_pipeline?' do
+ let(:command) { described_class.new(bridge: bridge) }
+
+ subject { command.creates_child_pipeline? }
+
+ context 'when bridge is present' do
+ context 'when bridge triggers a child pipeline' do
+ let(:bridge) { double(:bridge, triggers_child_pipeline?: true) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when bridge triggers a multi-project pipeline' do
+ let(:bridge) { double(:bridge, triggers_child_pipeline?: false) }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'when bridge is not present' do
+ let(:bridge) { nil }
+
+ it { is_expected.to be_falsey }
+ end
+ end
end
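
[Editor's note] A one-line reading of the predicate added above, matching the doubles used in these examples (a sketch, not the Command implementation): a command creates a child pipeline only when it carries a bridge that triggers a child pipeline, so multi-project bridges and a missing bridge both return false.

  def creates_child_pipeline?(bridge)
    !!bridge&.triggers_child_pipeline?
  end

  creates_child_pipeline?(nil) # => false
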
diff --git a/spec/lib/gitlab/ci/pipeline/chain/seed_block_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/seed_block_spec.rb
index 85c8e20767f..fabfbd779f3 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/seed_block_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/seed_block_spec.rb
@@ -51,18 +51,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::SeedBlock do
expect(pipeline.variables.size).to eq(1)
end
-
- context 'when FF ci_seed_block_run_before_workflow_rules is disabled' do
- before do
- stub_feature_flags(ci_seed_block_run_before_workflow_rules: false)
- end
-
- it 'does not execute the block' do
- run_chain
-
- expect(pipeline.variables.size).to eq(0)
- end
- end
end
context 'when the seeds_block tries to save the pipeline' do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
index 0ce8b80902e..80013cab6ee 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
describe '#perform!' do
before do
stub_ci_pipeline_yaml_file(YAML.dump(config))
+ run_chain
end
let(:config) do
@@ -36,20 +37,14 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
end
it 'allocates next IID' do
- run_chain
-
expect(pipeline.iid).to be_present
end
it 'ensures ci_ref' do
- run_chain
-
expect(pipeline.ci_ref).to be_present
end
it 'sets the seeds in the command object' do
- run_chain
-
expect(command.pipeline_seed).to be_a(Gitlab::Ci::Pipeline::Seed::Pipeline)
expect(command.pipeline_seed.size).to eq 1
end
@@ -64,8 +59,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
end
it 'correctly fabricates stages and builds' do
- run_chain
-
seed = command.pipeline_seed
expect(seed.stages.size).to eq 2
@@ -91,8 +84,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
end
it 'returns pipeline seed with jobs only assigned to master' do
- run_chain
-
seed = command.pipeline_seed
expect(seed.size).to eq 1
@@ -112,8 +103,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
end
it 'returns pipeline seed with jobs only assigned to schedules' do
- run_chain
-
seed = command.pipeline_seed
expect(seed.size).to eq 1
@@ -141,8 +130,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
let(:pipeline) { build(:ci_pipeline, project: project) }
it 'returns seeds for kubernetes dependent job' do
- run_chain
-
seed = command.pipeline_seed
expect(seed.size).to eq 2
@@ -154,8 +141,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
context 'when kubernetes is not active' do
it 'does not return seeds for kubernetes dependent job' do
- run_chain
-
seed = command.pipeline_seed
expect(seed.size).to eq 1
@@ -173,8 +158,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
end
it 'returns stage seeds only when variables expression is truthy' do
- run_chain
-
seed = command.pipeline_seed
expect(seed.size).to eq 1
@@ -187,24 +170,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
->(pipeline) { pipeline.variables.build(key: 'VAR', value: '123') }
end
- context 'when FF ci_seed_block_run_before_workflow_rules is enabled' do
- it 'does not execute the block' do
- run_chain
-
- expect(pipeline.variables.size).to eq(0)
- end
- end
-
- context 'when FF ci_seed_block_run_before_workflow_rules is disabled' do
- before do
- stub_feature_flags(ci_seed_block_run_before_workflow_rules: false)
- end
-
- it 'executes the block' do
- run_chain
-
- expect(pipeline.variables.size).to eq(1)
- end
+ it 'does not execute the block' do
+ expect(pipeline.variables.size).to eq(0)
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/template_usage_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/template_usage_spec.rb
new file mode 100644
index 00000000000..3616461d94f
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/chain/template_usage_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Pipeline::Chain::TemplateUsage do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ let(:command) do
+ Gitlab::Ci::Pipeline::Chain::Command.new(project: project, current_user: user)
+ end
+
+ let(:step) { described_class.new(pipeline, command) }
+
+ describe '#perform!' do
+ subject(:perform) { step.perform! }
+
+ it 'tracks the included templates' do
+ expect(command).to(
+ receive(:yaml_processor_result)
+ .and_return(
+ double(included_templates: %w(Template-1 Template-2))
+ )
+ )
+
+ %w(Template-1 Template-2).each do |expected_template|
+ expect(Gitlab::UsageDataCounters::CiTemplateUniqueCounter).to(
+ receive(:track_unique_project_event)
+ .with(project_id: project.id, template: expected_template)
+ )
+ end
+
+ perform
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index bc10e94c81d..cf020fc343c 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -966,7 +966,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
it "returns an error" do
expect(subject.errors).to contain_exactly(
- "rspec: needs 'build'")
+ "'rspec' job needs 'build' job, but it was not added to the pipeline")
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb
index 1790388da03..860b07647bd 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb
@@ -62,7 +62,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Pipeline do
needs_attributes: [{ name: 'non-existent', artifacts: true }]
}
- expect(seed.errors).to contain_exactly("invalid_job: needs 'non-existent'")
+ expect(seed.errors).to contain_exactly(
+ "'invalid_job' job needs 'non-existent' job, but it was not added to the pipeline")
end
end
end
diff --git a/spec/lib/gitlab/ci/reports/test_failure_history_spec.rb b/spec/lib/gitlab/ci/reports/test_failure_history_spec.rb
index 8df34eddffd..831bc5e9f37 100644
--- a/spec/lib/gitlab/ci/reports/test_failure_history_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_failure_history_spec.rb
@@ -28,18 +28,5 @@ RSpec.describe Gitlab::Ci::Reports::TestFailureHistory, :aggregate_failures do
expect(failed_rspec.recent_failures).to eq(count: 2, base_branch: 'master')
expect(failed_java.recent_failures).to eq(count: 1, base_branch: 'master')
end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(test_failure_history: false)
- end
-
- it 'does not set recent failures' do
- load_history
-
- expect(failed_rspec.recent_failures).to be_nil
- expect(failed_java.recent_failures).to be_nil
- end
- end
end
end
diff --git a/spec/lib/gitlab/ci/status/group/factory_spec.rb b/spec/lib/gitlab/ci/status/group/factory_spec.rb
index 6267b26aa78..c67c7ff8271 100644
--- a/spec/lib/gitlab/ci/status/group/factory_spec.rb
+++ b/spec/lib/gitlab/ci/status/group/factory_spec.rb
@@ -12,4 +12,9 @@ RSpec.describe Gitlab::Ci::Status::Group::Factory do
expect(described_class.common_helpers)
.to eq Gitlab::Ci::Status::Group::Common
end
+
+ it 'exposes extended statuses' do
+ expect(described_class.extended_statuses)
+ .to eq([[Gitlab::Ci::Status::SuccessWarning]])
+ end
end
diff --git a/spec/lib/gitlab/ci/syntax_templates_spec.rb b/spec/lib/gitlab/ci/syntax_templates_spec.rb
new file mode 100644
index 00000000000..ce3169e17ec
--- /dev/null
+++ b/spec/lib/gitlab/ci/syntax_templates_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'ci/syntax_templates' do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let(:lint) { Gitlab::Ci::Lint.new(project: project, current_user: user) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ subject(:lint_result) { lint.validate(content) }
+
+ Dir.glob('lib/gitlab/ci/syntax_templates/**/*.yml').each do |template|
+ describe template do
+ let(:content) { File.read(template) }
+
+ it 'validates the template' do
+ expect(lint_result).to be_valid, "got errors: #{lint_result.errors.join(', ')}"
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/templates/5_minute_production_app_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/5_minute_production_app_ci_yaml_spec.rb
new file mode 100644
index 00000000000..6bc8e261640
--- /dev/null
+++ b/spec/lib/gitlab/ci/templates/5_minute_production_app_ci_yaml_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe '5-Minute-Production-App.gitlab-ci.yml' do
+ subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('5-Minute-Production-App') }
+
+ describe 'the created pipeline' do
+ let_it_be(:project) { create(:project, :auto_devops, :custom_repo, files: { 'README.md' => '' }) }
+
+ let(:user) { project.owner }
+ let(:default_branch) { 'master' }
+ let(:pipeline_branch) { default_branch }
+ let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
+ let(:pipeline) { service.execute!(:push) }
+ let(:build_names) { pipeline.builds.pluck(:name) }
+
+ before do
+ stub_ci_pipeline_yaml_file(template.content)
+ end
+
+ it 'creates only build job' do
+ expect(build_names).to match_array('build')
+ end
+
+ context 'when AWS variables are set' do
+ before do
+ create(:ci_variable, project: project, key: 'AWS_ACCESS_KEY_ID', value: 'AKIAIOSFODNN7EXAMPLE')
+ create(:ci_variable, project: project, key: 'AWS_SECRET_ACCESS_KEY', value: 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY')
+ create(:ci_variable, project: project, key: 'AWS_DEFAULT_REGION', value: 'us-west-2')
+ end
+
+ it 'creates all jobs' do
+ expect(build_names).to match_array(%w(build terraform_apply deploy terraform_destroy))
+ end
+
+ context 'pipeline branch is protected' do
+ before do
+ create(:protected_branch, project: project, name: pipeline_branch)
+ project.reload
+ end
+
+ it 'does not create a destroy job' do
+ expect(build_names).to match_array(%w(build terraform_apply deploy))
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb
index 4be92e8608e..653b3be0b2a 100644
--- a/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb
@@ -6,10 +6,10 @@ RSpec.describe 'Deploy-ECS.gitlab-ci.yml' do
subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('AWS/Deploy-ECS') }
describe 'the created pipeline' do
- let_it_be(:user) { create(:admin) }
let(:default_branch) { 'master' }
let(:pipeline_branch) { default_branch }
let(:project) { create(:project, :auto_devops, :custom_repo, files: { 'README.md' => '' }) }
+ let(:user) { project.owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
let(:pipeline) { service.execute!(:push) }
let(:build_names) { pipeline.builds.pluck(:name) }
diff --git a/spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb
index 4f8faa5ddb1..1f278048ad5 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb
@@ -6,8 +6,8 @@ RSpec.describe 'Jobs/Build.gitlab-ci.yml' do
subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Jobs/Build') }
describe 'the created pipeline' do
- let_it_be(:user) { create(:admin) }
let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { project.owner }
let(:default_branch) { 'master' }
let(:pipeline_ref) { default_branch }
diff --git a/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb
index e685ad3b46e..0a76de82421 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb
@@ -6,8 +6,8 @@ RSpec.describe 'Jobs/Code-Quality.gitlab-ci.yml' do
subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Jobs/Code-Quality') }
describe 'the created pipeline' do
- let_it_be(:user) { create(:admin) }
let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { project.owner }
let(:default_branch) { 'master' }
let(:pipeline_ref) { default_branch }
diff --git a/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb
index ea9bd5bd02c..25c88c161ea 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb
@@ -27,8 +27,8 @@ RSpec.describe 'Jobs/Deploy.gitlab-ci.yml' do
end
describe 'the created pipeline' do
- let(:user) { create(:admin) }
let(:project) { create(:project, :repository) }
+ let(:user) { project.owner }
let(:default_branch) { 'master' }
let(:pipeline_ref) { default_branch }
diff --git a/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb
index f475785be98..b64959a9917 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb
@@ -6,8 +6,8 @@ RSpec.describe 'Jobs/Test.gitlab-ci.yml' do
subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Jobs/Test') }
describe 'the created pipeline' do
- let_it_be(:user) { create(:admin) }
let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { project.owner }
let(:default_branch) { 'master' }
let(:pipeline_ref) { default_branch }
diff --git a/spec/lib/gitlab/ci/templates/Terraform/base_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Terraform/base_gitlab_ci_yaml_spec.rb
index 8df739d9245..0811c07e896 100644
--- a/spec/lib/gitlab/ci/templates/Terraform/base_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Terraform/base_gitlab_ci_yaml_spec.rb
@@ -6,10 +6,10 @@ RSpec.describe 'Terraform/Base.latest.gitlab-ci.yml' do
subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Terraform/Base.latest') }
describe 'the created pipeline' do
- let(:user) { create(:admin) }
let(:default_branch) { 'master' }
let(:pipeline_branch) { default_branch }
let(:project) { create(:project, :custom_repo, files: { 'README.md' => '' }) }
+ let(:user) { project.owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
let(:pipeline) { service.execute!(:push) }
let(:build_names) { pipeline.builds.pluck(:name) }
diff --git a/spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb
index 9711df55226..03fa45fe0a1 100644
--- a/spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb
@@ -19,8 +19,8 @@ RSpec.describe 'Verify/Load-Performance-Testing.gitlab-ci.yml' do
end
describe 'the created pipeline' do
- let(:user) { create(:admin) }
let(:project) { create(:project, :repository) }
+ let(:user) { project.owner }
let(:default_branch) { 'master' }
let(:pipeline_ref) { default_branch }
diff --git a/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
index 793df55f45d..f9d6fe24e70 100644
--- a/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
@@ -6,10 +6,10 @@ RSpec.describe 'Auto-DevOps.gitlab-ci.yml' do
subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps') }
describe 'the created pipeline' do
- let(:user) { create(:admin) }
let(:default_branch) { 'master' }
let(:pipeline_branch) { default_branch }
let(:project) { create(:project, :auto_devops, :custom_repo, files: { 'README.md' => '' }) }
+ let(:user) { project.owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
let(:pipeline) { service.execute!(:push) }
let(:build_names) { pipeline.builds.pluck(:name) }
@@ -232,8 +232,8 @@ RSpec.describe 'Auto-DevOps.gitlab-ci.yml' do
end
with_them do
- let(:user) { create(:admin) }
let(:project) { create(:project, :custom_repo, files: files) }
+ let(:user) { project.owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: 'master' ) }
let(:pipeline) { service.execute(:push) }
let(:build_names) { pipeline.builds.pluck(:name) }
diff --git a/spec/lib/gitlab/ci/templates/flutter_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/flutter_gitlab_ci_yaml_spec.rb
new file mode 100644
index 00000000000..4e5fe622648
--- /dev/null
+++ b/spec/lib/gitlab/ci/templates/flutter_gitlab_ci_yaml_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Flutter.gitlab-ci.yml' do
+ subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Flutter') }
+
+ describe 'the created pipeline' do
+ let(:pipeline_branch) { 'master' }
+ let(:project) { create(:project, :custom_repo, files: { 'README.md' => '' }) }
+ let(:user) { project.owner }
+ let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
+ let(:pipeline) { service.execute!(:push) }
+ let(:build_names) { pipeline.builds.pluck(:name) }
+
+ before do
+ stub_ci_pipeline_yaml_file(template.content)
+ allow(Ci::BuildScheduleWorker).to receive(:perform).and_return(true)
+ end
+
+ it 'creates test and code_quality jobs' do
+ expect(build_names).to include('test', 'code_quality')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/templates/npm_spec.rb b/spec/lib/gitlab/ci/templates/npm_spec.rb
index 1f8e32ce019..b10e2b0e057 100644
--- a/spec/lib/gitlab/ci/templates/npm_spec.rb
+++ b/spec/lib/gitlab/ci/templates/npm_spec.rb
@@ -6,11 +6,10 @@ RSpec.describe 'npm.latest.gitlab-ci.yml' do
subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('npm.latest') }
describe 'the created pipeline' do
- let_it_be(:user) { create(:admin) }
-
let(:repo_files) { { 'package.json' => '{}', 'README.md' => '' } }
let(:modified_files) { %w[package.json] }
let(:project) { create(:project, :custom_repo, files: repo_files) }
+ let(:user) { project.owner }
let(:pipeline_branch) { project.default_branch }
let(:pipeline_tag) { 'v1.2.1' }
let(:pipeline_ref) { pipeline_branch }
diff --git a/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb
index 5eec021b9d7..4377f155d34 100644
--- a/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb
@@ -10,11 +10,10 @@ RSpec.describe 'Terraform.latest.gitlab-ci.yml' do
subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Terraform.latest') }
describe 'the created pipeline' do
- let_it_be(:user) { create(:admin) }
-
let(:default_branch) { 'master' }
let(:pipeline_branch) { default_branch }
let(:project) { create(:project, :custom_repo, files: { 'README.md' => '' }) }
+ let(:user) { project.owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
let(:pipeline) { service.execute!(:push) }
let(:build_names) { pipeline.builds.pluck(:name) }
diff --git a/spec/lib/gitlab/ci/variables/collection/sorted_spec.rb b/spec/lib/gitlab/ci/variables/collection/sorted_spec.rb
new file mode 100644
index 00000000000..d85bf29f77f
--- /dev/null
+++ b/spec/lib/gitlab/ci/variables/collection/sorted_spec.rb
@@ -0,0 +1,251 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Variables::Collection::Sorted do
+ describe '#errors' do
+ context 'when FF :variable_inside_variable is disabled' do
+ before do
+ stub_feature_flags(variable_inside_variable: false)
+ end
+
+ context 'table tests' do
+ using RSpec::Parameterized::TableSyntax
+
+ where do
+ {
+ "empty array": {
+ variables: []
+ },
+ "simple expansions": {
+ variables: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable2', value: 'result' },
+ { key: 'variable3', value: 'key$variable$variable2' }
+ ]
+ },
+ "complex expansion": {
+ variables: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable2', value: 'key${variable}' }
+ ]
+ },
+ "complex expansions with missing variable for Windows": {
+ variables: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable3', value: 'key%variable%%variable2%' }
+ ]
+ },
+ "out-of-order variable reference": {
+ variables: [
+ { key: 'variable2', value: 'key${variable}' },
+ { key: 'variable', value: 'value' }
+ ]
+ },
+ "array with cyclic dependency": {
+ variables: [
+ { key: 'variable', value: '$variable2' },
+ { key: 'variable2', value: '$variable3' },
+ { key: 'variable3', value: 'key$variable$variable2' }
+ ]
+ }
+ }
+ end
+
+ with_them do
+ subject { Gitlab::Ci::Variables::Collection::Sorted.new(variables) }
+
+ it 'does not report error' do
+ expect(subject.errors).to eq(nil)
+ end
+
+ it 'valid? reports true' do
+ expect(subject.valid?).to eq(true)
+ end
+ end
+ end
+ end
+
+ context 'when FF :variable_inside_variable is enabled' do
+ before do
+ stub_feature_flags(variable_inside_variable: true)
+ end
+
+ context 'table tests' do
+ using RSpec::Parameterized::TableSyntax
+
+ where do
+ {
+ "empty array": {
+ variables: [],
+ validation_result: nil
+ },
+ "simple expansions": {
+ variables: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable2', value: 'result' },
+ { key: 'variable3', value: 'key$variable$variable2' }
+ ],
+ validation_result: nil
+ },
+ "cyclic dependency": {
+ variables: [
+ { key: 'variable', value: '$variable2' },
+ { key: 'variable2', value: '$variable3' },
+ { key: 'variable3', value: 'key$variable$variable2' }
+ ],
+ validation_result: 'circular variable reference detected: ["variable", "variable2", "variable3"]'
+ }
+ }
+ end
+
+ with_them do
+ subject { Gitlab::Ci::Variables::Collection::Sorted.new(variables) }
+
+ it 'errors matches expected validation result' do
+ expect(subject.errors).to eq(validation_result)
+ end
+
+ it 'valid? matches expected validation result' do
+ expect(subject.valid?).to eq(validation_result.nil?)
+ end
+ end
+ end
+ end
+ end
+
+ describe '#sort' do
+ context 'when FF :variable_inside_variable is disabled' do
+ before do
+ stub_feature_flags(variable_inside_variable: false)
+ end
+
+ context 'table tests' do
+ using RSpec::Parameterized::TableSyntax
+
+ where do
+ {
+ "empty array": {
+ variables: []
+ },
+ "simple expansions": {
+ variables: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable2', value: 'result' },
+ { key: 'variable3', value: 'key$variable$variable2' }
+ ]
+ },
+ "complex expansion": {
+ variables: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable2', value: 'key${variable}' }
+ ]
+ },
+ "complex expansions with missing variable for Windows": {
+ variables: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable3', value: 'key%variable%%variable2%' }
+ ]
+ },
+ "out-of-order variable reference": {
+ variables: [
+ { key: 'variable2', value: 'key${variable}' },
+ { key: 'variable', value: 'value' }
+ ]
+ },
+ "array with cyclic dependency": {
+ variables: [
+ { key: 'variable', value: '$variable2' },
+ { key: 'variable2', value: '$variable3' },
+ { key: 'variable3', value: 'key$variable$variable2' }
+ ]
+ }
+ }
+ end
+
+ with_them do
+ subject { Gitlab::Ci::Variables::Collection::Sorted.new(variables) }
+
+ it 'does not expand variables' do
+ expect(subject.sort).to eq(variables)
+ end
+ end
+ end
+ end
+
+ context 'when FF :variable_inside_variable is enabled' do
+ before do
+ stub_feature_flags(variable_inside_variable: true)
+ end
+
+ context 'table tests' do
+ using RSpec::Parameterized::TableSyntax
+
+ where do
+ {
+ "empty array": {
+ variables: [],
+ result: []
+ },
+ "simple expansions, no reordering needed": {
+ variables: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable2', value: 'result' },
+ { key: 'variable3', value: 'key$variable$variable2' }
+ ],
+ result: %w[variable variable2 variable3]
+ },
+ "complex expansion, reordering needed": {
+ variables: [
+ { key: 'variable2', value: 'key${variable}' },
+ { key: 'variable', value: 'value' }
+ ],
+ result: %w[variable variable2]
+ },
+ "unused variables": {
+ variables: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable4', value: 'key$variable$variable3' },
+ { key: 'variable2', value: 'result2' },
+ { key: 'variable3', value: 'result3' }
+ ],
+ result: %w[variable variable3 variable4 variable2]
+ },
+ "missing variable": {
+ variables: [
+ { key: 'variable2', value: 'key$variable' }
+ ],
+ result: %w[variable2]
+ },
+ "complex expansions with missing variable": {
+ variables: [
+ { key: 'variable4', value: 'key${variable}${variable2}${variable3}' },
+ { key: 'variable', value: 'value' },
+ { key: 'variable3', value: 'value3' }
+ ],
+ result: %w[variable variable3 variable4]
+ },
+ "cyclic dependency causes original array to be returned": {
+ variables: [
+ { key: 'variable2', value: '$variable3' },
+ { key: 'variable3', value: 'key$variable$variable2' },
+ { key: 'variable', value: '$variable2' }
+ ],
+ result: %w[variable2 variable3 variable]
+ }
+ }
+ end
+
+ with_them do
+ subject { Gitlab::Ci::Variables::Collection::Sorted.new(variables) }
+
+ it 'sort returns correctly sorted variables' do
+ expect(subject.sort.map { |var| var[:key] }).to eq(result)
+ end
+ end
+ end
+ end
+ end
+end
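
[Editor's note] The sort behaviour these tables describe (dependency ordering, falling back to the original order on a cycle) can be sketched with Ruby's stdlib TSort. This illustrates the expected semantics only; it is not the Collection::Sorted implementation:

  require 'tsort'

  class VariableSorter
    include TSort

    def initialize(variables)
      @variables = variables
    end

    def tsort_each_node(&block)
      @variables.each(&block)
    end

    # A variable's children are the variables it references via $VAR or ${VAR}.
    def tsort_each_child(variable, &block)
      referenced = variable[:value].scan(/\$\{(\w+)\}|\$(\w+)/).flatten.compact
      @variables.select { |v| referenced.include?(v[:key]) }.each(&block)
    end

    def sort
      tsort
    rescue TSort::Cyclic
      @variables # on a cyclic reference, fall back to the original order
    end
  end

  vars = [
    { key: 'variable2', value: 'key${variable}' },
    { key: 'variable',  value: 'value' }
  ]
  VariableSorter.new(vars).sort.map { |v| v[:key] } # => ["variable", "variable2"]
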
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index 5ad1b3dd241..9498453852a 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -2711,40 +2711,6 @@ module Gitlab
end
end
- describe "#validation_message" do
- subject { Gitlab::Ci::YamlProcessor.validation_message(content) }
-
- context "when the YAML could not be parsed" do
- let(:content) { YAML.dump("invalid: yaml: test") }
-
- it { is_expected.to eq "Invalid configuration format" }
- end
-
- context "when the tags parameter is invalid" do
- let(:content) { YAML.dump({ rspec: { script: "test", tags: "mysql" } }) }
-
- it { is_expected.to eq "jobs:rspec:tags config should be an array of strings" }
- end
-
- context "when YAML content is empty" do
- let(:content) { '' }
-
- it { is_expected.to eq "Please provide content of .gitlab-ci.yml" }
- end
-
- context 'when the YAML contains an unknown alias' do
- let(:content) { 'steps: *bad_alias' }
-
- it { is_expected.to eq "Unknown alias: bad_alias" }
- end
-
- context "when the YAML is valid" do
- let(:content) { File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')) }
-
- it { is_expected.to be_nil }
- end
- end
-
describe '#execute' do
subject { Gitlab::Ci::YamlProcessor.new(content).execute }
diff --git a/spec/lib/gitlab/composer/version_index_spec.rb b/spec/lib/gitlab/composer/version_index_spec.rb
new file mode 100644
index 00000000000..4c4742d9f59
--- /dev/null
+++ b/spec/lib/gitlab/composer/version_index_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Composer::VersionIndex do
+ let_it_be(:package_name) { 'sample-project' }
+ let_it_be(:json) { { 'name' => package_name } }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :custom_repo, files: { 'composer.json' => json.to_json }, group: group) }
+ let_it_be(:package1) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '1.0.0', json: json) }
+ let_it_be(:package2) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '2.0.0', json: json) }
+
+ let(:branch) { project.repository.find_branch('master') }
+
+ let(:packages) { [package1, package2] }
+
+ describe '#as_json' do
+ subject(:index) { described_class.new(packages).as_json }
+
+ def expected_json(package)
+ {
+ 'dist' => {
+ 'reference' => branch.target,
+ 'shasum' => '',
+ 'type' => 'zip',
+ 'url' => "http://localhost/api/v4/projects/#{project.id}/packages/composer/archives/#{package.name}.zip?sha=#{branch.target}"
+ },
+ 'name' => package.name,
+ 'uid' => package.id,
+ 'version' => package.version
+ }
+ end
+
+ it 'returns the packages json' do
+ packages = index['packages'][package_name]
+
+ expect(packages['1.0.0']).to eq(expected_json(package1))
+ expect(packages['2.0.0']).to eq(expected_json(package2))
+ end
+ end
+
+ describe '#sha' do
+ subject(:sha) { described_class.new(packages).sha }
+
+ it 'returns the json SHA' do
+ expect(sha).to match /^[A-Fa-f0-9]{64}$/
+ end
+ end
+end
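
[Editor's note] The #sha expectation above (64 hexadecimal characters) is consistent with a SHA-256 digest of the serialized index. An illustrative check of that relationship, with a made-up index hash:

  require 'digest'
  require 'json'

  index = { 'packages' => { 'sample-project' => { '1.0.0' => {}, '2.0.0' => {} } } }
  sha = Digest::SHA256.hexdigest(index.to_json)
  sha.match?(/^[A-Fa-f0-9]{64}$/) # => true
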
diff --git a/spec/lib/gitlab/config/entry/composable_hash_spec.rb b/spec/lib/gitlab/config/entry/composable_hash_spec.rb
index 15bbf2047c5..f64b39231a3 100644
--- a/spec/lib/gitlab/config/entry/composable_hash_spec.rb
+++ b/spec/lib/gitlab/config/entry/composable_hash_spec.rb
@@ -92,7 +92,7 @@ RSpec.describe Gitlab::Config::Entry::ComposableHash, :aggregate_failures do
end
let(:entry) do
- parent_entry = composable_hash_parent_class.new(secrets: config)
+ parent_entry = composable_hash_parent_class.new({ secrets: config })
parent_entry.compose!
parent_entry[:secrets]
diff --git a/spec/lib/gitlab/conflict/file_spec.rb b/spec/lib/gitlab/conflict/file_spec.rb
index 0de944d3f8a..bb9bee763d8 100644
--- a/spec/lib/gitlab/conflict/file_spec.rb
+++ b/spec/lib/gitlab/conflict/file_spec.rb
@@ -97,19 +97,27 @@ RSpec.describe Gitlab::Conflict::File do
let(:diff_line_types) { conflict_file.diff_lines_for_serializer.map(&:type) }
it 'assigns conflict types to the diff lines' do
- expect(diff_line_types[4]).to eq('conflict_marker')
- expect(diff_line_types[5..10]).to eq(['conflict_marker_our'] * 6)
+ expect(diff_line_types[4]).to eq('conflict_marker_our')
+ expect(diff_line_types[5..10]).to eq(['conflict_our'] * 6)
expect(diff_line_types[11]).to eq('conflict_marker')
- expect(diff_line_types[12..17]).to eq(['conflict_marker_their'] * 6)
- expect(diff_line_types[18]).to eq('conflict_marker')
+ expect(diff_line_types[12..17]).to eq(['conflict_their'] * 6)
+ expect(diff_line_types[18]).to eq('conflict_marker_their')
expect(diff_line_types[19..24]).to eq([nil] * 6)
- expect(diff_line_types[25]).to eq('conflict_marker')
- expect(diff_line_types[26..27]).to eq(['conflict_marker_our'] * 2)
+ expect(diff_line_types[25]).to eq('conflict_marker_our')
+ expect(diff_line_types[26..27]).to eq(['conflict_our'] * 2)
expect(diff_line_types[28]).to eq('conflict_marker')
- expect(diff_line_types[29..30]).to eq(['conflict_marker_their'] * 2)
- expect(diff_line_types[31]).to eq('conflict_marker')
+ expect(diff_line_types[29..30]).to eq(['conflict_their'] * 2)
+ expect(diff_line_types[31]).to eq('conflict_marker_their')
+ end
+
+ # Swap the positions around due to conflicts/diffs display inconsistency
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/291989
+ it 'swaps the new and old positions around' do
+ lines = conflict_file.diff_lines_for_serializer
+ expect(lines.map(&:old_pos)[26..27]).to eq([21, 22])
+ expect(lines.map(&:new_pos)[29..30]).to eq([21, 22])
end
it 'does not add a match line to the end of the section' do
@@ -124,13 +132,13 @@ RSpec.describe Gitlab::Conflict::File do
expect(diff_line_types).to eq([
'match',
nil, nil, nil,
- "conflict_marker",
"conflict_marker_our",
+ "conflict_our",
"conflict_marker",
+ "conflict_their",
+ "conflict_their",
+ "conflict_their",
"conflict_marker_their",
- "conflict_marker_their",
- "conflict_marker_their",
- "conflict_marker",
nil, nil, nil,
"match"
])
diff --git a/spec/lib/gitlab/cycle_analytics/base_event_fetcher_spec.rb b/spec/lib/gitlab/cycle_analytics/base_event_fetcher_spec.rb
deleted file mode 100644
index 056c1b5bc9f..00000000000
--- a/spec/lib/gitlab/cycle_analytics/base_event_fetcher_spec.rb
+++ /dev/null
@@ -1,49 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::CycleAnalytics::BaseEventFetcher do
- let(:max_events) { 2 }
- let(:project) { create(:project, :repository) }
- let(:user) { create(:user, :admin) }
- let(:start_time_attrs) { Issue.arel_table[:created_at] }
- let(:end_time_attrs) { [Issue::Metrics.arel_table[:first_associated_with_milestone_at]] }
- let(:options) do
- { start_time_attrs: start_time_attrs,
- end_time_attrs: end_time_attrs,
- from: 30.days.ago,
- project: project }
- end
-
- subject do
- described_class.new(stage: :issue,
- options: options).fetch
- end
-
- before do
- allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return(Issue.all)
- allow_any_instance_of(described_class).to receive(:serialize) do |event|
- event
- end
- allow_any_instance_of(described_class)
- .to receive(:allowed_ids).and_return(nil)
-
- stub_const('Gitlab::CycleAnalytics::BaseEventFetcher::MAX_EVENTS', max_events)
-
- setup_events(count: 3)
- end
-
- it 'limits the rows to the max number' do
- expect(subject.count).to eq(max_events)
- end
-
- def setup_events(count:)
- count.times do
- issue = create(:issue, project: project, created_at: 2.days.ago)
- milestone = create(:milestone, project: project)
-
- issue.update(milestone: milestone)
- create_merge_request_closing_issue(user, project, issue)
- end
- end
-end
diff --git a/spec/lib/gitlab/cycle_analytics/code_event_fetcher_spec.rb b/spec/lib/gitlab/cycle_analytics/code_event_fetcher_spec.rb
deleted file mode 100644
index a1a173abe57..00000000000
--- a/spec/lib/gitlab/cycle_analytics/code_event_fetcher_spec.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::CycleAnalytics::CodeEventFetcher do
- let(:stage_name) { :code }
-
- it_behaves_like 'default query config' do
- it 'has a default order' do
- expect(event.order).not_to be_nil
- end
- end
-end
diff --git a/spec/lib/gitlab/cycle_analytics/code_stage_spec.rb b/spec/lib/gitlab/cycle_analytics/code_stage_spec.rb
deleted file mode 100644
index 17104715580..00000000000
--- a/spec/lib/gitlab/cycle_analytics/code_stage_spec.rb
+++ /dev/null
@@ -1,129 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::CycleAnalytics::CodeStage do
- let(:stage_name) { :code }
-
- let(:project) { create(:project) }
- let(:issue_1) { create(:issue, project: project, created_at: 90.minutes.ago) }
- let(:issue_2) { create(:issue, project: project, created_at: 60.minutes.ago) }
- let(:issue_3) { create(:issue, project: project, created_at: 60.minutes.ago) }
- let(:mr_1) { create(:merge_request, source_project: project, created_at: 15.minutes.ago) }
- let(:mr_2) { create(:merge_request, source_project: project, created_at: 10.minutes.ago, source_branch: 'A') }
- let(:stage_options) { { from: 2.days.ago, current_user: project.creator, project: project } }
- let(:stage) { described_class.new(options: stage_options) }
-
- before do
- issue_1.metrics.update!(first_associated_with_milestone_at: 60.minutes.ago, first_mentioned_in_commit_at: 45.minutes.ago)
- issue_2.metrics.update!(first_added_to_board_at: 60.minutes.ago, first_mentioned_in_commit_at: 40.minutes.ago)
- issue_3.metrics.update!(first_added_to_board_at: 60.minutes.ago, first_mentioned_in_commit_at: 40.minutes.ago)
- create(:merge_request, source_project: project, created_at: 10.minutes.ago, source_branch: 'B')
- create(:merge_requests_closing_issues, merge_request: mr_1, issue: issue_1)
- create(:merge_requests_closing_issues, merge_request: mr_2, issue: issue_2)
- end
-
- it_behaves_like 'base stage'
-
- context 'when using the new query backend' do
- include_examples 'Gitlab::Analytics::CycleAnalytics::DataCollector backend examples' do
- let(:expected_record_count) { 2 }
- let(:expected_ordered_attribute_values) { [mr_2.title, mr_1.title] }
- end
- end
-
- describe '#project_median' do
- around do |example|
- freeze_time { example.run }
- end
-
- it 'counts median from issues with metrics' do
- expect(stage.project_median).to eq(ISSUES_MEDIAN)
- end
-
- include_examples 'calculate #median with date range'
- end
-
- describe '#events' do
- subject { stage.events }
-
- it 'exposes merge requests that closes issues' do
- expect(subject.count).to eq(2)
- expect(subject.map { |event| event[:title] }).to contain_exactly(mr_1.title, mr_2.title)
- end
- end
-
- context 'when group is given' do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:project_2) { create(:project, group: group) }
- let(:project_3) { create(:project, group: group) }
- let(:issue_2_1) { create(:issue, project: project_2, created_at: 90.minutes.ago) }
- let(:issue_2_2) { create(:issue, project: project_3, created_at: 60.minutes.ago) }
- let(:issue_2_3) { create(:issue, project: project_2, created_at: 60.minutes.ago) }
- let(:mr_2_1) { create(:merge_request, source_project: project_2, created_at: 15.minutes.ago) }
- let(:mr_2_2) { create(:merge_request, source_project: project_3, created_at: 10.minutes.ago, source_branch: 'A') }
- let(:stage) { described_class.new(options: { from: 2.days.ago, current_user: user, group: group }) }
-
- before do
- group.add_owner(user)
- issue_2_1.metrics.update!(first_associated_with_milestone_at: 60.minutes.ago, first_mentioned_in_commit_at: 45.minutes.ago)
- issue_2_2.metrics.update!(first_added_to_board_at: 60.minutes.ago, first_mentioned_in_commit_at: 40.minutes.ago)
- issue_2_3.metrics.update!(first_added_to_board_at: 60.minutes.ago, first_mentioned_in_commit_at: 40.minutes.ago)
- create(:merge_requests_closing_issues, merge_request: mr_2_1, issue: issue_2_1)
- create(:merge_requests_closing_issues, merge_request: mr_2_2, issue: issue_2_2)
- end
-
- describe '#group_median' do
- around do |example|
- freeze_time { example.run }
- end
-
- it 'counts median from issues with metrics' do
- expect(stage.group_median).to eq(ISSUES_MEDIAN)
- end
- end
-
- describe '#events' do
- subject { stage.events }
-
- it 'exposes merge requests that close issues' do
- expect(subject.count).to eq(2)
- expect(subject.map { |event| event[:title] }).to contain_exactly(mr_2_1.title, mr_2_2.title)
- end
- end
-
- context 'when subgroup is given' do
- let(:subgroup) { create(:group, parent: group) }
- let(:project_4) { create(:project, group: subgroup) }
- let(:project_5) { create(:project, group: subgroup) }
- let(:issue_3_1) { create(:issue, project: project_4, created_at: 90.minutes.ago) }
- let(:issue_3_2) { create(:issue, project: project_5, created_at: 60.minutes.ago) }
- let(:issue_3_3) { create(:issue, project: project_5, created_at: 60.minutes.ago) }
- let(:mr_3_1) { create(:merge_request, source_project: project_4, created_at: 15.minutes.ago) }
- let(:mr_3_2) { create(:merge_request, source_project: project_5, created_at: 10.minutes.ago, source_branch: 'A') }
-
- before do
- issue_3_1.metrics.update!(first_associated_with_milestone_at: 60.minutes.ago, first_mentioned_in_commit_at: 45.minutes.ago)
- issue_3_2.metrics.update!(first_added_to_board_at: 60.minutes.ago, first_mentioned_in_commit_at: 40.minutes.ago)
- issue_3_3.metrics.update!(first_added_to_board_at: 60.minutes.ago, first_mentioned_in_commit_at: 40.minutes.ago)
- create(:merge_requests_closing_issues, merge_request: mr_3_1, issue: issue_3_1)
- create(:merge_requests_closing_issues, merge_request: mr_3_2, issue: issue_3_2)
- end
-
- describe '#events' do
- subject { stage.events }
-
- it 'exposes merge requests that close issues' do
- expect(subject.count).to eq(4)
- expect(subject.map { |event| event[:title] }).to contain_exactly(mr_2_1.title, mr_2_2.title, mr_3_1.title, mr_3_2.title)
- end
-
- it 'exposes merge requests that close issues with full path for subgroup' do
- expect(subject.count).to eq(4)
- expect(subject.find { |event| event[:title] == mr_3_1.title }[:url]).to include(subgroup.full_path)
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/cycle_analytics/events_spec.rb b/spec/lib/gitlab/cycle_analytics/events_spec.rb
deleted file mode 100644
index 553f33a66c4..00000000000
--- a/spec/lib/gitlab/cycle_analytics/events_spec.rb
+++ /dev/null
@@ -1,182 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'value stream analytics events', :aggregate_failures do
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:user) { create(:user, :admin) }
- let(:from_date) { 10.days.ago }
- let!(:context) { create(:issue, project: project, created_at: 2.days.ago) }
-
- let(:events) do
- CycleAnalytics::ProjectLevel
- .new(project, options: { from: from_date, current_user: user })[stage]
- .events
- end
-
- let(:event) { events.first }
-
- before do
- setup(context)
- end
-
- describe '#issue_events' do
- let(:stage) { :issue }
-
- it 'has correct attributes' do
- expect(event[:total_time]).not_to be_empty
- expect(event[:title]).to eq(context.title)
- expect(event[:url]).not_to be_nil
- expect(event[:iid]).to eq(context.iid.to_s)
- expect(event[:created_at]).to end_with('ago')
- expect(event[:author][:web_url]).not_to be_nil
- expect(event[:author][:avatar_url]).not_to be_nil
- expect(event[:author][:name]).to eq(context.author.name)
- end
- end
-
- describe '#plan_events' do
- let(:stage) { :plan }
-
- before do
- create_commit_referencing_issue(context)
-
- # Adding extra duration because the new VSA backend filters out 0 durations between these columns
- context.metrics.update!(first_mentioned_in_commit_at: context.metrics.first_associated_with_milestone_at + 1.day)
- end
-
- it 'has correct attributes' do
- expect(event[:total_time]).not_to be_empty
- expect(event[:title]).to eq(context.title)
- expect(event[:url]).not_to be_nil
- expect(event[:iid]).to eq(context.iid.to_s)
- expect(event[:created_at]).to end_with('ago')
- expect(event[:author][:web_url]).not_to be_nil
- expect(event[:author][:avatar_url]).not_to be_nil
- expect(event[:author][:name]).to eq(context.author.name)
- end
- end
-
- describe '#code_events' do
- let(:stage) { :code }
- let!(:merge_request) { MergeRequest.first }
-
- before do
- create_commit_referencing_issue(context)
- end
-
- it 'has correct attributes' do
- expect(event[:total_time]).not_to be_empty
- expect(event[:title]).to eq('Awesome merge_request')
- expect(event[:iid]).to eq(context.iid.to_s)
- expect(event[:created_at]).to end_with('ago')
- expect(event[:author][:web_url]).not_to be_nil
- expect(event[:author][:avatar_url]).not_to be_nil
- expect(event[:author][:name]).to eq(MergeRequest.first.author.name)
- end
- end
-
- describe '#test_events', :sidekiq_might_not_need_inline do
- let(:stage) { :test }
-
- let(:merge_request) { MergeRequest.first }
- let!(:context) { create(:issue, project: project, created_at: 2.days.ago) }
-
- let!(:pipeline) do
- create(:ci_pipeline,
- ref: merge_request.source_branch,
- sha: merge_request.diff_head_sha,
- project: project,
- head_pipeline_of: merge_request)
- end
-
- before do
- create(:ci_build, :success, pipeline: pipeline, author: user)
- create(:ci_build, :success, pipeline: pipeline, author: user)
-
- pipeline.run!
- pipeline.succeed!
- merge_merge_requests_closing_issue(user, project, context)
- end
-
- it 'has correct attributes' do
- expect(event[:name]).not_to be_nil
- expect(event[:id]).not_to be_nil
- expect(event[:url]).not_to be_nil
- expect(event[:branch]).not_to be_nil
- expect(event[:branch][:url]).not_to be_nil
- expect(event[:short_sha]).not_to be_nil
- expect(event[:commit_url]).not_to be_nil
- expect(event[:date]).not_to be_nil
- expect(event[:total_time]).not_to be_empty
- end
- end
-
- describe '#review_events' do
- let(:stage) { :review }
- let!(:context) { create(:issue, project: project, created_at: 2.days.ago) }
-
- before do
- merge_merge_requests_closing_issue(user, project, context)
- end
-
- it 'has correct attributes' do
- expect(event[:total_time]).not_to be_empty
- expect(event[:title]).to eq('Awesome merge_request')
- expect(event[:iid]).to eq(context.iid.to_s)
- expect(event[:url]).not_to be_nil
- expect(event[:state]).not_to be_nil
- expect(event[:created_at]).not_to be_nil
- expect(event[:author][:web_url]).not_to be_nil
- expect(event[:author][:avatar_url]).not_to be_nil
- expect(event[:author][:name]).to eq(MergeRequest.first.author.name)
- end
- end
-
- describe '#staging_events', :sidekiq_might_not_need_inline do
- let(:stage) { :staging }
- let(:merge_request) { MergeRequest.first }
-
- let!(:pipeline) do
- create(:ci_pipeline,
- ref: merge_request.source_branch,
- sha: merge_request.diff_head_sha,
- project: project,
- head_pipeline_of: merge_request)
- end
-
- before do
- create(:ci_build, :success, pipeline: pipeline, author: user)
- create(:ci_build, :success, pipeline: pipeline, author: user)
-
- pipeline.run!
- pipeline.succeed!
-
- merge_merge_requests_closing_issue(user, project, context)
- deploy_master(user, project)
- end
-
- it 'has correct attributes' do
- expect(event[:name]).not_to be_nil
- expect(event[:id]).not_to be_nil
- expect(event[:url]).not_to be_nil
- expect(event[:branch]).not_to be_nil
- expect(event[:branch][:url]).not_to be_nil
- expect(event[:short_sha]).not_to be_nil
- expect(event[:commit_url]).not_to be_nil
- expect(event[:date]).not_to be_nil
- expect(event[:total_time]).not_to be_empty
- expect(event[:author][:web_url]).not_to be_nil
- expect(event[:author][:avatar_url]).not_to be_nil
- expect(event[:author][:name]).to eq(MergeRequest.first.author.name)
- end
- end
-
- def setup(context)
- milestone = create(:milestone, project: project)
- context.update!(milestone: milestone)
- mr = create_merge_request_closing_issue(user, project, context, commit_message: "References #{context.to_reference}")
-
- ProcessCommitWorker.new.perform(project.id, user.id, mr.commits.last.to_hash)
- end
-end
diff --git a/spec/lib/gitlab/cycle_analytics/issue_event_fetcher_spec.rb b/spec/lib/gitlab/cycle_analytics/issue_event_fetcher_spec.rb
deleted file mode 100644
index 7a49ee53e8f..00000000000
--- a/spec/lib/gitlab/cycle_analytics/issue_event_fetcher_spec.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::CycleAnalytics::IssueEventFetcher do
- let(:stage_name) { :issue }
-
- it_behaves_like 'default query config'
-end
diff --git a/spec/lib/gitlab/cycle_analytics/issue_stage_spec.rb b/spec/lib/gitlab/cycle_analytics/issue_stage_spec.rb
deleted file mode 100644
index c7ab2b9b84b..00000000000
--- a/spec/lib/gitlab/cycle_analytics/issue_stage_spec.rb
+++ /dev/null
@@ -1,136 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::CycleAnalytics::IssueStage do
- let(:stage_name) { :issue }
- let(:project) { create(:project) }
- let(:issue_1) { create(:issue, project: project, created_at: 90.minutes.ago) }
- let(:issue_2) { create(:issue, project: project, created_at: 60.minutes.ago) }
- let(:issue_3) { create(:issue, project: project, created_at: 30.minutes.ago) }
- let!(:issue_without_milestone) { create(:issue, project: project, created_at: 1.minute.ago) }
- let(:stage_options) { { from: 2.days.ago, current_user: project.creator, project: project } }
- let(:stage) { described_class.new(options: stage_options) }
-
- before do
- issue_1.metrics.update!(first_associated_with_milestone_at: 60.minutes.ago)
- issue_2.metrics.update!(first_added_to_board_at: 30.minutes.ago)
- issue_3.metrics.update!(first_added_to_board_at: 15.minutes.ago)
- end
-
- it_behaves_like 'base stage'
-
- context 'when using the new query backend' do
- include_examples 'Gitlab::Analytics::CycleAnalytics::DataCollector backend examples' do
- let(:expected_record_count) { 3 }
- let(:expected_ordered_attribute_values) { [issue_3.title, issue_2.title, issue_1.title] }
- end
- end
-
- describe '#median' do
- around do |example|
- freeze_time { example.run }
- end
-
- it 'counts median from issues with metrics' do
- expect(stage.project_median).to eq(ISSUES_MEDIAN)
- end
-
- include_examples 'calculate #median with date range'
- end
-
- describe '#events' do
- it 'exposes issues with metrics' do
- result = stage.events
-
- expect(result.count).to eq(3)
- expect(result.map { |event| event[:title] }).to contain_exactly(issue_1.title, issue_2.title, issue_3.title)
- end
- end
- context 'when group is given' do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:project_2) { create(:project, group: group) }
- let(:project_3) { create(:project, group: group) }
- let(:issue_2_1) { create(:issue, project: project_2, created_at: 90.minutes.ago) }
- let(:issue_2_2) { create(:issue, project: project_3, created_at: 60.minutes.ago) }
- let(:issue_2_3) { create(:issue, project: project_2, created_at: 60.minutes.ago) }
- let(:stage) { described_class.new(options: { from: 2.days.ago, current_user: user, group: group }) }
-
- before do
- group.add_owner(user)
- issue_2_1.metrics.update!(first_associated_with_milestone_at: 60.minutes.ago)
- issue_2_2.metrics.update!(first_added_to_board_at: 30.minutes.ago)
- end
-
- describe '#group_median' do
- around do |example|
- freeze_time { example.run }
- end
-
- it 'counts median from issues with metrics' do
- expect(stage.group_median).to eq(ISSUES_MEDIAN)
- end
- end
-
- describe '#events' do
- subject { stage.events }
-
- it 'exposes issues with metrics' do
- expect(subject.count).to eq(2)
- expect(subject.map { |event| event[:title] }).to contain_exactly(issue_2_1.title, issue_2_2.title)
- end
- end
-
- context 'when only a subset of projects is chosen' do
- let(:stage) { described_class.new(options: { from: 2.days.ago, current_user: user, group: group, projects: [project_2.id] }) }
-
- describe '#group_median' do
- around do |example|
- freeze_time { example.run }
- end
-
- it 'counts median from issues with metrics' do
- expect(stage.group_median).to eq(ISSUES_MEDIAN)
- end
- end
-
- describe '#events' do
- subject { stage.events }
-
- it 'exposes issues with metrics' do
- expect(subject.count).to eq(1)
- expect(subject.map { |event| event[:title] }).to contain_exactly(issue_2_1.title)
- end
- end
- end
-
- context 'when subgroup is given' do
- let(:subgroup) { create(:group, parent: group) }
- let(:project_4) { create(:project, group: subgroup) }
- let(:project_5) { create(:project, group: subgroup) }
- let(:issue_3_1) { create(:issue, project: project_4, created_at: 90.minutes.ago) }
- let(:issue_3_2) { create(:issue, project: project_5, created_at: 60.minutes.ago) }
- let(:issue_3_3) { create(:issue, project: project_5, created_at: 60.minutes.ago) }
-
- before do
- issue_3_1.metrics.update!(first_associated_with_milestone_at: 60.minutes.ago)
- issue_3_2.metrics.update!(first_added_to_board_at: 30.minutes.ago)
- end
-
- describe '#events' do
- subject { stage.events }
-
- it 'exposes issues with metrics' do
- expect(subject.count).to eq(4)
- expect(subject.map { |event| event[:title] }).to contain_exactly(issue_2_1.title, issue_2_2.title, issue_3_1.title, issue_3_2.title)
- end
-
- it 'exposes issues with full path for subgroup' do
- expect(subject.count).to eq(4)
- expect(subject.find { |event| event[:title] == issue_3_1.title }[:url]).to include(subgroup.full_path)
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/cycle_analytics/plan_event_fetcher_spec.rb b/spec/lib/gitlab/cycle_analytics/plan_event_fetcher_spec.rb
deleted file mode 100644
index bc14a772d34..00000000000
--- a/spec/lib/gitlab/cycle_analytics/plan_event_fetcher_spec.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::CycleAnalytics::PlanEventFetcher do
- let(:stage_name) { :plan }
-
- it_behaves_like 'default query config' do
- context 'no commits' do
- it 'does not blow up if there are no commits' do
- allow(event).to receive(:event_result).and_return([{}])
-
- expect { event.fetch }.not_to raise_error
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/cycle_analytics/plan_stage_spec.rb b/spec/lib/gitlab/cycle_analytics/plan_stage_spec.rb
deleted file mode 100644
index 2547c05c025..00000000000
--- a/spec/lib/gitlab/cycle_analytics/plan_stage_spec.rb
+++ /dev/null
@@ -1,116 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::CycleAnalytics::PlanStage do
- let(:stage_name) { :plan }
- let(:project) { create(:project) }
- let!(:issue_1) { create(:issue, project: project, created_at: 90.minutes.ago) }
- let!(:issue_2) { create(:issue, project: project, created_at: 60.minutes.ago) }
- let!(:issue_3) { create(:issue, project: project, created_at: 30.minutes.ago) }
- let!(:issue_without_milestone) { create(:issue, project: project, created_at: 1.minute.ago) }
- let(:stage_options) { { from: 2.days.ago, current_user: project.creator, project: project } }
- let(:stage) { described_class.new(options: stage_options) }
-
- before do
- issue_1.metrics.update!(first_associated_with_milestone_at: 60.minutes.ago, first_mentioned_in_commit_at: 10.minutes.ago)
- issue_2.metrics.update!(first_added_to_board_at: 30.minutes.ago, first_mentioned_in_commit_at: 20.minutes.ago)
- issue_3.metrics.update!(first_added_to_board_at: 15.minutes.ago)
- end
-
- it_behaves_like 'base stage'
-
- context 'when using the new query backend' do
- include_examples 'Gitlab::Analytics::CycleAnalytics::DataCollector backend examples' do
- let(:expected_record_count) { 2 }
- let(:expected_ordered_attribute_values) { [issue_1.title, issue_2.title] }
- end
- end
-
- describe '#project_median' do
- around do |example|
- freeze_time { example.run }
- end
-
- it 'counts median from issues with metrics' do
- expect(stage.project_median).to eq(ISSUES_MEDIAN)
- end
-
- include_examples 'calculate #median with date range'
- end
-
- describe '#events' do
- subject { stage.events }
-
- it 'exposes issues with metrics' do
- expect(subject.count).to eq(2)
- expect(subject.map { |event| event[:title] }).to contain_exactly(issue_1.title, issue_2.title)
- end
- end
-
- context 'when group is given' do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:project_2) { create(:project, group: group) }
- let(:project_3) { create(:project, group: group) }
- let(:issue_2_1) { create(:issue, project: project_2, created_at: 90.minutes.ago) }
- let(:issue_2_2) { create(:issue, project: project_3, created_at: 60.minutes.ago) }
- let(:issue_2_3) { create(:issue, project: project_2, created_at: 60.minutes.ago) }
- let(:stage) { described_class.new(options: { from: 2.days.ago, current_user: user, group: group }) }
-
- before do
- group.add_owner(user)
- issue_2_1.metrics.update!(first_associated_with_milestone_at: 60.minutes.ago, first_mentioned_in_commit_at: 10.minutes.ago)
- issue_2_2.metrics.update!(first_added_to_board_at: 30.minutes.ago, first_mentioned_in_commit_at: 20.minutes.ago)
- issue_2_3.metrics.update!(first_added_to_board_at: 15.minutes.ago)
- end
-
- describe '#group_median' do
- around do |example|
- freeze_time { example.run }
- end
-
- it 'counts median from issues with metrics' do
- expect(stage.group_median).to eq(ISSUES_MEDIAN)
- end
- end
-
- describe '#events' do
- subject { stage.events }
-
- it 'exposes issues with metrics' do
- expect(subject.count).to eq(2)
- expect(subject.map { |event| event[:title] }).to contain_exactly(issue_2_1.title, issue_2_2.title)
- end
- end
-
- context 'when subgroup is given' do
- let(:subgroup) { create(:group, parent: group) }
- let(:project_4) { create(:project, group: subgroup) }
- let(:project_5) { create(:project, group: subgroup) }
- let(:issue_3_1) { create(:issue, project: project_4, created_at: 90.minutes.ago) }
- let(:issue_3_2) { create(:issue, project: project_5, created_at: 60.minutes.ago) }
- let(:issue_3_3) { create(:issue, project: project_5, created_at: 60.minutes.ago) }
-
- before do
- issue_3_1.metrics.update!(first_associated_with_milestone_at: 60.minutes.ago, first_mentioned_in_commit_at: 10.minutes.ago)
- issue_3_2.metrics.update!(first_added_to_board_at: 30.minutes.ago, first_mentioned_in_commit_at: 20.minutes.ago)
- issue_3_3.metrics.update!(first_added_to_board_at: 15.minutes.ago)
- end
-
- describe '#events' do
- subject { stage.events }
-
- it 'exposes issues with metrics' do
- expect(subject.count).to eq(4)
- expect(subject.map { |event| event[:title] }).to contain_exactly(issue_2_1.title, issue_2_2.title, issue_3_1.title, issue_3_2.title)
- end
-
- it 'exposes issues with full path for subgroup' do
- expect(subject.count).to eq(4)
- expect(subject.find { |event| event[:title] == issue_3_1.title }[:url]).to include(subgroup.full_path)
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/cycle_analytics/production_event_fetcher_spec.rb b/spec/lib/gitlab/cycle_analytics/production_event_fetcher_spec.rb
deleted file mode 100644
index 86b07a95cbb..00000000000
--- a/spec/lib/gitlab/cycle_analytics/production_event_fetcher_spec.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::CycleAnalytics::ProductionEventFetcher do
- let(:stage_name) { :production }
-
- it_behaves_like 'default query config'
-end
diff --git a/spec/lib/gitlab/cycle_analytics/review_event_fetcher_spec.rb b/spec/lib/gitlab/cycle_analytics/review_event_fetcher_spec.rb
deleted file mode 100644
index fe13cc6b065..00000000000
--- a/spec/lib/gitlab/cycle_analytics/review_event_fetcher_spec.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::CycleAnalytics::ReviewEventFetcher do
- let(:stage_name) { :review }
-
- it_behaves_like 'default query config'
-end
diff --git a/spec/lib/gitlab/cycle_analytics/review_stage_spec.rb b/spec/lib/gitlab/cycle_analytics/review_stage_spec.rb
deleted file mode 100644
index 5593013740e..00000000000
--- a/spec/lib/gitlab/cycle_analytics/review_stage_spec.rb
+++ /dev/null
@@ -1,90 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::CycleAnalytics::ReviewStage do
- let(:stage_name) { :review }
- let(:project) { create(:project) }
- let(:issue_1) { create(:issue, project: project, created_at: 90.minutes.ago) }
- let(:issue_2) { create(:issue, project: project, created_at: 60.minutes.ago) }
- let(:issue_3) { create(:issue, project: project, created_at: 60.minutes.ago) }
- let(:mr_1) { create(:merge_request, :closed, source_project: project, created_at: 60.minutes.ago) }
- let(:mr_2) { create(:merge_request, :closed, source_project: project, created_at: 40.minutes.ago, source_branch: 'A') }
- let(:mr_3) { create(:merge_request, source_project: project, created_at: 10.minutes.ago, source_branch: 'B') }
- let!(:mr_4) { create(:merge_request, source_project: project, created_at: 10.minutes.ago, source_branch: 'C') }
- let(:stage) { described_class.new(options: { from: 2.days.ago, current_user: project.creator, project: project }) }
-
- before do
- mr_1.metrics.update!(merged_at: 30.minutes.ago)
- mr_2.metrics.update!(merged_at: 10.minutes.ago)
-
- create(:merge_requests_closing_issues, merge_request: mr_1, issue: issue_1)
- create(:merge_requests_closing_issues, merge_request: mr_2, issue: issue_2)
- create(:merge_requests_closing_issues, merge_request: mr_3, issue: issue_3)
- end
-
- it_behaves_like 'base stage'
-
- describe '#project_median' do
- around do |example|
- freeze_time { example.run }
- end
-
- it 'counts median from issues with metrics' do
- expect(stage.project_median).to eq(ISSUES_MEDIAN)
- end
- end
-
- describe '#events' do
- subject { stage.events }
-
- it 'exposes merge requests that close issues' do
- expect(subject.count).to eq(2)
- expect(subject.map { |event| event[:title] }).to contain_exactly(mr_1.title, mr_2.title)
- end
- end
-
- context 'when group is given' do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:project_2) { create(:project, group: group) }
- let(:project_3) { create(:project, group: group) }
- let(:issue_2_1) { create(:issue, project: project_2, created_at: 90.minutes.ago) }
- let(:issue_2_2) { create(:issue, project: project_3, created_at: 60.minutes.ago) }
- let(:issue_2_3) { create(:issue, project: project_2, created_at: 60.minutes.ago) }
- let(:mr_2_1) { create(:merge_request, :closed, source_project: project_2, created_at: 60.minutes.ago) }
- let(:mr_2_2) { create(:merge_request, :closed, source_project: project_3, created_at: 40.minutes.ago, source_branch: 'A') }
- let(:mr_2_3) { create(:merge_request, source_project: project_2, created_at: 10.minutes.ago, source_branch: 'B') }
- let!(:mr_2_4) { create(:merge_request, source_project: project_3, created_at: 10.minutes.ago, source_branch: 'C') }
- let(:stage) { described_class.new(options: { from: 2.days.ago, current_user: user, group: group }) }
-
- before do
- group.add_owner(user)
- mr_2_1.metrics.update!(merged_at: 30.minutes.ago)
- mr_2_2.metrics.update!(merged_at: 10.minutes.ago)
-
- create(:merge_requests_closing_issues, merge_request: mr_2_1, issue: issue_2_1)
- create(:merge_requests_closing_issues, merge_request: mr_2_2, issue: issue_2_2)
- create(:merge_requests_closing_issues, merge_request: mr_2_3, issue: issue_2_3)
- end
-
- describe '#group_median' do
- around do |example|
- freeze_time { example.run }
- end
-
- it 'counts median from issues with metrics' do
- expect(stage.group_median).to eq(ISSUES_MEDIAN)
- end
- end
-
- describe '#events' do
- subject { stage.events }
-
- it 'exposes merge requests that close issues' do
- expect(subject.count).to eq(2)
- expect(subject.map { |event| event[:title] }).to contain_exactly(mr_2_1.title, mr_2_2.title)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/cycle_analytics/staging_event_fetcher_spec.rb b/spec/lib/gitlab/cycle_analytics/staging_event_fetcher_spec.rb
deleted file mode 100644
index bdf1b99c4c9..00000000000
--- a/spec/lib/gitlab/cycle_analytics/staging_event_fetcher_spec.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::CycleAnalytics::StagingEventFetcher do
- let(:stage_name) { :staging }
-
- it_behaves_like 'default query config' do
- it 'has a default order' do
- expect(event.order).not_to be_nil
- end
- end
-end
diff --git a/spec/lib/gitlab/cycle_analytics/staging_stage_spec.rb b/spec/lib/gitlab/cycle_analytics/staging_stage_spec.rb
deleted file mode 100644
index 852f7041dc6..00000000000
--- a/spec/lib/gitlab/cycle_analytics/staging_stage_spec.rb
+++ /dev/null
@@ -1,99 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::CycleAnalytics::StagingStage do
- let(:stage_name) { :staging }
-
- let(:project) { create(:project) }
- let(:issue_1) { create(:issue, project: project, created_at: 90.minutes.ago) }
- let(:issue_2) { create(:issue, project: project, created_at: 60.minutes.ago) }
- let(:issue_3) { create(:issue, project: project, created_at: 60.minutes.ago) }
- let(:mr_1) { create(:merge_request, :closed, source_project: project, created_at: 60.minutes.ago) }
- let(:mr_2) { create(:merge_request, :closed, source_project: project, created_at: 40.minutes.ago, source_branch: 'A') }
- let(:mr_3) { create(:merge_request, source_project: project, created_at: 10.minutes.ago, source_branch: 'B') }
- let(:build_1) { create(:ci_build, project: project) }
- let(:build_2) { create(:ci_build, project: project) }
-
- let(:stage_options) { { from: 2.days.ago, current_user: project.creator, project: project } }
- let(:stage) { described_class.new(options: stage_options) }
-
- before do
- mr_1.metrics.update!(merged_at: 80.minutes.ago, first_deployed_to_production_at: 50.minutes.ago, pipeline_id: build_1.commit_id)
- mr_2.metrics.update!(merged_at: 60.minutes.ago, first_deployed_to_production_at: 30.minutes.ago, pipeline_id: build_2.commit_id)
- mr_3.metrics.update!(merged_at: 10.minutes.ago, first_deployed_to_production_at: 3.days.ago, pipeline_id: create(:ci_build, project: project).commit_id)
-
- create(:merge_requests_closing_issues, merge_request: mr_1, issue: issue_1)
- create(:merge_requests_closing_issues, merge_request: mr_2, issue: issue_2)
- create(:merge_requests_closing_issues, merge_request: mr_3, issue: issue_3)
- end
-
- it_behaves_like 'base stage'
-
- describe '#project_median' do
- around do |example|
- freeze_time { example.run }
- end
-
- it 'counts median from issues with metrics' do
- expect(stage.project_median).to eq(ISSUES_MEDIAN)
- end
-
- it_behaves_like 'calculate #median with date range'
- end
-
- describe '#events' do
- subject { stage.events }
-
- it 'exposes builds connected to merge request' do
- expect(subject.count).to eq(2)
- expect(subject.map { |event| event[:name] }).to contain_exactly(build_1.name, build_2.name)
- end
- end
-
- context 'when group is given' do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:project_2) { create(:project, group: group) }
- let(:project_3) { create(:project, group: group) }
- let(:issue_2_1) { create(:issue, project: project_2, created_at: 90.minutes.ago) }
- let(:issue_2_2) { create(:issue, project: project_3, created_at: 60.minutes.ago) }
- let(:issue_2_3) { create(:issue, project: project_2, created_at: 60.minutes.ago) }
- let(:mr_1) { create(:merge_request, :closed, source_project: project_2, created_at: 60.minutes.ago) }
- let(:mr_2) { create(:merge_request, :closed, source_project: project_3, created_at: 40.minutes.ago, source_branch: 'A') }
- let(:mr_3) { create(:merge_request, source_project: project_2, created_at: 10.minutes.ago, source_branch: 'B') }
- let(:build_1) { create(:ci_build, project: project_2) }
- let(:build_2) { create(:ci_build, project: project_3) }
- let(:stage) { described_class.new(options: { from: 2.days.ago, current_user: user, group: group }) }
-
- before do
- group.add_owner(user)
- mr_1.metrics.update!(merged_at: 80.minutes.ago, first_deployed_to_production_at: 50.minutes.ago, pipeline_id: build_1.commit_id)
- mr_2.metrics.update!(merged_at: 60.minutes.ago, first_deployed_to_production_at: 30.minutes.ago, pipeline_id: build_2.commit_id)
- mr_3.metrics.update!(merged_at: 10.minutes.ago, first_deployed_to_production_at: 3.days.ago, pipeline_id: create(:ci_build, project: project_2).commit_id)
-
- create(:merge_requests_closing_issues, merge_request: mr_1, issue: issue_2_1)
- create(:merge_requests_closing_issues, merge_request: mr_2, issue: issue_2_2)
- create(:merge_requests_closing_issues, merge_request: mr_3, issue: issue_2_3)
- end
-
- describe '#group_median' do
- around do |example|
- freeze_time { example.run }
- end
-
- it 'counts median from issues with metrics' do
- expect(stage.group_median).to eq(ISSUES_MEDIAN)
- end
- end
-
- describe '#events' do
- subject { stage.events }
-
- it 'exposes builds connected to merge request' do
- expect(subject.count).to eq(2)
- expect(subject.map { |event| event[:name] }).to contain_exactly(build_1.name, build_2.name)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/cycle_analytics/test_event_fetcher_spec.rb b/spec/lib/gitlab/cycle_analytics/test_event_fetcher_spec.rb
deleted file mode 100644
index 1277385d0b4..00000000000
--- a/spec/lib/gitlab/cycle_analytics/test_event_fetcher_spec.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::CycleAnalytics::TestEventFetcher do
- let(:stage_name) { :test }
-
- it_behaves_like 'default query config' do
- it 'has a default order' do
- expect(event.order).not_to be_nil
- end
- end
-end
diff --git a/spec/lib/gitlab/cycle_analytics/test_stage_spec.rb b/spec/lib/gitlab/cycle_analytics/test_stage_spec.rb
deleted file mode 100644
index 49ee6624260..00000000000
--- a/spec/lib/gitlab/cycle_analytics/test_stage_spec.rb
+++ /dev/null
@@ -1,57 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::CycleAnalytics::TestStage do
- let(:stage_name) { :test }
- let(:project) { create(:project) }
- let(:stage_options) { { from: 2.days.ago, current_user: project.creator, project: project } }
- let(:stage) { described_class.new(options: stage_options) }
-
- it_behaves_like 'base stage'
-
- describe '#median' do
- let(:mr_1) { create(:merge_request, :closed, source_project: project, created_at: 60.minutes.ago) }
- let(:mr_2) { create(:merge_request, :closed, source_project: project, created_at: 40.minutes.ago, source_branch: 'A') }
- let(:mr_3) { create(:merge_request, source_project: project, created_at: 10.minutes.ago, source_branch: 'B') }
- let(:mr_4) { create(:merge_request, source_project: project, created_at: 10.minutes.ago, source_branch: 'C') }
- let(:mr_5) { create(:merge_request, source_project: project, created_at: 10.minutes.ago, source_branch: 'D') }
- let(:ci_build1) { create(:ci_build, project: project) }
- let(:ci_build2) { create(:ci_build, project: project) }
-
- before do
- issue_1 = create(:issue, project: project, created_at: 90.minutes.ago)
- issue_2 = create(:issue, project: project, created_at: 60.minutes.ago)
- issue_3 = create(:issue, project: project, created_at: 60.minutes.ago)
- mr_1.metrics.update!(latest_build_started_at: 32.minutes.ago, latest_build_finished_at: 2.minutes.ago, pipeline_id: ci_build1.commit_id)
- mr_2.metrics.update!(latest_build_started_at: 62.minutes.ago, latest_build_finished_at: 32.minutes.ago, pipeline_id: ci_build2.commit_id)
- mr_3.metrics.update!(latest_build_started_at: nil, latest_build_finished_at: nil)
- mr_4.metrics.update!(latest_build_started_at: nil, latest_build_finished_at: nil)
- mr_5.metrics.update!(latest_build_started_at: nil, latest_build_finished_at: nil)
-
- create(:merge_requests_closing_issues, merge_request: mr_1, issue: issue_1)
- create(:merge_requests_closing_issues, merge_request: mr_2, issue: issue_2)
- create(:merge_requests_closing_issues, merge_request: mr_3, issue: issue_3)
- create(:merge_requests_closing_issues, merge_request: mr_4, issue: issue_3)
- create(:merge_requests_closing_issues, merge_request: mr_5, issue: issue_3)
- end
-
- around do |example|
- freeze_time { example.run }
- end
-
- it 'counts median from issues with metrics' do
- expect(stage.project_median).to eq(ISSUES_MEDIAN)
- end
-
- include_examples 'calculate #median with date range'
-
- context 'when using the new query backend' do
- include_examples 'Gitlab::Analytics::CycleAnalytics::DataCollector backend examples' do
- let(:expected_record_count) { 2 }
- let(:attribute_to_verify) { :id }
- let(:expected_ordered_attribute_values) { [mr_1.metrics.pipeline.builds.first.id, mr_2.metrics.pipeline.builds.first.id] }
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/danger/base_linter_spec.rb b/spec/lib/gitlab/danger/base_linter_spec.rb
index bd0ceb5a125..0136a0278ae 100644
--- a/spec/lib/gitlab/danger/base_linter_spec.rb
+++ b/spec/lib/gitlab/danger/base_linter_spec.rb
@@ -1,6 +1,7 @@
# frozen_string_literal: true
require 'fast_spec_helper'
+require 'rspec-parameterized'
require_relative 'danger_spec_helper'
require 'gitlab/danger/base_linter'
@@ -70,19 +71,57 @@ RSpec.describe Gitlab::Danger::BaseLinter do
end
end
- context 'when subject is a WIP' do
+ context 'when ignoring length issues for a subject with not-ready wording' do
+ using RSpec::Parameterized::TableSyntax
+
let(:final_message) { 'A B C' }
- # commit message with prefix will be over max length. commit message without prefix will be of maximum size
- let(:commit_message) { described_class::WIP_PREFIX + final_message + 'D' * (described_class::MAX_LINE_LENGTH - final_message.size) }
- it 'does not have any problems' do
- commit_linter.lint_subject
+ context 'when used as prefix' do
+ where(prefix: [
+ 'WIP: ',
+ 'WIP:',
+ 'wIp:',
+ '[WIP] ',
+ '[WIP]',
+ '[draft]',
+ '[draft] ',
+ '(draft)',
+ '(draft) ',
+ 'draft - ',
+ 'draft: ',
+ 'draft:',
+ 'DRAFT:'
+ ])
+
+ with_them do
+ it 'does not have any problems' do
+ commit_message = prefix + final_message + 'D' * (described_class::MAX_LINE_LENGTH - final_message.size)
+ commit = commit_class.new(commit_message, anything, anything)
+
+ linter = described_class.new(commit).lint_subject
+
+ expect(linter.problems).to be_empty
+ end
+ end
+ end
- expect(commit_linter.problems).to be_empty
+ context 'when used as suffix' do
+ where(suffix: %w[WIP draft])
+
+ with_them do
+ it 'does not have any problems' do
+ commit_message = final_message + 'D' * (described_class::MAX_LINE_LENGTH - final_message.size) + suffix
+ commit = commit_class.new(commit_message, anything, anything)
+
+ linter = described_class.new(commit).lint_subject
+
+ expect(linter.problems).to be_empty
+ end
+ end
end
end
- context 'when subject is too short and too long' do
+ context 'when subject does not have enough words and is too long' do
let(:commit_message) { 'A ' + 'B' * described_class::MAX_LINE_LENGTH }
it 'adds a problem' do
diff --git a/spec/lib/gitlab/danger/changelog_spec.rb b/spec/lib/gitlab/danger/changelog_spec.rb
index 2da60f4f8bd..04c515f1205 100644
--- a/spec/lib/gitlab/danger/changelog_spec.rb
+++ b/spec/lib/gitlab/danger/changelog_spec.rb
@@ -150,41 +150,80 @@ RSpec.describe Gitlab::Danger::Changelog do
end
describe '#modified_text' do
- let(:sanitize_mr_title) { 'Fake Title' }
let(:mr_json) { { "iid" => 1234, "title" => sanitize_mr_title } }
subject { changelog.modified_text }
- it do
- expect(subject).to include('CHANGELOG.md was edited')
- expect(subject).to include('bin/changelog -m 1234 "Fake Title"')
- expect(subject).to include('bin/changelog --ee -m 1234 "Fake Title"')
+ context "when title is not changed from sanitization", :aggregate_failures do
+ let(:sanitize_mr_title) { 'Fake Title' }
+
+ specify do
+ expect(subject).to include('CHANGELOG.md was edited')
+ expect(subject).to include('bin/changelog -m 1234 "Fake Title"')
+ expect(subject).to include('bin/changelog --ee -m 1234 "Fake Title"')
+ end
+ end
+
+ context "when title needs sanitization", :aggregate_failures do
+ let(:sanitize_mr_title) { 'DRAFT: Fake Title' }
+
+ specify do
+ expect(subject).to include('CHANGELOG.md was edited')
+ expect(subject).to include('bin/changelog -m 1234 "Fake Title"')
+ expect(subject).to include('bin/changelog --ee -m 1234 "Fake Title"')
+ end
end
end
describe '#required_text' do
- let(:sanitize_mr_title) { 'Fake Title' }
let(:mr_json) { { "iid" => 1234, "title" => sanitize_mr_title } }
subject { changelog.required_text }
- it do
- expect(subject).to include('CHANGELOG missing')
- expect(subject).to include('bin/changelog -m 1234 "Fake Title"')
- expect(subject).not_to include('--ee')
+ context "when title is not changed from sanitization", :aggregate_failures do
+ let(:sanitize_mr_title) { 'Fake Title' }
+
+ specify do
+ expect(subject).to include('CHANGELOG missing')
+ expect(subject).to include('bin/changelog -m 1234 "Fake Title"')
+ expect(subject).not_to include('--ee')
+ end
+ end
+
+ context "when title needs sanitization", :aggregate_failures do
+ let(:sanitize_mr_title) { 'DRAFT: Fake Title' }
+
+ specify do
+ expect(subject).to include('CHANGELOG missing')
+ expect(subject).to include('bin/changelog -m 1234 "Fake Title"')
+ expect(subject).not_to include('--ee')
+ end
end
end
- describe 'optional_text' do
- let(:sanitize_mr_title) { 'Fake Title' }
+ describe '#optional_text' do
let(:mr_json) { { "iid" => 1234, "title" => sanitize_mr_title } }
subject { changelog.optional_text }
- it do
- expect(subject).to include('CHANGELOG missing')
- expect(subject).to include('bin/changelog -m 1234 "Fake Title"')
- expect(subject).to include('bin/changelog --ee -m 1234 "Fake Title"')
+ context "when title is not changed from sanitization", :aggregate_failures do
+ let(:sanitize_mr_title) { 'Fake Title' }
+
+ specify do
+ expect(subject).to include('CHANGELOG missing')
+ expect(subject).to include('bin/changelog -m 1234 "Fake Title"')
+ expect(subject).to include('bin/changelog --ee -m 1234 "Fake Title"')
+ end
+ end
+
+ context "when title needs sanitization", :aggregate_failures do
+ let(:sanitize_mr_title) { 'DRAFT: Fake Title' }
+
+ specify do
+ expect(subject).to include('CHANGELOG missing')
+ expect(subject).to include('bin/changelog -m 1234 "Fake Title"')
+ expect(subject).to include('bin/changelog --ee -m 1234 "Fake Title"')
+ end
end
end
end
diff --git a/spec/lib/gitlab/danger/helper_spec.rb b/spec/lib/gitlab/danger/helper_spec.rb
index a8f113a8cd1..bd5c746dd54 100644
--- a/spec/lib/gitlab/danger/helper_spec.rb
+++ b/spec/lib/gitlab/danger/helper_spec.rb
@@ -351,33 +351,23 @@ RSpec.describe Gitlab::Danger::Helper do
end
context 'having specific changes' do
- it 'has database and backend categories' do
- changed_files = ['usage_data.rb', 'lib/gitlab/usage_data.rb', 'ee/lib/ee/gitlab/usage_data.rb']
-
- changed_files.each do |file|
- allow(fake_git).to receive(:diff_for_file).with(file) { double(:diff, patch: "+ count(User.active)") }
-
- expect(helper.categories_for_file(file)).to eq([:database, :backend])
- end
- end
-
- it 'has backend category' do
- allow(fake_git).to receive(:diff_for_file).with('usage_data.rb') { double(:diff, patch: "+ alt_usage_data(User.active)") }
-
- expect(helper.categories_for_file('usage_data.rb')).to eq([:backend])
- end
-
- it 'has backend category for changes outside usage_data files' do
- allow(fake_git).to receive(:diff_for_file).with('user.rb') { double(:diff, patch: "+ count(User.active)") }
-
- expect(helper.categories_for_file('user.rb')).to eq([:backend])
+ where(:expected_categories, :patch, :changed_files) do
+ [:database, :backend] | '+ count(User.active)' | ['usage_data.rb', 'lib/gitlab/usage_data.rb', 'ee/lib/ee/gitlab/usage_data.rb']
+ [:database, :backend] | '+ estimate_batch_distinct_count(User.active)' | ['usage_data.rb']
+ [:backend] | '+ alt_usage_data(User.active)' | ['usage_data.rb']
+ [:backend] | '+ count(User.active)' | ['user.rb']
+ [:backend] | '+ count(User.active)' | ['usage_data/topology.rb']
+ [:backend] | '+ foo_count(User.active)' | ['usage_data.rb']
end
- it 'has backend category for files that are not usage_data.rb' do
- changed_file = 'usage_data/topology.rb'
- allow(fake_git).to receive(:diff_for_file).with(changed_file) { double(:diff, patch: "+ count(User.active)") }
+ with_them do
+ it 'has the correct categories' do
+ changed_files.each do |file|
+ allow(fake_git).to receive(:diff_for_file).with(file) { double(:diff, patch: patch) }
- expect(helper.categories_for_file(changed_file)).to eq([:backend])
+ expect(helper.categories_for_file(file)).to eq(expected_categories)
+ end
+ end
end
end
end
@@ -412,24 +402,6 @@ RSpec.describe Gitlab::Danger::Helper do
end
end
- describe '#sanitize_mr_title' do
- where(:mr_title, :expected_mr_title) do
- 'My MR title' | 'My MR title'
- 'WIP: My MR title' | 'My MR title'
- 'Draft: My MR title' | 'My MR title'
- '(Draft) My MR title' | 'My MR title'
- '[Draft] My MR title' | 'My MR title'
- '[DRAFT] My MR title' | 'My MR title'
- 'DRAFT: My MR title' | 'My MR title'
- end
-
- with_them do
- subject { helper.sanitize_mr_title(mr_title) }
-
- it { is_expected.to eq(expected_mr_title) }
- end
- end
-
describe '#security_mr?' do
it 'returns false when `gitlab_helper` is unavailable' do
expect(helper).to receive(:gitlab_helper).and_return(nil)
diff --git a/spec/lib/gitlab/danger/roulette_spec.rb b/spec/lib/gitlab/danger/roulette_spec.rb
index 561e108bf31..59ac3b12b6b 100644
--- a/spec/lib/gitlab/danger/roulette_spec.rb
+++ b/spec/lib/gitlab/danger/roulette_spec.rb
@@ -245,69 +245,6 @@ RSpec.describe Gitlab::Danger::Roulette do
end
end
end
-
- describe 'reviewer suggestion probability' do
- let(:reviewer) { teammate_with_capability('reviewer', 'reviewer backend') }
- let(:hungry_reviewer) { teammate_with_capability('hungry_reviewer', 'reviewer backend', hungry: true) }
- let(:traintainer) { teammate_with_capability('traintainer', 'trainee_maintainer backend') }
- let(:hungry_traintainer) { teammate_with_capability('hungry_traintainer', 'trainee_maintainer backend', hungry: true) }
- let(:teammates) do
- [
- reviewer.to_h,
- hungry_reviewer.to_h,
- traintainer.to_h,
- hungry_traintainer.to_h
- ]
- end
-
- let(:categories) { [:backend] }
-
- # This test checks probabilities and relies on inherent randomness.
- # The variance is inversely related to sample size
- # Given large enough sample size, the variance would be smaller,
- # but the test would take longer.
- # Given smaller sample size, the variance would be larger,
- # but the test would take less time.
- let!(:sample_size) { 500 }
- let!(:variance) { 0.1 }
-
- before do
- # This test needs actual randomness to simulate probabilities
- allow(subject).to receive(:new_random).and_return(Random.new)
- WebMock
- .stub_request(:get, described_class::ROULETTE_DATA_URL)
- .to_return(body: teammate_json)
- end
-
- it 'has 1:2:3:4 probability of picking reviewer, hungry_reviewer, traintainer, hungry_traintainer' do
- picks = Array.new(sample_size).map do
- spins = subject.spin(project, categories, timezone_experiment: timezone_experiment)
- spins.first.reviewer.name
- end
-
- expect(probability(picks, 'reviewer')).to be_within(variance).of(0.1)
- expect(probability(picks, 'hungry_reviewer')).to be_within(variance).of(0.2)
- expect(probability(picks, 'traintainer')).to be_within(variance).of(0.3)
- expect(probability(picks, 'hungry_traintainer')).to be_within(variance).of(0.4)
- end
-
- def probability(picks, role)
- picks.count(role).to_f / picks.length
- end
-
- def teammate_with_capability(name, capability, hungry: false)
- Gitlab::Danger::Teammate.new(
- {
- 'name' => name,
- 'projects' => {
- 'gitlab' => capability
- },
- 'available' => true,
- 'hungry' => hungry
- }
- )
- end
- end
end
RSpec::Matchers.define :match_teammates do |expected|
diff --git a/spec/lib/gitlab/danger/teammate_spec.rb b/spec/lib/gitlab/danger/teammate_spec.rb
index eebe14ed5e1..9c066ba4c1b 100644
--- a/spec/lib/gitlab/danger/teammate_spec.rb
+++ b/spec/lib/gitlab/danger/teammate_spec.rb
@@ -121,6 +121,14 @@ RSpec.describe Gitlab::Danger::Teammate do
end
end
+ context 'when capabilities include maintainer engineering productivity' do
+ let(:capabilities) { ['maintainer engineering_productivity'] }
+
+ it '#maintainer? returns true' do
+ expect(subject.maintainer?(project, :engineering_productivity, labels)).to be_truthy
+ end
+ end
+
context 'when capabilities include trainee_maintainer backend' do
let(:capabilities) { ['trainee_maintainer backend'] }
diff --git a/spec/lib/gitlab/danger/title_linting_spec.rb b/spec/lib/gitlab/danger/title_linting_spec.rb
new file mode 100644
index 00000000000..b48d2c5e53d
--- /dev/null
+++ b/spec/lib/gitlab/danger/title_linting_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rspec-parameterized'
+
+require 'gitlab/danger/title_linting'
+
+RSpec.describe Gitlab::Danger::TitleLinting do
+ using RSpec::Parameterized::TableSyntax
+
+ describe '#sanitize_mr_title' do
+ where(:mr_title, :expected_mr_title) do
+ '`My MR title`' | "\\`My MR title\\`"
+ 'WIP: My MR title' | 'My MR title'
+ 'Draft: My MR title' | 'My MR title'
+ '(Draft) My MR title' | 'My MR title'
+ '[Draft] My MR title' | 'My MR title'
+ '[DRAFT] My MR title' | 'My MR title'
+ 'DRAFT: My MR title' | 'My MR title'
+ 'DRAFT: `My MR title`' | "\\`My MR title\\`"
+ end
+
+ with_them do
+ subject { described_class.sanitize_mr_title(mr_title) }
+
+ it { is_expected.to eq(expected_mr_title) }
+ end
+ end
+
+ describe '#remove_draft_flag' do
+ where(:mr_title, :expected_mr_title) do
+ 'WIP: My MR title' | 'My MR title'
+ 'Draft: My MR title' | 'My MR title'
+ '(Draft) My MR title' | 'My MR title'
+ '[Draft] My MR title' | 'My MR title'
+ '[DRAFT] My MR title' | 'My MR title'
+ 'DRAFT: My MR title' | 'My MR title'
+ end
+
+ with_them do
+ subject { described_class.remove_draft_flag(mr_title) }
+
+ it { is_expected.to eq(expected_mr_title) }
+ end
+ end
+
+ describe '#has_draft_flag?' do
+ it 'returns true for a draft title' do
+ expect(described_class.has_draft_flag?('Draft: My MR title')).to be true
+ end
+
+ it 'returns false for a non-draft title' do
+ expect(described_class.has_draft_flag?('My MR title')).to be false
+ end
+ end
+end
diff --git a/spec/lib/gitlab/danger/weightage/maintainers_spec.rb b/spec/lib/gitlab/danger/weightage/maintainers_spec.rb
new file mode 100644
index 00000000000..066bb487fa2
--- /dev/null
+++ b/spec/lib/gitlab/danger/weightage/maintainers_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'gitlab/danger/weightage/maintainers'
+
+RSpec.describe Gitlab::Danger::Weightage::Maintainers do
+ let(:multiplier) { Gitlab::Danger::Weightage::CAPACITY_MULTIPLIER }
+ let(:regular_maintainer) { double('Teammate', reduced_capacity: false) }
+ let(:reduced_capacity_maintainer) { double('Teammate', reduced_capacity: true) }
+ let(:maintainers) do
+ [
+ regular_maintainer,
+ reduced_capacity_maintainer
+ ]
+ end
+
+ let(:maintainer_count) { Gitlab::Danger::Weightage::BASE_REVIEWER_WEIGHT * multiplier }
+ let(:reduced_capacity_maintainer_count) { Gitlab::Danger::Weightage::BASE_REVIEWER_WEIGHT }
+
+ subject(:weighted_maintainers) { described_class.new(maintainers).execute }
+
+ describe '#execute' do
+ it 'weights the maintainers overall' do
+ expect(weighted_maintainers.count).to eq maintainer_count + reduced_capacity_maintainer_count
+ end
+
+ it 'has total count of regular maintainers' do
+ expect(weighted_maintainers.count { |r| r.object_id == regular_maintainer.object_id }).to eq maintainer_count
+ end
+
+ it 'has count of reduced capacity maintainers' do
+ expect(weighted_maintainers.count { |r| r.object_id == reduced_capacity_maintainer.object_id }).to eq reduced_capacity_maintainer_count
+ end
+ end
+end
diff --git a/spec/lib/gitlab/danger/weightage/reviewers_spec.rb b/spec/lib/gitlab/danger/weightage/reviewers_spec.rb
new file mode 100644
index 00000000000..cca81f4d9b5
--- /dev/null
+++ b/spec/lib/gitlab/danger/weightage/reviewers_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'gitlab/danger/weightage/reviewers'
+
+RSpec.describe Gitlab::Danger::Weightage::Reviewers do
+ let(:multiplier) { Gitlab::Danger::Weightage::CAPACITY_MULTIPLIER }
+ let(:regular_reviewer) { double('Teammate', hungry: false, reduced_capacity: false) }
+ let(:hungry_reviewer) { double('Teammate', hungry: true, reduced_capacity: false) }
+ let(:reduced_capacity_reviewer) { double('Teammate', hungry: false, reduced_capacity: true) }
+ let(:reviewers) do
+ [
+ hungry_reviewer,
+ regular_reviewer,
+ reduced_capacity_reviewer
+ ]
+ end
+
+ let(:regular_traintainer) { double('Teammate', hungry: false, reduced_capacity: false) }
+ let(:hungry_traintainer) { double('Teammate', hungry: true, reduced_capacity: false) }
+ let(:reduced_capacity_traintainer) { double('Teammate', hungry: false, reduced_capacity: true) }
+ let(:traintainers) do
+ [
+ hungry_traintainer,
+ regular_traintainer,
+ reduced_capacity_traintainer
+ ]
+ end
+
+ let(:hungry_reviewer_count) { Gitlab::Danger::Weightage::BASE_REVIEWER_WEIGHT * multiplier + described_class::DEFAULT_REVIEWER_WEIGHT }
+ let(:hungry_traintainer_count) { described_class::TRAINTAINER_WEIGHT * multiplier + described_class::DEFAULT_REVIEWER_WEIGHT }
+ let(:reviewer_count) { Gitlab::Danger::Weightage::BASE_REVIEWER_WEIGHT * multiplier }
+ let(:traintainer_count) { Gitlab::Danger::Weightage::BASE_REVIEWER_WEIGHT * described_class::TRAINTAINER_WEIGHT * multiplier }
+ let(:reduced_capacity_reviewer_count) { Gitlab::Danger::Weightage::BASE_REVIEWER_WEIGHT }
+ let(:reduced_capacity_traintainer_count) { described_class::TRAINTAINER_WEIGHT }
+
+ subject(:weighted_reviewers) { described_class.new(reviewers, traintainers).execute }
+
+ describe '#execute', :aggregate_failures do
+ it 'weights the reviewers overall' do
+ reviewers_count = hungry_reviewer_count + reviewer_count + reduced_capacity_reviewer_count
+ traintainers_count = hungry_traintainer_count + traintainer_count + reduced_capacity_traintainer_count
+
+ expect(weighted_reviewers.count).to eq reviewers_count + traintainers_count
+ end
+
+ it 'has total count of hungry reviewers and traintainers' do
+ expect(weighted_reviewers.count(&:hungry)).to eq hungry_reviewer_count + hungry_traintainer_count
+ expect(weighted_reviewers.count { |r| r.object_id == hungry_reviewer.object_id }).to eq hungry_reviewer_count
+ expect(weighted_reviewers.count { |r| r.object_id == hungry_traintainer.object_id }).to eq hungry_traintainer_count
+ end
+
+ it 'has total count of regular reviewers and traintainers' do
+ expect(weighted_reviewers.count { |r| r.object_id == regular_reviewer.object_id }).to eq reviewer_count
+ expect(weighted_reviewers.count { |r| r.object_id == regular_traintainer.object_id }).to eq traintainer_count
+ end
+
+ it 'has count of reduced capacity reviewers' do
+ expect(weighted_reviewers.count(&:reduced_capacity)).to eq reduced_capacity_reviewer_count + reduced_capacity_traintainer_count
+ expect(weighted_reviewers.count { |r| r.object_id == reduced_capacity_reviewer.object_id }).to eq reduced_capacity_reviewer_count
+ expect(weighted_reviewers.count { |r| r.object_id == reduced_capacity_traintainer.object_id }).to eq reduced_capacity_traintainer_count
+ end
+ end
+end
diff --git a/spec/lib/gitlab/data_builder/build_spec.rb b/spec/lib/gitlab/data_builder/build_spec.rb
index cfaaf849b09..2f74e766a11 100644
--- a/spec/lib/gitlab/data_builder/build_spec.rb
+++ b/spec/lib/gitlab/data_builder/build_spec.rb
@@ -26,6 +26,7 @@ RSpec.describe Gitlab::DataBuilder::Build do
it {
expect(data[:user]).to eq(
{
+ id: user.id,
name: user.name,
username: user.username,
avatar_url: user.avatar_url(only_path: false),
diff --git a/spec/lib/gitlab/data_builder/pipeline_spec.rb b/spec/lib/gitlab/data_builder/pipeline_spec.rb
index e5dfff33a2a..297d87708d8 100644
--- a/spec/lib/gitlab/data_builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/data_builder/pipeline_spec.rb
@@ -41,6 +41,7 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
expect(project_data).to eq(project.hook_attrs(backward: false))
expect(data[:merge_request]).to be_nil
expect(data[:user]).to eq({
+ id: user.id,
name: user.name,
username: user.username,
avatar_url: user.avatar_url(only_path: false),
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index a763dc08b73..6b709cba5b3 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::MigrationHelpers do
+ include Database::TableSchemaHelpers
+
let(:model) do
ActiveRecord::Migration.new.extend(described_class)
end
@@ -96,6 +98,131 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
+ describe '#create_table_with_constraints' do
+ let(:table_name) { :test_table }
+ let(:column_attributes) do
+ [
+ { name: 'id', sql_type: 'bigint', null: false, default: nil },
+ { name: 'created_at', sql_type: 'timestamp with time zone', null: false, default: nil },
+ { name: 'updated_at', sql_type: 'timestamp with time zone', null: false, default: nil },
+ { name: 'some_id', sql_type: 'integer', null: false, default: nil },
+ { name: 'active', sql_type: 'boolean', null: false, default: 'true' },
+ { name: 'name', sql_type: 'text', null: true, default: nil }
+ ]
+ end
+
+ before do
+ allow(model).to receive(:transaction_open?).and_return(true)
+ end
+
+ context 'when no check constraints are defined' do
+ it 'creates the table as expected' do
+ model.create_table_with_constraints table_name do |t|
+ t.timestamps_with_timezone
+ t.integer :some_id, null: false
+ t.boolean :active, null: false, default: true
+ t.text :name
+ end
+
+ expect_table_columns_to_match(column_attributes, table_name)
+ end
+ end
+
+ context 'when check constraints are defined' do
+ context 'when the text_limit is explicitly named' do
+ it 'creates the table as expected' do
+ model.create_table_with_constraints table_name do |t|
+ t.timestamps_with_timezone
+ t.integer :some_id, null: false
+ t.boolean :active, null: false, default: true
+ t.text :name
+
+ t.text_limit :name, 255, name: 'check_name_length'
+ t.check_constraint :some_id_is_positive, 'some_id > 0'
+ end
+
+ expect_table_columns_to_match(column_attributes, table_name)
+
+ expect_check_constraint(table_name, 'check_name_length', 'char_length(name) <= 255')
+ expect_check_constraint(table_name, 'some_id_is_positive', 'some_id > 0')
+ end
+ end
+
+ context 'when the text_limit is not named' do
+ it 'creates the table as expected, naming the text limit' do
+ model.create_table_with_constraints table_name do |t|
+ t.timestamps_with_timezone
+ t.integer :some_id, null: false
+ t.boolean :active, null: false, default: true
+ t.text :name
+
+ t.text_limit :name, 255
+ t.check_constraint :some_id_is_positive, 'some_id > 0'
+ end
+
+ expect_table_columns_to_match(column_attributes, table_name)
+
+ expect_check_constraint(table_name, 'check_cda6f69506', 'char_length(name) <= 255')
+ expect_check_constraint(table_name, 'some_id_is_positive', 'some_id > 0')
+ end
+ end
+
+ it 'runs the change within a with_lock_retries block' do
+ expect(model).to receive(:with_lock_retries).ordered.and_yield
+ expect(model).to receive(:create_table).ordered.and_call_original
+ expect(model).to receive(:execute).with(<<~SQL).ordered
+ ALTER TABLE "#{table_name}"\nADD CONSTRAINT "check_cda6f69506" CHECK (char_length("name") <= 255)
+ SQL
+
+ model.create_table_with_constraints table_name do |t|
+ t.text :name
+ t.text_limit :name, 255
+ end
+ end
+
+ context 'when constraints are given invalid names' do
+ let(:expected_max_length) { described_class::MAX_IDENTIFIER_NAME_LENGTH }
+ let(:expected_error_message) { "The maximum allowed constraint name is #{expected_max_length} characters" }
+
+ context 'when the explicit text limit name is not valid' do
+ it 'raises an error' do
+ too_long_length = expected_max_length + 1
+
+ expect do
+ model.create_table_with_constraints table_name do |t|
+ t.timestamps_with_timezone
+ t.integer :some_id, null: false
+ t.boolean :active, null: false, default: true
+ t.text :name
+
+ t.text_limit :name, 255, name: ('a' * too_long_length)
+ t.check_constraint :some_id_is_positive, 'some_id > 0'
+ end
+ end.to raise_error(expected_error_message)
+ end
+ end
+
+ context 'when a check constraint name is not valid' do
+ it 'raises an error' do
+ too_long_length = expected_max_length + 1
+
+ expect do
+ model.create_table_with_constraints table_name do |t|
+ t.timestamps_with_timezone
+ t.integer :some_id, null: false
+ t.boolean :active, null: false, default: true
+ t.text :name
+
+ t.text_limit :name, 255
+ t.check_constraint ('a' * too_long_length), 'some_id > 0'
+ end
+ end.to raise_error(expected_error_message)
+ end
+ end
+ end
+ end
+ end
+
describe '#add_concurrent_index' do
context 'outside a transaction' do
before do
@@ -1548,6 +1675,69 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
+ describe '#initialize_conversion_of_integer_to_bigint' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :repository) }
+ let(:issue) { create(:issue, project: project) }
+ let!(:event) do
+ create(:event, :created, project: project, target: issue, author: user)
+ end
+
+ context 'in a transaction' do
+ it 'raises RuntimeError' do
+ allow(model).to receive(:transaction_open?).and_return(true)
+
+ expect { model.initialize_conversion_of_integer_to_bigint(:events, :id) }
+ .to raise_error(RuntimeError)
+ end
+ end
+
+ context 'outside a transaction' do
+ before do
+ allow(model).to receive(:transaction_open?).and_return(false)
+ end
+
+ it 'creates a bigint column and starts backfilling it' do
+ expect(model)
+ .to receive(:add_column)
+ .with(
+ :events,
+ 'id_convert_to_bigint',
+ :bigint,
+ default: 0,
+ null: false
+ )
+
+ expect(model)
+ .to receive(:install_rename_triggers)
+ .with(:events, :id, 'id_convert_to_bigint')
+
+ expect(model).to receive(:queue_background_migration_jobs_by_range_at_intervals).and_call_original
+
+ expect(BackgroundMigrationWorker)
+ .to receive(:perform_in)
+ .ordered
+ .with(
+ 2.minutes,
+ 'CopyColumnUsingBackgroundMigrationJob',
+ [event.id, event.id, :events, :id, :id, 'id_convert_to_bigint', 100]
+ )
+
+ expect(Gitlab::BackgroundMigration)
+ .to receive(:steal)
+ .ordered
+ .with('CopyColumnUsingBackgroundMigrationJob')
+
+ model.initialize_conversion_of_integer_to_bigint(
+ :events,
+ :id,
+ batch_size: 300,
+ sub_batch_size: 100
+ )
+ end
+ end
+ end
+
describe '#index_exists_by_name?' do
it 'returns true if an index exists' do
ActiveRecord::Base.connection.execute(
diff --git a/spec/lib/gitlab/database/partitioning/partition_creator_spec.rb b/spec/lib/gitlab/database/partitioning/partition_creator_spec.rb
index 56399941662..ec89f2ed61c 100644
--- a/spec/lib/gitlab/database/partitioning/partition_creator_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/partition_creator_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Partitioning::PartitionCreator do
- include PartitioningHelpers
+ include Database::PartitioningHelpers
include ExclusiveLeaseHelpers
describe '.register' do
diff --git a/spec/lib/gitlab/database/partitioning/replace_table_spec.rb b/spec/lib/gitlab/database/partitioning/replace_table_spec.rb
index d47666eeffd..8e27797208c 100644
--- a/spec/lib/gitlab/database/partitioning/replace_table_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/replace_table_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Partitioning::ReplaceTable, '#perform' do
- include TableSchemaHelpers
+ include Database::TableSchemaHelpers
subject(:replace_table) { described_class.new(original_table, replacement_table, archived_table, 'id').perform }
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
index 7d88c17c9b3..93dbd9d7c30 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers do
- include TriggerHelpers
+ include Database::TriggerHelpers
let(:model) do
ActiveRecord::Migration.new.extend(described_class)
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb
index 7f61ff759fc..603f3dc41af 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::IndexHelpers do
- include TableSchemaHelpers
+ include Database::TableSchemaHelpers
let(:migration) do
ActiveRecord::Migration.new.extend(described_class)
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
index f10ff704c17..b50e02c7043 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
@@ -3,25 +3,36 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHelpers do
- include PartitioningHelpers
- include TriggerHelpers
- include TableSchemaHelpers
+ include Database::PartitioningHelpers
+ include Database::TriggerHelpers
+ include Database::TableSchemaHelpers
let(:migration) do
ActiveRecord::Migration.new.extend(described_class)
end
let_it_be(:connection) { ActiveRecord::Base.connection }
- let(:source_table) { :audit_events }
+ let(:source_table) { :_test_original_table }
let(:partitioned_table) { '_test_migration_partitioned_table' }
let(:function_name) { '_test_migration_function_name' }
let(:trigger_name) { '_test_migration_trigger_name' }
let(:partition_column) { 'created_at' }
let(:min_date) { Date.new(2019, 12) }
let(:max_date) { Date.new(2020, 3) }
+ let(:source_model) { Class.new(ActiveRecord::Base) }
before do
allow(migration).to receive(:puts)
+
+ migration.create_table source_table do |t|
+ t.string :name, null: false
+ t.integer :age, null: false
+ t.datetime partition_column
+ t.datetime :updated_at
+ end
+
+ source_model.table_name = source_table
+
allow(migration).to receive(:transaction_open?).and_return(false)
allow(migration).to receive(:make_partitioned_table_name).and_return(partitioned_table)
allow(migration).to receive(:make_sync_function_name).and_return(function_name)
@@ -81,14 +92,11 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
end
context 'when the given table does not have a primary key' do
- let(:source_table) { :_partitioning_migration_helper_test_table }
- let(:partition_column) { :some_field }
-
it 'raises an error' do
- migration.create_table source_table, id: false do |t|
- t.integer :id
- t.datetime partition_column
- end
+ migration.execute(<<~SQL)
+ ALTER TABLE #{source_table}
+ DROP CONSTRAINT #{source_table}_pkey
+ SQL
expect do
migration.partition_table_by_date source_table, partition_column, min_date: min_date, max_date: max_date
@@ -97,12 +105,12 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
end
context 'when an invalid partition column is given' do
- let(:partition_column) { :_this_is_not_real }
+ let(:invalid_column) { :_this_is_not_real }
it 'raises an error' do
expect do
- migration.partition_table_by_date source_table, partition_column, min_date: min_date, max_date: max_date
- end.to raise_error(/partition column #{partition_column} does not exist/)
+ migration.partition_table_by_date source_table, invalid_column, min_date: min_date, max_date: max_date
+ end.to raise_error(/partition column #{invalid_column} does not exist/)
end
end
@@ -126,19 +134,19 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
context 'with a non-integer primary key datatype' do
before do
- connection.create_table :another_example, id: false do |t|
+ connection.create_table non_int_table, id: false do |t|
t.string :identifier, primary_key: true
t.timestamp :created_at
end
end
- let(:source_table) { :another_example }
+ let(:non_int_table) { :another_example }
let(:old_primary_key) { 'identifier' }
it 'does not change the primary key datatype' do
- migration.partition_table_by_date source_table, partition_column, min_date: min_date, max_date: max_date
+ migration.partition_table_by_date non_int_table, partition_column, min_date: min_date, max_date: max_date
- original_pk_column = connection.columns(source_table).find { |c| c.name == old_primary_key }
+ original_pk_column = connection.columns(non_int_table).find { |c| c.name == old_primary_key }
pk_column = connection.columns(partitioned_table).find { |c| c.name == old_primary_key }
expect(pk_column).not_to be_nil
@@ -176,11 +184,9 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
end
context 'when min_date is not given' do
- let(:source_table) { :todos }
-
context 'with records present already' do
before do
- create(:todo, created_at: Date.parse('2019-11-05'))
+ source_model.create!(name: 'Test', age: 10, created_at: Date.parse('2019-11-05'))
end
it 'creates a partition spanning over each month from the first record' do
@@ -248,13 +254,12 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
end
describe 'keeping data in sync with the partitioned table' do
- let(:source_table) { :todos }
- let(:model) { Class.new(ActiveRecord::Base) }
+ let(:partitioned_model) { Class.new(ActiveRecord::Base) }
let(:timestamp) { Time.utc(2019, 12, 1, 12).round }
before do
- model.primary_key = :id
- model.table_name = partitioned_table
+ partitioned_model.primary_key = :id
+ partitioned_model.table_name = partitioned_table
end
it 'creates a trigger function on the original table' do
@@ -270,50 +275,50 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
it 'syncs inserts to the partitioned tables' do
migration.partition_table_by_date source_table, partition_column, min_date: min_date, max_date: max_date
- expect(model.count).to eq(0)
+ expect(partitioned_model.count).to eq(0)
- first_todo = create(:todo, created_at: timestamp, updated_at: timestamp)
- second_todo = create(:todo, created_at: timestamp, updated_at: timestamp)
+ first_record = source_model.create!(name: 'Bob', age: 20, created_at: timestamp, updated_at: timestamp)
+ second_record = source_model.create!(name: 'Alice', age: 30, created_at: timestamp, updated_at: timestamp)
- expect(model.count).to eq(2)
- expect(model.find(first_todo.id).attributes).to eq(first_todo.attributes)
- expect(model.find(second_todo.id).attributes).to eq(second_todo.attributes)
+ expect(partitioned_model.count).to eq(2)
+ expect(partitioned_model.find(first_record.id).attributes).to eq(first_record.attributes)
+ expect(partitioned_model.find(second_record.id).attributes).to eq(second_record.attributes)
end
it 'syncs updates to the partitioned tables' do
migration.partition_table_by_date source_table, partition_column, min_date: min_date, max_date: max_date
- first_todo = create(:todo, :pending, commit_id: nil, created_at: timestamp, updated_at: timestamp)
- second_todo = create(:todo, created_at: timestamp, updated_at: timestamp)
+ first_record = source_model.create!(name: 'Bob', age: 20, created_at: timestamp, updated_at: timestamp)
+ second_record = source_model.create!(name: 'Alice', age: 30, created_at: timestamp, updated_at: timestamp)
- expect(model.count).to eq(2)
+ expect(partitioned_model.count).to eq(2)
- first_copy = model.find(first_todo.id)
- second_copy = model.find(second_todo.id)
+ first_copy = partitioned_model.find(first_record.id)
+ second_copy = partitioned_model.find(second_record.id)
- expect(first_copy.attributes).to eq(first_todo.attributes)
- expect(second_copy.attributes).to eq(second_todo.attributes)
+ expect(first_copy.attributes).to eq(first_record.attributes)
+ expect(second_copy.attributes).to eq(second_record.attributes)
- first_todo.update(state_event: 'done', commit_id: 'abc123', updated_at: timestamp + 1.second)
+ first_record.update!(age: 21, updated_at: timestamp + 1.hour)
- expect(model.count).to eq(2)
- expect(first_copy.reload.attributes).to eq(first_todo.attributes)
- expect(second_copy.reload.attributes).to eq(second_todo.attributes)
+ expect(partitioned_model.count).to eq(2)
+ expect(first_copy.reload.attributes).to eq(first_record.attributes)
+ expect(second_copy.reload.attributes).to eq(second_record.attributes)
end
it 'syncs deletes to the partitioned tables' do
migration.partition_table_by_date source_table, partition_column, min_date: min_date, max_date: max_date
- first_todo = create(:todo, created_at: timestamp, updated_at: timestamp)
- second_todo = create(:todo, created_at: timestamp, updated_at: timestamp)
+ first_record = source_model.create!(name: 'Bob', age: 20, created_at: timestamp, updated_at: timestamp)
+ second_record = source_model.create!(name: 'Alice', age: 30, created_at: timestamp, updated_at: timestamp)
- expect(model.count).to eq(2)
+ expect(partitioned_model.count).to eq(2)
- first_todo.destroy
+ first_record.destroy!
- expect(model.count).to eq(1)
- expect(model.find_by_id(first_todo.id)).to be_nil
- expect(model.find(second_todo.id).attributes).to eq(second_todo.attributes)
+ expect(partitioned_model.count).to eq(1)
+ expect(partitioned_model.find_by_id(first_record.id)).to be_nil
+ expect(partitioned_model.find(second_record.id).attributes).to eq(second_record.attributes)
end
end
end
@@ -388,13 +393,12 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
end
context 'when records exist in the source table' do
- let(:source_table) { 'todos' }
let(:migration_class) { '::Gitlab::Database::PartitioningMigrationHelpers::BackfillPartitionedTable' }
let(:sub_batch_size) { described_class::SUB_BATCH_SIZE }
let(:pause_seconds) { described_class::PAUSE_SECONDS }
- let!(:first_id) { create(:todo).id }
- let!(:second_id) { create(:todo).id }
- let!(:third_id) { create(:todo).id }
+ let!(:first_id) { source_model.create!(name: 'Bob', age: 20).id }
+ let!(:second_id) { source_model.create!(name: 'Alice', age: 30).id }
+ let!(:third_id) { source_model.create!(name: 'Sam', age: 40).id }
before do
stub_const("#{described_class.name}::BATCH_SIZE", 2)
@@ -410,10 +414,10 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- first_job_arguments = [first_id, second_id, source_table, partitioned_table, 'id']
+ first_job_arguments = [first_id, second_id, source_table.to_s, partitioned_table, 'id']
expect(BackgroundMigrationWorker.jobs[0]['args']).to eq([migration_class, first_job_arguments])
- second_job_arguments = [third_id, third_id, source_table, partitioned_table, 'id']
+ second_job_arguments = [third_id, third_id, source_table.to_s, partitioned_table, 'id']
expect(BackgroundMigrationWorker.jobs[1]['args']).to eq([migration_class, second_job_arguments])
end
end
@@ -482,7 +486,6 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
end
describe '#finalize_backfilling_partitioned_table' do
- let(:source_table) { 'todos' }
let(:source_column) { 'id' }
context 'when the table is not allowed' do
@@ -536,27 +539,27 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
context 'when there is missed data' do
let(:partitioned_model) { Class.new(ActiveRecord::Base) }
let(:timestamp) { Time.utc(2019, 12, 1, 12).round }
- let!(:todo1) { create(:todo, created_at: timestamp, updated_at: timestamp) }
- let!(:todo2) { create(:todo, created_at: timestamp, updated_at: timestamp) }
- let!(:todo3) { create(:todo, created_at: timestamp, updated_at: timestamp) }
- let!(:todo4) { create(:todo, created_at: timestamp, updated_at: timestamp) }
+ let!(:record1) { source_model.create!(name: 'Bob', age: 20, created_at: timestamp, updated_at: timestamp) }
+ let!(:record2) { source_model.create!(name: 'Alice', age: 30, created_at: timestamp, updated_at: timestamp) }
+ let!(:record3) { source_model.create!(name: 'Sam', age: 40, created_at: timestamp, updated_at: timestamp) }
+ let!(:record4) { source_model.create!(name: 'Sue', age: 50, created_at: timestamp, updated_at: timestamp) }
let!(:pending_job1) do
create(:background_migration_job,
class_name: described_class::MIGRATION_CLASS_NAME,
- arguments: [todo1.id, todo2.id, source_table, partitioned_table, source_column])
+ arguments: [record1.id, record2.id, source_table, partitioned_table, source_column])
end
let!(:pending_job2) do
create(:background_migration_job,
class_name: described_class::MIGRATION_CLASS_NAME,
- arguments: [todo3.id, todo3.id, source_table, partitioned_table, source_column])
+ arguments: [record3.id, record3.id, source_table, partitioned_table, source_column])
end
let!(:succeeded_job) do
create(:background_migration_job, :succeeded,
class_name: described_class::MIGRATION_CLASS_NAME,
- arguments: [todo4.id, todo4.id, source_table, partitioned_table, source_column])
+ arguments: [record4.id, record4.id, source_table, partitioned_table, source_column])
end
before do
@@ -575,17 +578,17 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
it 'idempotently cleans up after failed background migrations' do
expect(partitioned_model.count).to eq(0)
- partitioned_model.insert!(todo2.attributes)
+ partitioned_model.insert!(record2.attributes)
expect_next_instance_of(Gitlab::Database::PartitioningMigrationHelpers::BackfillPartitionedTable) do |backfill|
allow(backfill).to receive(:transaction_open?).and_return(false)
expect(backfill).to receive(:perform)
- .with(todo1.id, todo2.id, source_table, partitioned_table, source_column)
+ .with(record1.id, record2.id, source_table, partitioned_table, source_column)
.and_call_original
expect(backfill).to receive(:perform)
- .with(todo3.id, todo3.id, source_table, partitioned_table, source_column)
+ .with(record3.id, record3.id, source_table, partitioned_table, source_column)
.and_call_original
end
@@ -593,12 +596,12 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
expect(partitioned_model.count).to eq(3)
- [todo1, todo2, todo3].each do |original|
+ [record1, record2, record3].each do |original|
copy = partitioned_model.find(original.id)
expect(copy.attributes).to eq(original.attributes)
end
- expect(partitioned_model.find_by_id(todo4.id)).to be_nil
+ expect(partitioned_model.find_by_id(record4.id)).to be_nil
[pending_job1, pending_job2].each do |job|
expect(job.reload).to be_succeeded
diff --git a/spec/lib/gitlab/database/postgres_hll/batch_distinct_counter_spec.rb b/spec/lib/gitlab/database/postgres_hll/batch_distinct_counter_spec.rb
index 934e2274358..2c550f14a08 100644
--- a/spec/lib/gitlab/database/postgres_hll/batch_distinct_counter_spec.rb
+++ b/spec/lib/gitlab/database/postgres_hll/batch_distinct_counter_spec.rb
@@ -24,107 +24,48 @@ RSpec.describe Gitlab::Database::PostgresHll::BatchDistinctCounter do
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(in_transaction)
end
- context 'different distribution of relation records' do
- [10, 100, 100_000].each do |spread|
- context "records are spread within #{spread}" do
- before do
- ids = (1..spread).to_a.sample(10)
- create_list(:issue, 10).each_with_index do |issue, i|
- issue.id = ids[i]
- end
- end
-
- it 'counts table' do
- expect(described_class.new(model).estimate_distinct_count).to be_within(error_rate).percent_of(10)
- end
- end
- end
- end
-
context 'unit test for different counting parameters' do
before_all do
create_list(:issue, 3, author: user)
create_list(:issue, 2, author: another_user)
end
- describe '#estimate_distinct_count' do
- it 'counts table' do
- expect(described_class.new(model).estimate_distinct_count).to be_within(error_rate).percent_of(5)
- end
-
- it 'counts with column field' do
- expect(described_class.new(model, column).estimate_distinct_count).to be_within(error_rate).percent_of(2)
- end
-
- it 'counts with :id field' do
- expect(described_class.new(model, :id).estimate_distinct_count).to be_within(error_rate).percent_of(5)
- end
-
- it 'counts with "id" field' do
- expect(described_class.new(model, "id").estimate_distinct_count).to be_within(error_rate).percent_of(5)
- end
-
- it 'counts with table.column field' do
- expect(described_class.new(model, "#{model.table_name}.#{column}").estimate_distinct_count).to be_within(error_rate).percent_of(2)
- end
-
- it 'counts with Arel column' do
- expect(described_class.new(model, model.arel_table[column]).estimate_distinct_count).to be_within(error_rate).percent_of(2)
- end
-
- it 'counts over joined relations' do
- expect(described_class.new(model.joins(:author), "users.email").estimate_distinct_count).to be_within(error_rate).percent_of(2)
- end
-
- it 'counts with :column field with batch_size of 50K' do
- expect(described_class.new(model, column).estimate_distinct_count(batch_size: 50_000)).to be_within(error_rate).percent_of(2)
- end
-
- it 'will not count table with a batch size less than allowed' do
- expect(described_class.new(model, column).estimate_distinct_count(batch_size: small_batch_size)).to eq(fallback)
- end
-
- it 'counts with different number of batches and aggregates total result' do
- stub_const('Gitlab::Database::PostgresHll::BatchDistinctCounter::MIN_REQUIRED_BATCH_SIZE', 0)
-
- [1, 2, 4, 5, 6].each { |i| expect(described_class.new(model).estimate_distinct_count(batch_size: i)).to be_within(error_rate).percent_of(5) }
- end
-
- it 'counts with a start and finish' do
- expect(described_class.new(model, column).estimate_distinct_count(start: model.minimum(:id), finish: model.maximum(:id))).to be_within(error_rate).percent_of(2)
+ describe '#execute' do
+ it 'builds hll buckets' do
+ expect(described_class.new(model).execute).to be_an_instance_of(Gitlab::Database::PostgresHll::Buckets)
end
- it "defaults the batch size to #{Gitlab::Database::PostgresHll::BatchDistinctCounter::DEFAULT_BATCH_SIZE}" do
+ it "defaults batch size to #{Gitlab::Database::PostgresHll::BatchDistinctCounter::DEFAULT_BATCH_SIZE}" do
min_id = model.minimum(:id)
batch_end_id = min_id + calculate_batch_size(Gitlab::Database::PostgresHll::BatchDistinctCounter::DEFAULT_BATCH_SIZE)
expect(model).to receive(:where).with("id" => min_id..batch_end_id).and_call_original
- described_class.new(model).estimate_distinct_count
+ described_class.new(model).execute
end
context 'when a transaction is open' do
let(:in_transaction) { true }
it 'raises an error' do
- expect { described_class.new(model, column).estimate_distinct_count }.to raise_error('BatchCount can not be run inside a transaction')
+ expect { described_class.new(model, column).execute }.to raise_error('BatchCount can not be run inside a transaction')
end
end
context 'disallowed configurations' do
let(:default_batch_size) { Gitlab::Database::PostgresHll::BatchDistinctCounter::DEFAULT_BATCH_SIZE }
- it 'returns fallback if start is bigger than finish' do
- expect(described_class.new(model, column).estimate_distinct_count(start: 1, finish: 0)).to eq(fallback)
+ it 'raises WRONG_CONFIGURATION_ERROR if start is bigger than finish' do
+ expect { described_class.new(model, column).execute(start: 1, finish: 0) }.to raise_error(described_class::WRONG_CONFIGURATION_ERROR)
end
- it 'returns fallback if data volume exceeds upper limit' do
+ it 'raises WRONG_CONFIGURATION_ERROR if data volume exceeds upper limit' do
large_finish = Gitlab::Database::PostgresHll::BatchDistinctCounter::MAX_DATA_VOLUME + 1
- expect(described_class.new(model, column).estimate_distinct_count(start: 1, finish: large_finish)).to eq(fallback)
+ expect { described_class.new(model, column).execute(start: 1, finish: large_finish) }.to raise_error(described_class::WRONG_CONFIGURATION_ERROR)
end
- it 'returns fallback if batch size is less than min required' do
- expect(described_class.new(model, column).estimate_distinct_count(batch_size: small_batch_size)).to eq(fallback)
+ it 'raises WRONG_CONFIGURATION_ERROR if batch size is less than min required' do
+ expect { described_class.new(model, column).execute(batch_size: small_batch_size) }.to raise_error(described_class::WRONG_CONFIGURATION_ERROR)
end
end
end
diff --git a/spec/lib/gitlab/database/postgres_hll/buckets_spec.rb b/spec/lib/gitlab/database/postgres_hll/buckets_spec.rb
new file mode 100644
index 00000000000..b4d8fd4a449
--- /dev/null
+++ b/spec/lib/gitlab/database/postgres_hll/buckets_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::PostgresHll::Buckets do
+ let(:error_rate) { Gitlab::Database::PostgresHll::BatchDistinctCounter::ERROR_RATE } # HyperLogLog is a probabilistic algorithm that provides an estimate within a given error margin
+ let(:buckets_hash_5) { { 121 => 2, 126 => 1, 141 => 1, 383 => 1, 56 => 1 } }
+ let(:buckets_hash_2) { { 141 => 1, 56 => 1 } }
+
+ describe '#estimated_distinct_count' do
+ it 'provides estimated cardinality', :aggregate_failures do
+ expect(described_class.new(buckets_hash_5).estimated_distinct_count).to be_within(error_rate).percent_of(5)
+ expect(described_class.new(buckets_hash_2).estimated_distinct_count).to be_within(error_rate).percent_of(2)
+ expect(described_class.new({}).estimated_distinct_count).to eq 0
+ expect(described_class.new.estimated_distinct_count).to eq 0
+ end
+ end
+
+ describe '#merge_hash!' do
+ let(:hash_a) { { 1 => 1, 2 => 3 } }
+ let(:hash_b) { { 1 => 2, 2 => 1 } }
+
+ it 'merges two hashes together into union of two sets' do
+ expect(described_class.new(hash_a).merge_hash!(hash_b).to_json).to eq described_class.new(1 => 2, 2 => 3).to_json
+ end
+ end
+
+ describe '#to_json' do
+ it 'serializes HyperLogLog buckets as a hash' do
+ expect(described_class.new(1 => 5).to_json).to eq '{"1":5}'
+ end
+ end
+end
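A brief usage sketch tying the two specs above together, assuming the asserted behaviour (the model and column are illustrative):

# Build HyperLogLog buckets for a relation in batches, then estimate cardinality.
buckets = Gitlab::Database::PostgresHll::BatchDistinctCounter.new(Issue, :author_id).execute
buckets.estimated_distinct_count # approximate count of distinct authors, within ERROR_RATE

# Buckets from separate runs can be merged (set union) before estimating:
Gitlab::Database::PostgresHll::Buckets.new(121 => 2, 126 => 1).merge_hash!(141 => 1, 56 => 1).estimated_distinct_count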
diff --git a/spec/lib/gitlab/database/reindexing/coordinator_spec.rb b/spec/lib/gitlab/database/reindexing/coordinator_spec.rb
index f45d959c0de..ae6362ba812 100644
--- a/spec/lib/gitlab/database/reindexing/coordinator_spec.rb
+++ b/spec/lib/gitlab/database/reindexing/coordinator_spec.rb
@@ -3,65 +3,79 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Reindexing::Coordinator do
+ include Database::DatabaseHelpers
include ExclusiveLeaseHelpers
describe '.perform' do
- subject { described_class.new(indexes).perform }
+ subject { described_class.new(index, notifier).perform }
- let(:indexes) { [instance_double(Gitlab::Database::PostgresIndex), instance_double(Gitlab::Database::PostgresIndex)] }
- let(:reindexers) { [instance_double(Gitlab::Database::Reindexing::ConcurrentReindex), instance_double(Gitlab::Database::Reindexing::ConcurrentReindex)] }
+ before do
+ swapout_view_for_table(:postgres_indexes)
+
+ allow(Gitlab::Database::Reindexing::ConcurrentReindex).to receive(:new).with(index).and_return(reindexer)
+ allow(Gitlab::Database::Reindexing::ReindexAction).to receive(:create_for).with(index).and_return(action)
+ end
+
+ let(:index) { create(:postgres_index) }
+ let(:notifier) { instance_double(Gitlab::Database::Reindexing::GrafanaNotifier, notify_start: nil, notify_end: nil) }
+ let(:reindexer) { instance_double(Gitlab::Database::Reindexing::ConcurrentReindex, perform: nil) }
+ let(:action) { create(:reindex_action, index: index) }
let!(:lease) { stub_exclusive_lease(lease_key, uuid, timeout: lease_timeout) }
let(:lease_key) { 'gitlab/database/reindexing/coordinator' }
let(:lease_timeout) { 1.day }
let(:uuid) { 'uuid' }
- before do
- allow(Gitlab::Database::Reindexing::ReindexAction).to receive(:keep_track_of).and_yield
+ context 'locking' do
+ it 'acquires a lock while reindexing' do
+ expect(lease).to receive(:try_obtain).ordered.and_return(uuid)
- indexes.zip(reindexers).each do |index, reindexer|
- allow(Gitlab::Database::Reindexing::ConcurrentReindex).to receive(:new).with(index).and_return(reindexer)
- allow(reindexer).to receive(:perform)
- end
- end
+ expect(reindexer).to receive(:perform).ordered
- it 'performs concurrent reindexing for each index' do
- indexes.zip(reindexers).each do |index, reindexer|
- expect(Gitlab::Database::Reindexing::ConcurrentReindex).to receive(:new).with(index).ordered.and_return(reindexer)
- expect(reindexer).to receive(:perform)
+ expect(Gitlab::ExclusiveLease).to receive(:cancel).ordered.with(lease_key, uuid)
+
+ subject
end
- subject
+ it 'does not perform reindexing actions if lease is not granted' do
+ expect(lease).to receive(:try_obtain).ordered.and_return(false)
+ expect(Gitlab::Database::Reindexing::ConcurrentReindex).not_to receive(:new)
+
+ subject
+ end
end
- it 'keeps track of actions and creates ReindexAction records' do
- indexes.each do |index|
- expect(Gitlab::Database::Reindexing::ReindexAction).to receive(:keep_track_of).with(index).and_yield
+ context 'notifications' do
+ it 'sends #notify_start before reindexing' do
+ expect(notifier).to receive(:notify_start).with(action).ordered
+ expect(reindexer).to receive(:perform).ordered
+
+ subject
end
- subject
+ it 'sends #notify_end after reindexing and updating the action is done' do
+ expect(action).to receive(:finish).ordered
+ expect(notifier).to receive(:notify_end).with(action).ordered
+
+ subject
+ end
end
- context 'locking' do
- it 'acquires a lock while reindexing' do
- indexes.each do |index|
- expect(lease).to receive(:try_obtain).ordered.and_return(uuid)
- action = instance_double(Gitlab::Database::Reindexing::ConcurrentReindex)
- expect(Gitlab::Database::Reindexing::ConcurrentReindex).to receive(:new).ordered.with(index).and_return(action)
- expect(action).to receive(:perform).ordered
- expect(Gitlab::ExclusiveLease).to receive(:cancel).ordered.with(lease_key, uuid)
- end
+ context 'action tracking' do
+ it 'calls #finish on the action' do
+ expect(reindexer).to receive(:perform).ordered
+ expect(action).to receive(:finish).ordered
subject
end
- it 'does does not perform reindexing actions if lease is not granted' do
- indexes.each do |index|
- expect(lease).to receive(:try_obtain).ordered.and_return(false)
- expect(Gitlab::Database::Reindexing::ConcurrentReindex).not_to receive(:new)
- end
+ it 'still calls finish and re-raises the error when reindexing fails' do
+ expect(reindexer).to receive(:perform).ordered.and_raise('something went wrong')
+ expect(action).to receive(:finish).ordered
- subject
+ expect { subject }.to raise_error(/something went wrong/)
+
+ expect(action).to be_failed
end
end
end
diff --git a/spec/lib/gitlab/database/reindexing/grafana_notifier_spec.rb b/spec/lib/gitlab/database/reindexing/grafana_notifier_spec.rb
new file mode 100644
index 00000000000..e76718fe48a
--- /dev/null
+++ b/spec/lib/gitlab/database/reindexing/grafana_notifier_spec.rb
@@ -0,0 +1,139 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Reindexing::GrafanaNotifier do
+ include Database::DatabaseHelpers
+
+ let(:api_key) { "foo" }
+ let(:api_url) { "http://bar" }
+ let(:additional_tag) { "some-tag" }
+
+ let(:action) { create(:reindex_action) }
+
+ before do
+ swapout_view_for_table(:postgres_indexes)
+ end
+
+ let(:headers) do
+ {
+ 'Content-Type': 'application/json',
+ 'Authorization': "Bearer #{api_key}"
+ }
+ end
+
+ let(:response) { double('response', success?: true) }
+
+ def expect_api_call(payload)
+ expect(Gitlab::HTTP).to receive(:post).with("#{api_url}/api/annotations", body: payload.to_json, headers: headers, allow_local_requests: true).and_return(response)
+ end
+
+ shared_examples_for 'interacting with Grafana annotations API' do
+ it 'POSTs a JSON payload' do
+ expect_api_call(payload)
+
+ expect(subject).to be_truthy
+ end
+
+ context 'on error' do
+ it 'does not raise the error and returns false' do
+ allow(Gitlab::HTTP).to receive(:post).and_raise('something went wrong')
+
+ expect(subject).to be_falsey
+ end
+
+ context 'when request was not successful' do
+ it 'returns false' do
+ expect_api_call(payload)
+ allow(response).to receive(:success?).and_return(false)
+
+ expect(subject).to be_falsey
+ end
+ end
+ end
+
+ context 'without api_key' do
+ let(:api_key) { '' }
+
+ it 'does not post anything' do
+ expect(Gitlab::HTTP).not_to receive(:post)
+
+ expect(subject).to be_falsey
+ end
+ end
+
+ context 'without api_url' do
+ let(:api_url) { '' }
+
+ it 'does not post anything' do
+ expect(Gitlab::HTTP).not_to receive(:post)
+
+ expect(subject).to be_falsey
+ end
+ end
+ end
+
+ describe '#notify_start' do
+ context 'additional tag is nil' do
+ subject { described_class.new(api_key, api_url, nil).notify_start(action) }
+
+ let(:payload) do
+ {
+ time: (action.action_start.utc.to_f * 1000).to_i,
+ tags: ['reindex', action.index.tablename, action.index.name],
+ text: "Started reindexing of #{action.index.name} on #{action.index.tablename}"
+ }
+ end
+
+ it_behaves_like 'interacting with Grafana annotations API'
+ end
+
+ context 'additional tag is not nil' do
+ subject { described_class.new(api_key, api_url, additional_tag).notify_start(action) }
+
+ let(:payload) do
+ {
+ time: (action.action_start.utc.to_f * 1000).to_i,
+ tags: ['reindex', additional_tag, action.index.tablename, action.index.name],
+ text: "Started reindexing of #{action.index.name} on #{action.index.tablename}"
+ }
+ end
+
+ it_behaves_like 'interacting with Grafana annotations API'
+ end
+ end
+
+ describe '#notify_end' do
+ context 'additional tag is nil' do
+ subject { described_class.new(api_key, api_url, nil).notify_end(action) }
+
+ let(:payload) do
+ {
+ time: (action.action_start.utc.to_f * 1000).to_i,
+ tags: ['reindex', action.index.tablename, action.index.name],
+ text: "Finished reindexing of #{action.index.name} on #{action.index.tablename} (#{action.state})",
+ timeEnd: (action.action_end.utc.to_f * 1000).to_i,
+ isRegion: true
+ }
+ end
+
+ it_behaves_like 'interacting with Grafana annotations API'
+ end
+
+ context 'additional tag is not nil' do
+ subject { described_class.new(api_key, api_url, additional_tag).notify_end(action) }
+
+ let(:payload) do
+ {
+ time: (action.action_start.utc.to_f * 1000).to_i,
+ tags: ['reindex', additional_tag, action.index.tablename, action.index.name],
+ text: "Finished reindexing of #{action.index.name} on #{action.index.tablename} (#{action.state})",
+ timeEnd: (action.action_end.utc.to_f * 1000).to_i,
+ isRegion: true
+ }
+ end
+
+ it_behaves_like 'interacting with Grafana annotations API'
+ end
+ end
+end
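A minimal call sketch matching the payloads pinned above (the API key, URL and additional tag are placeholders):

notifier = Gitlab::Database::Reindexing::GrafanaNotifier.new('api-key', 'https://grafana.example.com', 'database-reindexing')
notifier.notify_start(action) # POSTs to <api_url>/api/annotations with tags ['reindex', tag, tablename, index name]
# ... ConcurrentReindex#perform runs and the action is finished ...
notifier.notify_end(action)   # same endpoint, adding timeEnd, isRegion: true and the final action state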
diff --git a/spec/lib/gitlab/database/reindexing/index_selection_spec.rb b/spec/lib/gitlab/database/reindexing/index_selection_spec.rb
index a5e2f368f40..4466679a099 100644
--- a/spec/lib/gitlab/database/reindexing/index_selection_spec.rb
+++ b/spec/lib/gitlab/database/reindexing/index_selection_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Reindexing::IndexSelection do
- include DatabaseHelpers
+ include Database::DatabaseHelpers
subject { described_class.new(Gitlab::Database::PostgresIndex.all).to_a }
diff --git a/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb b/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb
index 225f23d2135..a8f196d8f0e 100644
--- a/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb
+++ b/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb
@@ -2,91 +2,83 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::Reindexing::ReindexAction, '.keep_track_of' do
- let(:index) { double('index', identifier: 'public.something', ondisk_size_bytes: 10240, reload: nil, bloat_size: 42) }
- let(:size_after) { 512 }
+RSpec.describe Gitlab::Database::Reindexing::ReindexAction do
+ include Database::DatabaseHelpers
- it 'yields to the caller' do
- expect { |b| described_class.keep_track_of(index, &b) }.to yield_control
- end
+ let(:index) { create(:postgres_index) }
- def find_record
- described_class.find_by(index_identifier: index.identifier)
+ before_all do
+ swapout_view_for_table(:postgres_indexes)
end
- it 'creates the record with a start time and updates its end time' do
- freeze_time do
- described_class.keep_track_of(index) do
- expect(find_record.action_start).to be_within(1.second).of(Time.zone.now)
+ describe '.create_for' do
+ subject { described_class.create_for(index) }
- travel(10.seconds)
- end
+ it 'creates a new record for the given index' do
+ freeze_time do
+ record = subject
- duration = find_record.action_end - find_record.action_start
+ expect(record.index_identifier).to eq(index.identifier)
+ expect(record.action_start).to eq(Time.zone.now)
+ expect(record.ondisk_size_bytes_start).to eq(index.ondisk_size_bytes)
+ expect(subject.bloat_estimate_bytes_start).to eq(index.bloat_size)
- expect(duration).to be_within(1.second).of(10.seconds)
+ expect(record).to be_persisted
+ end
end
end
- it 'creates the record with its status set to :started and updates its state to :finished' do
- described_class.keep_track_of(index) do
- expect(find_record).to be_started
- end
+ describe '#finish' do
+ subject { action.finish }
- expect(find_record).to be_finished
- end
+ let(:action) { build(:reindex_action, index: index) }
- it 'creates the record with the indexes start size and updates its end size' do
- described_class.keep_track_of(index) do
- expect(find_record.ondisk_size_bytes_start).to eq(index.ondisk_size_bytes)
+ it 'sets #action_end' do
+ freeze_time do
+ subject
- expect(index).to receive(:reload).once
- allow(index).to receive(:ondisk_size_bytes).and_return(size_after)
+ expect(action.action_end).to eq(Time.zone.now)
+ end
end
- expect(find_record.ondisk_size_bytes_end).to eq(size_after)
- end
+ it 'sets #ondisk_size_bytes_end after reloading the index record' do
+ new_size = 4711
+ expect(action.index).to receive(:reload).ordered
+ expect(action.index).to receive(:ondisk_size_bytes).and_return(new_size).ordered
+
+ subject
- it 'creates the record with the indexes bloat estimate' do
- described_class.keep_track_of(index) do
- expect(find_record.bloat_estimate_bytes_start).to eq(index.bloat_size)
+ expect(action.ondisk_size_bytes_end).to eq(new_size)
end
- end
- context 'in case of errors' do
- it 'sets the state to failed' do
- expect do
- described_class.keep_track_of(index) do
- raise 'something went wrong'
- end
- end.to raise_error(/something went wrong/)
+ context 'setting #state' do
+ it 'sets #state to finished if not given' do
+ action.state = nil
- expect(find_record).to be_failed
- end
+ subject
- it 'records the end time' do
- freeze_time do
- expect do
- described_class.keep_track_of(index) do
- raise 'something went wrong'
- end
- end.to raise_error(/something went wrong/)
+ expect(action).to be_finished
+ end
+
+ it 'sets #state to finished if set to started' do
+ action.state = :started
- expect(find_record.action_end).to be_within(1.second).of(Time.zone.now)
+ subject
+
+ expect(action).to be_finished
end
- end
- it 'records the resulting index size' do
- expect(index).to receive(:reload).once
- allow(index).to receive(:ondisk_size_bytes).and_return(size_after)
+ it 'does not change state if set to failed' do
+ action.state = :failed
+
+ expect { subject }.not_to change { action.state }
+ end
+ end
- expect do
- described_class.keep_track_of(index) do
- raise 'something went wrong'
- end
- end.to raise_error(/something went wrong/)
+ it 'saves the record' do
+ expect(action).to receive(:save!)
- expect(find_record.ondisk_size_bytes_end).to eq(size_after)
+ subject
end
end
end
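A short sketch of the ReindexAction lifecycle the rewritten spec describes (the index is illustrative):

action = Gitlab::Database::Reindexing::ReindexAction.create_for(index) # records start time, on-disk size and bloat estimate
# ... reindexing runs ...
action.finish # sets action_end and ondisk_size_bytes_end, and marks the action finished unless it already failed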
diff --git a/spec/lib/gitlab/database/reindexing_spec.rb b/spec/lib/gitlab/database/reindexing_spec.rb
index eb78a5fe8ea..b2f038e8b62 100644
--- a/spec/lib/gitlab/database/reindexing_spec.rb
+++ b/spec/lib/gitlab/database/reindexing_spec.rb
@@ -11,13 +11,16 @@ RSpec.describe Gitlab::Database::Reindexing do
let(:coordinator) { instance_double(Gitlab::Database::Reindexing::Coordinator) }
let(:index_selection) { instance_double(Gitlab::Database::Reindexing::IndexSelection) }
let(:candidate_indexes) { double }
- let(:indexes) { double }
+ let(:indexes) { [double, double] }
it 'delegates to Coordinator' do
expect(Gitlab::Database::Reindexing::IndexSelection).to receive(:new).with(candidate_indexes).and_return(index_selection)
expect(index_selection).to receive(:take).with(2).and_return(indexes)
- expect(Gitlab::Database::Reindexing::Coordinator).to receive(:new).with(indexes).and_return(coordinator)
- expect(coordinator).to receive(:perform)
+
+ indexes.each do |index|
+ expect(Gitlab::Database::Reindexing::Coordinator).to receive(:new).with(index).and_return(coordinator)
+ expect(coordinator).to receive(:perform)
+ end
subject
end
diff --git a/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb b/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
index 4048fc69591..417bf3e363a 100644
--- a/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
+++ b/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
@@ -8,8 +8,8 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
let(:prometheus_settings) do
{
- enable: true,
- listen_address: 'localhost:9090'
+ enabled: true,
+ server_address: 'localhost:9090'
}
end
@@ -63,13 +63,13 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
application_setting.update(allow_local_requests_from_web_hooks_and_services: true)
end
- shared_examples 'has prometheus service' do |listen_address|
+ shared_examples 'has prometheus service' do |server_address|
it do
expect(result[:status]).to eq(:success)
prometheus = project.prometheus_service
expect(prometheus).not_to eq(nil)
- expect(prometheus.api_url).to eq(listen_address)
+ expect(prometheus.api_url).to eq(server_address)
expect(prometheus.active).to eq(true)
expect(prometheus.manual_configuration).to eq(true)
end
@@ -202,25 +202,25 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
end
context 'with non default prometheus address' do
- let(:listen_address) { 'https://localhost:9090' }
+ let(:server_address) { 'https://localhost:9090' }
let(:prometheus_settings) do
{
- enable: true,
- listen_address: listen_address
+ enabled: true,
+ server_address: server_address
}
end
it_behaves_like 'has prometheus service', 'https://localhost:9090'
context 'with :9090 symbol' do
- let(:listen_address) { :':9090' }
+ let(:server_address) { :':9090' }
it_behaves_like 'has prometheus service', 'http://localhost:9090'
end
context 'with 0.0.0.0:9090' do
- let(:listen_address) { '0.0.0.0:9090' }
+ let(:server_address) { '0.0.0.0:9090' }
it_behaves_like 'has prometheus service', 'http://localhost:9090'
end
@@ -251,8 +251,8 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
context 'when prometheus setting is disabled in gitlab.yml' do
let(:prometheus_settings) do
{
- enable: false,
- listen_address: 'http://localhost:9090'
+ enabled: false,
+ server_address: 'http://localhost:9090'
}
end
@@ -262,8 +262,8 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
end
end
- context 'when prometheus listen address is blank in gitlab.yml' do
- let(:prometheus_settings) { { enable: true, listen_address: '' } }
+ context 'when prometheus server address is blank in gitlab.yml' do
+ let(:prometheus_settings) { { enabled: true, server_address: '' } }
it 'does not configure prometheus' do
expect(result).to include(status: :success)
@@ -296,8 +296,8 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
context 'when prometheus manual configuration cannot be saved' do
let(:prometheus_settings) do
{
- enable: true,
- listen_address: 'httpinvalid://localhost:9090'
+ enabled: true,
+ server_address: 'httpinvalid://localhost:9090'
}
end
diff --git a/spec/lib/gitlab/diff/position_spec.rb b/spec/lib/gitlab/diff/position_spec.rb
index a7f6ea0cbfb..c9a20f40462 100644
--- a/spec/lib/gitlab/diff/position_spec.rb
+++ b/spec/lib/gitlab/diff/position_spec.rb
@@ -752,4 +752,62 @@ RSpec.describe Gitlab::Diff::Position do
expect(subject.file_hash).to eq(Digest::SHA1.hexdigest(subject.file_path))
end
end
+
+ describe '#multiline?' do
+ let(:end_line_code) { "ab09011fa121d0a2bb9fa4ca76094f2482b902b7_#{end_old_line}_#{end_new_line}" }
+
+ let(:line_range) do
+ {
+ "start" => {
+ "line_code" => "ab09011fa121d0a2bb9fa4ca76094f2482b902b7_18_18",
+ "type" => nil,
+ "old_line" => 18,
+ "new_line" => 18
+ },
+ "end" => {
+ "line_code" => end_line_code,
+ "type" => nil,
+ "old_line" => end_old_line,
+ "new_line" => end_new_line
+ }
+ }
+ end
+
+ subject(:multiline) do
+ described_class.new(
+ line_range: line_range,
+ position_type: position_type
+ )
+ end
+
+ let(:end_old_line) { 20 }
+ let(:end_new_line) { 20 }
+
+ context 'when the position type is text' do
+ let(:position_type) { "text" }
+
+ context 'when the start lines equal the end lines' do
+ let(:end_old_line) { 18 }
+ let(:end_new_line) { 18 }
+
+ it "returns true" do
+ expect(subject.multiline?).to be_falsey
+ end
+ end
+
+ context 'when the start lines do not equal the end lines' do
+ it "returns true" do
+ expect(subject.multiline?).to be_truthy
+ end
+ end
+ end
+
+ context 'when the position type is not text' do
+ let(:position_type) { "image" }
+
+ it "returns false" do
+ expect(subject.multiline?).to be_falsey
+ end
+ end
+ end
end
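For reference, a sketch of the position shape the new #multiline? examples exercise (line codes abbreviated):

position = Gitlab::Diff::Position.new(
  position_type: 'text',
  line_range: {
    'start' => { 'line_code' => '<file_hash>_18_18', 'old_line' => 18, 'new_line' => 18 },
    'end'   => { 'line_code' => '<file_hash>_20_20', 'old_line' => 20, 'new_line' => 20 }
  }
)

position.multiline? # => true; false when start equals end, or when position_type is not 'text'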
diff --git a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
index ef448ee96a4..8872800069a 100644
--- a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
@@ -4,146 +4,50 @@ require 'spec_helper'
RSpec.describe Gitlab::Email::Handler::CreateNoteHandler do
include_context :email_shared_context
- let!(:sent_notification) do
- SentNotification.record_note(note, user.id, mail_key)
- end
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public, :repository) }
let(:noteable) { note.noteable }
let(:note) { create(:diff_note_on_merge_request, project: project) }
- let(:user) { create(:user) }
- let(:project) { create(:project, :public, :repository) }
let(:email_raw) { fixture_file('emails/valid_reply.eml') }
+ let!(:sent_notification) do
+ SentNotification.record_note(note, user.id, mail_key)
+ end
it_behaves_like :reply_processing_shared_examples
+ it_behaves_like :note_handler_shared_examples do
+ let(:recipient) { sent_notification.recipient }
+
+ let(:update_commands_only) { fixture_file('emails/update_commands_only_reply.eml') }
+ let(:no_content) { fixture_file('emails/no_content_reply.eml') }
+ let(:commands_in_reply) { fixture_file('emails/commands_in_reply.eml') }
+ let(:with_quick_actions) { fixture_file('emails/valid_reply_with_quick_actions.eml') }
+ end
+
before do
stub_incoming_email_setting(enabled: true, address: "reply+%{key}@appmail.adventuretime.ooo")
stub_config_setting(host: 'localhost')
end
- context "when the recipient address doesn't include a mail key" do
- let(:email_raw) { fixture_file('emails/valid_reply.eml').gsub(mail_key, "") }
+ context 'when the recipient address does not include a mail key' do
+ let(:email_raw) { fixture_file('emails/valid_reply.eml').gsub(mail_key, '') }
- it "raises a UnknownIncomingEmail" do
+ it 'raises a UnknownIncomingEmail' do
expect { receiver.execute }.to raise_error(Gitlab::Email::UnknownIncomingEmail)
end
end
- context "when no sent notification for the mail key could be found" do
+ context 'when no sent notification for the mail key could be found' do
let(:email_raw) { fixture_file('emails/wrong_mail_key.eml') }
- it "raises a SentNotificationNotFoundError" do
+ it 'raises a SentNotificationNotFoundError' do
expect { receiver.execute }.to raise_error(Gitlab::Email::SentNotificationNotFoundError)
end
end
- context "when the noteable could not be found" do
- before do
- noteable.destroy
- end
-
- it "raises a NoteableNotFoundError" do
- expect { receiver.execute }.to raise_error(Gitlab::Email::NoteableNotFoundError)
- end
- end
-
- context "when the note could not be saved" do
- before do
- allow_any_instance_of(Note).to receive(:persisted?).and_return(false)
- end
-
- it "raises an InvalidNoteError" do
- expect { receiver.execute }.to raise_error(Gitlab::Email::InvalidNoteError)
- end
-
- context 'because the note was update commands only' do
- let!(:email_raw) { fixture_file("emails/update_commands_only_reply.eml") }
-
- context 'and current user cannot update noteable' do
- it 'raises a CommandsOnlyNoteError' do
- expect { receiver.execute }.to raise_error(Gitlab::Email::InvalidNoteError)
- end
- end
-
- context "and current user can update noteable" do
- before do
- project.add_developer(user)
- end
-
- it 'does not raise an error' do
- expect { receiver.execute }.to change { noteable.resource_state_events.count }.by(1)
-
- expect(noteable.reload).to be_closed
- end
- end
- end
- end
-
- context 'when the note contains quick actions' do
- let!(:email_raw) { fixture_file("emails/commands_in_reply.eml") }
-
- context 'and current user cannot update the noteable' do
- it 'only executes the commands that the user can perform' do
- expect { receiver.execute }
- .to change { noteable.notes.user.count }.by(1)
- .and change { user.todos_pending_count }.from(0).to(1)
-
- expect(noteable.reload).to be_open
- end
- end
-
- context 'and current user can update noteable' do
- before do
- project.add_developer(user)
- end
-
- it 'posts a note and updates the noteable' do
- expect(TodoService.new.todo_exist?(noteable, user)).to be_falsy
-
- expect { receiver.execute }
- .to change { noteable.notes.user.count }.by(1)
- .and change { user.todos_pending_count }.from(0).to(1)
-
- expect(noteable.reload).to be_closed
- end
- end
- end
-
- context "when the reply is blank" do
- let!(:email_raw) { fixture_file("emails/no_content_reply.eml") }
-
- it "raises an EmptyEmailError" do
- expect { receiver.execute }.to raise_error(Gitlab::Email::EmptyEmailError)
- end
- end
-
- shared_examples "checks permissions on noteable" do
- context "when user has access" do
- before do
- project.add_reporter(user)
- end
-
- it "creates a comment" do
- expect { receiver.execute }.to change { noteable.notes.count }.by(1)
- end
- end
-
- context "when user does not have access" do
- it "raises UserNotAuthorizedError" do
- expect { receiver.execute }.to raise_error(Gitlab::Email::UserNotAuthorizedError)
- end
- end
- end
-
- context "when discussion is locked" do
- before do
- noteable.update_attribute(:discussion_locked, true)
- end
-
- it_behaves_like "checks permissions on noteable"
- end
-
- context "when issue is confidential" do
+ context 'when issue is confidential' do
let(:issue) { create(:issue, project: project) }
let(:note) { create(:note, noteable: issue, project: project) }
@@ -151,17 +55,17 @@ RSpec.describe Gitlab::Email::Handler::CreateNoteHandler do
issue.update_attribute(:confidential, true)
end
- it_behaves_like "checks permissions on noteable"
+ it_behaves_like :checks_permissions_on_noteable_examples
end
shared_examples 'a reply to existing comment' do
- it "creates a comment" do
+ it 'creates a comment' do
expect { receiver.execute }.to change { noteable.notes.count }.by(1)
new_note = noteable.notes.last
expect(new_note.author).to eq(sent_notification.recipient)
expect(new_note.position).to eq(note.position)
- expect(new_note.note).to include("I could not disagree more.")
+ expect(new_note.note).to include('I could not disagree more.')
expect(new_note.in_reply_to?(note)).to be_truthy
if note.part_of_discussion?
@@ -172,32 +76,14 @@ RSpec.describe Gitlab::Email::Handler::CreateNoteHandler do
end
end
- context "when everything is fine" do
+ # additional shared tests in :reply_processing_shared_examples
+ context 'when everything is fine' do
before do
setup_attachment
end
it_behaves_like 'a reply to existing comment'
- it "adds all attachments" do
- expect_next_instance_of(Gitlab::Email::AttachmentUploader) do |uploader|
- expect(uploader).to receive(:execute).with(upload_parent: project, uploader_class: FileUploader).and_return(
- [
- {
- url: "uploads/image.png",
- alt: "image",
- markdown: markdown
- }
- ]
- )
- end
-
- receiver.execute
-
- note = noteable.notes.last
- expect(note.note).to include(markdown)
- end
-
context 'when sub-addressing is not supported' do
before do
stub_incoming_email_setting(enabled: true, address: nil)
@@ -228,75 +114,9 @@ RSpec.describe Gitlab::Email::Handler::CreateNoteHandler do
end
end
- context "when note is not a discussion" do
+ context 'when note is not a discussion' do
let(:note) { create(:note_on_merge_request, project: project) }
it_behaves_like 'a reply to existing comment'
end
-
- context 'when the service desk' do
- let(:project) { create(:project, :public, service_desk_enabled: true) }
- let(:support_bot) { User.support_bot }
- let(:noteable) { create(:issue, project: project, author: support_bot, title: 'service desk issue') }
- let(:note) { create(:note, project: project, noteable: noteable) }
- let(:email_raw) { fixture_file('emails/valid_reply_with_quick_actions.eml') }
-
- let!(:sent_notification) do
- SentNotification.record_note(note, support_bot.id, mail_key)
- end
-
- context 'is enabled' do
- before do
- allow(Gitlab::ServiceDesk).to receive(:enabled?).with(project: project).and_return(true)
- project.project_feature.update!(issues_access_level: issues_access_level)
- end
-
- context 'when issues are enabled for everyone' do
- let(:issues_access_level) { ProjectFeature::ENABLED }
-
- it 'creates a comment' do
- expect { receiver.execute }.to change { noteable.notes.count }.by(1)
- end
-
- context 'when quick actions are present' do
- it 'encloses quick actions with code span markdown' do
- receiver.execute
- noteable.reload
-
- note = Note.last
- expect(note.note).to include("Jake out\n\n`/close`\n`/title test`")
- expect(noteable.title).to eq('service desk issue')
- expect(noteable).to be_opened
- end
- end
- end
-
- context 'when issues are protected members only' do
- let(:issues_access_level) { ProjectFeature::PRIVATE }
-
- it 'creates a comment' do
- expect { receiver.execute }.to change { noteable.notes.count }.by(1)
- end
- end
-
- context 'when issues are disabled' do
- let(:issues_access_level) { ProjectFeature::DISABLED }
-
- it 'does not create a comment' do
- expect { receiver.execute }.to raise_error(Gitlab::Email::UserNotAuthorizedError)
- end
- end
- end
-
- context 'is disabled' do
- before do
- allow(Gitlab::ServiceDesk).to receive(:enabled?).and_return(false)
- allow(Gitlab::ServiceDesk).to receive(:enabled?).with(project: project).and_return(false)
- end
-
- it 'does not create a comment' do
- expect { receiver.execute }.to raise_error(Gitlab::Email::ProjectNotFound)
- end
- end
- end
end
diff --git a/spec/lib/gitlab/email/handler/create_note_on_issuable_handler_spec.rb b/spec/lib/gitlab/email/handler/create_note_on_issuable_handler_spec.rb
new file mode 100644
index 00000000000..94f28d3399a
--- /dev/null
+++ b/spec/lib/gitlab/email/handler/create_note_on_issuable_handler_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Email::Handler::CreateNoteOnIssuableHandler do
+ include_context :email_shared_context
+
+ let_it_be(:user) { create(:user, email: 'jake@adventuretime.ooo', incoming_email_token: 'auth_token') }
+ let_it_be(:namespace) { create(:namespace, path: 'gitlabhq') }
+ let_it_be(:project) { create(:project, :public, namespace: namespace, path: 'gitlabhq') }
+
+ let!(:noteable) { create(:issue, project: project) }
+ let(:email_raw) { email_fixture('emails/valid_note_on_issuable.eml') }
+
+ before do
+ stub_incoming_email_setting(enabled: true, address: "incoming+%{key}@appmail.adventuretime.ooo")
+ stub_config_setting(host: 'localhost')
+ end
+
+ it_behaves_like :reply_processing_shared_examples
+
+ it_behaves_like :note_handler_shared_examples, true do
+ let_it_be(:recipient) { user }
+
+ let(:update_commands_only) { email_reply_fixture('emails/update_commands_only_reply.eml') }
+ let(:no_content) { email_reply_fixture('emails/no_content_reply.eml') }
+ let(:commands_in_reply) { email_reply_fixture('emails/commands_in_reply.eml') }
+ let(:with_quick_actions) { email_reply_fixture('emails/valid_reply_with_quick_actions.eml') }
+ end
+
+ context 'when the recipient address does not include a mail key' do
+ let(:mail_key) { 'gitlabhq-gitlabhq-project_id-auth_token-issue-issue_iid' }
+ let(:email_raw) { fixture_file('emails/valid_note_on_issuable.eml').gsub(mail_key, '') }
+
+ it 'raises an UnknownIncomingEmail' do
+ expect { receiver.execute }.to raise_error(Gitlab::Email::UnknownIncomingEmail)
+ end
+ end
+
+ context 'when issue is confidential' do
+ before do
+ noteable.update_attribute(:confidential, true)
+ end
+
+ it_behaves_like :checks_permissions_on_noteable_examples
+ end
+
+ def email_fixture(path)
+ fixture_file(path)
+ .gsub('project_id', project.project_id.to_s)
+ .gsub('issue_iid', noteable.iid.to_s)
+ end
+
+ def email_reply_fixture(path)
+ reply_address = 'reply+59d8df8370b7e95c5a49fbf86aeb2c93'
+ note_address = "incoming+#{project.full_path_slug}-#{project.project_id}-#{user.incoming_email_token}-issue-#{noteable.iid}"
+
+ fixture_file(path)
+ .gsub(reply_address, note_address)
+ end
+end
diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
index 32b451f8329..b1ffbedc7bf 100644
--- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
@@ -191,16 +191,6 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
expect { receiver.execute }.to raise_error(Gitlab::Email::ProjectNotFound)
end
end
-
- context 'when service_desk_custom_address feature is disabled' do
- before do
- stub_feature_flags(service_desk_custom_address: false)
- end
-
- it 'bounces the email' do
- expect { receiver.execute }.to raise_error(Gitlab::Email::ProjectNotFound)
- end
- end
end
end
diff --git a/spec/lib/gitlab/email/handler_spec.rb b/spec/lib/gitlab/email/handler_spec.rb
index 2cd8c31e6b2..eff6fb63a5f 100644
--- a/spec/lib/gitlab/email/handler_spec.rb
+++ b/spec/lib/gitlab/email/handler_spec.rb
@@ -60,8 +60,9 @@ RSpec.describe Gitlab::Email::Handler do
describe 'regexps are set properly' do
let(:addresses) do
- %W(sent_notification_key#{Gitlab::IncomingEmail::UNSUBSCRIBE_SUFFIX} sent_notification_key path-to-project-123-user_email_token-merge-request) +
- %W(sent_notification_key#{Gitlab::IncomingEmail::UNSUBSCRIBE_SUFFIX_LEGACY} sent_notification_key path-to-project-123-user_email_token-issue) +
+ %W(sent_notification_key#{Gitlab::IncomingEmail::UNSUBSCRIBE_SUFFIX} sent_notification_key#{Gitlab::IncomingEmail::UNSUBSCRIBE_SUFFIX_LEGACY}) +
+ %w(sent_notification_key path-to-project-123-user_email_token-merge-request) +
+ %w(path-to-project-123-user_email_token-issue path-to-project-123-user_email_token-issue-123) +
%w(path/to/project+user_email_token path/to/project+merge-request+user_email_token some/project)
end
diff --git a/spec/lib/gitlab/error_tracking_spec.rb b/spec/lib/gitlab/error_tracking_spec.rb
index 68a46b11487..764478ad1d7 100644
--- a/spec/lib/gitlab/error_tracking_spec.rb
+++ b/spec/lib/gitlab/error_tracking_spec.rb
@@ -236,7 +236,7 @@ RSpec.describe Gitlab::ErrorTracking do
context 'the exception implements :sentry_extra_data' do
let(:extra_info) { { event: 'explosion', size: :massive } }
- let(:exception) { double(message: 'bang!', sentry_extra_data: extra_info, backtrace: caller) }
+ let(:exception) { double(message: 'bang!', sentry_extra_data: extra_info, backtrace: caller, cause: nil) }
it 'includes the extra data from the exception in the tracking information' do
track_exception
@@ -247,7 +247,7 @@ RSpec.describe Gitlab::ErrorTracking do
end
context 'the exception implements :sentry_extra_data, which returns nil' do
- let(:exception) { double(message: 'bang!', sentry_extra_data: nil, backtrace: caller) }
+ let(:exception) { double(message: 'bang!', sentry_extra_data: nil, backtrace: caller, cause: nil) }
let(:extra) { { issue_url: issue_url } }
it 'just includes the other extra info' do
@@ -287,10 +287,23 @@ RSpec.describe Gitlab::ErrorTracking do
let(:exception) { ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = $1') }
it 'injects the normalized sql query into extra' do
+ allow(Raven.client.transport).to receive(:send_event) do |event|
+ expect(event.extra).to include(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1')
+ end
+
track_exception
+ end
+ end
- expect(Raven).to have_received(:capture_exception)
- .with(exception, a_hash_including(extra: a_hash_including(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1')))
+ context 'when the `ActiveRecord::StatementInvalid` is wrapped in another exception' do
+ let(:exception) { RuntimeError.new(cause: ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = $1')) }
+
+ it 'injects the normalized sql query into extra' do
+ allow(Raven.client.transport).to receive(:send_event) do |event|
+ expect(event.extra).to include(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1')
+ end
+
+ track_exception
end
end
end
diff --git a/spec/lib/gitlab/experimentation/controller_concern_spec.rb b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
index 03cb89ee033..c47f71c207d 100644
--- a/spec/lib/gitlab/experimentation/controller_concern_spec.rb
+++ b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
@@ -156,6 +156,16 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
is_expected.to eq(true)
end
end
+
+ context 'Cookie parameter to force enable experiment' do
+ it 'returns true unconditionally' do
+ cookies[:force_experiment] = 'test_experiment,another_experiment'
+ get :index
+
+ expect(check_experiment(:test_experiment)).to eq(true)
+ expect(check_experiment(:another_experiment)).to eq(true)
+ end
+ end
end
describe '#track_experiment_event', :snowplow do
diff --git a/spec/lib/gitlab/experimentation/experiment_spec.rb b/spec/lib/gitlab/experimentation/experiment_spec.rb
index 7b1d1763010..008e6699597 100644
--- a/spec/lib/gitlab/experimentation/experiment_spec.rb
+++ b/spec/lib/gitlab/experimentation/experiment_spec.rb
@@ -14,8 +14,10 @@ RSpec.describe Gitlab::Experimentation::Experiment do
end
before do
- feature = double('FeatureFlag', percentage_of_time_value: percentage )
- expect(Feature).to receive(:get).with(:experiment_key_experiment_percentage).and_return(feature)
+ skip_feature_flags_yaml_validation
+ skip_default_enabled_yaml_check
+ feature = double('FeatureFlag', percentage_of_time_value: percentage, enabled?: true)
+ allow(Feature).to receive(:get).with(:experiment_key_experiment_percentage).and_return(feature)
end
subject(:experiment) { described_class.new(:experiment_key, **params) }
diff --git a/spec/lib/gitlab/experimentation_spec.rb b/spec/lib/gitlab/experimentation_spec.rb
index a68c050d829..b503960b8c7 100644
--- a/spec/lib/gitlab/experimentation_spec.rb
+++ b/spec/lib/gitlab/experimentation_spec.rb
@@ -38,6 +38,8 @@ RSpec.describe Gitlab::Experimentation do
}
})
+ skip_feature_flags_yaml_validation
+ skip_default_enabled_yaml_check
Feature.enable_percentage_of_time(:backwards_compatible_test_experiment_experiment_percentage, enabled_percentage)
Feature.enable_percentage_of_time(:test_experiment_experiment_percentage, enabled_percentage)
allow(Gitlab).to receive(:com?).and_return(true)
diff --git a/spec/lib/gitlab/faraday/error_callback_spec.rb b/spec/lib/gitlab/faraday/error_callback_spec.rb
new file mode 100644
index 00000000000..5da4b8adf6a
--- /dev/null
+++ b/spec/lib/gitlab/faraday/error_callback_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Faraday::ErrorCallback do
+ let(:app) { double(:app) }
+ let(:middleware) { described_class.new(app, {}) }
+
+ describe '#call' do
+ let(:env) { { url: 'http://target.url' } }
+
+ subject { middleware.call(env) }
+
+ context 'with no errors' do
+ before do
+ expect(app).to receive(:call).with(env).and_return('success')
+ end
+
+ it { is_expected.to eq('success') }
+ end
+
+ context 'with errors' do
+ before do
+ expect(app).to receive(:call).and_raise(ArgumentError, 'Kaboom!')
+ end
+
+ context 'with no callback' do
+ it 'uses the default callback' do
+ expect { subject }.to raise_error(ArgumentError, 'Kaboom!')
+ end
+ end
+
+ context 'with a custom callback' do
+ let(:options) { { callback: callback } }
+
+ it 'uses the custom callback' do
+ count = 0
+ target_url = nil
+ exception_class = nil
+
+ callback = proc do |env, exception|
+ count += 1
+ target_url = env[:url].to_s
+ exception_class = exception.class.name
+ end
+
+ options = { callback: callback }
+ middleware = described_class.new(app, options)
+
+ expect(callback).to receive(:call).and_call_original
+ expect { middleware.call(env) }.to raise_error(ArgumentError, 'Kaboom!')
+ expect(count).to eq(1)
+ expect(target_url).to eq('http://target.url')
+ expect(exception_class).to eq(ArgumentError.name)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/git/changed_path_spec.rb b/spec/lib/gitlab/git/changed_path_spec.rb
new file mode 100644
index 00000000000..93db107ad5c
--- /dev/null
+++ b/spec/lib/gitlab/git/changed_path_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Gitlab::Git::ChangedPath do
+ subject(:changed_path) { described_class.new(path: path, status: status) }
+
+ let(:path) { 'test_path' }
+
+ describe '#new_file?' do
+ subject(:new_file?) { changed_path.new_file? }
+
+ context 'when it is a new file' do
+ let(:status) { :ADDED }
+
+ it 'returns true' do
+ expect(new_file?).to eq(true)
+ end
+ end
+
+ context 'when it is not a new file' do
+ let(:status) { :MODIFIED }
+
+ it 'returns false' do
+ expect(new_file?).to eq(false)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/git/diff_spec.rb b/spec/lib/gitlab/git/diff_spec.rb
index d4174a34433..783f0a9ccf7 100644
--- a/spec/lib/gitlab/git/diff_spec.rb
+++ b/spec/lib/gitlab/git/diff_spec.rb
@@ -58,7 +58,7 @@ EOT
context 'using a diff that is too large' do
it 'prunes the diff' do
- diff = described_class.new(diff: 'a' * 204800)
+ diff = described_class.new({ diff: 'a' * 204800 })
expect(diff.diff).to be_empty
expect(diff).to be_too_large
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index c917945499c..ef9b5a30c86 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -520,12 +520,13 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
forced: true,
no_tags: true,
timeout: described_class::GITLAB_PROJECTS_TIMEOUT,
- prune: false
+ prune: false,
+ check_tags_changed: false
}
expect(repository.gitaly_repository_client).to receive(:fetch_remote).with('remote-name', expected_opts)
- repository.fetch_remote('remote-name', ssh_auth: ssh_auth, forced: true, no_tags: true, prune: false)
+ repository.fetch_remote('remote-name', ssh_auth: ssh_auth, forced: true, no_tags: true, prune: false, check_tags_changed: false)
end
it_behaves_like 'wrapping gRPC errors', Gitlab::GitalyClient::RepositoryService, :fetch_remote do
@@ -1191,25 +1192,25 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
let(:commit_3) { '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9' }
let(:commit_1_files) do
[
- OpenStruct.new(status: :ADDED, path: "files/executables/ls"),
- OpenStruct.new(status: :ADDED, path: "files/executables/touch"),
- OpenStruct.new(status: :ADDED, path: "files/links/regex.rb"),
- OpenStruct.new(status: :ADDED, path: "files/links/ruby-style-guide.md"),
- OpenStruct.new(status: :ADDED, path: "files/links/touch"),
- OpenStruct.new(status: :MODIFIED, path: ".gitmodules"),
- OpenStruct.new(status: :ADDED, path: "deeper/nested/six"),
- OpenStruct.new(status: :ADDED, path: "nested/six")
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: "files/executables/ls"),
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: "files/executables/touch"),
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: "files/links/regex.rb"),
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: "files/links/ruby-style-guide.md"),
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: "files/links/touch"),
+ Gitlab::Git::ChangedPath.new(status: :MODIFIED, path: ".gitmodules"),
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: "deeper/nested/six"),
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: "nested/six")
]
end
let(:commit_2_files) do
- [OpenStruct.new(status: :ADDED, path: "bin/executable")]
+ [Gitlab::Git::ChangedPath.new(status: :ADDED, path: "bin/executable")]
end
let(:commit_3_files) do
[
- OpenStruct.new(status: :MODIFIED, path: ".gitmodules"),
- OpenStruct.new(status: :ADDED, path: "gitlab-shell")
+ Gitlab::Git::ChangedPath.new(status: :MODIFIED, path: ".gitmodules"),
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: "gitlab-shell")
]
end
@@ -1217,7 +1218,7 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
collection = repository.find_changed_paths([commit_1, commit_2, commit_3])
expect(collection).to be_a(Enumerable)
- expect(collection.to_a).to eq(commit_1_files + commit_2_files + commit_3_files)
+ expect(collection.as_json).to eq((commit_1_files + commit_2_files + commit_3_files).as_json)
end
it 'returns no paths when SHAs are invalid' do
@@ -1231,7 +1232,7 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
collection = repository.find_changed_paths([nil, commit_1])
expect(collection).to be_a(Enumerable)
- expect(collection.to_a).to eq(commit_1_files)
+ expect(collection.as_json).to eq(commit_1_files.as_json)
end
it 'returns no paths when the commits are nil' do
diff --git a/spec/lib/gitlab/git/wiki_page_version_spec.rb b/spec/lib/gitlab/git/wiki_page_version_spec.rb
new file mode 100644
index 00000000000..836fa2449ec
--- /dev/null
+++ b/spec/lib/gitlab/git/wiki_page_version_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Git::WikiPageVersion do
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let(:user) { create(:user, username: 'someone') }
+
+ describe '#author_url' do
+ subject(:author_url) { described_class.new(commit, nil).author_url }
+
+ context 'user exists in gitlab' do
+ let(:commit) { create(:commit, project: project, author: user) }
+
+ it 'returns the profile link of the user' do
+ expect(author_url).to eq('http://localhost/someone')
+ end
+ end
+
+ context 'user does not exist in gitlab' do
+ let(:commit) { create(:commit, project: project, author_email: "someone@somewebsite.com") }
+
+ it 'returns a mailto: url' do
+ expect(author_url).to eq('mailto:someone@somewebsite.com')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/git_access_snippet_spec.rb b/spec/lib/gitlab/git_access_snippet_spec.rb
index f5d8758a78a..777c94035d4 100644
--- a/spec/lib/gitlab/git_access_snippet_spec.rb
+++ b/spec/lib/gitlab/git_access_snippet_spec.rb
@@ -29,8 +29,17 @@ RSpec.describe Gitlab::GitAccessSnippet do
let(:actor) { build(:deploy_key) }
it 'does not allow push and pull access' do
- expect { push_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:authentication_mechanism])
- expect { pull_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:authentication_mechanism])
+ expect { push_access_check }.to raise_forbidden(:authentication_mechanism)
+ expect { pull_access_check }.to raise_forbidden(:authentication_mechanism)
+ end
+ end
+
+ describe 'when snippet repository is read-only' do
+ it 'does not allow push and allows pull access' do
+ allow(snippet).to receive(:repository_read_only?).and_return(true)
+
+ expect { push_access_check }.to raise_forbidden(:read_only)
+ expect { pull_access_check }.not_to raise_error
end
end
@@ -58,7 +67,7 @@ RSpec.describe Gitlab::GitAccessSnippet do
let(:snippet) { nil }
it 'blocks access with "not found"' do
- expect { pull_access_check }.to raise_snippet_not_found
+ expect { pull_access_check }.to raise_not_found(:snippet_not_found)
end
end
@@ -66,7 +75,7 @@ RSpec.describe Gitlab::GitAccessSnippet do
let(:snippet) { build_stubbed(:personal_snippet) }
it 'blocks access with "not found"' do
- expect { pull_access_check }.to raise_snippet_not_found
+ expect { pull_access_check }.to raise_not_found(:no_repo)
end
end
end
@@ -81,8 +90,8 @@ RSpec.describe Gitlab::GitAccessSnippet do
it 'blocks access when the user did not accept terms' do
message = /must accept the Terms of Service in order to perform this action/
- expect { push_access_check }.to raise_forbidden(message)
- expect { pull_access_check }.to raise_forbidden(message)
+ expect { push_access_check }.to raise_forbidden_with_message(message)
+ expect { pull_access_check }.to raise_forbidden_with_message(message)
end
it 'allows access when the user accepted the terms' do
@@ -149,8 +158,8 @@ RSpec.describe Gitlab::GitAccessSnippet do
let(:membership) { membership }
it 'respects accessibility' do
- expect { push_access_check }.to raise_snippet_not_found
- expect { pull_access_check }.to raise_snippet_not_found
+ expect { push_access_check }.to raise_not_found(:project_not_found)
+ expect { pull_access_check }.to raise_not_found(:project_not_found)
end
end
end
@@ -172,7 +181,7 @@ RSpec.describe Gitlab::GitAccessSnippet do
end
end
- [:guest, :reporter, :maintainer, :author, :admin].each do |membership|
+ [:guest, :reporter, :maintainer, :author].each do |membership|
context membership.to_s do
let(:membership) { membership }
@@ -183,6 +192,24 @@ RSpec.describe Gitlab::GitAccessSnippet do
end
end
+ context 'admin' do
+ let(:membership) { :admin }
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'cannot perform git pushes' do
+ expect { push_access_check }.to raise_error(described_class::ForbiddenError)
+ expect { pull_access_check }.not_to raise_error
+ end
+ end
+
+ context 'when admin mode is disabled' do
+ it 'cannot perform git operations' do
+ expect { push_access_check }.to raise_error(described_class::ForbiddenError)
+ expect { pull_access_check }.to raise_error(described_class::ForbiddenError)
+ end
+ end
+ end
+
it_behaves_like 'actor is migration bot'
end
@@ -255,7 +282,7 @@ RSpec.describe Gitlab::GitAccessSnippet do
allow(check).to receive(:validate!).and_raise(Gitlab::GitAccess::ForbiddenError, 'foo')
end
- expect { push_access_check }.to raise_forbidden('foo')
+ expect { push_access_check }.to raise_forbidden_with_message('foo')
end
it 'sets the file count limit from Snippet class' do
@@ -372,17 +399,49 @@ RSpec.describe Gitlab::GitAccessSnippet do
end
end
+ describe 'HEAD realignment' do
+ let_it_be(:snippet) { create(:project_snippet, :private, :repository, project: project) }
+
+ shared_examples 'HEAD is updated to the snippet default branch' do
+ let(:actor) { snippet.author }
+
+ specify do
+ expect(snippet).to receive(:change_head_to_default_branch).and_call_original
+
+ subject
+ end
+
+ context 'when an error is raised' do
+ let(:actor) { nil }
+
+ it 'does not realign HEAD' do
+ expect(snippet).not_to receive(:change_head_to_default_branch)
+
+ expect { subject }.to raise_error(described_class::ForbiddenError)
+ end
+ end
+ end
+
+ it_behaves_like 'HEAD is updated to the snippet default branch' do
+ subject { push_access_check }
+ end
+
+ it_behaves_like 'HEAD is updated to the snippet default branch' do
+ subject { pull_access_check }
+ end
+ end
+
private
- def raise_snippet_not_found
- raise_error(Gitlab::GitAccess::NotFoundError, Gitlab::GitAccess::ERROR_MESSAGES[:snippet_not_found])
+ def raise_not_found(message_key)
+ raise_error(described_class::NotFoundError, described_class.error_message(message_key))
end
- def raise_project_not_found
- raise_error(Gitlab::GitAccess::NotFoundError, Gitlab::GitAccess::ERROR_MESSAGES[:project_not_found])
+ def raise_forbidden(message_key)
+ raise_error(Gitlab::GitAccess::ForbiddenError, described_class.error_message(message_key))
end
- def raise_forbidden(message)
+ def raise_forbidden_with_message(message)
raise_error(Gitlab::GitAccess::ForbiddenError, message)
end
end
diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb
index 780f4329bcc..a0cafe3d763 100644
--- a/spec/lib/gitlab/git_access_spec.rb
+++ b/spec/lib/gitlab/git_access_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::GitAccess do
include TermsHelper
include GitHelpers
+ include AdminModeHelper
let(:user) { create(:user) }
@@ -769,19 +770,39 @@ RSpec.describe Gitlab::GitAccess do
describe 'admin user' do
let(:user) { create(:admin) }
- context 'when member of the project' do
- before do
- project.add_reporter(user)
+ context 'when admin mode enabled', :enable_admin_mode do
+ context 'when member of the project' do
+ before do
+ project.add_reporter(user)
+ end
+
+ context 'pull code' do
+ it { expect { pull_access_check }.not_to raise_error }
+ end
end
- context 'pull code' do
- it { expect { pull_access_check }.not_to raise_error }
+ context 'when not a member of the project' do
+ context 'pull code' do
+ it { expect { pull_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:download]) }
+ end
end
end
- context 'when is not member of the project' do
- context 'pull code' do
- it { expect { pull_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:download]) }
+ context 'when admin mode disabled' do
+ context 'when member of the project' do
+ before do
+ project.add_reporter(user)
+ end
+
+ context 'pull code' do
+ it { expect { pull_access_check }.not_to raise_error }
+ end
+ end
+
+ context 'when not a member of the project' do
+ context 'pull code' do
+ it { expect { pull_access_check }.to raise_not_found }
+ end
end
end
end
@@ -870,8 +891,13 @@ RSpec.describe Gitlab::GitAccess do
# Expectations are given a custom failure message proc so that it's
# easier to identify which check(s) failed.
it "has the correct permissions for #{role}s" do
- if role == :admin
+ if role == :admin_without_admin_mode
+ skip("All admins are allowed to perform actions https://gitlab.com/gitlab-org/gitlab/-/issues/296509")
+ end
+
+ if [:admin_with_admin_mode, :admin_without_admin_mode].include?(role)
user.update_attribute(:admin, true)
+ enable_admin_mode!(user) if role == :admin_with_admin_mode
project.add_guest(user)
else
project.add_role(user, role)
@@ -897,7 +923,7 @@ RSpec.describe Gitlab::GitAccess do
end
permissions_matrix = {
- admin: {
+ admin_with_admin_mode: {
any: true,
push_new_branch: true,
push_master: true,
@@ -909,6 +935,18 @@ RSpec.describe Gitlab::GitAccess do
merge_into_protected_branch: true
},
+ admin_without_admin_mode: {
+ any: false,
+ push_new_branch: false,
+ push_master: false,
+ push_protected_branch: false,
+ push_remove_protected_branch: false,
+ push_tag: false,
+ push_new_tag: false,
+ push_all: false,
+ merge_into_protected_branch: false
+ },
+
maintainer: {
any: true,
push_new_branch: true,
@@ -1009,7 +1047,7 @@ RSpec.describe Gitlab::GitAccess do
run_permission_checks(permissions_matrix.deep_merge(developer: { push_protected_branch: false, push_all: false, merge_into_protected_branch: false },
maintainer: { push_protected_branch: false, push_all: false, merge_into_protected_branch: false },
- admin: { push_protected_branch: false, push_all: false, merge_into_protected_branch: false }))
+ admin_with_admin_mode: { push_protected_branch: false, push_all: false, merge_into_protected_branch: false }))
end
end
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index 157c2393ce1..ac4c42d57ee 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -162,11 +162,9 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
.with(request, kind_of(Hash)).and_return([changed_paths_response])
returned_value = described_class.new(repository).find_changed_paths(commits)
+ mapped_expected_value = changed_paths_response.paths.map { |path| Gitlab::Git::ChangedPath.new(status: path.status, path: path.path) }
- mapped_returned_value = returned_value.map(&:to_h)
- mapped_expected_value = changed_paths_response.paths.map(&:to_h)
-
- expect(mapped_returned_value).to eq(mapped_expected_value)
+ expect(returned_value.as_json).to eq(mapped_expected_value.as_json)
end
end
diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
index f810a5c15a5..7a382df1248 100644
--- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
@@ -131,7 +131,8 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService do
known_hosts: '',
force: false,
no_tags: false,
- no_prune: false
+ no_prune: false,
+ check_tags_changed: false
)
expect_any_instance_of(Gitaly::RepositoryService::Stub)
@@ -139,7 +140,7 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService do
.with(expected_request, kind_of(Hash))
.and_return(double(value: true))
- client.fetch_remote(remote, ssh_auth: nil, forced: false, no_tags: false, timeout: 1)
+ client.fetch_remote(remote, ssh_auth: nil, forced: false, no_tags: false, timeout: 1, check_tags_changed: false)
end
context 'SSH auth' do
diff --git a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
index 180c6d9e420..3839303b881 100644
--- a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
@@ -205,7 +205,7 @@ RSpec.describe Gitlab::GithubImport::Importer::RepositoryImporter do
.with(project.import_url, refmap: Gitlab::GithubImport.refmap, forced: true, remote_name: 'github')
service = double
- expect(Projects::HousekeepingService)
+ expect(Repositories::HousekeepingService)
.to receive(:new).with(project, :gc).and_return(service)
expect(service).to receive(:execute)
diff --git a/spec/lib/gitlab/gitpod_spec.rb b/spec/lib/gitlab/gitpod_spec.rb
deleted file mode 100644
index 717e396f942..00000000000
--- a/spec/lib/gitlab/gitpod_spec.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Gitpod do
- let_it_be(:user) { create(:user) }
-
- before do
- stub_feature_flags(gitpod: feature_scope)
- end
-
- describe '.feature_available?' do
- subject { described_class.feature_available? }
-
- context 'when feature has not been set' do
- let(:feature_scope) { nil }
-
- it { is_expected.to be_truthy }
- end
-
- context 'when feature is disabled' do
- let(:feature_scope) { false }
-
- it { is_expected.to be_falsey }
- end
-
- context 'when feature is enabled globally' do
- let(:feature_scope) { true }
-
- it { is_expected.to be_truthy }
- end
-
- context 'when feature is enabled only to a resource' do
- let(:feature_scope) { user }
-
- it { is_expected.to be_truthy }
- end
- end
-
- describe '.feature_enabled?' do
- let(:current_user) { nil }
-
- subject { described_class.feature_enabled?(current_user) }
-
- context 'when feature has not been set' do
- let(:feature_scope) { nil }
-
- it { is_expected.to be_truthy }
- end
-
- context 'when feature is enabled globally' do
- let(:feature_scope) { true }
-
- it { is_expected.to be_truthy }
- end
-
- context 'when feature is enabled only to a resource' do
- let(:feature_scope) { user }
-
- context 'for the same resource' do
- let(:current_user) { user }
-
- it { is_expected.to be_truthy }
- end
-
- context 'for a different resource' do
- let(:current_user) { create(:user) }
-
- it { is_expected.to be_falsey }
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/graphql/batch_key_spec.rb b/spec/lib/gitlab/graphql/batch_key_spec.rb
new file mode 100644
index 00000000000..881fba5c1be
--- /dev/null
+++ b/spec/lib/gitlab/graphql/batch_key_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'test_prof/recipes/rspec/let_it_be'
+
+RSpec.describe ::Gitlab::Graphql::BatchKey do
+ let_it_be(:rect) { Struct.new(:len, :width) }
+ let_it_be(:circle) { Struct.new(:radius) }
+ let(:lookahead) { nil }
+ let(:object) { rect.new(2, 3) }
+
+ subject { described_class.new(object, lookahead, object_name: :rect) }
+
+ it 'is equal to keys of the same object, regardless of lookahead or object name' do
+ expect(subject).to eq(described_class.new(rect.new(2, 3)))
+ expect(subject).to eq(described_class.new(rect.new(2, 3), :anything))
+ expect(subject).to eq(described_class.new(rect.new(2, 3), lookahead, object_name: :does_not_matter))
+ expect(subject).not_to eq(described_class.new(rect.new(2, 4)))
+ expect(subject).not_to eq(described_class.new(circle.new(10)))
+ end
+
+ it 'delegates attribute lookup methods to the inner object' do
+ other = rect.new(2, 3)
+
+ expect(subject.hash).to eq(other.hash)
+ expect(subject.len).to eq(other.len)
+ expect(subject.width).to eq(other.width)
+ end
+
+ it 'allows the object to be named more meaningfully' do
+ expect(subject.object).to eq(object)
+ expect(subject.object).to eq(subject.rect)
+ end
+
+ it 'works as a hash key' do
+ h = { subject => :foo }
+
+ expect(h[described_class.new(object)]).to eq(:foo)
+ end
+
+ describe '#requires?' do
+ it 'returns false if the lookahead was not provided' do
+ expect(subject.requires?([:foo])).to be(false)
+ end
+
+ context 'lookahead was provided' do
+ let(:lookahead) { double(:Lookahead) }
+
+ before do
+ allow(lookahead).to receive(:selection).with(Symbol).and_return(lookahead)
+ end
+
+ it 'returns false if the path is empty' do
+ expect(subject.requires?([])).to be(false)
+ end
+
+ context 'it selects the field' do
+ before do
+ allow(lookahead).to receive(:selects?).with(Symbol).once.and_return(true)
+ end
+
+ it 'returns true' do
+ expect(subject.requires?(%i[foo bar baz])).to be(true)
+ end
+ end
+
+ context 'it does not select the field' do
+ before do
+ allow(lookahead).to receive(:selects?).with(Symbol).once.and_return(false)
+ end
+
+ it 'returns false' do
+ expect(subject.requires?(%i[foo bar baz])).to be(false)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
index 0ac54a20fcc..02e67488d3f 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
@@ -21,6 +21,47 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
Gitlab::Json.parse(Base64Bp.urlsafe_decode64(cursor))
end
+ # see: https://gitlab.com/gitlab-org/gitlab/-/issues/297358
+ context 'the relation has been preloaded' do
+ let(:projects) { Project.all.preload(:issues) }
+ let(:nodes) { projects.first.issues }
+
+ before do
+ project = create(:project)
+ create_list(:issue, 3, project: project)
+ end
+
+ it 'is loaded' do
+ expect(nodes).to be_loaded
+ end
+
+ it 'does not error when accessing pagination information' do
+ connection.first = 2
+
+ expect(connection).to have_attributes(
+ has_previous_page: false,
+ has_next_page: true
+ )
+ end
+
+ it 'can generate cursors' do
+ connection.send(:ordered_items) # necessary to generate the order-list
+
+ expect(connection.cursor_for(nodes.first)).to be_a(String)
+ end
+
+ it 'can read the next page' do
+ connection.send(:ordered_items) # necessary to generate the order-list
+ ordered = nodes.reorder(id: :desc)
+ next_page = described_class.new(nodes,
+ context: context,
+ max_page_size: 3,
+ after: connection.cursor_for(ordered.second))
+
+ expect(next_page.sliced_nodes).to contain_exactly(ordered.third)
+ end
+ end
+
it_behaves_like 'a connection with collection methods'
it_behaves_like 'a redactable connection' do
diff --git a/spec/lib/gitlab/graphql/queries_spec.rb b/spec/lib/gitlab/graphql/queries_spec.rb
new file mode 100644
index 00000000000..6e08a87523f
--- /dev/null
+++ b/spec/lib/gitlab/graphql/queries_spec.rb
@@ -0,0 +1,343 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require "test_prof/recipes/rspec/let_it_be"
+
+RSpec.describe Gitlab::Graphql::Queries do
+ shared_examples 'a valid GraphQL query for the blog schema' do
+ it 'is valid' do
+ expect(subject.validate(schema).second).to be_empty
+ end
+ end
+
+ shared_examples 'an invalid GraphQL query for the blog schema' do
+ it 'is invalid' do
+ expect(subject.validate(schema).second).to match errors
+ end
+ end
+
+ # Toy schema to validate queries against
+ let_it_be(:schema) do
+ author = Class.new(GraphQL::Schema::Object) do
+ graphql_name 'Author'
+ field :name, GraphQL::STRING_TYPE, null: true
+ field :handle, GraphQL::STRING_TYPE, null: false
+ field :verified, GraphQL::BOOLEAN_TYPE, null: false
+ end
+
+ post = Class.new(GraphQL::Schema::Object) do
+ graphql_name 'Post'
+ field :name, GraphQL::STRING_TYPE, null: false
+ field :title, GraphQL::STRING_TYPE, null: false
+ field :content, GraphQL::STRING_TYPE, null: true
+ field :author, author, null: false
+ end
+ author.field :posts, [post], null: false do
+ argument :blog_title, GraphQL::STRING_TYPE, required: false
+ end
+
+ blog = Class.new(GraphQL::Schema::Object) do
+ graphql_name 'Blog'
+ field :title, GraphQL::STRING_TYPE, null: false
+ field :description, GraphQL::STRING_TYPE, null: false
+ field :main_author, author, null: false
+ field :posts, [post], null: false
+ field :post, post, null: true do
+ argument :slug, GraphQL::STRING_TYPE, required: true
+ end
+ end
+
+ Class.new(GraphQL::Schema) do
+ query(Class.new(GraphQL::Schema::Object) do
+ graphql_name 'Query'
+ field :blog, blog, null: true do
+ argument :title, GraphQL::STRING_TYPE, required: true
+ end
+ field :post, post, null: true do
+ argument :slug, GraphQL::STRING_TYPE, required: true
+ end
+ end)
+ end
+ end
+
+ let(:root) do
+ Rails.root / 'fixtures/lib/gitlab/graphql/queries'
+ end
+
+ describe Gitlab::Graphql::Queries::Fragments do
+ subject { described_class.new(root) }
+
+ it 'has the right home' do
+ expect(subject.home).to eq (root / 'app/assets/javascripts').to_s
+ end
+
+ it 'has the right EE home' do
+ expect(subject.home_ee).to eq (root / 'ee/app/assets/javascripts').to_s
+ end
+
+ it 'caches query definitions' do
+ fragment = subject.get('foo')
+
+ expect(fragment).to be_a(::Gitlab::Graphql::Queries::Definition)
+ expect(subject.get('foo')).to be fragment
+ end
+ end
+
+ describe '.all' do
+ it 'is the combination of finding queries in CE and EE' do
+ expect(described_class)
+ .to receive(:find).with(Rails.root / 'app/assets/javascripts').and_return([:ce])
+ expect(described_class)
+ .to receive(:find).with(Rails.root / 'ee/app/assets/javascripts').and_return([:ee])
+
+ expect(described_class.all).to eq([:ce, :ee])
+ end
+ end
+
+ describe '.find' do
+ def definition_of(path)
+ be_a(::Gitlab::Graphql::Queries::Definition)
+ .and(have_attributes(file: path.to_s))
+ end
+
+ it 'finds a single specific file' do
+ path = root / 'post_by_slug.graphql'
+
+ expect(described_class.find(path)).to contain_exactly(definition_of(path))
+ end
+
+ it 'ignores files that do not exist' do
+ path = root / 'not_there.graphql'
+
+ expect(described_class.find(path)).to be_empty
+ end
+
+ it 'ignores fragments' do
+ path = root / 'author.fragment.graphql'
+
+ expect(described_class.find(path)).to be_empty
+ end
+
+ it 'ignores typedefs' do
+ path = root / 'typedefs.graphql'
+
+ expect(described_class.find(path)).to be_empty
+ end
+
+ it 'finds all query definitions under a root directory' do
+ found = described_class.find(root)
+
+ expect(found).to include(
+ definition_of(root / 'post_by_slug.graphql'),
+ definition_of(root / 'post_by_slug.with_import.graphql'),
+ definition_of(root / 'post_by_slug.with_import.misspelled.graphql'),
+ definition_of(root / 'duplicate_imports.graphql'),
+ definition_of(root / 'deeply/nested/query.graphql')
+ )
+
+ expect(found).not_to include(
+ definition_of(root / 'typedefs.graphql'),
+ definition_of(root / 'author.fragment.graphql')
+ )
+ end
+ end
+
+ describe Gitlab::Graphql::Queries::Definition do
+ let(:fragments) { Gitlab::Graphql::Queries::Fragments.new(root, '.') }
+
+ subject { described_class.new(root / path, fragments) }
+
+ context 'a simple query' do
+ let(:path) { 'post_by_slug.graphql' }
+
+ it_behaves_like 'a valid GraphQL query for the blog schema'
+ end
+
+ context 'a query with an import' do
+ let(:path) { 'post_by_slug.with_import.graphql' }
+
+ it_behaves_like 'a valid GraphQL query for the blog schema'
+ end
+
+ context 'a query with duplicate imports' do
+ let(:path) { 'duplicate_imports.graphql' }
+
+ it_behaves_like 'a valid GraphQL query for the blog schema'
+ end
+
+ context 'a query importing from ee_else_ce' do
+ let(:path) { 'ee_else_ce.import.graphql' }
+
+ it_behaves_like 'a valid GraphQL query for the blog schema'
+
+ it 'can resolve the ee fields' do
+ expect(subject.text(mode: :ce)).not_to include('verified')
+ expect(subject.text(mode: :ee)).to include('verified')
+ end
+ end
+
+ context 'a query referring to parent directories' do
+ let(:path) { 'deeply/nested/query.graphql' }
+
+ it_behaves_like 'a valid GraphQL query for the blog schema'
+ end
+
+ context 'a query referring to parent directories, incorrectly' do
+ let(:path) { 'deeply/nested/bad_import.graphql' }
+
+ it_behaves_like 'an invalid GraphQL query for the blog schema' do
+ let(:errors) do
+ contain_exactly(
+ be_a(::Gitlab::Graphql::Queries::FileNotFound)
+ .and(have_attributes(message: include('deeply/author.fragment.graphql')))
+ )
+ end
+ end
+ end
+
+ context 'a query with a broken import' do
+ let(:path) { 'post_by_slug.with_import.misspelled.graphql' }
+
+ it_behaves_like 'an invalid GraphQL query for the blog schema' do
+ let(:errors) do
+ contain_exactly(
+ be_a(::Gitlab::Graphql::Queries::FileNotFound)
+ .and(have_attributes(message: include('auther.fragment.graphql')))
+ )
+ end
+ end
+ end
+
+ context 'a query which imports a file with a broken import' do
+ let(:path) { 'transitive_bad_import.graphql' }
+
+ it_behaves_like 'an invalid GraphQL query for the blog schema' do
+ let(:errors) do
+ contain_exactly(
+ be_a(::Gitlab::Graphql::Queries::FileNotFound)
+ .and(have_attributes(message: include('does-not-exist.graphql')))
+ )
+ end
+ end
+ end
+
+ context 'a query containing a client directive' do
+ let(:path) { 'client.query.graphql' }
+
+ it_behaves_like 'a valid GraphQL query for the blog schema'
+
+ it 'is tagged as a client query' do
+ expect(subject.validate(schema).first).to eq :client_query
+ end
+ end
+
+ context 'a mixed client query, valid' do
+ let(:path) { 'mixed_client.query.graphql' }
+
+ it_behaves_like 'a valid GraphQL query for the blog schema'
+
+ it 'is not tagged as a client query' do
+ expect(subject.validate(schema).first).not_to eq :client_query
+ end
+ end
+
+ context 'a mixed client query, with skipped argument' do
+ let(:path) { 'mixed_client_skipped_argument.graphql' }
+
+ it_behaves_like 'a valid GraphQL query for the blog schema'
+ end
+
+ context 'a mixed client query, with unused fragment' do
+ let(:path) { 'mixed_client_unused_fragment.graphql' }
+
+ it_behaves_like 'a valid GraphQL query for the blog schema'
+ end
+
+ context 'a client query, with unused fragment' do
+ let(:path) { 'client_unused_fragment.graphql' }
+
+ it_behaves_like 'a valid GraphQL query for the blog schema'
+
+ it 'is tagged as a client query' do
+ expect(subject.validate(schema).first).to eq :client_query
+ end
+ end
+
+ context 'a mixed client query, invalid' do
+ let(:path) { 'mixed_client_invalid.query.graphql' }
+
+ it_behaves_like 'an invalid GraphQL query for the blog schema' do
+ let(:errors) do
+ contain_exactly(have_attributes(message: include('titlz')))
+ end
+ end
+ end
+
+ context 'a query containing a connection directive' do
+ let(:path) { 'connection.query.graphql' }
+
+ it_behaves_like 'a valid GraphQL query for the blog schema'
+ end
+
+ context 'a query which mentions an incorrect field' do
+ let(:path) { 'wrong_field.graphql' }
+
+ it_behaves_like 'an invalid GraphQL query for the blog schema' do
+ let(:errors) do
+ contain_exactly(
+ have_attributes(message: /'createdAt' doesn't exist/),
+ have_attributes(message: /'categories' doesn't exist/)
+ )
+ end
+ end
+ end
+
+ context 'a query which has a missing argument' do
+ let(:path) { 'missing_argument.graphql' }
+
+ it_behaves_like 'an invalid GraphQL query for the blog schema' do
+ let(:errors) do
+ contain_exactly(
+ have_attributes(message: include('blog'))
+ )
+ end
+ end
+ end
+
+ context 'a query which has a bad argument' do
+ let(:path) { 'bad_argument.graphql' }
+
+ it_behaves_like 'an invalid GraphQL query for the blog schema' do
+ let(:errors) do
+ contain_exactly(
+ have_attributes(message: include('Nullability mismatch on variable $bad'))
+ )
+ end
+ end
+ end
+
+ context 'a query which has a syntax error' do
+ let(:path) { 'syntax-error.graphql' }
+
+ it_behaves_like 'an invalid GraphQL query for the blog schema' do
+ let(:errors) do
+ contain_exactly(
+ have_attributes(message: include('Parse error'))
+ )
+ end
+ end
+ end
+
+ context 'a query which has an unused import' do
+ let(:path) { 'unused_import.graphql' }
+
+ it_behaves_like 'an invalid GraphQL query for the blog schema' do
+ let(:errors) do
+ contain_exactly(
+ have_attributes(message: include('AuthorF was defined, but not used'))
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index fba32ae0673..825513bdfc5 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -87,6 +87,7 @@ label:
- merge_requests
- priorities
- epic_board_labels
+- epic_lists
milestone:
- group
- project
@@ -524,7 +525,6 @@ project:
- designs
- project_aliases
- external_pull_requests
-- alerts_service
- grafana_integration
- remove_source_branch_after_merge
- deleting_user
@@ -560,6 +560,7 @@ project:
- alert_management_http_integrations
- exported_protected_branches
- incident_management_oncall_schedules
+- debian_distributions
award_emoji:
- awardable
- user
@@ -722,6 +723,7 @@ epic:
- user_mentions
- note_authors
- boards_epic_user_preferences
+- epic_board_positions
epic_issue:
- epic
- issue
diff --git a/spec/lib/gitlab/kubernetes/cilium_network_policy_spec.rb b/spec/lib/gitlab/kubernetes/cilium_network_policy_spec.rb
index 3f5661d4ca6..0092c69d0bb 100644
--- a/spec/lib/gitlab/kubernetes/cilium_network_policy_spec.rb
+++ b/spec/lib/gitlab/kubernetes/cilium_network_policy_spec.rb
@@ -12,7 +12,8 @@ RSpec.describe Gitlab::Kubernetes::CiliumNetworkPolicy do
ingress: ingress,
egress: egress,
labels: labels,
- resource_version: resource_version
+ resource_version: resource_version,
+ annotations: annotations
)
end
@@ -20,7 +21,7 @@ RSpec.describe Gitlab::Kubernetes::CiliumNetworkPolicy do
::Kubeclient::Resource.new(
apiVersion: Gitlab::Kubernetes::CiliumNetworkPolicy::API_VERSION,
kind: Gitlab::Kubernetes::CiliumNetworkPolicy::KIND,
- metadata: { name: name, namespace: namespace, resourceVersion: resource_version },
+ metadata: { name: name, namespace: namespace, resourceVersion: resource_version, annotations: annotations },
spec: { endpointSelector: endpoint_selector, ingress: ingress, egress: egress },
description: description
)
@@ -34,6 +35,7 @@ RSpec.describe Gitlab::Kubernetes::CiliumNetworkPolicy do
let(:description) { 'example-description' }
let(:partial_class_name) { described_class.name.split('::').last }
let(:resource_version) { 101 }
+ let(:annotations) { { 'app.gitlab.com/alert': 'true' } }
let(:ingress) do
[
{
@@ -64,6 +66,8 @@ RSpec.describe Gitlab::Kubernetes::CiliumNetworkPolicy do
name: example-name
namespace: example-namespace
resourceVersion: 101
+ annotations:
+ app.gitlab.com/alert: "true"
spec:
endpointSelector:
matchLabels:
@@ -157,7 +161,7 @@ RSpec.describe Gitlab::Kubernetes::CiliumNetworkPolicy do
description: description,
metadata: {
name: name, namespace: namespace, creationTimestamp: '2020-04-14T00:08:30Z',
- labels: { app: 'foo' }, resourceVersion: resource_version
+ labels: { app: 'foo' }, resourceVersion: resource_version, annotations: annotations
},
spec: { endpointSelector: endpoint_selector, ingress: ingress, egress: nil, labels: nil }
)
@@ -168,7 +172,7 @@ RSpec.describe Gitlab::Kubernetes::CiliumNetworkPolicy do
apiVersion: Gitlab::Kubernetes::CiliumNetworkPolicy::API_VERSION,
kind: Gitlab::Kubernetes::CiliumNetworkPolicy::KIND,
description: description,
- metadata: { name: name, namespace: namespace, resourceVersion: resource_version, labels: { app: 'foo' } },
+ metadata: { name: name, namespace: namespace, resourceVersion: resource_version, labels: { app: 'foo' }, annotations: annotations },
spec: { endpointSelector: endpoint_selector, ingress: ingress }
)
end
@@ -211,7 +215,7 @@ RSpec.describe Gitlab::Kubernetes::CiliumNetworkPolicy do
{
apiVersion: Gitlab::Kubernetes::CiliumNetworkPolicy::API_VERSION,
kind: Gitlab::Kubernetes::CiliumNetworkPolicy::KIND,
- metadata: { name: name, namespace: namespace, resourceVersion: resource_version },
+ metadata: { name: name, namespace: namespace, resourceVersion: resource_version, annotations: annotations },
spec: { endpointSelector: endpoint_selector, ingress: ingress, egress: egress },
description: description
}
@@ -248,5 +252,15 @@ RSpec.describe Gitlab::Kubernetes::CiliumNetworkPolicy do
it { is_expected.to eq(resource) }
end
+
+ context 'without annotations' do
+ let(:annotations) { nil }
+
+ before do
+ resource[:metadata].delete(:annotations)
+ end
+
+ it { is_expected.to eq(resource) }
+ end
end
end
diff --git a/spec/lib/gitlab/kubernetes/kubectl_cmd_spec.rb b/spec/lib/gitlab/kubernetes/kubectl_cmd_spec.rb
index e80bb3dfb07..2e373613269 100644
--- a/spec/lib/gitlab/kubernetes/kubectl_cmd_spec.rb
+++ b/spec/lib/gitlab/kubernetes/kubectl_cmd_spec.rb
@@ -56,9 +56,10 @@ RSpec.describe Gitlab::Kubernetes::KubectlCmd do
describe '.delete_crds_from_group' do
it 'constructs string properly' do
- expected_command = 'kubectl api-resources -o name --api-group foo | xargs kubectl delete --ignore-not-found crd'
+ command = 'kubectl api-resources -o name --api-group foo | xargs -r kubectl delete --ignore-not-found crd'
+ command_with_retries = "for i in $(seq 1 3); do #{command} && s=0 && break || s=$?; sleep 1s; echo \"Retrying ($i)...\"; done; (exit $s)"
- expect(described_class.delete_crds_from_group("foo")).to eq expected_command
+ expect(described_class.delete_crds_from_group("foo")).to eq command_with_retries
end
end
end
diff --git a/spec/lib/gitlab/kubernetes/pod_cmd_spec.rb b/spec/lib/gitlab/kubernetes/pod_cmd_spec.rb
new file mode 100644
index 00000000000..51bdbf64741
--- /dev/null
+++ b/spec/lib/gitlab/kubernetes/pod_cmd_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Kubernetes::PodCmd do
+ describe '.retry_command' do
+ it 'constructs string properly' do
+ command = 'my command'
+ command_with_retries = "for i in $(seq 1 3); do #{command} && s=0 && break || s=$?; sleep 1s; echo \"Retrying ($i)...\"; done; (exit $s)"
+
+ expect(described_class.retry_command(command)).to eq command_with_retries
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/samplers/action_cable_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/action_cable_sampler_spec.rb
index 7f05f35c941..f751416f4ec 100644
--- a/spec/lib/gitlab/metrics/samplers/action_cable_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/action_cable_sampler_spec.rb
@@ -7,15 +7,7 @@ RSpec.describe Gitlab::Metrics::Samplers::ActionCableSampler do
subject { described_class.new(action_cable: action_cable) }
- describe '#interval' do
- it 'samples every five seconds by default' do
- expect(subject.interval).to eq(5)
- end
-
- it 'samples at other intervals if requested' do
- expect(described_class.new(11).interval).to eq(11)
- end
- end
+ it_behaves_like 'metrics sampler', 'ACTION_CABLE_SAMPLER'
describe '#sample' do
let(:pool) { instance_double(Concurrent::ThreadPoolExecutor) }
diff --git a/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb
index b94d19ff227..9572e9f50be 100644
--- a/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb
@@ -5,15 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Metrics::Samplers::DatabaseSampler do
subject { described_class.new }
- describe '#interval' do
- it 'samples every five seconds by default' do
- expect(subject.interval).to eq(5)
- end
-
- it 'samples at other intervals if requested' do
- expect(described_class.new(11).interval).to eq(11)
- end
- end
+ it_behaves_like 'metrics sampler', 'DATABASE_SAMPLER'
describe '#sample' do
before do
diff --git a/spec/lib/gitlab/metrics/samplers/puma_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/puma_sampler_spec.rb
index 214649d3e7e..2013435a074 100644
--- a/spec/lib/gitlab/metrics/samplers/puma_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/puma_sampler_spec.rb
@@ -11,15 +11,7 @@ RSpec.describe Gitlab::Metrics::Samplers::PumaSampler do
allow(Gitlab::Metrics::NullMetric).to receive(:instance).and_return(null_metric)
end
- describe '#interval' do
- it 'samples every five seconds by default' do
- expect(subject.interval).to eq(5)
- end
-
- it 'samples at other intervals if requested' do
- expect(described_class.new(11).interval).to eq(11)
- end
- end
+ it_behaves_like 'metrics sampler', 'PUMA_SAMPLER'
describe '#sample' do
before do
diff --git a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
index eb6c83096b9..6f1e0480197 100644
--- a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
@@ -10,6 +10,8 @@ RSpec.describe Gitlab::Metrics::Samplers::RubySampler do
allow(Gitlab::Metrics::NullMetric).to receive(:instance).and_return(null_metric)
end
+ it_behaves_like 'metrics sampler', 'RUBY_SAMPLER'
+
describe '#initialize' do
it 'sets process_start_time_seconds' do
freeze_time do
@@ -18,16 +20,6 @@ RSpec.describe Gitlab::Metrics::Samplers::RubySampler do
end
end
- describe '#interval' do
- it 'samples every sixty seconds by default' do
- expect(subject.interval).to eq(60)
- end
-
- it 'samples at other intervals if requested' do
- expect(described_class.new(11).interval).to eq(11)
- end
- end
-
describe '#sample' do
it 'adds a metric containing the process resident memory bytes' do
expect(Gitlab::Metrics::System).to receive(:memory_usage_rss).and_return(9000)
diff --git a/spec/lib/gitlab/metrics/samplers/threads_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/threads_sampler_spec.rb
index 19477589289..5dabafb7c0b 100644
--- a/spec/lib/gitlab/metrics/samplers/threads_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/threads_sampler_spec.rb
@@ -5,15 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Metrics::Samplers::ThreadsSampler do
subject { described_class.new }
- describe '#interval' do
- it 'samples every five seconds by default' do
- expect(subject.interval).to eq(5)
- end
-
- it 'samples at other intervals if requested' do
- expect(described_class.new(11).interval).to eq(11)
- end
- end
+ it_behaves_like 'metrics sampler', 'THREADS_SAMPLER'
describe '#sample' do
before do
diff --git a/spec/lib/gitlab/metrics/samplers/unicorn_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/unicorn_sampler_spec.rb
index 9f2180c4170..7971a7cabd5 100644
--- a/spec/lib/gitlab/metrics/samplers/unicorn_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/unicorn_sampler_spec.rb
@@ -5,6 +5,8 @@ require 'spec_helper'
RSpec.describe Gitlab::Metrics::Samplers::UnicornSampler do
subject { described_class.new(1.second) }
+ it_behaves_like 'metrics sampler', 'UNICORN_SAMPLER'
+
describe '#sample' do
let(:unicorn) { Module.new }
let(:raindrops) { double('raindrops') }
diff --git a/spec/lib/gitlab/metrics/system_spec.rb b/spec/lib/gitlab/metrics/system_spec.rb
index 720bd5d79b3..732aa553737 100644
--- a/spec/lib/gitlab/metrics/system_spec.rb
+++ b/spec/lib/gitlab/metrics/system_spec.rb
@@ -96,6 +96,25 @@ RSpec.describe Gitlab::Metrics::System do
expect(described_class.memory_usage_uss_pss).to eq(uss: 475136, pss: 515072)
end
end
+
+ describe '.summary' do
+ it 'contains a selection of the available fields' do
+ stub_const('RUBY_DESCRIPTION', 'ruby-3.0-patch1')
+ mock_existing_proc_file('/proc/self/status', proc_status)
+ mock_existing_proc_file('/proc/self/smaps_rollup', proc_smaps_rollup)
+
+ summary = described_class.summary
+
+ expect(summary[:version]).to eq('ruby-3.0-patch1')
+ expect(summary[:gc_stat].keys).to eq(GC.stat.keys)
+ expect(summary[:memory_rss]).to eq(2527232)
+ expect(summary[:memory_uss]).to eq(475136)
+ expect(summary[:memory_pss]).to eq(515072)
+ expect(summary[:time_cputime]).to be_a(Float)
+ expect(summary[:time_realtime]).to be_a(Float)
+ expect(summary[:time_monotonic]).to be_a(Float)
+ end
+ end
end
context 'when /proc files do not exist' do
@@ -128,6 +147,21 @@ RSpec.describe Gitlab::Metrics::System do
expect(described_class.max_open_file_descriptors).to eq(0)
end
end
+
+ describe '.summary' do
+ it 'returns only available fields' do
+ summary = described_class.summary
+
+ expect(summary[:version]).to be_a(String)
+ expect(summary[:gc_stat].keys).to eq(GC.stat.keys)
+ expect(summary[:memory_rss]).to eq(0)
+ expect(summary[:memory_uss]).to eq(0)
+ expect(summary[:memory_pss]).to eq(0)
+ expect(summary[:time_cputime]).to be_a(Float)
+ expect(summary[:time_realtime]).to be_a(Float)
+ expect(summary[:time_monotonic]).to be_a(Float)
+ end
+ end
end
describe '.cpu_time' do
diff --git a/spec/lib/gitlab/middleware/multipart/handler_for_jwt_params_spec.rb b/spec/lib/gitlab/middleware/multipart/handler_for_jwt_params_spec.rb
deleted file mode 100644
index 59ec743f6ca..00000000000
--- a/spec/lib/gitlab/middleware/multipart/handler_for_jwt_params_spec.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Middleware::Multipart::HandlerForJWTParams do
- using RSpec::Parameterized::TableSyntax
-
- let_it_be(:env) { Rack::MockRequest.env_for('/', method: 'post', params: {}) }
- let_it_be(:message) { { 'rewritten_fields' => {} } }
-
- describe '#allowed_paths' do
- let_it_be(:expected_allowed_paths) do
- [
- Dir.tmpdir,
- ::FileUploader.root,
- ::Gitlab.config.uploads.storage_path,
- ::JobArtifactUploader.workhorse_upload_path,
- ::LfsObjectUploader.workhorse_upload_path,
- File.join(Rails.root, 'public/uploads/tmp')
- ]
- end
-
- let_it_be(:expected_with_packages_path) { expected_allowed_paths + [::Packages::PackageFileUploader.workhorse_upload_path] }
-
- subject { described_class.new(env, message).send(:allowed_paths) }
-
- where(:package_features_enabled, :object_storage_enabled, :direct_upload_enabled, :expected_paths) do
- false | false | true | :expected_allowed_paths
- false | false | false | :expected_allowed_paths
- false | true | true | :expected_allowed_paths
- false | true | false | :expected_allowed_paths
- true | false | true | :expected_with_packages_path
- true | false | false | :expected_with_packages_path
- true | true | true | :expected_allowed_paths
- true | true | false | :expected_with_packages_path
- end
-
- with_them do
- before do
- stub_config(packages: {
- enabled: package_features_enabled,
- object_store: {
- enabled: object_storage_enabled,
- direct_upload: direct_upload_enabled
- },
- storage_path: '/any/dir'
- })
- end
-
- it { is_expected.to eq(send(expected_paths)) }
- end
- end
-end
diff --git a/spec/lib/gitlab/middleware/multipart_with_handler_for_jwt_params_spec.rb b/spec/lib/gitlab/middleware/multipart_spec.rb
index a1e9ac6e425..65ec3535271 100644
--- a/spec/lib/gitlab/middleware/multipart_with_handler_for_jwt_params_spec.rb
+++ b/spec/lib/gitlab/middleware/multipart_spec.rb
@@ -21,10 +21,6 @@ RSpec.describe Gitlab::Middleware::Multipart do
middleware.call(env)
end
- before do
- stub_feature_flags(upload_middleware_jwt_params_handler: true)
- end
-
context 'remote file mode' do
let(:mode) { :remote }
@@ -34,7 +30,7 @@ RSpec.describe Gitlab::Middleware::Multipart do
include_context 'with one temporary file for multipart'
let(:rewritten_fields) { rewritten_fields_hash('file' => uploaded_filepath) }
- let(:params) { upload_parameters_for(key: 'file', filename: filename, remote_id: remote_id).merge('file.path' => '/should/not/be/read') }
+ let(:params) { upload_parameters_for(key: 'file', mode: mode, filename: filename, remote_id: remote_id).merge('file.path' => '/should/not/be/read') }
it 'builds an UploadedFile' do
expect_uploaded_files(original_filename: filename, remote_id: remote_id, size: uploaded_file.size, params_path: %w(file))
@@ -55,14 +51,14 @@ RSpec.describe Gitlab::Middleware::Multipart do
let(:allowed_paths) { [Dir.tmpdir] }
before do
- expect_next_instance_of(::Gitlab::Middleware::Multipart::HandlerForJWTParams) do |handler|
+ expect_next_instance_of(::Gitlab::Middleware::Multipart::Handler) do |handler|
expect(handler).to receive(:allowed_paths).and_return(allowed_paths)
end
end
context 'in allowed paths' do
let(:rewritten_fields) { rewritten_fields_hash('file' => uploaded_filepath) }
- let(:params) { upload_parameters_for(filepath: uploaded_filepath, key: 'file', filename: filename) }
+ let(:params) { upload_parameters_for(filepath: uploaded_filepath, key: 'file', mode: mode, filename: filename) }
it 'builds an UploadedFile' do
expect_uploaded_files(filepath: uploaded_filepath, original_filename: filename, size: uploaded_file.size, params_path: %w(file))
@@ -75,7 +71,7 @@ RSpec.describe Gitlab::Middleware::Multipart do
let(:allowed_paths) { [] }
let(:rewritten_fields) { rewritten_fields_hash('file' => uploaded_filepath) }
- let(:params) { upload_parameters_for(filepath: uploaded_filepath, key: 'file') }
+ let(:params) { upload_parameters_for(filepath: uploaded_filepath, key: 'file', mode: mode) }
it 'returns an error' do
result = subject
@@ -89,7 +85,7 @@ RSpec.describe Gitlab::Middleware::Multipart do
context 'with dummy params in remote mode' do
let(:rewritten_fields) { { 'file' => 'should/not/be/read' } }
- let(:params) { upload_parameters_for(key: 'file') }
+ let(:params) { upload_parameters_for(key: 'file', mode: mode) }
let(:mode) { :remote }
context 'with an invalid secret' do
@@ -128,7 +124,7 @@ RSpec.describe Gitlab::Middleware::Multipart do
RSpec.shared_examples 'rejecting the invalid key' do |key_in_header:, key_in_upload_params:, error_message:|
let(:rewritten_fields) { rewritten_fields_hash(key_in_header => uploaded_filepath) }
- let(:params) { upload_parameters_for(filepath: uploaded_filepath, key: key_in_upload_params, filename: filename, remote_id: remote_id) }
+ let(:params) { upload_parameters_for(filepath: uploaded_filepath, key: key_in_upload_params, mode: mode, filename: filename, remote_id: remote_id) }
it 'raises an error' do
expect { subject }.to raise_error(RuntimeError, error_message)
@@ -171,7 +167,7 @@ RSpec.describe Gitlab::Middleware::Multipart do
let(:rewritten_fields) { rewritten_fields_hash('file' => uploaded_filepath) }
let(:crafted_payload) { Base64.urlsafe_encode64({ 'path' => 'test' }.to_json) }
let(:params) do
- upload_parameters_for(filepath: uploaded_filepath, key: 'file', filename: filename, remote_id: remote_id).tap do |params|
+ upload_parameters_for(filepath: uploaded_filepath, key: 'file', mode: mode, filename: filename, remote_id: remote_id).tap do |params|
header, _, sig = params['file.gitlab-workhorse-upload'].split('.')
params['file.gitlab-workhorse-upload'] = [header, crafted_payload, sig].join('.')
end
@@ -187,7 +183,7 @@ RSpec.describe Gitlab::Middleware::Multipart do
let(:rewritten_fields) { rewritten_fields_hash('file' => uploaded_filepath) }
let(:params) do
- upload_parameters_for(filepath: uploaded_filepath, key: 'file', filename: filename, remote_id: remote_id).tap do |params|
+ upload_parameters_for(filepath: uploaded_filepath, key: 'file', mode: mode, filename: filename, remote_id: remote_id).tap do |params|
header, payload, sig = params['file.gitlab-workhorse-upload'].split('.')
params['file.gitlab-workhorse-upload'] = [header, payload, "#{sig}modified"].join('.')
end
diff --git a/spec/lib/gitlab/middleware/multipart_with_handler_spec.rb b/spec/lib/gitlab/middleware/multipart_with_handler_spec.rb
deleted file mode 100644
index 8c2af775574..00000000000
--- a/spec/lib/gitlab/middleware/multipart_with_handler_spec.rb
+++ /dev/null
@@ -1,196 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Middleware::Multipart do
- include MultipartHelpers
-
- describe '#call' do
- let(:app) { double(:app) }
- let(:middleware) { described_class.new(app) }
- let(:secret) { Gitlab::Workhorse.secret }
- let(:issuer) { 'gitlab-workhorse' }
-
- subject do
- env = post_env(
- rewritten_fields: rewritten_fields,
- params: params,
- secret: secret,
- issuer: issuer
- )
- middleware.call(env)
- end
-
- before do
- stub_feature_flags(upload_middleware_jwt_params_handler: false)
- end
-
- context 'remote file mode' do
- let(:mode) { :remote }
-
- it_behaves_like 'handling all upload parameters conditions'
-
- context 'and a path set' do
- include_context 'with one temporary file for multipart'
-
- let(:rewritten_fields) { rewritten_fields_hash('file' => uploaded_filepath) }
- let(:params) { upload_parameters_for(key: 'file', filename: filename, remote_id: remote_id).merge('file.path' => '/should/not/be/read') }
-
- it 'builds an UploadedFile' do
- expect_uploaded_files(original_filename: filename, remote_id: remote_id, size: uploaded_file.size, params_path: %w(file))
-
- subject
- end
- end
- end
-
- context 'local file mode' do
- let(:mode) { :local }
-
- it_behaves_like 'handling all upload parameters conditions'
-
- context 'when file is' do
- include_context 'with one temporary file for multipart'
-
- let(:allowed_paths) { [Dir.tmpdir] }
-
- before do
- expect_next_instance_of(::Gitlab::Middleware::Multipart::Handler) do |handler|
- expect(handler).to receive(:allowed_paths).and_return(allowed_paths)
- end
- end
-
- context 'in allowed paths' do
- let(:rewritten_fields) { rewritten_fields_hash('file' => uploaded_filepath) }
- let(:params) { upload_parameters_for(filepath: uploaded_filepath, key: 'file', filename: filename) }
-
- it 'builds an UploadedFile' do
- expect_uploaded_files(filepath: uploaded_filepath, original_filename: filename, size: uploaded_file.size, params_path: %w(file))
-
- subject
- end
- end
-
- context 'not in allowed paths' do
- let(:allowed_paths) { [] }
-
- let(:rewritten_fields) { rewritten_fields_hash('file' => uploaded_filepath) }
- let(:params) { upload_parameters_for(filepath: uploaded_filepath, key: 'file') }
-
- it 'returns an error' do
- result = subject
-
- expect(result[0]).to eq(400)
- expect(result[2]).to include('insecure path used')
- end
- end
- end
- end
-
- context 'with dummy params in remote mode' do
- let(:rewritten_fields) { { 'file' => 'should/not/be/read' } }
- let(:params) { upload_parameters_for(key: 'file') }
- let(:mode) { :remote }
-
- context 'with an invalid secret' do
- let(:secret) { 'INVALID_SECRET' }
-
- it { expect { subject }.to raise_error(JWT::VerificationError) }
- end
-
- context 'with an invalid issuer' do
- let(:issuer) { 'INVALID_ISSUER' }
-
- it { expect { subject }.to raise_error(JWT::InvalidIssuerError) }
- end
-
- context 'with invalid rewritten field key' do
- invalid_keys = [
- '[file]',
- ';file',
- 'file]',
- ';file]',
- 'file]]',
- 'file;;'
- ]
-
- invalid_keys.each do |invalid_key|
- context invalid_key do
- let(:rewritten_fields) { { invalid_key => 'should/not/be/read' } }
-
- it { expect { subject }.to raise_error(RuntimeError, "invalid field: \"#{invalid_key}\"") }
- end
- end
- end
-
- context 'with invalid key in parameters' do
- include_context 'with one temporary file for multipart'
-
- let(:rewritten_fields) { rewritten_fields_hash('file' => uploaded_filepath) }
- let(:params) { upload_parameters_for(filepath: uploaded_filepath, key: 'wrong_key', filename: filename, remote_id: remote_id) }
-
- it 'builds no UploadedFile' do
- expect(app).to receive(:call) do |env|
- received_params = get_params(env)
- expect(received_params['file']).to be_nil
- expect(received_params['wrong_key']).to be_nil
- end
-
- subject
- end
- end
-
- context 'with invalid key in header' do
- include_context 'with one temporary file for multipart'
-
- RSpec.shared_examples 'rejecting the invalid key' do |key_in_header:, key_in_upload_params:, error_message:|
- let(:rewritten_fields) { rewritten_fields_hash(key_in_header => uploaded_filepath) }
- let(:params) { upload_parameters_for(filepath: uploaded_filepath, key: key_in_upload_params, filename: filename, remote_id: remote_id) }
-
- it 'raises an error' do
- expect { subject }.to raise_error(RuntimeError, error_message)
- end
- end
-
- it_behaves_like 'rejecting the invalid key',
- key_in_header: 'user[avatar',
- key_in_upload_params: 'user[avatar]',
- error_message: 'invalid field: "user[avatar"'
- it_behaves_like 'rejecting the invalid key',
- key_in_header: '[user]avatar',
- key_in_upload_params: 'user[avatar]',
- error_message: 'invalid field: "[user]avatar"'
- it_behaves_like 'rejecting the invalid key',
- key_in_header: 'user[]avatar',
- key_in_upload_params: 'user[avatar]',
- error_message: 'invalid field: "user[]avatar"'
- it_behaves_like 'rejecting the invalid key',
- key_in_header: 'user[avatar[image[url]]]',
- key_in_upload_params: 'user[avatar]',
- error_message: 'invalid field: "user[avatar[image[url]]]"'
- it_behaves_like 'rejecting the invalid key',
- key_in_header: '[]',
- key_in_upload_params: 'user[avatar]',
- error_message: 'invalid field: "[]"'
- it_behaves_like 'rejecting the invalid key',
- key_in_header: 'x' * 11000,
- key_in_upload_params: 'user[avatar]',
- error_message: "invalid field: \"#{'x' * 11000}\""
- end
-
- context 'with key with unbalanced brackets in header' do
- include_context 'with one temporary file for multipart'
-
- let(:invalid_key) { 'user[avatar' }
- let(:rewritten_fields) { rewritten_fields_hash( invalid_key => uploaded_filepath) }
- let(:params) { upload_parameters_for(filepath: uploaded_filepath, key: 'user[avatar]', filename: filename, remote_id: remote_id) }
-
- it 'builds no UploadedFile' do
- expect(app).not_to receive(:call)
-
- expect { subject }.to raise_error(RuntimeError, "invalid field: \"#{invalid_key}\"")
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/path_regex_spec.rb b/spec/lib/gitlab/path_regex_spec.rb
index 8e9f7e372c5..cd89674af0f 100644
--- a/spec/lib/gitlab/path_regex_spec.rb
+++ b/spec/lib/gitlab/path_regex_spec.rb
@@ -102,6 +102,7 @@ RSpec.describe Gitlab::PathRegex do
.concat(files_in_public)
.concat(Array(API::API.prefix.to_s))
.concat(sitemap_words)
+ .concat(deprecated_routes)
.compact
.uniq
end
@@ -110,6 +111,11 @@ RSpec.describe Gitlab::PathRegex do
%w(sitemap sitemap.xml sitemap.xml.gz)
end
+ let(:deprecated_routes) do
+ # profile was deprecated in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/51646
+ %w(profile)
+ end
+
let(:ee_top_level_words) do
%w(unsubscribes v2)
end
diff --git a/spec/lib/gitlab/performance_bar/redis_adapter_when_peek_enabled_spec.rb b/spec/lib/gitlab/performance_bar/redis_adapter_when_peek_enabled_spec.rb
index bbc8b0d67e0..05cdc5bb79b 100644
--- a/spec/lib/gitlab/performance_bar/redis_adapter_when_peek_enabled_spec.rb
+++ b/spec/lib/gitlab/performance_bar/redis_adapter_when_peek_enabled_spec.rb
@@ -31,6 +31,7 @@ RSpec.describe Gitlab::PerformanceBar::RedisAdapterWhenPeekEnabled do
expect_to_obtain_exclusive_lease(GitlabPerformanceBarStatsWorker::LEASE_KEY, uuid)
expect(GitlabPerformanceBarStatsWorker).to receive(:perform_in).with(GitlabPerformanceBarStatsWorker::WORKER_DELAY, uuid)
expect(client).to receive(:sadd).with(GitlabPerformanceBarStatsWorker::STATS_KEY, uuid)
+ expect(client).to receive(:expire).with(GitlabPerformanceBarStatsWorker::STATS_KEY, GitlabPerformanceBarStatsWorker::STATS_KEY_EXPIRE)
peek_adapter.new(client).save('foo')
end
diff --git a/spec/lib/gitlab/project_template_spec.rb b/spec/lib/gitlab/project_template_spec.rb
index 98bd2efdbc6..4eb13e63b46 100644
--- a/spec/lib/gitlab/project_template_spec.rb
+++ b/spec/lib/gitlab/project_template_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Gitlab::ProjectTemplate do
hexo sse_middleman gitpod_spring_petclinic nfhugo
nfjekyll nfplainhtml nfgitbook nfhexo salesforcedx
serverless_framework jsonnet cluster_management
+ kotlin_native_linux
]
expect(described_class.all).to be_an(Array)
diff --git a/spec/lib/gitlab/prometheus/internal_spec.rb b/spec/lib/gitlab/prometheus/internal_spec.rb
index 7771d85222a..b08b8813470 100644
--- a/spec/lib/gitlab/prometheus/internal_spec.rb
+++ b/spec/lib/gitlab/prometheus/internal_spec.rb
@@ -3,12 +3,12 @@
require 'spec_helper'
RSpec.describe Gitlab::Prometheus::Internal do
- let(:listen_address) { 'localhost:9090' }
+ let(:server_address) { 'localhost:9090' }
let(:prometheus_settings) do
{
- enable: true,
- listen_address: listen_address
+ enabled: true,
+ server_address: server_address
}
end
@@ -27,25 +27,25 @@ RSpec.describe Gitlab::Prometheus::Internal do
it_behaves_like 'returns valid uri', 'http://localhost:9090'
context 'with non default prometheus address' do
- let(:listen_address) { 'https://localhost:9090' }
+ let(:server_address) { 'https://localhost:9090' }
it_behaves_like 'returns valid uri', 'https://localhost:9090'
context 'with :9090 symbol' do
- let(:listen_address) { :':9090' }
+ let(:server_address) { :':9090' }
it_behaves_like 'returns valid uri', 'http://localhost:9090'
end
context 'with 0.0.0.0:9090' do
- let(:listen_address) { '0.0.0.0:9090' }
+ let(:server_address) { '0.0.0.0:9090' }
it_behaves_like 'returns valid uri', 'http://localhost:9090'
end
end
- context 'when listen_address is nil' do
- let(:listen_address) { nil }
+ context 'when server_address is nil' do
+ let(:server_address) { nil }
it 'does not fail' do
expect(described_class.uri).to be_nil
@@ -53,7 +53,7 @@ RSpec.describe Gitlab::Prometheus::Internal do
end
context 'when prometheus listen address is blank in gitlab.yml' do
- let(:listen_address) { '' }
+ let(:server_address) { '' }
it 'does not configure prometheus' do
expect(described_class.uri).to be_nil
@@ -61,26 +61,6 @@ RSpec.describe Gitlab::Prometheus::Internal do
end
end
- describe '.server_address' do
- context 'self.uri returns valid uri' do
- ['http://localhost:9090', 'https://localhost:9090 '].each do |valid_uri|
- it 'returns correct server address' do
- expect(described_class).to receive(:uri).and_return(valid_uri)
-
- expect(described_class.server_address).to eq('localhost:9090')
- end
- end
- end
-
- context 'self.uri returns nil' do
- it 'returns nil' do
- expect(described_class).to receive(:uri).and_return(nil)
-
- expect(described_class.server_address).to be_nil
- end
- end
- end
-
describe '.prometheus_enabled?' do
it 'returns correct value' do
expect(described_class.prometheus_enabled?).to eq(true)
@@ -89,8 +69,8 @@ RSpec.describe Gitlab::Prometheus::Internal do
context 'when prometheus setting is disabled in gitlab.yml' do
let(:prometheus_settings) do
{
- enable: false,
- listen_address: listen_address
+ enabled: false,
+ server_address: server_address
}
end
@@ -110,9 +90,9 @@ RSpec.describe Gitlab::Prometheus::Internal do
end
end
- describe '.listen_address' do
+ describe '.server_address' do
it 'returns correct value' do
- expect(described_class.listen_address).to eq(listen_address)
+ expect(described_class.server_address).to eq(server_address)
end
context 'when prometheus setting is not present in gitlab.yml' do
@@ -121,7 +101,7 @@ RSpec.describe Gitlab::Prometheus::Internal do
end
it 'does not fail' do
- expect(described_class.listen_address).to be_nil
+ expect(described_class.server_address).to be_nil
end
end
end
diff --git a/spec/lib/gitlab/rack_attack_spec.rb b/spec/lib/gitlab/rack_attack_spec.rb
index d72863b0103..5748e1e49e5 100644
--- a/spec/lib/gitlab/rack_attack_spec.rb
+++ b/spec/lib/gitlab/rack_attack_spec.rb
@@ -22,8 +22,7 @@ RSpec.describe Gitlab::RackAttack, :aggregate_failures do
stub_const("Rack::Attack", fake_rack_attack)
stub_const("Rack::Attack::Request", fake_rack_attack_request)
- # Expect rather than just allow, because this is actually fairly important functionality
- expect(fake_rack_attack).to receive(:throttled_response_retry_after_header=).with(true)
+ allow(fake_rack_attack).to receive(:throttled_response=)
allow(fake_rack_attack).to receive(:throttle)
allow(fake_rack_attack).to receive(:track)
allow(fake_rack_attack).to receive(:safelist)
@@ -36,6 +35,12 @@ RSpec.describe Gitlab::RackAttack, :aggregate_failures do
expect(fake_rack_attack_request).to include(described_class::Request)
end
+ it 'configures the throttle response' do
+ described_class.configure(fake_rack_attack)
+
+ expect(fake_rack_attack).to have_received(:throttled_response=).with(an_instance_of(Proc))
+ end
+
it 'configures the safelist' do
described_class.configure(fake_rack_attack)
@@ -93,4 +98,207 @@ RSpec.describe Gitlab::RackAttack, :aggregate_failures do
end
end
end
+
+ describe '.throttled_response_headers' do
+ where(:matched, :match_data, :headers) do
+ [
+ [
+ 'throttle_unauthenticated',
+ {
+ discriminator: '127.0.0.1',
+ count: 3700,
+ period: 1.hour,
+ limit: 3600,
+ epoch_time: Time.utc(2021, 1, 5, 10, 29, 30).to_i
+ },
+ {
+ 'RateLimit-Name' => 'throttle_unauthenticated',
+ 'RateLimit-Limit' => '60',
+ 'RateLimit-Observed' => '3700',
+ 'RateLimit-Remaining' => '0',
+ 'RateLimit-Reset' => '1609844400', # Time.utc(2021, 1, 5, 11, 0, 0).to_i.to_s
+ 'RateLimit-ResetTime' => 'Tue, 05 Jan 2021 11:00:00 GMT',
+ 'Retry-After' => '1830'
+ }
+ ],
+ [
+ 'throttle_unauthenticated',
+ {
+ discriminator: '127.0.0.1',
+ count: 3700,
+ period: 1.hour,
+ limit: 3600,
+ epoch_time: Time.utc(2021, 1, 5, 10, 59, 59).to_i
+ },
+ {
+ 'RateLimit-Name' => 'throttle_unauthenticated',
+ 'RateLimit-Limit' => '60',
+ 'RateLimit-Observed' => '3700',
+ 'RateLimit-Remaining' => '0',
+ 'RateLimit-Reset' => '1609844400', # Time.utc(2021, 1, 5, 11, 0, 0).to_i.to_s
+ 'RateLimit-ResetTime' => 'Tue, 05 Jan 2021 11:00:00 GMT',
+ 'Retry-After' => '1'
+ }
+ ],
+ [
+ 'throttle_unauthenticated',
+ {
+ discriminator: '127.0.0.1',
+ count: 3700,
+ period: 1.hour,
+ limit: 3600,
+ epoch_time: Time.utc(2021, 1, 5, 10, 0, 0).to_i
+ },
+ {
+ 'RateLimit-Name' => 'throttle_unauthenticated',
+ 'RateLimit-Limit' => '60',
+ 'RateLimit-Observed' => '3700',
+ 'RateLimit-Remaining' => '0',
+ 'RateLimit-Reset' => '1609844400', # Time.utc(2021, 1, 5, 11, 0, 0).to_i.to_s
+ 'RateLimit-ResetTime' => 'Tue, 05 Jan 2021 11:00:00 GMT',
+ 'Retry-After' => '3600'
+ }
+ ],
+ [
+ 'throttle_unauthenticated',
+ {
+ discriminator: '127.0.0.1',
+ count: 3700,
+ period: 1.hour,
+ limit: 3600,
+ epoch_time: Time.utc(2021, 1, 5, 23, 30, 0).to_i
+ },
+ {
+ 'RateLimit-Name' => 'throttle_unauthenticated',
+ 'RateLimit-Limit' => '60',
+ 'RateLimit-Observed' => '3700',
+ 'RateLimit-Remaining' => '0',
+ 'RateLimit-Reset' => '1609891200', # Time.utc(2021, 1, 6, 0, 0, 0).to_i.to_s
+ 'RateLimit-ResetTime' => 'Wed, 06 Jan 2021 00:00:00 GMT', # Next day
+ 'Retry-After' => '1800'
+ }
+ ],
+ [
+ 'throttle_unauthenticated',
+ {
+ discriminator: '127.0.0.1',
+ count: 3700,
+ period: 1.hour,
+ limit: 3400,
+ epoch_time: Time.utc(2021, 1, 5, 10, 30, 0).to_i
+ },
+ {
+ 'RateLimit-Name' => 'throttle_unauthenticated',
+ 'RateLimit-Limit' => '57', # 56.66 requests per minute
+ 'RateLimit-Observed' => '3700',
+ 'RateLimit-Remaining' => '0',
+ 'RateLimit-Reset' => '1609844400', # Time.utc(2021, 1, 5, 11, 0, 0).to_i.to_s
+ 'RateLimit-ResetTime' => 'Tue, 05 Jan 2021 11:00:00 GMT',
+ 'Retry-After' => '1800'
+ }
+ ],
+ [
+ 'throttle_unauthenticated',
+ {
+ discriminator: '127.0.0.1',
+ count: 3700,
+ period: 1.hour,
+ limit: 3700,
+ epoch_time: Time.utc(2021, 1, 5, 10, 30, 0).to_i
+ },
+ {
+ 'RateLimit-Name' => 'throttle_unauthenticated',
+ 'RateLimit-Limit' => '62', # 61.66 requests per minute
+ 'RateLimit-Observed' => '3700',
+ 'RateLimit-Remaining' => '0',
+ 'RateLimit-Reset' => '1609844400', # Time.utc(2021, 1, 5, 11, 0, 0).to_i.to_s
+ 'RateLimit-ResetTime' => 'Tue, 05 Jan 2021 11:00:00 GMT',
+ 'Retry-After' => '1800'
+ }
+ ],
+ [
+ 'throttle_unauthenticated',
+ {
+ discriminator: '127.0.0.1',
+ count: 3700,
+ period: 1.hour,
+ limit: 59,
+ epoch_time: Time.utc(2021, 1, 5, 10, 30, 0).to_i
+ },
+ {
+ 'RateLimit-Name' => 'throttle_unauthenticated',
+ 'RateLimit-Limit' => '1', # 0.9833 requests per minute
+ 'RateLimit-Observed' => '3700',
+ 'RateLimit-Remaining' => '0',
+ 'RateLimit-Reset' => '1609844400', # Time.utc(2021, 1, 5, 11, 0, 0).to_i.to_s
+ 'RateLimit-ResetTime' => 'Tue, 05 Jan 2021 11:00:00 GMT',
+ 'Retry-After' => '1800'
+ }
+ ],
+ [
+ 'throttle_unauthenticated',
+ {
+ discriminator: '127.0.0.1',
+ count: 3700,
+ period: 1.hour,
+ limit: 61,
+ epoch_time: Time.utc(2021, 1, 5, 10, 30, 0).to_i
+ },
+ {
+ 'RateLimit-Name' => 'throttle_unauthenticated',
+ 'RateLimit-Limit' => '2', # 1.016 requests per minute
+ 'RateLimit-Observed' => '3700',
+ 'RateLimit-Remaining' => '0',
+ 'RateLimit-Reset' => '1609844400', # Time.utc(2021, 1, 5, 11, 0, 0).to_i.to_s
+ 'RateLimit-ResetTime' => 'Tue, 05 Jan 2021 11:00:00 GMT',
+ 'Retry-After' => '1800'
+ }
+ ],
+ [
+ 'throttle_unauthenticated',
+ {
+ discriminator: '127.0.0.1',
+ count: 3700,
+ period: 15.seconds,
+ limit: 10,
+ epoch_time: Time.utc(2021, 1, 5, 10, 30, 0).to_i
+ },
+ {
+ 'RateLimit-Name' => 'throttle_unauthenticated',
+ 'RateLimit-Limit' => '40',
+ 'RateLimit-Observed' => '3700',
+ 'RateLimit-Remaining' => '0',
+ 'RateLimit-Reset' => '1609842615', # Time.utc(2021, 1, 5, 10, 30, 15).to_i.to_s
+ 'RateLimit-ResetTime' => 'Tue, 05 Jan 2021 10:30:15 GMT',
+ 'Retry-After' => '15'
+ }
+ ],
+ [
+ 'throttle_unauthenticated',
+ {
+ discriminator: '127.0.0.1',
+ count: 3700,
+ period: 27.seconds,
+ limit: 10,
+ epoch_time: Time.utc(2021, 1, 5, 10, 30, 0).to_i
+ },
+ {
+ 'RateLimit-Name' => 'throttle_unauthenticated',
+ 'RateLimit-Limit' => '23',
+ 'RateLimit-Observed' => '3700',
+ 'RateLimit-Remaining' => '0',
+ 'RateLimit-Reset' => '1609842627', # Time.utc(2021, 1, 5, 10, 30, 27).to_i.to_s
+ 'RateLimit-ResetTime' => 'Tue, 05 Jan 2021 10:30:27 GMT',
+ 'Retry-After' => '27'
+ }
+ ]
+ ]
+ end
+
+ with_them do
+ it 'generates accurate throttled headers' do
+ expect(described_class.throttled_response_headers(matched, match_data)).to eql(headers)
+ end
+ end
+ end
end
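Note: the expected values in the table above all follow the same arithmetic: the limit is rescaled to requests per minute and rounded up, the reset time is the next period boundary after the request, and Retry-After is the number of seconds until that boundary. A rough sketch of that calculation, inferred from the table rather than taken from the production implementation (the helper name and signature here are illustrative only):

  require 'time'

  # Sketch only: derives the header values the examples above expect.
  def sketch_throttled_headers(name, match_data)
    period = match_data[:period].to_i
    now    = match_data[:epoch_time]
    reset  = (now / period + 1) * period # next period boundary after the request

    {
      'RateLimit-Name'      => name,
      'RateLimit-Limit'     => (match_data[:limit] * 60.0 / period).ceil.to_s, # per minute, rounded up
      'RateLimit-Observed'  => match_data[:count].to_s,
      'RateLimit-Remaining' => [match_data[:limit] - match_data[:count], 0].max.to_s,
      'RateLimit-Reset'     => reset.to_s,
      'RateLimit-ResetTime' => Time.at(reset).utc.httpdate,
      'Retry-After'         => (reset - now).to_s
    }
  end
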
diff --git a/spec/lib/gitlab/search_results_spec.rb b/spec/lib/gitlab/search_results_spec.rb
index 57be9e93af2..c437b6bcceb 100644
--- a/spec/lib/gitlab/search_results_spec.rb
+++ b/spec/lib/gitlab/search_results_spec.rb
@@ -342,17 +342,36 @@ RSpec.describe Gitlab::SearchResults do
expect(results.limited_issues_count).to eq 4
end
- it 'lists all issues for admin' do
- results = described_class.new(admin, query, limit_projects)
- issues = results.objects('issues')
+ context 'with admin user' do
+ context 'when admin mode enabled', :enable_admin_mode do
+ it 'lists all issues' do
+ results = described_class.new(admin, query, limit_projects)
+ issues = results.objects('issues')
+
+ expect(issues).to include issue
+ expect(issues).to include security_issue_1
+ expect(issues).to include security_issue_2
+ expect(issues).to include security_issue_3
+ expect(issues).to include security_issue_4
+ expect(issues).not_to include security_issue_5
+ expect(results.limited_issues_count).to eq 5
+ end
+ end
- expect(issues).to include issue
- expect(issues).to include security_issue_1
- expect(issues).to include security_issue_2
- expect(issues).to include security_issue_3
- expect(issues).to include security_issue_4
- expect(issues).not_to include security_issue_5
- expect(results.limited_issues_count).to eq 5
+ context 'when admin mode disabled' do
+ it 'does not list confidential issues' do
+ results = described_class.new(admin, query, limit_projects)
+ issues = results.objects('issues')
+
+ expect(issues).to include issue
+ expect(issues).not_to include security_issue_1
+ expect(issues).not_to include security_issue_2
+ expect(issues).not_to include security_issue_3
+ expect(issues).not_to include security_issue_4
+ expect(issues).not_to include security_issue_5
+ expect(results.limited_issues_count).to eq 1
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/admin_mode/client_spec.rb b/spec/lib/gitlab/sidekiq_middleware/admin_mode/client_spec.rb
index 3d9ffb11ae2..3ba08455d01 100644
--- a/spec/lib/gitlab/sidekiq_middleware/admin_mode/client_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/admin_mode/client_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::SidekiqMiddleware::AdminMode::Client, :do_not_mock_admin_mode, :request_store do
+RSpec.describe Gitlab::SidekiqMiddleware::AdminMode::Client, :request_store do
include AdminModeHelper
let(:worker) do
diff --git a/spec/lib/gitlab/sidekiq_middleware/admin_mode/server_spec.rb b/spec/lib/gitlab/sidekiq_middleware/admin_mode/server_spec.rb
index 20f1e88bcf4..e8322b11875 100644
--- a/spec/lib/gitlab/sidekiq_middleware/admin_mode/server_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/admin_mode/server_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::SidekiqMiddleware::AdminMode::Server, :do_not_mock_admin_mode, :request_store do
+RSpec.describe Gitlab::SidekiqMiddleware::AdminMode::Server, :request_store do
include AdminModeHelper
let(:worker) do
diff --git a/spec/lib/gitlab/sidekiq_middleware_spec.rb b/spec/lib/gitlab/sidekiq_middleware_spec.rb
index 4ee9569a0cf..b632fc8bad2 100644
--- a/spec/lib/gitlab/sidekiq_middleware_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware_spec.rb
@@ -100,7 +100,7 @@ RSpec.describe Gitlab::SidekiqMiddleware do
"subject",
"body"
],
- "_aj_symbol_keys" => ["args"]
+ ActiveJob::Arguments.const_get('RUBY2_KEYWORDS_KEY', false) => ["args"]
}
],
"executions" => 0,
diff --git a/spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb b/spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb
index df949154d4c..a4d8e3957cf 100644
--- a/spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb
+++ b/spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb
@@ -3,15 +3,20 @@
require 'spec_helper'
RSpec.describe Gitlab::SlashCommands::Presenters::IssueMove do
- let_it_be(:admin) { create(:admin) }
+ let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project) }
let_it_be(:other_project) { create(:project) }
let_it_be(:old_issue, reload: true) { create(:issue, project: project) }
- let(:new_issue) { Issues::MoveService.new(project, admin).execute(old_issue, other_project) }
+ let(:new_issue) { Issues::MoveService.new(project, user).execute(old_issue, other_project) }
let(:attachment) { subject[:attachments].first }
subject { described_class.new(new_issue).present(old_issue) }
+ before do
+ project.add_developer(user)
+ other_project.add_developer(user)
+ end
+
it { is_expected.to be_a(Hash) }
it 'shows the new issue' do
diff --git a/spec/lib/gitlab/template/gitlab_ci_syntax_yml_template_spec.rb b/spec/lib/gitlab/template/gitlab_ci_syntax_yml_template_spec.rb
new file mode 100644
index 00000000000..d1024019a9f
--- /dev/null
+++ b/spec/lib/gitlab/template/gitlab_ci_syntax_yml_template_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Template::GitlabCiSyntaxYmlTemplate do
+ subject { described_class }
+
+ describe '#content' do
+ it 'loads the full file' do
+ template = subject.new(Rails.root.join('lib/gitlab/ci/syntax_templates/Artifacts example.gitlab-ci.yml'))
+
+ expect(template.content).to start_with('#')
+ end
+ end
+
+ it_behaves_like 'file template shared examples', 'Artifacts example', '.gitlab-ci.yml'
+end
diff --git a/spec/lib/gitlab/throttle_spec.rb b/spec/lib/gitlab/throttle_spec.rb
index 7462b2e1c38..50d723193ac 100644
--- a/spec/lib/gitlab/throttle_spec.rb
+++ b/spec/lib/gitlab/throttle_spec.rb
@@ -30,4 +30,32 @@ RSpec.describe Gitlab::Throttle do
end
end
end
+
+ describe '.rate_limiting_response_text' do
+ subject { described_class.rate_limiting_response_text }
+
+ context 'when the setting is not present' do
+ before do
+ stub_application_setting(rate_limiting_response_text: '')
+ end
+
+ it 'returns the default value with a trailing newline' do
+ expect(subject).to eq(described_class::DEFAULT_RATE_LIMITING_RESPONSE_TEXT + "\n")
+ end
+ end
+
+ context 'when the setting is present' do
+ let(:response_text) do
+ 'Rate limit exceeded; see https://docs.gitlab.com/ee/user/gitlab_com/#gitlabcom-specific-rate-limits for more details'
+ end
+
+ before do
+ stub_application_setting(rate_limiting_response_text: response_text)
+ end
+
+ it 'returns the configured value with a trailing newline' do
+ expect(subject).to eq(response_text + "\n")
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/tracking/standard_context_spec.rb b/spec/lib/gitlab/tracking/standard_context_spec.rb
new file mode 100644
index 00000000000..acf7aeb303a
--- /dev/null
+++ b/spec/lib/gitlab/tracking/standard_context_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Tracking::StandardContext do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:namespace) { create(:namespace) }
+
+ let(:snowplow_context) { subject.to_context }
+
+ describe '#to_context' do
+ context 'with no arguments' do
+ it 'creates a Snowplow context with no data' do
+ snowplow_context.to_json[:data].each do |_, v|
+ expect(v).to be_nil
+ end
+ end
+ end
+
+ context 'with extra data' do
+ subject { described_class.new(foo: 'bar') }
+
+ it 'creates a Snowplow context with the given data' do
+ expect(snowplow_context.to_json.dig(:data, :foo)).to eq('bar')
+ end
+ end
+
+ context 'with namespace' do
+ subject { described_class.new(namespace: namespace) }
+
+ it 'creates a Snowplow context using the given data' do
+ expect(snowplow_context.to_json.dig(:data, :namespace_id)).to eq(namespace.id)
+ expect(snowplow_context.to_json.dig(:data, :project_id)).to be_nil
+ end
+ end
+
+ context 'with project' do
+ subject { described_class.new(project: project) }
+
+ it 'creates a Snowplow context using the given data' do
+ expect(snowplow_context.to_json.dig(:data, :namespace_id)).to eq(project.namespace.id)
+ expect(snowplow_context.to_json.dig(:data, :project_id)).to eq(project.id)
+ end
+ end
+
+ context 'with project and namespace' do
+ subject { described_class.new(namespace: namespace, project: project) }
+
+ it 'creates a Snowplow context using the given data' do
+ expect(snowplow_context.to_json.dig(:data, :namespace_id)).to eq(namespace.id)
+ expect(snowplow_context.to_json.dig(:data, :project_id)).to eq(project.id)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index 57882de0974..8f1fd49f4c5 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -41,21 +41,42 @@ RSpec.describe Gitlab::Tracking do
allow_any_instance_of(Gitlab::Tracking::Destinations::ProductAnalytics).to receive(:event)
end
- it 'delegates to snowplow destination' do
- expect_any_instance_of(Gitlab::Tracking::Destinations::Snowplow)
- .to receive(:event)
- .with('category', 'action', label: 'label', property: 'property', value: 1.5, context: nil)
+ shared_examples 'delegates to destination' do |klass|
+ context 'with standard context' do
+ it "delegates to #{klass} destination" do
+ expect_any_instance_of(klass).to receive(:event) do |_, category, action, args|
+ expect(category).to eq('category')
+ expect(action).to eq('action')
+ expect(args[:label]).to eq('label')
+ expect(args[:property]).to eq('property')
+ expect(args[:value]).to eq(1.5)
+ expect(args[:context].length).to eq(1)
+ expect(args[:context].first.to_json[:schema]).to eq(Gitlab::Tracking::StandardContext::GITLAB_STANDARD_SCHEMA_URL)
+ expect(args[:context].first.to_json[:data]).to include(foo: 'bar')
+ end
- described_class.event('category', 'action', label: 'label', property: 'property', value: 1.5)
- end
+ described_class.event('category', 'action', label: 'label', property: 'property', value: 1.5,
+ standard_context: Gitlab::Tracking::StandardContext.new(foo: 'bar'))
+ end
+ end
- it 'delegates to ProductAnalytics destination' do
- expect_any_instance_of(Gitlab::Tracking::Destinations::ProductAnalytics)
- .to receive(:event)
- .with('category', 'action', label: 'label', property: 'property', value: 1.5, context: nil)
+ context 'without standard context' do
+ it "delegates to #{klass} destination" do
+ expect_any_instance_of(klass).to receive(:event) do |_, category, action, args|
+ expect(category).to eq('category')
+ expect(action).to eq('action')
+ expect(args[:label]).to eq('label')
+ expect(args[:property]).to eq('property')
+ expect(args[:value]).to eq(1.5)
+ end
- described_class.event('category', 'action', label: 'label', property: 'property', value: 1.5)
+ described_class.event('category', 'action', label: 'label', property: 'property', value: 1.5)
+ end
+ end
end
+
+ include_examples 'delegates to destination', Gitlab::Tracking::Destinations::Snowplow
+ include_examples 'delegates to destination', Gitlab::Tracking::Destinations::ProductAnalytics
end
describe '.self_describing_event' do
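Note: as the reworked examples above show, callers can now pass a standard context alongside the usual event attributes. A minimal usage sketch based only on the calls exercised in these specs (the category/action strings and extra data are placeholders):

  context = Gitlab::Tracking::StandardContext.new(namespace: namespace, project: project)
  Gitlab::Tracking.event('category', 'action', label: 'label', value: 1.5, standard_context: context)
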
diff --git a/spec/lib/gitlab/url_builder_spec.rb b/spec/lib/gitlab/url_builder_spec.rb
index c892f1f0410..6d055fe3643 100644
--- a/spec/lib/gitlab/url_builder_spec.rb
+++ b/spec/lib/gitlab/url_builder_spec.rb
@@ -18,6 +18,8 @@ RSpec.describe Gitlab::UrlBuilder do
where(:factory, :path_generator) do
:project | ->(project) { "/#{project.full_path}" }
+ :board | ->(board) { "/#{board.project.full_path}/-/boards/#{board.id}" }
+ :group_board | ->(board) { "/groups/#{board.group.full_path}/-/boards/#{board.id}" }
:commit | ->(commit) { "/#{commit.project.full_path}/-/commit/#{commit.id}" }
:issue | ->(issue) { "/#{issue.project.full_path}/-/issues/#{issue.iid}" }
:merge_request | ->(merge_request) { "/#{merge_request.project.full_path}/-/merge_requests/#{merge_request.iid}" }
diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb
new file mode 100644
index 00000000000..e101f837324
--- /dev/null
+++ b/spec/lib/gitlab/usage/metric_definition_spec.rb
@@ -0,0 +1,123 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::MetricDefinition do
+ let(:attributes) do
+ {
+ name: 'uuid',
+ description: 'GitLab instance unique identifier',
+ value_type: 'string',
+ product_category: 'collection',
+ stage: 'growth',
+ status: 'data_available',
+ default_generation: 'generation_1',
+ full_path: {
+ generation_1: 'uuid',
+ generation_2: 'license.uuid'
+ },
+ group: 'group::product analytics',
+ time_frame: 'none',
+ data_source: 'database',
+ distribution: %w(ee ce),
+ tier: %w(free starter premium ultimate bronze silver gold)
+ }
+ end
+
+ let(:path) { File.join('metrics', 'uuid.yml') }
+ let(:definition) { described_class.new(path, attributes) }
+ let(:yaml_content) { attributes.deep_stringify_keys.to_yaml }
+
+ it 'has all definitions valid' do
+ expect { described_class.definitions }.not_to raise_error(Gitlab::Usage::Metric::InvalidMetricError)
+ end
+
+ describe '#key' do
+ subject { definition.key }
+
+ it 'returns the metric name as the key' do
+ is_expected.to eq('uuid')
+ end
+ end
+
+ describe '#validate' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:attribute, :value) do
+ :name | nil
+ :description | nil
+ :value_type | nil
+ :value_type | 'test'
+ :status | nil
+ :default_generation | nil
+ :group | nil
+ :time_frame | nil
+ :time_frame | '29d'
+ :data_source | 'other'
+ :data_source | nil
+ :distribution | nil
+ :distribution | 'test'
+ :tier | %w(test ee)
+ end
+
+ with_them do
+ before do
+ attributes[attribute] = value
+ end
+
+ it 'raises an exception' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::Metric::InvalidMetricError))
+
+ described_class.new(path, attributes).validate!
+ end
+ end
+ end
+
+ describe '.load_all!' do
+ let(:metric1) { Dir.mktmpdir('metric1') }
+ let(:metric2) { Dir.mktmpdir('metric2') }
+ let(:definitions) { {} }
+
+ before do
+ allow(described_class).to receive(:paths).and_return(
+ [
+ File.join(metric1, '**', '*.yml'),
+ File.join(metric2, '**', '*.yml')
+ ]
+ )
+ end
+
+ subject { described_class.send(:load_all!) }
+
+ it 'has an empty list when there are no definition files' do
+ is_expected.to be_empty
+ end
+
+ it 'has one metric when there is one file' do
+ write_metric(metric1, path, yaml_content)
+
+ is_expected.to be_one
+ end
+
+ it 'raises an exception when the same metric is defined multiple times' do
+ write_metric(metric1, path, yaml_content)
+ write_metric(metric2, path, yaml_content)
+
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).with(instance_of(Gitlab::Usage::Metric::InvalidMetricError))
+
+ subject
+ end
+
+ after do
+ FileUtils.rm_rf(metric1)
+ FileUtils.rm_rf(metric2)
+ end
+
+ def write_metric(metric, path, content)
+ path = File.join(metric, path)
+ dir = File.dirname(path)
+ FileUtils.mkdir_p(dir)
+ File.write(path, content)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metric_spec.rb b/spec/lib/gitlab/usage/metric_spec.rb
new file mode 100644
index 00000000000..40671d980d6
--- /dev/null
+++ b/spec/lib/gitlab/usage/metric_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metric do
+ describe '#definition' do
+ it 'returns generation_1 metric definition' do
+ expect(described_class.new(default_generation_path: 'uuid').definition).to be_an(Gitlab::Usage::MetricDefinition)
+ end
+ end
+
+ describe '#unflatten_default_path' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:default_generation_path, :value, :expected_hash) do
+ 'uuid' | nil | { uuid: nil }
+ 'uuid' | '1111' | { uuid: '1111' }
+ 'counts.issues' | nil | { counts: { issues: nil } }
+ 'counts.issues' | 100 | { counts: { issues: 100 } }
+ 'usage_activity_by_stage.verify.ci_builds' | 100 | { usage_activity_by_stage: { verify: { ci_builds: 100 } } }
+ end
+
+ with_them do
+ subject { described_class.new(default_generation_path: default_generation_path, value: value).unflatten_default_path }
+
+ it { is_expected.to eq(expected_hash) }
+ end
+ end
+end
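Note: the table above pairs a dot-separated default_generation_path with the nested hash it should expand to. A minimal sketch of that unflattening, written for illustration and not taken from the class itself:

  # Builds { counts: { issues: 100 } } from 'counts.issues' and 100.
  def unflatten(path, value)
    path.split('.').reverse.inject(value) { |acc, key| { key.to_sym => acc } }
  end

  unflatten('usage_activity_by_stage.verify.ci_builds', 100)
  # => { usage_activity_by_stage: { verify: { ci_builds: 100 } } }
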
diff --git a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
new file mode 100644
index 00000000000..ba7bfe47bc9
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageDataCounters::CiTemplateUniqueCounter do
+ let(:project_id) { 1 }
+
+ describe '.track_unique_project_event' do
+ described_class::TEMPLATE_TO_EVENT.keys.each do |template|
+ context "when given template #{template}" do
+ it_behaves_like 'tracking unique hll events', :usage_data_track_ci_templates_unique_projects do
+ subject(:request) { described_class.track_unique_project_event(project_id: project_id, template: template) }
+
+ let(:target_id) { "p_ci_templates_#{described_class::TEMPLATE_TO_EVENT[template]}" }
+ let(:expected_type) { instance_of(Integer) }
+ end
+ end
+ end
+
+ it 'does not track templates outside of TEMPLATE_TO_EVENT' do
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to(
+ receive(:track_event)
+ )
+ Dir.glob(File.join('lib', 'gitlab', 'ci', 'templates', '**'), base: Rails.root) do |template|
+ next if described_class::TEMPLATE_TO_EVENT.key?(template)
+
+ described_class.track_unique_project_event(project_id: 1, template: template)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index b6a60c09d3d..b8eddc0ca7f 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -24,6 +24,8 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
describe '.categories' do
it 'gets all unique category names' do
expect(described_class.categories).to contain_exactly(
+ 'deploy_token_packages',
+ 'user_packages',
'compliance',
'analytics',
'ide_edit',
@@ -34,18 +36,10 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
'testing',
'issues_edit',
'ci_secrets_management',
- 'maven_packages',
- 'npm_packages',
- 'conan_packages',
- 'nuget_packages',
- 'pypi_packages',
- 'composer_packages',
- 'generic_packages',
- 'golang_packages',
- 'debian_packages',
- 'container_packages',
- 'tag_packages',
- 'snippets'
+ 'snippets',
+ 'code_review',
+ 'terraform',
+ 'ci_templates'
)
end
end
@@ -95,7 +89,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
it 'does not track the event' do
stub_application_setting(usage_ping_enabled: false)
- described_class.track_event(entity1, weekly_event, Date.current)
+ described_class.track_event(weekly_event, values: entity1, time: Date.current)
expect(Gitlab::Redis::HLL).not_to receive(:add)
end
@@ -109,20 +103,27 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
it 'tracks event when using symbol' do
expect(Gitlab::Redis::HLL).to receive(:add)
- described_class.track_event(entity1, :g_analytics_contribution)
+ described_class.track_event(:g_analytics_contribution, values: entity1)
+ end
+
+ it 'tracks events with multiple values' do
+ values = [entity1, entity2]
+ expect(Gitlab::Redis::HLL).to receive(:add).with(key: /g_{analytics}_contribution/, value: values, expiry: 84.days)
+
+ described_class.track_event(:g_analytics_contribution, values: values)
end
it "raise error if metrics don't have same aggregation" do
- expect { described_class.track_event(entity1, different_aggregation, Date.current) }.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownAggregation)
+ expect { described_class.track_event(different_aggregation, values: entity1, time: Date.current) }.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownAggregation)
end
it 'raise error if metrics of unknown aggregation' do
- expect { described_class.track_event(entity1, 'unknown', Date.current) }.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
+ expect { described_class.track_event('unknown', values: entity1, time: Date.current) }.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
end
context 'for weekly events' do
it 'sets the keys in Redis to expire automatically after the given expiry time' do
- described_class.track_event(entity1, "g_analytics_contribution")
+ described_class.track_event("g_analytics_contribution", values: entity1)
Gitlab::Redis::SharedState.with do |redis|
keys = redis.scan_each(match: "g_{analytics}_contribution-*").to_a
@@ -135,7 +136,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
end
it 'sets the keys in Redis to expire automatically after 6 weeks by default' do
- described_class.track_event(entity1, "g_compliance_dashboard")
+ described_class.track_event("g_compliance_dashboard", values: entity1)
Gitlab::Redis::SharedState.with do |redis|
keys = redis.scan_each(match: "g_{compliance}_dashboard-*").to_a
@@ -150,7 +151,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
context 'for daily events' do
it 'sets the keys in Redis to expire after the given expiry time' do
- described_class.track_event(entity1, "g_analytics_search")
+ described_class.track_event("g_analytics_search", values: entity1)
Gitlab::Redis::SharedState.with do |redis|
keys = redis.scan_each(match: "*-g_{analytics}_search").to_a
@@ -163,7 +164,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
end
it 'sets the keys in Redis to expire after 29 days by default' do
- described_class.track_event(entity1, "no_slot")
+ described_class.track_event("no_slot", values: entity1)
Gitlab::Redis::SharedState.with do |redis|
keys = redis.scan_each(match: "*-{no_slot}").to_a
@@ -180,12 +181,19 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
describe '.track_event_in_context' do
context 'with valid contex' do
- it 'increments conext event counte' do
+ it 'increments context event counter' do
expect(Gitlab::Redis::HLL).to receive(:add) do |kwargs|
expect(kwargs[:key]).to match(/^#{default_context}\_.*/)
end
- described_class.track_event_in_context(entity1, context_event, default_context)
+ described_class.track_event_in_context(context_event, values: entity1, context: default_context)
+ end
+
+ it 'tracks events with multiple values' do
+ values = [entity1, entity2]
+ expect(Gitlab::Redis::HLL).to receive(:add).with(key: /g_{analytics}_contribution/, value: values, expiry: 84.days)
+
+ described_class.track_event_in_context(:g_analytics_contribution, values: values, context: default_context)
end
end
@@ -193,7 +201,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
it 'does not increment a counter' do
expect(Gitlab::Redis::HLL).not_to receive(:add)
- described_class.track_event_in_context(entity1, context_event, '')
+ described_class.track_event_in_context(context_event, values: entity1, context: '')
end
end
@@ -201,7 +209,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
it 'does not increment a counter' do
expect(Gitlab::Redis::HLL).not_to receive(:add)
- described_class.track_event_in_context(entity1, context_event, invalid_context)
+ described_class.track_event_in_context(context_event, values: entity1, context: invalid_context)
end
end
end
@@ -209,35 +217,35 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
describe '.unique_events' do
before do
# events in current week, should not be counted as week is not complete
- described_class.track_event(entity1, weekly_event, Date.current)
- described_class.track_event(entity2, weekly_event, Date.current)
+ described_class.track_event(weekly_event, values: entity1, time: Date.current)
+ described_class.track_event(weekly_event, values: entity2, time: Date.current)
# Events last week
- described_class.track_event(entity1, weekly_event, 2.days.ago)
- described_class.track_event(entity1, weekly_event, 2.days.ago)
- described_class.track_event(entity1, no_slot, 2.days.ago)
+ described_class.track_event(weekly_event, values: entity1, time: 2.days.ago)
+ described_class.track_event(weekly_event, values: entity1, time: 2.days.ago)
+ described_class.track_event(no_slot, values: entity1, time: 2.days.ago)
# Events 2 weeks ago
- described_class.track_event(entity1, weekly_event, 2.weeks.ago)
+ described_class.track_event(weekly_event, values: entity1, time: 2.weeks.ago)
# Events 4 weeks ago
- described_class.track_event(entity3, weekly_event, 4.weeks.ago)
- described_class.track_event(entity4, weekly_event, 29.days.ago)
+ described_class.track_event(weekly_event, values: entity3, time: 4.weeks.ago)
+ described_class.track_event(weekly_event, values: entity4, time: 29.days.ago)
# events in current day should be counted in daily aggregation
- described_class.track_event(entity1, daily_event, Date.current)
- described_class.track_event(entity2, daily_event, Date.current)
+ described_class.track_event(daily_event, values: entity1, time: Date.current)
+ described_class.track_event(daily_event, values: entity2, time: Date.current)
# Events last week
- described_class.track_event(entity1, daily_event, 2.days.ago)
- described_class.track_event(entity1, daily_event, 2.days.ago)
+ described_class.track_event(daily_event, values: entity1, time: 2.days.ago)
+ described_class.track_event(daily_event, values: entity1, time: 2.days.ago)
# Events 2 weeks ago
- described_class.track_event(entity1, daily_event, 14.days.ago)
+ described_class.track_event(daily_event, values: entity1, time: 14.days.ago)
# Events 4 weeks ago
- described_class.track_event(entity3, daily_event, 28.days.ago)
- described_class.track_event(entity4, daily_event, 29.days.ago)
+ described_class.track_event(daily_event, values: entity3, time: 28.days.ago)
+ described_class.track_event(daily_event, values: entity4, time: 29.days.ago)
end
it 'raise error if metrics are not in the same slot' do
@@ -345,10 +353,10 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
allow(described_class).to receive(:known_events).and_return(known_events)
allow(described_class).to receive(:categories).and_return(%w(category1 category2))
- described_class.track_event_in_context([entity1, entity3], 'event_name_1', default_context, 2.days.ago)
- described_class.track_event_in_context(entity3, 'event_name_1', default_context, 2.days.ago)
- described_class.track_event_in_context(entity3, 'event_name_1', invalid_context, 2.days.ago)
- described_class.track_event_in_context([entity1, entity2], 'event_name_2', '', 2.weeks.ago)
+ described_class.track_event_in_context('event_name_1', values: [entity1, entity3], context: default_context, time: 2.days.ago)
+ described_class.track_event_in_context('event_name_1', values: entity3, context: default_context, time: 2.days.ago)
+ described_class.track_event_in_context('event_name_1', values: entity3, context: invalid_context, time: 2.days.ago)
+ described_class.track_event_in_context('event_name_2', values: [entity1, entity2], context: '', time: 2.weeks.ago)
end
subject(:unique_events) { described_class.unique_events(event_names: event_names, start_date: 4.weeks.ago, end_date: Date.current, context: context) }
@@ -386,13 +394,13 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
allow(described_class).to receive(:known_events).and_return(known_events)
allow(described_class).to receive(:categories).and_return(%w(category1 category2))
- described_class.track_event(entity1, 'event1_slot', 2.days.ago)
- described_class.track_event(entity2, 'event2_slot', 2.days.ago)
- described_class.track_event(entity3, 'event2_slot', 2.weeks.ago)
+ described_class.track_event('event1_slot', values: entity1, time: 2.days.ago)
+ described_class.track_event('event2_slot', values: entity2, time: 2.days.ago)
+ described_class.track_event('event2_slot', values: entity3, time: 2.weeks.ago)
# events in different slots
- described_class.track_event(entity2, 'event3', 2.days.ago)
- described_class.track_event(entity2, 'event4', 2.days.ago)
+ described_class.track_event('event3', values: entity2, time: 2.days.ago)
+ described_class.track_event('event4', values: entity2, time: 2.days.ago)
end
it 'returns the number of unique events for all known events' do
@@ -516,23 +524,23 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
subject(:aggregated_metrics_data) { described_class.aggregated_metrics_weekly_data }
before do
- described_class.track_event(entity1, 'event1_slot', 2.days.ago)
- described_class.track_event(entity2, 'event1_slot', 2.days.ago)
- described_class.track_event(entity3, 'event1_slot', 2.days.ago)
- described_class.track_event(entity1, 'event2_slot', 2.days.ago)
- described_class.track_event(entity2, 'event2_slot', 3.days.ago)
- described_class.track_event(entity3, 'event2_slot', 3.days.ago)
- described_class.track_event(entity1, 'event3_slot', 3.days.ago)
- described_class.track_event(entity2, 'event3_slot', 3.days.ago)
- described_class.track_event(entity2, 'event5_slot', 3.days.ago)
+ described_class.track_event('event1_slot', values: entity1, time: 2.days.ago)
+ described_class.track_event('event1_slot', values: entity2, time: 2.days.ago)
+ described_class.track_event('event1_slot', values: entity3, time: 2.days.ago)
+ described_class.track_event('event2_slot', values: entity1, time: 2.days.ago)
+ described_class.track_event('event2_slot', values: entity2, time: 3.days.ago)
+ described_class.track_event('event2_slot', values: entity3, time: 3.days.ago)
+ described_class.track_event('event3_slot', values: entity1, time: 3.days.ago)
+ described_class.track_event('event3_slot', values: entity2, time: 3.days.ago)
+ described_class.track_event('event5_slot', values: entity2, time: 3.days.ago)
# events out of time scope
- described_class.track_event(entity3, 'event2_slot', 8.days.ago)
+ described_class.track_event('event2_slot', values: entity3, time: 8.days.ago)
# events in different slots
- described_class.track_event(entity1, 'event4', 2.days.ago)
- described_class.track_event(entity2, 'event4', 2.days.ago)
- described_class.track_event(entity4, 'event4', 2.days.ago)
+ described_class.track_event('event4', values: entity1, time: 2.days.ago)
+ described_class.track_event('event4', values: entity2, time: 2.days.ago)
+ described_class.track_event('event4', values: entity4, time: 2.days.ago)
end
it_behaves_like 'aggregated_metrics_data'
@@ -543,23 +551,23 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
it_behaves_like 'aggregated_metrics_data' do
before do
- described_class.track_event(entity1, 'event1_slot', 2.days.ago)
- described_class.track_event(entity2, 'event1_slot', 2.days.ago)
- described_class.track_event(entity3, 'event1_slot', 2.days.ago)
- described_class.track_event(entity1, 'event2_slot', 2.days.ago)
- described_class.track_event(entity2, 'event2_slot', 3.days.ago)
- described_class.track_event(entity3, 'event2_slot', 3.days.ago)
- described_class.track_event(entity1, 'event3_slot', 3.days.ago)
- described_class.track_event(entity2, 'event3_slot', 10.days.ago)
- described_class.track_event(entity2, 'event5_slot', 4.weeks.ago.advance(days: 1))
+ described_class.track_event('event1_slot', values: entity1, time: 2.days.ago)
+ described_class.track_event('event1_slot', values: entity2, time: 2.days.ago)
+ described_class.track_event('event1_slot', values: entity3, time: 2.days.ago)
+ described_class.track_event('event2_slot', values: entity1, time: 2.days.ago)
+ described_class.track_event('event2_slot', values: entity2, time: 3.days.ago)
+ described_class.track_event('event2_slot', values: entity3, time: 3.days.ago)
+ described_class.track_event('event3_slot', values: entity1, time: 3.days.ago)
+ described_class.track_event('event3_slot', values: entity2, time: 10.days.ago)
+ described_class.track_event('event5_slot', values: entity2, time: 4.weeks.ago.advance(days: 1))
# events out of time scope
- described_class.track_event(entity1, 'event5_slot', 4.weeks.ago.advance(days: -1))
+ described_class.track_event('event5_slot', values: entity1, time: 4.weeks.ago.advance(days: -1))
# events in different slots
- described_class.track_event(entity1, 'event4', 2.days.ago)
- described_class.track_event(entity2, 'event4', 2.days.ago)
- described_class.track_event(entity4, 'event4', 2.days.ago)
+ described_class.track_event('event4', values: entity1, time: 2.days.ago)
+ described_class.track_event('event4', values: entity2, time: 2.days.ago)
+ described_class.track_event('event4', values: entity4, time: 2.days.ago)
end
end
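Note: the hunks above migrate every call to the keyword-argument form of track_event and track_event_in_context, where the event name comes first and values accepts either a single value or an array. For reference, the shapes exercised by these specs (entity ids, event names, and times are placeholders):

  Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:g_analytics_contribution, values: entity1)
  Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event1_slot', values: [entity1, entity2], time: 2.days.ago)
  Gitlab::UsageDataCounters::HLLRedisCounter.track_event_in_context('event_name_1', values: entity1, context: default_context)
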
diff --git a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
new file mode 100644
index 00000000000..c7b208cfb31
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
@@ -0,0 +1,151 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter, :clean_gitlab_redis_shared_state do
+ let(:merge_request) { build(:merge_request, id: 1) }
+ let(:user) { build(:user, id: 1) }
+ let(:note) { build(:note, author: user) }
+
+ shared_examples_for 'a tracked merge request unique event' do
+ specify do
+ expect { 3.times { subject } }
+ .to change {
+ Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(
+ event_names: action,
+ start_date: 2.weeks.ago,
+ end_date: 2.weeks.from_now
+ )
+ }
+ .by(1)
+ end
+ end
+
+ describe '.track_mr_diffs_action' do
+ subject { described_class.track_mr_diffs_action(merge_request: merge_request) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_DIFFS_ACTION }
+ end
+ end
+
+ describe '.track_mr_diffs_single_file_action' do
+ subject { described_class.track_mr_diffs_single_file_action(merge_request: merge_request, user: user) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_DIFFS_SINGLE_FILE_ACTION }
+ end
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_DIFFS_USER_SINGLE_FILE_ACTION }
+ end
+ end
+
+ describe '.track_create_mr_action' do
+ subject { described_class.track_create_mr_action(user: user) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_CREATE_ACTION }
+ end
+ end
+
+ describe '.track_close_mr_action' do
+ subject { described_class.track_close_mr_action(user: user) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_CLOSE_ACTION }
+ end
+ end
+
+ describe '.track_merge_mr_action' do
+ subject { described_class.track_merge_mr_action(user: user) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_MERGE_ACTION }
+ end
+ end
+
+ describe '.track_reopen_mr_action' do
+ subject { described_class.track_reopen_mr_action(user: user) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_REOPEN_ACTION }
+ end
+ end
+
+ describe '.track_create_comment_action' do
+ subject { described_class.track_create_comment_action(note: note) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_CREATE_COMMENT_ACTION }
+ end
+
+ context 'when the note is multiline diff note' do
+ let(:note) { build(:diff_note_on_merge_request, author: user) }
+
+ before do
+ allow(note).to receive(:multiline?).and_return(true)
+ end
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_CREATE_MULTILINE_COMMENT_ACTION }
+ end
+ end
+ end
+
+ describe '.track_edit_comment_action' do
+ subject { described_class.track_edit_comment_action(note: note) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_EDIT_COMMENT_ACTION }
+ end
+
+ context 'when the note is multiline diff note' do
+ let(:note) { build(:diff_note_on_merge_request, author: user) }
+
+ before do
+ allow(note).to receive(:multiline?).and_return(true)
+ end
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_EDIT_MULTILINE_COMMENT_ACTION }
+ end
+ end
+ end
+
+ describe '.track_remove_comment_action' do
+ subject { described_class.track_remove_comment_action(note: note) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_REMOVE_COMMENT_ACTION }
+ end
+
+ context 'when the note is multiline diff note' do
+ let(:note) { build(:diff_note_on_merge_request, author: user) }
+
+ before do
+ allow(note).to receive(:multiline?).and_return(true)
+ end
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_REMOVE_MULTILINE_COMMENT_ACTION }
+ end
+ end
+ end
+
+ describe '.track_create_review_note_action' do
+ subject { described_class.track_create_review_note_action(user: user) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_CREATE_REVIEW_NOTE_ACTION }
+ end
+ end
+
+ describe '.track_publish_review_action' do
+ subject { described_class.track_publish_review_action(user: user) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_PUBLISH_REVIEW_ACTION }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_counters/guest_package_event_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb
index d018100b041..7b5efb11034 100644
--- a/spec/lib/gitlab/usage_data_counters/guest_package_event_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::UsageDataCounters::GuestPackageEventCounter, :clean_gitlab_redis_shared_state do
+RSpec.describe Gitlab::UsageDataCounters::PackageEventCounter, :clean_gitlab_redis_shared_state do
shared_examples_for 'usage counter with totals' do |counter|
it 'increments counter and returns total count' do
expect(described_class.read(counter)).to eq(0)
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::UsageDataCounters::GuestPackageEventCounter, :clean_gitla
end
it 'includes the right events' do
- expect(described_class::KNOWN_EVENTS.size).to eq 33
+ expect(described_class::KNOWN_EVENTS.size).to eq 45
end
described_class::KNOWN_EVENTS.each do |event|
@@ -24,8 +24,8 @@ RSpec.describe Gitlab::UsageDataCounters::GuestPackageEventCounter, :clean_gitla
describe '.fetch_supported_event' do
subject { described_class.fetch_supported_event(event_name) }
- let(:event_name) { 'package_guest_i_package_composer_guest_push' }
+ let(:event_name) { 'package_events_i_package_composer_push_package' }
- it { is_expected.to eq 'i_package_composer_guest_push' }
+ it { is_expected.to eq 'i_package_composer_push_package' }
end
end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 4d12bb6bd8c..fd02521622c 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -680,7 +680,9 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
it { is_expected.to include(:kubernetes_agent_gitops_sync) }
it { is_expected.to include(:static_site_editor_views) }
- it { is_expected.to include(:package_guest_i_package_composer_guest_pull) }
+ it { is_expected.to include(:package_events_i_package_pull_package) }
+ it { is_expected.to include(:package_events_i_package_delete_package_by_user) }
+ it { is_expected.to include(:package_events_i_package_conan_push_package) }
end
describe '.usage_data_counters' do
@@ -1260,7 +1262,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
subject { described_class.redis_hll_counters }
let(:categories) { ::Gitlab::UsageDataCounters::HLLRedisCounter.categories }
- let(:ineligible_total_categories) { %w[source_code testing ci_secrets_management incident_management_alerts snippets] }
+ let(:ineligible_total_categories) { %w[source_code ci_secrets_management incident_management_alerts snippets terraform] }
it 'has all known_events' do
expect(subject).to have_key(:redis_hll_counters)
diff --git a/spec/lib/gitlab/user_access_spec.rb b/spec/lib/gitlab/user_access_spec.rb
index 748a8336a25..97fff030906 100644
--- a/spec/lib/gitlab/user_access_spec.rb
+++ b/spec/lib/gitlab/user_access_spec.rb
@@ -45,10 +45,20 @@ RSpec.describe Gitlab::UserAccess do
let(:empty_project) { create(:project_empty_repo) }
let(:project_access) { described_class.new(user, container: empty_project) }
- it 'returns true for admins' do
- user.update!(admin: true)
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'returns true for admins' do
+ user.update!(admin: true)
- expect(access.can_push_to_branch?('master')).to be_truthy
+ expect(access.can_push_to_branch?('master')).to be_truthy
+ end
+ end
+
+ context 'when admin mode is disabled' do
+ it 'returns false for admins' do
+ user.update!(admin: true)
+
+ expect(access.can_push_to_branch?('master')).to be_falsey
+ end
end
it 'returns true if user is maintainer' do
@@ -85,10 +95,20 @@ RSpec.describe Gitlab::UserAccess do
let(:branch) { create :protected_branch, project: project, name: "test" }
let(:not_existing_branch) { create :protected_branch, :developers_can_merge, project: project }
- it 'returns true for admins' do
- user.update!(admin: true)
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'returns true for admins' do
+ user.update!(admin: true)
- expect(access.can_push_to_branch?(branch.name)).to be_truthy
+ expect(access.can_push_to_branch?(branch.name)).to be_truthy
+ end
+ end
+
+ context 'when admin mode is disabled' do
+ it 'returns false for admins' do
+ user.update!(admin: true)
+
+ expect(access.can_push_to_branch?(branch.name)).to be_falsey
+ end
end
it 'returns true if user is a maintainer' do
diff --git a/spec/lib/gitlab/utils/usage_data_spec.rb b/spec/lib/gitlab/utils/usage_data_spec.rb
index 521d6584a20..dfc381d0ef2 100644
--- a/spec/lib/gitlab/utils/usage_data_spec.rb
+++ b/spec/lib/gitlab/utils/usage_data_spec.rb
@@ -38,32 +38,116 @@ RSpec.describe Gitlab::Utils::UsageData do
end
describe '#estimate_batch_distinct_count' do
+ let(:error_rate) { Gitlab::Database::PostgresHll::BatchDistinctCounter::ERROR_RATE } # HyperLogLog is a probabilistic algorithm that provides estimates within a given error margin
let(:relation) { double(:relation) }
+ before do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ end
+
it 'delegates counting to counter class instance' do
+ buckets = instance_double(Gitlab::Database::PostgresHll::Buckets)
+
expect_next_instance_of(Gitlab::Database::PostgresHll::BatchDistinctCounter, relation, 'column') do |instance|
- expect(instance).to receive(:estimate_distinct_count)
+ expect(instance).to receive(:execute)
.with(batch_size: nil, start: nil, finish: nil)
- .and_return(5)
+ .and_return(buckets)
end
+ expect(buckets).to receive(:estimated_distinct_count).and_return(5)
expect(described_class.estimate_batch_distinct_count(relation, 'column')).to eq(5)
end
- it 'returns default fallback value when counting fails due to database error' do
- stub_const("Gitlab::Utils::UsageData::FALLBACK", 15)
- allow(Gitlab::Database::PostgresHll::BatchDistinctCounter).to receive(:new).and_raise(ActiveRecord::StatementInvalid.new(''))
+ context 'quasi integration test for different counting parameters' do
+ # The HyperLogLog algorithm (http://algo.inria.fr/flajolet/Publications/FlFuGaMe07.pdf)
+ # used in estimate_batch_distinct_count produces probabilistic
+ # estimates of the number of unique values in a dataset, so its results
+ # are always off from the real value by some small factor. However, for a given
+ # dataset it produces a consistent, deterministic result. In the following context
+ # the analyzed sets consist of the values:
+ # build_needs set: ['1', '2', '3', '4', '5']
+ # ci_build set: ['a', 'b']
+ # With these, the current implementation is expected to consistently report
+ # the values 5.217656147118495 and 2.0809220082170614.
+ # This test suite is expected to ensure that the HyperLogLog implementation
+ # behaves consistently as other parts of the codebase change.
+ # When fine-tuning or changing the HyperLogLog algorithm implementation,
+ # run an in-depth accuracy analysis with the supplementary rake tasks
+ # currently under implementation at https://gitlab.com/gitlab-org/gitlab/-/merge_requests/51118
+ # and adjust the values used in this context accordingly.
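+ # As a rough, implementation-independent guide, a standard HyperLogLog estimator
+ # with m registers has a relative error of about 1.04 / sqrt(m); the constants
+ # actually used here are defined in Gitlab::Database::PostgresHll::BatchDistinctCounter.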
+ let_it_be(:build) { create(:ci_build, name: 'a') }
+ let_it_be(:another_build) { create(:ci_build, name: 'b') }
+
+ let(:model) { Ci::BuildNeed }
+ let(:column) { :name }
+ let(:build_needs_estimated_cardinality) { 5.217656147118495 }
+ let(:ci_builds_estimated_cardinality) { 2.0809220082170614 }
+
+ context 'different counting parameters' do
+ before_all do
+ 1.upto(3) { |i| create(:ci_build_need, name: i, build: build) }
+ 4.upto(5) { |i| create(:ci_build_need, name: i, build: another_build) }
+ end
+
+ it 'counts with symbol passed in column argument' do
+ expect(described_class.estimate_batch_distinct_count(model, column)).to eq(build_needs_estimated_cardinality)
+ end
+
+ it 'counts with string passed in column argument' do
+ expect(described_class.estimate_batch_distinct_count(model, column.to_s)).to eq(build_needs_estimated_cardinality)
+ end
+
+ it 'counts with table.column passed in column argument' do
+ expect(described_class.estimate_batch_distinct_count(model, "#{model.table_name}.#{column}")).to eq(build_needs_estimated_cardinality)
+ end
+
+ it 'counts with Arel passed in column argument' do
+ expect(described_class.estimate_batch_distinct_count(model, model.arel_table[column])).to eq(build_needs_estimated_cardinality)
+ end
+
+ it 'counts over joined relations' do
+ expect(described_class.estimate_batch_distinct_count(model.joins(:build), "ci_builds.name")).to eq(ci_builds_estimated_cardinality)
+ end
- expect(described_class.estimate_batch_distinct_count(relation)).to eq(15)
+ it 'counts with :column field with batch_size of 50K' do
+ expect(described_class.estimate_batch_distinct_count(model, column, batch_size: 50_000)).to eq(build_needs_estimated_cardinality)
+ end
+
+ it 'counts with different number of batches and aggregates total result' do
+ stub_const('Gitlab::Database::PostgresHll::BatchDistinctCounter::MIN_REQUIRED_BATCH_SIZE', 0)
+
+ [1, 2, 4, 5, 6].each { |i| expect(described_class.estimate_batch_distinct_count(model, column, batch_size: i)).to eq(build_needs_estimated_cardinality) }
+ end
+
+ it 'counts with a start and finish' do
+ expect(described_class.estimate_batch_distinct_count(model, column, start: model.minimum(:id), finish: model.maximum(:id))).to eq(build_needs_estimated_cardinality)
+ end
+ end
end
- it 'logs error and returns DISTRIBUTED_HLL_FALLBACK value when counting raises any error', :aggregate_failures do
- error = StandardError.new('')
- stub_const("Gitlab::Utils::UsageData::DISTRIBUTED_HLL_FALLBACK", 15)
- allow(Gitlab::Database::PostgresHll::BatchDistinctCounter).to receive(:new).and_raise(error)
+ describe 'error handling' do
+ before do
+ stub_const("Gitlab::Utils::UsageData::FALLBACK", 3)
+ stub_const("Gitlab::Utils::UsageData::DISTRIBUTED_HLL_FALLBACK", 4)
+ end
+
+ it 'returns fallback if counter raises WRONG_CONFIGURATION_ERROR' do
+ expect(described_class.estimate_batch_distinct_count(relation, 'id', start: 1, finish: 0)).to eq 3
+ end
+
+ it 'returns default fallback value when counting fails due to database error' do
+ allow(Gitlab::Database::PostgresHll::BatchDistinctCounter).to receive(:new).and_raise(ActiveRecord::StatementInvalid.new(''))
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).with(error)
- expect(described_class.estimate_batch_distinct_count(relation)).to eq(15)
+ expect(described_class.estimate_batch_distinct_count(relation)).to eq(3)
+ end
+
+ it 'logs error and returns DISTRIBUTED_HLL_FALLBACK value when counting raises any error', :aggregate_failures do
+ error = StandardError.new('')
+ allow(Gitlab::Database::PostgresHll::BatchDistinctCounter).to receive(:new).and_raise(error)
+
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).with(error)
+ expect(described_class.estimate_batch_distinct_count(relation)).to eq(4)
+ end
end
end
@@ -193,7 +277,7 @@ RSpec.describe Gitlab::Utils::UsageData do
context 'when Prometheus server address is available from settings' do
before do
expect(Gitlab::Prometheus::Internal).to receive(:prometheus_enabled?).and_return(true)
- expect(Gitlab::Prometheus::Internal).to receive(:server_address).and_return('prom:9090')
+ expect(Gitlab::Prometheus::Internal).to receive(:uri).and_return('http://prom:9090')
end
it_behaves_like 'try to query Prometheus with given address'
@@ -256,7 +340,7 @@ RSpec.describe Gitlab::Utils::UsageData do
end
it 'tracks redis hll event' do
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(value, event_name)
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(event_name, values: value)
described_class.track_usage_event(event_name, value)
end
diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb
index 36257a0605b..1052d4cbacc 100644
--- a/spec/lib/gitlab/utils_spec.rb
+++ b/spec/lib/gitlab/utils_spec.rb
@@ -392,6 +392,23 @@ RSpec.describe Gitlab::Utils do
end
end
+ describe ".safe_downcase!" do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:str, :result) do
+ "test".freeze | "test"
+ "Test".freeze | "test"
+ "test" | "test"
+ "Test" | "test"
+ end
+
+ with_them do
+ it "downcases the string" do
+ expect(described_class.safe_downcase!(str)).to eq(result)
+ end
+ end
+ end
+
describe '.parse_url' do
it 'returns Addressable::URI object' do
expect(described_class.parse_url('http://gitlab.com')).to be_instance_of(Addressable::URI)
diff --git a/spec/lib/gitlab/uuid_spec.rb b/spec/lib/gitlab/uuid_spec.rb
index a2e28f5a24d..44c1d30fce0 100644
--- a/spec/lib/gitlab/uuid_spec.rb
+++ b/spec/lib/gitlab/uuid_spec.rb
@@ -49,4 +49,23 @@ RSpec.describe Gitlab::UUID do
it { is_expected.to eq(production_proper_uuid) }
end
end
+
+ describe '.v5?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:test_string, :is_uuid_v5) do
+ 'not even a uuid' | false
+ 'this-seems-like-a-uuid' | false
+ 'thislook-more-5lik-eava-liduuidbutno' | false
+ '9f470438-db0f-37b7-9ca9-1d47104c339a' | false
+ '9f470438-db0f-47b7-9ca9-1d47104c339a' | false
+ '9f470438-db0f-57b7-9ca9-1d47104c339a' | true
+ end
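+ # Per RFC 4122, the UUID version is the first hex digit of the third group, so only
+ # the '...-57b7-...' value above is a version-5 UUID.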
+
+ with_them do
+ subject { described_class.v5?(test_string) }
+
+ it { is_expected.to be(is_uuid_v5) }
+ end
+ end
end
diff --git a/spec/lib/gitlab/visibility_level_spec.rb b/spec/lib/gitlab/visibility_level_spec.rb
index 2ac343cd1e7..63c31c82d59 100644
--- a/spec/lib/gitlab/visibility_level_spec.rb
+++ b/spec/lib/gitlab/visibility_level_spec.rb
@@ -22,13 +22,25 @@ RSpec.describe Gitlab::VisibilityLevel do
end
describe '.levels_for_user' do
- it 'returns all levels for an admin' do
- user = build(:user, :admin)
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'returns all levels for an admin' do
+ user = build(:user, :admin)
+
+ expect(described_class.levels_for_user(user))
+ .to eq([Gitlab::VisibilityLevel::PRIVATE,
+ Gitlab::VisibilityLevel::INTERNAL,
+ Gitlab::VisibilityLevel::PUBLIC])
+ end
+ end
- expect(described_class.levels_for_user(user))
- .to eq([Gitlab::VisibilityLevel::PRIVATE,
- Gitlab::VisibilityLevel::INTERNAL,
- Gitlab::VisibilityLevel::PUBLIC])
+ context 'when admin mode is disabled' do
+ it 'returns INTERNAL and PUBLIC for an admin' do
+ user = build(:user, :admin)
+
+ expect(described_class.levels_for_user(user))
+ .to eq([Gitlab::VisibilityLevel::INTERNAL,
+ Gitlab::VisibilityLevel::PUBLIC])
+ end
end
it 'returns INTERNAL and PUBLIC for internal users' do
@@ -119,28 +131,4 @@ RSpec.describe Gitlab::VisibilityLevel do
end
end
end
-
- describe '#visibility_level_decreased?' do
- let(:project) { create(:project, :internal) }
-
- context 'when visibility level decreases' do
- before do
- project.update!(visibility_level: described_class::PRIVATE)
- end
-
- it 'returns true' do
- expect(project.visibility_level_decreased?).to be(true)
- end
- end
-
- context 'when visibility level does not decrease' do
- before do
- project.update!(visibility_level: described_class::PUBLIC)
- end
-
- it 'returns false' do
- expect(project.visibility_level_decreased?).to be(false)
- end
- end
- end
end
diff --git a/spec/lib/release_highlights/validator/entry_spec.rb b/spec/lib/release_highlights/validator/entry_spec.rb
new file mode 100644
index 00000000000..648356e63ba
--- /dev/null
+++ b/spec/lib/release_highlights/validator/entry_spec.rb
@@ -0,0 +1,87 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ReleaseHighlights::Validator::Entry do
+ subject(:entry) { described_class.new(document.root.children.first) }
+
+ let(:document) { YAML.parse(File.read(yaml_path)) }
+ let(:yaml_path) { 'spec/fixtures/whats_new/blank.yml' }
+
+ describe 'validations' do
+ before do
+ allow(entry).to receive(:value_for).and_call_original
+ end
+
+ context 'with a valid entry' do
+ let(:yaml_path) { 'spec/fixtures/whats_new/valid.yml' }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'with an invalid entry' do
+ let(:yaml_path) { 'spec/fixtures/whats_new/invalid.yml' }
+
+ it 'returns line numbers in errors' do
+ subject.valid?
+
+ expect(entry.errors[:packages].first).to match('(line 6)')
+ end
+ end
+
+ context 'with a blank entry' do
+ it 'validates presence of title, body, stage and packages' do
+ subject.valid?
+
+ expect(subject.errors[:title]).not_to be_empty
+ expect(subject.errors[:body]).not_to be_empty
+ expect(subject.errors[:stage]).not_to be_empty
+ expect(subject.errors[:packages]).not_to be_empty
+ end
+
+ it 'validates boolean value of "self-managed" and "gitlab-com"' do
+ allow(entry).to receive(:value_for).with('self-managed').and_return('nope')
+ allow(entry).to receive(:value_for).with('gitlab-com').and_return('yerp')
+
+ subject.valid?
+
+ expect(subject.errors[:'self-managed']).to include(/must be a boolean/)
+ expect(subject.errors[:'gitlab-com']).to include(/must be a boolean/)
+ end
+
+ it 'validates URI of "url" and "image_url"' do
+ allow(entry).to receive(:value_for).with('image_url').and_return('imgur/gitlab_feature.gif')
+ allow(entry).to receive(:value_for).with('url').and_return('gitlab/newest_release.html')
+
+ subject.valid?
+
+ expect(subject.errors[:url]).to include(/must be a URL/)
+ expect(subject.errors[:image_url]).to include(/must be a URL/)
+ end
+
+ it 'validates release is numerical' do
+ allow(entry).to receive(:value_for).with('release').and_return('one')
+
+ subject.valid?
+
+ expect(subject.errors[:release]).to include(/is not a number/)
+ end
+
+ it 'validates published_at is a date' do
+ allow(entry).to receive(:value_for).with('published_at').and_return('christmas day')
+
+ subject.valid?
+
+ expect(subject.errors[:published_at]).to include(/must be valid Date/)
+ end
+
+ it 'validates packages are included in list' do
+ allow(entry).to receive(:value_for).with('packages').and_return(['ALL'])
+
+ subject.valid?
+
+ expect(subject.errors[:packages].first).to include("must be one of", "Core", "Starter", "Premium", "Ultimate")
+ end
+ end
+ end
+end
diff --git a/spec/lib/release_highlights/validator_spec.rb b/spec/lib/release_highlights/validator_spec.rb
new file mode 100644
index 00000000000..e68d9145dcd
--- /dev/null
+++ b/spec/lib/release_highlights/validator_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ReleaseHighlights::Validator do
+ let(:validator) { described_class.new(file: yaml_path) }
+ let(:yaml_path) { 'spec/fixtures/whats_new/valid.yml' }
+ let(:invalid_yaml_path) { 'spec/fixtures/whats_new/invalid.yml' }
+
+ describe '#valid?' do
+ subject { validator.valid? }
+
+ context 'with a valid file' do
+ it 'passes entries to entry validator and returns true' do
+ expect(ReleaseHighlights::Validator::Entry).to receive(:new).exactly(:twice).and_call_original
+ expect(subject).to be true
+ expect(validator.errors).to be_empty
+ end
+ end
+
+ context 'with invalid file' do
+ let(:yaml_path) { invalid_yaml_path }
+
+ it 'returns false and has errors' do
+ expect(subject).to be false
+ expect(validator.errors).not_to be_empty
+ end
+ end
+ end
+
+ describe '.validate_all!' do
+ subject { described_class.validate_all! }
+
+ before do
+ allow(ReleaseHighlight).to receive(:file_paths).and_return(yaml_paths)
+ end
+
+ context 'with valid files' do
+ let(:yaml_paths) { [yaml_path, yaml_path] }
+
+ it { is_expected.to be true }
+ end
+
+ context 'with an invalid file' do
+ let(:yaml_paths) { [invalid_yaml_path, yaml_path] }
+
+ it { is_expected.to be false }
+ end
+ end
+
+ describe '.error_message' do
+ subject do
+ described_class.validate_all!
+ described_class.error_message
+ end
+
+ before do
+ allow(ReleaseHighlight).to receive(:file_paths).and_return([yaml_path])
+ end
+
+ context 'with a valid file' do
+ it { is_expected.to be_empty }
+ end
+
+ context 'with an invalid file' do
+ let(:yaml_path) { invalid_yaml_path }
+
+ it 'returns a nice error message' do
+ expect(subject).to eq(<<-MESSAGE.strip_heredoc)
+ ---------------------------------------------------------
+ Validation failed for spec/fixtures/whats_new/invalid.yml
+ ---------------------------------------------------------
+ * Packages must be one of ["Core", "Starter", "Premium", "Ultimate"] (line 6)
+
+ MESSAGE
+ end
+ end
+ end
+
+ describe 'when validating all files' do
+ it 'has no errors' do
+ expect(described_class.validate_all!).to be_truthy, described_class.error_message
+ end
+ end
+end
diff --git a/spec/lib/uploaded_file_spec.rb b/spec/lib/uploaded_file_spec.rb
index 8425e1dbd46..ececc84bc93 100644
--- a/spec/lib/uploaded_file_spec.rb
+++ b/spec/lib/uploaded_file_spec.rb
@@ -27,12 +27,12 @@ RSpec.describe UploadedFile do
end
it 'handles a blank path' do
- params['file.path'] = ''
+ params['path'] = ''
# Not a real file, so can't determine size itself
- params['file.size'] = 1.byte
+ params['size'] = 1.byte
- expect { described_class.from_params(params, :file, upload_path) }
+ expect { described_class.from_params(params, upload_path) }
.not_to raise_error
end
end
@@ -50,7 +50,7 @@ RSpec.describe UploadedFile do
end
end
- describe '.from_params_without_field' do
+ describe '.from_params' do
let(:upload_path) { nil }
after do
@@ -58,7 +58,7 @@ RSpec.describe UploadedFile do
end
subject do
- described_class.from_params_without_field(params, [upload_path, Dir.tmpdir])
+ described_class.from_params(params, [upload_path, Dir.tmpdir])
end
context 'when valid file is specified' do
@@ -170,190 +170,6 @@ RSpec.describe UploadedFile do
end
end
end
-
- describe '.from_params' do
- let(:upload_path) { nil }
- let(:file_path_override) { nil }
-
- after do
- FileUtils.rm_r(upload_path) if upload_path
- end
-
- subject do
- described_class.from_params(params, :file, [upload_path, Dir.tmpdir], file_path_override)
- end
-
- RSpec.shared_context 'filepath override' do
- let(:temp_file_override) { Tempfile.new(%w[override override], temp_dir) }
- let(:file_path_override) { temp_file_override.path }
-
- before do
- FileUtils.touch(temp_file_override)
- end
-
- after do
- FileUtils.rm_f(temp_file_override)
- end
- end
-
- context 'when valid file is specified' do
- context 'only local path is specified' do
- let(:params) do
- { 'file.path' => temp_file.path }
- end
-
- it { is_expected.not_to be_nil }
-
- it "generates filename from path" do
- expect(subject.original_filename).to eq(::File.basename(temp_file.path))
- end
- end
-
- context 'all parameters are specified' do
- context 'with a filepath' do
- let(:params) do
- { 'file.path' => temp_file.path,
- 'file.name' => 'dir/my file&.txt',
- 'file.type' => 'my/type',
- 'file.sha256' => 'sha256' }
- end
-
- it_behaves_like 'using the file path',
- filename: 'my_file_.txt',
- content_type: 'my/type',
- sha256: 'sha256',
- path_suffix: 'test'
- end
-
- context 'with a filepath override' do
- include_context 'filepath override'
-
- let(:params) do
- { 'file.path' => temp_file.path,
- 'file.name' => 'dir/my file&.txt',
- 'file.type' => 'my/type',
- 'file.sha256' => 'sha256' }
- end
-
- it_behaves_like 'using the file path',
- filename: 'my_file_.txt',
- content_type: 'my/type',
- sha256: 'sha256',
- path_suffix: 'override'
- end
-
- context 'with a remote id' do
- let(:params) do
- {
- 'file.name' => 'dir/my file&.txt',
- 'file.sha256' => 'sha256',
- 'file.remote_url' => 'http://localhost/file',
- 'file.remote_id' => '1234567890',
- 'file.etag' => 'etag1234567890',
- 'file.size' => '123456'
- }
- end
-
- it_behaves_like 'using the remote id',
- filename: 'my_file_.txt',
- content_type: 'application/octet-stream',
- sha256: 'sha256',
- size: 123456,
- remote_id: '1234567890'
- end
-
- context 'with a path and a remote id' do
- let(:params) do
- {
- 'file.path' => temp_file.path,
- 'file.name' => 'dir/my file&.txt',
- 'file.sha256' => 'sha256',
- 'file.remote_url' => 'http://localhost/file',
- 'file.remote_id' => '1234567890',
- 'file.etag' => 'etag1234567890',
- 'file.size' => '123456'
- }
- end
-
- it_behaves_like 'using the remote id',
- filename: 'my_file_.txt',
- content_type: 'application/octet-stream',
- sha256: 'sha256',
- size: 123456,
- remote_id: '1234567890'
- end
-
- context 'with a path override and a remote id' do
- include_context 'filepath override'
-
- let(:params) do
- {
- 'file.name' => 'dir/my file&.txt',
- 'file.sha256' => 'sha256',
- 'file.remote_url' => 'http://localhost/file',
- 'file.remote_id' => '1234567890',
- 'file.etag' => 'etag1234567890',
- 'file.size' => '123456'
- }
- end
-
- it_behaves_like 'using the remote id',
- filename: 'my_file_.txt',
- content_type: 'application/octet-stream',
- sha256: 'sha256',
- size: 123456,
- remote_id: '1234567890'
- end
- end
- end
-
- context 'when no params are specified' do
- let(:params) do
- {}
- end
-
- it "does not return an object" do
- is_expected.to be_nil
- end
- end
-
- context 'when verifying allowed paths' do
- let(:params) do
- { 'file.path' => temp_file.path }
- end
-
- context 'when file is stored in system temporary folder' do
- let(:temp_dir) { Dir.tmpdir }
-
- it "succeeds" do
- is_expected.not_to be_nil
- end
- end
-
- context 'when file is stored in user provided upload path' do
- let(:upload_path) { Dir.mktmpdir }
- let(:temp_dir) { upload_path }
-
- it "succeeds" do
- is_expected.not_to be_nil
- end
- end
-
- context 'when file is stored outside of user provided upload path' do
- let!(:generated_dir) { Dir.mktmpdir }
- let!(:temp_dir) { Dir.mktmpdir }
-
- before do
- # We overwrite default temporary path
- allow(Dir).to receive(:tmpdir).and_return(generated_dir)
- end
-
- it "raises an error" do
- expect { subject }.to raise_error(UploadedFile::InvalidPathError, /insecure path used/)
- end
- end
- end
- end
end
describe '.initialize' do
diff --git a/spec/mailers/emails/merge_requests_spec.rb b/spec/mailers/emails/merge_requests_spec.rb
index 412cdff3aba..34665d943ab 100644
--- a/spec/mailers/emails/merge_requests_spec.rb
+++ b/spec/mailers/emails/merge_requests_spec.rb
@@ -64,7 +64,7 @@ RSpec.describe Emails::MergeRequests do
}
end
- it { expect(subject).to have_content('This attachment has been truncated to avoid exceeding the maximum allowed attachment size of 15MB.') }
+ it { expect(subject).to have_content('attachment has been truncated to avoid exceeding the maximum allowed attachment size of 15 MB.') }
end
end
end
diff --git a/spec/mailers/emails/pipelines_spec.rb b/spec/mailers/emails/pipelines_spec.rb
index f0f23fe2097..3ac68721357 100644
--- a/spec/mailers/emails/pipelines_spec.rb
+++ b/spec/mailers/emails/pipelines_spec.rb
@@ -11,8 +11,8 @@ RSpec.describe Emails::Pipelines do
shared_examples_for 'correct pipeline information' do
it 'has a correct information' do
expect(subject)
- .to have_subject "#{project.name} | Pipeline ##{pipeline.id} has " \
- "#{status} for #{pipeline.source_ref} | " \
+ .to have_subject "#{status} pipeline for #{pipeline.source_ref} | " \
+ "#{project.name} | " \
"#{pipeline.short_sha}".to_s
expect(subject).to have_body_text pipeline.source_ref
@@ -29,8 +29,8 @@ RSpec.describe Emails::Pipelines do
it 'has correct information that there is no merge request link' do
expect(subject)
- .to have_subject "#{project.name} | Pipeline ##{pipeline.id} has " \
- "#{status} for #{pipeline.source_ref} | " \
+ .to have_subject "#{status} pipeline for #{pipeline.source_ref} | " \
+ "#{project.name} | " \
"#{pipeline.short_sha}".to_s
expect(subject).to have_body_text pipeline.source_ref
@@ -49,9 +49,9 @@ RSpec.describe Emails::Pipelines do
it 'has correct information that there is a merge request link' do
expect(subject)
- .to have_subject "#{project.name} | Pipeline ##{pipeline.id} has " \
- "#{status} for #{pipeline.source_ref} | " \
- "#{pipeline.short_sha} in !#{merge_request.iid}".to_s
+ .to have_subject "#{status} pipeline for #{pipeline.source_ref} | " \
+ "#{project.name} | " \
+ "#{pipeline.short_sha}".to_s
expect(subject).to have_body_text merge_request.to_reference
expect(subject).to have_body_text pipeline.source_ref
@@ -71,9 +71,9 @@ RSpec.describe Emails::Pipelines do
it 'has correct information that there is a merge request link' do
expect(subject)
- .to have_subject "#{project.name} | Pipeline ##{pipeline.id} has " \
- "#{status} for #{pipeline.source_ref} | " \
- "#{pipeline.short_sha} in !#{merge_request.iid}".to_s
+ .to have_subject "#{status} pipeline for #{pipeline.source_ref} | " \
+ "#{project.name} | " \
+ "#{pipeline.short_sha}".to_s
expect(subject).to have_body_text merge_request.to_reference
expect(subject).to have_body_text pipeline.source_ref
@@ -89,8 +89,8 @@ RSpec.describe Emails::Pipelines do
let(:sha) { project.commit(ref).sha }
it_behaves_like 'correct pipeline information' do
- let(:status) { 'succeeded' }
- let(:status_text) { 'Your pipeline has passed.' }
+ let(:status) { 'Succesful' }
+ let(:status_text) { "Pipeline ##{pipeline.id} has passed!" }
end
end
@@ -102,8 +102,8 @@ RSpec.describe Emails::Pipelines do
let(:sha) { project.commit(ref).sha }
it_behaves_like 'correct pipeline information' do
- let(:status) { 'failed' }
- let(:status_text) { 'Your pipeline has failed.' }
+ let(:status) { 'Failed' }
+ let(:status_text) { "Pipeline ##{pipeline.id} has failed!" }
end
end
@@ -115,8 +115,8 @@ RSpec.describe Emails::Pipelines do
let(:sha) { project.commit(ref).sha }
it_behaves_like 'correct pipeline information' do
- let(:status) { 'been fixed' }
- let(:status_text) { 'Your pipeline has been fixed!' }
+ let(:status) { 'Fixed' }
+ let(:status_text) { "Pipeline has been fixed and ##{pipeline.id} has passed!" }
end
end
end
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index 3ebc2fc1e36..53ce200eed5 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -224,6 +224,10 @@ RSpec.describe Notify do
let(:model) { issue }
end
+ it 'contains a link to the issue' do
+ is_expected.to have_body_text(issue.to_reference(full: false))
+ end
+
it_behaves_like 'it should show Gmail Actions View Issue link'
it_behaves_like 'an unsubscribeable thread'
it_behaves_like 'appearance header and footer enabled'
@@ -1611,6 +1615,88 @@ RSpec.describe Notify do
is_expected.to have_body_text group_member.invite_email
end
end
+
+ describe 'group expiration date updated' do
+ let_it_be(:group_member) { create(:group_member, group: group, expires_at: 1.day.from_now) }
+
+ context 'when expiration date is changed' do
+ subject { described_class.member_expiration_date_updated_email('group', group_member.id) }
+
+ it_behaves_like 'an email sent from GitLab'
+ it_behaves_like 'it should not have Gmail Actions links'
+ it_behaves_like 'a user cannot unsubscribe through footer link'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
+
+ context 'when expiration date is one day away' do
+ it 'contains all the useful information' do
+ is_expected.to have_subject 'Group membership expiration date changed'
+ is_expected.to have_body_text group_member.user.name
+ is_expected.to have_body_text group.name
+ is_expected.to have_body_text group.web_url
+ is_expected.to have_body_text group_group_members_url(group, search: group_member.user.username)
+ is_expected.to have_body_text 'day.'
+ is_expected.not_to have_body_text 'days.'
+ end
+ end
+
+ context 'when expiration date is more than one day away' do
+ before do
+ group_member.update!(expires_at: 20.days.from_now)
+ end
+
+ it 'contains all the useful information' do
+ is_expected.to have_subject 'Group membership expiration date changed'
+ is_expected.to have_body_text group_member.user.name
+ is_expected.to have_body_text group.name
+ is_expected.to have_body_text group.web_url
+ is_expected.to have_body_text group_group_members_url(group, search: group_member.user.username)
+ is_expected.to have_body_text 'days.'
+ is_expected.not_to have_body_text 'day.'
+ end
+ end
+
+ context 'when a group member is newly given an expiration date' do
+ let_it_be(:group_member) { create(:group_member, group: group) }
+
+ before do
+ group_member.update!(expires_at: 5.days.from_now)
+ end
+
+ subject { described_class.member_expiration_date_updated_email('group', group_member.id) }
+
+ it 'contains all the useful information' do
+ is_expected.to have_subject 'Group membership expiration date changed'
+ is_expected.to have_body_text group_member.user.name
+ is_expected.to have_body_text group.name
+ is_expected.to have_body_text group.web_url
+ is_expected.to have_body_text group_group_members_url(group, search: group_member.user.username)
+ is_expected.to have_body_text 'days.'
+ is_expected.not_to have_body_text 'day.'
+ end
+ end
+ end
+
+ context 'when expiration date is removed' do
+ before do
+ group_member.update!(expires_at: nil)
+ end
+
+ subject { described_class.member_expiration_date_updated_email('group', group_member.id) }
+
+ it_behaves_like 'an email sent from GitLab'
+ it_behaves_like 'it should not have Gmail Actions links'
+ it_behaves_like 'a user cannot unsubscribe through footer link'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
+
+ it 'contains all the useful information' do
+ is_expected.to have_subject 'Group membership expiration date removed'
+ is_expected.to have_body_text group_member.user.name
+ is_expected.to have_body_text group.name
+ end
+ end
+ end
end
describe 'confirmation if email changed' do
diff --git a/spec/migrations/20210112143418_remove_duplicate_services2_spec.rb b/spec/migrations/20210112143418_remove_duplicate_services2_spec.rb
new file mode 100644
index 00000000000..9a57d9bf84d
--- /dev/null
+++ b/spec/migrations/20210112143418_remove_duplicate_services2_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20210112143418_remove_duplicate_services2.rb')
+
+RSpec.describe RemoveDuplicateServices2 do
+ let_it_be(:namespaces) { table(:namespaces) }
+ let_it_be(:projects) { table(:projects) }
+ let_it_be(:services) { table(:services) }
+
+ describe '#up' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+
+ namespaces.create!(id: 1, name: 'group', path: 'group')
+
+ projects.create!(id: 1, namespace_id: 1) # duplicate services
+ projects.create!(id: 2, namespace_id: 1) # normal services
+ projects.create!(id: 3, namespace_id: 1) # no services
+ projects.create!(id: 4, namespace_id: 1) # duplicate services
+ projects.create!(id: 5, namespace_id: 1) # duplicate services
+
+ services.create!(id: 1, project_id: 1, type: 'JiraService')
+ services.create!(id: 2, project_id: 1, type: 'JiraService')
+ services.create!(id: 3, project_id: 2, type: 'JiraService')
+ services.create!(id: 4, project_id: 4, type: 'AsanaService')
+ services.create!(id: 5, project_id: 4, type: 'AsanaService')
+ services.create!(id: 6, project_id: 4, type: 'JiraService')
+ services.create!(id: 7, project_id: 4, type: 'JiraService')
+ services.create!(id: 8, project_id: 4, type: 'SlackService')
+ services.create!(id: 9, project_id: 4, type: 'SlackService')
+ services.create!(id: 10, project_id: 5, type: 'JiraService')
+ services.create!(id: 11, project_id: 5, type: 'JiraService')
+
+ # Services without a project_id should be ignored
+ services.create!(id: 12, type: 'JiraService')
+ services.create!(id: 13, type: 'JiraService')
+ end
+
+ it 'schedules background jobs for all projects with duplicate services' do
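+ # With BATCH_SIZE stubbed to 2, the three projects that have duplicate services
+ # (ids 1, 4 and 5) are scheduled in two batches: [1, 4] and then [5].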
+ Sidekiq::Testing.fake! do
+ freeze_time do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, 1, 4)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, 5)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/add_has_external_wiki_trigger_spec.rb b/spec/migrations/add_has_external_wiki_trigger_spec.rb
new file mode 100644
index 00000000000..10c6888c87e
--- /dev/null
+++ b/spec/migrations/add_has_external_wiki_trigger_spec.rb
@@ -0,0 +1,128 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe AddHasExternalWikiTrigger do
+ let(:migration) { described_class.new }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:services) { table(:services) }
+
+ before do
+ @namespace = namespaces.create!(name: 'foo', path: 'foo')
+ @project = projects.create!(namespace_id: @namespace.id)
+ end
+
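+ # The migration installs INSERT/UPDATE/DELETE triggers that keep
+ # `projects.has_external_wiki` in sync with active ExternalWikiService rows.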
+ describe '#up' do
+ before do
+ migrate!
+ end
+
+ describe 'INSERT trigger' do
+ it 'sets `has_external_wiki` to true when active `ExternalWikiService` is inserted' do
+ expect do
+ services.create!(type: 'ExternalWikiService', active: true, project_id: @project.id)
+ end.to change { @project.reload.has_external_wiki }.to(true)
+ end
+
+ it 'does not set `has_external_wiki` to true when service is for a different project' do
+ different_project = projects.create!(namespace_id: @namespace.id)
+
+ expect do
+ services.create!(type: 'ExternalWikiService', active: true, project_id: different_project.id)
+ end.not_to change { @project.reload.has_external_wiki }
+ end
+
+ it 'does not set `has_external_wiki` to true when inactive `ExternalWikiService` is inserted' do
+ expect do
+ services.create!(type: 'ExternalWikiService', active: false, project_id: @project.id)
+ end.not_to change { @project.reload.has_external_wiki }
+ end
+
+ it 'does not set `has_external_wiki` to true when active other service is inserted' do
+ expect do
+ services.create!(type: 'MyService', active: true, project_id: @project.id)
+ end.not_to change { @project.reload.has_external_wiki }
+ end
+ end
+
+ describe 'UPDATE trigger' do
+ it 'sets `has_external_wiki` to true when `ExternalWikiService` is made active' do
+ service = services.create!(type: 'ExternalWikiService', active: false, project_id: @project.id)
+
+ expect do
+ service.update!(active: true)
+ end.to change { @project.reload.has_external_wiki }.to(true)
+ end
+
+ it 'sets `has_external_wiki` to false when `ExternalWikiService` is made inactive' do
+ service = services.create!(type: 'ExternalWikiService', active: true, project_id: @project.id)
+
+ expect do
+ service.update!(active: false)
+ end.to change { @project.reload.has_external_wiki }.to(false)
+ end
+
+ it 'does not change `has_external_wiki` when service is for a different project' do
+ different_project = projects.create!(namespace_id: @namespace.id)
+ service = services.create!(type: 'ExternalWikiService', active: false, project_id: different_project.id)
+
+ expect do
+ service.update!(active: true)
+ end.not_to change { @project.reload.has_external_wiki }
+ end
+ end
+
+ describe 'DELETE trigger' do
+ it 'sets `has_external_wiki` to false when `ExternalWikiService` is deleted' do
+ service = services.create!(type: 'ExternalWikiService', active: true, project_id: @project.id)
+
+ expect do
+ service.delete
+ end.to change { @project.reload.has_external_wiki }.to(false)
+ end
+
+ it 'does not change `has_external_wiki` when service is for a different project' do
+ different_project = projects.create!(namespace_id: @namespace.id)
+ service = services.create!(type: 'ExternalWikiService', active: true, project_id: different_project.id)
+
+ expect do
+ service.delete
+ end.not_to change { @project.reload.has_external_wiki }
+ end
+ end
+ end
+
+ describe '#down' do
+ before do
+ migration.up
+ migration.down
+ end
+
+ it 'drops the INSERT trigger' do
+ expect do
+ services.create!(type: 'ExternalWikiService', active: true, project_id: @project.id)
+ end.not_to change { @project.reload.has_external_wiki }
+ end
+
+ it 'drops the UPDATE trigger' do
+ service = services.create!(type: 'ExternalWikiService', active: false, project_id: @project.id)
+ @project.update!(has_external_wiki: false)
+
+ expect do
+ service.update!(active: true)
+ end.not_to change { @project.reload.has_external_wiki }
+ end
+
+ it 'drops the DELETE trigger' do
+ service = services.create!(type: 'ExternalWikiService', active: true, project_id: @project.id)
+ @project.update!(has_external_wiki: true)
+
+ expect do
+ service.delete
+ end.not_to change { @project.reload.has_external_wiki }
+ end
+ end
+end
diff --git a/spec/migrations/remove_alerts_service_records_spec.rb b/spec/migrations/remove_alerts_service_records_spec.rb
new file mode 100644
index 00000000000..eaf9f90b445
--- /dev/null
+++ b/spec/migrations/remove_alerts_service_records_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20210107194543_remove_alerts_service_records.rb')
+
+RSpec.describe RemoveAlertsServiceRecords do
+ let(:services) { table(:services) }
+ let(:alerts_service_data) { table(:alerts_service_data) }
+
+ before do
+ 5.times do
+ service = services.create!(type: 'AlertsService')
+ alerts_service_data.create!(service_id: service.id)
+ end
+
+ services.create!(type: 'SomeOtherType')
+ end
+
+ it 'removes services records of type AlertsService and corresponding data', :aggregate_failures do
+ expect(services.count).to eq(6)
+ expect(alerts_service_data.count).to eq(5)
+
+ migrate!
+
+ expect(services.count).to eq(1)
+ expect(services.first.type).to eq('SomeOtherType')
+ expect(services.where(type: 'AlertsService')).to be_empty
+ expect(alerts_service_data.all).to be_empty
+ end
+end
diff --git a/spec/migrations/remove_duplicate_labels_from_groups_spec.rb b/spec/migrations/remove_duplicate_labels_from_groups_spec.rb
new file mode 100644
index 00000000000..eff2c6a37e4
--- /dev/null
+++ b/spec/migrations/remove_duplicate_labels_from_groups_spec.rb
@@ -0,0 +1,227 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200716234259_remove_duplicate_labels_from_group.rb')
+
+RSpec.describe RemoveDuplicateLabelsFromGroup do
+ let(:labels_table) { table(:labels) }
+ let(:labels) { labels_table.all }
+ let(:projects_table) { table(:projects) }
+ let(:projects) { projects_table.all }
+ let(:namespaces_table) { table(:namespaces) }
+ let(:namespaces) { namespaces_table.all }
+ let(:backup_labels_table) { table(:backup_labels) }
+ let(:backup_labels) { backup_labels_table.all }
+ # for cases where we can't use the ActiveRecord class, because the `type` column
+ # makes ActiveRecord apply single-table inheritance and expect a Label subclass
+ let(:sql_backup_labels) { ApplicationRecord.connection.execute('SELECT * from backup_labels') }
+
+ # all the possible tables with records that may have a relationship with a label
+ let(:analytics_cycle_analytics_group_stages_table) { table(:analytics_cycle_analytics_group_stages) }
+ let(:analytics_cycle_analytics_project_stages_table) { table(:analytics_cycle_analytics_project_stages) }
+ let(:board_labels_table) { table(:board_labels) }
+ let(:label_links_table) { table(:label_links) }
+ let(:label_priorities_table) { table(:label_priorities) }
+ let(:lists_table) { table(:lists) }
+ let(:resource_label_events_table) { table(:resource_label_events) }
+
+ let!(:group_one) { namespaces_table.create!(id: 1, type: 'Group', name: 'group', path: 'group') }
+ let!(:project_one) do
+ projects_table.create!(id: 1, name: 'project', path: 'project',
+ visibility_level: 0, namespace_id: group_one.id)
+ end
+
+ let(:label_title) { 'bug' }
+ let(:label_color) { 'red' }
+ let(:label_description) { 'nice label' }
+ let(:project_id) { project_one.id }
+ let(:group_id) { group_one.id }
+ let(:other_title) { 'feature' }
+
+ let(:group_label_attributes) do
+ {
+ title: label_title, color: label_color, group_id: group_id, type: 'GroupLabel', template: false, description: label_description
+ }
+ end
+
+ let(:migration) { described_class.new }
+
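+ # The migration deletes full duplicates (backing them up with a CREATE restore_action)
+ # and renames partial duplicates that share a group and title but differ in colour
+ # (backing them up with a RENAME restore_action), so rolling back restores both cases.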
+ describe 'removing full duplicates' do
+ context 'when there are no duplicate labels' do
+ let!(:first_label) { labels_table.create!(group_label_attributes.merge(id: 1, title: "a different label")) }
+ let!(:second_label) { labels_table.create!(group_label_attributes.merge(id: 2, title: "a totally different label")) }
+
+ it 'does not remove anything' do
+ expect { migration.up }.not_to change { backup_labels_table.count }
+ end
+
+ it 'restores removed records when rolling back - no change' do
+ migration.up
+
+ expect { migration.down }.not_to change { labels_table.count }
+ end
+ end
+
+ context 'with duplicates with no relationships' do
+ let!(:first_label) { labels_table.create!(group_label_attributes.merge(id: 1)) }
+ let!(:second_label) { labels_table.create!(group_label_attributes.merge(id: 2)) }
+ let!(:third_label) { labels_table.create!(group_label_attributes.merge(id: 3, title: other_title)) }
+ let!(:fourth_label) { labels_table.create!(group_label_attributes.merge(id: 4, title: other_title)) }
+
+ it 'creates a backup record for each removed record' do
+ expect { migration.up }.to change { backup_labels_table.count }.from(0).to(2)
+ end
+
+ it 'creates the correct backup records with `create` restore_action' do
+ migration.up
+
+ expect(sql_backup_labels.find { |bl| bl["id"] == 2 }).to include(second_label.attributes.merge("restore_action" => described_class::CREATE, "new_title" => nil, "created_at" => anything, "updated_at" => anything))
+ expect(sql_backup_labels.find { |bl| bl["id"] == 4 }).to include(fourth_label.attributes.merge("restore_action" => described_class::CREATE, "new_title" => nil, "created_at" => anything, "updated_at" => anything))
+ end
+
+ it 'deletes all but one' do
+ migration.up
+
+ expect { second_label.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ expect { fourth_label.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+ it 'restores removed records on rollback' do
+ second_label_attributes = modified_attributes(second_label)
+ fourth_label_attributes = modified_attributes(fourth_label)
+
+ migration.up
+
+ migration.down
+
+ expect(second_label.attributes).to include(second_label_attributes)
+ expect(fourth_label.attributes).to include(fourth_label_attributes)
+ end
+ end
+
+ context 'two duplicate records, one of which has a relationship' do
+ let!(:first_label) { labels_table.create!(group_label_attributes.merge(id: 1)) }
+ let!(:second_label) { labels_table.create!(group_label_attributes.merge(id: 2)) }
+ let!(:label_priority) { label_priorities_table.create!(label_id: second_label.id, project_id: project_id, priority: 1) }
+
+ it 'does not remove anything' do
+ expect { migration.up }.not_to change { labels_table.count }
+ end
+
+ it 'does not create a backup record with `create` restore_action' do
+ expect { migration.up }.not_to change { backup_labels_table.where(restore_action: described_class::CREATE).count }
+ end
+
+ it 'restores removed records when rolling back - no change' do
+ migration.up
+
+ expect { migration.down }.not_to change { labels_table.count }
+ end
+ end
+
+ context 'multiple duplicates, a subset of which have relationships' do
+ let!(:first_label) { labels_table.create!(group_label_attributes.merge(id: 1)) }
+ let!(:second_label) { labels_table.create!(group_label_attributes.merge(id: 2)) }
+ let!(:label_priority_for_second_label) { label_priorities_table.create!(label_id: second_label.id, project_id: project_id, priority: 1) }
+ let!(:third_label) { labels_table.create!(group_label_attributes.merge(id: 3)) }
+ let!(:fourth_label) { labels_table.create!(group_label_attributes.merge(id: 4)) }
+ let!(:label_priority_for_fourth_label) { label_priorities_table.create!(label_id: fourth_label.id, project_id: project_id, priority: 2) }
+
+ it 'creates a backup record with `create` restore_action for each removed record' do
+ expect { migration.up }.to change { backup_labels_table.where(restore_action: described_class::CREATE).count }.from(0).to(1)
+ end
+
+ it 'creates the correct backup records' do
+ migration.up
+
+ expect(sql_backup_labels.find { |bl| bl["id"] == 3 }).to include(third_label.attributes.merge("restore_action" => described_class::CREATE, "new_title" => nil, "created_at" => anything, "updated_at" => anything))
+ end
+
+ it 'deletes the duplicate record' do
+ migration.up
+
+ expect { first_label.reload }.not_to raise_error
+ expect { second_label.reload }.not_to raise_error
+ expect { third_label.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+ it 'restores removed records on rollback' do
+ third_label_attributes = modified_attributes(third_label)
+
+ migration.up
+ migration.down
+
+ expect(third_label.attributes).to include(third_label_attributes)
+ end
+ end
+ end
+
+ describe 'renaming partial duplicates' do
+ # Partial duplicates: only group_id and title match; the distinct colour prevents deletion.
+ context 'when there are no duplicate labels' do
+ let!(:first_label) { labels_table.create!(group_label_attributes.merge(id: 1, title: "a unique label", color: 'green')) }
+ let!(:second_label) { labels_table.create!(group_label_attributes.merge(id: 2, title: "a totally different, unique, label", color: 'blue')) }
+
+ it 'does not rename anything' do
+ expect { migration.up }.not_to change { backup_labels_table.count }
+ end
+ end
+
+ context 'with duplicates with no relationships' do
+ let!(:first_label) { labels_table.create!(group_label_attributes.merge(id: 1, color: 'green')) }
+ let!(:second_label) { labels_table.create!(group_label_attributes.merge(id: 2, color: 'blue')) }
+ let!(:third_label) { labels_table.create!(group_label_attributes.merge(id: 3, title: other_title, color: 'purple')) }
+ let!(:fourth_label) { labels_table.create!(group_label_attributes.merge(id: 4, title: other_title, color: 'yellow')) }
+
+ it 'creates a backup record for each renamed record' do
+ expect { migration.up }.to change { backup_labels_table.count }.from(0).to(2)
+ end
+
+ it 'creates the correct backup records with `rename` restore_action' do
+ migration.up
+
+ expect(sql_backup_labels.find { |bl| bl["id"] == 2 }).to include(second_label.attributes.merge("restore_action" => described_class::RENAME, "created_at" => anything, "updated_at" => anything))
+ expect(sql_backup_labels.find { |bl| bl["id"] == 4 }).to include(fourth_label.attributes.merge("restore_action" => described_class::RENAME, "created_at" => anything, "updated_at" => anything))
+ end
+
+ it 'modifies the titles of the partial duplicates' do
+ migration.up
+
+ expect(second_label.reload.title).to match(/#{label_title}_duplicate#{second_label.id}$/)
+ expect(fourth_label.reload.title).to match(/#{other_title}_duplicate#{fourth_label.id}$/)
+ end
+
+ it 'restores renamed records on rollback' do
+ second_label_attributes = modified_attributes(second_label)
+ fourth_label_attributes = modified_attributes(fourth_label)
+
+ migration.up
+
+ migration.down
+
+ expect(second_label.reload.attributes).to include(second_label_attributes)
+ expect(fourth_label.reload.attributes).to include(fourth_label_attributes)
+ end
+
+ context 'when the labels have a long title that might overflow' do
+ let(:long_title) { "a" * 255 }
+
+ before do
+ first_label.update_attribute(:title, long_title)
+ second_label.update_attribute(:title, long_title)
+ end
+
+ it 'keeps the length within the limit' do
+ migration.up
+
+ expect(second_label.reload.title).to eq("#{"a" * 244}_duplicate#{second_label.id}")
+ expect(second_label.title.length).to eq(255)
+ end
+ end
+ end
+ end
+
+ def modified_attributes(label)
+ label.attributes.except('created_at', 'updated_at')
+ end
+end
diff --git a/spec/migrations/schedule_populate_finding_uuid_for_vulnerability_feedback_spec.rb b/spec/migrations/schedule_populate_finding_uuid_for_vulnerability_feedback_spec.rb
new file mode 100644
index 00000000000..d8bdefd5546
--- /dev/null
+++ b/spec/migrations/schedule_populate_finding_uuid_for_vulnerability_feedback_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe SchedulePopulateFindingUuidForVulnerabilityFeedback do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:users) { table(:users) }
+ let(:vulnerability_feedback) { table(:vulnerability_feedback) }
+
+ let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
+ let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') }
+ let(:user) { users.create!(username: 'john.doe', projects_limit: 1) }
+
+ let(:common_feedback_params) { { feedback_type: 0, category: 0, project_id: project.id, author_id: user.id } }
+ let!(:feedback_1) { vulnerability_feedback.create!(**common_feedback_params, project_fingerprint: 'foo') }
+ let!(:feedback_2) { vulnerability_feedback.create!(**common_feedback_params, project_fingerprint: 'bar') }
+ let!(:feedback_3) { vulnerability_feedback.create!(**common_feedback_params, project_fingerprint: 'zoo', finding_uuid: SecureRandom.uuid) }
+
+ around do |example|
+ freeze_time { Sidekiq::Testing.fake! { example.run } }
+ end
+
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", 1)
+ end
+
+ it 'schedules the background jobs', :aggregate_failures do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to be(3)
+ expect(described_class::MIGRATION_CLASS).to be_scheduled_delayed_migration(2.minutes, feedback_1.id, feedback_1.id)
+ expect(described_class::MIGRATION_CLASS).to be_scheduled_delayed_migration(4.minutes, feedback_2.id, feedback_2.id)
+ expect(described_class::MIGRATION_CLASS).to be_scheduled_delayed_migration(6.minutes, feedback_3.id, feedback_3.id)
+ end
+end
diff --git a/spec/models/alert_management/http_integration_spec.rb b/spec/models/alert_management/http_integration_spec.rb
index 910df51801a..ddd65e723eb 100644
--- a/spec/models/alert_management/http_integration_spec.rb
+++ b/spec/models/alert_management/http_integration_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe AlertManagement::HttpIntegration do
context 'with valid JSON schema' do
let(:attribute_mapping) do
{
- title: { path: %w(a b c), type: 'string' },
+ title: { path: %w(a b c), type: 'string', label: 'Title' },
description: { path: %w(a), type: 'string' }
}
end
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index ea03cbc3706..4755d700d72 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -72,6 +72,7 @@ RSpec.describe ApplicationSetting do
it { is_expected.not_to allow_value(nil).for(:push_event_activities_limit) }
it { is_expected.to validate_numericality_of(:container_registry_delete_tags_service_timeout).only_integer.is_greater_than_or_equal_to(0) }
+ it { is_expected.to validate_numericality_of(:container_registry_cleanup_tags_service_max_list_size).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_expiration_policies_worker_capacity).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:snippet_size_limit).only_integer.is_greater_than(0) }
@@ -313,7 +314,7 @@ RSpec.describe ApplicationSetting do
it { is_expected.to validate_presence_of(:max_attachment_size) }
- it do
+ specify do
is_expected.to validate_numericality_of(:max_attachment_size)
.only_integer
.is_greater_than(0)
@@ -321,13 +322,13 @@ RSpec.describe ApplicationSetting do
it { is_expected.to validate_presence_of(:max_import_size) }
- it do
+ specify do
is_expected.to validate_numericality_of(:max_import_size)
.only_integer
.is_greater_than_or_equal_to(0)
end
- it do
+ specify do
is_expected.to validate_numericality_of(:local_markdown_version)
.only_integer
.is_greater_than_or_equal_to(0)
@@ -472,7 +473,7 @@ RSpec.describe ApplicationSetting do
end
[:gitaly_timeout_default, :gitaly_timeout_medium, :gitaly_timeout_fast].each do |timeout_name|
- it do
+ specify do
is_expected.to validate_presence_of(timeout_name)
is_expected.to validate_numericality_of(timeout_name).only_integer
.is_greater_than_or_equal_to(0)
@@ -733,6 +734,27 @@ RSpec.describe ApplicationSetting do
is_expected.to be_invalid
end
end
+
+ context 'throttle_* settings' do
+ where(:throttle_setting) do
+ %i[
+ throttle_unauthenticated_requests_per_period
+ throttle_unauthenticated_period_in_seconds
+ throttle_authenticated_api_requests_per_period
+ throttle_authenticated_api_period_in_seconds
+ throttle_authenticated_web_requests_per_period
+ throttle_authenticated_web_period_in_seconds
+ ]
+ end
+
+ with_them do
+ it { is_expected.to allow_value(3).for(throttle_setting) }
+ it { is_expected.not_to allow_value(-3).for(throttle_setting) }
+ it { is_expected.not_to allow_value(0).for(throttle_setting) }
+ it { is_expected.not_to allow_value('three').for(throttle_setting) }
+ it { is_expected.not_to allow_value(nil).for(throttle_setting) }
+ end
+ end
end
context 'restrict creating duplicates' do
@@ -821,7 +843,7 @@ RSpec.describe ApplicationSetting do
context 'validations' do
it { is_expected.to validate_presence_of(:diff_max_patch_bytes) }
- it do
+ specify do
is_expected.to validate_numericality_of(:diff_max_patch_bytes)
.only_integer
.is_greater_than_or_equal_to(Gitlab::Git::Diff::DEFAULT_MAX_PATCH_BYTES)
@@ -850,12 +872,13 @@ RSpec.describe ApplicationSetting do
end
end
- describe '#instance_review_permitted?', :request_store do
+ describe '#instance_review_permitted?', :request_store, :use_clean_rails_memory_store_caching do
subject { setting.instance_review_permitted? }
before do
- RequestStore.store[:current_license] = nil
- expect(Rails.cache).to receive(:fetch).and_return(
+ allow(License).to receive(:current).and_return(nil) if Gitlab.ee?
+ allow(Rails.cache).to receive(:fetch).and_call_original
+ expect(Rails.cache).to receive(:fetch).with('limited_users_count', anything).and_return(
::ApplicationSetting::INSTANCE_REVIEW_MIN_USERS + users_over_minimum
)
end
diff --git a/spec/models/audit_event_partitioned_spec.rb b/spec/models/audit_event_archived_spec.rb
index ab48e291f78..43a2e8434b0 100644
--- a/spec/models/audit_event_partitioned_spec.rb
+++ b/spec/models/audit_event_archived_spec.rb
@@ -2,35 +2,35 @@
require 'spec_helper'
-RSpec.describe AuditEventPartitioned do
+RSpec.describe AuditEventArchived do
let(:source_table) { AuditEvent }
- let(:partitioned_table) { described_class }
+ let(:destination_table) { described_class }
it 'has the same columns as the source table' do
column_names_from_source_table = column_names(source_table)
- column_names_from_partioned_table = column_names(partitioned_table)
+ column_names_from_destination_table = column_names(destination_table)
- expect(column_names_from_partioned_table).to match_array(column_names_from_source_table)
+ expect(column_names_from_destination_table).to match_array(column_names_from_source_table)
end
it 'has the same null constraints as the source table' do
constraints_from_source_table = null_constraints(source_table)
- constraints_from_partitioned_table = null_constraints(partitioned_table)
+ constraints_from_destination_table = null_constraints(destination_table)
- expect(constraints_from_partitioned_table.to_a).to match_array(constraints_from_source_table.to_a)
+ expect(constraints_from_destination_table.to_a).to match_array(constraints_from_source_table.to_a)
end
it 'inserts the same record as the one in the source table', :aggregate_failures do
- expect { create(:audit_event) }.to change { partitioned_table.count }.by(1)
+ expect { create(:audit_event) }.to change { destination_table.count }.by(1)
event_from_source_table = source_table.connection.select_one(
"SELECT * FROM #{source_table.table_name} ORDER BY created_at desc LIMIT 1"
)
- event_from_partitioned_table = partitioned_table.connection.select_one(
- "SELECT * FROM #{partitioned_table.table_name} ORDER BY created_at desc LIMIT 1"
+ event_from_destination_table = destination_table.connection.select_one(
+ "SELECT * FROM #{destination_table.table_name} ORDER BY created_at desc LIMIT 1"
)
- expect(event_from_partitioned_table).to eq(event_from_source_table)
+ expect(event_from_destination_table).to eq(event_from_source_table)
end
def column_names(table)
diff --git a/spec/models/ci/bridge_spec.rb b/spec/models/ci/bridge_spec.rb
index 11dcecd50ca..4f09f6f1da4 100644
--- a/spec/models/ci/bridge_spec.rb
+++ b/spec/models/ci/bridge_spec.rb
@@ -356,14 +356,6 @@ RSpec.describe Ci::Bridge do
describe '#dependency_variables' do
subject { bridge.dependency_variables }
- shared_context 'when ci_bridge_dependency_variables is disabled' do
- before do
- stub_feature_flags(ci_bridge_dependency_variables: false)
- end
-
- it { is_expected.to be_empty }
- end
-
context 'when downloading from previous stages' do
let!(:prepare1) { create(:ci_build, name: 'prepare1', pipeline: pipeline, stage_idx: 0) }
let!(:bridge) { create(:ci_bridge, pipeline: pipeline, stage_idx: 1) }
@@ -374,8 +366,6 @@ RSpec.describe Ci::Bridge do
it 'inherits only dependent variables' do
expect(subject.to_hash).to eq(job_variable_1.key => job_variable_1.value)
end
-
- it_behaves_like 'when ci_bridge_dependency_variables is disabled'
end
context 'when using needs' do
@@ -397,8 +387,6 @@ RSpec.describe Ci::Bridge do
it 'inherits only needs with artifacts variables' do
expect(subject.to_hash).to eq(job_variable_1.key => job_variable_1.value)
end
-
- it_behaves_like 'when ci_bridge_dependency_variables is disabled'
end
end
end
diff --git a/spec/models/ci/build_need_spec.rb b/spec/models/ci/build_need_spec.rb
index 43cce073918..c2cf9027055 100644
--- a/spec/models/ci/build_need_spec.rb
+++ b/spec/models/ci/build_need_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Ci::BuildNeed, model: true do
let(:build_need) { build(:ci_build_need) }
- it { is_expected.to belong_to(:build) }
+ it { is_expected.to belong_to(:build).class_name('Ci::Processable') }
it { is_expected.to validate_presence_of(:build) }
it { is_expected.to validate_presence_of(:name) }
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 9f412d64d56..c2029b9240b 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -717,6 +717,22 @@ RSpec.describe Ci::Build do
end
end
+ describe '#artifacts_public?' do
+ subject { build.artifacts_public? }
+
+ context 'artifacts with defaults' do
+ let(:build) { create(:ci_build, :artifacts) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'non public artifacts' do
+ let(:build) { create(:ci_build, :artifacts, :non_public_artifacts) }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
describe '#artifacts_expired?' do
subject { build.artifacts_expired? }
@@ -1149,26 +1165,12 @@ RSpec.describe Ci::Build do
end
context 'when transits to skipped' do
- context 'when cd_skipped_deployment_status is disabled' do
- before do
- stub_feature_flags(cd_skipped_deployment_status: false)
- build.skip!
- end
-
- it 'transits deployment status to canceled' do
- expect(deployment).to be_canceled
- end
+ before do
+ build.skip!
end
- context 'when cd_skipped_deployment_status is enabled' do
- before do
- stub_feature_flags(cd_skipped_deployment_status: project)
- build.skip!
- end
-
- it 'transits deployment status to skipped' do
- expect(deployment).to be_skipped
- end
+ it 'transits deployment status to skipped' do
+ expect(deployment).to be_skipped
end
end
@@ -2456,6 +2458,7 @@ RSpec.describe Ci::Build do
{ key: 'CI_PROJECT_VISIBILITY', value: 'private', public: true, masked: false },
{ key: 'CI_PROJECT_REPOSITORY_LANGUAGES', value: project.repository_languages.map(&:name).join(',').downcase, public: true, masked: false },
{ key: 'CI_DEFAULT_BRANCH', value: project.default_branch, public: true, masked: false },
+ { key: 'CI_PROJECT_CONFIG_PATH', value: project.ci_config_path_or_default, public: true, masked: false },
{ key: 'CI_PAGES_DOMAIN', value: Gitlab.config.pages.host, public: true, masked: false },
{ key: 'CI_PAGES_URL', value: project.pages_url, public: true, masked: false },
{ key: 'CI_DEPENDENCY_PROXY_SERVER', value: "#{Gitlab.config.gitlab.host}:#{Gitlab.config.gitlab.port}", public: true, masked: false },
@@ -2986,7 +2989,7 @@ RSpec.describe Ci::Build do
let(:ci_config_path) { { key: 'CI_CONFIG_PATH', value: 'custom', public: true, masked: false } }
before do
- expect_any_instance_of(Project).to receive(:ci_config_path) { 'custom' }
+ project.update!(ci_config_path: 'custom')
end
it { is_expected.to include(ci_config_path) }
@@ -4343,7 +4346,7 @@ RSpec.describe Ci::Build do
end
describe '#supported_runner?' do
- let_it_be(:build) { create(:ci_build) }
+ let_it_be_with_refind(:build) { create(:ci_build) }
subject { build.supported_runner?(runner_features) }
@@ -4408,6 +4411,41 @@ RSpec.describe Ci::Build do
it { is_expected.to be_falsey }
end
end
+
+ context 'when `return_exit_code` feature is required by build' do
+ let(:options) { { allow_failure_criteria: { exit_codes: [1] } } }
+
+ before do
+ build.update!(options: options)
+ end
+
+ context 'when runner provides given feature' do
+ let(:runner_features) { { return_exit_code: true } }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when runner does not provide given feature' do
+ let(:runner_features) { {} }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when the runner does not provide all of the required features' do
+ let(:options) do
+ {
+ allow_failure_criteria: { exit_codes: [1] },
+ artifacts: { reports: { junit: "junit.xml" } }
+ }
+ end
+
+ let(:runner_features) { { return_exit_code: true } }
+
+ it 'requires `upload_multiple_artifacts` too' do
+ is_expected.to be_falsey
+ end
+ end
+ end
end
describe '#deployment_status' do
@@ -4737,22 +4775,6 @@ RSpec.describe Ci::Build do
describe '#debug_mode?' do
subject { build.debug_mode? }
- context 'when feature is disabled' do
- before do
- stub_feature_flags(restrict_access_to_build_debug_mode: false)
- end
-
- it { is_expected.to eq false }
-
- context 'when in variables' do
- before do
- create(:ci_instance_variable, key: 'CI_DEBUG_TRACE', value: 'true')
- end
-
- it { is_expected.to eq false }
- end
- end
-
context 'when CI_DEBUG_TRACE=true is in variables' do
context 'when in instance variables' do
before do
@@ -4807,4 +4829,159 @@ RSpec.describe Ci::Build do
it { is_expected.to eq false }
end
end
+
+ describe '#drop_with_exit_code!' do
+ let(:exit_code) { 1 }
+ let(:options) { {} }
+
+ before do
+ build.options.merge!(options)
+ build.save!
+ end
+
+ subject(:drop_with_exit_code) do
+ build.drop_with_exit_code!(:unknown_failure, exit_code)
+ end
+
+ shared_examples 'drops the build without changing allow_failure' do
+ it 'does not change allow_failure' do
+ expect { drop_with_exit_code }
+ .not_to change { build.reload.allow_failure }
+ end
+
+ it 'drops the build' do
+ expect { drop_with_exit_code }
+ .to change { build.reload.failed? }
+ end
+ end
+
+ context 'when exit_codes are not defined' do
+ it_behaves_like 'drops the build without changing allow_failure'
+ end
+
+ context 'when allow_failure_criteria is nil' do
+ let(:options) { { allow_failure_criteria: nil } }
+
+ it_behaves_like 'drops the build without changing allow_failure'
+ end
+
+ context 'when exit_codes is nil' do
+ let(:options) do
+ {
+ allow_failure_criteria: {
+ exit_codes: nil
+ }
+ }
+ end
+
+ it_behaves_like 'drops the build without changing allow_failure'
+ end
+
+ context 'when exit_codes do not match' do
+ let(:options) do
+ {
+ allow_failure_criteria: {
+ exit_codes: [2, 3, 4]
+ }
+ }
+ end
+
+ it_behaves_like 'drops the build without changing allow_failure'
+ end
+
+ context 'with matching exit codes' do
+ let(:options) do
+ { allow_failure_criteria: { exit_codes: [1, 2, 3] } }
+ end
+
+ it 'changes allow_failure' do
+ expect { drop_with_exit_code }
+ .to change { build.reload.allow_failure }
+ end
+
+ it 'drops the build' do
+ expect { drop_with_exit_code }
+ .to change { build.reload.failed? }
+ end
+
+ it 'is executed inside a transaction' do
+ expect(build).to receive(:drop!)
+ .with(:unknown_failure)
+ .and_raise(ActiveRecord::Rollback)
+
+ expect(build).to receive(:conditionally_allow_failure!)
+ .with(1)
+ .and_call_original
+
+ expect { drop_with_exit_code }
+ .not_to change { build.reload.allow_failure }
+ end
+
+ context 'when exit_code is nil' do
+ let(:exit_code) {}
+
+ it_behaves_like 'drops the build without changing allow_failure'
+ end
+
+ context 'when ci_allow_failure_with_exit_codes is disabled' do
+ before do
+ stub_feature_flags(ci_allow_failure_with_exit_codes: false)
+ end
+
+ it_behaves_like 'drops the build without changing allow_failure'
+ end
+ end
+ end
+
+ describe '#exit_codes_defined?' do
+ let(:options) { {} }
+
+ before do
+ build.options.merge!(options)
+ end
+
+ subject(:exit_codes_defined) do
+ build.exit_codes_defined?
+ end
+
+ context 'without allow_failure_criteria' do
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when exit_codes is nil' do
+ let(:options) do
+ {
+ allow_failure_criteria: {
+ exit_codes: nil
+ }
+ }
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when exit_codes is an empty array' do
+ let(:options) do
+ {
+ allow_failure_criteria: {
+ exit_codes: []
+ }
+ }
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when exit_codes are defined' do
+ let(:options) do
+ {
+ allow_failure_criteria: {
+ exit_codes: [5, 6]
+ }
+ }
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
end
diff --git a/spec/models/commit_with_pipeline_spec.rb b/spec/models/ci/commit_with_pipeline_spec.rb
index c4b6deebae0..4dd288bde62 100644
--- a/spec/models/commit_with_pipeline_spec.rb
+++ b/spec/models/ci/commit_with_pipeline_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe CommitWithPipeline do
+RSpec.describe Ci::CommitWithPipeline do
let(:project) { create(:project, :public, :repository) }
let(:commit) { described_class.new(project.commit) }
diff --git a/spec/models/ci/group_spec.rb b/spec/models/ci/group_spec.rb
index c20b7e61044..6c96e659a34 100644
--- a/spec/models/ci/group_spec.rb
+++ b/spec/models/ci/group_spec.rb
@@ -54,6 +54,18 @@ RSpec.describe Ci::Group do
.to be_a(Gitlab::Ci::Status::Failed)
end
end
+
+ context 'when one of the commit statuses in the group is allowed to fail' do
+ let(:jobs) do
+ [create(:ci_build, :failed, :allowed_to_fail),
+ create(:ci_build, :success)]
+ end
+
+ it 'fabricates a new detailed status object' do
+ expect(subject.detailed_status(double(:user)))
+ .to be_a(Gitlab::Ci::Status::SuccessWarning)
+ end
+ end
end
describe '.fabricate' do
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index ef21ca8f100..796947be4c8 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -219,6 +219,39 @@ RSpec.describe Ci::JobArtifact do
end
end
+ describe '.unlocked' do
+ let_it_be(:job_artifact) { create(:ci_job_artifact) }
+
+ context 'with locked pipelines' do
+ before do
+ job_artifact.job.pipeline.artifacts_locked!
+ end
+
+ it 'returns an empty array' do
+ expect(described_class.unlocked).to be_empty
+ end
+ end
+
+ context 'with unlocked pipelines' do
+ before do
+ job_artifact.job.pipeline.unlocked!
+ end
+
+ it 'returns the artifact' do
+ expect(described_class.unlocked).to eq([job_artifact])
+ end
+ end
+ end
+
+ describe '.order_expired_desc' do
+ let_it_be(:first_artifact) { create(:ci_job_artifact, expire_at: 2.days.ago) }
+ let_it_be(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
+
+ it 'returns ordered artifacts' do
+ expect(described_class.order_expired_desc).to eq([second_artifact, first_artifact])
+ end
+ end
+
describe 'callbacks' do
describe '#schedule_background_upload' do
subject { create(:ci_job_artifact, :archive) }
diff --git a/spec/models/ci/ref_spec.rb b/spec/models/ci/ref_spec.rb
index cb62646532c..0a9cd5ef2ec 100644
--- a/spec/models/ci/ref_spec.rb
+++ b/spec/models/ci/ref_spec.rb
@@ -16,35 +16,49 @@ RSpec.describe Ci::Ref do
stub_const('Ci::PipelineSuccessUnlockArtifactsWorker', unlock_artifacts_worker_spy)
end
- where(:initial_state, :action, :count) do
- :unknown | :succeed! | 1
- :unknown | :do_fail! | 0
- :success | :succeed! | 1
- :success | :do_fail! | 0
- :failed | :succeed! | 1
- :failed | :do_fail! | 0
- :fixed | :succeed! | 1
- :fixed | :do_fail! | 0
- :broken | :succeed! | 1
- :broken | :do_fail! | 0
- :still_failing | :succeed | 1
- :still_failing | :do_fail | 0
- end
+ context 'pipeline is locked' do
+ let!(:pipeline) { create(:ci_pipeline, ci_ref_id: ci_ref.id, locked: :artifacts_locked) }
+
+ where(:initial_state, :action, :count) do
+ :unknown | :succeed! | 1
+ :unknown | :do_fail! | 0
+ :success | :succeed! | 1
+ :success | :do_fail! | 0
+ :failed | :succeed! | 1
+ :failed | :do_fail! | 0
+ :fixed | :succeed! | 1
+ :fixed | :do_fail! | 0
+ :broken | :succeed! | 1
+ :broken | :do_fail! | 0
+ :still_failing | :succeed | 1
+ :still_failing | :do_fail | 0
+ end
- with_them do
- context "when transitioning states" do
- before do
- status_value = Ci::Ref.state_machines[:status].states[initial_state].value
- ci_ref.update!(status: status_value)
- end
+ with_them do
+ context "when transitioning states" do
+ before do
+ status_value = Ci::Ref.state_machines[:status].states[initial_state].value
+ ci_ref.update!(status: status_value)
+ end
- it 'calls unlock artifacts service' do
- ci_ref.send(action)
+ it 'calls unlock artifacts service' do
+ ci_ref.send(action)
- expect(unlock_artifacts_worker_spy).to have_received(:perform_async).exactly(count).times
+ expect(unlock_artifacts_worker_spy).to have_received(:perform_async).exactly(count).times
+ end
end
end
end
+
+ context 'pipeline is unlocked' do
+ let!(:pipeline) { create(:ci_pipeline, ci_ref_id: ci_ref.id, locked: :unlocked) }
+
+ it 'does not call unlock artifacts service' do
+ ci_ref.succeed!
+
+ expect(unlock_artifacts_worker_spy).not_to have_received(:perform_async)
+ end
+ end
end
end
diff --git a/spec/models/clusters/applications/knative_spec.rb b/spec/models/clusters/applications/knative_spec.rb
index 41b4ec86233..d0e470bfa42 100644
--- a/spec/models/clusters/applications/knative_spec.rb
+++ b/spec/models/clusters/applications/knative_spec.rb
@@ -150,7 +150,7 @@ RSpec.describe Clusters::Applications::Knative do
subject { knative.install_command }
it 'is initialized with latest version' do
- expect(subject.version).to eq('0.9.0')
+ expect(subject.version).to eq('0.10.0')
end
it_behaves_like 'a command'
@@ -204,8 +204,8 @@ RSpec.describe Clusters::Applications::Knative do
expect(subject.postdelete).to include(*remove_knative_istio_leftovers_script)
expect(subject.postdelete.size).to eq(full_delete_commands_size)
- expect(subject.postdelete[2]).to eq("kubectl api-resources -o name --api-group #{api_groups[0]} | xargs kubectl delete --ignore-not-found crd")
- expect(subject.postdelete[3]).to eq("kubectl api-resources -o name --api-group #{api_groups[1]} | xargs kubectl delete --ignore-not-found crd")
+ expect(subject.postdelete[2]).to include("kubectl api-resources -o name --api-group #{api_groups[0]} | xargs -r kubectl delete --ignore-not-found crd")
+ expect(subject.postdelete[3]).to include("kubectl api-resources -o name --api-group #{api_groups[1]} | xargs -r kubectl delete --ignore-not-found crd")
end
end
diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb
index cfa87b3e39e..acbabee9383 100644
--- a/spec/models/commit_spec.rb
+++ b/spec/models/commit_spec.rb
@@ -428,6 +428,19 @@ eos
allow(commit).to receive(:safe_message).and_return(message)
expect(commit.description).to eq(message)
end
+
+ it 'truncates html representation if more than 1MiB' do
+ # Commit message is over 2MiB
+ huge_commit_message = ['panic', ('panic ' * 350000), 'trailing text'].join("\n")
+
+ allow(commit).to receive(:safe_message).and_return(huge_commit_message)
+
+ commit.refresh_markdown_cache
+ description_html = commit.description_html
+
+ expect(description_html.bytesize).to be < 2.megabytes
+ expect(description_html).not_to include('trailing text')
+ end
end
describe "delegation" do
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index 9824eb91bc7..532f68c2f18 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -61,6 +61,22 @@ RSpec.describe CommitStatus do
expect(commit_status.started_at).to be_present
end
end
+
+ describe 'transitioning to created from skipped or manual' do
+ let(:commit_status) { create(:commit_status, :skipped) }
+
+ it 'does not update user without parameter' do
+ commit_status.process!
+
+ expect { commit_status.process }.not_to change { commit_status.reload.user }
+ end
+
+ it 'updates user with user parameter' do
+ new_user = create(:user)
+
+ expect { commit_status.process(new_user) }.to change { commit_status.reload.user }.to(new_user)
+ end
+ end
end
describe '#processed' do
@@ -503,6 +519,9 @@ RSpec.describe CommitStatus do
subject { commit_status.group_name }
where(:name, :group_name) do
+ 'rspec1' | 'rspec1'
+ 'rspec1 0 1' | 'rspec1'
+ 'rspec1 0/2' | 'rspec1'
'rspec:windows' | 'rspec:windows'
'rspec:windows 0' | 'rspec:windows 0'
'rspec:windows 0 test' | 'rspec:windows 0 test'
diff --git a/spec/models/concerns/ci/artifactable_spec.rb b/spec/models/concerns/ci/artifactable_spec.rb
index f05189abdd2..ebc838e86a6 100644
--- a/spec/models/concerns/ci/artifactable_spec.rb
+++ b/spec/models/concerns/ci/artifactable_spec.rb
@@ -54,4 +54,23 @@ RSpec.describe Ci::Artifactable do
end
end
end
+
+ context 'ActiveRecord scopes' do
+ let_it_be(:recently_expired_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
+ let_it_be(:later_expired_artifact) { create(:ci_job_artifact, expire_at: 2.days.ago) }
+ let_it_be(:not_expired_artifact) { create(:ci_job_artifact, expire_at: 1.day.from_now) }
+
+ describe '.expired_before' do
+ it 'returns expired artifacts' do
+ expect(Ci::JobArtifact.expired_before(1.hour.ago))
+ .to match_array([recently_expired_artifact, later_expired_artifact])
+ end
+ end
+
+ describe '.expired' do
+ it 'returns a limited number of expired artifacts' do
+ expect(Ci::JobArtifact.expired(1).order_id_asc).to eq([recently_expired_artifact])
+ end
+ end
+ end
end
diff --git a/spec/models/concerns/each_batch_spec.rb b/spec/models/concerns/each_batch_spec.rb
index 8b70753633c..5f4e5d4bd98 100644
--- a/spec/models/concerns/each_batch_spec.rb
+++ b/spec/models/concerns/each_batch_spec.rb
@@ -56,5 +56,21 @@ RSpec.describe EachBatch do
it_behaves_like 'each_batch handling', {}
it_behaves_like 'each_batch handling', { order_hint: :updated_at }
+
+ it 'orders ascending by default' do
+ ids = []
+
+ model.each_batch(of: 1) { |rel| ids.concat(rel.ids) }
+
+ expect(ids).to eq(ids.sort)
+ end
+
+ it 'accepts descending order' do
+ ids = []
+
+ model.each_batch(of: 1, order: :desc) { |rel| ids.concat(rel.ids) }
+
+ expect(ids).to eq(ids.sort.reverse)
+ end
end
end
diff --git a/spec/models/concerns/milestoneable_spec.rb b/spec/models/concerns/milestoneable_spec.rb
index c37582cb65d..5fb3b39f734 100644
--- a/spec/models/concerns/milestoneable_spec.rb
+++ b/spec/models/concerns/milestoneable_spec.rb
@@ -104,8 +104,8 @@ RSpec.describe Milestoneable do
context "for incidents" do
let(:incident) { build(:incident) }
- it 'returns false' do
- expect(incident.supports_milestone?).to be_falsy
+ it 'returns true' do
+ expect(incident.supports_milestone?).to be_truthy
end
end
end
diff --git a/spec/models/concerns/noteable_spec.rb b/spec/models/concerns/noteable_spec.rb
index bb7374bf46c..a7117af81a2 100644
--- a/spec/models/concerns/noteable_spec.rb
+++ b/spec/models/concerns/noteable_spec.rb
@@ -25,8 +25,8 @@ RSpec.describe Noteable do
let(:active_position2) do
Gitlab::Diff::Position.new(
- old_path: "files/ruby/popen.rb",
- new_path: "files/ruby/popen.rb",
+ old_path: 'files/ruby/popen.rb',
+ new_path: 'files/ruby/popen.rb',
old_line: 16,
new_line: 22,
diff_refs: subject.diff_refs
@@ -35,11 +35,11 @@ RSpec.describe Noteable do
let(:outdated_position) do
Gitlab::Diff::Position.new(
- old_path: "files/ruby/popen.rb",
- new_path: "files/ruby/popen.rb",
+ old_path: 'files/ruby/popen.rb',
+ new_path: 'files/ruby/popen.rb',
old_line: nil,
new_line: 9,
- diff_refs: project.commit("874797c3a73b60d2187ed6e2fcabd289ff75171e").diff_refs
+ diff_refs: project.commit('874797c3a73b60d2187ed6e2fcabd289ff75171e').diff_refs
)
end
@@ -80,7 +80,7 @@ RSpec.describe Noteable do
describe '#grouped_diff_discussions' do
let(:grouped_diff_discussions) { subject.grouped_diff_discussions }
- it "includes active discussions" do
+ it 'includes active discussions' do
discussions = grouped_diff_discussions.values.flatten
expect(discussions.count).to eq(2)
@@ -91,17 +91,17 @@ RSpec.describe Noteable do
expect(discussions.last.notes).to eq([active_diff_note3])
end
- it "doesn't include outdated discussions" do
+ it 'does not include outdated discussions' do
expect(grouped_diff_discussions.values.flatten.map(&:id)).not_to include(outdated_diff_note1.discussion_id)
end
- it "groups the discussions by line code" do
+ it 'groups the discussions by line code' do
expect(grouped_diff_discussions[active_diff_note1.line_code].first.id).to eq(active_diff_note1.discussion_id)
expect(grouped_diff_discussions[active_diff_note3.line_code].first.id).to eq(active_diff_note3.discussion_id)
end
end
- context "discussion status" do
+ context 'discussion status' do
let(:first_discussion) { build_stubbed(:discussion_note_on_merge_request, noteable: subject, project: project).to_discussion }
let(:second_discussion) { build_stubbed(:discussion_note_on_merge_request, noteable: subject, project: project).to_discussion }
let(:third_discussion) { build_stubbed(:discussion_note_on_merge_request, noteable: subject, project: project).to_discussion }
@@ -110,56 +110,56 @@ RSpec.describe Noteable do
allow(subject).to receive(:resolvable_discussions).and_return([first_discussion, second_discussion, third_discussion])
end
- describe "#discussions_resolvable?" do
- context "when all discussions are unresolvable" do
+ describe '#discussions_resolvable?' do
+ context 'when all discussions are unresolvable' do
before do
allow(first_discussion).to receive(:resolvable?).and_return(false)
allow(second_discussion).to receive(:resolvable?).and_return(false)
allow(third_discussion).to receive(:resolvable?).and_return(false)
end
- it "returns false" do
+ it 'returns false' do
expect(subject.discussions_resolvable?).to be false
end
end
- context "when some discussions are unresolvable and some discussions are resolvable" do
+ context 'when some discussions are unresolvable and some discussions are resolvable' do
before do
allow(first_discussion).to receive(:resolvable?).and_return(true)
allow(second_discussion).to receive(:resolvable?).and_return(false)
allow(third_discussion).to receive(:resolvable?).and_return(true)
end
- it "returns true" do
+ it 'returns true' do
expect(subject.discussions_resolvable?).to be true
end
end
- context "when all discussions are resolvable" do
+ context 'when all discussions are resolvable' do
before do
allow(first_discussion).to receive(:resolvable?).and_return(true)
allow(second_discussion).to receive(:resolvable?).and_return(true)
allow(third_discussion).to receive(:resolvable?).and_return(true)
end
- it "returns true" do
+ it 'returns true' do
expect(subject.discussions_resolvable?).to be true
end
end
end
- describe "#discussions_resolved?" do
- context "when discussions are not resolvable" do
+ describe '#discussions_resolved?' do
+ context 'when discussions are not resolvable' do
before do
allow(subject).to receive(:discussions_resolvable?).and_return(false)
end
- it "returns false" do
+ it 'returns false' do
expect(subject.discussions_resolved?).to be false
end
end
- context "when discussions are resolvable" do
+ context 'when discussions are resolvable' do
before do
allow(subject).to receive(:discussions_resolvable?).and_return(true)
@@ -168,31 +168,31 @@ RSpec.describe Noteable do
allow(third_discussion).to receive(:resolvable?).and_return(true)
end
- context "when all resolvable discussions are resolved" do
+ context 'when all resolvable discussions are resolved' do
before do
allow(first_discussion).to receive(:resolved?).and_return(true)
allow(third_discussion).to receive(:resolved?).and_return(true)
end
- it "returns true" do
+ it 'returns true' do
expect(subject.discussions_resolved?).to be true
end
end
- context "when some resolvable discussions are not resolved" do
+ context 'when some resolvable discussions are not resolved' do
before do
allow(first_discussion).to receive(:resolved?).and_return(true)
allow(third_discussion).to receive(:resolved?).and_return(false)
end
- it "returns false" do
+ it 'returns false' do
expect(subject.discussions_resolved?).to be false
end
end
end
end
- describe "#discussions_to_be_resolved" do
+ describe '#discussions_to_be_resolved' do
before do
allow(first_discussion).to receive(:to_be_resolved?).and_return(true)
allow(second_discussion).to receive(:to_be_resolved?).and_return(false)
@@ -245,6 +245,12 @@ RSpec.describe Noteable do
end
end
+ describe '.email_creatable_types' do
+ it 'exposes the email creatable types' do
+ expect(described_class.email_creatable_types).to include('Issue')
+ end
+ end
+
describe '#capped_notes_count' do
context 'notes number < 10' do
it 'the number of notes is returned' do
@@ -263,13 +269,13 @@ RSpec.describe Noteable do
end
end
- describe "#has_any_diff_note_positions?" do
- let(:source_branch) { "compare-with-merge-head-source" }
- let(:target_branch) { "compare-with-merge-head-target" }
+ describe '#has_any_diff_note_positions?' do
+ let(:source_branch) { 'compare-with-merge-head-source' }
+ let(:target_branch) { 'compare-with-merge-head-target' }
let(:merge_request) { create(:merge_request, source_branch: source_branch, target_branch: target_branch) }
let!(:note) do
- path = "files/markdown/ruby-style-guide.md"
+ path = 'files/markdown/ruby-style-guide.md'
position = Gitlab::Diff::Position.new(
old_path: path,
@@ -286,20 +292,54 @@ RSpec.describe Noteable do
Discussions::CaptureDiffNotePositionsService.new(merge_request).execute
end
- it "returns true when it has diff note positions" do
+ it 'returns true when it has diff note positions' do
expect(merge_request.has_any_diff_note_positions?).to be(true)
end
- it "returns false when it has notes but no diff note positions" do
+ it 'returns false when it has notes but no diff note positions' do
DiffNotePosition.where(note: note).find_each(&:delete)
expect(merge_request.has_any_diff_note_positions?).to be(false)
end
- it "returns false when it has no notes" do
+ it 'returns false when it has no notes' do
merge_request.notes.find_each(&:destroy)
expect(merge_request.has_any_diff_note_positions?).to be(false)
end
end
+
+ describe '#creatable_note_email_address' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:source_branch) { 'compare-with-merge-head-source' }
+
+ let(:issue) { create(:issue, project: project) }
+ let(:snippet) { build(:snippet) }
+
+ context 'incoming email enabled' do
+ before do
+ stub_incoming_email_setting(enabled: true, address: "p+%{key}@gl.ab")
+ end
+
+ it 'returns the address to create a note' do
+ address = "p+#{project.full_path_slug}-#{project.project_id}-#{user.incoming_email_token}-issue-#{issue.iid}@gl.ab"
+
+ expect(issue.creatable_note_email_address(user)).to eq(address)
+ end
+
+ it 'returns nil for unsupported types' do
+ expect(snippet.creatable_note_email_address(user)).to be_nil
+ end
+ end
+
+ context 'incoming email disabled' do
+ before do
+ stub_incoming_email_setting(enabled: false)
+ end
+
+ it 'returns nil' do
+ expect(issue.creatable_note_email_address(user)).to be_nil
+ end
+ end
+ end
end
diff --git a/spec/models/cycle_analytics/code_spec.rb b/spec/models/cycle_analytics/code_spec.rb
deleted file mode 100644
index ca612cba654..00000000000
--- a/spec/models/cycle_analytics/code_spec.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'CycleAnalytics#code' do
- extend CycleAnalyticsHelpers::TestGeneration
-
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:from_date) { 10.days.ago }
- let_it_be(:user) { project.owner }
- let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date, current_user: user }) }
-
- subject { project_level }
-
- context 'with deployment' do
- generate_cycle_analytics_spec(
- phase: :code,
- data_fn: -> (context) { { issue: context.create(:issue, project: context.project) } },
- start_time_conditions: [["issue mentioned in a commit",
- -> (context, data) do
- context.create_commit_referencing_issue(data[:issue])
- end]],
- end_time_conditions: [["merge request that closes issue is created",
- -> (context, data) do
- context.create_merge_request_closing_issue(context.user, context.project, data[:issue])
- end]],
- post_fn: -> (context, data) do
- end)
-
- context "when a regular merge request (that doesn't close the issue) is created" do
- it "returns nil" do
- issue = create(:issue, project: project)
-
- create_commit_referencing_issue(issue)
- create_merge_request_closing_issue(user, project, issue, message: "Closes nothing")
-
- merge_merge_requests_closing_issue(user, project, issue)
- deploy_master(user, project)
-
- expect(subject[:code].project_median).to be_nil
- end
- end
- end
-
- context 'without deployment' do
- generate_cycle_analytics_spec(
- phase: :code,
- data_fn: -> (context) { { issue: context.create(:issue, project: context.project) } },
- start_time_conditions: [["issue mentioned in a commit",
- -> (context, data) do
- context.create_commit_referencing_issue(data[:issue])
- end]],
- end_time_conditions: [["merge request that closes issue is created",
- -> (context, data) do
- context.create_merge_request_closing_issue(context.user, context.project, data[:issue])
- end]],
- post_fn: -> (context, data) do
- end)
-
- context "when a regular merge request (that doesn't close the issue) is created" do
- it "returns nil" do
- issue = create(:issue, project: project)
-
- create_commit_referencing_issue(issue)
- create_merge_request_closing_issue(user, project, issue, message: "Closes nothing")
-
- merge_merge_requests_closing_issue(user, project, issue)
-
- expect(subject[:code].project_median).to be_nil
- end
- end
- end
-end
diff --git a/spec/models/cycle_analytics/issue_spec.rb b/spec/models/cycle_analytics/issue_spec.rb
deleted file mode 100644
index 66d21f6925f..00000000000
--- a/spec/models/cycle_analytics/issue_spec.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'CycleAnalytics#issue' do
- extend CycleAnalyticsHelpers::TestGeneration
-
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:from_date) { 10.days.ago }
- let_it_be(:user) { project.owner }
- let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date, current_user: user }) }
-
- subject { project_level }
-
- generate_cycle_analytics_spec(
- phase: :issue,
- data_fn: -> (context) { { issue: context.build(:issue, project: context.project) } },
- start_time_conditions: [["issue created", -> (context, data) { data[:issue].save! }]],
- end_time_conditions: [["issue associated with a milestone",
- -> (context, data) do
- if data[:issue].persisted?
- data[:issue].update!(milestone: context.create(:milestone, project: context.project))
- end
- end],
- ["list label added to issue",
- -> (context, data) do
- if data[:issue].persisted?
- data[:issue].update!(label_ids: [context.create(:list).label_id])
- end
- end]],
- post_fn: -> (context, data) do
- end)
-
- context "when a regular label (instead of a list label) is added to the issue" do
- it "returns nil" do
- regular_label = create(:label)
- issue = create(:issue, project: project)
- issue.update!(label_ids: [regular_label.id])
-
- create_merge_request_closing_issue(user, project, issue)
- merge_merge_requests_closing_issue(user, project, issue)
-
- expect(subject[:issue].project_median).to be_nil
- end
- end
-end
diff --git a/spec/models/cycle_analytics/plan_spec.rb b/spec/models/cycle_analytics/plan_spec.rb
deleted file mode 100644
index acaf767db01..00000000000
--- a/spec/models/cycle_analytics/plan_spec.rb
+++ /dev/null
@@ -1,52 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'CycleAnalytics#plan' do
- extend CycleAnalyticsHelpers::TestGeneration
-
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:from_date) { 10.days.ago }
- let_it_be(:user) { project.owner }
- let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date, current_user: user }) }
-
- subject { project_level }
-
- generate_cycle_analytics_spec(
- phase: :plan,
- data_fn: -> (context) do
- {
- issue: context.build(:issue, project: context.project),
- branch_name: context.generate(:branch)
- }
- end,
- start_time_conditions: [["issue associated with a milestone",
- -> (context, data) do
- data[:issue].update!(milestone: context.create(:milestone, project: context.project))
- end],
- ["list label added to issue",
- -> (context, data) do
- data[:issue].update!(label_ids: [context.create(:list).label_id])
- end]],
- end_time_conditions: [["issue mentioned in a commit",
- -> (context, data) do
- context.create_commit_referencing_issue(data[:issue], branch_name: data[:branch_name])
- end]],
- post_fn: -> (context, data) do
- end)
-
- context "when a regular label (instead of a list label) is added to the issue" do
- it "returns nil" do
- branch_name = generate(:branch)
- label = create(:label)
- issue = create(:issue, project: project)
- issue.update!(label_ids: [label.id])
- create_commit_referencing_issue(issue, branch_name: branch_name)
-
- create_merge_request_closing_issue(user, project, issue, source_branch: branch_name)
- merge_merge_requests_closing_issue(user, project, issue)
-
- expect(subject[:issue].project_median).to be_nil
- end
- end
-end
diff --git a/spec/models/cycle_analytics/project_level_spec.rb b/spec/models/cycle_analytics/project_level_spec.rb
deleted file mode 100644
index c2d421c03d8..00000000000
--- a/spec/models/cycle_analytics/project_level_spec.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe CycleAnalytics::ProjectLevel do
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:from_date) { 10.days.ago }
- let_it_be(:user) { project.owner }
- let_it_be(:issue) { create(:issue, project: project, created_at: 2.days.ago) }
- let_it_be(:milestone) { create(:milestone, project: project) }
- let(:mr) { create_merge_request_closing_issue(user, project, issue, commit_message: "References #{issue.to_reference}") }
- let(:pipeline) { create(:ci_empty_pipeline, status: 'created', project: project, ref: mr.source_branch, sha: mr.source_branch_sha, head_pipeline_of: mr) }
-
- subject { described_class.new(project, options: { from: from_date }) }
-
- describe '#all_medians_by_stage' do
- before do
- allow_next_instance_of(Gitlab::ReferenceExtractor) do |instance|
- allow(instance).to receive(:issues).and_return([issue])
- end
-
- create_cycle(user, project, issue, mr, milestone, pipeline)
- deploy_master(user, project)
- end
-
- it 'returns every median for each stage for a specific project' do
- values = described_class::STAGES.each_with_object({}) do |stage_name, hsh|
- hsh[stage_name] = subject[stage_name].project_median.presence
- end
-
- expect(subject.all_medians_by_stage).to eq(values)
- end
- end
-end
diff --git a/spec/models/cycle_analytics/project_level_stage_adapter_spec.rb b/spec/models/cycle_analytics/project_level_stage_adapter_spec.rb
new file mode 100644
index 00000000000..9bdee292938
--- /dev/null
+++ b/spec/models/cycle_analytics/project_level_stage_adapter_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe CycleAnalytics::ProjectLevelStageAdapter, type: :model do
+ let_it_be(:stage_name) { :review } # pre-defined, default stage
+ let_it_be(:merge_request) do
+ create(:merge_request, created_at: 5.hours.ago).tap do |mr|
+ mr.metrics.update!(merged_at: mr.created_at + 1.hour)
+ end
+ end
+
+ let_it_be(:project) { merge_request.target_project }
+
+ let(:stage) do
+ params = Gitlab::Analytics::CycleAnalytics::DefaultStages.find_by_name!(stage_name).merge(project: project)
+ Analytics::CycleAnalytics::ProjectStage.new(params)
+ end
+
+ around do |example|
+ freeze_time { example.run }
+ end
+
+ subject { described_class.new(stage, from: 1.month.ago, to: Time.zone.now, current_user: merge_request.author) }
+
+ it 'calculates median' do
+ expect(subject.median).to be_within(1.hour).of(1.hour)
+ end
+
+ it 'lists events' do
+ expect(subject.events.size).to eq(1)
+ expect(subject.events.first[:title]).to eq(merge_request.title)
+ end
+
+ it 'presents the data as json' do
+ expect(subject.as_json).to include({ title: 'Review', value: 'about 1 hour' })
+ end
+end
diff --git a/spec/models/cycle_analytics/review_spec.rb b/spec/models/cycle_analytics/review_spec.rb
deleted file mode 100644
index 06d9cfbf8c0..00000000000
--- a/spec/models/cycle_analytics/review_spec.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'CycleAnalytics#review' do
- extend CycleAnalyticsHelpers::TestGeneration
-
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:from_date) { 10.days.ago }
- let_it_be(:user) { project.owner }
-
- subject { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date, current_user: user }) }
-
- generate_cycle_analytics_spec(
- phase: :review,
- data_fn: -> (context) { { issue: context.create(:issue, project: context.project) } },
- start_time_conditions: [["merge request that closes issue is created",
- -> (context, data) do
- context.create_merge_request_closing_issue(context.user, context.project, data[:issue])
- end]],
- end_time_conditions: [["merge request that closes issue is merged",
- -> (context, data) do
- context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue])
- end]],
- post_fn: nil)
-
- context "when a regular merge request (that doesn't close the issue) is created and merged" do
- it "returns nil" do
- MergeRequests::MergeService.new(project, user).execute(create(:merge_request))
-
- expect(subject[:review].project_median).to be_nil
- end
- end
-end
diff --git a/spec/models/cycle_analytics/staging_spec.rb b/spec/models/cycle_analytics/staging_spec.rb
deleted file mode 100644
index 50cb49d6309..00000000000
--- a/spec/models/cycle_analytics/staging_spec.rb
+++ /dev/null
@@ -1,56 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'CycleAnalytics#staging' do
- extend CycleAnalyticsHelpers::TestGeneration
-
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:from_date) { 10.days.ago }
- let_it_be(:user) { project.owner }
- let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date, current_user: user }) }
-
- subject { project_level }
-
- generate_cycle_analytics_spec(
- phase: :staging,
- data_fn: lambda do |context|
- issue = context.create(:issue, project: context.project)
- { issue: issue, merge_request: context.create_merge_request_closing_issue(context.user, context.project, issue) }
- end,
- start_time_conditions: [["merge request that closes issue is merged",
- -> (context, data) do
- context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue])
- end]],
- end_time_conditions: [["merge request that closes issue is deployed to production",
- -> (context, data) do
- context.deploy_master(context.user, context.project)
- end],
- ["production deploy happens after merge request is merged (along with other changes)",
- lambda do |context, data|
- # Make other changes on master
- context.project.repository.commit("this_sha_apparently_does_not_matter")
- context.deploy_master(context.user, context.project)
- end]])
-
- context "when a regular merge request (that doesn't close the issue) is merged and deployed" do
- it "returns nil" do
- merge_request = create(:merge_request)
- MergeRequests::MergeService.new(project, user).execute(merge_request)
- deploy_master(user, project)
-
- expect(subject[:staging].project_median).to be_nil
- end
- end
-
- context "when the deployment happens to a non-production environment" do
- it "returns nil" do
- issue = create(:issue, project: project)
- merge_request = create_merge_request_closing_issue(user, project, issue)
- MergeRequests::MergeService.new(project, user).execute(merge_request)
- deploy_master(user, project, environment: 'staging')
-
- expect(subject[:staging].project_median).to be_nil
- end
- end
-end
diff --git a/spec/models/cycle_analytics/test_spec.rb b/spec/models/cycle_analytics/test_spec.rb
deleted file mode 100644
index 8f65c047b15..00000000000
--- a/spec/models/cycle_analytics/test_spec.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'CycleAnalytics#test' do
- extend CycleAnalyticsHelpers::TestGeneration
-
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:from_date) { 10.days.ago }
- let_it_be(:user) { project.owner }
- let_it_be(:issue) { create(:issue, project: project) }
- let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date, current_user: user }) }
- let!(:merge_request) { create_merge_request_closing_issue(user, project, issue) }
-
- subject { project_level }
-
- generate_cycle_analytics_spec(
- phase: :test,
- data_fn: lambda do |context|
- issue = context.issue
- merge_request = context.create_merge_request_closing_issue(context.user, context.project, issue)
- pipeline = context.create(:ci_pipeline, ref: merge_request.source_branch, sha: merge_request.diff_head_sha, project: context.project, head_pipeline_of: merge_request)
- { pipeline: pipeline, issue: issue }
- end,
- start_time_conditions: [["pipeline is started", -> (context, data) { data[:pipeline].run! }]],
- end_time_conditions: [["pipeline is finished", -> (context, data) { data[:pipeline].succeed! }]],
- post_fn: -> (context, data) do
- end)
-
- context "when the pipeline is for a regular merge request (that doesn't close an issue)" do
- it "returns nil" do
- pipeline = create(:ci_pipeline, ref: "refs/heads/#{merge_request.source_branch}", sha: merge_request.diff_head_sha)
-
- pipeline.run!
- pipeline.succeed!
-
- expect(subject[:test].project_median).to be_nil
- end
- end
-
- context "when the pipeline is not for a merge request" do
- it "returns nil" do
- pipeline = create(:ci_pipeline, ref: "refs/heads/master", sha: project.repository.commit('master').sha)
-
- pipeline.run!
- pipeline.succeed!
-
- expect(subject[:test].project_median).to be_nil
- end
- end
-
- context "when the pipeline is dropped (failed)" do
- it "returns nil" do
- pipeline = create(:ci_pipeline, ref: "refs/heads/#{merge_request.source_branch}", sha: merge_request.diff_head_sha)
-
- pipeline.run!
- pipeline.drop!
-
- expect(subject[:test].project_median).to be_nil
- end
- end
-
- context "when the pipeline is cancelled" do
- it "returns nil" do
- pipeline = create(:ci_pipeline, ref: "refs/heads/#{merge_request.source_branch}", sha: merge_request.diff_head_sha)
-
- pipeline.run!
- pipeline.cancel!
-
- expect(subject[:test].project_median).to be_nil
- end
- end
-end
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index c962b012a4b..5bc61db6d21 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -227,6 +227,56 @@ RSpec.describe Deployment do
deployment.skip!
end
end
+
+ describe 'syncing status to Jira' do
+ let(:deployment) { create(:deployment) }
+
+ let(:worker) { ::JiraConnect::SyncDeploymentsWorker }
+
+ it 'calls the worker on creation' do
+ expect(worker).to receive(:perform_async).with(Integer)
+
+ deployment
+ end
+
+ it 'does not call the worker for skipped deployments' do
+ expect(deployment).to be_present # warm-up, ignore the creation trigger
+
+ expect(worker).not_to receive(:perform_async)
+
+ deployment.skip!
+ end
+
+ %i[run! succeed! drop! cancel!].each do |event|
+ context "when we call pipeline.#{event}" do
+ it 'triggers a Jira sync worker' do
+ expect(worker).to receive(:perform_async).with(deployment.id)
+
+ deployment.send(event)
+ end
+
+ context 'the feature is disabled' do
+ it 'does not trigger a worker' do
+ stub_feature_flags(jira_sync_deployments: false)
+
+ expect(worker).not_to receive(:perform_async)
+
+ deployment.send(event)
+ end
+ end
+
+ context 'the feature is enabled for this project' do
+ it 'does trigger a worker' do
+ stub_feature_flags(jira_sync_deployments: deployment.project)
+
+ expect(worker).to receive(:perform_async)
+
+ deployment.send(event)
+ end
+ end
+ end
+ end
+ end
end
describe '#success?' do
diff --git a/spec/models/experiment_spec.rb b/spec/models/experiment_spec.rb
index 1bf7b8b4850..171bfd116d3 100644
--- a/spec/models/experiment_spec.rb
+++ b/spec/models/experiment_spec.rb
@@ -164,7 +164,7 @@ RSpec.describe Experiment do
context 'when an experiment_user already exists for the given user' do
before do
# Create an existing experiment_user for this experiment and the :control group
- experiment.record_user_and_group(user, :control, context)
+ experiment.record_user_and_group(user, :control)
end
it 'does not create a new experiment_user record' do
@@ -173,15 +173,65 @@ RSpec.describe Experiment do
context 'but the group_type and context has changed' do
let(:group) { :experimental }
- let(:context) { { b: 37 } }
it 'updates the existing experiment_user record with group_type' do
expect { record_user_and_group }.to change { ExperimentUser.last.group_type }
end
+ end
+ end
+
+ context 'when a context already exists' do
+ let_it_be(:context) { { a: 42, 'b' => 34, 'c': { c1: 100, c2: 'c2', e: :e }, d: [1, 3] } }
+ let_it_be(:initial_expected_context) { { 'a' => 42, 'b' => 34, 'c' => { 'c1' => 100, 'c2' => 'c2', 'e' => 'e' }, 'd' => [1, 3] } }
- it 'updates the existing experiment_user record with context' do
+ before do
+ record_user_and_group
+ experiment.record_user_and_group(user, :control, {})
+ end
+
+ it 'has an initial context with stringified keys' do
+ expect(ExperimentUser.last.context).to eq(initial_expected_context)
+ end
+
+ context 'when updated' do
+ before do
record_user_and_group
- expect(ExperimentUser.last.context).to eq({ 'b' => 37 })
+ experiment.record_user_and_group(user, :control, new_context)
+ end
+
+ context 'with an empty context' do
+ let_it_be(:new_context) { {} }
+
+ it 'keeps the initial context' do
+ expect(ExperimentUser.last.context).to eq(initial_expected_context)
+ end
+ end
+
+ context 'with string keys' do
+ let_it_be(:new_context) { { f: :some_symbol } }
+
+ it 'adds new symbols stringified' do
+ expected_context = initial_expected_context.merge('f' => 'some_symbol')
+ expect(ExperimentUser.last.context).to eq(expected_context)
+ end
+ end
+
+ context 'with atomic values or array values' do
+ let_it_be(:new_context) { { b: 97, d: [99] } }
+
+ it 'overrides the values' do
+ expected_context = { 'a' => 42, 'b' => 97, 'c' => { 'c1' => 100, 'c2' => 'c2', 'e' => 'e' }, 'd' => [99] }
+ expect(ExperimentUser.last.context).to eq(expected_context)
+ end
+ end
+
+ context 'with nested hashes' do
+ let_it_be(:new_context) { { c: { g: 107 } } }
+
+ it 'inserts nested additional values in the same keys' do
+ expected_context = initial_expected_context.deep_merge('c' => { 'g' => 107 })
+ expect(ExperimentUser.last.context).to eq(expected_context)
+ end
end
end
end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index cc8e744a15c..0acf2b96b74 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -31,6 +31,7 @@ RSpec.describe Group do
it { is_expected.to have_one(:dependency_proxy_setting) }
it { is_expected.to have_many(:dependency_proxy_blobs) }
it { is_expected.to have_many(:dependency_proxy_manifests) }
+ it { is_expected.to have_many(:debian_distributions).class_name('Packages::Debian::GroupDistribution').dependent(:destroy) }
describe '#members & #requesters' do
let(:requester) { create(:user) }
@@ -1751,4 +1752,23 @@ RSpec.describe Group do
it { is_expected.to eq(false) }
end
end
+
+ describe 'with Debian Distributions' do
+ subject { create(:group) }
+
+ let!(:distributions) { create_list(:debian_group_distribution, 2, :with_file, container: subject) }
+
+ it 'removes distribution files on removal' do
+ distribution_file_paths = distributions.map do |distribution|
+ distribution.file.path
+ end
+
+ expect { subject.destroy }
+ .to change {
+ distribution_file_paths.select do |path|
+ File.exist? path
+ end.length
+ }.from(distribution_file_paths.length).to(0)
+ end
+ end
end
diff --git a/spec/models/list_spec.rb b/spec/models/list_spec.rb
index 37158584062..f0b1bc33e84 100644
--- a/spec/models/list_spec.rb
+++ b/spec/models/list_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe List do
it_behaves_like 'having unique enum values'
+ it_behaves_like 'boards listable model', :list
describe 'relationships' do
it { is_expected.to belong_to(:board) }
@@ -14,72 +15,6 @@ RSpec.describe List do
it { is_expected.to validate_presence_of(:board) }
it { is_expected.to validate_presence_of(:label) }
it { is_expected.to validate_presence_of(:list_type) }
- it { is_expected.to validate_presence_of(:position) }
- it { is_expected.to validate_numericality_of(:position).only_integer.is_greater_than_or_equal_to(0) }
-
- context 'when list_type is set to closed' do
- subject { described_class.new(list_type: :closed) }
-
- it { is_expected.not_to validate_presence_of(:label) }
- it { is_expected.not_to validate_presence_of(:position) }
- end
- end
-
- describe '#destroy' do
- it 'can be destroyed when list_type is set to label' do
- subject = create(:list)
-
- expect(subject.destroy).to be_truthy
- end
-
- it 'can not be destroyed when list_type is set to closed' do
- subject = create(:closed_list)
-
- expect(subject.destroy).to be_falsey
- end
- end
-
- describe '#destroyable?' do
- it 'returns true when list_type is set to label' do
- subject.list_type = :label
-
- expect(subject).to be_destroyable
- end
-
- it 'returns false when list_type is set to closed' do
- subject.list_type = :closed
-
- expect(subject).not_to be_destroyable
- end
- end
-
- describe '#movable?' do
- it 'returns true when list_type is set to label' do
- subject.list_type = :label
-
- expect(subject).to be_movable
- end
-
- it 'returns false when list_type is set to closed' do
- subject.list_type = :closed
-
- expect(subject).not_to be_movable
- end
- end
-
- describe '#title' do
- it 'returns label name when list_type is set to label' do
- subject.list_type = :label
- subject.label = Label.new(name: 'Development')
-
- expect(subject.title).to eq 'Development'
- end
-
- it 'returns Closed when list_type is set to closed' do
- subject.list_type = :closed
-
- expect(subject.title).to eq 'Closed'
- end
end
describe '#update_preferences_for' do
diff --git a/spec/models/members/group_member_spec.rb b/spec/models/members/group_member_spec.rb
index 2b24e2d6455..3d3ed6fc54a 100644
--- a/spec/models/members/group_member_spec.rb
+++ b/spec/models/members/group_member_spec.rb
@@ -123,4 +123,16 @@ RSpec.describe GroupMember do
end
end
end
+
+ context 'when group member expiration date is updated' do
+ let_it_be(:group_member) { create(:group_member) }
+
+ it 'emails the user that their group membership expiry has changed' do
+ expect_next_instance_of(NotificationService) do |notification|
+ allow(notification).to receive(:updated_group_member_expiration).with(group_member)
+ end
+
+ group_member.update!(expires_at: 5.days.from_now)
+ end
+ end
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 431a60a11a5..1cf197322f5 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -411,6 +411,48 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
+ describe '.by_squash_commit_sha' do
+ subject { described_class.by_squash_commit_sha(sha) }
+
+ let(:sha) { '123abc' }
+ let(:merge_request) { create(:merge_request, :merged, squash_commit_sha: sha) }
+
+ it 'returns merge requests that match the given squash commit' do
+ is_expected.to eq([merge_request])
+ end
+ end
+
+ describe '.by_related_commit_sha' do
+ subject { described_class.by_related_commit_sha(sha) }
+
+ context 'when commit is a squash commit' do
+ let!(:merge_request) { create(:merge_request, :merged, squash_commit_sha: sha) }
+ let(:sha) { '123abc' }
+
+ it { is_expected.to eq([merge_request]) }
+ end
+
+ context 'when commit is a part of the merge request' do
+ let!(:merge_request) { create(:merge_request, :with_diffs) }
+ let(:sha) { 'b83d6e391c22777fca1ed3012fce84f633d7fed0' }
+
+ it { is_expected.to eq([merge_request]) }
+ end
+
+ context 'when commit is a merge commit' do
+ let!(:merge_request) { create(:merge_request, :merged, merge_commit_sha: sha) }
+ let(:sha) { '123abc' }
+
+ it { is_expected.to eq([merge_request]) }
+ end
+
+ context 'when commit is not found' do
+ let(:sha) { '0000' }
+
+ it { is_expected.to be_empty }
+ end
+ end
+
describe '.by_cherry_pick_sha' do
it 'returns merge requests that match the given merge commit' do
note = create(:track_mr_picking_note, commit_id: '456abc')
@@ -3555,112 +3597,6 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
- describe '#mergeable_with_quick_action?' do
- def create_pipeline(status)
- pipeline = create(:ci_pipeline,
- project: project,
- ref: merge_request.source_branch,
- sha: merge_request.diff_head_sha,
- status: status,
- head_pipeline_of: merge_request)
-
- pipeline
- end
-
- let_it_be(:project) { create(:project, :public, :repository, only_allow_merge_if_pipeline_succeeds: true) }
-
- let(:developer) { create(:user) }
- let(:user) { create(:user) }
- let(:merge_request) { create(:merge_request, source_project: project) }
- let(:mr_sha) { merge_request.diff_head_sha }
-
- before do
- project.add_developer(developer)
- end
-
- context 'when autocomplete_precheck is set to true' do
- it 'is mergeable by developer' do
- expect(merge_request.mergeable_with_quick_action?(developer, autocomplete_precheck: true)).to be_truthy
- end
-
- it 'is not mergeable by normal user' do
- expect(merge_request.mergeable_with_quick_action?(user, autocomplete_precheck: true)).to be_falsey
- end
- end
-
- context 'when autocomplete_precheck is set to false' do
- it 'is mergeable by developer' do
- expect(merge_request.mergeable_with_quick_action?(developer, last_diff_sha: mr_sha)).to be_truthy
- end
-
- it 'is not mergeable by normal user' do
- expect(merge_request.mergeable_with_quick_action?(user, last_diff_sha: mr_sha)).to be_falsey
- end
-
- context 'closed MR' do
- before do
- merge_request.update_attribute(:state_id, described_class.available_states[:closed])
- end
-
- it 'is not mergeable' do
- expect(merge_request.mergeable_with_quick_action?(developer, last_diff_sha: mr_sha)).to be_falsey
- end
- end
-
- context 'MR with WIP' do
- before do
- merge_request.update_attribute(:title, 'WIP: some MR')
- end
-
- it 'is not mergeable' do
- expect(merge_request.mergeable_with_quick_action?(developer, last_diff_sha: mr_sha)).to be_falsey
- end
- end
-
- context 'sha differs from the MR diff_head_sha' do
- it 'is not mergeable' do
- expect(merge_request.mergeable_with_quick_action?(developer, last_diff_sha: 'some other sha')).to be_falsey
- end
- end
-
- context 'sha is not provided' do
- it 'is not mergeable' do
- expect(merge_request.mergeable_with_quick_action?(developer)).to be_falsey
- end
- end
-
- context 'with pipeline ok' do
- before do
- create_pipeline(:success)
- end
-
- it 'is mergeable' do
- expect(merge_request.mergeable_with_quick_action?(developer, last_diff_sha: mr_sha)).to be_truthy
- end
- end
-
- context 'with failing pipeline' do
- before do
- create_pipeline(:failed)
- end
-
- it 'is not mergeable' do
- expect(merge_request.mergeable_with_quick_action?(developer, last_diff_sha: mr_sha)).to be_falsey
- end
- end
-
- context 'with running pipeline' do
- before do
- create_pipeline(:running)
- end
-
- it 'is mergeable' do
- expect(merge_request.mergeable_with_quick_action?(developer, last_diff_sha: mr_sha)).to be_truthy
- end
- end
- end
- end
-
describe '#pipeline_coverage_delta' do
let!(:merge_request) { create(:merge_request) }
diff --git a/spec/models/namespace/package_setting_spec.rb b/spec/models/namespace/package_setting_spec.rb
new file mode 100644
index 00000000000..097cef8ef3b
--- /dev/null
+++ b/spec/models/namespace/package_setting_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Namespace::PackageSetting do
+ describe 'relationships' do
+ it { is_expected.to belong_to(:namespace) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:namespace) }
+
+ describe '#maven_duplicates_allowed' do
+ it { is_expected.to allow_value(true).for(:maven_duplicates_allowed) }
+ it { is_expected.to allow_value(false).for(:maven_duplicates_allowed) }
+ it { is_expected.not_to allow_value(nil).for(:maven_duplicates_allowed) }
+ end
+
+ describe '#maven_duplicate_exception_regex' do
+ let_it_be(:package_settings) { create(:namespace_package_setting) }
+
+ subject { package_settings }
+
+ valid_regexps = %w[SNAPSHOT .* v.+ v10.1.* (?:v.+|SNAPSHOT|TEMP)]
+ invalid_regexps = ['[', '(?:v.+|SNAPSHOT|TEMP']
+
+ valid_regexps.each do |valid_regexp|
+ it { is_expected.to allow_value(valid_regexp).for(:maven_duplicate_exception_regex) }
+ end
+
+ invalid_regexps.each do |invalid_regexp|
+ it { is_expected.not_to allow_value(invalid_regexp).for(:maven_duplicate_exception_regex) }
+ end
+ end
+ end
+
+ describe '#duplicates_allowed?' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { described_class.duplicates_allowed?(package) }
+
+ context 'package types with package_settings' do
+ # As more package types gain settings they will be added to this list
+ [:maven_package].each do |format|
+ let_it_be(:package) { create(format) } # rubocop:disable Rails/SaveBang
+ let_it_be(:package_type) { package.package_type }
+ let_it_be(:package_setting) { package.project.namespace.package_settings }
+
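+ # From the table below: a matching duplicate_exception_regex permits duplicates even when duplicates_allowed is false.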
+ where(:duplicates_allowed, :duplicate_exception_regex, :result) do
+ true | '' | true
+ false | '' | false
+ false | '.*' | true
+ end
+
+ with_them do
+ context "for #{format}" do
+ before do
+ package_setting.update!(
+ "#{package_type}_duplicates_allowed" => duplicates_allowed,
+ "#{package_type}_duplicate_exception_regex" => duplicate_exception_regex
+ )
+ end
+
+ it { is_expected.to be(result) }
+ end
+ end
+ end
+ end
+
+ context 'package types without package_settings' do
+ [:npm_package, :conan_package, :nuget_package, :pypi_package, :composer_package, :generic_package, :golang_package, :debian_package].each do |format|
+ let_it_be(:package) { create(format) } # rubocop:disable Rails/SaveBang
+ let_it_be(:package_setting) { package.project.namespace.package_settings }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(Namespace::PackageSetting::PackageSettingNotImplemented)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/namespace_onboarding_action_spec.rb b/spec/models/namespace_onboarding_action_spec.rb
deleted file mode 100644
index 70dcb989b32..00000000000
--- a/spec/models/namespace_onboarding_action_spec.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe NamespaceOnboardingAction do
- let(:namespace) { build(:namespace) }
-
- describe 'associations' do
- it { is_expected.to belong_to(:namespace).required }
- end
-
- describe 'validations' do
- it { is_expected.to validate_presence_of(:action) }
- end
-
- describe '.completed?' do
- let(:action) { :subscription_created }
-
- subject { described_class.completed?(namespace, action) }
-
- context 'action created for the namespace' do
- before do
- create(:namespace_onboarding_action, namespace: namespace, action: action)
- end
-
- it { is_expected.to eq(true) }
- end
-
- context 'action created for another namespace' do
- before do
- create(:namespace_onboarding_action, namespace: build(:namespace), action: action)
- end
-
- it { is_expected.to eq(false) }
- end
- end
-
- describe '.create_action' do
- let(:action) { :subscription_created }
-
- subject(:create_action) { described_class.create_action(namespace, action) }
-
- it 'creates the action for the namespace just once' do
- expect { create_action }.to change { count_namespace_actions }.by(1)
-
- expect { create_action }.to change { count_namespace_actions }.by(0)
- end
-
- def count_namespace_actions
- described_class.where(namespace: namespace, action: action).count
- end
- end
-end
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 0130618d004..a3c0a43115e 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -19,7 +19,8 @@ RSpec.describe Namespace do
it { is_expected.to have_one :aggregation_schedule }
it { is_expected.to have_one :namespace_settings }
it { is_expected.to have_many :custom_emoji }
- it { is_expected.to have_many :namespace_onboarding_actions }
+ it { is_expected.to have_one :package_setting_relation }
+ it { is_expected.to have_one :onboarding_progress }
end
describe 'validations' do
@@ -1500,4 +1501,24 @@ RSpec.describe Namespace do
end
end
end
+
+ describe '#root?' do
+ subject { namespace.root? }
+
+ context 'when is subgroup' do
+ before do
+ namespace.parent = build(:group)
+ end
+
+ it 'returns false' do
+ is_expected.to eq(false)
+ end
+ end
+
+ context 'when is root' do
+ it 'returns true' do
+ is_expected.to eq(true)
+ end
+ end
+ end
end
diff --git a/spec/models/onboarding_progress_spec.rb b/spec/models/onboarding_progress_spec.rb
new file mode 100644
index 00000000000..bd951846bb8
--- /dev/null
+++ b/spec/models/onboarding_progress_spec.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe OnboardingProgress do
+ let(:namespace) { create(:namespace) }
+ let(:action) { :subscription_created }
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:namespace).required }
+ end
+
+ describe 'validations' do
+ describe 'namespace_is_root_namespace' do
+ subject(:onboarding_progress) { build(:onboarding_progress, namespace: namespace) }
+
+ context 'when associated namespace is root' do
+ it { is_expected.to be_valid }
+ end
+
+ context 'when associated namespace is not root' do
+ let(:namespace) { build(:group, :nested) }
+
+ it 'is invalid' do
+ expect(onboarding_progress).to be_invalid
+ expect(onboarding_progress.errors[:namespace]).to include('must be a root namespace')
+ end
+ end
+ end
+ end
+
+ describe '.onboard' do
+ subject(:onboard) { described_class.onboard(namespace) }
+
+ it 'adds a record for the namespace' do
+ expect { onboard }.to change(described_class, :count).from(0).to(1)
+ end
+
+ context 'when not given a namespace' do
+ let(:namespace) { nil }
+
+ it 'does not add a record for the namespace' do
+ expect { onboard }.not_to change(described_class, :count).from(0)
+ end
+ end
+
+ context 'when not given a root namespace' do
+ let(:namespace) { create(:namespace, parent: build(:namespace)) }
+
+ it 'does not add a record for the namespace' do
+ expect { onboard }.not_to change(described_class, :count).from(0)
+ end
+ end
+ end
+
+ describe '.register' do
+ subject(:register_action) { described_class.register(namespace, action) }
+
+ context 'when the namespace was onboarded' do
+ before do
+ described_class.onboard(namespace)
+ end
+
+ it 'registers the action for the namespace' do
+ expect { register_action }.to change { described_class.completed?(namespace, action) }.from(false).to(true)
+ end
+
+ context 'when the action does not exist' do
+ let(:action) { :foo }
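+ # completed? returns nil for an action that is not defined, hence the .from(nil) below.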
+
+ it 'does not register the action for the namespace' do
+ expect { register_action }.not_to change { described_class.completed?(namespace, action) }.from(nil)
+ end
+ end
+ end
+
+ context 'when the namespace was not onboarded' do
+ it 'does not register the action for the namespace' do
+ expect { register_action }.not_to change { described_class.completed?(namespace, action) }.from(false)
+ end
+ end
+ end
+
+ describe '.completed?' do
+ subject { described_class.completed?(namespace, action) }
+
+ context 'when the namespace has not yet been onboarded' do
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when the namespace has been onboarded but the action has not been registered yet' do
+ before do
+ described_class.onboard(namespace)
+ end
+
+ it { is_expected.to eq(false) }
+
+ context 'when the action has been registered' do
+ before do
+ described_class.register(namespace, action)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+ end
+ end
+end
diff --git a/spec/models/packages/debian/file_metadatum_spec.rb b/spec/models/packages/debian/file_metadatum_spec.rb
new file mode 100644
index 00000000000..1215adfa6a1
--- /dev/null
+++ b/spec/models/packages/debian/file_metadatum_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::FileMetadatum, type: :model do
+ RSpec.shared_context 'Debian file metadatum' do |factory, trait|
+ let_it_be_with_reload(:debian_package_file) { create(factory, trait) }
+ let(:debian_file_metadatum) { debian_package_file.debian_file_metadatum }
+
+ subject { debian_file_metadatum }
+ end
+
+ RSpec.shared_examples 'Test Debian file metadatum' do |has_component, has_architecture, has_fields|
+ describe 'relationships' do
+ it { is_expected.to belong_to(:package_file) }
+ end
+
+ describe 'validations' do
+ describe '#package_file' do
+ it { is_expected.to validate_presence_of(:package_file) }
+ end
+
+ describe '#file_type' do
+ it { is_expected.to validate_presence_of(:file_type) }
+ end
+
+ describe '#component' do
+ it "has_component=#{has_component}" do
+ if has_component
+ is_expected.to validate_presence_of(:component)
+ is_expected.to allow_value('main').for(:component)
+ is_expected.not_to allow_value('hé').for(:component)
+ else
+ is_expected.to validate_absence_of(:component)
+ end
+ end
+ end
+
+ describe '#architecture' do
+ it "has_architecture=#{has_architecture}" do
+ if has_architecture
+ is_expected.to validate_presence_of(:architecture)
+ is_expected.to allow_value('amd64').for(:architecture)
+ is_expected.not_to allow_value('-a').for(:architecture)
+ else
+ is_expected.to validate_absence_of(:architecture)
+ end
+ end
+ end
+
+ describe '#fields' do
+ if has_fields
+ it { is_expected.to validate_presence_of(:fields) }
+ it { is_expected.to allow_value({ 'a': 'b' }).for(:fields) }
+ it { is_expected.not_to allow_value({ 'a': { 'b': 'c' } }).for(:fields) }
+ else
+ it { is_expected.to validate_absence_of(:fields) }
+ end
+ end
+
+ describe '#debian_package_type' do
+ before do
+ debian_package_file.package.package_type = :pypi
+ end
+
+ it 'validates that the package type is Debian' do
+ expect(debian_file_metadatum).not_to be_valid
+ expect(debian_file_metadatum.errors.to_a).to contain_exactly('Package file Package type must be Debian')
+ end
+ end
+ end
+ end
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(:factory, :trait, :has_component, :has_architecture, :has_fields) do
+ :debian_package_file | :unknown | false | false | false
+ :debian_package_file | :source | true | false | false
+ :debian_package_file | :dsc | true | false | true
+ :debian_package_file | :deb | true | true | true
+ :debian_package_file | :udeb | true | true | true
+ :debian_package_file | :buildinfo | true | false | true
+ :debian_package_file | :changes | false | false | true
+ end
+
+ with_them do
+ include_context 'Debian file metadatum', params[:factory], params[:trait] do
+ it_behaves_like 'Test Debian file metadatum', params[:has_component], params[:has_architecture], params[:has_fields]
+ end
+ end
+end
diff --git a/spec/models/packages/debian/group_architecture_spec.rb b/spec/models/packages/debian/group_architecture_spec.rb
new file mode 100644
index 00000000000..a6def6e56cd
--- /dev/null
+++ b/spec/models/packages/debian/group_architecture_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::GroupArchitecture do
+ it_behaves_like 'Debian Distribution Architecture', :debian_group_architecture, :group, false
+end
diff --git a/spec/models/packages/debian/group_distribution_spec.rb b/spec/models/packages/debian/group_distribution_spec.rb
new file mode 100644
index 00000000000..90fb0d0e7d8
--- /dev/null
+++ b/spec/models/packages/debian/group_distribution_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::GroupDistribution do
+ it_behaves_like 'Debian Distribution', :debian_group_distribution, :group, false
+end
diff --git a/spec/models/packages/debian/project_architecture_spec.rb b/spec/models/packages/debian/project_architecture_spec.rb
new file mode 100644
index 00000000000..b82ecabc4d4
--- /dev/null
+++ b/spec/models/packages/debian/project_architecture_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::ProjectArchitecture do
+ it_behaves_like 'Debian Distribution Architecture', :debian_project_architecture, :project, true
+end
diff --git a/spec/models/packages/debian/project_distribution_spec.rb b/spec/models/packages/debian/project_distribution_spec.rb
new file mode 100644
index 00000000000..5f4041ad9fe
--- /dev/null
+++ b/spec/models/packages/debian/project_distribution_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::ProjectDistribution do
+ it_behaves_like 'Debian Distribution', :debian_project_distribution, :project, true
+end
diff --git a/spec/models/packages/package_file_spec.rb b/spec/models/packages/package_file_spec.rb
index 82ac159b9cc..ebb10e991ad 100644
--- a/spec/models/packages/package_file_spec.rb
+++ b/spec/models/packages/package_file_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Packages::PackageFile, type: :model do
it { is_expected.to have_one(:conan_file_metadatum) }
it { is_expected.to have_many(:package_file_build_infos).inverse_of(:package_file) }
it { is_expected.to have_many(:pipelines).through(:package_file_build_infos) }
+ it { is_expected.to have_one(:debian_file_metadatum).inverse_of(:package_file).class_name('Packages::Debian::FileMetadatum') }
end
describe 'validations' do
diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb
index 16764673474..6645db33503 100644
--- a/spec/models/packages/package_spec.rb
+++ b/spec/models/packages/package_spec.rb
@@ -599,6 +599,20 @@ RSpec.describe Packages::Package, type: :model do
end
end
+ describe '.order_by_package_file' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:package1) { create(:maven_package, project: project) }
+ let_it_be(:package2) { create(:maven_package, project: project) }
+
+ it 'orders packages by their associated package_file\'s created_at date', :aggregate_failures do
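+ # Each maven package here has three package files, so each package appears three times in the relation.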
+ expect(project.packages.order_by_package_file).to match_array([package1, package1, package1, package2, package2, package2])
+
+ create(:package_file, :xml, package: package1)
+
+ expect(project.packages.order_by_package_file).to match_array([package1, package1, package1, package2, package2, package2, package1])
+ end
+ end
+
describe '#versions' do
let_it_be(:project) { create(:project) }
let_it_be(:package) { create(:maven_package, project: project) }
@@ -731,4 +745,14 @@ RSpec.describe Packages::Package, type: :model do
end
end
end
+
+ describe '#package_settings' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:package) { create(:maven_package, project: project) }
+
+ it 'returns the namespace package_settings' do
+ expect(package.package_settings).to eq(group.package_settings)
+ end
+ end
end
diff --git a/spec/models/project_import_state_spec.rb b/spec/models/project_import_state_spec.rb
index 6a0402d43a8..843beb4ce23 100644
--- a/spec/models/project_import_state_spec.rb
+++ b/spec/models/project_import_state_spec.rb
@@ -103,7 +103,7 @@ RSpec.describe ProjectImportState, type: :model do
allow(after_import_service)
.to receive(:execute) { housekeeping_service.execute }
- allow(Projects::HousekeepingService)
+ allow(Repositories::HousekeepingService)
.to receive(:new) { housekeeping_service }
end
diff --git a/spec/models/project_pages_metadatum_spec.rb b/spec/models/project_pages_metadatum_spec.rb
new file mode 100644
index 00000000000..31a533e0363
--- /dev/null
+++ b/spec/models/project_pages_metadatum_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ProjectPagesMetadatum do
+ describe '.only_on_legacy_storage' do
+ it 'returns only records that are marked as deployed but have no pages deployment' do
+ create(:project) # without pages deployed
+
+ legacy_storage_project = create(:project)
+ legacy_storage_project.mark_pages_as_deployed
+
+ project_with_deployment = create(:project)
+ deployment = create(:pages_deployment, project: project_with_deployment)
+ project_with_deployment.mark_pages_as_deployed
+ project_with_deployment.update_pages_deployment!(deployment)
+
+ expect(described_class.only_on_legacy_storage).to eq([legacy_storage_project.pages_metadatum])
+ end
+ end
+end
diff --git a/spec/models/project_services/alerts_service_spec.rb b/spec/models/project_services/alerts_service_spec.rb
index db25885c76a..75b91c29914 100644
--- a/spec/models/project_services/alerts_service_spec.rb
+++ b/spec/models/project_services/alerts_service_spec.rb
@@ -2,108 +2,38 @@
require 'spec_helper'
+# AlertsService is stripped down to only required methods
+# to avoid errors loading integration-related pages if
+# records are present.
RSpec.describe AlertsService do
let_it_be(:project) { create(:project) }
- let(:service_params) { { project: project, active: active } }
- let(:active) { true }
- let(:service) { described_class.new(service_params) }
+ subject(:service) { described_class.new(project: project) }
- shared_context 'when active' do
- let(:active) { true }
- end
-
- shared_context 'when inactive' do
- let(:active) { false }
- end
-
- shared_context 'when persisted' do
- before do
- service.save!
- service.reload
- end
- end
-
- describe '#url' do
- include Gitlab::Routing
-
- subject { service.url }
+ it { is_expected.to be_valid }
- it { is_expected.to eq(project_alerts_notify_url(project, format: :json)) }
- end
-
- describe '#json_fields' do
- subject { service.json_fields }
+ describe '#to_param' do
+ subject { service.to_param }
- it { is_expected.to eq(%w(active token)) }
+ it { is_expected.to eq('alerts') }
end
- describe '#as_json' do
- subject { service.as_json(only: service.json_fields) }
+ describe '#supported_events' do
+ subject { service.supported_events }
- it { is_expected.to eq('active' => true, 'token' => nil) }
+ it { is_expected.to be_empty }
end
- describe '#token' do
- shared_context 'reset token' do
- before do
- service.token = ''
- service.valid?
- end
- end
-
- shared_context 'assign token' do |token|
- before do
- service.token = token
- service.valid?
- end
- end
-
- shared_examples 'valid token' do
- it { is_expected.to match(/\A\h{32}\z/) }
- end
-
- shared_examples 'no token' do
- it { is_expected.to be_blank }
- end
-
- subject { service.token }
-
- context 'when active' do
- include_context 'when active'
-
- context 'when resetting' do
- let!(:previous_token) { service.token }
-
- include_context 'reset token'
-
- it_behaves_like 'valid token'
-
- it { is_expected.not_to eq(previous_token) }
- end
-
- context 'when assigning' do
- include_context 'assign token', 'random token'
-
- it_behaves_like 'valid token'
- end
- end
-
- context 'when inactive' do
- include_context 'when inactive'
-
- context 'when resetting' do
- let!(:previous_token) { service.token }
-
- include_context 'reset token'
-
- it_behaves_like 'no token'
- end
- end
+ describe '#save' do
+ it 'prevents records from being created or updated' do
+ expect(Gitlab::ProjectServiceLogger).to receive(:error).with(
+ hash_including(message: 'Prevented attempt to save or update deprecated AlertsService')
+ )
- context 'when persisted' do
- include_context 'when persisted'
+ expect(service.save).to be_falsey
- it_behaves_like 'valid token'
+ expect(service.errors.full_messages).to include(
+ 'Alerts endpoint is deprecated and should not be created or modified. Use HTTP Integrations instead.'
+ )
end
end
end
diff --git a/spec/models/project_services/jira_service_spec.rb b/spec/models/project_services/jira_service_spec.rb
index e7cd3d7f537..cd0873bddd2 100644
--- a/spec/models/project_services/jira_service_spec.rb
+++ b/spec/models/project_services/jira_service_spec.rb
@@ -5,12 +5,21 @@ require 'spec_helper'
RSpec.describe JiraService do
include AssetsHelpers
+ let_it_be(:project) { create(:project, :repository) }
let(:url) { 'http://jira.example.com' }
let(:api_url) { 'http://api-jira.example.com' }
let(:username) { 'jira-username' }
let(:password) { 'jira-password' }
let(:transition_id) { 'test27' }
let(:server_info_results) { { 'deploymentType' => 'Cloud' } }
+ let(:jira_service) do
+ described_class.new(
+ project: project,
+ url: url,
+ username: username,
+ password: password
+ )
+ end
before do
WebMock.stub_request(:get, /serverInfo/).to_return(body: server_info_results.to_json )
@@ -19,7 +28,7 @@ RSpec.describe JiraService do
describe '#options' do
let(:options) do
{
- project: create(:project),
+ project: project,
active: true,
username: 'username',
password: 'test',
@@ -108,7 +117,7 @@ RSpec.describe JiraService do
describe '#create' do
let(:params) do
{
- project: create(:project),
+ project: project,
url: url, api_url: api_url,
username: username, password: password,
jira_issue_transition_id: transition_id
@@ -434,10 +443,23 @@ RSpec.describe JiraService do
end
end
+ describe '#find_issue' do
+ let(:issue_key) { 'JIRA-123' }
+ let(:issue_url) { "#{url}/rest/api/2/issue/#{issue_key}" }
+
+ before do
+ stub_request(:get, issue_url).with(basic_auth: [username, password])
+ end
+
+ it 'calls the Jira API to get the issue' do
+ jira_service.find_issue(issue_key)
+
+ expect(WebMock).to have_requested(:get, issue_url)
+ end
+ end
+
describe '#close_issue' do
let(:custom_base_url) { 'http://custom_url' }
- let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
shared_examples 'close_issue' do
before do
@@ -445,7 +467,6 @@ RSpec.describe JiraService do
allow(@jira_service).to receive_messages(
project_id: project.id,
project: project,
- service_hook: true,
url: 'http://jira.example.com',
username: 'gitlab_jira_username',
password: 'gitlab_jira_password',
@@ -657,17 +678,7 @@ RSpec.describe JiraService do
end
describe '#create_cross_reference_note' do
- let_it_be(:user) { build_stubbed(:user) }
- let_it_be(:project) { create(:project, :repository) }
- let(:jira_service) do
- described_class.new(
- project: project,
- url: url,
- username: username,
- password: password
- )
- end
-
+ let_it_be(:user) { build_stubbed(:user) }
let(:jira_issue) { ExternalIssue.new('JIRA-123', project) }
subject { jira_service.create_cross_reference_note(jira_issue, resource, user) }
@@ -732,15 +743,6 @@ RSpec.describe JiraService do
describe '#test' do
let(:server_info_results) { { 'url' => 'http://url', 'deploymentType' => 'Cloud' } }
- let_it_be(:project) { create(:project, :repository) }
- let(:jira_service) do
- described_class.new(
- url: url,
- project: project,
- username: username,
- password: password
- )
- end
def server_info
jira_service.test(nil)
@@ -790,7 +792,6 @@ RSpec.describe JiraService do
}
allow(Gitlab.config).to receive(:issues_tracker).and_return(settings)
- project = create(:project)
service = project.create_jira_service(active: true)
expect(service.url).to eq('http://jira.sample/projects/project_a')
diff --git a/spec/models/project_services/prometheus_service_spec.rb b/spec/models/project_services/prometheus_service_spec.rb
index 76fc5a826c9..8215fb5c336 100644
--- a/spec/models/project_services/prometheus_service_spec.rb
+++ b/spec/models/project_services/prometheus_service_spec.rb
@@ -95,7 +95,7 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
service.api_url = 'http://localhost:9090'
stub_application_setting(self_monitoring_project_id: project.id)
- stub_config(prometheus: { enable: true, listen_address: 'localhost:9090' })
+ stub_config(prometheus: { enable: true, server_address: 'localhost:9090' })
end
it 'allows self-monitoring project to connect to internal Prometheus' do
@@ -242,7 +242,7 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
stub_config(prometheus: {
enable: true,
- listen_address: api_url
+ server_address: api_url
})
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index a71b0eb842a..a2b51684d4d 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -127,6 +127,7 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_many(:reviews).inverse_of(:project) }
it { is_expected.to have_many(:packages).class_name('Packages::Package') }
it { is_expected.to have_many(:package_files).class_name('Packages::PackageFile') }
+ it { is_expected.to have_many(:debian_distributions).class_name('Packages::Debian::ProjectDistribution').dependent(:destroy) }
it { is_expected.to have_many(:pipeline_artifacts) }
it { is_expected.to have_many(:terraform_states).class_name('Terraform::State').inverse_of(:project) }
@@ -1066,36 +1067,6 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '#cache_has_external_wiki' do
- let_it_be(:project) { create(:project, has_external_wiki: nil) }
-
- it 'stores true if there is any external_wikis' do
- services = double(:service, external_wikis: [ExternalWikiService.new])
- expect(project).to receive(:services).and_return(services)
-
- expect do
- project.cache_has_external_wiki
- end.to change { project.has_external_wiki}.to(true)
- end
-
- it 'stores false if there is no external_wikis' do
- services = double(:service, external_wikis: [])
- expect(project).to receive(:services).and_return(services)
-
- expect do
- project.cache_has_external_wiki
- end.to change { project.has_external_wiki}.to(false)
- end
-
- it 'does not cache data when in a read-only GitLab instance' do
- allow(Gitlab::Database).to receive(:read_only?) { true }
-
- expect do
- project.cache_has_external_wiki
- end.not_to change { project.has_external_wiki }
- end
- end
-
describe '#has_wiki?' do
let(:no_wiki_project) { create(:project, :wiki_disabled, has_external_wiki: false) }
let(:wiki_enabled_project) { create(:project) }
@@ -1135,52 +1106,64 @@ RSpec.describe Project, factory_default: :keep do
describe '#external_wiki' do
let_it_be(:project) { create(:project) }
- context 'with an active external wiki' do
- before do
- create(:service, project: project, type: 'ExternalWikiService', active: true)
- project.external_wiki
- end
+ def subject
+ project.reload.external_wiki
+ end
- it 'sets :has_external_wiki as true' do
- expect(project.has_external_wiki).to be(true)
- end
+ it 'returns an active external wiki' do
+ create(:service, project: project, type: 'ExternalWikiService', active: true)
- it 'sets :has_external_wiki as false if an external wiki service is destroyed later' do
- expect(project.has_external_wiki).to be(true)
+ is_expected.to be_kind_of(ExternalWikiService)
+ end
- project.services.external_wikis.first.destroy
+ it 'does not return an inactive external wiki' do
+ create(:service, project: project, type: 'ExternalWikiService', active: false)
- expect(project.has_external_wiki).to be(false)
- end
+ is_expected.to eq(nil)
end
- context 'with an inactive external wiki' do
- before do
- create(:service, project: project, type: 'ExternalWikiService', active: false)
- end
+ it 'sets Project#has_external_wiki when it is nil' do
+ create(:service, project: project, type: 'ExternalWikiService', active: true)
+ project.update_column(:has_external_wiki, nil)
- it 'sets :has_external_wiki as false' do
- expect(project.has_external_wiki).to be(false)
- end
+ expect { subject }.to change { project.has_external_wiki }.from(nil).to(true)
end
+ end
- context 'with no external wiki' do
- before do
- project.external_wiki
- end
+ describe '#has_external_wiki' do
+ let_it_be(:project) { create(:project) }
- it 'sets :has_external_wiki as false' do
- expect(project.has_external_wiki).to be(false)
- end
+ def subject
+ project.reload.has_external_wiki
+ end
- it 'sets :has_external_wiki as true if an external wiki service is created later' do
- expect(project.has_external_wiki).to be(false)
+ specify { is_expected.to eq(false) }
+ context 'when there is an active external wiki service' do
+ let!(:service) do
create(:service, project: project, type: 'ExternalWikiService', active: true)
+ end
+
+ specify { is_expected.to eq(true) }
+
+ it 'becomes false if the external wiki service is destroyed' do
+ expect do
+ Service.find(service.id).delete
+ end.to change { subject }.to(false)
+ end
- expect(project.has_external_wiki).to be(true)
+ it 'becomes false if the external wiki service becomes inactive' do
+ expect do
+ service.update_column(:active, false)
+ end.to change { subject }.to(false)
end
end
+
+ it 'is false when external wiki service is not active' do
+ create(:service, project: project, type: 'ExternalWikiService', active: false)
+
+ is_expected.to eq(false)
+ end
end
describe '#star_count' do
@@ -1516,63 +1499,13 @@ RSpec.describe Project, factory_default: :keep do
allow(::Gitlab::ServiceDeskEmail).to receive(:config).and_return(config)
end
- context 'when service_desk_custom_address flag is enabled' do
- before do
- stub_feature_flags(service_desk_custom_address: true)
- end
-
- it 'returns custom address when project_key is set' do
- create(:service_desk_setting, project: project, project_key: 'key1')
-
- expect(subject).to eq("foo+#{project.full_path_slug}-key1@bar.com")
- end
+ it 'returns custom address when project_key is set' do
+ create(:service_desk_setting, project: project, project_key: 'key1')
- it_behaves_like 'with incoming email address'
+ expect(subject).to eq("foo+#{project.full_path_slug}-key1@bar.com")
end
- context 'when service_desk_custom_address flag is disabled' do
- before do
- stub_feature_flags(service_desk_custom_address: false)
- end
-
- it_behaves_like 'with incoming email address'
- end
- end
- end
-
- describe '.service_desk_custom_address_enabled?' do
- let_it_be(:project) { create(:project, service_desk_enabled: true) }
-
- subject(:address_enabled) { project.service_desk_custom_address_enabled? }
-
- context 'when service_desk_email is enabled' do
- before do
- allow(::Gitlab::ServiceDeskEmail).to receive(:enabled?).and_return(true)
- end
-
- it 'returns true' do
- expect(address_enabled).to be_truthy
- end
-
- context 'when service_desk_custom_address flag is disabled' do
- before do
- stub_feature_flags(service_desk_custom_address: false)
- end
-
- it 'returns false' do
- expect(address_enabled).to be_falsey
- end
- end
- end
-
- context 'when service_desk_email is disabled' do
- before do
- allow(::Gitlab::ServiceDeskEmail).to receive(:enabled?).and_return(false)
- end
-
- it 'returns false when service_desk_email is disabled' do
- expect(address_enabled).to be_falsey
- end
+ it_behaves_like 'with incoming email address'
end
end
@@ -3044,56 +2977,9 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '#pushes_since_gc' do
- let(:project) { build_stubbed(:project) }
-
- after do
- project.reset_pushes_since_gc
- end
-
- context 'without any pushes' do
- it 'returns 0' do
- expect(project.pushes_since_gc).to eq(0)
- end
- end
-
- context 'with a number of pushes' do
- it 'returns the number of pushes' do
- 3.times { project.increment_pushes_since_gc }
-
- expect(project.pushes_since_gc).to eq(3)
- end
- end
- end
-
- describe '#increment_pushes_since_gc' do
- let(:project) { build_stubbed(:project) }
-
- after do
- project.reset_pushes_since_gc
- end
-
- it 'increments the number of pushes since the last GC' do
- 3.times { project.increment_pushes_since_gc }
-
- expect(project.pushes_since_gc).to eq(3)
- end
- end
-
- describe '#reset_pushes_since_gc' do
- let(:project) { build_stubbed(:project) }
-
- after do
- project.reset_pushes_since_gc
- end
-
- it 'resets the number of pushes since the last GC' do
- 3.times { project.increment_pushes_since_gc }
-
- project.reset_pushes_since_gc
-
- expect(project.pushes_since_gc).to eq(0)
- end
+ it_behaves_like 'can housekeep repository' do
+ let(:resource) { build_stubbed(:project) }
+ let(:resource_key) { 'projects' }
end
describe '#deployment_variables' do
@@ -4548,6 +4434,24 @@ RSpec.describe Project, factory_default: :keep do
end
end
+ describe '#predefined_project_variables' do
+ let_it_be(:project) { create(:project, :repository) }
+
+ subject { project.predefined_project_variables.to_runner_variables }
+
+ specify do
+ expect(subject).to include({ key: 'CI_PROJECT_CONFIG_PATH', value: Ci::Pipeline::DEFAULT_CONFIG_PATH, public: true, masked: false })
+ end
+
+ context 'when ci config path is overridden' do
+ before do
+ project.update!(ci_config_path: 'random.yml')
+ end
+
+ it { expect(subject).to include({ key: 'CI_PROJECT_CONFIG_PATH', value: 'random.yml', public: true, masked: false }) }
+ end
+ end
+
describe '#auto_devops_enabled?' do
before do
Feature.enable_percentage_of_actors(:force_autodevops_on_by_default, 0)
@@ -6002,6 +5906,43 @@ RSpec.describe Project, factory_default: :keep do
end
end
+ describe '#set_first_pages_deployment!' do
+ let(:project) { create(:project) }
+ let(:deployment) { create(:pages_deployment, project: project) }
+
+ it "creates new metadata record if none exists yet and sets deployment" do
+ project.pages_metadatum.destroy!
+ project.reload
+
+ project.set_first_pages_deployment!(deployment)
+
+ expect(project.pages_metadatum.reload.pages_deployment).to eq(deployment)
+ end
+
+ it "updates the existing metadara record with deployment" do
+ expect do
+ project.set_first_pages_deployment!(deployment)
+ end.to change { project.pages_metadatum.reload.pages_deployment }.from(nil).to(deployment)
+ end
+
+ it 'only updates metadata for this project' do
+ other_project = create(:project)
+
+ expect do
+ project.set_first_pages_deployment!(deployment)
+ end.not_to change { other_project.pages_metadatum.reload.pages_deployment }.from(nil)
+ end
+
+ it 'does nothing if metadata already references some deployment' do
+ existing_deployment = create(:pages_deployment, project: project)
+ project.set_first_pages_deployment!(existing_deployment)
+
+ expect do
+ project.set_first_pages_deployment!(deployment)
+ end.not_to change { project.pages_metadatum.reload.pages_deployment }.from(existing_deployment)
+ end
+ end
+
describe '#has_pool_repository?' do
it 'returns false when it does not have a pool repository' do
subject = create(:project, :repository)
@@ -6275,28 +6216,6 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '#alerts_service_activated?' do
- let!(:project) { create(:project) }
-
- subject { project.alerts_service_activated? }
-
- context 'when project has an activated alerts service' do
- before do
- create(:alerts_service, project: project)
- end
-
- it { is_expected.to be_truthy }
- end
-
- context 'when project has an inactive alerts service' do
- before do
- create(:alerts_service, :inactive, project: project)
- end
-
- it { is_expected.to be_falsey }
- end
- end
-
describe '#prometheus_service_active?' do
let(:project) { create(:project) }
@@ -6456,6 +6375,25 @@ RSpec.describe Project, factory_default: :keep do
end
end
+ describe 'with Debian Distributions' do
+ subject { create(:project) }
+
+ let!(:distributions) { create_list(:debian_project_distribution, 2, :with_file, container: subject) }
+
+ it 'removes distribution files on removal' do
+ distribution_file_paths = distributions.map do |distribution|
+ distribution.file.path
+ end
+
+ expect { subject.destroy }
+ .to change {
+ distribution_file_paths.select do |path|
+ File.exist? path
+ end.length
+ }.from(distribution_file_paths.length).to(0)
+ end
+ end
+
describe '#environments_for_scope' do
let_it_be(:project, reload: true) { create(:project) }
diff --git a/spec/models/project_wiki_spec.rb b/spec/models/project_wiki_spec.rb
index 2e82fcf5511..8001d009901 100644
--- a/spec/models/project_wiki_spec.rb
+++ b/spec/models/project_wiki_spec.rb
@@ -42,4 +42,10 @@ RSpec.describe ProjectWiki do
end
end
end
+
+ it_behaves_like 'can housekeep repository' do
+ let_it_be(:resource) { create(:project_wiki) }
+
+ let(:resource_key) { 'project_wikis' }
+ end
end
diff --git a/spec/models/protectable_dropdown_spec.rb b/spec/models/protectable_dropdown_spec.rb
index c51197234ca..918c3078405 100644
--- a/spec/models/protectable_dropdown_spec.rb
+++ b/spec/models/protectable_dropdown_spec.rb
@@ -14,21 +14,33 @@ RSpec.describe ProtectableDropdown do
end
describe '#protectable_ref_names' do
- before do
- project.protected_branches.create(name: 'master')
- end
+ context 'when project repository is not empty' do
+ before do
+ project.protected_branches.create(name: 'master')
+ end
+
+ it { expect(subject.protectable_ref_names).to include('feature') }
+ it { expect(subject.protectable_ref_names).not_to include('master') }
+
+ it "includes branches matching a protected branch wildcard" do
+ expect(subject.protectable_ref_names).to include('feature')
- it { expect(subject.protectable_ref_names).to include('feature') }
- it { expect(subject.protectable_ref_names).not_to include('master') }
+ create(:protected_branch, name: 'feat*', project: project)
- it "includes branches matching a protected branch wildcard" do
- expect(subject.protectable_ref_names).to include('feature')
+ subject = described_class.new(project.reload, :branches)
+
+ expect(subject.protectable_ref_names).to include('feature')
+ end
+ end
- create(:protected_branch, name: 'feat*', project: project)
+ context 'when project repository is empty' do
+ let(:project) { create(:project) }
- subject = described_class.new(project.reload, :branches)
+ it "returns empty list" do
+ subject = described_class.new(project, :branches)
- expect(subject.protectable_ref_names).to include('feature')
+ expect(subject.protectable_ref_names).to be_empty
+ end
end
end
end
diff --git a/spec/models/release_highlight_spec.rb b/spec/models/release_highlight_spec.rb
index ac252e6d6cf..749b9b8e1ab 100644
--- a/spec/models/release_highlight_spec.rb
+++ b/spec/models/release_highlight_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe ReleaseHighlight do
- let(:fixture_dir_glob) { Dir.glob(File.join('spec', 'fixtures', 'whats_new', '*.yml')) }
+ let(:fixture_dir_glob) { Dir.glob(File.join('spec', 'fixtures', 'whats_new', '*.yml')).grep(/\d*\_(\d*\_\d*)\.yml$/) }
before do
allow(Dir).to receive(:glob).with(Rails.root.join('data', 'whats_new', '*.yml')).and_return(fixture_dir_glob)
diff --git a/spec/models/release_spec.rb b/spec/models/release_spec.rb
index fea15ea00c8..b436c2e1088 100644
--- a/spec/models/release_spec.rb
+++ b/spec/models/release_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe Release do
- let(:user) { create(:user) }
- let(:project) { create(:project, :public, :repository) }
- let(:release) { create(:release, project: project, author: user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:release) { create(:release, project: project, author: user) }
it { expect(release).to be_valid }
@@ -132,8 +132,10 @@ RSpec.describe Release do
end
describe '#milestone_titles' do
- let(:release) { create(:release, :with_milestones) }
+ let_it_be(:milestone_1) { create(:milestone, project: project, title: 'Milestone 1') }
+ let_it_be(:milestone_2) { create(:milestone, project: project, title: 'Milestone 2') }
+ let_it_be(:release) { create(:release, project: project, milestones: [milestone_1, milestone_2]) }
- it { expect(release.milestone_titles).to eq(release.milestones.map {|m| m.title }.sort.join(", "))}
+ it { expect(release.milestone_titles).to eq("#{milestone_1.title}, #{milestone_2.title}") }
end
end
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index c1f073e26d1..dd54a701282 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -123,7 +123,7 @@ RSpec.describe Repository do
options = { message: 'test tag message\n',
tagger: { name: 'John Smith', email: 'john@gmail.com' } }
- rugged_repo(repository).tags.create(annotated_tag_name, 'a48e4fc218069f68ef2e769dd8dfea3991362175', options)
+ rugged_repo(repository).tags.create(annotated_tag_name, 'a48e4fc218069f68ef2e769dd8dfea3991362175', **options)
double_first = double(committed_date: Time.current - 1.second)
double_last = double(committed_date: Time.current)
@@ -2028,6 +2028,22 @@ RSpec.describe Repository do
end
end
+ describe '#lookup' do
+ before do
+ allow(repository.raw_repository).to receive(:lookup).and_return('interesting_blob')
+ end
+
+ it 'uses the lookup cache' do
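+ # Two identical lookups should hit the raw repository only once when the cache is warm.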
+ 2.times { repository.lookup('sha1') }
+
+ expect(repository.raw_repository).to have_received(:lookup).once
+ end
+
+ it 'returns the correct value' do
+ expect(repository.lookup('sha1')).to eq('interesting_blob')
+ end
+ end
+
describe '#after_create' do
it 'calls expire_status_cache' do
expect(repository).to receive(:expire_status_cache)
diff --git a/spec/models/snippet_repository_storage_move_spec.rb b/spec/models/snippet_repository_storage_move_spec.rb
index c9feff0c22f..357951f8859 100644
--- a/spec/models/snippet_repository_storage_move_spec.rb
+++ b/spec/models/snippet_repository_storage_move_spec.rb
@@ -8,6 +8,6 @@ RSpec.describe SnippetRepositoryStorageMove, type: :model do
let(:repository_storage_factory_key) { :snippet_repository_storage_move }
let(:error_key) { :snippet }
- let(:repository_storage_worker) { nil } # TODO set to SnippetUpdateRepositoryStorageWorker after https://gitlab.com/gitlab-org/gitlab/-/issues/218991 is implemented
+ let(:repository_storage_worker) { SnippetUpdateRepositoryStorageWorker }
end
end
diff --git a/spec/models/snippet_spec.rb b/spec/models/snippet_spec.rb
index f87259ea048..68d183d5d55 100644
--- a/spec/models/snippet_spec.rb
+++ b/spec/models/snippet_spec.rb
@@ -796,4 +796,90 @@ RSpec.describe Snippet do
it_behaves_like 'can move repository storage' do
let_it_be(:container) { create(:snippet, :repository) }
end
+
+ describe '#change_head_to_default_branch' do
+ let(:head_path) { Rails.root.join(TestEnv.repos_path, "#{snippet.disk_path}.git", 'HEAD') }
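+ # Path to the HEAD file of the snippet's bare test repository on disk.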
+
+ subject { snippet.change_head_to_default_branch }
+
+ context 'when repository does not exist' do
+ let(:snippet) { create(:snippet) }
+
+ it 'does nothing' do
+ expect(snippet.repository_exists?).to eq false
+ expect(snippet.repository.raw_repository).not_to receive(:write_ref)
+
+ subject
+ end
+ end
+
+ context 'when repository is empty' do
+ let(:snippet) { create(:snippet, :empty_repo) }
+
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:default_branch_name).and_return(default_branch)
+ end
+
+ context 'when default branch in settings is "master"' do
+ let(:default_branch) { 'master' }
+
+ it 'does nothing' do
+ expect(File.read(head_path).squish).to eq 'ref: refs/heads/master'
+
+ expect(snippet.repository.raw_repository).not_to receive(:write_ref)
+
+ subject
+ end
+ end
+
+ context 'when default branch in settings is different from "master"' do
+ let(:default_branch) { 'main' }
+
+ it 'changes the HEAD reference to the default branch' do
+ expect(File.read(head_path).squish).to eq 'ref: refs/heads/master'
+
+ subject
+
+ expect(File.read(head_path).squish).to eq "ref: refs/heads/#{default_branch}"
+ end
+ end
+ end
+
+ context 'when repository is not empty' do
+ let(:snippet) { create(:snippet, :empty_repo) }
+
+ before do
+ populate_snippet_repo
+ end
+
+ context 'when HEAD branch is empty' do
+ it 'changes HEAD to default branch' do
+ File.write(head_path, 'ref: refs/heads/non_existent_branch')
+ expect(File.read(head_path).squish).to eq 'ref: refs/heads/non_existent_branch'
+
+ subject
+
+ expect(File.read(head_path).squish).to eq 'ref: refs/heads/main'
+ expect(snippet.list_files('HEAD')).not_to be_empty
+ end
+ end
+
+ context 'when HEAD branch is not empty' do
+ it 'does nothing' do
+ File.write(head_path, 'ref: refs/heads/main')
+
+ expect(snippet.repository.raw_repository).not_to receive(:write_ref)
+
+ subject
+ end
+ end
+
+ def populate_snippet_repo
+ allow(Gitlab::CurrentSettings).to receive(:default_branch_name).and_return('main')
+
+ data = [{ file_path: 'new_file_test', content: 'bar' }]
+ snippet.snippet_repository.multi_files_action(snippet.author, data, branch_name: 'main', message: 'foo')
+ end
+ end
+ end
end
diff --git a/spec/models/terraform/state_spec.rb b/spec/models/terraform/state_spec.rb
index ef91e4a5a71..ed311314086 100644
--- a/spec/models/terraform/state_spec.rb
+++ b/spec/models/terraform/state_spec.rb
@@ -27,6 +27,22 @@ RSpec.describe Terraform::State do
end
end
+ describe '#destroy' do
+ let(:terraform_state) { create(:terraform_state) }
+ let(:user) { terraform_state.project.creator }
+
+ it 'deletes when the state is unlocked' do
+ expect(terraform_state.destroy).to be_truthy
+ end
+
+ it 'fails to delete when the state is locked', :aggregate_failures do
+ terraform_state.update!(lock_xid: SecureRandom.uuid, locked_by_user: user, locked_at: Time.current)
+
+ expect(terraform_state.destroy).to be_falsey
+ expect(terraform_state.errors.full_messages).to eq(["You cannot remove the State file because it's locked. Unlock the State file first before removing it."])
+ end
+ end
+
describe '#latest_file' do
let(:terraform_state) { create(:terraform_state, :with_version) }
let(:latest_version) { terraform_state.latest_version }
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index fb05c9e8052..0935d3576a4 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -4084,6 +4084,7 @@ RSpec.describe User do
cache_mock = double
expect(cache_mock).to receive(:delete).with(['users', user.id, 'assigned_open_merge_requests_count'])
+ expect(cache_mock).to receive(:delete).with(['users', user.id, 'review_requested_open_merge_requests_count'])
allow(Rails).to receive(:cache).and_return(cache_mock)
@@ -4163,6 +4164,20 @@ RSpec.describe User do
end
end
+ describe '#review_requested_open_merge_requests_count' do
+ it 'returns number of open merge requests from non-archived projects' do
+ user = create(:user)
+ project = create(:project, :public)
+ archived_project = create(:project, :public, :archived)
+
+ create(:merge_request, source_project: project, author: user, reviewers: [user])
+ create(:merge_request, :closed, source_project: project, author: user, reviewers: [user])
+ create(:merge_request, source_project: archived_project, author: user, reviewers: [user])
+
+ expect(user.review_requested_open_merge_requests_count(force: true)).to eq 1
+ end
+ end
+
describe '#assigned_open_issues_count' do
it 'returns number of open issues from non-archived projects' do
user = create(:user)
@@ -4265,7 +4280,7 @@ RSpec.describe User do
it 'adds the namespace errors to the user' do
user.update(username: new_username)
- expect(user.errors.full_messages.first).to eq('Username has already been taken')
+ expect(user.errors.full_messages.first).to eq('A user, alias, or group already exists with that username.')
end
end
end
@@ -5074,9 +5089,10 @@ RSpec.describe User do
end
describe '#hook_attrs' do
- it 'includes name, username, avatar_url, and email' do
+ it 'includes id, name, username, avatar_url, and email' do
user = create(:user)
user_attributes = {
+ id: user.id,
name: user.name,
username: user.username,
avatar_url: user.avatar_url(only_path: false),
diff --git a/spec/policies/group_member_policy_spec.rb b/spec/policies/group_member_policy_spec.rb
index 9e58ea81ef3..6099e4549b1 100644
--- a/spec/policies/group_member_policy_spec.rb
+++ b/spec/policies/group_member_policy_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe GroupMemberPolicy do
+ include DesignManagementTestHelpers
+
let(:guest) { create(:user) }
let(:owner) { create(:user) }
let(:group) { create(:group, :private) }
@@ -28,22 +30,64 @@ RSpec.describe GroupMemberPolicy do
permissions.each { |p| is_expected.not_to be_allowed(p) }
end
- context 'with guest user' do
- let(:current_user) { guest }
+ context 'with anonymous user' do
+ let(:group) { create(:group, :public) }
+ let(:current_user) { nil }
+ let(:membership) { guest.members.first }
it do
- expect_disallowed(:member_related_permissions)
+ expect_disallowed(:read_design_activity, *member_related_permissions)
+ expect_allowed(:read_group)
+ end
+
+ context 'design management is enabled' do
+ before do
+ create(:project, :public, group: group) # Necessary to enable design management
+ enable_design_management
+ end
+
+ specify do
+ expect_allowed(:read_design_activity)
+ end
+ end
+
+ context 'for a private group' do
+ let(:group) { create(:group, :private) }
+
+ specify do
+ expect_disallowed(:read_group, :read_design_activity, *member_related_permissions)
+ end
+ end
+
+ context 'for an internal group' do
+ let(:group) { create(:group, :internal) }
+
+ specify do
+ expect_disallowed(:read_group, :read_design_activity, *member_related_permissions)
+ end
end
end
+ context 'with guest user, for own membership' do
+ let(:current_user) { guest }
+
+ specify { expect_disallowed(:update_group_member) }
+ specify { expect_allowed(:read_group, :destroy_group_member) }
+ end
+
+ context 'with guest user, for other membership' do
+ let(:current_user) { guest }
+ let(:membership) { owner.members.first }
+
+ specify { expect_disallowed(:destroy_group_member, :update_group_member) }
+ specify { expect_allowed(:read_group) }
+ end
+
context 'with one owner' do
let(:current_user) { owner }
- it do
- expect_disallowed(:destroy_group_member)
- expect_disallowed(:update_group_member)
- expect_allowed(:read_group)
- end
+ specify { expect_disallowed(*member_related_permissions) }
+ specify { expect_allowed(:read_group) }
end
context 'with more than one owner' do
@@ -53,10 +97,7 @@ RSpec.describe GroupMemberPolicy do
group.add_owner(create(:user))
end
- it do
- expect_allowed(:destroy_group_member)
- expect_allowed(:update_group_member)
- end
+ specify { expect_allowed(*member_related_permissions) }
end
context 'with the group parent' do
diff --git a/spec/policies/namespace_policy_spec.rb b/spec/policies/namespace_policy_spec.rb
index 514d7303ad7..b9823273de8 100644
--- a/spec/policies/namespace_policy_spec.rb
+++ b/spec/policies/namespace_policy_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe NamespacePolicy do
let(:admin) { create(:admin) }
let(:namespace) { create(:namespace, owner: owner) }
- let(:owner_permissions) { [:owner_access, :create_projects, :admin_namespace, :read_namespace, :read_statistics, :transfer_projects] }
+ let(:owner_permissions) { [:owner_access, :create_projects, :admin_namespace, :read_namespace, :read_statistics, :transfer_projects, :create_package_settings, :read_package_settings] }
subject { described_class.new(current_user, namespace) }
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index c21d3b0939f..e6650549f7f 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -401,6 +401,48 @@ RSpec.describe ProjectPolicy do
end
end
+ describe 'set_pipeline_variables' do
+ context 'when user is developer' do
+ let(:current_user) { developer }
+
+ context 'when project allows user defined variables' do
+ before do
+ project.update!(restrict_user_defined_variables: false)
+ end
+
+ it { is_expected.to be_allowed(:set_pipeline_variables) }
+ end
+
+ context 'when project restricts use of user defined variables' do
+ before do
+ project.update!(restrict_user_defined_variables: true)
+ end
+
+ it { is_expected.not_to be_allowed(:set_pipeline_variables) }
+ end
+ end
+
+ context 'when user is maintainer' do
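+ # Maintainers may set pipeline variables even when the project restricts user-defined variables.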
+ let(:current_user) { maintainer }
+
+ context 'when project allows user defined variables' do
+ before do
+ project.update!(restrict_user_defined_variables: false)
+ end
+
+ it { is_expected.to be_allowed(:set_pipeline_variables) }
+ end
+
+ context 'when project restricts use of user defined variables' do
+ before do
+ project.update!(restrict_user_defined_variables: true)
+ end
+
+ it { is_expected.to be_allowed(:set_pipeline_variables) }
+ end
+ end
+ end
+
context 'support bot' do
let(:current_user) { User.support_bot }
diff --git a/spec/presenters/ci/build_runner_presenter_spec.rb b/spec/presenters/ci/build_runner_presenter_spec.rb
index ce4c5a2db7d..f78ad38f4e8 100644
--- a/spec/presenters/ci/build_runner_presenter_spec.rb
+++ b/spec/presenters/ci/build_runner_presenter_spec.rb
@@ -189,7 +189,21 @@ RSpec.describe Ci::BuildRunnerPresenter do
it 'returns the correct refspecs' do
is_expected.to contain_exactly("+refs/heads/#{build.ref}:refs/remotes/origin/#{build.ref}",
- "+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}")
+ "+#{pipeline.sha}:refs/pipelines/#{pipeline.id}")
+ end
+
+ it 'uses a SHA in the persistent refspec' do
+ expect(subject[0]).to match(/^\+[0-9a-f]{40}:refs\/pipelines\/[0-9]+$/)
+ end
+
+ context 'when the scalability_ci_fetch_sha feature flag is disabled' do
+ before do
+ stub_feature_flags(scalability_ci_fetch_sha: false)
+ end
+
+ it 'fetches the ref by name' do
+ expect(subject[0]).to eq("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}")
+ end
end
context 'when ref is tag' do
@@ -197,7 +211,7 @@ RSpec.describe Ci::BuildRunnerPresenter do
it 'returns the correct refspecs' do
is_expected.to contain_exactly("+refs/tags/#{build.ref}:refs/tags/#{build.ref}",
- "+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}")
+ "+#{pipeline.sha}:refs/pipelines/#{pipeline.id}")
end
context 'when GIT_DEPTH is zero' do
@@ -208,7 +222,7 @@ RSpec.describe Ci::BuildRunnerPresenter do
it 'returns the correct refspecs' do
is_expected.to contain_exactly('+refs/tags/*:refs/tags/*',
'+refs/heads/*:refs/remotes/origin/*',
- "+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}")
+ "+#{pipeline.sha}:refs/pipelines/#{pipeline.id}")
end
end
end
@@ -224,7 +238,7 @@ RSpec.describe Ci::BuildRunnerPresenter do
it 'returns the correct refspecs' do
is_expected
- .to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}")
+ .to contain_exactly("+#{pipeline.sha}:refs/pipelines/#{pipeline.id}")
end
context 'when GIT_DEPTH is zero' do
@@ -234,7 +248,7 @@ RSpec.describe Ci::BuildRunnerPresenter do
it 'returns the correct refspecs' do
is_expected
- .to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
+ .to contain_exactly("+#{pipeline.sha}:refs/pipelines/#{pipeline.id}",
'+refs/heads/*:refs/remotes/origin/*',
'+refs/tags/*:refs/tags/*')
end
@@ -244,7 +258,7 @@ RSpec.describe Ci::BuildRunnerPresenter do
let(:merge_request) { create(:merge_request, :with_legacy_detached_merge_request_pipeline) }
it 'returns the correct refspecs' do
- is_expected.to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
+ is_expected.to contain_exactly("+#{pipeline.sha}:refs/pipelines/#{pipeline.id}",
"+refs/heads/#{build.ref}:refs/remotes/origin/#{build.ref}")
end
end
@@ -262,7 +276,7 @@ RSpec.describe Ci::BuildRunnerPresenter do
it 'exposes the persistent pipeline ref' do
is_expected
- .to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
+ .to contain_exactly("+#{pipeline.sha}:refs/pipelines/#{pipeline.id}",
"+refs/heads/#{build.ref}:refs/remotes/origin/#{build.ref}")
end
end
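
The hunks above change the source side of the persistent pipeline refspec from the ref name (refs/pipelines/<id>) to the pipeline SHA, guarded by the scalability_ci_fetch_sha feature flag. A minimal Ruby sketch of the two forms, assuming only a pipeline object that responds to id and sha as in these specs (not the presenter's actual implementation):

    # Illustrative stand-in for a pipeline record.
    Pipeline = Struct.new(:id, :sha)
    pipeline = Pipeline.new(42, '0123abcd' * 5) # fake 40-char SHA

    def pipeline_refspec(pipeline, fetch_by_sha: true)
      destination = "refs/pipelines/#{pipeline.id}"       # runner-side persistent ref
      source = fetch_by_sha ? pipeline.sha : destination   # new behaviour vs. old behaviour
      "+#{source}:#{destination}"
    end

    pipeline_refspec(pipeline)                      # => "+0123abcd...:refs/pipelines/42"
    pipeline_refspec(pipeline, fetch_by_sha: false) # => "+refs/pipelines/42:refs/pipelines/42"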
diff --git a/spec/presenters/packages/nuget/service_index_presenter_spec.rb b/spec/presenters/packages/nuget/service_index_presenter_spec.rb
index 19ef890e19f..9c95fbc8fd2 100644
--- a/spec/presenters/packages/nuget/service_index_presenter_spec.rb
+++ b/spec/presenters/packages/nuget/service_index_presenter_spec.rb
@@ -4,25 +4,64 @@ require 'spec_helper'
RSpec.describe ::Packages::Nuget::ServiceIndexPresenter do
let_it_be(:project) { create(:project) }
- let_it_be(:presenter) { described_class.new(project) }
+ let_it_be(:group) { create(:group) }
+
+ let(:presenter) { described_class.new(target) }
describe '#version' do
subject { presenter.version }
- it { is_expected.to eq '3.0.0' }
+ context 'for a group' do
+ let(:target) { group }
+
+ it { is_expected.to eq '3.0.0' }
+ end
+
+ context 'for a project' do
+ let(:target) { project }
+
+ it { is_expected.to eq '3.0.0' }
+ end
end
describe '#resources' do
subject { presenter.resources }
- it 'has valid resources' do
- expect(subject.size).to eq 8
- subject.each do |resource|
- %i[@id @type comment].each do |field|
- expect(resource).to have_key(field)
- expect(resource[field]).to be_a(String)
+ shared_examples 'returning valid resources' do |resources_count: 8, include_publish_service: true|
+ it 'has valid resources' do
+ expect(subject.size).to eq resources_count
+ subject.each do |resource|
+ %i[@id @type comment].each do |field|
+ expect(resource).to have_key(field)
+ expect(resource[field]).to be_a(String)
+ end
+ end
+ end
+
+ it "does #{'not ' unless include_publish_service}return the publish resource" do
+ services_types = subject.map { |res| res[:@type] }
+
+ described_class::SERVICE_VERSIONS[:publish].each do |publish_service_version|
+ if include_publish_service
+ expect(services_types).to include(publish_service_version)
+ else
+ expect(services_types).not_to include(publish_service_version)
+ end
end
end
end
+
+ context 'for a group' do
+ let(:target) { group }
+
+      # at the group level we don't have the publish and download services
+ it_behaves_like 'returning valid resources', resources_count: 6, include_publish_service: false
+ end
+
+ context 'for a project' do
+ let(:target) { project }
+
+ it_behaves_like 'returning valid resources'
+ end
end
end
diff --git a/spec/rack_servers/puma_spec.rb b/spec/rack_servers/puma_spec.rb
index 80595b267fa..6729119fe92 100644
--- a/spec/rack_servers/puma_spec.rb
+++ b/spec/rack_servers/puma_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe 'Puma' do
end
after(:all) do
- WebMock.disable_net_connect!(allow_localhost: true)
+ webmock_enable!
Process.kill('TERM', @puma_master_pid)
rescue Errno::ESRCH
end
diff --git a/spec/rack_servers/unicorn_spec.rb b/spec/rack_servers/unicorn_spec.rb
index 5887b49d269..52d44b6e7e0 100644
--- a/spec/rack_servers/unicorn_spec.rb
+++ b/spec/rack_servers/unicorn_spec.rb
@@ -68,7 +68,7 @@ RSpec.describe 'Unicorn' do
end
after(:all) do
- WebMock.disable_net_connect!(allow_localhost: true)
+ webmock_enable!
Process.kill('TERM', @unicorn_master_pid)
end
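
Both rack server specs now restore WebMock through a shared webmock_enable! helper instead of calling WebMock directly. The helper's definition is not part of this diff; a plausible minimal shape, assuming it simply re-applies what these specs previously did inline, would be:

    require 'webmock'

    # Hypothetical spec-support helper; the real definition lives outside this diff.
    def webmock_enable!
      WebMock.disable_net_connect!(allow_localhost: true)
    end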
diff --git a/spec/requests/api/api_spec.rb b/spec/requests/api/api_spec.rb
index 3cc8764de4a..9fd30213133 100644
--- a/spec/requests/api/api_spec.rb
+++ b/spec/requests/api/api_spec.rb
@@ -102,6 +102,7 @@ RSpec.describe API::API do
Labkit::Context.current.to_h.tap do |log_context|
expect(log_context).to match('correlation_id' => an_instance_of(String),
'meta.caller_id' => '/api/:version/projects/:id/issues',
+ 'meta.remote_ip' => an_instance_of(String),
'meta.project' => project.full_path,
'meta.root_namespace' => project.namespace.full_path,
'meta.user' => user.username,
@@ -117,6 +118,7 @@ RSpec.describe API::API do
Labkit::Context.current.to_h.tap do |log_context|
expect(log_context).to match('correlation_id' => an_instance_of(String),
'meta.caller_id' => '/api/:version/users',
+ 'meta.remote_ip' => an_instance_of(String),
'meta.feature_category' => 'users')
end
end
diff --git a/spec/requests/api/boards_spec.rb b/spec/requests/api/boards_spec.rb
index 36fc6101b84..ca6492396cd 100644
--- a/spec/requests/api/boards_spec.rb
+++ b/spec/requests/api/boards_spec.rb
@@ -53,17 +53,6 @@ RSpec.describe API::Boards do
end
end
- describe "PUT /projects/:id/boards/:board_id" do
- let(:url) { "/projects/#{board_parent.id}/boards/#{board.id}" }
-
- it 'updates the issue board' do
- put api(url, user), params: { name: 'changed board name' }
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['name']).to eq('changed board name')
- end
- end
-
describe "DELETE /projects/:id/boards/:board_id" do
let(:url) { "/projects/#{board_parent.id}/boards/#{board.id}" }
diff --git a/spec/requests/api/ci/runner/jobs_put_spec.rb b/spec/requests/api/ci/runner/jobs_put_spec.rb
index e9d793d5a22..f4c99307b1a 100644
--- a/spec/requests/api/ci/runner/jobs_put_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_put_spec.rb
@@ -78,6 +78,33 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
end
end
+ context 'when an exit_code is provided' do
+ context 'when the exit_codes are acceptable' do
+ before do
+ job.options[:allow_failure_criteria] = { exit_codes: [1] }
+ job.save!
+ end
+
+ it 'accepts an exit code' do
+ update_job(state: 'failed', exit_code: 1)
+
+ expect(job.reload).to be_failed
+ expect(job.allow_failure).to be_truthy
+ expect(job).to be_unknown_failure
+ end
+ end
+
+ context 'when the exit_codes are not defined' do
+        it 'ignores the exit code' do
+ update_job(state: 'failed', exit_code: 1)
+
+ expect(job.reload).to be_failed
+ expect(job.allow_failure).to be_falsy
+ expect(job).to be_unknown_failure
+ end
+ end
+ end
+
context 'when failure_reason is script_failure' do
before do
update_job(state: 'failed', failure_reason: 'script_failure')
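
The new exit_code examples above assert that a failed job only keeps allow_failure when the reported exit code appears in job.options[:allow_failure_criteria][:exit_codes]. A self-contained sketch of that check, using only names taken from this spec rather than the actual implementation:

    # Illustrative stand-in for the job record used in the spec.
    Job = Struct.new(:options)
    job = Job.new(allow_failure_criteria: { exit_codes: [1] })

    def allowed_to_fail?(job, exit_code)
      allowed_codes = job.options.dig(:allow_failure_criteria, :exit_codes)
      return false if allowed_codes.nil? || exit_code.nil?

      allowed_codes.include?(exit_code)
    end

    allowed_to_fail?(job, 1)         # => true  (exit code is in the allow list)
    allowed_to_fail?(Job.new({}), 1) # => false (no criteria defined)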
diff --git a/spec/requests/api/ci/runner/jobs_request_post_spec.rb b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
index 2dc92417892..74d8e3f7ae8 100644
--- a/spec/requests/api/ci/runner/jobs_request_post_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
@@ -156,7 +156,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
'sha' => job.sha,
'before_sha' => job.before_sha,
'ref_type' => 'branch',
- 'refspecs' => ["+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
+ 'refspecs' => ["+#{pipeline.sha}:refs/pipelines/#{pipeline.id}",
"+refs/heads/#{job.ref}:refs/remotes/origin/#{job.ref}"],
'depth' => project.ci_default_git_depth }
end
@@ -284,7 +284,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
expect(response).to have_gitlab_http_status(:created)
expect(json_response['git_info']['refspecs'])
- .to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
+ .to contain_exactly("+#{pipeline.sha}:refs/pipelines/#{pipeline.id}",
'+refs/tags/*:refs/tags/*',
'+refs/heads/*:refs/remotes/origin/*')
end
@@ -346,7 +346,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
expect(response).to have_gitlab_http_status(:created)
expect(json_response['git_info']['refspecs'])
- .to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
+ .to contain_exactly("+#{pipeline.sha}:refs/pipelines/#{pipeline.id}",
'+refs/tags/*:refs/tags/*',
'+refs/heads/*:refs/remotes/origin/*')
end
diff --git a/spec/requests/api/debian_project_packages_spec.rb b/spec/requests/api/debian_project_packages_spec.rb
index d2f208d0079..663b69b1b76 100644
--- a/spec/requests/api/debian_project_packages_spec.rb
+++ b/spec/requests/api/debian_project_packages_spec.rb
@@ -42,5 +42,12 @@ RSpec.describe API::DebianProjectPackages do
it_behaves_like 'Debian project repository PUT endpoint', :created, nil
end
+
+ describe 'PUT projects/:id/-/packages/debian/incoming/:file_name/authorize' do
+ let(:method) { :put }
+ let(:url) { "/projects/#{project.id}/-/packages/debian/incoming/#{file_name}/authorize" }
+
+ it_behaves_like 'Debian project repository PUT endpoint', :created, nil, is_authorize: true
+ end
end
end
diff --git a/spec/requests/api/generic_packages_spec.rb b/spec/requests/api/generic_packages_spec.rb
index b8e79853486..d162d288129 100644
--- a/spec/requests/api/generic_packages_spec.rb
+++ b/spec/requests/api/generic_packages_spec.rb
@@ -3,8 +3,16 @@
require 'spec_helper'
RSpec.describe API::GenericPackages do
+ include HttpBasicAuthHelpers
+
let_it_be(:personal_access_token) { create(:personal_access_token) }
let_it_be(:project, reload: true) { create(:project) }
+ let_it_be(:deploy_token_rw) { create(:deploy_token, read_package_registry: true, write_package_registry: true) }
+ let_it_be(:project_deploy_token_rw) { create(:project_deploy_token, deploy_token: deploy_token_rw, project: project) }
+ let_it_be(:deploy_token_ro) { create(:deploy_token, read_package_registry: true, write_package_registry: false) }
+ let_it_be(:project_deploy_token_ro) { create(:project_deploy_token, deploy_token: deploy_token_ro, project: project) }
+ let_it_be(:deploy_token_wo) { create(:deploy_token, read_package_registry: false, write_package_registry: true) }
+ let_it_be(:project_deploy_token_wo) { create(:project_deploy_token, deploy_token: deploy_token_wo, project: project) }
let(:workhorse_token) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
let(:workhorse_header) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => workhorse_token } }
let(:user) { personal_access_token.user }
@@ -22,6 +30,23 @@ RSpec.describe API::GenericPackages do
personal_access_token_header('wrong token')
when :invalid_job_token
job_token_header('wrong token')
+ when :user_basic_auth
+ user_basic_auth_header(user)
+ when :invalid_user_basic_auth
+ basic_auth_header('invalid user', 'invalid password')
+ end
+ end
+
+ def deploy_token_auth_header
+ case authenticate_with
+ when :deploy_token_rw
+ deploy_token_header(deploy_token_rw.token)
+ when :deploy_token_ro
+ deploy_token_header(deploy_token_ro.token)
+ when :deploy_token_wo
+ deploy_token_header(deploy_token_wo.token)
+ when :invalid_deploy_token
+ deploy_token_header('wrong token')
end
end
@@ -33,6 +58,10 @@ RSpec.describe API::GenericPackages do
{ Gitlab::Auth::AuthFinders::JOB_TOKEN_HEADER => value || ci_build.token }
end
+ def deploy_token_header(value)
+ { Gitlab::Auth::AuthFinders::DEPLOY_TOKEN_HEADER => value }
+ end
+
shared_examples 'secure endpoint' do
before do
project.add_developer(user)
@@ -54,19 +83,35 @@ RSpec.describe API::GenericPackages do
'PUBLIC' | :guest | true | :personal_access_token | :forbidden
'PUBLIC' | :developer | true | :invalid_personal_access_token | :unauthorized
'PUBLIC' | :guest | true | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :developer | true | :user_basic_auth | :success
+ 'PUBLIC' | :guest | true | :user_basic_auth | :forbidden
+ 'PUBLIC' | :developer | true | :invalid_user_basic_auth | :unauthorized
+ 'PUBLIC' | :guest | true | :invalid_user_basic_auth | :unauthorized
'PUBLIC' | :developer | false | :personal_access_token | :forbidden
'PUBLIC' | :guest | false | :personal_access_token | :forbidden
'PUBLIC' | :developer | false | :invalid_personal_access_token | :unauthorized
'PUBLIC' | :guest | false | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :developer | false | :user_basic_auth | :forbidden
+ 'PUBLIC' | :guest | false | :user_basic_auth | :forbidden
+ 'PUBLIC' | :developer | false | :invalid_user_basic_auth | :unauthorized
+ 'PUBLIC' | :guest | false | :invalid_user_basic_auth | :unauthorized
'PUBLIC' | :anonymous | false | :none | :unauthorized
'PRIVATE' | :developer | true | :personal_access_token | :success
'PRIVATE' | :guest | true | :personal_access_token | :forbidden
'PRIVATE' | :developer | true | :invalid_personal_access_token | :unauthorized
'PRIVATE' | :guest | true | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :developer | true | :user_basic_auth | :success
+ 'PRIVATE' | :guest | true | :user_basic_auth | :forbidden
+ 'PRIVATE' | :developer | true | :invalid_user_basic_auth | :unauthorized
+ 'PRIVATE' | :guest | true | :invalid_user_basic_auth | :unauthorized
'PRIVATE' | :developer | false | :personal_access_token | :not_found
'PRIVATE' | :guest | false | :personal_access_token | :not_found
'PRIVATE' | :developer | false | :invalid_personal_access_token | :unauthorized
'PRIVATE' | :guest | false | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :developer | false | :user_basic_auth | :not_found
+ 'PRIVATE' | :guest | false | :user_basic_auth | :not_found
+ 'PRIVATE' | :developer | false | :invalid_user_basic_auth | :unauthorized
+ 'PRIVATE' | :guest | false | :invalid_user_basic_auth | :unauthorized
'PRIVATE' | :anonymous | false | :none | :unauthorized
'PUBLIC' | :developer | true | :job_token | :success
'PUBLIC' | :developer | true | :invalid_job_token | :unauthorized
@@ -90,6 +135,21 @@ RSpec.describe API::GenericPackages do
expect(response).to have_gitlab_http_status(expected_status)
end
end
+
+ where(:authenticate_with, :expected_status) do
+ :deploy_token_rw | :success
+ :deploy_token_wo | :success
+ :deploy_token_ro | :forbidden
+ :invalid_deploy_token | :unauthorized
+ end
+
+ with_them do
+ it "responds with #{params[:expected_status]}" do
+ authorize_upload_file(workhorse_header.merge(deploy_token_auth_header))
+
+ expect(response).to have_gitlab_http_status(expected_status)
+ end
+ end
end
context 'application security' do
@@ -138,20 +198,34 @@ RSpec.describe API::GenericPackages do
where(:project_visibility, :user_role, :member?, :authenticate_with, :expected_status) do
'PUBLIC' | :guest | true | :personal_access_token | :forbidden
+ 'PUBLIC' | :guest | true | :user_basic_auth | :forbidden
'PUBLIC' | :developer | true | :invalid_personal_access_token | :unauthorized
'PUBLIC' | :guest | true | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :developer | true | :invalid_user_basic_auth | :unauthorized
+ 'PUBLIC' | :guest | true | :invalid_user_basic_auth | :unauthorized
'PUBLIC' | :developer | false | :personal_access_token | :forbidden
'PUBLIC' | :guest | false | :personal_access_token | :forbidden
+ 'PUBLIC' | :developer | false | :user_basic_auth | :forbidden
+ 'PUBLIC' | :guest | false | :user_basic_auth | :forbidden
'PUBLIC' | :developer | false | :invalid_personal_access_token | :unauthorized
'PUBLIC' | :guest | false | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :developer | false | :invalid_user_basic_auth | :unauthorized
+ 'PUBLIC' | :guest | false | :invalid_user_basic_auth | :unauthorized
'PUBLIC' | :anonymous | false | :none | :unauthorized
'PRIVATE' | :guest | true | :personal_access_token | :forbidden
+ 'PRIVATE' | :guest | true | :user_basic_auth | :forbidden
'PRIVATE' | :developer | true | :invalid_personal_access_token | :unauthorized
'PRIVATE' | :guest | true | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :developer | true | :invalid_user_basic_auth | :unauthorized
+ 'PRIVATE' | :guest | true | :invalid_user_basic_auth | :unauthorized
'PRIVATE' | :developer | false | :personal_access_token | :not_found
'PRIVATE' | :guest | false | :personal_access_token | :not_found
+ 'PRIVATE' | :developer | false | :user_basic_auth | :not_found
+ 'PRIVATE' | :guest | false | :user_basic_auth | :not_found
'PRIVATE' | :developer | false | :invalid_personal_access_token | :unauthorized
'PRIVATE' | :guest | false | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :developer | false | :invalid_user_basic_auth | :unauthorized
+ 'PRIVATE' | :guest | false | :invalid_user_basic_auth | :unauthorized
'PRIVATE' | :anonymous | false | :none | :unauthorized
'PUBLIC' | :developer | true | :invalid_job_token | :unauthorized
'PUBLIC' | :developer | false | :job_token | :forbidden
@@ -175,6 +249,21 @@ RSpec.describe API::GenericPackages do
expect(response).to have_gitlab_http_status(expected_status)
end
end
+
+ where(:authenticate_with, :expected_status) do
+ :deploy_token_ro | :forbidden
+ :invalid_deploy_token | :unauthorized
+ end
+
+ with_them do
+ it "responds with #{params[:expected_status]}" do
+ headers = workhorse_header.merge(deploy_token_auth_header)
+
+ upload_file(params, headers)
+
+ expect(response).to have_gitlab_http_status(expected_status)
+ end
+ end
end
context 'when user can upload packages and has valid credentials' do
@@ -182,43 +271,58 @@ RSpec.describe API::GenericPackages do
project.add_developer(user)
end
- it 'creates package and package file when valid personal access token is used' do
- headers = workhorse_header.merge(personal_access_token_header)
+ shared_examples 'creates a package and package file' do
+ it 'creates a package and package file' do
+ headers = workhorse_header.merge(auth_header)
- expect { upload_file(params, headers) }
- .to change { project.packages.generic.count }.by(1)
- .and change { Packages::PackageFile.count }.by(1)
+ expect { upload_file(params, headers) }
+ .to change { project.packages.generic.count }.by(1)
+ .and change { Packages::PackageFile.count }.by(1)
- aggregate_failures do
- expect(response).to have_gitlab_http_status(:created)
+ aggregate_failures do
+ expect(response).to have_gitlab_http_status(:created)
- package = project.packages.generic.last
- expect(package.name).to eq('mypackage')
- expect(package.version).to eq('0.0.1')
- expect(package.original_build_info).to be_nil
+ package = project.packages.generic.last
+ expect(package.name).to eq('mypackage')
+ expect(package.version).to eq('0.0.1')
- package_file = package.package_files.last
- expect(package_file.file_name).to eq('myfile.tar.gz')
+ if should_set_build_info
+ expect(package.original_build_info.pipeline).to eq(ci_build.pipeline)
+ else
+ expect(package.original_build_info).to be_nil
+ end
+
+ package_file = package.package_files.last
+ expect(package_file.file_name).to eq('myfile.tar.gz')
+ end
end
end
- it 'creates package, package file, and package build info when valid job token is used' do
- headers = workhorse_header.merge(job_token_header)
-
- expect { upload_file(params, headers) }
- .to change { project.packages.generic.count }.by(1)
- .and change { Packages::PackageFile.count }.by(1)
+ context 'when valid personal access token is used' do
+ it_behaves_like 'creates a package and package file' do
+ let(:auth_header) { personal_access_token_header }
+ let(:should_set_build_info) { false }
+ end
+ end
- aggregate_failures do
- expect(response).to have_gitlab_http_status(:created)
+ context 'when valid basic auth is used' do
+ it_behaves_like 'creates a package and package file' do
+ let(:auth_header) { user_basic_auth_header(user) }
+ let(:should_set_build_info) { false }
+ end
+ end
- package = project.packages.generic.last
- expect(package.name).to eq('mypackage')
- expect(package.version).to eq('0.0.1')
- expect(package.original_build_info.pipeline).to eq(ci_build.pipeline)
+ context 'when valid deploy token is used' do
+ it_behaves_like 'creates a package and package file' do
+ let(:auth_header) { deploy_token_header(deploy_token_wo.token) }
+ let(:should_set_build_info) { false }
+ end
+ end
- package_file = package.package_files.last
- expect(package_file.file_name).to eq('myfile.tar.gz')
+ context 'when valid job token is used' do
+ it_behaves_like 'creates a package and package file' do
+ let(:auth_header) { job_token_header }
+ let(:should_set_build_info) { true }
end
end
@@ -309,21 +413,37 @@ RSpec.describe API::GenericPackages do
where(:project_visibility, :user_role, :member?, :authenticate_with, :expected_status) do
'PUBLIC' | :developer | true | :personal_access_token | :success
'PUBLIC' | :guest | true | :personal_access_token | :success
+ 'PUBLIC' | :developer | true | :user_basic_auth | :success
+ 'PUBLIC' | :guest | true | :user_basic_auth | :success
'PUBLIC' | :developer | true | :invalid_personal_access_token | :unauthorized
'PUBLIC' | :guest | true | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :developer | true | :invalid_user_basic_auth | :unauthorized
+ 'PUBLIC' | :guest | true | :invalid_user_basic_auth | :unauthorized
'PUBLIC' | :developer | false | :personal_access_token | :success
'PUBLIC' | :guest | false | :personal_access_token | :success
+ 'PUBLIC' | :developer | false | :user_basic_auth | :success
+ 'PUBLIC' | :guest | false | :user_basic_auth | :success
'PUBLIC' | :developer | false | :invalid_personal_access_token | :unauthorized
'PUBLIC' | :guest | false | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :developer | false | :invalid_user_basic_auth | :unauthorized
+ 'PUBLIC' | :guest | false | :invalid_user_basic_auth | :unauthorized
'PUBLIC' | :anonymous | false | :none | :unauthorized
'PRIVATE' | :developer | true | :personal_access_token | :success
'PRIVATE' | :guest | true | :personal_access_token | :forbidden
+ 'PRIVATE' | :developer | true | :user_basic_auth | :success
+ 'PRIVATE' | :guest | true | :user_basic_auth | :forbidden
'PRIVATE' | :developer | true | :invalid_personal_access_token | :unauthorized
'PRIVATE' | :guest | true | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :developer | true | :invalid_user_basic_auth | :unauthorized
+ 'PRIVATE' | :guest | true | :invalid_user_basic_auth | :unauthorized
'PRIVATE' | :developer | false | :personal_access_token | :not_found
'PRIVATE' | :guest | false | :personal_access_token | :not_found
+ 'PRIVATE' | :developer | false | :user_basic_auth | :not_found
+ 'PRIVATE' | :guest | false | :user_basic_auth | :not_found
'PRIVATE' | :developer | false | :invalid_personal_access_token | :unauthorized
'PRIVATE' | :guest | false | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :developer | false | :invalid_user_basic_auth | :unauthorized
+ 'PRIVATE' | :guest | false | :invalid_user_basic_auth | :unauthorized
'PRIVATE' | :anonymous | false | :none | :unauthorized
'PUBLIC' | :developer | true | :job_token | :success
'PUBLIC' | :developer | true | :invalid_job_token | :unauthorized
@@ -347,6 +467,21 @@ RSpec.describe API::GenericPackages do
expect(response).to have_gitlab_http_status(expected_status)
end
end
+
+ where(:authenticate_with, :expected_status) do
+ :deploy_token_rw | :success
+ :deploy_token_wo | :success
+ :deploy_token_ro | :success
+ :invalid_deploy_token | :unauthorized
+ end
+
+ with_them do
+ it "responds with #{params[:expected_status]}" do
+ download_file(deploy_token_auth_header)
+
+ expect(response).to have_gitlab_http_status(expected_status)
+ end
+ end
end
context 'event tracking' do
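
The deploy-token rows above drive uploads through the deploy_token_header helper defined earlier in this file. Pulled out of the table syntax, a single read-write deploy token upload in this spec looks roughly like:

    # Sketch using helpers and lets defined earlier in this spec file.
    headers = workhorse_header.merge(
      Gitlab::Auth::AuthFinders::DEPLOY_TOKEN_HEADER => deploy_token_rw.token
    )

    upload_file(params, headers)
    expect(response).to have_gitlab_http_status(:success)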
diff --git a/spec/requests/api/graphql/ci/ci_cd_setting_spec.rb b/spec/requests/api/graphql/ci/ci_cd_setting_spec.rb
index e086ce02942..db8a412e45c 100644
--- a/spec/requests/api/graphql/ci/ci_cd_setting_spec.rb
+++ b/spec/requests/api/graphql/ci/ci_cd_setting_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe 'Getting Ci Cd Setting' do
let(:fields) do
<<~QUERY
- #{all_graphql_fields_for('ProjectCiCdSetting')}
+ #{all_graphql_fields_for('ProjectCiCdSetting', max_depth: 1)}
QUERY
end
@@ -43,8 +43,10 @@ RSpec.describe 'Getting Ci Cd Setting' do
it_behaves_like 'a working graphql query'
- specify { expect(settings_data['mergePipelinesEnabled']).to eql project.ci_cd_settings.merge_pipelines_enabled? }
- specify { expect(settings_data['mergeTrainsEnabled']).to eql project.ci_cd_settings.merge_trains_enabled? }
- specify { expect(settings_data['project']['id']).to eql "gid://gitlab/Project/#{project.id}" }
+ it 'fetches the settings data' do
+ expect(settings_data['mergePipelinesEnabled']).to eql project.ci_cd_settings.merge_pipelines_enabled?
+ expect(settings_data['mergeTrainsEnabled']).to eql project.ci_cd_settings.merge_trains_enabled?
+ expect(settings_data['keepLatestArtifact']).to eql project.ci_keep_latest_artifact?
+ end
end
end
diff --git a/spec/requests/api/graphql/ci/config_spec.rb b/spec/requests/api/graphql/ci/config_spec.rb
index b682470e0a1..8ede6e1538c 100644
--- a/spec/requests/api/graphql/ci/config_spec.rb
+++ b/spec/requests/api/graphql/ci/config_spec.rb
@@ -7,7 +7,8 @@ RSpec.describe 'Query.ciConfig' do
subject(:post_graphql_query) { post_graphql(query, current_user: user) }
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository, creator: user, namespace: user.namespace) }
let_it_be(:content) do
File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci_includes.yml'))
@@ -16,20 +17,41 @@ RSpec.describe 'Query.ciConfig' do
let(:query) do
%(
query {
- ciConfig(content: "#{content}") {
+ ciConfig(projectPath: "#{project.full_path}", content: "#{content}", dryRun: false) {
status
errors
stages {
- name
- groups {
+ nodes {
name
- size
- jobs {
- name
- groupName
- stage
- needs {
+ groups {
+ nodes {
name
+ size
+ jobs {
+ nodes {
+ name
+ groupName
+ stage
+ script
+ beforeScript
+ afterScript
+ allowFailure
+ only {
+ refs
+ }
+ when
+ except {
+ refs
+ }
+ environment
+ tags
+ needs {
+ nodes {
+ name
+ }
+ }
+ }
+ }
}
}
}
@@ -39,53 +61,259 @@ RSpec.describe 'Query.ciConfig' do
)
end
- before do
- post_graphql_query
+ it_behaves_like 'a working graphql query' do
+ before do
+ post_graphql_query
+ end
end
- it_behaves_like 'a working graphql query'
-
it 'returns the correct structure' do
+ post_graphql_query
+
expect(graphql_data['ciConfig']).to eq(
"status" => "VALID",
"errors" => [],
"stages" =>
- [
- {
- "name" => "build",
- "groups" =>
- [
+ {
+ "nodes" =>
+ [
+ {
+ "name" => "build",
+ "groups" =>
{
- "name" => "rspec",
- "size" => 2,
- "jobs" =>
+ "nodes" =>
[
- { "name" => "rspec 0 1", "groupName" => "rspec", "stage" => "build", "needs" => [] },
- { "name" => "rspec 0 2", "groupName" => "rspec", "stage" => "build", "needs" => [] }
+ {
+ "name" => "rspec",
+ "size" => 2,
+ "jobs" =>
+ {
+ "nodes" =>
+ [
+ {
+ "name" => "rspec 0 1",
+ "groupName" => "rspec",
+ "stage" => "build",
+ "script" => ["rake spec"],
+ "beforeScript" => ["bundle install", "bundle exec rake db:create"],
+ "afterScript" => ["echo 'run this after'"],
+ "allowFailure" => false,
+ "only" => { "refs" => %w[branches master] },
+ "when" => "on_success",
+ "except" => nil,
+ "environment" => nil,
+ "tags" => %w[ruby postgres],
+ "needs" => { "nodes" => [] }
+ },
+ {
+ "name" => "rspec 0 2",
+ "groupName" => "rspec",
+ "stage" => "build",
+ "script" => ["rake spec"],
+ "beforeScript" => ["bundle install", "bundle exec rake db:create"],
+ "afterScript" => ["echo 'run this after'"],
+ "allowFailure" => true,
+ "only" => { "refs" => %w[branches tags] },
+ "when" => "on_failure",
+ "except" => nil,
+ "environment" => nil,
+ "tags" => [],
+ "needs" => { "nodes" => [] }
+ }
+ ]
+ }
+ },
+ {
+ "name" => "spinach", "size" => 1, "jobs" =>
+ {
+ "nodes" =>
+ [
+ {
+ "name" => "spinach",
+ "groupName" => "spinach",
+ "stage" => "build",
+ "script" => ["rake spinach"],
+ "beforeScript" => ["bundle install", "bundle exec rake db:create"],
+ "afterScript" => ["echo 'run this after'"],
+ "allowFailure" => false,
+ "only" => { "refs" => %w[branches tags] },
+ "when" => "on_success",
+ "except" => { "refs" => ["tags"] },
+ "environment" => nil,
+ "tags" => [],
+ "needs" => { "nodes" => [] }
+ }
+ ]
+ }
+ }
]
- },
+ }
+ },
+ {
+ "name" => "test",
+ "groups" =>
{
- "name" => "spinach", "size" => 1, "jobs" =>
+ "nodes" =>
[
- { "name" => "spinach", "groupName" => "spinach", "stage" => "build", "needs" => [] }
+ {
+ "name" => "docker",
+ "size" => 1,
+ "jobs" =>
+ {
+ "nodes" => [
+ {
+ "name" => "docker",
+ "groupName" => "docker",
+ "stage" => "test",
+ "script" => ["curl http://dockerhub/URL"],
+ "beforeScript" => ["bundle install", "bundle exec rake db:create"],
+ "afterScript" => ["echo 'run this after'"],
+ "allowFailure" => true,
+ "only" => { "refs" => %w[branches tags] },
+ "when" => "manual",
+ "except" => { "refs" => ["branches"] },
+ "environment" => nil,
+ "tags" => [],
+ "needs" => { "nodes" => [{ "name" => "spinach" }, { "name" => "rspec 0 1" }] }
+ }
+ ]
+ }
+ }
]
}
- ]
- },
+ },
+ {
+ "name" => "deploy",
+ "groups" =>
+ {
+ "nodes" =>
+ [
+ {
+ "name" => "deploy_job",
+ "size" => 1,
+ "jobs" =>
+ {
+ "nodes" => [
+ {
+ "name" => "deploy_job",
+ "groupName" => "deploy_job",
+ "stage" => "deploy",
+ "script" => ["echo 'done'"],
+ "beforeScript" => ["bundle install", "bundle exec rake db:create"],
+ "afterScript" => ["echo 'run this after'"],
+ "allowFailure" => false,
+ "only" => { "refs" => %w[branches tags] },
+ "when" => "on_success",
+ "except" => nil,
+ "environment" => "production",
+ "tags" => [],
+ "needs" => { "nodes" => [] }
+ }
+ ]
+ }
+ }
+ ]
+ }
+ }
+ ]
+ }
+ )
+ end
+
+ context 'when the config file includes other files' do
+ let_it_be(:content) do
+ YAML.dump(
+ include: 'other_file.yml',
+ rspec: {
+ script: 'rspec'
+ }
+ )
+ end
+
+ before do
+ allow_next_instance_of(Repository) do |repository|
+ allow(repository).to receive(:blob_data_at).with(an_instance_of(String), 'other_file.yml') do
+ YAML.dump(
+ build: {
+ script: 'build'
+ }
+ )
+ end
+ end
+
+ post_graphql_query
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ it 'returns the correct structure with included files' do
+ expect(graphql_data['ciConfig']).to eq(
+ "status" => "VALID",
+ "errors" => [],
+ "stages" =>
{
- "name" => "test",
- "groups" =>
+ "nodes" =>
[
{
- "name" => "docker",
- "size" => 1,
- "jobs" => [
- { "name" => "docker", "groupName" => "docker", "stage" => "test", "needs" => [{ "name" => "spinach" }, { "name" => "rspec 0 1" }] }
- ]
+ "name" => "test",
+ "groups" =>
+ {
+ "nodes" =>
+ [
+ {
+ "name" => "build",
+ "size" => 1,
+ "jobs" =>
+ {
+ "nodes" =>
+ [
+ {
+ "name" => "build",
+ "stage" => "test",
+ "groupName" => "build",
+ "script" => ["build"],
+ "afterScript" => [],
+ "beforeScript" => [],
+ "allowFailure" => false,
+ "environment" => nil,
+ "except" => nil,
+ "only" => { "refs" => %w[branches tags] },
+ "when" => "on_success",
+ "tags" => [],
+ "needs" => { "nodes" => [] }
+                              }
+ ]
+ }
+ },
+ {
+ "name" => "rspec",
+ "size" => 1,
+ "jobs" =>
+ {
+ "nodes" =>
+ [
+ { "name" => "rspec",
+ "stage" => "test",
+ "groupName" => "rspec",
+ "script" => ["rspec"],
+ "afterScript" => [],
+ "beforeScript" => [],
+ "allowFailure" => false,
+ "environment" => nil,
+ "except" => nil,
+ "only" => { "refs" => %w[branches tags] },
+ "when" => "on_success",
+ "tags" => [],
+ "needs" => { "nodes" => [] } }
+ ]
+ }
+ }
+ ]
+ }
}
]
}
- ]
- )
+ )
+ end
end
end
diff --git a/spec/requests/api/graphql/ci/jobs_spec.rb b/spec/requests/api/graphql/ci/jobs_spec.rb
index 19954c4e52f..3fb89d6e815 100644
--- a/spec/requests/api/graphql/ci/jobs_spec.rb
+++ b/spec/requests/api/graphql/ci/jobs_spec.rb
@@ -7,48 +7,74 @@ RSpec.describe 'Query.project.pipeline' do
let_it_be(:project) { create(:project, :repository, :public) }
let_it_be(:user) { create(:user) }
- def first(field)
- [field.pluralize, 'nodes', 0]
+ def all(*fields)
+ fields.flat_map { |f| [f, :nodes] }
end
describe '.stages.groups.jobs' do
let(:pipeline) do
pipeline = create(:ci_pipeline, project: project, user: user)
- stage = create(:ci_stage_entity, pipeline: pipeline, name: 'first')
- create(:commit_status, stage_id: stage.id, pipeline: pipeline, name: 'my test job')
+ stage = create(:ci_stage_entity, project: project, pipeline: pipeline, name: 'first')
+ create(:ci_build, stage_id: stage.id, pipeline: pipeline, name: 'my test job')
pipeline
end
- let(:jobs_graphql_data) { graphql_data.dig(*%w[project pipeline], *first('stage'), *first('group'), 'jobs', 'nodes') }
+ let(:jobs_graphql_data) { graphql_data_at(:project, :pipeline, *all(:stages, :groups, :jobs)) }
+
+ let(:first_n) { var('Int') }
let(:query) do
- %(
- query {
- project(fullPath: "#{project.full_path}") {
- pipeline(iid: "#{pipeline.iid}") {
- stages {
- nodes {
- name
- groups {
- nodes {
- name
- jobs {
- nodes {
- name
- pipeline {
- id
- }
- }
- }
- }
- }
+ with_signature([first_n], wrap_fields(query_graphql_path([
+ [:project, { full_path: project.full_path }],
+ [:pipeline, { iid: pipeline.iid.to_s }],
+ [:stages, { first: first_n }]
+ ], stage_fields)))
+ end
+
+ let(:stage_fields) do
+ <<~FIELDS
+ nodes {
+ name
+ groups {
+ nodes {
+ name
+ jobs {
+ nodes {
+ name
+ needs {
+ nodes { #{all_graphql_fields_for('CiBuildNeed')} }
+ }
+ pipeline {
+ id
}
}
}
}
}
- )
+ }
+ FIELDS
+ end
+
+ context 'when there are build needs' do
+ before do
+ pipeline.statuses.each do |build|
+ create_list(:ci_build_need, 2, build: build)
+ end
+ end
+
+ it 'reports the build needs' do
+ post_graphql(query, current_user: user)
+
+ expect(jobs_graphql_data).to contain_exactly a_hash_including(
+ 'needs' => a_hash_including(
+ 'nodes' => contain_exactly(
+ a_hash_including('name' => String),
+ a_hash_including('name' => String)
+ )
+ )
+ )
+ end
end
it 'returns the jobs of a pipeline stage' do
@@ -57,60 +83,43 @@ RSpec.describe 'Query.project.pipeline' do
expect(jobs_graphql_data).to contain_exactly(a_hash_including('name' => 'my test job'))
end
- it 'avoids N+1 queries', :aggregate_failures do
- control_count = ActiveRecord::QueryRecorder.new do
- post_graphql(query, current_user: user)
+ describe 'performance' do
+ before do
+ build_stage = create(:ci_stage_entity, position: 2, name: 'build', project: project, pipeline: pipeline)
+ test_stage = create(:ci_stage_entity, position: 3, name: 'test', project: project, pipeline: pipeline)
+ create(:commit_status, pipeline: pipeline, stage_id: build_stage.id, name: 'docker 1 2')
+ create(:commit_status, pipeline: pipeline, stage_id: build_stage.id, name: 'docker 2 2')
+ create(:commit_status, pipeline: pipeline, stage_id: test_stage.id, name: 'rspec 1 2')
+ create(:commit_status, pipeline: pipeline, stage_id: test_stage.id, name: 'rspec 2 2')
end
- build_stage = create(:ci_stage_entity, name: 'build', pipeline: pipeline)
- test_stage = create(:ci_stage_entity, name: 'test', pipeline: pipeline)
- create(:commit_status, pipeline: pipeline, stage_id: build_stage.id, name: 'docker 1 2')
- create(:commit_status, pipeline: pipeline, stage_id: build_stage.id, name: 'docker 2 2')
- create(:commit_status, pipeline: pipeline, stage_id: test_stage.id, name: 'rspec 1 2')
- create(:commit_status, pipeline: pipeline, stage_id: test_stage.id, name: 'rspec 2 2')
+ it 'can find the first stage' do
+ post_graphql(query, current_user: user, variables: first_n.with(1))
- expect do
- post_graphql(query, current_user: user)
- end.not_to exceed_query_limit(control_count)
+ expect(jobs_graphql_data).to contain_exactly(a_hash_including('name' => 'my test job'))
+ end
- expect(response).to have_gitlab_http_status(:ok)
+ it 'can find all stages' do
+ post_graphql(query, current_user: user, variables: first_n.with(3))
- build_stage = graphql_data.dig('project', 'pipeline', 'stages', 'nodes').find do |stage|
- stage['name'] == 'build'
+ expect(jobs_graphql_data).to contain_exactly(
+ a_hash_including('name' => 'my test job'),
+ a_hash_including('name' => 'docker 1 2'),
+ a_hash_including('name' => 'docker 2 2'),
+ a_hash_including('name' => 'rspec 1 2'),
+ a_hash_including('name' => 'rspec 2 2')
+ )
end
- test_stage = graphql_data.dig('project', 'pipeline', 'stages', 'nodes').find do |stage|
- stage['name'] == 'test'
+
+ it 'avoids N+1 queries' do
+ control_count = ActiveRecord::QueryRecorder.new do
+ post_graphql(query, current_user: user, variables: first_n.with(1))
+ end
+
+ expect do
+ post_graphql(query, current_user: user, variables: first_n.with(3))
+ end.not_to exceed_query_limit(control_count)
end
- docker_group = build_stage.dig('groups', 'nodes').first
- rspec_group = test_stage.dig('groups', 'nodes').first
-
- expect(docker_group['name']).to eq('docker')
- expect(rspec_group['name']).to eq('rspec')
-
- docker_jobs = docker_group.dig('jobs', 'nodes')
- rspec_jobs = rspec_group.dig('jobs', 'nodes')
-
- expect(docker_jobs).to eq([
- {
- 'name' => 'docker 1 2',
- 'pipeline' => { 'id' => pipeline.to_global_id.to_s }
- },
- {
- 'name' => 'docker 2 2',
- 'pipeline' => { 'id' => pipeline.to_global_id.to_s }
- }
- ])
-
- expect(rspec_jobs).to eq([
- {
- 'name' => 'rspec 1 2',
- 'pipeline' => { 'id' => pipeline.to_global_id.to_s }
- },
- {
- 'name' => 'rspec 2 2',
- 'pipeline' => { 'id' => pipeline.to_global_id.to_s }
- }
- ])
end
end
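
Several of the GraphQL specs in this diff share the same query-count guard: record the queries needed for a small page of results, grow the dataset, then assert that fetching the larger page stays within the recorded baseline. In isolation, the pattern used above is:

    # Pattern as used in the spec above; query, user and first_n are spec-level lets.
    control_count = ActiveRecord::QueryRecorder.new do
      post_graphql(query, current_user: user, variables: first_n.with(1))
    end

    # ...create additional stages and builds here...

    expect do
      post_graphql(query, current_user: user, variables: first_n.with(3))
    end.not_to exceed_query_limit(control_count)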
diff --git a/spec/requests/api/graphql/ci/pipelines_spec.rb b/spec/requests/api/graphql/ci/pipelines_spec.rb
index 414ddabbac9..7933251b8e9 100644
--- a/spec/requests/api/graphql/ci/pipelines_spec.rb
+++ b/spec/requests/api/graphql/ci/pipelines_spec.rb
@@ -6,53 +6,59 @@ RSpec.describe 'Query.project(fullPath).pipelines' do
include GraphqlHelpers
let_it_be(:project) { create(:project, :repository, :public) }
- let_it_be(:first_user) { create(:user) }
- let_it_be(:second_user) { create(:user) }
+ let_it_be(:user) { create(:user) }
describe '.jobs' do
- let_it_be(:query) do
- %(
- query {
- project(fullPath: "#{project.full_path}") {
- pipelines {
- nodes {
- jobs {
- nodes {
- name
- }
- }
- }
- }
- }
- }
- )
+ let(:first_n) { var('Int') }
+ let(:query_path) do
+ [
+ [:project, { full_path: project.full_path }],
+ [:pipelines, { first: first_n }],
+ [:nodes],
+ [:jobs],
+ [:nodes]
+ ]
end
- it 'fetches the jobs without an N+1' do
+ let(:query) do
+ with_signature([first_n], wrap_fields(query_graphql_path(query_path, :name)))
+ end
+
+ before_all do
pipeline = create(:ci_pipeline, project: project)
create(:ci_build, pipeline: pipeline, name: 'Job 1')
-
- control_count = ActiveRecord::QueryRecorder.new do
- post_graphql(query, current_user: first_user)
- end
-
pipeline = create(:ci_pipeline, project: project)
create(:ci_build, pipeline: pipeline, name: 'Job 2')
+ end
- expect do
- post_graphql(query, current_user: second_user)
- end.not_to exceed_query_limit(control_count)
+ it 'limits the results' do
+ post_graphql(query, current_user: user, variables: first_n.with(1))
- expect(response).to have_gitlab_http_status(:ok)
+ expect(graphql_data_at(*query_path.map(&:first))).to contain_exactly a_hash_including(
+ 'name' => 'Job 2'
+ )
+ end
- pipelines_data = graphql_data.dig('project', 'pipelines', 'nodes')
+ it 'fetches all results' do
+ post_graphql(query, current_user: user)
- job_names = pipelines_data.map do |pipeline_data|
- jobs_data = pipeline_data.dig('jobs', 'nodes')
- jobs_data.map { |job_data| job_data['name'] }
- end.flatten
+ expect(graphql_data_at(*query_path.map(&:first))).to contain_exactly(
+ a_hash_including('name' => 'Job 1'),
+ a_hash_including('name' => 'Job 2')
+ )
+ end
+
+ it 'fetches the jobs without an N+1' do
+ first_user = create(:personal_access_token).user
+ second_user = create(:personal_access_token).user
+
+ control_count = ActiveRecord::QueryRecorder.new do
+ post_graphql(query, current_user: first_user, variables: first_n.with(1))
+ end
- expect(job_names).to contain_exactly('Job 1', 'Job 2')
+ expect do
+ post_graphql(query, current_user: second_user)
+ end.not_to exceed_query_limit(control_count)
end
end
@@ -80,7 +86,7 @@ RSpec.describe 'Query.project(fullPath).pipelines' do
create(:ci_build, :dast, name: 'DAST Job 1', pipeline: pipeline)
create(:ci_build, :sast, name: 'SAST Job 1', pipeline: pipeline)
- post_graphql(query, current_user: first_user)
+ post_graphql(query, current_user: user)
expect(response).to have_gitlab_http_status(:ok)
@@ -96,9 +102,9 @@ RSpec.describe 'Query.project(fullPath).pipelines' do
end
describe 'upstream' do
- let_it_be(:pipeline) { create(:ci_pipeline, project: project, user: first_user) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project, user: user) }
let_it_be(:upstream_project) { create(:project, :repository, :public) }
- let_it_be(:upstream_pipeline) { create(:ci_pipeline, project: upstream_project, user: first_user) }
+ let_it_be(:upstream_pipeline) { create(:ci_pipeline, project: upstream_project, user: user) }
let(:upstream_pipelines_graphql_data) { graphql_data.dig(*%w[project pipelines nodes]).first['upstream'] }
let(:query) do
@@ -120,7 +126,7 @@ RSpec.describe 'Query.project(fullPath).pipelines' do
before do
create(:ci_sources_pipeline, source_pipeline: upstream_pipeline, pipeline: pipeline )
- post_graphql(query, current_user: first_user)
+ post_graphql(query, current_user: user)
end
it_behaves_like 'a working graphql query'
@@ -131,15 +137,18 @@ RSpec.describe 'Query.project(fullPath).pipelines' do
context 'when fetching the upstream pipeline from the pipeline' do
it 'avoids N+1 queries' do
+ first_user = create(:user)
+ second_user = create(:user)
+
control_count = ActiveRecord::QueryRecorder.new do
post_graphql(query, current_user: first_user)
end
- pipeline_2 = create(:ci_pipeline, project: project, user: first_user)
- upstream_pipeline_2 = create(:ci_pipeline, project: upstream_project, user: first_user)
+ pipeline_2 = create(:ci_pipeline, project: project, user: user)
+ upstream_pipeline_2 = create(:ci_pipeline, project: upstream_project, user: user)
create(:ci_sources_pipeline, source_pipeline: upstream_pipeline_2, pipeline: pipeline_2 )
- pipeline_3 = create(:ci_pipeline, project: project, user: first_user)
- upstream_pipeline_3 = create(:ci_pipeline, project: upstream_project, user: first_user)
+ pipeline_3 = create(:ci_pipeline, project: project, user: user)
+ upstream_pipeline_3 = create(:ci_pipeline, project: upstream_project, user: user)
create(:ci_sources_pipeline, source_pipeline: upstream_pipeline_3, pipeline: pipeline_3 )
expect do
@@ -152,12 +161,12 @@ RSpec.describe 'Query.project(fullPath).pipelines' do
end
describe 'downstream' do
- let_it_be(:pipeline) { create(:ci_pipeline, project: project, user: first_user) }
- let(:pipeline_2) { create(:ci_pipeline, project: project, user: first_user) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project, user: user) }
+ let(:pipeline_2) { create(:ci_pipeline, project: project, user: user) }
let_it_be(:downstream_project) { create(:project, :repository, :public) }
- let_it_be(:downstream_pipeline_a) { create(:ci_pipeline, project: downstream_project, user: first_user) }
- let_it_be(:downstream_pipeline_b) { create(:ci_pipeline, project: downstream_project, user: first_user) }
+ let_it_be(:downstream_pipeline_a) { create(:ci_pipeline, project: downstream_project, user: user) }
+ let_it_be(:downstream_pipeline_b) { create(:ci_pipeline, project: downstream_project, user: user) }
let(:pipelines_graphql_data) { graphql_data.dig(*%w[project pipelines nodes]) }
@@ -183,7 +192,7 @@ RSpec.describe 'Query.project(fullPath).pipelines' do
create(:ci_sources_pipeline, source_pipeline: pipeline, pipeline: downstream_pipeline_a)
create(:ci_sources_pipeline, source_pipeline: pipeline_2, pipeline: downstream_pipeline_b)
- post_graphql(query, current_user: first_user)
+ post_graphql(query, current_user: user)
end
it_behaves_like 'a working graphql query'
@@ -198,16 +207,19 @@ RSpec.describe 'Query.project(fullPath).pipelines' do
context 'when fetching the downstream pipelines from the pipeline' do
it 'avoids N+1 queries' do
+ first_user = create(:user)
+ second_user = create(:user)
+
control_count = ActiveRecord::QueryRecorder.new do
post_graphql(query, current_user: first_user)
end
- downstream_pipeline_2a = create(:ci_pipeline, project: downstream_project, user: first_user)
+ downstream_pipeline_2a = create(:ci_pipeline, project: downstream_project, user: user)
create(:ci_sources_pipeline, source_pipeline: pipeline, pipeline: downstream_pipeline_2a)
- downsteam_pipeline_3a = create(:ci_pipeline, project: downstream_project, user: first_user)
+ downsteam_pipeline_3a = create(:ci_pipeline, project: downstream_project, user: user)
create(:ci_sources_pipeline, source_pipeline: pipeline, pipeline: downsteam_pipeline_3a)
- downstream_pipeline_2b = create(:ci_pipeline, project: downstream_project, user: first_user)
+ downstream_pipeline_2b = create(:ci_pipeline, project: downstream_project, user: user)
create(:ci_sources_pipeline, source_pipeline: pipeline_2, pipeline: downstream_pipeline_2b)
downsteam_pipeline_3b = create(:ci_pipeline, project: downstream_project, user: first_user)
create(:ci_sources_pipeline, source_pipeline: pipeline_2, pipeline: downsteam_pipeline_3b)
diff --git a/spec/requests/api/graphql/group/group_members_spec.rb b/spec/requests/api/graphql/group/group_members_spec.rb
index 3554e22cdf2..452610ab18f 100644
--- a/spec/requests/api/graphql/group/group_members_spec.rb
+++ b/spec/requests/api/graphql/group/group_members_spec.rb
@@ -10,15 +10,13 @@ RSpec.describe 'getting group members information' do
let_it_be(:user_1) { create(:user, username: 'user') }
let_it_be(:user_2) { create(:user, username: 'test') }
- let(:member_data) { graphql_data['group']['groupMembers']['edges'] }
-
before_all do
[user_1, user_2].each { |user| parent_group.add_guest(user) }
end
context 'when the request is correct' do
it_behaves_like 'a working graphql query' do
- before_all do
+ before do
fetch_members
end
end
@@ -80,12 +78,10 @@ RSpec.describe 'getting group members information' do
end
context 'when unauthenticated' do
- it 'returns nothing' do
+ it 'returns visible members' do
fetch_members(current_user: nil)
- expect(graphql_errors).to be_nil
- expect(response).to have_gitlab_http_status(:success)
- expect(member_data).to be_empty
+ expect_array_response(user_1, user_2)
end
end
@@ -112,8 +108,8 @@ RSpec.describe 'getting group members information' do
def expect_array_response(*items)
expect(response).to have_gitlab_http_status(:success)
- expect(member_data).to be_an Array
- expect(member_data.map { |node| node["node"]["user"]["id"] })
- .to match_array(items.map { |u| global_id_of(u) })
+ member_gids = graphql_data_at(:group, :group_members, :edges, :node, :user, :id)
+
+ expect(member_gids).to match_array(items.map { |u| global_id_of(u) })
end
end
diff --git a/spec/requests/api/graphql/mutations/alert_management/alerts/todo/create_spec.rb b/spec/requests/api/graphql/mutations/alert_management/alerts/todo/create_spec.rb
index e5803f50474..cd423d7764a 100644
--- a/spec/requests/api/graphql/mutations/alert_management/alerts/todo/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/alert_management/alerts/todo/create_spec.rb
@@ -42,6 +42,8 @@ RSpec.describe 'Creating a todo for the alert' do
context 'todo already exists' do
before do
+ stub_feature_flags(multiple_todos: false)
+
create(:todo, :pending, project: project, user: user, target: alert)
end
diff --git a/spec/requests/api/graphql/mutations/alert_management/http_integration/create_spec.rb b/spec/requests/api/graphql/mutations/alert_management/http_integration/create_spec.rb
index a285cebc805..e594d67aab4 100644
--- a/spec/requests/api/graphql/mutations/alert_management/http_integration/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/alert_management/http_integration/create_spec.rb
@@ -39,21 +39,7 @@ RSpec.describe 'Creating a new HTTP Integration' do
project.add_maintainer(current_user)
end
- it 'creates a new integration' do
- post_graphql_mutation(mutation, current_user: current_user)
-
- new_integration = ::AlertManagement::HttpIntegration.last!
- integration_response = mutation_response['integration']
-
- expect(response).to have_gitlab_http_status(:success)
- expect(integration_response['id']).to eq(GitlabSchema.id_from_object(new_integration).to_s)
- expect(integration_response['type']).to eq('HTTP')
- expect(integration_response['name']).to eq(new_integration.name)
- expect(integration_response['active']).to eq(new_integration.active)
- expect(integration_response['token']).to eq(new_integration.token)
- expect(integration_response['url']).to eq(new_integration.url)
- expect(integration_response['apiUrl']).to eq(nil)
- end
+ it_behaves_like 'creating a new HTTP integration'
[:project_path, :active, :name].each do |argument|
context "without required argument #{argument}" do
diff --git a/spec/requests/api/graphql/mutations/ci/ci_cd_settings_update_spec.rb b/spec/requests/api/graphql/mutations/ci/ci_cd_settings_update_spec.rb
new file mode 100644
index 00000000000..283badeaf33
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/ci/ci_cd_settings_update_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'CiCdSettingsUpdate' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, ci_keep_latest_artifact: true) }
+ let(:variables) { { full_path: project.full_path, keep_latest_artifact: false } }
+ let(:mutation) { graphql_mutation(:ci_cd_settings_update, variables) }
+
+ context 'when unauthorized' do
+ let(:user) { create(:user) }
+
+ shared_examples 'unauthorized' do
+ it 'returns an error' do
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(graphql_errors).not_to be_empty
+ end
+ end
+
+ context 'when not a project member' do
+ it_behaves_like 'unauthorized'
+ end
+
+ context 'when a non-admin project member' do
+ before do
+ project.add_developer(user)
+ end
+
+ it_behaves_like 'unauthorized'
+ end
+ end
+
+ context 'when authorized' do
+ let_it_be(:user) { project.owner }
+
+ it 'updates ci cd settings' do
+ post_graphql_mutation(mutation, current_user: user)
+
+ project.reload
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(project.ci_keep_latest_artifact).to eq(false)
+ end
+
+ context 'when bad arguments are provided' do
+ let(:variables) { { full_path: '', keep_latest_artifact: false } }
+
+ it 'returns the errors' do
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(graphql_errors).not_to be_empty
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb b/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb
new file mode 100644
index 00000000000..749373e7b8d
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb
@@ -0,0 +1,142 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Updating the package settings' do
+ include GraphqlHelpers
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:user) { create(:user) }
+
+ let(:params) do
+ {
+ namespace_path: namespace.full_path,
+ maven_duplicates_allowed: false,
+ maven_duplicate_exception_regex: 'foo-.*'
+ }
+ end
+
+ let(:mutation) do
+ graphql_mutation(:update_namespace_package_settings, params) do
+ <<~QL
+ packageSettings {
+ mavenDuplicatesAllowed
+ mavenDuplicateExceptionRegex
+ }
+ errors
+ QL
+ end
+ end
+
+ let(:mutation_response) { graphql_mutation_response(:update_namespace_package_settings) }
+ let(:package_settings_response) { mutation_response['packageSettings'] }
+
+ RSpec.shared_examples 'returning a success' do
+ it_behaves_like 'returning response status', :success
+
+ it 'returns the updated package settings', :aggregate_failures do
+ subject
+
+ expect(mutation_response['errors']).to be_empty
+ expect(package_settings_response['mavenDuplicatesAllowed']).to eq(params[:maven_duplicates_allowed])
+ expect(package_settings_response['mavenDuplicateExceptionRegex']).to eq(params[:maven_duplicate_exception_regex])
+ end
+ end
+
+ RSpec.shared_examples 'rejecting invalid regex' do
+ context "for field mavenDuplicateExceptionRegex" do
+ let_it_be(:invalid_regex) { '][' }
+
+ let(:params) do
+ {
+ :namespace_path => namespace.full_path,
+ 'mavenDuplicateExceptionRegex' => invalid_regex
+ }
+ end
+
+ it_behaves_like 'returning response status', :success
+
+ it_behaves_like 'not creating the namespace package setting'
+
+ it 'returns an error', :aggregate_failures do
+ subject
+
+ expect(graphql_errors.size).to eq(1)
+ expect(graphql_errors.first['message']).to include("#{invalid_regex} is an invalid regexp")
+ end
+ end
+ end
+
+ RSpec.shared_examples 'accepting the mutation request updating the package settings' do
+ it_behaves_like 'updating the namespace package setting attributes',
+ from: { maven_duplicates_allowed: true, maven_duplicate_exception_regex: 'SNAPSHOT' },
+ to: { maven_duplicates_allowed: false, maven_duplicate_exception_regex: 'foo-.*' }
+
+ it_behaves_like 'returning a success'
+ it_behaves_like 'rejecting invalid regex'
+ end
+
+ RSpec.shared_examples 'accepting the mutation request creating the package settings' do
+ it_behaves_like 'creating the namespace package setting'
+ it_behaves_like 'returning a success'
+ it_behaves_like 'rejecting invalid regex'
+ end
+
+ RSpec.shared_examples 'denying the mutation request' do
+ it_behaves_like 'not creating the namespace package setting'
+
+ it_behaves_like 'returning response status', :success
+
+ it 'returns no response' do
+ subject
+
+ expect(mutation_response).to be_nil
+ end
+ end
+
+ describe 'post graphql mutation' do
+ subject { post_graphql_mutation(mutation, current_user: user) }
+
+ context 'with existing package settings' do
+ let_it_be(:package_settings, reload: true) { create(:namespace_package_setting, :group) }
+ let_it_be(:namespace, reload: true) { package_settings.namespace }
+
+ where(:user_role, :shared_examples_name) do
+ :maintainer | 'accepting the mutation request updating the package settings'
+ :developer | 'accepting the mutation request updating the package settings'
+ :reporter | 'denying the mutation request'
+ :guest | 'denying the mutation request'
+ :anonymous | 'denying the mutation request'
+ end
+
+ with_them do
+ before do
+ namespace.send("add_#{user_role}", user) unless user_role == :anonymous
+ end
+
+ it_behaves_like params[:shared_examples_name]
+ end
+ end
+
+ context 'without existing package settings' do
+ let_it_be(:namespace, reload: true) { create(:group) }
+ let(:package_settings) { namespace.package_settings }
+
+ where(:user_role, :shared_examples_name) do
+ :maintainer | 'accepting the mutation request creating the package settings'
+ :developer | 'accepting the mutation request creating the package settings'
+ :reporter | 'denying the mutation request'
+ :guest | 'denying the mutation request'
+ :anonymous | 'denying the mutation request'
+ end
+
+ with_them do
+ before do
+ namespace.send("add_#{user_role}", user) unless user_role == :anonymous
+ end
+
+ it_behaves_like params[:shared_examples_name]
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/releases/create_spec.rb b/spec/requests/api/graphql/mutations/releases/create_spec.rb
index d745eb3083d..79bdcec7944 100644
--- a/spec/requests/api/graphql/mutations/releases/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/releases/create_spec.rb
@@ -116,11 +116,9 @@ RSpec.describe 'Creation of a new release' do
context 'when all available mutation arguments are provided' do
it_behaves_like 'no errors'
- # rubocop: disable CodeReuse/ActiveRecord
it 'returns the new release data' do
create_release
- release = mutation_response[:release]
expected_direct_asset_url = Gitlab::Routing.url_helpers.project_release_url(project, Release.find_by(tag: tag_name)) << "/downloads#{asset_link[:directAssetPath]}"
expected_attributes = {
@@ -139,21 +137,17 @@ RSpec.describe 'Creation of a new release' do
directAssetUrl: expected_direct_asset_url
}]
}
+ },
+ milestones: {
+ nodes: [
+ { title: '12.3' },
+ { title: '12.4' }
+ ]
}
- }
-
- expect(release).to include(expected_attributes)
+ }.with_indifferent_access
- # Right now the milestones are returned in a non-deterministic order.
- # This `milestones` test should be moved up into the expect(release)
- # above (and `.to include` updated to `.to eq`) once
- # https://gitlab.com/gitlab-org/gitlab/-/issues/259012 is addressed.
- expect(release['milestones']['nodes']).to match_array([
- { 'title' => '12.4' },
- { 'title' => '12.3' }
- ])
+ expect(mutation_response[:release]).to eq(expected_attributes)
end
- # rubocop: enable CodeReuse/ActiveRecord
end
context 'when only the required mutation arguments are provided' do
diff --git a/spec/requests/api/graphql/mutations/releases/update_spec.rb b/spec/requests/api/graphql/mutations/releases/update_spec.rb
index 19320c3393c..c9a6c3abd57 100644
--- a/spec/requests/api/graphql/mutations/releases/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/releases/update_spec.rb
@@ -116,15 +116,7 @@ RSpec.describe 'Updating an existing release' do
it 'updates the correct field and returns the release' do
update_release
- expect(mutation_response[:release]).to include(expected_attributes.merge(updates).except(:milestones))
-
- # Right now the milestones are returned in a non-deterministic order.
- # Because of this, we need to test milestones separately to allow
- # for them to be returned in any order.
- # Once https://gitlab.com/gitlab-org/gitlab/-/issues/259012 has been
- # fixed, this special milestone handling can be removed.
- expected_milestones = expected_attributes.merge(updates)[:milestones]
- expect(mutation_response[:release][:milestones][:nodes]).to match_array(expected_milestones[:nodes])
+ expect(mutation_response[:release]).to eq(expected_attributes.merge(updates))
end
end
diff --git a/spec/requests/api/graphql/namespace/package_settings_spec.rb b/spec/requests/api/graphql/namespace/package_settings_spec.rb
new file mode 100644
index 00000000000..6af098e902f
--- /dev/null
+++ b/spec/requests/api/graphql/namespace/package_settings_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'getting namespace package settings in a namespace' do
+ include GraphqlHelpers
+
+ let_it_be(:package_settings) { create(:namespace_package_setting) }
+ let_it_be(:namespace) { package_settings.namespace }
+ let_it_be(:current_user) { namespace.owner }
+ let(:package_settings_response) { graphql_data.dig('namespace', 'packageSettings') }
+ let(:fields) { all_graphql_fields_for('PackageSettings') }
+
+ let(:query) do
+ graphql_query_for(
+ 'namespace',
+ { 'fullPath' => namespace.full_path },
+ query_graphql_field('package_settings', {}, fields)
+ )
+ end
+
+ subject { post_graphql(query, current_user: current_user) }
+
+ it_behaves_like 'a working graphql query' do
+ before do
+ subject
+ end
+
+ it 'matches the JSON schema' do
+ expect(package_settings_response).to match_schema('graphql/namespace/package_settings')
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/packages/package_composer_details_spec.rb b/spec/requests/api/graphql/packages/package_composer_details_spec.rb
new file mode 100644
index 00000000000..1a2cf4a972a
--- /dev/null
+++ b/spec/requests/api/graphql/packages/package_composer_details_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'package composer details' do
+ using RSpec::Parameterized::TableSyntax
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:package) { create(:composer_package, project: project) }
+ let_it_be(:composer_metadatum) do
+ # we create the metadatum manually (instead of using the factory) to force the sha to be a string
+ # and to avoid an error where gitaly can't find the repository
+ create(:composer_metadatum, package: package, target_sha: 'foo_sha', composer_json: { name: 'name', type: 'type', license: 'license', version: 1 })
+ end
+
+ let(:query) do
+ graphql_query_for(
+ 'packageComposerDetails',
+ { id: package_global_id },
+ all_graphql_fields_for('PackageComposerDetails', max_depth: 2)
+ )
+ end
+
+ let(:user) { project.owner }
+ let(:package_global_id) { package.to_global_id.to_s }
+ let(:package_composer_details_response) { graphql_data.dig('packageComposerDetails') }
+
+ subject { post_graphql(query, current_user: user) }
+
+ it_behaves_like 'a working graphql query' do
+ before do
+ subject
+ end
+
+ it 'matches the JSON schema' do
+ expect(package_composer_details_response).to match_schema('graphql/packages/package_composer_details')
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/project/alert_management/alert/assignees_spec.rb b/spec/requests/api/graphql/project/alert_management/alert/assignees_spec.rb
index dd001a73349..9ab94f1d749 100644
--- a/spec/requests/api/graphql/project/alert_management/alert/assignees_spec.rb
+++ b/spec/requests/api/graphql/project/alert_management/alert/assignees_spec.rb
@@ -60,19 +60,35 @@ RSpec.describe 'getting Alert Management Alert Assignees' do
expect(second_assignees).to be_empty
end
- it 'avoids N+1 queries' do
- base_count = ActiveRecord::QueryRecorder.new do
- post_graphql(query, current_user: current_user)
+ describe 'performance' do
+ let(:first_n) { var('Int') }
+ let(:params) { { first: first_n } }
+ let(:limited_query) { with_signature([first_n], query) }
+
+ before do
+ create(:alert_management_alert, project: project, assignees: [current_user])
+ end
+
+ it 'can limit results' do
+ post_graphql(limited_query, current_user: current_user, variables: first_n.with(1))
+
+ expect(alerts.size).to eq 1
+ expect(alerts).not_to include(a_hash_including('iid' => first_alert.iid.to_s))
end
- # An N+1 would mean a new alert would increase the query count
- third_alert = create(:alert_management_alert, project: project, assignees: [current_user])
+ it 'can include all results' do
+ post_graphql(limited_query, current_user: current_user)
- expect { post_graphql(query, current_user: current_user) }.not_to exceed_query_limit(base_count)
+ expect(alerts.size).to be > 1
+ expect(alerts).to include(a_hash_including('iid' => first_alert.iid.to_s))
+ end
- third_assignees = assignees[third_alert.iid.to_s]
+ it 'avoids N+1 queries' do
+ base_count = ActiveRecord::QueryRecorder.new do
+ post_graphql(limited_query, current_user: current_user, variables: first_n.with(1))
+ end
- expect(third_assignees.length).to eq(1)
- expect(third_assignees.first).to include('username' => current_user.username)
+ expect { post_graphql(limited_query, current_user: current_user) }.not_to exceed_query_limit(base_count)
+ end
end
end
diff --git a/spec/requests/api/graphql/project/alert_management/alert/notes_spec.rb b/spec/requests/api/graphql/project/alert_management/alert/notes_spec.rb
index 1350cba119b..5d46f370756 100644
--- a/spec/requests/api/graphql/project/alert_management/alert/notes_spec.rb
+++ b/spec/requests/api/graphql/project/alert_management/alert/notes_spec.rb
@@ -65,16 +65,28 @@ RSpec.describe 'getting Alert Management Alert Notes' do
expect(second_notes_result).to be_empty
end
- it 'avoids N+1 queries' do
- base_count = ActiveRecord::QueryRecorder.new do
- post_graphql(query, current_user: current_user)
+ describe 'performance' do
+ let(:first_n) { var('Int') }
+ let(:params) { { first: first_n } }
+
+ before do
+ # An N+1 would mean a new alert would increase the query count
+ create(:alert_management_alert, project: project)
end
- # An N+1 would mean a new alert would increase the query count
- create(:alert_management_alert, project: project)
+ it 'avoids N+1 queries' do
+ q = with_signature([first_n], query)
- expect { post_graphql(query, current_user: current_user) }.not_to exceed_query_limit(base_count)
- expect(alerts_result.length).to eq(3)
+ base_count = ActiveRecord::QueryRecorder.new do
+ post_graphql(q, current_user: current_user, variables: first_n.with(1))
+ expect(alerts_result.length).to eq(1)
+ end
+
+ expect do
+ post_graphql(q, current_user: current_user, variables: first_n.with(3))
+ expect(alerts_result.length).to eq(3)
+ end.not_to exceed_query_limit(base_count)
+ end
end
context 'for non-system notes' do
diff --git a/spec/requests/api/graphql/project/merge_request_spec.rb b/spec/requests/api/graphql/project/merge_request_spec.rb
index fae52fe814d..e1b867ad097 100644
--- a/spec/requests/api/graphql/project/merge_request_spec.rb
+++ b/spec/requests/api/graphql/project/merge_request_spec.rb
@@ -9,12 +9,13 @@ RSpec.describe 'getting merge request information nested in a project' do
let(:current_user) { create(:user) }
let(:merge_request_graphql_data) { graphql_data['project']['mergeRequest'] }
let!(:merge_request) { create(:merge_request, source_project: project) }
+ let(:mr_fields) { all_graphql_fields_for('MergeRequest') }
let(:query) do
graphql_query_for(
'project',
{ 'fullPath' => project.full_path },
- query_graphql_field('mergeRequest', iid: merge_request.iid.to_s)
+ query_graphql_field('mergeRequest', { iid: merge_request.iid.to_s }, mr_fields)
)
end
@@ -43,6 +44,38 @@ RSpec.describe 'getting merge request information nested in a project' do
expect(merge_request_graphql_data['author']['username']).to eq(merge_request.author.username)
end
+ context 'when the merge request has reviewers' do
+ let(:mr_fields) do
+ <<~SELECT
+ reviewers { nodes { id username } }
+ participants { nodes { id username } }
+ SELECT
+ end
+
+ before do
+ merge_request.reviewers << create_list(:user, 2)
+ end
+
+ it 'includes reviewers' do
+ expected = merge_request.reviewers.map do |r|
+ a_hash_including('id' => global_id_of(r), 'username' => r.username)
+ end
+
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_data_at(:project, :merge_request, :reviewers, :nodes)).to match_array(expected)
+ expect(graphql_data_at(:project, :merge_request, :participants, :nodes)).to include(*expected)
+ end
+
+ it 'suppresses reviewers if reviewers are not allowed' do
+ stub_feature_flags(merge_request_reviewers: false)
+
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_data_at(:project, :merge_request, :reviewers)).to be_nil
+ end
+ end
+
it 'includes diff stats' do
be_natural = an_instance_of(Integer).and(be >= 0)
@@ -219,4 +252,41 @@ RSpec.describe 'getting merge request information nested in a project' do
expect(merge_request_graphql_data['mergeStatus']).to eq('checking')
end
end
+
+ # see: https://gitlab.com/gitlab-org/gitlab/-/issues/297358
+ context 'when the notes have been preloaded (by participants)' do
+ let(:query) do
+ <<~GQL
+ query($path: ID!) {
+ project(fullPath: $path) {
+ mrs: mergeRequests(first: 1) {
+ nodes {
+ participants { nodes { id } }
+ notes(first: 1) {
+ pageInfo { endCursor hasPreviousPage hasNextPage }
+ nodes { id }
+ }
+ }
+ }
+ }
+ }
+ GQL
+ end
+
+ before do
+ create_list(:note_on_merge_request, 3, project: project, noteable: merge_request)
+ end
+
+ it 'does not error' do
+ post_graphql(query,
+ current_user: current_user,
+ variables: { path: project.full_path })
+
+ expect(graphql_data_at(:project, :mrs, :nodes, :notes, :pageInfo)).to contain_exactly a_hash_including(
+ 'endCursor' => String,
+ 'hasNextPage' => true,
+ 'hasPreviousPage' => false
+ )
+ end
+ end
end
diff --git a/spec/requests/api/graphql/project/merge_requests_spec.rb b/spec/requests/api/graphql/project/merge_requests_spec.rb
index c05a620bb62..c85cb8b2ffe 100644
--- a/spec/requests/api/graphql/project/merge_requests_spec.rb
+++ b/spec/requests/api/graphql/project/merge_requests_spec.rb
@@ -23,9 +23,7 @@ RSpec.describe 'getting merge request listings nested in a project' do
graphql_query_for(
:project,
{ full_path: project.full_path },
- query_graphql_field(:merge_requests, search_params, [
- query_graphql_field(:nodes, nil, fields)
- ])
+ query_nodes(:merge_requests, fields, args: search_params)
)
end
@@ -50,24 +48,22 @@ RSpec.describe 'getting merge request listings nested in a project' do
project.repository.expire_branches_cache
end
+ let(:graphql_data) do
+ GitlabSchema.execute(query, context: { current_user: current_user }).to_h['data']
+ end
+
context 'selecting any single scalar field' do
where(:field) do
scalar_fields_of('MergeRequest').map { |name| [name] }
end
with_them do
- it_behaves_like 'a working graphql query' do
- let(:query) do
- query_merge_requests([:iid, field].uniq)
- end
-
- before do
- post_graphql(query, current_user: current_user)
- end
-
- it 'selects the correct MR' do
- expect(results).to contain_exactly(a_hash_including('iid' => fresh_mr.iid.to_s))
- end
+ let(:query) do
+ query_merge_requests([:iid, field].uniq)
+ end
+
+ it 'selects the correct MR' do
+ expect(results).to contain_exactly(a_hash_including('iid' => fresh_mr.iid.to_s))
end
end
end
@@ -87,19 +83,13 @@ RSpec.describe 'getting merge request listings nested in a project' do
end
with_them do
- it_behaves_like 'a working graphql query' do
- let(:query) do
- fld = is_connection ? query_graphql_field(:nodes, nil, [subfield]) : subfield
- query_merge_requests([:iid, query_graphql_field(field, nil, [fld])])
- end
-
- before do
- post_graphql(query, current_user: current_user)
- end
-
- it 'selects the correct MR' do
- expect(results).to contain_exactly(a_hash_including('iid' => fresh_mr.iid.to_s))
- end
+ let(:query) do
+ fld = is_connection ? query_graphql_field(:nodes, nil, [subfield]) : subfield
+ query_merge_requests([:iid, query_graphql_field(field, nil, [fld])])
+ end
+
+ it 'selects the correct MR' do
+ expect(results).to contain_exactly(a_hash_including('iid' => fresh_mr.iid.to_s))
end
end
end
@@ -254,6 +244,115 @@ RSpec.describe 'getting merge request listings nested in a project' do
include_examples 'N+1 query check'
end
+
+ context 'when requesting reviewers' do
+ let(:requested_fields) { ['reviewers { nodes { username } }'] }
+
+ before do
+ merge_request_a.reviewers << create(:user)
+ merge_request_a.reviewers << create(:user)
+ merge_request_c.reviewers << create(:user)
+ end
+
+ it 'returns the reviewers' do
+ execute_query
+
+ expect(results).to include a_hash_including('reviewers' => {
+ 'nodes' => match_array(merge_request_a.reviewers.map do |r|
+ a_hash_including('username' => r.username)
+ end)
+ })
+ end
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(merge_request_reviewers: false)
+ end
+
+ it 'does not return reviewers' do
+ execute_query
+
+ expect(results).to all(match a_hash_including('reviewers' => be_nil))
+ end
+ end
+
+ include_examples 'N+1 query check'
+ end
+ end
+
+ describe 'performance' do
+ let(:mr_fields) do
+ <<~SELECT
+ assignees { nodes { username } }
+ reviewers { nodes { username } }
+ participants { nodes { username } }
+ headPipeline { status }
+ SELECT
+ end
+
+ let(:query) do
+ <<~GQL
+ query($first: Int) {
+ project(fullPath: "#{project.full_path}") {
+ mergeRequests(first: $first) {
+ nodes { #{mr_fields} }
+ }
+ }
+ }
+ GQL
+ end
+
+ before_all do
+ project.add_developer(current_user)
+ mrs = create_list(:merge_request, 10, :closed, :with_head_pipeline,
+ source_project: project,
+ author: current_user)
+ mrs.each do |mr|
+ mr.assignees << create(:user)
+ mr.assignees << current_user
+ mr.reviewers << create(:user)
+ mr.reviewers << current_user
+ end
+ end
+
+ before do
+ # Confounding factor: makes DB calls in EE
+ allow(Gitlab::Database).to receive(:read_only?).and_return(false)
+ end
+
+ def run_query(number)
+ # Ensure that we have a fresh request store and batch-context between runs
+ result = run_with_clean_state(query,
+ context: { current_user: current_user },
+ variables: { first: number }
+ )
+
+ graphql_dig_at(result.to_h, :data, :project, :merge_requests, :nodes)
+ end
+
+ def user_collection
+ { 'nodes' => all(match(a_hash_including('username' => be_present))) }
+ end
+
+ it 'returns appropriate results' do
+ mrs = run_query(2)
+
+ expect(mrs.size).to eq(2)
+ expect(mrs).to all(
+ match(
+ a_hash_including(
+ 'assignees' => user_collection,
+ 'reviewers' => user_collection,
+ 'participants' => user_collection,
+ 'headPipeline' => { 'status' => be_present }
+ )))
+ end
+
+ it 'can lookahead to eliminate N+1 queries' do
+ baseline = ActiveRecord::QueryRecorder.new { run_query(1) }
+
+ expect { run_query(10) }.not_to exceed_query_limit(baseline)
+ end
end
describe 'sorting and pagination' do
diff --git a/spec/requests/api/graphql/project/project_members_spec.rb b/spec/requests/api/graphql/project/project_members_spec.rb
index cb937432ef7..984a0adb8c6 100644
--- a/spec/requests/api/graphql/project/project_members_spec.rb
+++ b/spec/requests/api/graphql/project/project_members_spec.rb
@@ -11,15 +11,13 @@ RSpec.describe 'getting project members information' do
let_it_be(:user_1) { create(:user, username: 'user') }
let_it_be(:user_2) { create(:user, username: 'test') }
- let(:member_data) { graphql_data['project']['projectMembers']['edges'] }
-
before_all do
[user_1, user_2].each { |user| parent_group.add_guest(user) }
end
context 'when the request is correct' do
it_behaves_like 'a working graphql query' do
- before_all do
+ before do
fetch_members(project: parent_project)
end
end
@@ -114,8 +112,7 @@ RSpec.describe 'getting project members information' do
def expect_array_response(*items)
expect(response).to have_gitlab_http_status(:success)
- expect(member_data).to be_an Array
- expect(member_data.map { |node| node['node']['user']['id'] })
- .to match_array(items.map { |u| global_id_of(u) })
+ member_gids = graphql_data_at(:project, :project_members, :edges, :node, :user, :id)
+ expect(member_gids).to match_array(items.map { |u| global_id_of(u) })
end
end
diff --git a/spec/requests/api/graphql/project/release_spec.rb b/spec/requests/api/graphql/project/release_spec.rb
index 99b15ff00b1..ccc2825da25 100644
--- a/spec/requests/api/graphql/project/release_spec.rb
+++ b/spec/requests/api/graphql/project/release_spec.rb
@@ -76,11 +76,11 @@ RSpec.describe 'Query.project(fullPath).release(tagName)' do
it 'finds all milestones associated to a release' do
post_query
- expected = release.milestones.map do |milestone|
+ expected = release.milestones.order_by_dates_and_title.map do |milestone|
{ 'id' => global_id_of(milestone), 'title' => milestone.title }
end
- expect(data).to match_array(expected)
+ expect(data).to eq(expected)
end
end
@@ -427,4 +427,33 @@ RSpec.describe 'Query.project(fullPath).release(tagName)' do
end
end
end
+
+ describe 'milestone order' do
+ let(:path) { path_prefix }
+ let(:current_user) { stranger }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be_with_reload(:release) { create(:release, project: project) }
+
+ let(:release_fields) do
+ query_graphql_field(%{
+ milestones {
+ nodes {
+ title
+ }
+ }
+ })
+ end
+
+ let(:actual_milestone_title_order) do
+ post_query
+
+ data.dig('milestones', 'nodes').map { |m| m['title'] }
+ end
+
+ before do
+ release.update!(milestones: [milestone_2, milestone_1])
+ end
+
+ it_behaves_like 'correct release milestone order'
+ end
end
diff --git a/spec/requests/api/graphql/project_query_spec.rb b/spec/requests/api/graphql/project_query_spec.rb
index b29f9ae913f..2cdd7273b18 100644
--- a/spec/requests/api/graphql/project_query_spec.rb
+++ b/spec/requests/api/graphql/project_query_spec.rb
@@ -6,26 +6,21 @@ RSpec.describe 'getting project information' do
include GraphqlHelpers
let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project, :repository, group: group) }
+ let_it_be(:project, reload: true) { create(:project, :repository, group: group) }
let_it_be(:current_user) { create(:user) }
- let(:fields) { all_graphql_fields_for(Project, max_depth: 2, excluded: %w(jiraImports services)) }
+ let(:project_fields) { all_graphql_fields_for('project'.to_s.classify, max_depth: 1) }
let(:query) do
- graphql_query_for(:project, { full_path: project.full_path }, fields)
+ graphql_query_for(:project, { full_path: project.full_path }, project_fields)
end
context 'when the user has full access to the project' do
- let(:full_access_query) do
- graphql_query_for(:project, { full_path: project.full_path },
- all_graphql_fields_for('Project', max_depth: 2))
- end
-
before do
project.add_maintainer(current_user)
end
it 'includes the project', :use_clean_rails_memory_store_caching, :request_store do
- post_graphql(full_access_query, current_user: current_user)
+ post_graphql(query, current_user: current_user)
expect(graphql_data['project']).not_to be_nil
end
@@ -49,12 +44,12 @@ RSpec.describe 'getting project information' do
end
context 'when there are pipelines present' do
+ let(:project_fields) { query_nodes(:pipelines) }
+
before do
create(:ci_pipeline, project: project)
end
- let(:fields) { query_nodes(:pipelines) }
-
it 'is included in the pipelines connection' do
post_graphql(query, current_user: current_user)
@@ -108,55 +103,6 @@ RSpec.describe 'getting project information' do
end
end
- describe 'performance' do
- before_all do
- project.add_developer(current_user)
- mrs = create_list(:merge_request, 10, :closed, :with_head_pipeline,
- source_project: project,
- author: current_user)
- mrs.each do |mr|
- mr.assignees << create(:user)
- mr.assignees << current_user
- end
- end
-
- def run_query(number)
- q = <<~GQL
- query {
- project(fullPath: "#{project.full_path}") {
- mergeRequests(first: #{number}) {
- nodes {
- assignees { nodes { username } }
- headPipeline { status }
- }
- }
- }
- }
- GQL
-
- post_graphql(q, current_user: current_user)
- end
-
- it 'returns appropriate results' do
- run_query(2)
-
- mrs = graphql_data.dig('project', 'mergeRequests', 'nodes')
-
- expect(mrs.size).to eq(2)
- expect(mrs).to all(
- match(
- a_hash_including(
- 'assignees' => { 'nodes' => all(match(a_hash_including('username' => be_present))) },
- 'headPipeline' => { 'status' => be_present }
- )))
- end
-
- it 'can lookahead to eliminate N+1 queries' do
- baseline = ActiveRecord::QueryRecorder.new { run_query(1) }
- expect { run_query(10) }.not_to exceed_query_limit(baseline)
- end
- end
-
context 'when the user does not have access to the project' do
it 'returns an empty field' do
post_graphql(query, current_user: current_user)
diff --git a/spec/requests/api/graphql/user/starred_projects_query_spec.rb b/spec/requests/api/graphql/user/starred_projects_query_spec.rb
index b098058a735..6cb02068f2a 100644
--- a/spec/requests/api/graphql/user/starred_projects_query_spec.rb
+++ b/spec/requests/api/graphql/user/starred_projects_query_spec.rb
@@ -17,7 +17,13 @@ RSpec.describe 'Getting starredProjects of the user' do
let_it_be(:user, reload: true) { create(:user) }
let(:user_fields) { 'starredProjects { nodes { id } }' }
- let(:starred_projects) { graphql_data_at(:user, :starred_projects, :nodes) }
+ let(:current_user) { nil }
+
+ let(:starred_projects) do
+ post_graphql(query, current_user: current_user)
+
+ graphql_data_at(:user, :starred_projects, :nodes)
+ end
before do
project_b.add_reporter(user)
@@ -26,11 +32,13 @@ RSpec.describe 'Getting starredProjects of the user' do
user.toggle_star(project_a)
user.toggle_star(project_b)
user.toggle_star(project_c)
-
- post_graphql(query)
end
- it_behaves_like 'a working graphql query'
+ it_behaves_like 'a working graphql query' do
+ before do
+ post_graphql(query)
+ end
+ end
it 'found only public project' do
expect(starred_projects).to contain_exactly(
@@ -41,10 +49,6 @@ RSpec.describe 'Getting starredProjects of the user' do
context 'the current user is the user' do
let(:current_user) { user }
- before do
- post_graphql(query, current_user: current_user)
- end
-
it 'found all projects' do
expect(starred_projects).to contain_exactly(
a_hash_including('id' => global_id_of(project_a)),
@@ -56,11 +60,10 @@ RSpec.describe 'Getting starredProjects of the user' do
context 'the current user is a member of a private project the user starred' do
let_it_be(:other_user) { create(:user) }
+ let(:current_user) { other_user }
before do
project_b.add_reporter(other_user)
-
- post_graphql(query, current_user: other_user)
end
it 'finds public and member projects' do
@@ -74,7 +77,6 @@ RSpec.describe 'Getting starredProjects of the user' do
context 'the user has a private profile' do
before do
user.update!(private_profile: true)
- post_graphql(query, current_user: current_user)
end
context 'the current user does not have access to view the private profile of the user' do
diff --git a/spec/requests/api/graphql/users_spec.rb b/spec/requests/api/graphql/users_spec.rb
index 72d86c10df1..22b68fbc9bb 100644
--- a/spec/requests/api/graphql/users_spec.rb
+++ b/spec/requests/api/graphql/users_spec.rb
@@ -54,6 +54,52 @@ RSpec.describe 'Users' do
)
end
end
+
+ context 'when admins is true' do
+ let_it_be(:admin) { create(:user, :admin) }
+ let_it_be(:another_admin) { create(:user, :admin) }
+
+ let(:query) { graphql_query_for(:users, { admins: true }, 'nodes { id }') }
+
+ context 'when current user is not an admin' do
+ let(:post_query) { post_graphql(query, current_user: current_user) }
+
+ it_behaves_like 'a working users query'
+
+ it 'includes all users', :aggregate_failures do
+ post_graphql(query)
+
+ expect(graphql_data.dig('users', 'nodes')).to include(
+ { "id" => user1.to_global_id.to_s },
+ { "id" => user2.to_global_id.to_s },
+ { "id" => user3.to_global_id.to_s },
+ { "id" => current_user.to_global_id.to_s },
+ { "id" => admin.to_global_id.to_s },
+ { "id" => another_admin.to_global_id.to_s }
+ )
+ end
+ end
+
+ context 'when current user is an admin' do
+ it_behaves_like 'a working users query'
+
+ it 'includes only admins', :aggregate_failures do
+ post_graphql(query, current_user: admin)
+
+ expect(graphql_data.dig('users', 'nodes')).to include(
+ { "id" => another_admin.to_global_id.to_s },
+ { "id" => admin.to_global_id.to_s }
+ )
+
+ expect(graphql_data.dig('users', 'nodes')).not_to include(
+ { "id" => user1.to_global_id.to_s },
+ { "id" => user2.to_global_id.to_s },
+ { "id" => user3.to_global_id.to_s },
+ { "id" => current_user.to_global_id.to_s }
+ )
+ end
+ end
+ end
end
describe 'sorting and pagination' do
diff --git a/spec/requests/api/group_packages_spec.rb b/spec/requests/api/group_packages_spec.rb
index 72ba25c59af..26895e473de 100644
--- a/spec/requests/api/group_packages_spec.rb
+++ b/spec/requests/api/group_packages_spec.rb
@@ -6,8 +6,9 @@ RSpec.describe API::GroupPackages do
let_it_be(:group) { create(:group, :public) }
let_it_be(:project) { create(:project, :public, namespace: group, name: 'project A') }
let_it_be(:user) { create(:user) }
+ let(:params) { {} }
- subject { get api(url) }
+ subject { get api(url), params: params }
describe 'GET /groups/:id/packages' do
let(:url) { "/groups/#{group.id}/packages" }
@@ -142,6 +143,7 @@ RSpec.describe API::GroupPackages do
it_behaves_like 'returning response status', :bad_request
end
+ it_behaves_like 'with versionless packages'
it_behaves_like 'does not cause n^2 queries'
end
end
diff --git a/spec/requests/api/invitations_spec.rb b/spec/requests/api/invitations_spec.rb
index aeb8e3642ed..2ea237469b1 100644
--- a/spec/requests/api/invitations_spec.rb
+++ b/spec/requests/api/invitations_spec.rb
@@ -30,6 +30,10 @@ RSpec.describe API::Invitations do
api("/#{source.model_name.plural}/#{source.id}/invitations", user)
end
+ def invite_member_by_email(source, source_type, email, created_by)
+ create(:"#{source_type}_member", invite_token: '123', invite_email: email, source: source, user: nil, created_by: created_by)
+ end
+
shared_examples 'POST /:source_type/:id/invitations' do |source_type|
context "with :source_type == #{source_type.pluralize}" do
it_behaves_like 'a 404 response when source is private' do
@@ -280,10 +284,6 @@ RSpec.describe API::Invitations do
expect(json_response.first['created_by_name']).to eq(developer.name)
expect(json_response.first['user_name']).to eq(nil)
end
-
- def invite_member_by_email(source, source_type, email, created_by)
- create(:"#{source_type}_member", invite_token: '123', invite_email: email, source: source, user: nil, created_by: created_by)
- end
end
end
@@ -298,4 +298,80 @@ RSpec.describe API::Invitations do
let(:source) { group }
end
end
+
+ shared_examples 'DELETE /:source_type/:id/invitations/:email' do |source_type|
+ def invite_api(source, user, email)
+ api("/#{source.model_name.plural}/#{source.id}/invitations/#{email}", user)
+ end
+
+ context "with :source_type == #{source_type.pluralize}" do
+ let!(:invite) { invite_member_by_email(source, source_type, developer.email, developer) }
+
+ it_behaves_like 'a 404 response when source is private' do
+ let(:route) { delete api("/#{source_type.pluralize}/#{source.id}/invitations/#{invite.invite_email}", stranger) }
+ end
+
+ context 'when authenticated as a non-member or member with insufficient rights' do
+ %i[access_requester stranger].each do |type|
+ context "as a #{type}" do
+ it 'returns 403' do
+ user = public_send(type)
+
+ delete invite_api(source, user, invite.invite_email)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+ end
+
+ context 'when authenticated as a member and deleting themselves' do
+ it 'does not delete the member' do
+ expect do
+ delete invite_api(source, developer, invite.invite_email)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end.not_to change { source.members.count }
+ end
+ end
+
+ context 'when authenticated as a maintainer/owner' do
+ it 'deletes the member and returns 204 with no content' do
+ expect do
+ delete invite_api(source, maintainer, invite.invite_email)
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end.to change { source.members.count }.by(-1)
+ end
+ end
+
+ it 'returns 404 if member does not exist' do
+ delete invite_api(source, maintainer, non_existing_record_id)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns 422 for a valid request if the resource was not destroyed' do
+ allow_next_instance_of(::Members::DestroyService) do |instance|
+ allow(instance).to receive(:execute).with(invite).and_return(invite)
+ end
+
+ delete invite_api(source, maintainer, invite.invite_email)
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+ end
+ end
+
+ describe 'DELETE /projects/:id/invitations/:email' do
+ it_behaves_like 'DELETE /:source_type/:id/invitations/:email', 'project' do
+ let(:source) { project }
+ end
+ end
+
+ describe 'DELETE /groups/:id/invitations/:email' do
+ it_behaves_like 'DELETE /:source_type/:id/invitations/:email', 'group' do
+ let(:source) { group }
+ end
+ end
end
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index f8521818845..6f854a28cec 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -260,6 +260,36 @@ RSpec.describe API::Jobs do
end
end
+ context 'when project is public with artifacts that are non public' do
+ let(:job) { create(:ci_build, :artifacts, :non_public_artifacts, pipeline: pipeline) }
+
+ it 'rejects access to artifacts' do
+ project.update_column(:visibility_level,
+ Gitlab::VisibilityLevel::PUBLIC)
+ project.update_column(:public_builds, true)
+
+ get_artifact_file(artifact)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ context 'with the non_public_artifacts feature flag disabled' do
+ before do
+ stub_feature_flags(non_public_artifacts: false)
+ end
+
+ it 'allows access to artifacts' do
+ project.update_column(:visibility_level,
+ Gitlab::VisibilityLevel::PUBLIC)
+ project.update_column(:public_builds, true)
+
+ get_artifact_file(artifact)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
+
context 'when project is public with builds access disabled' do
it 'rejects access to artifacts' do
project.update_column(:visibility_level,
@@ -396,6 +426,33 @@ RSpec.describe API::Jobs do
end
end
+ context 'when public project guest and artifacts are non public' do
+ let(:api_user) { guest }
+ let(:job) { create(:ci_build, :artifacts, :non_public_artifacts, pipeline: pipeline) }
+
+ before do
+ project.update_column(:visibility_level,
+ Gitlab::VisibilityLevel::PUBLIC)
+ project.update_column(:public_builds, true)
+ get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
+ end
+
+ it 'rejects access and hides existence of artifacts' do
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ context 'with the non_public_artifacts feature flag disabled' do
+ before do
+ stub_feature_flags(non_public_artifacts: false)
+ get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
+ end
+
+ it 'allows access to artifacts' do
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
+
it 'does not return job artifacts if not uploaded' do
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
@@ -580,6 +637,33 @@ RSpec.describe API::Jobs do
end
end
+ context 'when project is public with non public artifacts' do
+ let(:job) { create(:ci_build, :artifacts, :non_public_artifacts, pipeline: pipeline, user: api_user) }
+ let(:visibility_level) { Gitlab::VisibilityLevel::PUBLIC }
+ let(:public_builds) { true }
+
+ it 'rejects access and hides existence of artifacts', :sidekiq_might_not_need_inline do
+ get_artifact_file(artifact)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(json_response).to have_key('message')
+ expect(response.headers.to_h)
+ .not_to include('Gitlab-Workhorse-Send-Data' => /artifacts-entry/)
+ end
+
+ context 'with the non_public_artifacts feature flag disabled' do
+ before do
+ stub_feature_flags(non_public_artifacts: false)
+ end
+
+ it 'allows access to artifacts', :sidekiq_might_not_need_inline do
+ get_artifact_file(artifact)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
+
context 'when project is private' do
let(:visibility_level) { Gitlab::VisibilityLevel::PRIVATE }
let(:public_builds) { true }
@@ -743,32 +827,6 @@ RSpec.describe API::Jobs do
expect(response).to have_gitlab_http_status(expected_status)
end
end
-
- context 'with restrict_access_to_build_debug_mode feature disabled' do
- before do
- stub_feature_flags(restrict_access_to_build_debug_mode: false)
- end
-
- where(:public_builds, :user_project_role, :expected_status) do
- true | 'developer' | :ok
- true | 'guest' | :ok
- false | 'developer' | :ok
- false | 'guest' | :forbidden
- end
-
- with_them do
- before do
- project.update!(public_builds: public_builds)
- project.add_role(user, user_project_role)
-
- get api("/projects/#{project.id}/jobs/#{job.id}/trace", api_user)
- end
-
- it 'renders trace to authorized users' do
- expect(response).to have_gitlab_http_status(expected_status)
- end
- end
- end
end
end
@@ -923,15 +981,32 @@ RSpec.describe API::Jobs do
post api("/projects/#{project.id}/jobs/#{job.id}/play", api_user)
end
- context 'on an playable job' do
- let(:job) { create(:ci_build, :manual, project: project, pipeline: pipeline) }
+ context 'on a playable job' do
+ let_it_be(:job) { create(:ci_bridge, :playable, pipeline: pipeline, downstream: project) }
+
+ before do
+ project.add_developer(user)
+ end
context 'when user is authorized to trigger a manual action' do
- it 'plays the job' do
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['user']['id']).to eq(user.id)
- expect(json_response['id']).to eq(job.id)
- expect(job.reload).to be_pending
+ context 'that is a bridge' do
+ it 'plays the job' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['user']['id']).to eq(user.id)
+ expect(json_response['id']).to eq(job.id)
+ expect(job.reload).to be_pending
+ end
+ end
+
+ context 'that is a build' do
+ let_it_be(:job) { create(:ci_build, :manual, project: project, pipeline: pipeline) }
+
+ it 'plays the job' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['user']['id']).to eq(user.id)
+ expect(json_response['id']).to eq(job.id)
+ expect(job.reload).to be_pending
+ end
end
end
diff --git a/spec/requests/api/labels_spec.rb b/spec/requests/api/labels_spec.rb
index b368f6e329c..6db6de4b533 100644
--- a/spec/requests/api/labels_spec.rb
+++ b/spec/requests/api/labels_spec.rb
@@ -520,14 +520,29 @@ RSpec.describe API::Labels do
expect(json_response['color']).to eq(label1.color)
end
- it 'returns 200 if group label already exists' do
- create(:group_label, title: label1.name, group: group)
+ context 'if group label already exists' do
+ let!(:group_label) { create(:group_label, title: label1.name, group: group) }
- expect { put api("/projects/#{project.id}/labels/promote", user), params: { name: label1.name } }
- .to change(project.labels, :count).by(-1)
- .and change(group.labels, :count).by(0)
+ it 'returns a status of 200' do
+ put api("/projects/#{project.id}/labels/promote", user), params: { name: label1.name }
- expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'does not change the group label count' do
+ expect { put api("/projects/#{project.id}/labels/promote", user), params: { name: label1.name } }
+ .not_to change(group.labels, :count)
+ end
+
+ it 'does not change the maximum group label id (reuses the existing label)' do
+ expect { put api("/projects/#{project.id}/labels/promote", user), params: { name: label1.name } }
+ .not_to change { group.labels.maximum(:id) }
+ end
+
+ it 'changes the project label count' do
+ expect { put api("/projects/#{project.id}/labels/promote", user), params: { name: label1.name } }
+ .to change(project.labels, :count).by(-1)
+ end
end
it 'returns 403 if guest promotes label' do
diff --git a/spec/requests/api/lint_spec.rb b/spec/requests/api/lint_spec.rb
index aecbcfb5b5a..2653653c896 100644
--- a/spec/requests/api/lint_spec.rb
+++ b/spec/requests/api/lint_spec.rb
@@ -63,7 +63,7 @@ RSpec.describe API::Lint do
end
context 'with invalid configuration' do
- let(:yaml_content) { '{ image: "ruby:2.7", services: ["postgres"] }' }
+ let(:yaml_content) { '{ image: "ruby:2.7", services: ["postgres"], invalid }' }
it 'responds with errors about invalid configuration' do
post api('/ci/lint'), params: { content: yaml_content }
@@ -71,7 +71,7 @@ RSpec.describe API::Lint do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['status']).to eq('invalid')
expect(json_response['warnings']).to eq([])
- expect(json_response['errors']).to eq(['jobs config should contain at least one visible job'])
+ expect(json_response['errors']).to eq(['jobs invalid config should implement a script: or a trigger: keyword', 'jobs config should contain at least one visible job'])
end
it 'outputs expanded yaml content' do
diff --git a/spec/requests/api/maven_packages_spec.rb b/spec/requests/api/maven_packages_spec.rb
index f9ba819c9aa..e5d11fb1218 100644
--- a/spec/requests/api/maven_packages_spec.rb
+++ b/spec/requests/api/maven_packages_spec.rb
@@ -4,7 +4,8 @@ require 'spec_helper'
RSpec.describe API::MavenPackages do
include WorkhorseHelpers
- let_it_be(:group) { create(:group) }
+ let_it_be_with_refind(:package_settings) { create(:namespace_package_setting, :group) }
+ let_it_be(:group) { package_settings.namespace }
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, :public, namespace: group) }
let_it_be(:package, reload: true) { create(:maven_package, project: project, name: project.full_path) }
@@ -18,6 +19,7 @@ RSpec.describe API::MavenPackages do
let_it_be(:deploy_token_for_group) { create(:deploy_token, :group, read_package_registry: true, write_package_registry: true) }
let_it_be(:group_deploy_token) { create(:group_deploy_token, deploy_token: deploy_token_for_group, group: group) }
+ let(:package_name) { 'com/example/my-app' }
let(:workhorse_token) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
let(:headers) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => workhorse_token } }
let(:headers_with_token) { headers.merge('Private-Token' => personal_access_token.token) }
@@ -669,6 +671,50 @@ RSpec.describe API::MavenPackages do
end
end
+ context 'when package duplicates are not allowed' do
+ let(:package_name) { package.name }
+ let(:version) { package.version }
+
+ before do
+ package_settings.update!(maven_duplicates_allowed: false)
+ end
+
+ shared_examples 'storing the package file' do
+ it 'stores the file', :aggregate_failures do
+ expect { upload_file_with_token(params: params) }.to change { package.package_files.count }.by(1)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(jar_file.file_name).to eq(file_upload.original_filename)
+ end
+ end
+
+ it 'rejects the request', :aggregate_failures do
+ expect { upload_file_with_token(params: params) }.not_to change { package.package_files.count }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to include('Duplicate package is not allowed')
+ end
+
+ context 'when uploading different non-duplicate files to the same package' do
+ let!(:package) { create(:maven_package, project: project, name: project.full_path) }
+
+ before do
+ package_file = package.package_files.find_by(file_name: 'my-app-1.0-20180724.124855-1.jar')
+ package_file.destroy!
+ end
+
+ it_behaves_like 'storing the package file'
+ end
+
+ context 'when the package name matches the exception regex' do
+ before do
+ package_settings.update!(maven_duplicate_exception_regex: '.*')
+ end
+
+ it_behaves_like 'storing the package file'
+ end
+ end
+
context 'for sha1 file' do
let(:dummy_package) { double(Packages::Package) }
@@ -698,7 +744,7 @@ RSpec.describe API::MavenPackages do
end
def upload_file(params: {}, request_headers: headers, file_extension: 'jar')
- url = "/projects/#{project.id}/packages/maven/com/example/my-app/#{version}/my-app-1.0-20180724.124855-1.#{file_extension}"
+ url = "/projects/#{project.id}/packages/maven/#{package_name}/#{version}/my-app-1.0-20180724.124855-1.#{file_extension}"
workhorse_finalize(
api(url),
method: :put,
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 4339f1dd830..3a3eae73932 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -440,6 +440,7 @@ RSpec.describe API::MergeRequests do
milestone: milestone,
author: user,
assignees: [user],
+ reviewers: [user2],
source_project: project,
target_project: project,
source_branch: 'what',
@@ -498,6 +499,71 @@ RSpec.describe API::MergeRequests do
expect(mr['assignee']['id']).not_to eq(user2.id)
end
end
+
+ context 'filter by reviewer' do
+ context 'with reviewer_id' do
+ context 'with an id' do
+ let(:params) { { not: { reviewer_id: user2.id } } }
+
+ it 'returns merge requests that do not have the given reviewer' do
+ get api(endpoint_path, user), params: { not: { reviewer_id: user2.id } }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an(Array)
+ expect(json_response.length).to eq(4)
+ expect(json_response.map { |mr| mr['id'] }).not_to include(merge_request2)
+ end
+ end
+
+ context 'with Any' do
+ let(:params) { { not: { reviewer_id: 'Any' } } }
+
+ it 'returns a 400' do
+ # 'Any' is not supported for negated filters
+ get api(endpoint_path, user), params: params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('not[reviewer_id] is invalid')
+ end
+ end
+
+ context 'with None' do
+ let(:params) { { not: { reviewer_id: 'None' } } }
+
+ it 'returns a 400' do
+ # 'None' is not supported for negated filters
+ get api(endpoint_path, user), params: params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('not[reviewer_id] is invalid')
+ end
+ end
+ end
+
+ context 'with reviewer_username' do
+ let(:params) { { not: { reviewer_username: user2.username } } }
+
+ it 'returns merge requests that do not have the given reviewer' do
+ get api(endpoint_path, user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an(Array)
+ expect(json_response.length).to eq(4)
+ expect(json_response.map { |mr| mr['id'] }).not_to include(merge_request2)
+ end
+ end
+
+ context 'when both reviewer_id and reviewer_username are given' do
+ let(:params) { { not: { reviewer_id: user2.id, reviewer_username: user2.username } } }
+
+ it 'returns a 400' do
+ get api('/merge_requests', user), params: params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('not[reviewer_id], not[reviewer_username] are mutually exclusive')
+ end
+ end
+ end
end
context 'source_branch param' do
@@ -666,6 +732,79 @@ RSpec.describe API::MergeRequests do
end
end
+ context 'filter by reviewer' do
+ let_it_be(:review_requested_mr1) do
+ create(:merge_request, :unique_branches, author: user, reviewers: [user2], source_project: project2, target_project: project2)
+ end
+
+ let_it_be(:review_requested_mr2) do
+ create(:merge_request, :unique_branches, author: user2, reviewers: [user], source_project: project2, target_project: project2)
+ end
+
+ let(:params) { { scope: :all } }
+
+ context 'with reviewer_id' do
+ let(:params) { super().merge(reviewer_id: reviewer_id) }
+
+ context 'with an id' do
+ let(:reviewer_id) { user2.id }
+
+ it 'returns review requested merge requests for the given user' do
+ get api('/merge_requests', user), params: params
+
+ expect_response_contain_exactly(review_requested_mr1.id)
+ end
+ end
+
+ context 'with Any' do
+ let(:reviewer_id) { 'Any' }
+
+ it 'returns review requested merge requests for any user' do
+ get api('/merge_requests', user), params: params
+
+ expect_response_contain_exactly(review_requested_mr1.id, review_requested_mr2.id)
+ end
+ end
+
+ context 'with None' do
+ let(:reviewer_id) { 'None' }
+
+ it 'returns merge requests that have no assigned reviewers' do
+ get api('/merge_requests', user), params: params
+
+ expect_response_contain_exactly(
+ merge_request.id,
+ merge_request_closed.id,
+ merge_request_merged.id,
+ merge_request_locked.id,
+ merge_request2.id
+ )
+ end
+ end
+ end
+
+ context 'with reviewer_username' do
+ let(:params) { super().merge(reviewer_username: user2.username) }
+
+ it 'returns review requested merge requests for the given user' do
+ get api('/merge_requests', user), params: params
+
+ expect_response_contain_exactly(review_requested_mr1.id)
+ end
+ end
+
+ context 'with both reviewer_id and reviewer_username' do
+ let(:params) { super().merge(reviewer_id: user2.id, reviewer_username: user2.username) }
+
+ it 'returns a 400' do
+ get api('/merge_requests', user), params: params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('reviewer_id, reviewer_username are mutually exclusive')
+ end
+ end
+ end
+
it 'returns an array of merge requests assigned to the given user' do
merge_request3 = create(:merge_request, :simple, author: user, assignees: [user2], source_project: project2, target_project: project2, source_branch: 'other-branch')
diff --git a/spec/requests/api/nuget_group_packages_spec.rb b/spec/requests/api/nuget_group_packages_spec.rb
new file mode 100644
index 00000000000..f7e81494660
--- /dev/null
+++ b/spec/requests/api/nuget_group_packages_spec.rb
@@ -0,0 +1,122 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe API::NugetGroupPackages do
+ include_context 'nuget api setup'
+
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be_with_reload(:group) { create(:group) }
+ let_it_be_with_reload(:subgroup) { create(:group, parent: group) }
+ let_it_be_with_reload(:project) { create(:project, namespace: subgroup) }
+ let_it_be(:deploy_token) { create(:deploy_token, :group, read_package_registry: true, write_package_registry: true) }
+ let_it_be(:group_deploy_token) { create(:group_deploy_token, deploy_token: deploy_token, group: group) }
+
+ let(:target_type) { 'groups' }
+
+ shared_examples 'handling all endpoints' do
+ describe 'GET /api/v4/groups/:id/-/packages/nuget' do
+ it_behaves_like 'handling nuget service requests', anonymous_requests_example_name: 'rejects nuget packages access', anonymous_requests_status: :unauthorized do
+ let(:url) { "/groups/#{target.id}/-/packages/nuget/index.json" }
+ end
+ end
+
+ describe 'GET /api/v4/groups/:id/-/packages/nuget/metadata/*package_name/index' do
+ it_behaves_like 'handling nuget metadata requests with package name', anonymous_requests_example_name: 'rejects nuget packages access', anonymous_requests_status: :unauthorized do
+ let(:url) { "/groups/#{target.id}/-/packages/nuget/metadata/#{package_name}/index.json" }
+ end
+ end
+
+ describe 'GET /api/v4/groups/:id/-/packages/nuget/metadata/*package_name/*package_version' do
+ it_behaves_like 'handling nuget metadata requests with package name and package version', anonymous_requests_example_name: 'rejects nuget packages access', anonymous_requests_status: :unauthorized do
+ let(:url) { "/groups/#{target.id}/-/packages/nuget/metadata/#{package_name}/#{package.version}.json" }
+ end
+ end
+
+ describe 'GET /api/v4/groups/:id/-/packages/nuget/query' do
+ it_behaves_like 'handling nuget search requests', anonymous_requests_example_name: 'rejects nuget packages access', anonymous_requests_status: :unauthorized do
+ let(:url) { "/groups/#{target.id}/-/packages/nuget/query?#{query_parameters.to_query}" }
+ end
+ end
+ end
+
+ context 'with a subgroup' do
+ # Bug: deploy tokens at parent group will not see the subgroup.
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/285495
+ let_it_be(:group_deploy_token) { create(:group_deploy_token, deploy_token: deploy_token, group: subgroup) }
+
+ let(:target) { subgroup }
+
+ it_behaves_like 'handling all endpoints'
+
+ def update_visibility_to(visibility)
+ project.update!(visibility_level: visibility)
+ subgroup.update!(visibility_level: visibility)
+ end
+ end
+
+ context 'with a group' do
+ let(:target) { group }
+
+ it_behaves_like 'handling all endpoints'
+
+ context 'with dummy packages and anonymous request' do
+ let_it_be(:package_name) { 'Dummy.Package' }
+ let_it_be(:packages) { create_list(:nuget_package, 5, :with_metadatum, name: package_name, project: project) }
+ let_it_be(:tags) { packages.each { |pkg| create(:packages_tag, package: pkg, name: 'test') } }
+
+ let(:search_term) { 'umm' }
+ let(:take) { 26 }
+ let(:skip) { 0 }
+ let(:include_prereleases) { true }
+ let(:query_parameters) { { q: search_term, take: take, skip: skip, prerelease: include_prereleases } }
+
+ subject { get api(url), headers: {} }
+
+ shared_examples 'handling mixed visibilities' do
+ where(:group_visibility, :subgroup_visibility, :expected_status) do
+ 'PUBLIC' | 'PUBLIC' | :unauthorized
+ 'PUBLIC' | 'INTERNAL' | :unauthorized
+ 'PUBLIC' | 'PRIVATE' | :unauthorized
+ 'INTERNAL' | 'INTERNAL' | :unauthorized
+ 'INTERNAL' | 'PRIVATE' | :unauthorized
+ 'PRIVATE' | 'PRIVATE' | :unauthorized
+ end
+
+ with_them do
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(subgroup_visibility, false))
+ subgroup.update!(visibility_level: Gitlab::VisibilityLevel.const_get(subgroup_visibility, false))
+ group.update!(visibility_level: Gitlab::VisibilityLevel.const_get(group_visibility, false))
+ end
+
+ it_behaves_like 'returning response status', params[:expected_status]
+ end
+ end
+
+ describe 'GET /api/v4/groups/:id/-/packages/nuget/metadata/*package_name/index' do
+ it_behaves_like 'handling mixed visibilities' do
+ let(:url) { "/groups/#{target.id}/-/packages/nuget/metadata/#{package_name}/index.json" }
+ end
+ end
+
+ describe 'GET /api/v4/groups/:id/-/packages/nuget/metadata/*package_name/*package_version' do
+ it_behaves_like 'handling mixed visibilities' do
+ let(:url) { "/groups/#{target.id}/-/packages/nuget/metadata/#{package_name}/#{packages.first.version}.json" }
+ end
+ end
+
+ describe 'GET /api/v4/groups/:id/-/packages/nuget/query' do
+ it_behaves_like 'handling mixed visibilities' do
+ let(:url) { "/groups/#{target.id}/-/packages/nuget/query?#{query_parameters.to_query}" }
+ end
+ end
+ end
+
+ def update_visibility_to(visibility)
+ project.update!(visibility_level: visibility)
+ subgroup.update!(visibility_level: visibility)
+ group.update!(visibility_level: visibility)
+ end
+ end
+end
diff --git a/spec/requests/api/nuget_project_packages_spec.rb b/spec/requests/api/nuget_project_packages_spec.rb
index df1daf39144..813ebc35ede 100644
--- a/spec/requests/api/nuget_project_packages_spec.rb
+++ b/spec/requests/api/nuget_project_packages_spec.rb
@@ -2,99 +2,202 @@
require 'spec_helper'
RSpec.describe API::NugetProjectPackages do
- include WorkhorseHelpers
- include PackagesManagerApiSpecHelpers
- include HttpBasicAuthHelpers
+ include_context 'nuget api setup'
- let_it_be(:user) { create(:user) }
- let_it_be(:project, reload: true) { create(:project, :public) }
- let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be_with_reload(:project) { create(:project, :public) }
let_it_be(:deploy_token) { create(:deploy_token, read_package_registry: true, write_package_registry: true) }
let_it_be(:project_deploy_token) { create(:project_deploy_token, deploy_token: deploy_token, project: project) }
+ let(:target) { project }
+ let(:target_type) { 'projects' }
+
describe 'GET /api/v4/projects/:id/packages/nuget' do
it_behaves_like 'handling nuget service requests' do
- let(:url) { "/projects/#{project.id}/packages/nuget/index.json" }
+ let(:url) { "/projects/#{target.id}/packages/nuget/index.json" }
end
end
describe 'GET /api/v4/projects/:id/packages/nuget/metadata/*package_name/index' do
it_behaves_like 'handling nuget metadata requests with package name' do
- let(:url) { "/projects/#{project.id}/packages/nuget/metadata/#{package_name}/index.json" }
+ let(:url) { "/projects/#{target.id}/packages/nuget/metadata/#{package_name}/index.json" }
end
end
describe 'GET /api/v4/projects/:id/packages/nuget/metadata/*package_name/*package_version' do
it_behaves_like 'handling nuget metadata requests with package name and package version' do
- let(:url) { "/projects/#{project.id}/packages/nuget/metadata/#{package_name}/#{package.version}.json" }
+ let(:url) { "/projects/#{target.id}/packages/nuget/metadata/#{package_name}/#{package.version}.json" }
end
end
describe 'GET /api/v4/projects/:id/packages/nuget/query' do
it_behaves_like 'handling nuget search requests' do
- let(:url) { "/projects/#{project.id}/packages/nuget/query?#{query_parameters.to_query}" }
+ let(:url) { "/projects/#{target.id}/packages/nuget/query?#{query_parameters.to_query}" }
end
end
+ describe 'GET /api/v4/projects/:id/packages/nuget/download/*package_name/index' do
+ let_it_be(:package_name) { 'Dummy.Package' }
+ let_it_be(:packages) { create_list(:nuget_package, 5, name: package_name, project: project) }
+
+ let(:url) { "/projects/#{target.id}/packages/nuget/download/#{package_name}/index.json" }
+
+ subject { get api(url) }
+
+ context 'with valid target' do
+ where(:visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'process nuget download versions request' | :success
+ 'PUBLIC' | :guest | true | true | 'process nuget download versions request' | :success
+ 'PUBLIC' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :developer | false | true | 'process nuget download versions request' | :success
+ 'PUBLIC' | :guest | false | true | 'process nuget download versions request' | :success
+ 'PUBLIC' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :anonymous | false | true | 'process nuget download versions request' | :success
+ 'PRIVATE' | :developer | true | true | 'process nuget download versions request' | :success
+ 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
+ 'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :guest | false | true | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
+ end
+
+ with_them do
+ let(:token) { user_token ? personal_access_token.token : 'wrong' }
+ let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
+
+ subject { get api(url), headers: headers }
+
+ before do
+ update_visibility_to(Gitlab::VisibilityLevel.const_get(visibility_level, false))
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ end
+ end
+
+ it_behaves_like 'deploy token for package GET requests'
+
+ it_behaves_like 'rejects nuget access with unknown target id'
+
+ it_behaves_like 'rejects nuget access with invalid target id'
+ end
+
+ describe 'GET /api/v4/projects/:id/packages/nuget/download/*package_name/*package_version/*package_filename' do
+ let_it_be(:package_name) { 'Dummy.Package' }
+ let_it_be(:package) { create(:nuget_package, project: project, name: package_name) }
+
+ let(:url) { "/projects/#{target.id}/packages/nuget/download/#{package.name}/#{package.version}/#{package.name}.#{package.version}.nupkg" }
+
+ subject { get api(url) }
+
+ context 'with valid target' do
+ where(:visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'process nuget download content request' | :success
+ 'PUBLIC' | :guest | true | true | 'process nuget download content request' | :success
+ 'PUBLIC' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :developer | false | true | 'process nuget download content request' | :success
+ 'PUBLIC' | :guest | false | true | 'process nuget download content request' | :success
+ 'PUBLIC' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :anonymous | false | true | 'process nuget download content request' | :success
+ 'PRIVATE' | :developer | true | true | 'process nuget download content request' | :success
+ 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
+ 'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :guest | false | true | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
+ end
+
+ with_them do
+ let(:token) { user_token ? personal_access_token.token : 'wrong' }
+ let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
+
+ subject { get api(url), headers: headers }
+
+ before do
+ update_visibility_to(Gitlab::VisibilityLevel.const_get(visibility_level, false))
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ end
+ end
+
+ it_behaves_like 'deploy token for package GET requests' do
+ before do
+ update_visibility_to(Gitlab::VisibilityLevel::PRIVATE)
+ end
+ end
+
+ it_behaves_like 'rejects nuget access with unknown target id'
+
+ it_behaves_like 'rejects nuget access with invalid target id'
+ end
+
describe 'PUT /api/v4/projects/:id/packages/nuget/authorize' do
let_it_be(:workhorse_token) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
let_it_be(:workhorse_header) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => workhorse_token } }
- let(:url) { "/projects/#{project.id}/packages/nuget/authorize" }
+ let(:url) { "/projects/#{target.id}/packages/nuget/authorize" }
let(:headers) { {} }
subject { put api(url), headers: headers }
- context 'without the need for a license' do
- context 'with valid project' do
- using RSpec::Parameterized::TableSyntax
-
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'process nuget workhorse authorization' | :success
- 'PUBLIC' | :guest | true | true | 'rejects nuget packages access' | :forbidden
- 'PUBLIC' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :developer | false | true | 'rejects nuget packages access' | :forbidden
- 'PUBLIC' | :guest | false | true | 'rejects nuget packages access' | :forbidden
- 'PUBLIC' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :developer | true | true | 'process nuget workhorse authorization' | :success
- 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
- 'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :guest | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
- end
-
- with_them do
- let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:user_headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
- let(:headers) { user_headers.merge(workhorse_header) }
+ context 'with valid project' do
+ where(:visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'process nuget workhorse authorization' | :success
+ 'PUBLIC' | :guest | true | true | 'rejects nuget packages access' | :forbidden
+ 'PUBLIC' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :developer | false | true | 'rejects nuget packages access' | :forbidden
+ 'PUBLIC' | :guest | false | true | 'rejects nuget packages access' | :forbidden
+ 'PUBLIC' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :developer | true | true | 'process nuget workhorse authorization' | :success
+ 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
+ 'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :guest | false | true | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
+ end
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
- end
+ with_them do
+ let(:token) { user_token ? personal_access_token.token : 'wrong' }
+ let(:user_headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
+ let(:headers) { user_headers.merge(workhorse_header) }
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ before do
+ update_visibility_to(Gitlab::VisibilityLevel.const_get(visibility_level, false))
end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
+ end
- it_behaves_like 'deploy token for package uploads'
+ it_behaves_like 'deploy token for package uploads'
- it_behaves_like 'rejects nuget access with unknown project id'
+ it_behaves_like 'rejects nuget access with unknown target id'
- it_behaves_like 'rejects nuget access with invalid project id'
- end
+ it_behaves_like 'rejects nuget access with invalid target id'
end
describe 'PUT /api/v4/projects/:id/packages/nuget' do
let(:workhorse_token) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
let(:workhorse_header) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => workhorse_token } }
let_it_be(:file_name) { 'package.nupkg' }
- let(:url) { "/projects/#{project.id}/packages/nuget" }
+ let(:url) { "/projects/#{target.id}/packages/nuget" }
let(:headers) { {} }
let(:params) { { package: temp_file(file_name) } }
let(:file_key) { :package }
@@ -111,170 +214,61 @@ RSpec.describe API::NugetProjectPackages do
)
end
- context 'without the need for a license' do
- context 'with valid project' do
- using RSpec::Parameterized::TableSyntax
-
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'process nuget upload' | :created
- 'PUBLIC' | :guest | true | true | 'rejects nuget packages access' | :forbidden
- 'PUBLIC' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :developer | false | true | 'rejects nuget packages access' | :forbidden
- 'PUBLIC' | :guest | false | true | 'rejects nuget packages access' | :forbidden
- 'PUBLIC' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :developer | true | true | 'process nuget upload' | :created
- 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
- 'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :guest | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
- end
-
- with_them do
- let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:user_headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
- let(:headers) { user_headers.merge(workhorse_header) }
-
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
- end
-
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
- end
+ context 'with valid project' do
+ where(:visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'process nuget upload' | :created
+ 'PUBLIC' | :guest | true | true | 'rejects nuget packages access' | :forbidden
+ 'PUBLIC' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :developer | false | true | 'rejects nuget packages access' | :forbidden
+ 'PUBLIC' | :guest | false | true | 'rejects nuget packages access' | :forbidden
+ 'PUBLIC' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :developer | true | true | 'process nuget upload' | :created
+ 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
+ 'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :guest | false | true | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
end
- it_behaves_like 'deploy token for package uploads'
-
- it_behaves_like 'rejects nuget access with unknown project id'
-
- it_behaves_like 'rejects nuget access with invalid project id'
-
- context 'file size above maximum limit' do
- let(:headers) { basic_auth_header(deploy_token.username, deploy_token.token).merge(workhorse_header) }
+ with_them do
+ let(:token) { user_token ? personal_access_token.token : 'wrong' }
+ let(:user_headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
+ let(:headers) { user_headers.merge(workhorse_header) }
before do
- allow_next_instance_of(UploadedFile) do |uploaded_file|
- allow(uploaded_file).to receive(:size).and_return(project.actual_limits.nuget_max_file_size + 1)
- end
+ update_visibility_to(Gitlab::VisibilityLevel.const_get(visibility_level, false))
end
- it_behaves_like 'returning response status', :bad_request
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
end
- end
-
- describe 'GET /api/v4/projects/:id/packages/nuget/download/*package_name/index' do
- let_it_be(:package_name) { 'Dummy.Package' }
- let_it_be(:packages) { create_list(:nuget_package, 5, name: package_name, project: project) }
- let(:url) { "/projects/#{project.id}/packages/nuget/download/#{package_name}/index.json" }
-
- subject { get api(url) }
- context 'without the need for a license' do
- context 'with valid project' do
- using RSpec::Parameterized::TableSyntax
-
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'process nuget download versions request' | :success
- 'PUBLIC' | :guest | true | true | 'process nuget download versions request' | :success
- 'PUBLIC' | :developer | true | false | 'process nuget download versions request' | :success
- 'PUBLIC' | :guest | true | false | 'process nuget download versions request' | :success
- 'PUBLIC' | :developer | false | true | 'process nuget download versions request' | :success
- 'PUBLIC' | :guest | false | true | 'process nuget download versions request' | :success
- 'PUBLIC' | :developer | false | false | 'process nuget download versions request' | :success
- 'PUBLIC' | :guest | false | false | 'process nuget download versions request' | :success
- 'PUBLIC' | :anonymous | false | true | 'process nuget download versions request' | :success
- 'PRIVATE' | :developer | true | true | 'process nuget download versions request' | :success
- 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
- 'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :guest | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
- end
+ it_behaves_like 'deploy token for package uploads'
- with_them do
- let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
+ it_behaves_like 'rejects nuget access with unknown target id'
- subject { get api(url), headers: headers }
+ it_behaves_like 'rejects nuget access with invalid target id'
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
- end
+ context 'file size above maximum limit' do
+ let(:headers) { basic_auth_header(deploy_token.username, deploy_token.token).merge(workhorse_header) }
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ before do
+ allow_next_instance_of(UploadedFile) do |uploaded_file|
+ allow(uploaded_file).to receive(:size).and_return(project.actual_limits.nuget_max_file_size + 1)
end
end
- it_behaves_like 'deploy token for package GET requests'
-
- it_behaves_like 'rejects nuget access with unknown project id'
-
- it_behaves_like 'rejects nuget access with invalid project id'
+ it_behaves_like 'returning response status', :bad_request
end
end
- describe 'GET /api/v4/projects/:id/packages/nuget/download/*package_name/*package_version/*package_filename' do
- let_it_be(:package_name) { 'Dummy.Package' }
- let_it_be(:package) { create(:nuget_package, project: project, name: package_name) }
-
- let(:url) { "/projects/#{project.id}/packages/nuget/download/#{package.name}/#{package.version}/#{package.name}.#{package.version}.nupkg" }
-
- subject { get api(url) }
-
- context 'without the need for a license' do
- context 'with valid project' do
- using RSpec::Parameterized::TableSyntax
-
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'process nuget download content request' | :success
- 'PUBLIC' | :guest | true | true | 'process nuget download content request' | :success
- 'PUBLIC' | :developer | true | false | 'process nuget download content request' | :success
- 'PUBLIC' | :guest | true | false | 'process nuget download content request' | :success
- 'PUBLIC' | :developer | false | true | 'process nuget download content request' | :success
- 'PUBLIC' | :guest | false | true | 'process nuget download content request' | :success
- 'PUBLIC' | :developer | false | false | 'process nuget download content request' | :success
- 'PUBLIC' | :guest | false | false | 'process nuget download content request' | :success
- 'PUBLIC' | :anonymous | false | true | 'process nuget download content request' | :success
- 'PRIVATE' | :developer | true | true | 'process nuget download content request' | :success
- 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
- 'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :guest | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
- end
-
- with_them do
- let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
-
- subject { get api(url), headers: headers }
-
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
- end
-
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
- end
- end
-
- it_behaves_like 'deploy token for package GET requests'
-
- it_behaves_like 'rejects nuget access with unknown project id'
-
- it_behaves_like 'rejects nuget access with invalid project id'
- end
+ def update_visibility_to(visibility)
+ project.update!(visibility_level: visibility)
end
end
diff --git a/spec/requests/api/project_packages_spec.rb b/spec/requests/api/project_packages_spec.rb
index 4c8599d1a20..eb86df36dbb 100644
--- a/spec/requests/api/project_packages_spec.rb
+++ b/spec/requests/api/project_packages_spec.rb
@@ -11,12 +11,13 @@ RSpec.describe API::ProjectPackages do
let!(:another_package) { create(:npm_package) }
let(:no_package_url) { "/projects/#{project.id}/packages/0" }
let(:wrong_package_url) { "/projects/#{project.id}/packages/#{another_package.id}" }
+ let(:params) { {} }
describe 'GET /projects/:id/packages' do
let(:url) { "/projects/#{project.id}/packages" }
let(:package_schema) { 'public_api/v4/packages/packages' }
- subject { get api(url) }
+ subject { get api(url), params: params }
context 'without the need for a license' do
context 'project is public' do
@@ -118,6 +119,7 @@ RSpec.describe API::ProjectPackages do
end
end
+ it_behaves_like 'with versionless packages'
it_behaves_like 'does not cause n^2 queries'
end
end
diff --git a/spec/requests/api/project_repository_storage_moves_spec.rb b/spec/requests/api/project_repository_storage_moves_spec.rb
index 15e69c2aa16..5e200312d1f 100644
--- a/spec/requests/api/project_repository_storage_moves_spec.rb
+++ b/spec/requests/api/project_repository_storage_moves_spec.rb
@@ -3,220 +3,10 @@
require 'spec_helper'
RSpec.describe API::ProjectRepositoryStorageMoves do
- include AccessMatchersForRequest
-
- let_it_be(:user) { create(:admin) }
- let_it_be(:project) { create(:project, :repository).tap { |project| project.track_project_repository } }
- let_it_be(:storage_move) { create(:project_repository_storage_move, :scheduled, container: project) }
-
- shared_examples 'get single project repository storage move' do
- let(:project_repository_storage_move_id) { storage_move.id }
-
- def get_project_repository_storage_move
- get api(url, user)
- end
-
- it 'returns a project repository storage move' do
- get_project_repository_storage_move
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to match_response_schema('public_api/v4/project_repository_storage_move')
- expect(json_response['id']).to eq(storage_move.id)
- expect(json_response['state']).to eq(storage_move.human_state_name)
- end
-
- context 'non-existent project repository storage move' do
- let(:project_repository_storage_move_id) { non_existing_record_id }
-
- it 'returns not found' do
- get_project_repository_storage_move
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- describe 'permissions' do
- it { expect { get_project_repository_storage_move }.to be_allowed_for(:admin) }
- it { expect { get_project_repository_storage_move }.to be_denied_for(:user) }
- end
- end
-
- shared_examples 'get project repository storage move list' do
- def get_project_repository_storage_moves
- get api(url, user)
- end
-
- it 'returns project repository storage moves' do
- get_project_repository_storage_moves
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
- expect(response).to match_response_schema('public_api/v4/project_repository_storage_moves')
- expect(json_response.size).to eq(1)
- expect(json_response.first['id']).to eq(storage_move.id)
- expect(json_response.first['state']).to eq(storage_move.human_state_name)
- end
-
- it 'avoids N+1 queries', :request_store do
- # prevent `let` from polluting the control
- get_project_repository_storage_moves
-
- control = ActiveRecord::QueryRecorder.new { get_project_repository_storage_moves }
-
- create(:project_repository_storage_move, :scheduled, container: project)
-
- expect { get_project_repository_storage_moves }.not_to exceed_query_limit(control)
- end
-
- it 'returns the most recently created first' do
- storage_move_oldest = create(:project_repository_storage_move, :scheduled, container: project, created_at: 2.days.ago)
- storage_move_middle = create(:project_repository_storage_move, :scheduled, container: project, created_at: 1.day.ago)
-
- get_project_repository_storage_moves
-
- json_ids = json_response.map {|storage_move| storage_move['id'] }
- expect(json_ids).to eq([
- storage_move.id,
- storage_move_middle.id,
- storage_move_oldest.id
- ])
- end
-
- describe 'permissions' do
- it { expect { get_project_repository_storage_moves }.to be_allowed_for(:admin) }
- it { expect { get_project_repository_storage_moves }.to be_denied_for(:user) }
- end
- end
-
- describe 'GET /project_repository_storage_moves' do
- it_behaves_like 'get project repository storage move list' do
- let(:url) { '/project_repository_storage_moves' }
- end
- end
-
- describe 'GET /project_repository_storage_moves/:repository_storage_move_id' do
- it_behaves_like 'get single project repository storage move' do
- let(:url) { "/project_repository_storage_moves/#{project_repository_storage_move_id}" }
- end
- end
-
- describe 'GET /projects/:id/repository_storage_moves' do
- it_behaves_like 'get project repository storage move list' do
- let(:url) { "/projects/#{project.id}/repository_storage_moves" }
- end
- end
-
- describe 'GET /projects/:id/repository_storage_moves/:repository_storage_move_id' do
- it_behaves_like 'get single project repository storage move' do
- let(:url) { "/projects/#{project.id}/repository_storage_moves/#{project_repository_storage_move_id}" }
- end
- end
-
- describe 'POST /projects/:id/repository_storage_moves' do
- let(:url) { "/projects/#{project.id}/repository_storage_moves" }
- let(:destination_storage_name) { 'test_second_storage' }
-
- def create_project_repository_storage_move
- post api(url, user), params: { destination_storage_name: destination_storage_name }
- end
-
- before do
- stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/extra_storage' })
- end
-
- it 'schedules a project repository storage move' do
- create_project_repository_storage_move
-
- storage_move = project.repository_storage_moves.last
-
- expect(response).to have_gitlab_http_status(:created)
- expect(response).to match_response_schema('public_api/v4/project_repository_storage_move')
- expect(json_response['id']).to eq(storage_move.id)
- expect(json_response['state']).to eq('scheduled')
- expect(json_response['source_storage_name']).to eq('default')
- expect(json_response['destination_storage_name']).to eq(destination_storage_name)
- end
-
- describe 'permissions' do
- it { expect { create_project_repository_storage_move }.to be_allowed_for(:admin) }
- it { expect { create_project_repository_storage_move }.to be_denied_for(:user) }
- end
-
- context 'destination_storage_name is missing' do
- let(:destination_storage_name) { nil }
-
- it 'schedules a project repository storage move' do
- create_project_repository_storage_move
-
- storage_move = project.repository_storage_moves.last
-
- expect(response).to have_gitlab_http_status(:created)
- expect(response).to match_response_schema('public_api/v4/project_repository_storage_move')
- expect(json_response['id']).to eq(storage_move.id)
- expect(json_response['state']).to eq('scheduled')
- expect(json_response['source_storage_name']).to eq('default')
- expect(json_response['destination_storage_name']).to be_present
- end
- end
- end
-
- describe 'POST /project_repository_storage_moves' do
- let(:source_storage_name) { 'default' }
- let(:destination_storage_name) { 'test_second_storage' }
-
- def create_project_repository_storage_moves
- post api('/project_repository_storage_moves', user), params: {
- source_storage_name: source_storage_name,
- destination_storage_name: destination_storage_name
- }
- end
-
- before do
- stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/extra_storage' })
- end
-
- it 'schedules the worker' do
- expect(ProjectScheduleBulkRepositoryShardMovesWorker).to receive(:perform_async).with(source_storage_name, destination_storage_name)
-
- create_project_repository_storage_moves
-
- expect(response).to have_gitlab_http_status(:accepted)
- end
-
- context 'source_storage_name is invalid' do
- let(:destination_storage_name) { 'not-a-real-storage' }
-
- it 'gives an error' do
- create_project_repository_storage_moves
-
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
-
- context 'destination_storage_name is missing' do
- let(:destination_storage_name) { nil }
-
- it 'schedules the worker' do
- expect(ProjectScheduleBulkRepositoryShardMovesWorker).to receive(:perform_async).with(source_storage_name, destination_storage_name)
-
- create_project_repository_storage_moves
-
- expect(response).to have_gitlab_http_status(:accepted)
- end
- end
-
- context 'destination_storage_name is invalid' do
- let(:destination_storage_name) { 'not-a-real-storage' }
-
- it 'gives an error' do
- create_project_repository_storage_moves
-
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
-
- describe 'normal user' do
- it { expect { create_project_repository_storage_moves }.to be_denied_for(:user) }
- end
+ it_behaves_like 'repository_storage_moves API', 'projects' do
+ let_it_be(:container) { create(:project, :repository).tap { |project| project.track_project_repository } }
+ let_it_be(:storage_move) { create(:project_repository_storage_move, :scheduled, container: container) }
+ let(:repository_storage_move_factory) { :project_repository_storage_move }
+ let(:bulk_worker_klass) { ProjectScheduleBulkRepositoryShardMovesWorker }
end
end
diff --git a/spec/requests/api/project_templates_spec.rb b/spec/requests/api/project_templates_spec.rb
index d242d49fc1b..fc1035fc17d 100644
--- a/spec/requests/api/project_templates_spec.rb
+++ b/spec/requests/api/project_templates_spec.rb
@@ -53,6 +53,15 @@ RSpec.describe API::ProjectTemplates do
expect(json_response).to satisfy_one { |template| template['key'] == 'Android' }
end
+ it 'returns gitlab_ci_syntax_ymls' do
+ get api("/projects/#{public_project.id}/templates/gitlab_ci_syntax_ymls")
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(response).to match_response_schema('public_api/v4/template_list')
+ expect(json_response).to satisfy_one { |template| template['key'] == 'Artifacts example' }
+ end
+
it 'returns licenses' do
get api("/projects/#{public_project.id}/templates/licenses")
@@ -163,6 +172,14 @@ RSpec.describe API::ProjectTemplates do
expect(json_response['name']).to eq('Android')
end
+ it 'returns a specific gitlab_ci_syntax_yml' do
+ get api("/projects/#{public_project.id}/templates/gitlab_ci_syntax_ymls/Artifacts%20example")
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/template')
+ expect(json_response['name']).to eq('Artifacts example')
+ end
+
it 'returns a specific metrics_dashboard_yml' do
get api("/projects/#{public_project.id}/templates/metrics_dashboard_ymls/Default")
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index ad5468fb54c..4acd0eea448 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -1583,6 +1583,7 @@ RSpec.describe API::Projects do
expect(json_response['shared_with_groups'][0]['group_access_level']).to eq(link.group_access)
expect(json_response['only_allow_merge_if_pipeline_succeeds']).to eq(project.only_allow_merge_if_pipeline_succeeds)
expect(json_response['allow_merge_on_skipped_pipeline']).to eq(project.allow_merge_on_skipped_pipeline)
+ expect(json_response['restrict_user_defined_variables']).to eq(project.restrict_user_defined_variables?)
expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to eq(project.only_allow_merge_if_all_discussions_are_resolved)
expect(json_response['operations_access_level']).to be_present
end
@@ -1654,6 +1655,7 @@ RSpec.describe API::Projects do
expect(json_response['shared_with_groups'][0]).to have_key('expires_at')
expect(json_response['only_allow_merge_if_pipeline_succeeds']).to eq(project.only_allow_merge_if_pipeline_succeeds)
expect(json_response['allow_merge_on_skipped_pipeline']).to eq(project.allow_merge_on_skipped_pipeline)
+ expect(json_response['restrict_user_defined_variables']).to eq(project.restrict_user_defined_variables?)
expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to eq(project.only_allow_merge_if_all_discussions_are_resolved)
expect(json_response['ci_default_git_depth']).to eq(project.ci_default_git_depth)
expect(json_response['ci_forward_deployment_enabled']).to eq(project.ci_forward_deployment_enabled)
@@ -2597,6 +2599,18 @@ RSpec.describe API::Projects do
expect(response).to have_gitlab_http_status(:bad_request)
end
+ it 'updates restrict_user_defined_variables', :aggregate_failures do
+ project_param = { restrict_user_defined_variables: true }
+
+ put api("/projects/#{project3.id}", user), params: project_param
+
+ expect(response).to have_gitlab_http_status(:ok)
+
+ project_param.each_pair do |k, v|
+ expect(json_response[k.to_s]).to eq(v)
+ end
+ end
+
it 'updates avatar' do
project_param = {
avatar: fixture_file_upload('spec/fixtures/banana_sample.gif',
@@ -2711,6 +2725,22 @@ RSpec.describe API::Projects do
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['container_expiration_policy.name_regex_keep']).to contain_exactly('not valid RE2 syntax: missing ]: [')
end
+
+ it "doesn't update container_expiration_policy with invalid keep_n" do
+ project_param = {
+ container_expiration_policy_attributes: {
+ cadence: '1month',
+ enabled: true,
+ keep_n: 'not_int',
+ name_regex_keep: 'foo.*'
+ }
+ }
+
+ put api("/projects/#{project3.id}", user4), params: project_param
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('container_expiration_policy_attributes[keep_n] is invalid')
+ end
end
context 'when authenticated as project developer' do
@@ -3364,10 +3394,10 @@ RSpec.describe API::Projects do
end
describe 'POST /projects/:id/housekeeping' do
- let(:housekeeping) { Projects::HousekeepingService.new(project) }
+ let(:housekeeping) { Repositories::HousekeepingService.new(project) }
before do
- allow(Projects::HousekeepingService).to receive(:new).with(project, :gc).and_return(housekeeping)
+ allow(Repositories::HousekeepingService).to receive(:new).with(project, :gc).and_return(housekeeping)
end
context 'when authenticated as owner' do
@@ -3381,12 +3411,12 @@ RSpec.describe API::Projects do
context 'when housekeeping lease is taken' do
it 'returns conflict' do
- expect(housekeeping).to receive(:execute).once.and_raise(Projects::HousekeepingService::LeaseTaken)
+ expect(housekeeping).to receive(:execute).once.and_raise(Repositories::HousekeepingService::LeaseTaken)
post api("/projects/#{project.id}/housekeeping", user)
expect(response).to have_gitlab_http_status(:conflict)
- expect(json_response['message']).to match(/Somebody already triggered housekeeping for this project/)
+ expect(json_response['message']).to match(/Somebody already triggered housekeeping for this resource/)
end
end
end
diff --git a/spec/requests/api/releases_spec.rb b/spec/requests/api/releases_spec.rb
index 58b321a255e..70de2e5330b 100644
--- a/spec/requests/api/releases_spec.rb
+++ b/spec/requests/api/releases_spec.rb
@@ -16,9 +16,6 @@ RSpec.describe API::Releases do
project.add_reporter(reporter)
project.add_guest(guest)
project.add_developer(developer)
-
- project.repository.add_tag(maintainer, 'v0.1', commit.id)
- project.repository.add_tag(maintainer, 'v0.2', commit.id)
end
describe 'GET /projects/:id/releases' do
@@ -294,6 +291,25 @@ RSpec.describe API::Releases do
end
end
+ context 'when release is associated with multiple milestones' do
+ context 'milestones order' do
+ let_it_be(:project) { create(:project, :repository, :public) }
+ let_it_be_with_reload(:release_with_milestones) { create(:release, tag: 'v3.14', project: project) }
+
+ let(:actual_milestone_title_order) do
+ get api("/projects/#{project.id}/releases/#{release_with_milestones.tag}", non_project_member)
+
+ json_response['milestones'].map { |m| m['title'] }
+ end
+
+ before do
+ release_with_milestones.update!(milestones: [milestone_2, milestone_1])
+ end
+
+ it_behaves_like 'correct release milestone order'
+ end
+ end
+
context 'when release has link asset' do
let!(:link) do
create(:release_link,
@@ -461,6 +477,10 @@ RSpec.describe API::Releases do
}
end
+ before do
+ initialize_tags
+ end
+
it 'accepts the request' do
post api("/projects/#{project.id}/releases", maintainer), params: params
@@ -858,6 +878,10 @@ RSpec.describe API::Releases do
description: 'Super nice release')
end
+ before do
+ initialize_tags
+ end
+
it 'accepts the request' do
put api("/projects/#{project.id}/releases/v0.1", maintainer), params: params
@@ -1108,4 +1132,9 @@ RSpec.describe API::Releases do
end
end
end
+
+ def initialize_tags
+ project.repository.add_tag(maintainer, 'v0.1', commit.id)
+ project.repository.add_tag(maintainer, 'v0.2', commit.id)
+ end
end
diff --git a/spec/requests/api/snippet_repository_storage_moves_spec.rb b/spec/requests/api/snippet_repository_storage_moves_spec.rb
new file mode 100644
index 00000000000..edb92569823
--- /dev/null
+++ b/spec/requests/api/snippet_repository_storage_moves_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::SnippetRepositoryStorageMoves do
+ it_behaves_like 'repository_storage_moves API', 'snippets' do
+ let_it_be(:container) { create(:snippet, :repository).tap { |snippet| snippet.create_repository } }
+ let_it_be(:storage_move) { create(:snippet_repository_storage_move, :scheduled, container: container) }
+ let(:repository_storage_move_factory) { :snippet_repository_storage_move }
+ let(:bulk_worker_klass) { SnippetScheduleBulkRepositoryShardMovesWorker }
+ end
+end
diff --git a/spec/requests/api/terraform/state_spec.rb b/spec/requests/api/terraform/state_spec.rb
index 0fa088a641e..bfdb5458fd1 100644
--- a/spec/requests/api/terraform/state_spec.rb
+++ b/spec/requests/api/terraform/state_spec.rb
@@ -21,9 +21,36 @@ RSpec.describe API::Terraform::State do
stub_terraform_state_object_storage
end
+ shared_examples 'endpoint with unique user tracking' do
+ context 'without authentication' do
+ let(:auth_header) { basic_auth_header('bad', 'token') }
+
+ before do
+ stub_feature_flags(usage_data_p_terraform_state_api_unique_users: false)
+ end
+
+ it 'does not track unique event' do
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
+
+ request
+ end
+ end
+
+ context 'with maintainer permissions' do
+ let(:current_user) { maintainer }
+
+ it_behaves_like 'tracking unique hll events', :usage_data_p_terraform_state_api_unique_users do
+ let(:target_id) { 'p_terraform_state_api_unique_users' }
+ let(:expected_type) { instance_of(Integer) }
+ end
+ end
+ end
+
describe 'GET /projects/:id/terraform/state/:name' do
subject(:request) { get api(state_path), headers: auth_header }
+ it_behaves_like 'endpoint with unique user tracking'
+
context 'without authentication' do
let(:auth_header) { basic_auth_header('bad', 'token') }
@@ -117,6 +144,8 @@ RSpec.describe API::Terraform::State do
subject(:request) { post api(state_path), headers: auth_header, as: :json, params: params }
+ it_behaves_like 'endpoint with unique user tracking'
+
context 'when terraform state with a given name is already present' do
context 'with maintainer permissions' do
let(:current_user) { maintainer }
@@ -219,6 +248,8 @@ RSpec.describe API::Terraform::State do
describe 'DELETE /projects/:id/terraform/state/:name' do
subject(:request) { delete api(state_path), headers: auth_header }
+ it_behaves_like 'endpoint with unique user tracking'
+
context 'with maintainer permissions' do
let(:current_user) { maintainer }
@@ -256,6 +287,8 @@ RSpec.describe API::Terraform::State do
subject(:request) { post api("#{state_path}/lock"), headers: auth_header, params: params }
+ it_behaves_like 'endpoint with unique user tracking'
+
it 'locks the terraform state' do
request
@@ -305,6 +338,10 @@ RSpec.describe API::Terraform::State do
subject(:request) { delete api("#{state_path}/lock"), headers: auth_header, params: params }
+ it_behaves_like 'endpoint with unique user tracking' do
+ let(:lock_id) { 'irrelevant to this test, just needs to be present' }
+ end
+
context 'with the correct lock id' do
let(:lock_id) { '123-456' }
diff --git a/spec/requests/api/todos_spec.rb b/spec/requests/api/todos_spec.rb
index bc315c5b3c6..eaffa49fc9d 100644
--- a/spec/requests/api/todos_spec.rb
+++ b/spec/requests/api/todos_spec.rb
@@ -302,6 +302,8 @@ RSpec.describe API::Todos do
end
it 'returns 304 when there already exists a todo on that issuable' do
+ stub_feature_flags(multiple_todos: false)
+
create(:todo, project: project_1, author: author_1, user: john_doe, target: issuable)
post api("/projects/#{project_1.id}/#{issuable_type}/#{issuable.iid}/todo", john_doe)
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index 2cd1483f486..94fba451860 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -368,6 +368,16 @@ RSpec.describe API::Users do
expect(json_response.map { |u| u['id'] }).not_to include(internal_user.id)
end
end
+
+ context 'admins param' do
+ it 'returns all users' do
+ get api("/users?admins=true", user)
+
+ expect(response).to match_response_schema('public_api/v4/user/basics')
+ expect(json_response.size).to eq(2)
+ expect(json_response.map { |u| u['id'] }).to include(user.id, admin.id)
+ end
+ end
end
context "when admin" do
@@ -487,6 +497,16 @@ RSpec.describe API::Users do
expect(response).to have_gitlab_http_status(:bad_request)
end
end
+
+ context 'admins param' do
+ it 'returns only admins' do
+ get api("/users?admins=true", admin)
+
+ expect(response).to match_response_schema('public_api/v4/user/basics')
+ expect(json_response.size).to eq(1)
+ expect(json_response.first['id']).to eq(admin.id)
+ end
+ end
end
describe "GET /users/:id" do
@@ -2368,7 +2388,7 @@ RSpec.describe API::Users do
activate
expect(response).to have_gitlab_http_status(:forbidden)
- expect(json_response['message']).to eq('403 Forbidden - A blocked user must be unblocked to be activated')
+ expect(json_response['message']).to eq('403 Forbidden - A blocked user must be unblocked to be activated')
expect(user.reload.state).to eq('blocked')
end
end
@@ -2382,7 +2402,7 @@ RSpec.describe API::Users do
activate
expect(response).to have_gitlab_http_status(:forbidden)
- expect(json_response['message']).to eq('403 Forbidden - A blocked user must be unblocked to be activated')
+ expect(json_response['message']).to eq('403 Forbidden - A blocked user must be unblocked to be activated')
expect(user.reload.state).to eq('ldap_blocked')
end
end
@@ -2439,7 +2459,7 @@ RSpec.describe API::Users do
deactivate
expect(response).to have_gitlab_http_status(:forbidden)
- expect(json_response['message']).to eq("403 Forbidden - The user you are trying to deactivate has been active in the past #{::User::MINIMUM_INACTIVE_DAYS} days and cannot be deactivated")
+ expect(json_response['message']).to eq("403 Forbidden - The user you are trying to deactivate has been active in the past #{::User::MINIMUM_INACTIVE_DAYS} days and cannot be deactivated")
expect(user.reload.state).to eq('active')
end
end
@@ -2467,7 +2487,7 @@ RSpec.describe API::Users do
deactivate
expect(response).to have_gitlab_http_status(:forbidden)
- expect(json_response['message']).to eq('403 Forbidden - A blocked user cannot be deactivated by the API')
+ expect(json_response['message']).to eq('403 Forbidden - A blocked user cannot be deactivated by the API')
expect(user.reload.state).to eq('blocked')
end
end
@@ -2481,7 +2501,7 @@ RSpec.describe API::Users do
deactivate
expect(response).to have_gitlab_http_status(:forbidden)
- expect(json_response['message']).to eq('403 Forbidden - A blocked user cannot be deactivated by the API')
+ expect(json_response['message']).to eq('403 Forbidden - A blocked user cannot be deactivated by the API')
expect(user.reload.state).to eq('ldap_blocked')
end
end
@@ -2493,7 +2513,7 @@ RSpec.describe API::Users do
deactivate
expect(response).to have_gitlab_http_status(:forbidden)
- expect(json_response['message']).to eq('403 Forbidden - An internal user cannot be deactivated by the API')
+ expect(json_response['message']).to eq('403 Forbidden - An internal user cannot be deactivated by the API')
end
end
@@ -2853,115 +2873,91 @@ RSpec.describe API::Users do
let(:expires_at) { 3.days.from_now.to_date.to_s }
let(:scopes) { %w(api read_user) }
- context 'when feature flag is enabled' do
- before do
- stub_feature_flags(pat_creation_api_for_admin: true)
- end
-
- it 'returns error if required attributes are missing' do
- post api("/users/#{user.id}/personal_access_tokens", admin)
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['error']).to eq('name is missing, scopes is missing, scopes does not have a valid value')
- end
-
- it 'returns a 404 error if user not found' do
- post api("/users/#{non_existing_record_id}/personal_access_tokens", admin),
- params: {
- name: name,
- scopes: scopes,
- expires_at: expires_at
- }
+ it 'returns error if required attributes are missing' do
+ post api("/users/#{user.id}/personal_access_tokens", admin)
- expect(response).to have_gitlab_http_status(:not_found)
- expect(json_response['message']).to eq('404 User Not Found')
- end
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('name is missing, scopes is missing, scopes does not have a valid value')
+ end
- it 'returns a 401 error when not authenticated' do
- post api("/users/#{user.id}/personal_access_tokens"),
- params: {
- name: name,
- scopes: scopes,
- expires_at: expires_at
- }
+ it 'returns a 404 error if user not found' do
+ post api("/users/#{non_existing_record_id}/personal_access_tokens", admin),
+ params: {
+ name: name,
+ scopes: scopes,
+ expires_at: expires_at
+ }
- expect(response).to have_gitlab_http_status(:unauthorized)
- expect(json_response['message']).to eq('401 Unauthorized')
- end
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 User Not Found')
+ end
- it 'returns a 403 error when authenticated as normal user' do
- post api("/users/#{user.id}/personal_access_tokens", user),
- params: {
- name: name,
- scopes: scopes,
- expires_at: expires_at
- }
+ it 'returns a 401 error when not authenticated' do
+ post api("/users/#{user.id}/personal_access_tokens"),
+ params: {
+ name: name,
+ scopes: scopes,
+ expires_at: expires_at
+ }
- expect(response).to have_gitlab_http_status(:forbidden)
- expect(json_response['message']).to eq('403 Forbidden')
- end
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ expect(json_response['message']).to eq('401 Unauthorized')
+ end
- it 'creates a personal access token when authenticated as admin' do
- post api("/users/#{user.id}/personal_access_tokens", admin),
- params: {
- name: name,
- expires_at: expires_at,
- scopes: scopes
- }
+ it 'returns a 403 error when authenticated as normal user' do
+ post api("/users/#{user.id}/personal_access_tokens", user),
+ params: {
+ name: name,
+ scopes: scopes,
+ expires_at: expires_at
+ }
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['name']).to eq(name)
- expect(json_response['scopes']).to eq(scopes)
- expect(json_response['expires_at']).to eq(expires_at)
- expect(json_response['id']).to be_present
- expect(json_response['created_at']).to be_present
- expect(json_response['active']).to be_truthy
- expect(json_response['revoked']).to be_falsey
- expect(json_response['token']).to be_present
- end
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(json_response['message']).to eq('403 Forbidden')
+ end
- context 'when an error is thrown by the model' do
- let!(:admin_personal_access_token) { create(:personal_access_token, user: admin) }
- let(:error_message) { 'error message' }
+ it 'creates a personal access token when authenticated as admin' do
+ post api("/users/#{user.id}/personal_access_tokens", admin),
+ params: {
+ name: name,
+ expires_at: expires_at,
+ scopes: scopes
+ }
- before do
- allow_next_instance_of(PersonalAccessToken) do |personal_access_token|
- allow(personal_access_token).to receive_message_chain(:errors, :full_messages)
- .and_return([error_message])
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['name']).to eq(name)
+ expect(json_response['scopes']).to eq(scopes)
+ expect(json_response['expires_at']).to eq(expires_at)
+ expect(json_response['id']).to be_present
+ expect(json_response['created_at']).to be_present
+ expect(json_response['active']).to be_truthy
+ expect(json_response['revoked']).to be_falsey
+ expect(json_response['token']).to be_present
+ end
- allow(personal_access_token).to receive(:save).and_return(false)
- end
- end
+ context 'when an error is thrown by the model' do
+ let!(:admin_personal_access_token) { create(:personal_access_token, user: admin) }
+ let(:error_message) { 'error message' }
- it 'returns the error' do
- post api("/users/#{user.id}/personal_access_tokens", personal_access_token: admin_personal_access_token),
- params: {
- name: name,
- expires_at: expires_at,
- scopes: scopes
- }
+ before do
+ allow_next_instance_of(PersonalAccessToken) do |personal_access_token|
+ allow(personal_access_token).to receive_message_chain(:errors, :full_messages)
+ .and_return([error_message])
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
- expect(json_response['message']).to eq(error_message)
+ allow(personal_access_token).to receive(:save).and_return(false)
end
end
- end
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(pat_creation_api_for_admin: false)
- end
-
- it 'returns a 404' do
- post api("/users/#{user.id}/personal_access_tokens", admin),
+ it 'returns the error' do
+ post api("/users/#{user.id}/personal_access_tokens", personal_access_token: admin_personal_access_token),
params: {
name: name,
expires_at: expires_at,
scopes: scopes
}
- expect(response).to have_gitlab_http_status(:not_found)
- expect(json_response['message']).to eq('404 Not Found')
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(json_response['message']).to eq(error_message)
end
end
end
diff --git a/spec/controllers/jwks_controller_spec.rb b/spec/requests/jwks_controller_spec.rb
index 013ec01eba2..5eda1979027 100644
--- a/spec/controllers/jwks_controller_spec.rb
+++ b/spec/requests/jwks_controller_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe JwksController do
- describe 'GET #index' do
+ describe 'GET /-/jwks' do
let(:ci_jwt_signing_key) { OpenSSL::PKey::RSA.generate(1024) }
let(:ci_jwk) { ci_jwt_signing_key.to_jwk }
let(:oidc_jwk) { OpenSSL::PKey::RSA.new(Rails.application.secrets.openid_connect_signing_key).to_jwk }
@@ -13,7 +13,7 @@ RSpec.describe JwksController do
end
it 'returns signing keys used to sign CI_JOB_JWT' do
- get :index
+ get jwks_url
expect(response).to have_gitlab_http_status(:ok)
@@ -22,7 +22,7 @@ RSpec.describe JwksController do
end
it 'does not leak private key data' do
- get :index
+ get jwks_url
aggregate_failures do
json_response['keys'].each do |jwk|
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index 535d511a459..aeec18aee2b 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -193,10 +193,8 @@ RSpec.describe 'Git LFS API and storage' do
subject(:request) { post_lfs_json batch_url(project), body, headers }
let(:response) { request && super() }
- let(:lfs_chunked_encoding) { true }
before do
- stub_feature_flags(lfs_chunked_encoding: lfs_chunked_encoding)
project.lfs_objects << lfs_object
end
@@ -480,20 +478,6 @@ RSpec.describe 'Git LFS API and storage' do
expect(headers['Transfer-Encoding']).to eq('chunked')
end
- context 'when lfs_chunked_encoding feature is disabled' do
- let(:lfs_chunked_encoding) { false }
-
- it 'responds with upload hypermedia link' do
- expect(json_response['objects']).to be_kind_of(Array)
- expect(json_response['objects'].first).to include(sample_object)
- expect(json_response['objects'].first['actions']['upload']['href']).to eq(objects_url(project, sample_oid, sample_size))
-
- headers = json_response['objects'].first['actions']['upload']['header']
- expect(headers['Content-Type']).to eq('application/octet-stream')
- expect(headers['Transfer-Encoding']).to be_nil
- end
- end
-
it_behaves_like 'process authorization header', renew_authorization: renew_authorization
end
diff --git a/spec/requests/profiles/notifications_controller_spec.rb b/spec/requests/profiles/notifications_controller_spec.rb
index 87669b3594c..d7dfb1c675d 100644
--- a/spec/requests/profiles/notifications_controller_spec.rb
+++ b/spec/requests/profiles/notifications_controller_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe 'view user notifications' do
get profile_notifications_path
end
- describe 'GET /profile/notifications' do
+ describe 'GET /-/profile/notifications' do
it 'does not have an N+1 due to additional groups (with no parent group)' do
get_profile_notifications
diff --git a/spec/requests/projects/cycle_analytics_events_spec.rb b/spec/requests/projects/cycle_analytics_events_spec.rb
index f8fa9459467..565576f3091 100644
--- a/spec/requests/projects/cycle_analytics_events_spec.rb
+++ b/spec/requests/projects/cycle_analytics_events_spec.rb
@@ -7,113 +7,95 @@ RSpec.describe 'value stream analytics events' do
let(:project) { create(:project, :repository, public_builds: false) }
let(:issue) { create(:issue, project: project, created_at: 2.days.ago) }
- shared_examples 'value stream analytics events examples' do
- describe 'GET /:namespace/:project/value_stream_analytics/events/issues' do
- before do
- project.add_developer(user)
+ describe 'GET /:namespace/:project/value_stream_analytics/events/issues' do
+ before do
+ project.add_developer(user)
- 3.times do |count|
- travel_to(Time.now + count.days) do
- create_cycle
- end
+ 3.times do |count|
+ travel_to(Time.now + count.days) do
+ create_cycle
end
-
- deploy_master(user, project)
-
- login_as(user)
end
- it 'lists the issue events' do
- get project_cycle_analytics_issue_path(project, format: :json)
+ deploy_master(user, project)
- first_issue_iid = project.issues.sort_by_attribute(:created_desc).pluck(:iid).first.to_s
+ login_as(user)
+ end
- expect(json_response['events']).not_to be_empty
- expect(json_response['events'].first['iid']).to eq(first_issue_iid)
- end
+ it 'lists the issue events' do
+ get project_cycle_analytics_issue_path(project, format: :json)
- it 'lists the plan events' do
- get project_cycle_analytics_plan_path(project, format: :json)
+ first_issue_iid = project.issues.sort_by_attribute(:created_desc).pluck(:iid).first.to_s
- first_issue_iid = project.issues.sort_by_attribute(:created_desc).pluck(:iid).first.to_s
+ expect(json_response['events']).not_to be_empty
+ expect(json_response['events'].first['iid']).to eq(first_issue_iid)
+ end
- expect(json_response['events']).not_to be_empty
- expect(json_response['events'].first['iid']).to eq(first_issue_iid)
- end
+ it 'lists the plan events' do
+ get project_cycle_analytics_plan_path(project, format: :json)
- it 'lists the code events' do
- get project_cycle_analytics_code_path(project, format: :json)
+ first_issue_iid = project.issues.sort_by_attribute(:created_desc).pluck(:iid).first.to_s
- expect(json_response['events']).not_to be_empty
+ expect(json_response['events']).not_to be_empty
+ expect(json_response['events'].first['iid']).to eq(first_issue_iid)
+ end
- first_mr_iid = project.merge_requests.sort_by_attribute(:created_desc).pluck(:iid).first.to_s
+ it 'lists the code events' do
+ get project_cycle_analytics_code_path(project, format: :json)
- expect(json_response['events'].first['iid']).to eq(first_mr_iid)
- end
+ expect(json_response['events']).not_to be_empty
- it 'lists the test events', :sidekiq_inline do
- get project_cycle_analytics_test_path(project, format: :json)
+ first_mr_iid = project.merge_requests.sort_by_attribute(:created_desc).pluck(:iid).first.to_s
- expect(json_response['events']).not_to be_empty
- expect(json_response['events'].first['date']).not_to be_empty
- end
+ expect(json_response['events'].first['iid']).to eq(first_mr_iid)
+ end
- it 'lists the review events' do
- get project_cycle_analytics_review_path(project, format: :json)
+ it 'lists the test events', :sidekiq_inline do
+ get project_cycle_analytics_test_path(project, format: :json)
- first_mr_iid = project.merge_requests.sort_by_attribute(:created_desc).pluck(:iid).first.to_s
+ expect(json_response['events']).not_to be_empty
+ expect(json_response['events'].first['date']).not_to be_empty
+ end
- expect(json_response['events']).not_to be_empty
- expect(json_response['events'].first['iid']).to eq(first_mr_iid)
- end
+ it 'lists the review events' do
+ get project_cycle_analytics_review_path(project, format: :json)
- it 'lists the staging events', :sidekiq_inline do
- get project_cycle_analytics_staging_path(project, format: :json)
+ first_mr_iid = project.merge_requests.sort_by_attribute(:created_desc).pluck(:iid).first.to_s
- expect(json_response['events']).not_to be_empty
- expect(json_response['events'].first['date']).not_to be_empty
- end
+ expect(json_response['events']).not_to be_empty
+ expect(json_response['events'].first['iid']).to eq(first_mr_iid)
+ end
- context 'with private project and builds' do
- before do
- project.members.last.update(access_level: Gitlab::Access::GUEST)
- end
+ it 'lists the staging events', :sidekiq_inline do
+ get project_cycle_analytics_staging_path(project, format: :json)
- it 'does not list the test events' do
- get project_cycle_analytics_test_path(project, format: :json)
+ expect(json_response['events']).not_to be_empty
+ expect(json_response['events'].first['date']).not_to be_empty
+ end
- expect(response).to have_gitlab_http_status(:not_found)
- end
+ context 'with private project and builds' do
+ before do
+ project.members.last.update(access_level: Gitlab::Access::GUEST)
+ end
- it 'does not list the staging events' do
- get project_cycle_analytics_staging_path(project, format: :json)
+ it 'does not list the test events' do
+ get project_cycle_analytics_test_path(project, format: :json)
- expect(response).to have_gitlab_http_status(:not_found)
- end
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
- it 'lists the issue events' do
- get project_cycle_analytics_issue_path(project, format: :json)
+ it 'does not list the staging events' do
+ get project_cycle_analytics_staging_path(project, format: :json)
- expect(response).to have_gitlab_http_status(:ok)
- end
+ expect(response).to have_gitlab_http_status(:not_found)
end
- end
- end
- describe 'when new_project_level_vsa_backend feature flag is off' do
- before do
- stub_feature_flags(new_project_level_vsa_backend: false, thing: project)
- end
-
- it_behaves_like 'value stream analytics events examples'
- end
+ it 'lists the issue events' do
+ get project_cycle_analytics_issue_path(project, format: :json)
- describe 'when new_project_level_vsa_backend feature flag is on' do
- before do
- stub_feature_flags(new_project_level_vsa_backend: true, thing: project)
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
-
- it_behaves_like 'value stream analytics events examples'
end
def create_cycle
diff --git a/spec/requests/projects/incident_management/pagerduty_incidents_spec.rb b/spec/requests/projects/incident_management/pagerduty_incidents_spec.rb
index b18bffdb110..32434435475 100644
--- a/spec/requests/projects/incident_management/pagerduty_incidents_spec.rb
+++ b/spec/requests/projects/incident_management/pagerduty_incidents_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe 'PagerDuty webhook' do
it 'calls PagerDuty webhook processor with correct parameters' do
make_request
- expect(webhook_processor_class).to have_received(:new).with(project, nil, payload)
+ expect(webhook_processor_class).to have_received(:new).with(project, payload)
expect(webhook_processor).to have_received(:execute).with('VALID-TOKEN')
end
diff --git a/spec/requests/rack_attack_global_spec.rb b/spec/requests/rack_attack_global_spec.rb
index c2e68df2c40..34f34c0b850 100644
--- a/spec/requests/rack_attack_global_spec.rb
+++ b/spec/requests/rack_attack_global_spec.rb
@@ -60,6 +60,24 @@ RSpec.describe 'Rack Attack global throttles' do
expect_rejection { get url_that_does_not_require_authentication }
end
+ context 'with custom response text' do
+ before do
+ stub_application_setting(rate_limiting_response_text: 'Custom response')
+ end
+
+ it 'rejects requests over the rate limit' do
+ # At first, allow requests under the rate limit.
+ requests_per_period.times do
+ get url_that_does_not_require_authentication
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ # the last straw
+ expect_rejection { get url_that_does_not_require_authentication }
+ expect(response.body).to eq("Custom response\n")
+ end
+ end
+
it 'allows requests after throttling and then waiting for the next period' do
requests_per_period.times do
get url_that_does_not_require_authentication
@@ -170,16 +188,16 @@ RSpec.describe 'Rack Attack global throttles' do
end
describe 'API requests authenticated with personal access token', :api do
- let(:user) { create(:user) }
- let(:token) { create(:personal_access_token, user: user) }
- let(:other_user) { create(:user) }
- let(:other_user_token) { create(:personal_access_token, user: other_user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:token) { create(:personal_access_token, user: user) }
+ let_it_be(:other_user) { create(:user) }
+ let_it_be(:other_user_token) { create(:personal_access_token, user: other_user) }
let(:throttle_setting_prefix) { 'throttle_authenticated_api' }
let(:api_partial_url) { '/todos' }
context 'with the token in the query string' do
- let(:request_args) { [api(api_partial_url, personal_access_token: token)] }
- let(:other_user_request_args) { [api(api_partial_url, personal_access_token: other_user_token)] }
+ let(:request_args) { [api(api_partial_url, personal_access_token: token), {}] }
+ let(:other_user_request_args) { [api(api_partial_url, personal_access_token: other_user_token), {}] }
it_behaves_like 'rate-limited token-authenticated requests'
end
@@ -190,6 +208,41 @@ RSpec.describe 'Rack Attack global throttles' do
it_behaves_like 'rate-limited token-authenticated requests'
end
+
+ context 'with the token in the OAuth headers' do
+ let(:request_args) { api_get_args_with_token_headers(api_partial_url, oauth_token_headers(token)) }
+ let(:other_user_request_args) { api_get_args_with_token_headers(api_partial_url, oauth_token_headers(other_user_token)) }
+
+ it_behaves_like 'rate-limited token-authenticated requests'
+ end
+
+ context 'with the token in basic auth' do
+ let(:request_args) { api_get_args_with_token_headers(api_partial_url, basic_auth_headers(user, token)) }
+ let(:other_user_request_args) { api_get_args_with_token_headers(api_partial_url, basic_auth_headers(other_user, other_user_token)) }
+
+ it_behaves_like 'rate-limited token-authenticated requests'
+ end
+
+ context 'with a read_api scope' do
+ before do
+ token.update!(scopes: ['read_api'])
+ other_user_token.update!(scopes: ['read_api'])
+ end
+
+ context 'with the token in the headers' do
+ let(:request_args) { api_get_args_with_token_headers(api_partial_url, personal_access_token_headers(token)) }
+ let(:other_user_request_args) { api_get_args_with_token_headers(api_partial_url, personal_access_token_headers(other_user_token)) }
+
+ it_behaves_like 'rate-limited token-authenticated requests'
+ end
+
+ context 'with the token in the OAuth headers' do
+ let(:request_args) { api_get_args_with_token_headers(api_partial_url, oauth_token_headers(token)) }
+ let(:other_user_request_args) { api_get_args_with_token_headers(api_partial_url, oauth_token_headers(other_user_token)) }
+
+ it_behaves_like 'rate-limited token-authenticated requests'
+ end
+ end
end
describe 'API requests authenticated with OAuth token', :api do
@@ -205,8 +258,8 @@ RSpec.describe 'Rack Attack global throttles' do
let(:api_partial_url) { '/todos' }
context 'with the token in the query string' do
- let(:request_args) { [api(api_partial_url, oauth_access_token: token)] }
- let(:other_user_request_args) { [api(api_partial_url, oauth_access_token: other_user_token)] }
+ let(:request_args) { [api(api_partial_url, oauth_access_token: token), {}] }
+ let(:other_user_request_args) { [api(api_partial_url, oauth_access_token: other_user_token), {}] }
it_behaves_like 'rate-limited token-authenticated requests'
end
@@ -217,6 +270,15 @@ RSpec.describe 'Rack Attack global throttles' do
it_behaves_like 'rate-limited token-authenticated requests'
end
+
+ context 'with a read_api scope' do
+ let(:read_token) { Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: user.id, scopes: "read_api") }
+ let(:other_user_read_token) { Doorkeeper::AccessToken.create!(application_id: other_user_application.id, resource_owner_id: other_user.id, scopes: "read_api") }
+ let(:request_args) { api_get_args_with_token_headers(api_partial_url, oauth_token_headers(read_token)) }
+ let(:other_user_request_args) { api_get_args_with_token_headers(api_partial_url, oauth_token_headers(other_user_read_token)) }
+
+ it_behaves_like 'rate-limited token-authenticated requests'
+ end
end
describe '"web" (non-API) requests authenticated with RSS token' do
@@ -314,8 +376,8 @@ RSpec.describe 'Rack Attack global throttles' do
end
context 'with the token in the query string' do
- let(:request_args) { [api(api_partial_url, personal_access_token: token)] }
- let(:other_user_request_args) { [api(api_partial_url, personal_access_token: other_user_token)] }
+ let(:request_args) { [api(api_partial_url, personal_access_token: token), {}] }
+ let(:other_user_request_args) { [api(api_partial_url, personal_access_token: other_user_token), {}] }
it_behaves_like 'rate-limited token-authenticated requests'
end
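The header-based contexts above rely on a few request-builder helpers. The bodies below are a hypothetical sketch of what such helpers could look like (only the helper names appear in this diff; the implementations here are assumptions, not taken from the spec support code):

# Hypothetical sketch -- the real helpers live in the spec support files and may differ.
def api_get_args_with_token_headers(partial_url, token_headers)
  # Shape matches how the shared examples expand request_args: get(*request_args).
  [api(partial_url), { headers: token_headers }]
end

def personal_access_token_headers(personal_access_token)
  { 'Private-Token' => personal_access_token.token }
end

def oauth_token_headers(oauth_access_token)
  { 'Authorization' => "Bearer #{oauth_access_token.token}" }
end

def basic_auth_headers(user, personal_access_token)
  { 'Authorization' => ActionController::HttpAuthentication::Basic
      .encode_credentials(user.username, personal_access_token.token) }
end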
diff --git a/spec/controllers/runner_setup_controller_spec.rb b/spec/requests/runner_setup_controller_spec.rb
index 0b237500907..665c896e30d 100644
--- a/spec/controllers/runner_setup_controller_spec.rb
+++ b/spec/requests/runner_setup_controller_spec.rb
@@ -9,9 +9,9 @@ RSpec.describe RunnerSetupController do
sign_in(user)
end
- describe 'GET #platforms' do
+ describe 'GET /-/runner_setup/platforms' do
it 'renders the platforms' do
- get :platforms
+ get runner_setup_platforms_url
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to have_key("windows")
diff --git a/spec/controllers/users_controller_spec.rb b/spec/requests/users_controller_spec.rb
index 916befe3f62..b224ef87229 100644
--- a/spec/controllers/users_controller_spec.rb
+++ b/spec/requests/users_controller_spec.rb
@@ -9,29 +9,26 @@ RSpec.describe UsersController do
let(:public_user) { create(:user) }
describe 'GET #show' do
- context 'with rendered views' do
- render_views
+ shared_examples_for 'renders the show template' do
+ it 'renders the show template' do
+ get user_url user.username
- describe 'when logged in' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template('show')
+ end
+ end
+
+ context 'when the user exists and has public visibility' do
+ context 'when logged in' do
before do
sign_in(user)
end
- it 'renders the show template' do
- get :show, params: { username: user.username }
-
- expect(response).to be_successful
- expect(response).to render_template('show')
- end
+ it_behaves_like 'renders the show template'
end
- describe 'when logged out' do
- it 'renders the show template' do
- get :show, params: { username: user.username }
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to render_template('show')
- end
+ context 'when logged out' do
+ it_behaves_like 'renders the show template'
end
end
@@ -42,7 +39,8 @@ RSpec.describe UsersController do
context 'when logged out' do
it 'redirects to login page' do
- get :show, params: { username: user.username }
+ get user_url user.username
+
expect(response).to redirect_to new_user_session_path
end
end
@@ -52,18 +50,15 @@ RSpec.describe UsersController do
sign_in(user)
end
- it 'renders show' do
- get :show, params: { username: user.username }
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to render_template('show')
- end
+ it_behaves_like 'renders the show template'
end
end
context 'when a user by that username does not exist' do
context 'when logged out' do
it 'redirects to login page' do
- get :show, params: { username: 'nonexistent' }
+ get user_url 'nonexistent'
+
expect(response).to redirect_to new_user_session_path
end
end
@@ -74,13 +69,14 @@ RSpec.describe UsersController do
end
it 'renders 404' do
- get :show, params: { username: 'nonexistent' }
+ get user_url 'nonexistent'
+
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
- context 'json with events' do
+ context 'requested in json format' do
let(:project) { create(:project) }
before do
@@ -90,55 +86,46 @@ RSpec.describe UsersController do
sign_in(user)
end
- it 'loads events' do
- get :show, params: { username: user }, format: :json
-
- expect(assigns(:events)).not_to be_empty
- end
-
- it 'hides events if the user cannot read cross project' do
- allow(Ability).to receive(:allowed?).and_call_original
- expect(Ability).to receive(:allowed?).with(user, :read_cross_project) { false }
-
- get :show, params: { username: user }, format: :json
+ it 'returns 404 with a deprecation message' do
+ # Requesting "/username?format=json" instead of "/username.json"
+ get user_url user.username, params: { format: :json }
- expect(assigns(:events)).to be_empty
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(response.media_type).to eq('application/json')
+ expect(Gitlab::Json.parse(response.body)['message']).to include('This endpoint is deprecated.')
end
+ end
+ end
- it 'hides events if the user has a private profile' do
- Gitlab::DataBuilder::Push.build_sample(project, private_user)
-
- get :show, params: { username: private_user.username }, format: :json
+ describe 'GET /users/:username (deprecated user top)' do
+ it 'redirects to /user1' do
+ get '/users/user1'
- expect(assigns(:events)).to be_empty
- end
+ expect(response).to redirect_to user_path('user1')
end
end
describe 'GET #activity' do
- context 'with rendered views' do
- render_views
+ shared_examples_for 'renders the show template' do
+ it 'renders the show template' do
+ get user_activity_url user.username
- describe 'when logged in' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template('show')
+ end
+ end
+
+ context 'when the user exists and has public visibility' do
+ context 'when logged in' do
before do
sign_in(user)
end
- it 'renders the show template' do
- get :show, params: { username: user.username }
-
- expect(response).to be_successful
- expect(response).to render_template('show')
- end
+ it_behaves_like 'renders the show template'
end
- describe 'when logged out' do
- it 'renders the show template' do
- get :activity, params: { username: user.username }
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to render_template('show')
- end
+ context 'when logged out' do
+ it_behaves_like 'renders the show template'
end
end
@@ -149,7 +136,8 @@ RSpec.describe UsersController do
context 'when logged out' do
it 'redirects to login page' do
- get :activity, params: { username: user.username }
+ get user_activity_url user.username
+
expect(response).to redirect_to new_user_session_path
end
end
@@ -159,18 +147,15 @@ RSpec.describe UsersController do
sign_in(user)
end
- it 'renders show' do
- get :activity, params: { username: user.username }
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to render_template('show')
- end
+ it_behaves_like 'renders the show template'
end
end
context 'when a user by that username does not exist' do
context 'when logged out' do
it 'redirects to login page' do
- get :activity, params: { username: 'nonexistent' }
+ get user_activity_url 'nonexistent'
+
expect(response).to redirect_to new_user_session_path
end
end
@@ -181,13 +166,14 @@ RSpec.describe UsersController do
end
it 'renders 404' do
- get :activity, params: { username: 'nonexistent' }
+ get user_activity_url 'nonexistent'
+
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
- context 'json with events' do
+ context 'requested in json format' do
let(:project) { create(:project) }
before do
@@ -198,76 +184,63 @@ RSpec.describe UsersController do
end
it 'loads events' do
- get :activity, params: { username: user }, format: :json
+ get user_activity_url user.username, format: :json
- expect(assigns(:events)).not_to be_empty
+ expect(response.media_type).to eq('application/json')
+ expect(Gitlab::Json.parse(response.body)['count']).to eq(1)
end
it 'hides events if the user cannot read cross project' do
allow(Ability).to receive(:allowed?).and_call_original
expect(Ability).to receive(:allowed?).with(user, :read_cross_project) { false }
- get :activity, params: { username: user }, format: :json
+ get user_activity_url user.username, format: :json
- expect(assigns(:events)).to be_empty
+ expect(response.media_type).to eq('application/json')
+ expect(Gitlab::Json.parse(response.body)['count']).to eq(0)
end
it 'hides events if the user has a private profile' do
Gitlab::DataBuilder::Push.build_sample(project, private_user)
- get :activity, params: { username: private_user.username }, format: :json
+ get user_activity_url private_user.username, format: :json
- expect(assigns(:events)).to be_empty
+ expect(response.media_type).to eq('application/json')
+ expect(Gitlab::Json.parse(response.body)['count']).to eq(0)
end
end
end
- describe "#ssh_keys" do
- describe "non existent user" do
- it "does not generally work" do
- get :ssh_keys, params: { username: 'not-existent' }
+ describe 'GET #ssh_keys' do
+ context 'non-existent user' do
+ it 'does not generally work' do
+ get '/not-existent.keys'
expect(response).not_to be_successful
end
end
- describe "user with no keys" do
- it "does generally work" do
- get :ssh_keys, params: { username: user.username }
+ context 'user with no keys' do
+ it 'responds with an empty body and text/plain content type' do
+ get "/#{user.username}.keys"
expect(response).to be_successful
- end
-
- it "renders all keys separated with a new line" do
- get :ssh_keys, params: { username: user.username }
-
+ expect(response.media_type).to eq("text/plain")
expect(response.body).to eq("")
end
-
- it "responds with text/plain content type" do
- get :ssh_keys, params: { username: user.username }
- expect(response.content_type).to eq("text/plain")
- end
end
- describe "user with keys" do
+ context 'user with keys' do
let!(:key) { create(:key, user: user) }
let!(:another_key) { create(:another_key, user: user) }
let!(:deploy_key) { create(:deploy_key, user: user) }
- describe "while signed in" do
- before do
- sign_in(user)
- end
-
- it "does generally work" do
- get :ssh_keys, params: { username: user.username }
+ shared_examples_for 'renders all public keys' do
+ it 'renders all non-deploy keys separated by a new line, with text/plain content type and without the key comment' do
+ get "/#{user.username}.keys"
expect(response).to be_successful
- end
-
- it "renders all non deploy keys separated with a new line" do
- get :ssh_keys, params: { username: user.username }
+ expect(response.media_type).to eq("text/plain")
expect(response.body).not_to eq('')
expect(response.body).to eq(user.all_ssh_keys.join("\n"))
@@ -275,104 +248,60 @@ RSpec.describe UsersController do
expect(response.body).to include(key.key.sub(' dummy@gitlab.com', ''))
expect(response.body).to include(another_key.key.sub(' dummy@gitlab.com', ''))
- expect(response.body).not_to include(deploy_key.key)
- end
-
- it "does not render the comment of the key" do
- get :ssh_keys, params: { username: user.username }
expect(response.body).not_to match(/dummy@gitlab.com/)
- end
-
- it "responds with text/plain content type" do
- get :ssh_keys, params: { username: user.username }
- expect(response.content_type).to eq("text/plain")
+ expect(response.body).not_to include(deploy_key.key)
end
end
- describe 'when logged out' do
+ context 'while signed in' do
before do
- sign_out(user)
- end
-
- it "still does generally work" do
- get :ssh_keys, params: { username: user.username }
-
- expect(response).to be_successful
+ sign_in(user)
end
- it "renders all non deploy keys separated with a new line" do
- get :ssh_keys, params: { username: user.username }
-
- expect(response.body).not_to eq('')
- expect(response.body).to eq(user.all_ssh_keys.join("\n"))
-
- expect(response.body).to include(key.key.sub(' dummy@gitlab.com', ''))
- expect(response.body).to include(another_key.key.sub(' dummy@gitlab.com', ''))
-
- expect(response.body).not_to include(deploy_key.key)
- end
+ it_behaves_like 'renders all public keys'
+ end
- it "does not render the comment of the key" do
- get :ssh_keys, params: { username: user.username }
- expect(response.body).not_to match(/dummy@gitlab.com/)
+ context 'when logged out' do
+ before do
+ sign_out(user)
end
- it "responds with text/plain content type" do
- get :ssh_keys, params: { username: user.username }
-
- expect(response.content_type).to eq("text/plain")
- end
+ it_behaves_like 'renders all public keys'
end
end
end
- describe "#gpg_keys" do
- describe "non existent user" do
- it "does not generally work" do
- get :gpg_keys, params: { username: 'not-existent' }
+ describe 'GET #gpg_keys' do
+ context 'non-existent user' do
+ it 'does not generally work' do
+ get '/not-existent.keys'
expect(response).not_to be_successful
end
end
- describe "user with no keys" do
- it "does generally work" do
- get :gpg_keys, params: { username: user.username }
+ context 'user with no keys' do
+ it 'responds with an empty body and text/plain content type' do
+ get "/#{user.username}.gpg"
expect(response).to be_successful
- end
-
- it "renders all keys separated with a new line" do
- get :gpg_keys, params: { username: user.username }
-
+ expect(response.media_type).to eq("text/plain")
expect(response.body).to eq("")
end
-
- it "responds with text/plain content type" do
- get :gpg_keys, params: { username: user.username }
-
- expect(response.content_type).to eq("text/plain")
- end
end
- describe "user with keys" do
+ context 'user with keys' do
let!(:gpg_key) { create(:gpg_key, user: user) }
let!(:another_gpg_key) { create(:another_gpg_key, user: user) }
- describe "while signed in" do
- before do
- sign_in(user)
- end
-
- it "does generally work" do
- get :gpg_keys, params: { username: user.username }
+ shared_examples_for 'renders all verified GPG keys' do
+ it 'renders all verified keys separated by a new line, with text/plain content type' do
+ get "/#{user.username}.gpg"
expect(response).to be_successful
- end
- it "renders all verified keys separated with a new line" do
- get :gpg_keys, params: { username: user.username }
+ expect(response.media_type).to eq("text/plain")
expect(response.body).not_to eq('')
expect(response.body).to eq(user.gpg_keys.select(&:verified?).map(&:key).join("\n"))
@@ -380,65 +309,51 @@ RSpec.describe UsersController do
expect(response.body).to include(gpg_key.key)
expect(response.body).to include(another_gpg_key.key)
end
-
- it "responds with text/plain content type" do
- get :gpg_keys, params: { username: user.username }
-
- expect(response.content_type).to eq("text/plain")
- end
end
- describe 'when logged out' do
+ context 'while signed in' do
before do
- sign_out(user)
+ sign_in(user)
end
- it "still does generally work" do
- get :gpg_keys, params: { username: user.username }
+ it_behaves_like 'renders all verified GPG keys'
+ end
- expect(response).to be_successful
+ context 'when logged out' do
+ before do
+ sign_out(user)
end
- it "renders all verified keys separated with a new line" do
- get :gpg_keys, params: { username: user.username }
-
- expect(response.body).not_to eq('')
- expect(response.body).to eq(user.gpg_keys.map(&:key).join("\n"))
+ it_behaves_like 'renders all verified GPG keys'
+ end
- expect(response.body).to include(gpg_key.key)
- expect(response.body).to include(another_gpg_key.key)
- end
+ context 'when revoked' do
+ shared_examples_for 'doesn\'t render revoked keys' do
+ it 'doesn\'t render revoked keys' do
+ get "/#{user.username}.gpg"
- it "responds with text/plain content type" do
- get :gpg_keys, params: { username: user.username }
+ expect(response.body).not_to eq('')
- expect(response.content_type).to eq("text/plain")
+ expect(response.body).to include(gpg_key.key)
+ expect(response.body).not_to include(another_gpg_key.key)
+ end
end
- end
- describe 'when revoked' do
before do
sign_in(user)
another_gpg_key.revoke
end
- it "doesn't render revoked keys" do
- get :gpg_keys, params: { username: user.username }
-
- expect(response.body).not_to eq('')
-
- expect(response.body).to include(gpg_key.key)
- expect(response.body).not_to include(another_gpg_key.key)
+ context 'while signed in' do
+ it_behaves_like 'doesn\'t render revoked keys'
end
- it "doesn't render revoked keys for non-authorized users" do
- sign_out(user)
- get :gpg_keys, params: { username: user.username }
-
- expect(response.body).not_to eq('')
+ context 'when logged out' do
+ before do
+ sign_out(user)
+ end
- expect(response.body).to include(gpg_key.key)
- expect(response.body).not_to include(another_gpg_key.key)
+ it_behaves_like 'doesn\'t render revoked keys'
end
end
end
@@ -458,7 +373,7 @@ RSpec.describe UsersController do
push_data = Gitlab::DataBuilder::Push.build_sample(project, public_user)
EventCreateService.new.push(project, public_user, push_data)
- get :calendar, params: { username: public_user.username }, format: :json
+ get user_calendar_url public_user.username, format: :json
expect(response).to have_gitlab_http_status(:ok)
end
@@ -469,7 +384,7 @@ RSpec.describe UsersController do
push_data = Gitlab::DataBuilder::Push.build_sample(project, private_user)
EventCreateService.new.push(project, private_user, push_data)
- get :calendar, params: { username: private_user.username }, format: :json
+ get user_calendar_url private_user.username, format: :json
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -494,7 +409,8 @@ RSpec.describe UsersController do
end
it 'includes forked projects' do
- get :calendar, params: { username: user.username }
+ get user_calendar_url user.username
+
expect(assigns(:contributions_calendar).projects.count).to eq(2)
end
end
@@ -513,9 +429,11 @@ RSpec.describe UsersController do
project.add_developer(user)
end
- it 'assigns @calendar_date' do
- get :calendar_activities, params: { username: user.username, date: '2014-07-31' }
- expect(assigns(:calendar_date)).to eq(Date.parse('2014-07-31'))
+ it 'renders activities on the specified day' do
+ get user_calendar_activities_url user.username, date: '2014-07-31'
+
+ expect(response.media_type).to eq('text/html')
+ expect(response.body).to include('Jul 31, 2014')
end
context 'for user' do
@@ -523,28 +441,26 @@ RSpec.describe UsersController do
let(:issue) { create(:issue, project: project, author: user) }
let(:note) { create(:note, noteable: issue, author: user, project: project) }
- render_views
-
before do
create_push_event
create_note_event
end
it 'renders calendar_activities' do
- get :calendar_activities, params: { username: public_user.username }
+ get user_calendar_activities_url public_user.username
- expect(assigns[:events]).not_to be_empty
+ expect(response.body).not_to be_empty
end
it 'avoids N+1 queries', :request_store do
- get :calendar_activities, params: { username: public_user.username }
+ get user_calendar_activities_url public_user.username
- control = ActiveRecord::QueryRecorder.new { get :calendar_activities, params: { username: public_user.username } }
+ control = ActiveRecord::QueryRecorder.new { get user_calendar_activities_url public_user.username }
create_push_event
create_note_event
- expect { get :calendar_activities, params: { username: public_user.username } }.not_to exceed_query_limit(control)
+ expect { get user_calendar_activities_url public_user.username }.not_to exceed_query_limit(control)
end
end
@@ -553,13 +469,14 @@ RSpec.describe UsersController do
push_data = Gitlab::DataBuilder::Push.build_sample(project, private_user)
EventCreateService.new.push(project, private_user, push_data)
- get :calendar_activities, params: { username: private_user.username }
+ get user_calendar_activities_url private_user.username
+
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'external authorization' do
- subject { get :calendar_activities, params: { username: user.username } }
+ subject { get user_calendar_activities_url user.username }
it_behaves_like 'disabled when using an external authorization service'
end
@@ -579,7 +496,7 @@ RSpec.describe UsersController do
let(:project) { create(:project, :public) }
subject do
- get :contributed, params: { username: author.username }, format: format
+ get user_contributed_projects_url author.username, format: format
end
before do
@@ -594,8 +511,8 @@ RSpec.describe UsersController do
shared_examples_for 'renders contributed projects' do
it 'renders contributed projects' do
- expect(assigns[:contributed_projects]).not_to be_empty
expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).not_to be_empty
end
end
@@ -630,7 +547,7 @@ RSpec.describe UsersController do
let(:project) { create(:project, :public) }
subject do
- get :starred, params: { username: author.username }, format: format
+ get user_starred_projects_url author.username, format: format
end
before do
@@ -643,7 +560,7 @@ RSpec.describe UsersController do
shared_examples_for 'renders starred projects' do
it 'renders starred projects' do
expect(response).to have_gitlab_http_status(:ok)
- expect(assigns[:starred_projects]).not_to be_empty
+ expect(response.body).not_to be_empty
end
end
@@ -681,7 +598,8 @@ RSpec.describe UsersController do
context 'format html' do
it 'renders snippets page' do
- get :snippets, params: { username: user.username }
+ get user_snippets_url user.username
+
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template('show')
end
@@ -689,14 +607,15 @@ RSpec.describe UsersController do
context 'format json' do
it 'response with snippets json data' do
- get :snippets, params: { username: user.username }, format: :json
+ get user_snippets_url user.username, format: :json
+
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to have_key('html')
end
end
context 'external authorization' do
- subject { get :snippets, params: { username: user.username } }
+ subject { get user_snippets_url user.username }
it_behaves_like 'disabled when using an external authorization service'
end
@@ -709,7 +628,7 @@ RSpec.describe UsersController do
context 'when user exists' do
it 'returns JSON indicating the user exists' do
- get :exists, params: { username: user.username }
+ get user_exists_url user.username
expected_json = { exists: true }.to_json
expect(response.body).to eq(expected_json)
@@ -719,7 +638,7 @@ RSpec.describe UsersController do
let(:user) { create(:user, username: 'CamelCaseUser') }
it 'returns JSON indicating the user exists' do
- get :exists, params: { username: user.username.downcase }
+ get user_exists_url user.username.downcase
expected_json = { exists: true }.to_json
expect(response.body).to eq(expected_json)
@@ -729,7 +648,7 @@ RSpec.describe UsersController do
context 'when the user does not exist' do
it 'returns JSON indicating the user does not exist' do
- get :exists, params: { username: 'foo' }
+ get user_exists_url 'foo'
expected_json = { exists: false }.to_json
expect(response.body).to eq(expected_json)
@@ -739,7 +658,7 @@ RSpec.describe UsersController do
let(:redirect_route) { user.namespace.redirect_routes.create(path: 'old-username') }
it 'returns JSON indicating a user by that username does not exist' do
- get :exists, params: { username: 'old-username' }
+ get user_exists_url 'old-username'
expected_json = { exists: false }.to_json
expect(response.body).to eq(expected_json)
@@ -751,7 +670,7 @@ RSpec.describe UsersController do
describe 'GET #suggests' do
context 'when user exists' do
it 'returns JSON indicating the user exists and a suggestion' do
- get :suggests, params: { username: user.username }
+ get user_suggests_url user.username
expected_json = { exists: true, suggests: ["#{user.username}1"] }.to_json
expect(response.body).to eq(expected_json)
@@ -761,7 +680,7 @@ RSpec.describe UsersController do
let(:user) { create(:user, username: 'CamelCaseUser') }
it 'returns JSON indicating the user exists and a suggestion' do
- get :suggests, params: { username: user.username.downcase }
+ get user_suggests_url user.username.downcase
expected_json = { exists: true, suggests: ["#{user.username.downcase}1"] }.to_json
expect(response.body).to eq(expected_json)
@@ -771,7 +690,7 @@ RSpec.describe UsersController do
context 'when the user does not exist' do
it 'returns JSON indicating the user does not exist' do
- get :suggests, params: { username: 'foo' }
+ get user_suggests_url 'foo'
expected_json = { exists: false, suggests: [] }.to_json
expect(response.body).to eq(expected_json)
@@ -781,7 +700,7 @@ RSpec.describe UsersController do
let(:redirect_route) { user.namespace.redirect_routes.create(path: 'old-username') }
it 'returns JSON indicating a user by that username does not exist' do
- get :suggests, params: { username: 'old-username' }
+ get user_suggests_url 'old-username'
expected_json = { exists: false, suggests: [] }.to_json
expect(response.body).to eq(expected_json)
@@ -802,7 +721,7 @@ RSpec.describe UsersController do
context 'with exactly matching casing' do
it 'responds with success' do
- get :show, params: { username: user.username }
+ get user_url user.username
expect(response).to be_successful
end
@@ -810,44 +729,39 @@ RSpec.describe UsersController do
context 'with different casing' do
it 'redirects to the correct casing' do
- get :show, params: { username: user.username.downcase }
+ get user_url user.username.downcase
expect(response).to redirect_to(user)
- expect(controller).not_to set_flash[:notice]
+ expect(flash[:notice]).to be_nil
end
end
end
- context 'when requesting a redirected path' do
- let(:redirect_route) { user.namespace.redirect_routes.create(path: 'old-path') }
-
+ shared_examples_for 'redirects to the canonical path' do
it 'redirects to the canonical path' do
- get :show, params: { username: redirect_route.path }
+ get user_url redirect_route.path
expect(response).to redirect_to(user)
- expect(controller).to set_flash[:notice].to(user_moved_message(redirect_route, user))
+ expect(flash[:notice]).to eq(user_moved_message(redirect_route, user))
end
+ end
+
+ context 'when requesting a redirected path' do
+ let(:redirect_route) { user.namespace.redirect_routes.create(path: 'old-path') }
+
+ it_behaves_like 'redirects to the canonical path'
context 'when the old path is a substring of the scheme or host' do
let(:redirect_route) { user.namespace.redirect_routes.create(path: 'http') }
- it 'does not modify the requested host' do
- get :show, params: { username: redirect_route.path }
-
- expect(response).to redirect_to(user)
- expect(controller).to set_flash[:notice].to(user_moved_message(redirect_route, user))
- end
+ # it does not modify the requested host and ...
+ it_behaves_like 'redirects to the canonical path'
end
context 'when the old path is substring of users' do
let(:redirect_route) { user.namespace.redirect_routes.create(path: 'ser') }
- it 'redirects to the canonical path' do
- get :show, params: { username: redirect_route.path }
-
- expect(response).to redirect_to(user)
- expect(controller).to set_flash[:notice].to(user_moved_message(redirect_route, user))
- end
+ it_behaves_like 'redirects to the canonical path'
end
end
end
@@ -858,7 +772,7 @@ RSpec.describe UsersController do
context 'with exactly matching casing' do
it 'responds with success' do
- get :projects, params: { username: user.username }
+ get user_projects_url user.username
expect(response).to be_successful
end
@@ -866,45 +780,41 @@ RSpec.describe UsersController do
context 'with different casing' do
it 'redirects to the correct casing' do
- get :projects, params: { username: user.username.downcase }
+ get user_projects_url user.username.downcase
expect(response).to redirect_to(user_projects_path(user))
- expect(controller).not_to set_flash[:notice]
+ expect(flash[:notice]).to be_nil
end
end
end
- context 'when requesting a redirected path' do
- let(:redirect_route) { user.namespace.redirect_routes.create(path: 'old-path') }
-
+ shared_examples_for 'redirects to the canonical path' do
it 'redirects to the canonical path' do
- get :projects, params: { username: redirect_route.path }
+ get user_projects_url redirect_route.path
expect(response).to redirect_to(user_projects_path(user))
- expect(controller).to set_flash[:notice].to(user_moved_message(redirect_route, user))
+ expect(flash[:notice]).to eq(user_moved_message(redirect_route, user))
end
+ end
+
+ context 'when requesting a redirected path' do
+ let(:redirect_route) { user.namespace.redirect_routes.create(path: 'old-path') }
+
+ it_behaves_like 'redirects to the canonical path'
context 'when the old path is a substring of the scheme or host' do
let(:redirect_route) { user.namespace.redirect_routes.create(path: 'http') }
- it 'does not modify the requested host' do
- get :projects, params: { username: redirect_route.path }
-
- expect(response).to redirect_to(user_projects_path(user))
- expect(controller).to set_flash[:notice].to(user_moved_message(redirect_route, user))
- end
+ # it does not modify the requested host and ...
+ it_behaves_like 'redirects to the canonical path'
end
context 'when the old path is substring of users' do
let(:redirect_route) { user.namespace.redirect_routes.create(path: 'ser') }
- # I.e. /users/ser should not become /ufoos/ser
- it 'does not modify the /users part of the path' do
- get :projects, params: { username: redirect_route.path }
-
- expect(response).to redirect_to(user_projects_path(user))
- expect(controller).to set_flash[:notice].to(user_moved_message(redirect_route, user))
- end
+ # it does not modify the /users part of the path
+ # (i.e. /users/ser should not become /ufoos/ser) and ...
+ it_behaves_like 'redirects to the canonical path'
end
end
end
@@ -912,11 +822,9 @@ RSpec.describe UsersController do
end
context 'token authentication' do
- it_behaves_like 'authenticates sessionless user', :show, :atom, public: true do
- before do
- default_params.merge!(username: user.username)
- end
- end
+ let(:url) { user_url(user.username, format: :atom) }
+
+ it_behaves_like 'authenticates sessionless user for the request spec', public: true
end
def user_moved_message(redirect_route, user)
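The users controller spec migration above follows one mechanical pattern: symbol-addressed controller actions become URL helpers, and assertions on controller internals (assigns, set_flash, render_views) become assertions on the real HTTP response. A condensed before/after sketch of that pattern, based on the activity JSON example in this file:

# Before (controller spec): drive the action by symbol and inspect assigns.
get :activity, params: { username: user.username }, format: :json
expect(assigns(:events)).not_to be_empty

# After (request spec): hit the real route and inspect the rendered response.
get user_activity_url(user.username, format: :json)
expect(response.media_type).to eq('application/json')
expect(Gitlab::Json.parse(response.body)['count']).to eq(1)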
diff --git a/spec/routing/notifications_routing_spec.rb b/spec/routing/notifications_routing_spec.rb
index 007e8ff4816..d66aa7f219f 100644
--- a/spec/routing/notifications_routing_spec.rb
+++ b/spec/routing/notifications_routing_spec.rb
@@ -4,15 +4,15 @@ require "spec_helper"
RSpec.describe "notifications routing" do
it "routes to #show" do
- expect(get("/profile/notifications")).to route_to("profiles/notifications#show")
+ expect(get("/-/profile/notifications")).to route_to("profiles/notifications#show")
end
it "routes to #update" do
- expect(put("/profile/notifications")).to route_to("profiles/notifications#update")
+ expect(put("/-/profile/notifications")).to route_to("profiles/notifications#update")
end
it 'routes to group #update' do
- expect(put("/profile/notifications/groups/gitlab-org")).to route_to("profiles/groups#update", id: 'gitlab-org')
- expect(put("/profile/notifications/groups/gitlab.org")).to route_to("profiles/groups#update", id: 'gitlab.org')
+ expect(put("/-/profile/notifications/groups/gitlab-org")).to route_to("profiles/groups#update", id: 'gitlab-org')
+ expect(put("/-/profile/notifications/groups/gitlab.org")).to route_to("profiles/groups#update", id: 'gitlab.org')
end
end
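The routing specs that follow lean repeatedly on a 'redirecting a legacy path' shared example. Its definition is not part of this diff; a plausible sketch of its shape, assuming it is a request-style shared example, is:

# Assumed sketch -- the actual shared example lives in spec/support and may differ.
RSpec.shared_examples 'redirecting a legacy path' do |source, target|
  include RSpec::Rails::RequestExampleGroup

  it "redirects #{source} to #{target}" do
    get source

    expect(response).to redirect_to(target)
  end
end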
diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb
index a683dc28f4f..29e5c1b4bae 100644
--- a/spec/routing/project_routing_spec.rb
+++ b/spec/routing/project_routing_spec.rb
@@ -106,8 +106,8 @@ RSpec.describe 'project routing' do
let(:base_path) { '/gitlab/gitlabhq/-/wikis' }
end
- it_behaves_like 'redirecting a legacy project path', "/gitlab/gitlabhq/wikis", "/gitlab/gitlabhq/-/wikis"
- it_behaves_like 'redirecting a legacy project path', "/gitlab/gitlabhq/wikis/home/edit", "/gitlab/gitlabhq/-/wikis/home/edit"
+ it_behaves_like 'redirecting a legacy path', "/gitlab/gitlabhq/wikis", "/gitlab/gitlabhq/-/wikis"
+ it_behaves_like 'redirecting a legacy path', "/gitlab/gitlabhq/wikis/home/edit", "/gitlab/gitlabhq/-/wikis/home/edit"
end
# branches_project_repository GET /:project_id/repository/branches(.:format) projects/repositories#branches
@@ -171,7 +171,7 @@ RSpec.describe 'project routing' do
expect(delete('/gitlab/gitlabhq/-/tags/feature@45/foo/bar/baz')).to route_to('projects/tags#destroy', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'feature@45/foo/bar/baz')
end
- it_behaves_like 'redirecting a legacy project path', "/gitlab/gitlabhq/tags", "/gitlab/gitlabhq/-/tags"
+ it_behaves_like 'redirecting a legacy path', "/gitlab/gitlabhq/tags", "/gitlab/gitlabhq/-/tags"
end
# project_deploy_keys GET /:project_id/deploy_keys(.:format) deploy_keys#index
@@ -259,8 +259,8 @@ RSpec.describe 'project routing' do
let(:base_path) { '/gitlab/gitlabhq/-/merge_requests' }
end
- it_behaves_like 'redirecting a legacy project path', "/gitlab/gitlabhq/merge_requests", "/gitlab/gitlabhq/-/merge_requests"
- it_behaves_like 'redirecting a legacy project path', "/gitlab/gitlabhq/merge_requests/1/diffs", "/gitlab/gitlabhq/-/merge_requests/1/diffs"
+ it_behaves_like 'redirecting a legacy path', "/gitlab/gitlabhq/merge_requests", "/gitlab/gitlabhq/-/merge_requests"
+ it_behaves_like 'redirecting a legacy path', "/gitlab/gitlabhq/merge_requests/1/diffs", "/gitlab/gitlabhq/-/merge_requests/1/diffs"
end
describe Projects::MergeRequests::CreationsController, 'routing' do
@@ -290,7 +290,7 @@ RSpec.describe 'project routing' do
expect(get('/gitlab/gitlabhq/-/merge_requests/new/diffs.json')).to route_to('projects/merge_requests/creations#diffs', namespace_id: 'gitlab', project_id: 'gitlabhq', format: 'json')
end
- it_behaves_like 'redirecting a legacy project path', "/gitlab/gitlabhq/merge_requests/new", "/gitlab/gitlabhq/-/merge_requests/new"
+ it_behaves_like 'redirecting a legacy path', "/gitlab/gitlabhq/merge_requests/new", "/gitlab/gitlabhq/-/merge_requests/new"
end
describe Projects::MergeRequests::DiffsController, 'routing' do
@@ -454,8 +454,8 @@ RSpec.describe 'project routing' do
let(:base_path) { '/gitlab/gitlabhq/-/issues' }
end
- it_behaves_like 'redirecting a legacy project path', "/gitlab/gitlabhq/issues", "/gitlab/gitlabhq/-/issues"
- it_behaves_like 'redirecting a legacy project path', "/gitlab/gitlabhq/issues/1/edit", "/gitlab/gitlabhq/-/issues/1/edit"
+ it_behaves_like 'redirecting a legacy path', "/gitlab/gitlabhq/issues", "/gitlab/gitlabhq/-/issues"
+ it_behaves_like 'redirecting a legacy path', "/gitlab/gitlabhq/issues/1/edit", "/gitlab/gitlabhq/-/issues/1/edit"
end
# project_noteable_notes GET /:project_id/noteable/:target_type/:target_id/notes notes#index
@@ -769,25 +769,25 @@ RSpec.describe 'project routing' do
describe Projects::EnvironmentsController, 'routing' do
describe 'legacy routing' do
- it_behaves_like 'redirecting a legacy project path', "/gitlab/gitlabhq/environments", "/gitlab/gitlabhq/-/environments"
+ it_behaves_like 'redirecting a legacy path', "/gitlab/gitlabhq/environments", "/gitlab/gitlabhq/-/environments"
end
end
describe Projects::ClustersController, 'routing' do
describe 'legacy routing' do
- it_behaves_like 'redirecting a legacy project path', "/gitlab/gitlabhq/clusters", "/gitlab/gitlabhq/-/clusters"
+ it_behaves_like 'redirecting a legacy path', "/gitlab/gitlabhq/clusters", "/gitlab/gitlabhq/-/clusters"
end
end
describe Projects::ErrorTrackingController, 'routing' do
describe 'legacy routing' do
- it_behaves_like 'redirecting a legacy project path', "/gitlab/gitlabhq/error_tracking", "/gitlab/gitlabhq/-/error_tracking"
+ it_behaves_like 'redirecting a legacy path', "/gitlab/gitlabhq/error_tracking", "/gitlab/gitlabhq/-/error_tracking"
end
end
describe Projects::Serverless, 'routing' do
describe 'legacy routing' do
- it_behaves_like 'redirecting a legacy project path', "/gitlab/gitlabhq/serverless", "/gitlab/gitlabhq/-/serverless"
+ it_behaves_like 'redirecting a legacy path', "/gitlab/gitlabhq/serverless", "/gitlab/gitlabhq/-/serverless"
end
end
diff --git a/spec/routing/routing_spec.rb b/spec/routing/routing_spec.rb
index 26ad1f14786..7b9ba783885 100644
--- a/spec/routing/routing_spec.rb
+++ b/spec/routing/routing_spec.rb
@@ -122,111 +122,115 @@ RSpec.describe HelpController, "routing" do
end
end
-# profile_account GET /profile/account(.:format) profile#account
-# profile_history GET /profile/history(.:format) profile#history
-# profile_password PUT /profile/password(.:format) profile#password_update
-# profile_token GET /profile/token(.:format) profile#token
-# profile GET /profile(.:format) profile#show
-# profile_update PUT /profile/update(.:format) profile#update
+# profile_account GET /-/profile/account(.:format) profile#account
+# profile_history GET /-/profile/history(.:format) profile#history
+# profile_password PUT /-/profile/password(.:format) profile#password_update
+# profile_token GET /-/profile/token(.:format) profile#token
+# profile GET /-/profile(.:format) profile#show
+# profile_update PUT /-/profile/update(.:format) profile#update
RSpec.describe ProfilesController, "routing" do
it "to #account" do
- expect(get("/profile/account")).to route_to('profiles/accounts#show')
+ expect(get("/-/profile/account")).to route_to('profiles/accounts#show')
end
+ it_behaves_like 'redirecting a legacy path', '/profile/account', '/-/profile/account'
it "to #audit_log" do
- expect(get("/profile/audit_log")).to route_to('profiles#audit_log')
+ expect(get("/-/profile/audit_log")).to route_to('profiles#audit_log')
end
+ it_behaves_like 'redirecting a legacy path', '/profile/audit_log', '/-/profile/audit_log'
it "to #reset_feed_token" do
- expect(put("/profile/reset_feed_token")).to route_to('profiles#reset_feed_token')
+ expect(put("/-/profile/reset_feed_token")).to route_to('profiles#reset_feed_token')
end
it "to #show" do
- expect(get("/profile")).to route_to('profiles#show')
- end
-
- it 'to #show from scope routing' do
expect(get("/-/profile")).to route_to('profiles#show')
end
+ it_behaves_like 'redirecting a legacy path', '/profile', '/-/profile'
end
-# profile_preferences GET /profile/preferences(.:format) profiles/preferences#show
-# PATCH /profile/preferences(.:format) profiles/preferences#update
-# PUT /profile/preferences(.:format) profiles/preferences#update
+# profile_preferences GET /-/profile/preferences(.:format) profiles/preferences#show
+# PATCH /-/profile/preferences(.:format) profiles/preferences#update
+# PUT /-/profile/preferences(.:format) profiles/preferences#update
RSpec.describe Profiles::PreferencesController, 'routing' do
it 'to #show' do
- expect(get('/profile/preferences')).to route_to('profiles/preferences#show')
+ expect(get('/-/profile/preferences')).to route_to('profiles/preferences#show')
end
+ it_behaves_like 'redirecting a legacy path', '/profile/preferences', '/-/profile/preferences'
it 'to #update' do
- expect(put('/profile/preferences')).to route_to('profiles/preferences#update')
- expect(patch('/profile/preferences')).to route_to('profiles/preferences#update')
+ expect(put('/-/profile/preferences')).to route_to('profiles/preferences#update')
+ expect(patch('/-/profile/preferences')).to route_to('profiles/preferences#update')
end
end
-# keys GET /keys(.:format) keys#index
-# POST /keys(.:format) keys#create
-# edit_key GET /keys/:id/edit(.:format) keys#edit
-# key GET /keys/:id(.:format) keys#show
-# PUT /keys/:id(.:format) keys#update
-# DELETE /keys/:id(.:format) keys#destroy
+# keys GET /-/profile/keys(.:format) keys#index
+# POST /-/profile/keys(.:format) keys#create
+# edit_key GET /-/profile/keys/:id/edit(.:format) keys#edit
+# key GET /-/profile/keys/:id(.:format) keys#show
+# PUT /-/profile/keys/:id(.:format) keys#update
+# DELETE /-/profile/keys/:id(.:format) keys#destroy
RSpec.describe Profiles::KeysController, "routing" do
it "to #index" do
- expect(get("/profile/keys")).to route_to('profiles/keys#index')
+ expect(get("/-/profile/keys")).to route_to('profiles/keys#index')
end
+ it_behaves_like 'redirecting a legacy path', '/profile/keys', '/-/profile/keys'
it "to #create" do
- expect(post("/profile/keys")).to route_to('profiles/keys#create')
+ expect(post("/-/profile/keys")).to route_to('profiles/keys#create')
end
it "to #show" do
- expect(get("/profile/keys/1")).to route_to('profiles/keys#show', id: '1')
+ expect(get("/-/profile/keys/1")).to route_to('profiles/keys#show', id: '1')
end
+ it_behaves_like 'redirecting a legacy path', '/profile/keys/1', '/-/profile/keys/1'
it "to #destroy" do
- expect(delete("/profile/keys/1")).to route_to('profiles/keys#destroy', id: '1')
+ expect(delete("/-/profile/keys/1")).to route_to('profiles/keys#destroy', id: '1')
end
end
-# keys GET /gpg_keys gpg_keys#index
-# key POST /gpg_keys gpg_keys#create
-# PUT /gpg_keys/:id gpg_keys#revoke
-# DELETE /gpg_keys/:id gpg_keys#desroy
+# keys GET /-/profile/gpg_keys gpg_keys#index
+# key POST /-/profile/gpg_keys gpg_keys#create
+# PUT /-/profile/gpg_keys/:id gpg_keys#revoke
+# DELETE /-/profile/gpg_keys/:id gpg_keys#destroy
RSpec.describe Profiles::GpgKeysController, "routing" do
it "to #index" do
- expect(get("/profile/gpg_keys")).to route_to('profiles/gpg_keys#index')
+ expect(get("/-/profile/gpg_keys")).to route_to('profiles/gpg_keys#index')
end
+ it_behaves_like 'redirecting a legacy path', '/profile/gpg_keys', '/-/profile/gpg_keys'
it "to #create" do
- expect(post("/profile/gpg_keys")).to route_to('profiles/gpg_keys#create')
+ expect(post("/-/profile/gpg_keys")).to route_to('profiles/gpg_keys#create')
end
it "to #destroy" do
- expect(delete("/profile/gpg_keys/1")).to route_to('profiles/gpg_keys#destroy', id: '1')
+ expect(delete("/-/profile/gpg_keys/1")).to route_to('profiles/gpg_keys#destroy', id: '1')
end
end
-# emails GET /emails(.:format) emails#index
-# POST /keys(.:format) emails#create
-# DELETE /keys/:id(.:format) keys#destroy
+# emails GET /-/profile/emails(.:format) emails#index
+# POST /-/profile/emails(.:format) emails#create
+# DELETE /-/profile/emails/:id(.:format) emails#destroy
RSpec.describe Profiles::EmailsController, "routing" do
it "to #index" do
- expect(get("/profile/emails")).to route_to('profiles/emails#index')
+ expect(get("/-/profile/emails")).to route_to('profiles/emails#index')
end
+ it_behaves_like 'redirecting a legacy path', '/profile/emails', '/-/profile/emails'
it "to #create" do
- expect(post("/profile/emails")).to route_to('profiles/emails#create')
+ expect(post("/-/profile/emails")).to route_to('profiles/emails#create')
end
it "to #destroy" do
- expect(delete("/profile/emails/1")).to route_to('profiles/emails#destroy', id: '1')
+ expect(delete("/-/profile/emails/1")).to route_to('profiles/emails#destroy', id: '1')
end
end
-# profile_avatar DELETE /profile/avatar(.:format) profiles/avatars#destroy
+# profile_avatar DELETE /-/profile/avatar(.:format) profiles/avatars#destroy
RSpec.describe Profiles::AvatarsController, "routing" do
it "to #destroy" do
- expect(delete("/profile/avatar")).to route_to('profiles/avatars#destroy')
+ expect(delete("/-/profile/avatar")).to route_to('profiles/avatars#destroy')
end
end
diff --git a/spec/rubocop/cop/active_record_association_reload_spec.rb b/spec/rubocop/cop/active_record_association_reload_spec.rb
index e8d46064b49..8dbe6daeeca 100644
--- a/spec/rubocop/cop/active_record_association_reload_spec.rb
+++ b/spec/rubocop/cop/active_record_association_reload_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../rubocop/cop/active_record_association_reload'
-RSpec.describe RuboCop::Cop::ActiveRecordAssociationReload, type: :rubocop do
+RSpec.describe RuboCop::Cop::ActiveRecordAssociationReload do
include CopHelper
subject(:cop) { described_class.new }
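The RuboCop spec changes here and below only drop the redundant "type: :rubocop" metadata; the cops are still exercised through CopHelper. A hypothetical usage sketch (the offending source line is illustrative, not taken from this diff):

# Assumed usage sketch: inspect_source is provided by CopHelper.
it 'flags reload on an association' do
  inspect_source('project.merge_requests.reload')

  expect(cop.offenses.size).to eq(1)
end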
diff --git a/spec/rubocop/cop/api/base_spec.rb b/spec/rubocop/cop/api/base_spec.rb
index 893bcf49627..de05ab93874 100644
--- a/spec/rubocop/cop/api/base_spec.rb
+++ b/spec/rubocop/cop/api/base_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/api/base'
-RSpec.describe RuboCop::Cop::API::Base, type: :rubocop do
+RSpec.describe RuboCop::Cop::API::Base do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/avoid_becomes_spec.rb b/spec/rubocop/cop/avoid_becomes_spec.rb
index 3e3e3abc27d..07cf374faf5 100644
--- a/spec/rubocop/cop/avoid_becomes_spec.rb
+++ b/spec/rubocop/cop/avoid_becomes_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/avoid_becomes'
-RSpec.describe RuboCop::Cop::AvoidBecomes, type: :rubocop do
+RSpec.describe RuboCop::Cop::AvoidBecomes do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/avoid_break_from_strong_memoize_spec.rb b/spec/rubocop/cop/avoid_break_from_strong_memoize_spec.rb
index 4fb47e758bb..3c3aa5b7b5c 100644
--- a/spec/rubocop/cop/avoid_break_from_strong_memoize_spec.rb
+++ b/spec/rubocop/cop/avoid_break_from_strong_memoize_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../rubocop/cop/avoid_break_from_strong_memoize'
-RSpec.describe RuboCop::Cop::AvoidBreakFromStrongMemoize, type: :rubocop do
+RSpec.describe RuboCop::Cop::AvoidBreakFromStrongMemoize do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/avoid_keyword_arguments_in_sidekiq_workers_spec.rb b/spec/rubocop/cop/avoid_keyword_arguments_in_sidekiq_workers_spec.rb
index 851493e004e..1e1fe851840 100644
--- a/spec/rubocop/cop/avoid_keyword_arguments_in_sidekiq_workers_spec.rb
+++ b/spec/rubocop/cop/avoid_keyword_arguments_in_sidekiq_workers_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/avoid_keyword_arguments_in_sidekiq_workers'
-RSpec.describe RuboCop::Cop::AvoidKeywordArgumentsInSidekiqWorkers, type: :rubocop do
+RSpec.describe RuboCop::Cop::AvoidKeywordArgumentsInSidekiqWorkers do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/avoid_return_from_blocks_spec.rb b/spec/rubocop/cop/avoid_return_from_blocks_spec.rb
index a157183646c..71311b9df7f 100644
--- a/spec/rubocop/cop/avoid_return_from_blocks_spec.rb
+++ b/spec/rubocop/cop/avoid_return_from_blocks_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../rubocop/cop/avoid_return_from_blocks'
-RSpec.describe RuboCop::Cop::AvoidReturnFromBlocks, type: :rubocop do
+RSpec.describe RuboCop::Cop::AvoidReturnFromBlocks do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/avoid_route_redirect_leading_slash_spec.rb b/spec/rubocop/cop/avoid_route_redirect_leading_slash_spec.rb
index 78bc859beda..9e13a5278e3 100644
--- a/spec/rubocop/cop/avoid_route_redirect_leading_slash_spec.rb
+++ b/spec/rubocop/cop/avoid_route_redirect_leading_slash_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../rubocop/cop/avoid_route_redirect_leading_slash'
-RSpec.describe RuboCop::Cop::AvoidRouteRedirectLeadingSlash, type: :rubocop do
+RSpec.describe RuboCop::Cop::AvoidRouteRedirectLeadingSlash do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/ban_catch_throw_spec.rb b/spec/rubocop/cop/ban_catch_throw_spec.rb
index 1d0ccb6d262..4f669bad4af 100644
--- a/spec/rubocop/cop/ban_catch_throw_spec.rb
+++ b/spec/rubocop/cop/ban_catch_throw_spec.rb
@@ -7,7 +7,7 @@ require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/ban_catch_throw'
-RSpec.describe RuboCop::Cop::BanCatchThrow, type: :rubocop do
+RSpec.describe RuboCop::Cop::BanCatchThrow do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/code_reuse/finder_spec.rb b/spec/rubocop/cop/code_reuse/finder_spec.rb
index 1935d825b19..6f04d5e0d60 100644
--- a/spec/rubocop/cop/code_reuse/finder_spec.rb
+++ b/spec/rubocop/cop/code_reuse/finder_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/code_reuse/finder'
-RSpec.describe RuboCop::Cop::CodeReuse::Finder, type: :rubocop do
+RSpec.describe RuboCop::Cop::CodeReuse::Finder do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/code_reuse/presenter_spec.rb b/spec/rubocop/cop/code_reuse/presenter_spec.rb
index 1366018ec12..8efd4da8aa1 100644
--- a/spec/rubocop/cop/code_reuse/presenter_spec.rb
+++ b/spec/rubocop/cop/code_reuse/presenter_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/code_reuse/presenter'
-RSpec.describe RuboCop::Cop::CodeReuse::Presenter, type: :rubocop do
+RSpec.describe RuboCop::Cop::CodeReuse::Presenter do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/code_reuse/serializer_spec.rb b/spec/rubocop/cop/code_reuse/serializer_spec.rb
index d4341cc0367..74999df5859 100644
--- a/spec/rubocop/cop/code_reuse/serializer_spec.rb
+++ b/spec/rubocop/cop/code_reuse/serializer_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/code_reuse/serializer'
-RSpec.describe RuboCop::Cop::CodeReuse::Serializer, type: :rubocop do
+RSpec.describe RuboCop::Cop::CodeReuse::Serializer do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/code_reuse/service_class_spec.rb b/spec/rubocop/cop/code_reuse/service_class_spec.rb
index b018e743230..4870daf72dc 100644
--- a/spec/rubocop/cop/code_reuse/service_class_spec.rb
+++ b/spec/rubocop/cop/code_reuse/service_class_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/code_reuse/service_class'
-RSpec.describe RuboCop::Cop::CodeReuse::ServiceClass, type: :rubocop do
+RSpec.describe RuboCop::Cop::CodeReuse::ServiceClass do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/code_reuse/worker_spec.rb b/spec/rubocop/cop/code_reuse/worker_spec.rb
index 1f502e554c4..9e015f286d8 100644
--- a/spec/rubocop/cop/code_reuse/worker_spec.rb
+++ b/spec/rubocop/cop/code_reuse/worker_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/code_reuse/worker'
-RSpec.describe RuboCop::Cop::CodeReuse::Worker, type: :rubocop do
+RSpec.describe RuboCop::Cop::CodeReuse::Worker do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/default_scope_spec.rb b/spec/rubocop/cop/default_scope_spec.rb
index 617a7f63497..fee1895603c 100644
--- a/spec/rubocop/cop/default_scope_spec.rb
+++ b/spec/rubocop/cop/default_scope_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/default_scope'
-RSpec.describe RuboCop::Cop::DefaultScope, type: :rubocop do
+RSpec.describe RuboCop::Cop::DefaultScope do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/destroy_all_spec.rb b/spec/rubocop/cop/destroy_all_spec.rb
index 3220d44ea2b..df664724a91 100644
--- a/spec/rubocop/cop/destroy_all_spec.rb
+++ b/spec/rubocop/cop/destroy_all_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/destroy_all'
-RSpec.describe RuboCop::Cop::DestroyAll, type: :rubocop do
+RSpec.describe RuboCop::Cop::DestroyAll do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/filename_length_spec.rb b/spec/rubocop/cop/filename_length_spec.rb
index 381218c61ed..2411c8dbc7b 100644
--- a/spec/rubocop/cop/filename_length_spec.rb
+++ b/spec/rubocop/cop/filename_length_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/filename_length'
-RSpec.describe RuboCop::Cop::FilenameLength, type: :rubocop do
+RSpec.describe RuboCop::Cop::FilenameLength do
subject(:cop) { described_class.new }
it 'does not flag files with names 100 characters long' do
diff --git a/spec/rubocop/cop/gitlab/avoid_uploaded_file_from_params_spec.rb b/spec/rubocop/cop/gitlab/avoid_uploaded_file_from_params_spec.rb
index 8341b0cab3a..2db03898e01 100644
--- a/spec/rubocop/cop/gitlab/avoid_uploaded_file_from_params_spec.rb
+++ b/spec/rubocop/cop/gitlab/avoid_uploaded_file_from_params_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/avoid_uploaded_file_from_params'
-RSpec.describe RuboCop::Cop::Gitlab::AvoidUploadedFileFromParams, type: :rubocop do
+RSpec.describe RuboCop::Cop::Gitlab::AvoidUploadedFileFromParams do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/gitlab/bulk_insert_spec.rb b/spec/rubocop/cop/gitlab/bulk_insert_spec.rb
index d1236865897..ad7e685e505 100644
--- a/spec/rubocop/cop/gitlab/bulk_insert_spec.rb
+++ b/spec/rubocop/cop/gitlab/bulk_insert_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/bulk_insert'
-RSpec.describe RuboCop::Cop::Gitlab::BulkInsert, type: :rubocop do
+RSpec.describe RuboCop::Cop::Gitlab::BulkInsert do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/gitlab/change_timezone_spec.rb b/spec/rubocop/cop/gitlab/change_timezone_spec.rb
index 1ec5f28e811..6abbc06bb1a 100644
--- a/spec/rubocop/cop/gitlab/change_timezone_spec.rb
+++ b/spec/rubocop/cop/gitlab/change_timezone_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/change_timzone'
-RSpec.describe RuboCop::Cop::Gitlab::ChangeTimezone, type: :rubocop do
+RSpec.describe RuboCop::Cop::Gitlab::ChangeTimezone do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/gitlab/const_get_inherit_false_spec.rb b/spec/rubocop/cop/gitlab/const_get_inherit_false_spec.rb
index 97ed6b743f9..bed06ab2b17 100644
--- a/spec/rubocop/cop/gitlab/const_get_inherit_false_spec.rb
+++ b/spec/rubocop/cop/gitlab/const_get_inherit_false_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/const_get_inherit_false'
-RSpec.describe RuboCop::Cop::Gitlab::ConstGetInheritFalse, type: :rubocop do
+RSpec.describe RuboCop::Cop::Gitlab::ConstGetInheritFalse do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/gitlab/duplicate_spec_location_spec.rb b/spec/rubocop/cop/gitlab/duplicate_spec_location_spec.rb
index 1ed21e44290..5804b03b641 100644
--- a/spec/rubocop/cop/gitlab/duplicate_spec_location_spec.rb
+++ b/spec/rubocop/cop/gitlab/duplicate_spec_location_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require_relative '../../../../rubocop/cop/gitlab/duplicate_spec_location'
-RSpec.describe RuboCop::Cop::Gitlab::DuplicateSpecLocation, type: :rubocop do
+RSpec.describe RuboCop::Cop::Gitlab::DuplicateSpecLocation do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/gitlab/except_spec.rb b/spec/rubocop/cop/gitlab/except_spec.rb
index 50277d15a57..173e5943da5 100644
--- a/spec/rubocop/cop/gitlab/except_spec.rb
+++ b/spec/rubocop/cop/gitlab/except_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/except'
-RSpec.describe RuboCop::Cop::Gitlab::Except, type: :rubocop do
+RSpec.describe RuboCop::Cop::Gitlab::Except do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/gitlab/finder_with_find_by_spec.rb b/spec/rubocop/cop/gitlab/finder_with_find_by_spec.rb
index a47625d5dc1..db3bcf1dfdb 100644
--- a/spec/rubocop/cop/gitlab/finder_with_find_by_spec.rb
+++ b/spec/rubocop/cop/gitlab/finder_with_find_by_spec.rb
@@ -7,7 +7,7 @@ require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/finder_with_find_by'
-RSpec.describe RuboCop::Cop::Gitlab::FinderWithFindBy, type: :rubocop do
+RSpec.describe RuboCop::Cop::Gitlab::FinderWithFindBy do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/gitlab/httparty_spec.rb b/spec/rubocop/cop/gitlab/httparty_spec.rb
index 379365096ba..b112ac84bff 100644
--- a/spec/rubocop/cop/gitlab/httparty_spec.rb
+++ b/spec/rubocop/cop/gitlab/httparty_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/httparty'
-RSpec.describe RuboCop::Cop::Gitlab::HTTParty, type: :rubocop do # rubocop:disable RSpec/FilePath
+RSpec.describe RuboCop::Cop::Gitlab::HTTParty do # rubocop:disable RSpec/FilePath
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/gitlab/intersect_spec.rb b/spec/rubocop/cop/gitlab/intersect_spec.rb
index 351033d0ed2..e724f47029c 100644
--- a/spec/rubocop/cop/gitlab/intersect_spec.rb
+++ b/spec/rubocop/cop/gitlab/intersect_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/intersect'
-RSpec.describe RuboCop::Cop::Gitlab::Intersect, type: :rubocop do
+RSpec.describe RuboCop::Cop::Gitlab::Intersect do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/gitlab/json_spec.rb b/spec/rubocop/cop/gitlab/json_spec.rb
index 6f5ec07ffb1..fc25f69a244 100644
--- a/spec/rubocop/cop/gitlab/json_spec.rb
+++ b/spec/rubocop/cop/gitlab/json_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/json'
-RSpec.describe RuboCop::Cop::Gitlab::Json, type: :rubocop do
+RSpec.describe RuboCop::Cop::Gitlab::Json do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/gitlab/module_with_instance_variables_spec.rb b/spec/rubocop/cop/gitlab/module_with_instance_variables_spec.rb
index 3d22201c92e..1d09c720bf7 100644
--- a/spec/rubocop/cop/gitlab/module_with_instance_variables_spec.rb
+++ b/spec/rubocop/cop/gitlab/module_with_instance_variables_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/module_with_instance_variables'
-RSpec.describe RuboCop::Cop::Gitlab::ModuleWithInstanceVariables, type: :rubocop do
+RSpec.describe RuboCop::Cop::Gitlab::ModuleWithInstanceVariables do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/gitlab/policy_rule_boolean_spec.rb b/spec/rubocop/cop/gitlab/policy_rule_boolean_spec.rb
index 6221d038512..e6fb9ab9d57 100644
--- a/spec/rubocop/cop/gitlab/policy_rule_boolean_spec.rb
+++ b/spec/rubocop/cop/gitlab/policy_rule_boolean_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/policy_rule_boolean'
-RSpec.describe RuboCop::Cop::Gitlab::PolicyRuleBoolean, type: :rubocop do
+RSpec.describe RuboCop::Cop::Gitlab::PolicyRuleBoolean do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/gitlab/predicate_memoization_spec.rb b/spec/rubocop/cop/gitlab/predicate_memoization_spec.rb
index ebe984b189d..322c7c82968 100644
--- a/spec/rubocop/cop/gitlab/predicate_memoization_spec.rb
+++ b/spec/rubocop/cop/gitlab/predicate_memoization_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/predicate_memoization'
-RSpec.describe RuboCop::Cop::Gitlab::PredicateMemoization, type: :rubocop do
+RSpec.describe RuboCop::Cop::Gitlab::PredicateMemoization do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/gitlab/rails_logger_spec.rb b/spec/rubocop/cop/gitlab/rails_logger_spec.rb
index 70d208b31ec..768da243b02 100644
--- a/spec/rubocop/cop/gitlab/rails_logger_spec.rb
+++ b/spec/rubocop/cop/gitlab/rails_logger_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/rails_logger'
-RSpec.describe RuboCop::Cop::Gitlab::RailsLogger, type: :rubocop do
+RSpec.describe RuboCop::Cop::Gitlab::RailsLogger do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/gitlab/union_spec.rb b/spec/rubocop/cop/gitlab/union_spec.rb
index 571dcc4eeb4..20364b1b901 100644
--- a/spec/rubocop/cop/gitlab/union_spec.rb
+++ b/spec/rubocop/cop/gitlab/union_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/union'
-RSpec.describe RuboCop::Cop::Gitlab::Union, type: :rubocop do
+RSpec.describe RuboCop::Cop::Gitlab::Union do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/graphql/authorize_types_spec.rb b/spec/rubocop/cop/graphql/authorize_types_spec.rb
index df637a26ec5..a1b7a3f3a9b 100644
--- a/spec/rubocop/cop/graphql/authorize_types_spec.rb
+++ b/spec/rubocop/cop/graphql/authorize_types_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require_relative '../../../../rubocop/cop/graphql/authorize_types'
-RSpec.describe RuboCop::Cop::Graphql::AuthorizeTypes, type: :rubocop do
+RSpec.describe RuboCop::Cop::Graphql::AuthorizeTypes do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/graphql/descriptions_spec.rb b/spec/rubocop/cop/graphql/descriptions_spec.rb
index f4693057bcb..b44205b0920 100644
--- a/spec/rubocop/cop/graphql/descriptions_spec.rb
+++ b/spec/rubocop/cop/graphql/descriptions_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/graphql/descriptions'
-RSpec.describe RuboCop::Cop::Graphql::Descriptions, type: :rubocop do
+RSpec.describe RuboCop::Cop::Graphql::Descriptions do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/graphql/gid_expected_type_spec.rb b/spec/rubocop/cop/graphql/gid_expected_type_spec.rb
index a81af2aea5d..8fd7ae03748 100644
--- a/spec/rubocop/cop/graphql/gid_expected_type_spec.rb
+++ b/spec/rubocop/cop/graphql/gid_expected_type_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require_relative '../../../../rubocop/cop/graphql/gid_expected_type'
-RSpec.describe RuboCop::Cop::Graphql::GIDExpectedType, type: :rubocop do
+RSpec.describe RuboCop::Cop::Graphql::GIDExpectedType do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/graphql/id_type_spec.rb b/spec/rubocop/cop/graphql/id_type_spec.rb
index 8767412e282..6135c9fef43 100644
--- a/spec/rubocop/cop/graphql/id_type_spec.rb
+++ b/spec/rubocop/cop/graphql/id_type_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require_relative '../../../../rubocop/cop/graphql/id_type'
-RSpec.describe RuboCop::Cop::Graphql::IDType, type: :rubocop do
+RSpec.describe RuboCop::Cop::Graphql::IDType do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/graphql/json_type_spec.rb b/spec/rubocop/cop/graphql/json_type_spec.rb
index ac25e0feb69..6d9f86e44d2 100644
--- a/spec/rubocop/cop/graphql/json_type_spec.rb
+++ b/spec/rubocop/cop/graphql/json_type_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/graphql/json_type'
-RSpec.describe RuboCop::Cop::Graphql::JSONType, type: :rubocop do
+RSpec.describe RuboCop::Cop::Graphql::JSONType do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/graphql/resolver_type_spec.rb b/spec/rubocop/cop/graphql/resolver_type_spec.rb
index 4807d66396a..25213e30528 100644
--- a/spec/rubocop/cop/graphql/resolver_type_spec.rb
+++ b/spec/rubocop/cop/graphql/resolver_type_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require_relative '../../../../rubocop/cop/graphql/resolver_type'
-RSpec.describe RuboCop::Cop::Graphql::ResolverType, type: :rubocop do
+RSpec.describe RuboCop::Cop::Graphql::ResolverType do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/group_public_or_visible_to_user_spec.rb b/spec/rubocop/cop/group_public_or_visible_to_user_spec.rb
index 4e725deaafd..ac6c481a7c3 100644
--- a/spec/rubocop/cop/group_public_or_visible_to_user_spec.rb
+++ b/spec/rubocop/cop/group_public_or_visible_to_user_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/group_public_or_visible_to_user'
-RSpec.describe RuboCop::Cop::GroupPublicOrVisibleToUser, type: :rubocop do
+RSpec.describe RuboCop::Cop::GroupPublicOrVisibleToUser do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/ignored_columns_spec.rb b/spec/rubocop/cop/ignored_columns_spec.rb
index ec5499bbdff..38b4ac0bc1a 100644
--- a/spec/rubocop/cop/ignored_columns_spec.rb
+++ b/spec/rubocop/cop/ignored_columns_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/ignored_columns'
-RSpec.describe RuboCop::Cop::IgnoredColumns, type: :rubocop do
+RSpec.describe RuboCop::Cop::IgnoredColumns do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/include_sidekiq_worker_spec.rb b/spec/rubocop/cop/include_sidekiq_worker_spec.rb
index 33737babee5..f12652a1a58 100644
--- a/spec/rubocop/cop/include_sidekiq_worker_spec.rb
+++ b/spec/rubocop/cop/include_sidekiq_worker_spec.rb
@@ -7,7 +7,7 @@ require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/include_sidekiq_worker'
-RSpec.describe RuboCop::Cop::IncludeSidekiqWorker, type: :rubocop do
+RSpec.describe RuboCop::Cop::IncludeSidekiqWorker do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/inject_enterprise_edition_module_spec.rb b/spec/rubocop/cop/inject_enterprise_edition_module_spec.rb
index 767ed994542..47247006e42 100644
--- a/spec/rubocop/cop/inject_enterprise_edition_module_spec.rb
+++ b/spec/rubocop/cop/inject_enterprise_edition_module_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/inject_enterprise_edition_module'
-RSpec.describe RuboCop::Cop::InjectEnterpriseEditionModule, type: :rubocop do
+RSpec.describe RuboCop::Cop::InjectEnterpriseEditionModule do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/lint/last_keyword_argument_spec.rb b/spec/rubocop/cop/lint/last_keyword_argument_spec.rb
index 5822bf74e8d..826c681a880 100644
--- a/spec/rubocop/cop/lint/last_keyword_argument_spec.rb
+++ b/spec/rubocop/cop/lint/last_keyword_argument_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/lint/last_keyword_argument'
-RSpec.describe RuboCop::Cop::Lint::LastKeywordArgument, type: :rubocop do
+RSpec.describe RuboCop::Cop::Lint::LastKeywordArgument do
include CopHelper
subject(:cop) { described_class.new }
@@ -38,6 +38,8 @@ RSpec.describe RuboCop::Cop::Lint::LastKeywordArgument, type: :rubocop do
- |
DEPRECATION WARNING: /Users/tkuah/code/ee-gdk/gitlab/create_service.rb:1: warning: Using the last argument as keyword parameters is deprecated; maybe ** should be added to the call
/Users/tkuah/code/ee-gdk/gitlab/user.rb:17: warning: The called method `call' is defined here
+ - |
+ DEPRECATION WARNING: /Users/tkuah/code/ee-gdk/gitlab/other_warning_type.rb:1: warning: Some other warning type
YAML
end
@@ -62,7 +64,7 @@ RSpec.describe RuboCop::Cop::Lint::LastKeywordArgument, type: :rubocop do
allow(File).to receive(:read).and_return(create_spec_yaml, projects_spec_yaml)
end
- it 'registers an offense' do
+ it 'registers an offense for last keyword warning' do
expect_offense(<<~SOURCE, 'create_service.rb')
users.call(params)
^^^^^^ Using the last argument as keyword parameters is deprecated
@@ -73,6 +75,12 @@ RSpec.describe RuboCop::Cop::Lint::LastKeywordArgument, type: :rubocop do
SOURCE
end
+ it 'does not register an offense for other warning types' do
+ expect_no_offenses(<<~SOURCE, 'other_warning_type.rb')
+ users.call(params)
+ SOURCE
+ end
+
it 'registers an offense for the new method call' do
expect_offense(<<~SOURCE, 'projects_spec.rb')
Project.new(params)
@@ -95,6 +103,23 @@ RSpec.describe RuboCop::Cop::Lint::LastKeywordArgument, type: :rubocop do
SOURCE
end
+ it 'registers an offense on the last non-block argument' do
+ expect_offense(<<~SOURCE, 'create_service.rb')
+ users.call(id, params, &block)
+ ^^^^^^ Using the last argument as keyword parameters is deprecated
+ SOURCE
+
+ expect_correction(<<~SOURCE)
+ users.call(id, **params, &block)
+ SOURCE
+ end
+
+ it 'does not register an offense if the only argument is a block argument' do
+ expect_no_offenses(<<~SOURCE, 'create_service.rb')
+ users.call(&block)
+ SOURCE
+ end
+
it 'registers an offense and corrects by converting splat to double splat' do
expect_offense(<<~SOURCE, 'create_service.rb')
users.call(id, *params)
diff --git a/spec/rubocop/cop/migration/add_column_with_default_spec.rb b/spec/rubocop/cop/migration/add_column_with_default_spec.rb
index 6deb092f235..cf476ae55d6 100644
--- a/spec/rubocop/cop/migration/add_column_with_default_spec.rb
+++ b/spec/rubocop/cop/migration/add_column_with_default_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/migration/add_column_with_default'
-RSpec.describe RuboCop::Cop::Migration::AddColumnWithDefault, type: :rubocop do
+RSpec.describe RuboCop::Cop::Migration::AddColumnWithDefault do
include CopHelper
let(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/migration/add_columns_to_wide_tables_spec.rb b/spec/rubocop/cop/migration/add_columns_to_wide_tables_spec.rb
index 6ae4fb21126..92863c45b1a 100644
--- a/spec/rubocop/cop/migration/add_columns_to_wide_tables_spec.rb
+++ b/spec/rubocop/cop/migration/add_columns_to_wide_tables_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/migration/add_columns_to_wide_tables'
-RSpec.describe RuboCop::Cop::Migration::AddColumnsToWideTables, type: :rubocop do
+RSpec.describe RuboCop::Cop::Migration::AddColumnsToWideTables do
include CopHelper
let(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/migration/add_concurrent_foreign_key_spec.rb b/spec/rubocop/cop/migration/add_concurrent_foreign_key_spec.rb
index aaf191a1b6b..25350ad1ecb 100644
--- a/spec/rubocop/cop/migration/add_concurrent_foreign_key_spec.rb
+++ b/spec/rubocop/cop/migration/add_concurrent_foreign_key_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/migration/add_concurrent_foreign_key'
-RSpec.describe RuboCop::Cop::Migration::AddConcurrentForeignKey, type: :rubocop do
+RSpec.describe RuboCop::Cop::Migration::AddConcurrentForeignKey do
include CopHelper
let(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/migration/add_concurrent_index_spec.rb b/spec/rubocop/cop/migration/add_concurrent_index_spec.rb
index cef5295830c..351283a230a 100644
--- a/spec/rubocop/cop/migration/add_concurrent_index_spec.rb
+++ b/spec/rubocop/cop/migration/add_concurrent_index_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/migration/add_concurrent_index'
-RSpec.describe RuboCop::Cop::Migration::AddConcurrentIndex, type: :rubocop do
+RSpec.describe RuboCop::Cop::Migration::AddConcurrentIndex do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/migration/add_index_spec.rb b/spec/rubocop/cop/migration/add_index_spec.rb
index 6bb78a7f3c7..1d083e9f2d2 100644
--- a/spec/rubocop/cop/migration/add_index_spec.rb
+++ b/spec/rubocop/cop/migration/add_index_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/migration/add_index'
-RSpec.describe RuboCop::Cop::Migration::AddIndex, type: :rubocop do
+RSpec.describe RuboCop::Cop::Migration::AddIndex do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb b/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb
index 0bea7bd7a0c..97b9d0d1ee2 100644
--- a/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb
+++ b/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/migration/add_limit_to_text_columns'
-RSpec.describe RuboCop::Cop::Migration::AddLimitToTextColumns, type: :rubocop do
+RSpec.describe RuboCop::Cop::Migration::AddLimitToTextColumns do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/migration/add_reference_spec.rb b/spec/rubocop/cop/migration/add_reference_spec.rb
index fab70d74b55..6e229d3eefc 100644
--- a/spec/rubocop/cop/migration/add_reference_spec.rb
+++ b/spec/rubocop/cop/migration/add_reference_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/migration/add_reference'
-RSpec.describe RuboCop::Cop::Migration::AddReference, type: :rubocop do
+RSpec.describe RuboCop::Cop::Migration::AddReference do
include CopHelper
let(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/migration/add_timestamps_spec.rb b/spec/rubocop/cop/migration/add_timestamps_spec.rb
index c18b6b06ded..83570711ab9 100644
--- a/spec/rubocop/cop/migration/add_timestamps_spec.rb
+++ b/spec/rubocop/cop/migration/add_timestamps_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/migration/add_timestamps'
-RSpec.describe RuboCop::Cop::Migration::AddTimestamps, type: :rubocop do
+RSpec.describe RuboCop::Cop::Migration::AddTimestamps do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/migration/complex_indexes_require_name_spec.rb b/spec/rubocop/cop/migration/complex_indexes_require_name_spec.rb
index b769109057e..38ccf546b7c 100644
--- a/spec/rubocop/cop/migration/complex_indexes_require_name_spec.rb
+++ b/spec/rubocop/cop/migration/complex_indexes_require_name_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/migration/complex_indexes_require_name'
-RSpec.describe RuboCop::Cop::Migration::ComplexIndexesRequireName, type: :rubocop do
+RSpec.describe RuboCop::Cop::Migration::ComplexIndexesRequireName do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/migration/create_table_with_foreign_keys_spec.rb b/spec/rubocop/cop/migration/create_table_with_foreign_keys_spec.rb
index eaaa50b8190..2159bad1490 100644
--- a/spec/rubocop/cop/migration/create_table_with_foreign_keys_spec.rb
+++ b/spec/rubocop/cop/migration/create_table_with_foreign_keys_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/migration/create_table_with_foreign_keys'
-RSpec.describe RuboCop::Cop::Migration::CreateTableWithForeignKeys, type: :rubocop do
+RSpec.describe RuboCop::Cop::Migration::CreateTableWithForeignKeys do
include CopHelper
let(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/migration/datetime_spec.rb b/spec/rubocop/cop/migration/datetime_spec.rb
index c5f6fd503ec..a3cccae21e0 100644
--- a/spec/rubocop/cop/migration/datetime_spec.rb
+++ b/spec/rubocop/cop/migration/datetime_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/migration/datetime'
-RSpec.describe RuboCop::Cop::Migration::Datetime, type: :rubocop do
+RSpec.describe RuboCop::Cop::Migration::Datetime do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/migration/drop_table_spec.rb b/spec/rubocop/cop/migration/drop_table_spec.rb
index 9ce5ee45b08..d783cb56203 100644
--- a/spec/rubocop/cop/migration/drop_table_spec.rb
+++ b/spec/rubocop/cop/migration/drop_table_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/migration/drop_table'
-RSpec.describe RuboCop::Cop::Migration::DropTable, type: :rubocop do
+RSpec.describe RuboCop::Cop::Migration::DropTable do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/migration/hash_index_spec.rb b/spec/rubocop/cop/migration/hash_index_spec.rb
index 3d26ea41d08..15f68eb990f 100644
--- a/spec/rubocop/cop/migration/hash_index_spec.rb
+++ b/spec/rubocop/cop/migration/hash_index_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/migration/hash_index'
-RSpec.describe RuboCop::Cop::Migration::HashIndex, type: :rubocop do
+RSpec.describe RuboCop::Cop::Migration::HashIndex do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/migration/prevent_strings_spec.rb b/spec/rubocop/cop/migration/prevent_strings_spec.rb
index 6882dca1926..560a485017a 100644
--- a/spec/rubocop/cop/migration/prevent_strings_spec.rb
+++ b/spec/rubocop/cop/migration/prevent_strings_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/migration/prevent_strings'
-RSpec.describe RuboCop::Cop::Migration::PreventStrings, type: :rubocop do
+RSpec.describe RuboCop::Cop::Migration::PreventStrings do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/migration/refer_to_index_by_name_spec.rb b/spec/rubocop/cop/migration/refer_to_index_by_name_spec.rb
index 76554d7446c..a25328a56a8 100644
--- a/spec/rubocop/cop/migration/refer_to_index_by_name_spec.rb
+++ b/spec/rubocop/cop/migration/refer_to_index_by_name_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/migration/refer_to_index_by_name'
-RSpec.describe RuboCop::Cop::Migration::ReferToIndexByName, type: :rubocop do
+RSpec.describe RuboCop::Cop::Migration::ReferToIndexByName do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/migration/remove_column_spec.rb b/spec/rubocop/cop/migration/remove_column_spec.rb
index 7ef5556d8d7..4768093b10d 100644
--- a/spec/rubocop/cop/migration/remove_column_spec.rb
+++ b/spec/rubocop/cop/migration/remove_column_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/migration/remove_column'
-RSpec.describe RuboCop::Cop::Migration::RemoveColumn, type: :rubocop do
+RSpec.describe RuboCop::Cop::Migration::RemoveColumn do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/migration/remove_concurrent_index_spec.rb b/spec/rubocop/cop/migration/remove_concurrent_index_spec.rb
index f70febb571d..8da368d588c 100644
--- a/spec/rubocop/cop/migration/remove_concurrent_index_spec.rb
+++ b/spec/rubocop/cop/migration/remove_concurrent_index_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/migration/remove_concurrent_index'
-RSpec.describe RuboCop::Cop::Migration::RemoveConcurrentIndex, type: :rubocop do
+RSpec.describe RuboCop::Cop::Migration::RemoveConcurrentIndex do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/migration/remove_index_spec.rb b/spec/rubocop/cop/migration/remove_index_spec.rb
index cc82306a0f5..274c907ac41 100644
--- a/spec/rubocop/cop/migration/remove_index_spec.rb
+++ b/spec/rubocop/cop/migration/remove_index_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/migration/remove_index'
-RSpec.describe RuboCop::Cop::Migration::RemoveIndex, type: :rubocop do
+RSpec.describe RuboCop::Cop::Migration::RemoveIndex do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/migration/safer_boolean_column_spec.rb b/spec/rubocop/cop/migration/safer_boolean_column_spec.rb
index 72b817fde12..aa7bb58ab45 100644
--- a/spec/rubocop/cop/migration/safer_boolean_column_spec.rb
+++ b/spec/rubocop/cop/migration/safer_boolean_column_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/migration/safer_boolean_column'
-RSpec.describe RuboCop::Cop::Migration::SaferBooleanColumn, type: :rubocop do
+RSpec.describe RuboCop::Cop::Migration::SaferBooleanColumn do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/migration/timestamps_spec.rb b/spec/rubocop/cop/migration/timestamps_spec.rb
index 14b3cb36cf8..2f4154907d2 100644
--- a/spec/rubocop/cop/migration/timestamps_spec.rb
+++ b/spec/rubocop/cop/migration/timestamps_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/migration/timestamps'
-RSpec.describe RuboCop::Cop::Migration::Timestamps, type: :rubocop do
+RSpec.describe RuboCop::Cop::Migration::Timestamps do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/migration/update_column_in_batches_spec.rb b/spec/rubocop/cop/migration/update_column_in_batches_spec.rb
index 1d50d8c675e..8049cba12d0 100644
--- a/spec/rubocop/cop/migration/update_column_in_batches_spec.rb
+++ b/spec/rubocop/cop/migration/update_column_in_batches_spec.rb
@@ -7,7 +7,7 @@ require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/migration/update_column_in_batches'
-RSpec.describe RuboCop::Cop::Migration::UpdateColumnInBatches, type: :rubocop do
+RSpec.describe RuboCop::Cop::Migration::UpdateColumnInBatches do
let(:cop) { described_class.new }
let(:tmp_rails_root) { rails_root_join('tmp', 'rails_root') }
let(:migration_code) do
diff --git a/spec/rubocop/cop/migration/with_lock_retries_disallowed_method_spec.rb b/spec/rubocop/cop/migration/with_lock_retries_disallowed_method_spec.rb
index 607daf0c9f0..814d87ea24b 100644
--- a/spec/rubocop/cop/migration/with_lock_retries_disallowed_method_spec.rb
+++ b/spec/rubocop/cop/migration/with_lock_retries_disallowed_method_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/migration/with_lock_retries_disallowed_method'
-RSpec.describe RuboCop::Cop::Migration::WithLockRetriesDisallowedMethod, type: :rubocop do
+RSpec.describe RuboCop::Cop::Migration::WithLockRetriesDisallowedMethod do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/migration/with_lock_retries_with_change_spec.rb b/spec/rubocop/cop/migration/with_lock_retries_with_change_spec.rb
index 93b96f3a20e..f0be14c8ee9 100644
--- a/spec/rubocop/cop/migration/with_lock_retries_with_change_spec.rb
+++ b/spec/rubocop/cop/migration/with_lock_retries_with_change_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/migration/with_lock_retries_with_change'
-RSpec.describe RuboCop::Cop::Migration::WithLockRetriesWithChange, type: :rubocop do
+RSpec.describe RuboCop::Cop::Migration::WithLockRetriesWithChange do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/performance/ar_count_each_spec.rb b/spec/rubocop/cop/performance/ar_count_each_spec.rb
index 33b667dd52f..6242c7a4c5e 100644
--- a/spec/rubocop/cop/performance/ar_count_each_spec.rb
+++ b/spec/rubocop/cop/performance/ar_count_each_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/performance/ar_count_each.rb'
-RSpec.describe RuboCop::Cop::Performance::ARCountEach, type: :rubocop do
+RSpec.describe RuboCop::Cop::Performance::ARCountEach do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/performance/ar_exists_and_present_blank_spec.rb b/spec/rubocop/cop/performance/ar_exists_and_present_blank_spec.rb
index 972f3c52153..3321d400ae1 100644
--- a/spec/rubocop/cop/performance/ar_exists_and_present_blank_spec.rb
+++ b/spec/rubocop/cop/performance/ar_exists_and_present_blank_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/performance/ar_exists_and_present_blank.rb'
-RSpec.describe RuboCop::Cop::Performance::ARExistsAndPresentBlank, type: :rubocop do
+RSpec.describe RuboCop::Cop::Performance::ARExistsAndPresentBlank do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/performance/readlines_each_spec.rb b/spec/rubocop/cop/performance/readlines_each_spec.rb
index 2f222f1dc00..c19426606f6 100644
--- a/spec/rubocop/cop/performance/readlines_each_spec.rb
+++ b/spec/rubocop/cop/performance/readlines_each_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/performance/readlines_each'
-RSpec.describe RuboCop::Cop::Performance::ReadlinesEach, type: :rubocop do
+RSpec.describe RuboCop::Cop::Performance::ReadlinesEach do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/prefer_class_methods_over_module_spec.rb b/spec/rubocop/cop/prefer_class_methods_over_module_spec.rb
index bed8d331209..dc665f9dd25 100644
--- a/spec/rubocop/cop/prefer_class_methods_over_module_spec.rb
+++ b/spec/rubocop/cop/prefer_class_methods_over_module_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/prefer_class_methods_over_module'
-RSpec.describe RuboCop::Cop::PreferClassMethodsOverModule, type: :rubocop do
+RSpec.describe RuboCop::Cop::PreferClassMethodsOverModule do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/project_path_helper_spec.rb b/spec/rubocop/cop/project_path_helper_spec.rb
index 0e5b0b10ae6..78a590b89f6 100644
--- a/spec/rubocop/cop/project_path_helper_spec.rb
+++ b/spec/rubocop/cop/project_path_helper_spec.rb
@@ -7,7 +7,7 @@ require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/project_path_helper'
-RSpec.describe RuboCop::Cop::ProjectPathHelper, type: :rubocop do
+RSpec.describe RuboCop::Cop::ProjectPathHelper do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/put_group_routes_under_scope_spec.rb b/spec/rubocop/cop/put_group_routes_under_scope_spec.rb
index 888d1b6a2ba..46b50d7690b 100644
--- a/spec/rubocop/cop/put_group_routes_under_scope_spec.rb
+++ b/spec/rubocop/cop/put_group_routes_under_scope_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../rubocop/cop/put_group_routes_under_scope'
-RSpec.describe RuboCop::Cop::PutGroupRoutesUnderScope, type: :rubocop do
+RSpec.describe RuboCop::Cop::PutGroupRoutesUnderScope do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/put_project_routes_under_scope_spec.rb b/spec/rubocop/cop/put_project_routes_under_scope_spec.rb
index eebb7f3eb61..b0627af0e8b 100644
--- a/spec/rubocop/cop/put_project_routes_under_scope_spec.rb
+++ b/spec/rubocop/cop/put_project_routes_under_scope_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../rubocop/cop/put_project_routes_under_scope'
-RSpec.describe RuboCop::Cop::PutProjectRoutesUnderScope, type: :rubocop do
+RSpec.describe RuboCop::Cop::PutProjectRoutesUnderScope do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/qa/ambiguous_page_object_name_spec.rb b/spec/rubocop/cop/qa/ambiguous_page_object_name_spec.rb
index 484b5fad473..4876fcd5050 100644
--- a/spec/rubocop/cop/qa/ambiguous_page_object_name_spec.rb
+++ b/spec/rubocop/cop/qa/ambiguous_page_object_name_spec.rb
@@ -7,7 +7,7 @@ require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/qa/ambiguous_page_object_name'
-RSpec.describe RuboCop::Cop::QA::AmbiguousPageObjectName, type: :rubocop do
+RSpec.describe RuboCop::Cop::QA::AmbiguousPageObjectName do
include CopHelper
let(:source_file) { 'qa/page.rb' }
diff --git a/spec/rubocop/cop/qa/element_with_pattern_spec.rb b/spec/rubocop/cop/qa/element_with_pattern_spec.rb
index 0e599701531..6289b1a7c97 100644
--- a/spec/rubocop/cop/qa/element_with_pattern_spec.rb
+++ b/spec/rubocop/cop/qa/element_with_pattern_spec.rb
@@ -7,7 +7,7 @@ require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/qa/element_with_pattern'
-RSpec.describe RuboCop::Cop::QA::ElementWithPattern, type: :rubocop do
+RSpec.describe RuboCop::Cop::QA::ElementWithPattern do
include CopHelper
let(:source_file) { 'qa/page.rb' }
diff --git a/spec/rubocop/cop/rspec/any_instance_of_spec.rb b/spec/rubocop/cop/rspec/any_instance_of_spec.rb
index 971e28853a3..42bb7d196a1 100644
--- a/spec/rubocop/cop/rspec/any_instance_of_spec.rb
+++ b/spec/rubocop/cop/rspec/any_instance_of_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require_relative '../../../../rubocop/cop/rspec/any_instance_of'
-RSpec.describe RuboCop::Cop::RSpec::AnyInstanceOf, type: :rubocop do
+RSpec.describe RuboCop::Cop::RSpec::AnyInstanceOf do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/rspec/be_success_matcher_spec.rb b/spec/rubocop/cop/rspec/be_success_matcher_spec.rb
index b14cf39cbde..d49507c89b1 100644
--- a/spec/rubocop/cop/rspec/be_success_matcher_spec.rb
+++ b/spec/rubocop/cop/rspec/be_success_matcher_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/rspec/be_success_matcher'
-RSpec.describe RuboCop::Cop::RSpec::BeSuccessMatcher, type: :rubocop do
+RSpec.describe RuboCop::Cop::RSpec::BeSuccessMatcher do
include CopHelper
let(:source_file) { 'spec/foo_spec.rb' }
diff --git a/spec/rubocop/cop/rspec/env_assignment_spec.rb b/spec/rubocop/cop/rspec/env_assignment_spec.rb
index 72ad584dd6f..07afd30fc90 100644
--- a/spec/rubocop/cop/rspec/env_assignment_spec.rb
+++ b/spec/rubocop/cop/rspec/env_assignment_spec.rb
@@ -7,7 +7,7 @@ require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/rspec/env_assignment'
-RSpec.describe RuboCop::Cop::RSpec::EnvAssignment, type: :rubocop do
+RSpec.describe RuboCop::Cop::RSpec::EnvAssignment do
include CopHelper
offense_call_single_quotes_key = %(ENV['FOO'] = 'bar').freeze
diff --git a/spec/rubocop/cop/rspec/factories_in_migration_specs_spec.rb b/spec/rubocop/cop/rspec/factories_in_migration_specs_spec.rb
index 1bb93402a5b..fe9cea47a43 100644
--- a/spec/rubocop/cop/rspec/factories_in_migration_specs_spec.rb
+++ b/spec/rubocop/cop/rspec/factories_in_migration_specs_spec.rb
@@ -7,7 +7,7 @@ require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/rspec/factories_in_migration_specs'
-RSpec.describe RuboCop::Cop::RSpec::FactoriesInMigrationSpecs, type: :rubocop do
+RSpec.describe RuboCop::Cop::RSpec::FactoriesInMigrationSpecs do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/rspec/factory_bot/inline_association_spec.rb b/spec/rubocop/cop/rspec/factory_bot/inline_association_spec.rb
index 70dbe086127..33fdaaee3c7 100644
--- a/spec/rubocop/cop/rspec/factory_bot/inline_association_spec.rb
+++ b/spec/rubocop/cop/rspec/factory_bot/inline_association_spec.rb
@@ -6,7 +6,7 @@ require 'rubocop'
require_relative '../../../../../rubocop/cop/rspec/factory_bot/inline_association'
-RSpec.describe RuboCop::Cop::RSpec::FactoryBot::InlineAssociation, type: :rubocop do
+RSpec.describe RuboCop::Cop::RSpec::FactoryBot::InlineAssociation do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/rspec/htt_party_basic_auth_spec.rb b/spec/rubocop/cop/rspec/htt_party_basic_auth_spec.rb
index 8c3703a488a..6e9e436602c 100644
--- a/spec/rubocop/cop/rspec/htt_party_basic_auth_spec.rb
+++ b/spec/rubocop/cop/rspec/htt_party_basic_auth_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require_relative '../../../../rubocop/cop/rspec/httparty_basic_auth'
-RSpec.describe RuboCop::Cop::RSpec::HTTPartyBasicAuth, type: :rubocop do
+RSpec.describe RuboCop::Cop::RSpec::HTTPartyBasicAuth do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/rspec/modify_sidekiq_middleware_spec.rb b/spec/rubocop/cop/rspec/modify_sidekiq_middleware_spec.rb
index ca47bba4264..d9e3ca5741c 100644
--- a/spec/rubocop/cop/rspec/modify_sidekiq_middleware_spec.rb
+++ b/spec/rubocop/cop/rspec/modify_sidekiq_middleware_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/rspec/modify_sidekiq_middleware'
-RSpec.describe RuboCop::Cop::RSpec::ModifySidekiqMiddleware, type: :rubocop do
+RSpec.describe RuboCop::Cop::RSpec::ModifySidekiqMiddleware do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/rspec/timecop_freeze_spec.rb b/spec/rubocop/cop/rspec/timecop_freeze_spec.rb
index 3809431a2fc..b1cf82492e4 100644
--- a/spec/rubocop/cop/rspec/timecop_freeze_spec.rb
+++ b/spec/rubocop/cop/rspec/timecop_freeze_spec.rb
@@ -7,7 +7,7 @@ require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/rspec/timecop_freeze'
-RSpec.describe RuboCop::Cop::RSpec::TimecopFreeze, type: :rubocop do
+RSpec.describe RuboCop::Cop::RSpec::TimecopFreeze do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/rspec/timecop_travel_spec.rb b/spec/rubocop/cop/rspec/timecop_travel_spec.rb
index 25a8127d40e..2ee8bfe9ad7 100644
--- a/spec/rubocop/cop/rspec/timecop_travel_spec.rb
+++ b/spec/rubocop/cop/rspec/timecop_travel_spec.rb
@@ -7,7 +7,7 @@ require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/rspec/timecop_travel'
-RSpec.describe RuboCop::Cop::RSpec::TimecopTravel, type: :rubocop do
+RSpec.describe RuboCop::Cop::RSpec::TimecopTravel do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/rspec/top_level_describe_path_spec.rb b/spec/rubocop/cop/rspec/top_level_describe_path_spec.rb
index 92a76bde243..4936936836d 100644
--- a/spec/rubocop/cop/rspec/top_level_describe_path_spec.rb
+++ b/spec/rubocop/cop/rspec/top_level_describe_path_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/rspec/top_level_describe_path'
-RSpec.describe RuboCop::Cop::RSpec::TopLevelDescribePath, type: :rubocop do
+RSpec.describe RuboCop::Cop::RSpec::TopLevelDescribePath do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/rspec/web_mock_enable_spec.rb b/spec/rubocop/cop/rspec/web_mock_enable_spec.rb
new file mode 100644
index 00000000000..61a85064a61
--- /dev/null
+++ b/spec/rubocop/cop/rspec/web_mock_enable_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+require_relative '../../../../rubocop/cop/rspec/web_mock_enable'
+
+RSpec.describe RuboCop::Cop::RSpec::WebMockEnable do
+ subject(:cop) { described_class.new }
+
+ context 'when calling WebMock.disable_net_connect!' do
+ it 'registers an offence and autocorrects it' do
+ expect_offense(<<~RUBY)
+ WebMock.disable_net_connect!(allow_localhost: true)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Use webmock_enable! instead of calling WebMock.disable_net_connect! directly.
+ RUBY
+
+ expect_correction(<<~RUBY)
+ webmock_enable!
+ RUBY
+ end
+ end
+end
diff --git a/spec/rubocop/cop/ruby_interpolation_in_translation_spec.rb b/spec/rubocop/cop/ruby_interpolation_in_translation_spec.rb
index 938ddc7fb6a..a6a44b3fa68 100644
--- a/spec/rubocop/cop/ruby_interpolation_in_translation_spec.rb
+++ b/spec/rubocop/cop/ruby_interpolation_in_translation_spec.rb
@@ -9,7 +9,7 @@ require_relative '../../../rubocop/cop/ruby_interpolation_in_translation'
# Disabling interpolation check as we deliberately want to have #{} in strings.
# rubocop:disable Lint/InterpolationCheck
-RSpec.describe RuboCop::Cop::RubyInterpolationInTranslation, type: :rubocop do
+RSpec.describe RuboCop::Cop::RubyInterpolationInTranslation do
subject(:cop) { described_class.new }
it 'does not add an offence for a regular messages' do
diff --git a/spec/rubocop/cop/safe_params_spec.rb b/spec/rubocop/cop/safe_params_spec.rb
index c4f683a41bf..c8f6768c4bb 100644
--- a/spec/rubocop/cop/safe_params_spec.rb
+++ b/spec/rubocop/cop/safe_params_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/safe_params'
-RSpec.describe RuboCop::Cop::SafeParams, type: :rubocop do
+RSpec.describe RuboCop::Cop::SafeParams do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/scalability/bulk_perform_with_context_spec.rb b/spec/rubocop/cop/scalability/bulk_perform_with_context_spec.rb
index 48964ab76e0..6e526f7ad8f 100644
--- a/spec/rubocop/cop/scalability/bulk_perform_with_context_spec.rb
+++ b/spec/rubocop/cop/scalability/bulk_perform_with_context_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/scalability/bulk_perform_with_context'
-RSpec.describe RuboCop::Cop::Scalability::BulkPerformWithContext, type: :rubocop do
+RSpec.describe RuboCop::Cop::Scalability::BulkPerformWithContext do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/scalability/cron_worker_context_spec.rb b/spec/rubocop/cop/scalability/cron_worker_context_spec.rb
index 4b4ee7258f1..4699e06e9cf 100644
--- a/spec/rubocop/cop/scalability/cron_worker_context_spec.rb
+++ b/spec/rubocop/cop/scalability/cron_worker_context_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/scalability/cron_worker_context'
-RSpec.describe RuboCop::Cop::Scalability::CronWorkerContext, type: :rubocop do
+RSpec.describe RuboCop::Cop::Scalability::CronWorkerContext do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/scalability/file_uploads_spec.rb b/spec/rubocop/cop/scalability/file_uploads_spec.rb
index ed826728681..78ff7fea55c 100644
--- a/spec/rubocop/cop/scalability/file_uploads_spec.rb
+++ b/spec/rubocop/cop/scalability/file_uploads_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/scalability/file_uploads'
-RSpec.describe RuboCop::Cop::Scalability::FileUploads, type: :rubocop do
+RSpec.describe RuboCop::Cop::Scalability::FileUploads do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/scalability/idempotent_worker_spec.rb b/spec/rubocop/cop/scalability/idempotent_worker_spec.rb
index 9197cc954f5..666122a9de4 100644
--- a/spec/rubocop/cop/scalability/idempotent_worker_spec.rb
+++ b/spec/rubocop/cop/scalability/idempotent_worker_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'rubocop'
require_relative '../../../../rubocop/cop/scalability/idempotent_worker'
-RSpec.describe RuboCop::Cop::Scalability::IdempotentWorker, type: :rubocop do
+RSpec.describe RuboCop::Cop::Scalability::IdempotentWorker do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/sidekiq_options_queue_spec.rb b/spec/rubocop/cop/sidekiq_options_queue_spec.rb
index e76265d426c..306cbcf62b5 100644
--- a/spec/rubocop/cop/sidekiq_options_queue_spec.rb
+++ b/spec/rubocop/cop/sidekiq_options_queue_spec.rb
@@ -7,7 +7,7 @@ require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/sidekiq_options_queue'
-RSpec.describe RuboCop::Cop::SidekiqOptionsQueue, type: :rubocop do
+RSpec.describe RuboCop::Cop::SidekiqOptionsQueue do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/static_translation_definition_spec.rb b/spec/rubocop/cop/static_translation_definition_spec.rb
index f3185def3d7..8a38a318999 100644
--- a/spec/rubocop/cop/static_translation_definition_spec.rb
+++ b/spec/rubocop/cop/static_translation_definition_spec.rb
@@ -7,7 +7,7 @@ require 'rspec-parameterized'
require_relative '../../../rubocop/cop/static_translation_definition'
-RSpec.describe RuboCop::Cop::StaticTranslationDefinition, type: :rubocop do
+RSpec.describe RuboCop::Cop::StaticTranslationDefinition do
include CopHelper
using RSpec::Parameterized::TableSyntax
diff --git a/spec/rubocop/cop/usage_data/distinct_count_by_large_foreign_key_spec.rb b/spec/rubocop/cop/usage_data/distinct_count_by_large_foreign_key_spec.rb
index 8b6a2eac349..1c90df798a5 100644
--- a/spec/rubocop/cop/usage_data/distinct_count_by_large_foreign_key_spec.rb
+++ b/spec/rubocop/cop/usage_data/distinct_count_by_large_foreign_key_spec.rb
@@ -7,7 +7,7 @@ require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/usage_data/distinct_count_by_large_foreign_key'
-RSpec.describe RuboCop::Cop::UsageData::DistinctCountByLargeForeignKey, type: :rubocop do
+RSpec.describe RuboCop::Cop::UsageData::DistinctCountByLargeForeignKey do
include CopHelper
let(:allowed_foreign_keys) { [:author_id, :user_id, :'merge_requests.target_project_id'] }
diff --git a/spec/rubocop/cop/usage_data/large_table_spec.rb b/spec/rubocop/cop/usage_data/large_table_spec.rb
index de6fb9c17e2..638e8c67dc8 100644
--- a/spec/rubocop/cop/usage_data/large_table_spec.rb
+++ b/spec/rubocop/cop/usage_data/large_table_spec.rb
@@ -7,7 +7,7 @@ require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/usage_data/large_table'
-RSpec.describe RuboCop::Cop::UsageData::LargeTable, type: :rubocop do
+RSpec.describe RuboCop::Cop::UsageData::LargeTable do
include CopHelper
let(:large_tables) { %i[Rails Time] }
diff --git a/spec/rubocop/qa_helpers_spec.rb b/spec/rubocop/qa_helpers_spec.rb
index 786b9c78952..051817903a8 100644
--- a/spec/rubocop/qa_helpers_spec.rb
+++ b/spec/rubocop/qa_helpers_spec.rb
@@ -5,7 +5,7 @@ require 'rubocop'
require 'parser/current'
require_relative '../../rubocop/qa_helpers'
-RSpec.describe RuboCop::QAHelpers, type: :rubocop do
+RSpec.describe RuboCop::QAHelpers do
def parse_source(source, path = 'foo.rb')
buffer = Parser::Source::Buffer.new(path)
buffer.source = source
diff --git a/spec/serializers/analytics_stage_serializer_spec.rb b/spec/serializers/analytics_stage_serializer_spec.rb
deleted file mode 100644
index 0f2de262188..00000000000
--- a/spec/serializers/analytics_stage_serializer_spec.rb
+++ /dev/null
@@ -1,56 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe AnalyticsStageSerializer do
- subject do
- described_class.new.represent(resource)
- end
-
- let(:resource) do
- Gitlab::CycleAnalytics::CodeStage.new(options: { project: double })
- end
-
- before do
- allow_any_instance_of(Gitlab::CycleAnalytics::BaseStage).to receive(:project_median).and_return(1.12)
- allow_any_instance_of(Gitlab::CycleAnalytics::BaseEventFetcher).to receive(:event_result).and_return({})
- end
-
- it 'generates payload for single object' do
- expect(subject).to be_kind_of Hash
- end
-
- it 'contains important elements of AnalyticsStage' do
- expect(subject).to include(:title, :description, :value)
- end
-
- context 'when median is equal 0' do
- before do
- allow_any_instance_of(Gitlab::CycleAnalytics::BaseStage).to receive(:project_median).and_return(0)
- end
-
- it 'sets the value to nil' do
- expect(subject.fetch(:value)).to be_nil
- end
- end
-
- context 'when median is below 1' do
- before do
- allow_any_instance_of(Gitlab::CycleAnalytics::BaseStage).to receive(:project_median).and_return(0.12)
- end
-
- it 'sets the value to equal to median' do
- expect(subject.fetch(:value)).to eq('less than a minute')
- end
- end
-
- context 'when median is above 1' do
- before do
- allow_any_instance_of(Gitlab::CycleAnalytics::BaseStage).to receive(:project_median).and_return(60.12)
- end
-
- it 'sets the value to equal to median' do
- expect(subject.fetch(:value)).to eq('1 minute')
- end
- end
-end
diff --git a/spec/serializers/build_details_entity_spec.rb b/spec/serializers/build_details_entity_spec.rb
index 5d29452e91c..4a58f341658 100644
--- a/spec/serializers/build_details_entity_spec.rb
+++ b/spec/serializers/build_details_entity_spec.rb
@@ -5,14 +5,13 @@ require 'spec_helper'
RSpec.describe BuildDetailsEntity do
include ProjectForksHelper
- let_it_be(:user) { create(:admin) }
-
it 'inherits from JobEntity' do
expect(described_class).to be < JobEntity
end
describe '#as_json' do
let(:project) { create(:project, :repository) }
+ let(:user) { project.owner }
let(:pipeline) { create(:ci_pipeline, project: project) }
let(:build) { create(:ci_build, :failed, pipeline: pipeline) }
let(:request) { double('request', project: project) }
@@ -66,6 +65,7 @@ RSpec.describe BuildDetailsEntity do
before do
allow(build).to receive(:merge_request).and_return(merge_request)
+ forked_project.add_developer(user)
end
let(:merge_request) do
@@ -186,7 +186,7 @@ RSpec.describe BuildDetailsEntity do
end
context 'when the build has expired artifacts' do
- let!(:build) { create(:ci_build, :artifacts, artifacts_expire_at: 7.days.ago) }
+ let!(:build) { create(:ci_build, :artifacts, pipeline: pipeline, artifacts_expire_at: 7.days.ago) }
context 'when pipeline is unlocked' do
before do
@@ -218,12 +218,43 @@ RSpec.describe BuildDetailsEntity do
end
context 'when the build has archive type artifacts' do
- let!(:build) { create(:ci_build, :artifacts, artifacts_expire_at: 7.days.from_now) }
+ let!(:build) { create(:ci_build, :artifacts, pipeline: pipeline, artifacts_expire_at: 7.days.from_now) }
let!(:report) { create(:ci_job_artifact, :codequality, job: build) }
it 'exposes artifact details' do
expect(subject[:artifact].keys).to include(:download_path, :browse_path, :keep_path, :expire_at, :expired, :locked)
end
end
+
+ context 'when the project is public and the user is a guest' do
+ let(:project) { create(:project, :repository, :public) }
+ let(:user) { create(:project_member, :guest, project: project).user }
+
+ context 'when the build has public archive type artifacts' do
+ let(:build) { create(:ci_build, :artifacts) }
+
+ it 'exposes public artifact details' do
+ expect(subject[:artifact].keys).to include(:download_path, :browse_path, :locked)
+ end
+ end
+
+ context 'when the build has non public archive type artifacts' do
+ let(:build) { create(:ci_build, :artifacts, :non_public_artifacts, pipeline: pipeline) }
+
+ it 'does not expose non public artifacts' do
+ expect(subject.keys).not_to include(:artifact)
+ end
+
+ context 'with the non_public_artifacts feature flag disabled' do
+ before do
+ stub_feature_flags(non_public_artifacts: false)
+ end
+
+ it 'exposes artifact details' do
+ expect(subject[:artifact].keys).to include(:download_path, :browse_path, :locked)
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/serializers/deploy_key_entity_spec.rb b/spec/serializers/deploy_key_entity_spec.rb
index 3404d27a23c..e8d9701be67 100644
--- a/spec/serializers/deploy_key_entity_spec.rb
+++ b/spec/serializers/deploy_key_entity_spec.rb
@@ -52,7 +52,13 @@ RSpec.describe DeployKeyEntity do
context 'user is an admin' do
let(:user) { create(:user, :admin) }
- it { expect(entity.as_json).to include(can_edit: true) }
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it { expect(entity.as_json).to include(can_edit: true) }
+ end
+
+ context 'when admin mode is disabled' do
+ it { expect(entity.as_json).not_to include(can_edit: true) }
+ end
end
context 'user is a project maintainer' do
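
The hunk above splits the admin expectations on the :enable_admin_mode example metadata. A minimal, self-contained sketch of how such metadata can drive per-example setup in plain RSpec; the before-hook body and the @admin_mode flag are stand-ins, not GitLab's actual admin-mode helper:

RSpec.configure do |config|
  # Runs only for examples or groups tagged with :enable_admin_mode.
  config.before(:example, :enable_admin_mode) do
    @admin_mode = true # stand-in: the real hook switches the current user into admin mode
  end
end

RSpec.describe 'admin mode toggling' do
  context 'when admin mode is enabled', :enable_admin_mode do
    it('sets the flag') { expect(@admin_mode).to be(true) }
  end

  context 'when admin mode is disabled' do
    it('leaves the flag unset') { expect(@admin_mode).to be_nil }
  end
end
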
diff --git a/spec/serializers/diffs_metadata_entity_spec.rb b/spec/serializers/diffs_metadata_entity_spec.rb
index e8cbc2076d7..f6993d4652e 100644
--- a/spec/serializers/diffs_metadata_entity_spec.rb
+++ b/spec/serializers/diffs_metadata_entity_spec.rb
@@ -31,6 +31,7 @@ RSpec.describe DiffsMetadataEntity do
:merge_request_diffs, :context_commits,
:definition_path_prefix, :source_branch_exists,
:can_merge, :conflict_resolution_path, :has_conflicts,
+ :project_name, :project_path, :user_full_name, :username,
# Attributes
:diff_files
)
diff --git a/spec/serializers/environment_entity_spec.rb b/spec/serializers/environment_entity_spec.rb
index 5b83507b4ec..a6101f825e9 100644
--- a/spec/serializers/environment_entity_spec.rb
+++ b/spec/serializers/environment_entity_spec.rb
@@ -3,9 +3,10 @@
require 'spec_helper'
RSpec.describe EnvironmentEntity do
+ include KubernetesHelpers
include Gitlab::Routing.url_helpers
- let(:request) { double('request') }
+ let(:request) { double('request', current_user: user, project: project) }
let(:entity) do
described_class.new(environment, request: request)
end
@@ -167,4 +168,23 @@ RSpec.describe EnvironmentEntity do
end
end
end
+
+ context 'with deployment service ready' do
+ before do
+ allow(environment).to receive(:has_terminals?).and_return(true)
+ allow(environment).to receive(:rollout_status).and_return(kube_deployment_rollout_status)
+ end
+
+ it 'exposes rollout_status' do
+ expect(subject).to include(:rollout_status)
+ end
+ end
+
+ context 'with deployment service not ready' do
+ let(:user) { create(:user) }
+
+ it 'does not expose rollout_status' do
+ expect(subject).not_to include(:rollout_status)
+ end
+ end
end
diff --git a/spec/serializers/member_entity_spec.rb b/spec/serializers/member_entity_spec.rb
new file mode 100644
index 00000000000..f34434188c1
--- /dev/null
+++ b/spec/serializers/member_entity_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MemberEntity do
+ let_it_be(:current_user) { create(:user) }
+ let(:entity) { described_class.new(member, { current_user: current_user, group: group }) }
+ let(:entity_hash) { entity.as_json }
+
+ shared_examples 'member.json' do
+ it 'matches json schema' do
+ expect(entity.to_json).to match_schema('entities/member')
+ end
+
+ it 'correctly exposes `can_update`' do
+ allow(member).to receive(:can_update?).and_return(true)
+
+ expect(entity_hash[:can_update]).to be(true)
+ end
+
+ it 'correctly exposes `can_remove`' do
+ allow(member).to receive(:can_remove?).and_return(true)
+
+ expect(entity_hash[:can_remove]).to be(true)
+ end
+ end
+
+ shared_examples 'invite' do
+ it 'correctly exposes `invite.avatar_url`' do
+ avatar_url = 'https://www.gravatar.com/avatar/c4637cb869d5f94c3193bde4f23d4cdc?s=80&d=identicon'
+ allow(entity).to receive(:avatar_icon_for_email).with(member.invite_email, Member::AVATAR_SIZE).and_return(avatar_url)
+
+ expect(entity_hash[:invite][:avatar_url]).to match(avatar_url)
+ end
+
+ it 'correctly exposes `invite.can_resend`' do
+ allow(member).to receive(:can_resend_invite?).and_return(true)
+
+ expect(entity_hash[:invite][:can_resend]).to be(true)
+ end
+ end
+
+ context 'group member' do
+ let(:group) { create(:group) }
+ let(:member) { GroupMemberPresenter.new(create(:group_member, group: group), current_user: current_user) }
+
+ it_behaves_like 'member.json'
+
+ context 'invite' do
+ let(:member) { GroupMemberPresenter.new(create(:group_member, :invited, group: group), current_user: current_user) }
+
+ it_behaves_like 'member.json'
+ it_behaves_like 'invite'
+ end
+ end
+
+ context 'project member' do
+ let(:project) { create(:project) }
+ let(:group) { project.group }
+ let(:member) { ProjectMemberPresenter.new(create(:project_member, project: project), current_user: current_user) }
+
+ it_behaves_like 'member.json'
+
+ context 'invite' do
+ let(:member) { ProjectMemberPresenter.new(create(:project_member, :invited, project: project), current_user: current_user) }
+
+ it_behaves_like 'member.json'
+ it_behaves_like 'invite'
+ end
+ end
+end
diff --git a/spec/serializers/member_serializer_spec.rb b/spec/serializers/member_serializer_spec.rb
new file mode 100644
index 00000000000..d3ec45fe9c4
--- /dev/null
+++ b/spec/serializers/member_serializer_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MemberSerializer do
+ include MembersPresentation
+
+ let_it_be(:current_user) { create(:user) }
+
+ subject { described_class.new.represent(members, { current_user: current_user, group: group }) }
+
+ shared_examples 'members.json' do
+ it 'matches json schema' do
+ expect(subject.to_json).to match_schema('members')
+ end
+ end
+
+ context 'group member' do
+ let(:group) { create(:group) }
+ let(:members) { present_members(create_list(:group_member, 1, group: group)) }
+
+ it_behaves_like 'members.json'
+ end
+
+ context 'project member' do
+ let(:project) { create(:project) }
+ let(:group) { project.group }
+ let(:members) { present_members(create_list(:project_member, 1, project: project)) }
+
+ it_behaves_like 'members.json'
+ end
+end
diff --git a/spec/serializers/member_user_entity_spec.rb b/spec/serializers/member_user_entity_spec.rb
new file mode 100644
index 00000000000..1c000c06bb6
--- /dev/null
+++ b/spec/serializers/member_user_entity_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MemberUserEntity do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:emoji) { 'slight_smile' }
+ let_it_be(:user_status) { create(:user_status, user: user, emoji: emoji) }
+ let(:entity) { described_class.new(user) }
+ let(:entity_hash) { entity.as_json }
+
+ it 'matches json schema' do
+ expect(entity.to_json).to match_schema('entities/member_user')
+ end
+
+ it 'correctly exposes `avatar_url`' do
+ avatar_url = 'https://www.gravatar.com/avatar/c4637cb869d5f94c3193bde4f23d4cdc?s=80&d=identicon'
+ allow(user).to receive(:avatar_url).and_return(avatar_url)
+
+ expect(entity_hash[:avatar_url]).to match(avatar_url)
+ end
+
+ it 'correctly exposes `blocked`' do
+ allow(user).to receive(:blocked?).and_return(true)
+
+ expect(entity_hash[:blocked]).to be(true)
+ end
+
+ it 'correctly exposes `two_factor_enabled`' do
+ allow(user).to receive(:two_factor_enabled?).and_return(true)
+
+ expect(entity_hash[:two_factor_enabled]).to be(true)
+ end
+
+ it 'correctly exposes `status.emoji`' do
+ expect(entity_hash[:status][:emoji]).to match(emoji)
+ end
+end
diff --git a/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb b/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
index 031dc729a79..8c72430ff5c 100644
--- a/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe MergeRequestPollCachedWidgetEntity do
let_it_be(:project, refind: true) { create :project, :repository }
let_it_be(:resource, refind: true) { create(:merge_request, source_project: project, target_project: project) }
let_it_be(:user) { create(:user) }
+ let(:pipeline) { create(:ci_empty_pipeline, project: project) }
let(:request) { double('request', current_user: user, project: project) }
@@ -25,6 +26,17 @@ RSpec.describe MergeRequestPollCachedWidgetEntity do
expect(subject[:merge_status]).to eq 'checking'
end
+ it 'has blob path data' do
+ allow(resource).to receive_messages(
+ base_pipeline: pipeline,
+ head_pipeline: pipeline
+ )
+
+ expect(subject).to include(:blob_path)
+ expect(subject[:blob_path]).to include(:base_path)
+ expect(subject[:blob_path]).to include(:head_path)
+ end
+
describe 'diverged_commits_count' do
context 'when MR open and its diverging' do
it 'returns diverged commits count' do
diff --git a/spec/serializers/merge_request_widget_entity_spec.rb b/spec/serializers/merge_request_widget_entity_spec.rb
index 9f734c08ef4..42d843af596 100644
--- a/spec/serializers/merge_request_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_widget_entity_spec.rb
@@ -76,17 +76,6 @@ RSpec.describe MergeRequestWidgetEntity do
.to eq("/#{resource.project.full_path}/-/merge_requests/#{resource.iid}.diff")
end
- it 'has blob path data' do
- allow(resource).to receive_messages(
- base_pipeline: pipeline,
- head_pipeline: pipeline
- )
-
- expect(subject).to include(:blob_path)
- expect(subject[:blob_path]).to include(:base_path)
- expect(subject[:blob_path]).to include(:head_path)
- end
-
describe 'codequality report artifacts', :request_store do
let(:merge_base_pipeline) { create(:ci_pipeline, :with_codequality_report, project: project) }
diff --git a/spec/serializers/merge_requests/pipeline_entity_spec.rb b/spec/serializers/merge_requests/pipeline_entity_spec.rb
index 8685d4bb846..b91d5e7679f 100644
--- a/spec/serializers/merge_requests/pipeline_entity_spec.rb
+++ b/spec/serializers/merge_requests/pipeline_entity_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe MergeRequests::PipelineEntity do
)
expect(subject[:commit]).to include(:short_id, :commit_path)
expect(subject[:ref]).to include(:branch)
- expect(subject[:details]).to include(:name, :status, :stages)
+ expect(subject[:details]).to include(:artifacts, :name, :status, :stages)
expect(subject[:details][:status]).to include(:icon, :favicon, :text, :label, :tooltip)
expect(subject[:flags]).to include(:merge_request_pipeline)
end
@@ -42,4 +42,6 @@ RSpec.describe MergeRequests::PipelineEntity do
expect(entity.as_json).not_to include(:coverage)
end
end
+
+ it_behaves_like 'public artifacts'
end
diff --git a/spec/serializers/pipeline_details_entity_spec.rb b/spec/serializers/pipeline_details_entity_spec.rb
index 1357836cb89..74e91cc9cdd 100644
--- a/spec/serializers/pipeline_details_entity_spec.rb
+++ b/spec/serializers/pipeline_details_entity_spec.rb
@@ -183,5 +183,7 @@ RSpec.describe PipelineDetailsEntity do
expect(source_jobs[child_pipeline.id][:name]).to eq('child')
end
end
+
+ it_behaves_like 'public artifacts'
end
end
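
Both pipeline entity specs above reuse a shared example group via it_behaves_like 'public artifacts'. A generic sketch of that reuse pattern; the group name and example body below are arbitrary stand-ins, not GitLab's shared example:

RSpec.shared_examples 'serializes to a hash' do
  it 'returns a Hash' do
    expect(subject.to_h).to be_a(Hash)
  end
end

RSpec.describe 'first entity' do
  subject { Struct.new(:name).new('a') }

  it_behaves_like 'serializes to a hash'
end

RSpec.describe 'second entity' do
  subject { Struct.new(:name).new('b') }

  it_behaves_like 'serializes to a hash'
end
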
diff --git a/spec/serializers/runner_entity_spec.rb b/spec/serializers/runner_entity_spec.rb
index 84c7d1720e2..e864b52c0f2 100644
--- a/spec/serializers/runner_entity_spec.rb
+++ b/spec/serializers/runner_entity_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe RunnerEntity do
let(:runner) { create(:ci_runner, :project, projects: [project]) }
let(:entity) { described_class.new(runner, request: request, current_user: user) }
let(:request) { double('request') }
- let(:user) { create(:admin) }
+ let(:user) { project.owner }
before do
allow(request).to receive(:current_user).and_return(user)
diff --git a/spec/services/alert_management/alerts/todo/create_service_spec.rb b/spec/services/alert_management/alerts/todo/create_service_spec.rb
index e3d9de8b4df..fa4fd8ed0b2 100644
--- a/spec/services/alert_management/alerts/todo/create_service_spec.rb
+++ b/spec/services/alert_management/alerts/todo/create_service_spec.rb
@@ -58,6 +58,10 @@ RSpec.describe AlertManagement::Alerts::Todo::CreateService do
create(:todo, :pending, **todo_params)
end
+ before do
+ stub_feature_flags(multiple_todos: false)
+ end
+
it 'does not create a todo' do
expect { result }.not_to change { Todo.count }
end
diff --git a/spec/services/alert_management/sync_alert_service_data_service_spec.rb b/spec/services/alert_management/sync_alert_service_data_service_spec.rb
deleted file mode 100644
index ecec60011db..00000000000
--- a/spec/services/alert_management/sync_alert_service_data_service_spec.rb
+++ /dev/null
@@ -1,55 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe AlertManagement::SyncAlertServiceDataService do
- let_it_be(:alerts_service) do
- AlertsService.skip_callback(:save, :after, :update_http_integration)
- service = create(:alerts_service, :active)
- AlertsService.set_callback(:save, :after, :update_http_integration)
-
- service
- end
-
- describe '#execute' do
- subject(:execute) { described_class.new(alerts_service).execute }
-
- context 'without http integration' do
- it 'creates the integration' do
- expect { execute }
- .to change { AlertManagement::HttpIntegration.count }.by(1)
- end
-
- it 'returns a success' do
- expect(subject.success?).to eq(true)
- end
- end
-
- context 'existing legacy http integration' do
- let_it_be(:integration) { create(:alert_management_http_integration, :legacy, project: alerts_service.project) }
-
- it 'updates the integration' do
- expect { execute }
- .to change { integration.reload.encrypted_token }.to(alerts_service.data.encrypted_token)
- .and change { integration.encrypted_token_iv }.to(alerts_service.data.encrypted_token_iv)
- end
-
- it 'returns a success' do
- expect(subject.success?).to eq(true)
- end
- end
-
- context 'existing other http integration' do
- let_it_be(:integration) { create(:alert_management_http_integration, project: alerts_service.project) }
-
- it 'creates the integration' do
- expect { execute }
- .to change { AlertManagement::HttpIntegration.count }.by(1)
- end
-
- it 'returns a success' do
- expect(subject.success?).to eq(true)
- end
- end
- end
-end
diff --git a/spec/services/ci/build_report_result_service_spec.rb b/spec/services/ci/build_report_result_service_spec.rb
index 244ffbf4bbd..7c2702af086 100644
--- a/spec/services/ci/build_report_result_service_spec.rb
+++ b/spec/services/ci/build_report_result_service_spec.rb
@@ -6,12 +6,6 @@ RSpec.describe Ci::BuildReportResultService do
describe '#execute', :clean_gitlab_redis_shared_state do
subject(:build_report_result) { described_class.new.execute(build) }
- around do |example|
- travel_to(DateTime.parse('2020-07-01')) do
- example.run
- end
- end
-
context 'when build is finished' do
let(:build) { create(:ci_build, :success, :test_reports) }
diff --git a/spec/services/ci/create_downstream_pipeline_service_spec.rb b/spec/services/ci/create_downstream_pipeline_service_spec.rb
index 03cea4074bf..860932d4fde 100644
--- a/spec/services/ci/create_downstream_pipeline_service_spec.rb
+++ b/spec/services/ci/create_downstream_pipeline_service_spec.rb
@@ -371,6 +371,26 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
expect { service.execute(bridge) }.to change { Ci::Pipeline.count }.by(1)
end
end
+
+ context 'when downstream project does not allow user-defined variables for child pipelines' do
+ before do
+ bridge.yaml_variables = [{ key: 'BRIDGE', value: '$PIPELINE_VARIABLE-var', public: true }]
+
+ upstream_pipeline.project.update!(restrict_user_defined_variables: true)
+ end
+
+ it 'creates a new pipeline allowing variables to be passed downstream' do
+ expect { service.execute(bridge) }.to change { Ci::Pipeline.count }.by(1)
+ end
+
+ it 'passes variables downstream from the bridge' do
+ pipeline = service.execute(bridge)
+
+ pipeline.variables.map(&:key).tap do |variables|
+ expect(variables).to include 'BRIDGE'
+ end
+ end
+ end
end
end
@@ -460,6 +480,33 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
expect(variable.value).to eq 'my-value-var'
end
end
+
+ context 'when downstream project does not allow user-defined variables for multi-project pipelines' do
+ before do
+ downstream_project.update!(restrict_user_defined_variables: true)
+ end
+
+ it 'does not create a new pipeline' do
+ expect { service.execute(bridge) }
+ .not_to change { Ci::Pipeline.count }
+ end
+
+ it 'ignores variables passed downstream from the bridge' do
+ pipeline = service.execute(bridge)
+
+ pipeline.variables.map(&:key).tap do |variables|
+ expect(variables).not_to include 'BRIDGE'
+ end
+ end
+
+ it 'sets errors', :aggregate_failures do
+ service.execute(bridge)
+
+ expect(bridge.reload).to be_failed
+ expect(bridge.failure_reason).to eq('downstream_pipeline_creation_failed')
+ expect(bridge.options[:downstream_errors]).to eq(['Insufficient permissions to set pipeline variables'])
+ end
+ end
end
end
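
The two new contexts above pin down how restrict_user_defined_variables interacts with bridge variables: child pipelines in the same project still receive them, while multi-project downstream pipelines are refused and the bridge fails. A condensed restatement of that expected decision in plain Ruby; the method name and arguments are illustrative only, not GitLab's implementation:

# Illustrative only: mirrors what the specs above expect, not the real service.
def downstream_variables_allowed?(restrict_user_defined_variables:, same_project:)
  return true if same_project            # child pipeline: variables always pass through
  !restrict_user_defined_variables       # multi-project: only when unrestricted
end

downstream_variables_allowed?(restrict_user_defined_variables: true, same_project: true)  # => true
downstream_variables_allowed?(restrict_user_defined_variables: true, same_project: false) # => false
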
diff --git a/spec/services/ci/create_pipeline_service/dry_run_spec.rb b/spec/services/ci/create_pipeline_service/dry_run_spec.rb
index 60c56ed0f67..c21a4ef0917 100644
--- a/spec/services/ci/create_pipeline_service/dry_run_spec.rb
+++ b/spec/services/ci/create_pipeline_service/dry_run_spec.rb
@@ -108,7 +108,7 @@ RSpec.describe Ci::CreatePipelineService do
it_behaves_like 'returns a non persisted pipeline'
it 'returns a pipeline with errors', :aggregate_failures do
- error_message = "test: needs 'build'"
+ error_message = "'test' job needs 'build' job, but it was not added to the pipeline"
expect(subject.error_messages.map(&:content)).to eq([error_message])
expect(subject.errors).not_to be_empty
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index f9015752644..e1f1bdc41a1 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -91,6 +91,23 @@ RSpec.describe Ci::CreatePipelineService do
.with({ source: 'push' }, 5)
end
+ it 'tracks included template usage' do
+ expect_next_instance_of(Gitlab::Ci::Pipeline::Chain::TemplateUsage) do |instance|
+ expect(instance).to receive(:perform!)
+ end
+
+ execute_service
+ end
+
+ describe 'recording a conversion event' do
+ it 'schedules a record conversion event worker' do
+ expect(Experiments::RecordConversionEventWorker).to receive(:perform_async).with(:ci_syntax_templates, user.id)
+ expect(Experiments::RecordConversionEventWorker).to receive(:perform_async).with(:pipelines_empty_state, user.id)
+
+ pipeline
+ end
+ end
+
context 'when merge requests already exist for this source branch' do
let(:merge_request_1) do
create(:merge_request, source_branch: 'feature', target_branch: "master", source_project: project)
@@ -481,6 +498,7 @@ RSpec.describe Ci::CreatePipelineService do
expect(execute_service).not_to be_persisted
expect(Ci::Pipeline.count).to eq(0)
+ expect(Namespaces::OnboardingPipelineCreatedWorker).not_to receive(:perform_async)
end
shared_examples 'a failed pipeline' do
@@ -1418,6 +1436,13 @@ RSpec.describe Ci::CreatePipelineService do
pipeline
end
+ it 'schedules a namespace onboarding create action worker' do
+ expect(Namespaces::OnboardingPipelineCreatedWorker)
+ .to receive(:perform_async).with(project.namespace_id)
+
+ pipeline
+ end
+
context 'when target sha is specified' do
let(:target_sha) { merge_request.target_branch_sha }
@@ -1688,9 +1713,11 @@ RSpec.describe Ci::CreatePipelineService do
shared_examples 'has errors' do
it 'contains the expected errors' do
expect(pipeline.builds).to be_empty
- expect(pipeline.yaml_errors).to eq("test_a: needs 'build_a'")
- expect(pipeline.error_messages.map(&:content)).to contain_exactly("test_a: needs 'build_a'")
- expect(pipeline.errors[:base]).to contain_exactly("test_a: needs 'build_a'")
+
+ error_message = "'test_a' job needs 'build_a' job, but it was not added to the pipeline"
+ expect(pipeline.yaml_errors).to eq(error_message)
+ expect(pipeline.error_messages.map(&:content)).to contain_exactly(error_message)
+ expect(pipeline.errors[:base]).to contain_exactly(error_message)
end
end
@@ -2385,16 +2412,6 @@ RSpec.describe Ci::CreatePipelineService do
expect(build_names).to contain_exactly('regular-job')
end
- context 'when FF ci_seed_block_run_before_workflow_rules is disabled' do
- before do
- stub_feature_flags(ci_seed_block_run_before_workflow_rules: false)
- end
-
- it 'does not a pipeline' do
- expect(pipeline).not_to be_persisted
- end
- end
-
context 'when a job requires the same variable' do
let(:config) do
<<-EOY
@@ -2423,16 +2440,6 @@ RSpec.describe Ci::CreatePipelineService do
expect(pipeline).to be_persisted
expect(build_names).to contain_exactly('build', 'test1', 'test2')
end
-
- context 'when FF ci_seed_block_run_before_workflow_rules is disabled' do
- before do
- stub_feature_flags(ci_seed_block_run_before_workflow_rules: false)
- end
-
- it 'does not a pipeline' do
- expect(pipeline).not_to be_persisted
- end
- end
end
end
@@ -2443,16 +2450,6 @@ RSpec.describe Ci::CreatePipelineService do
expect(pipeline).not_to be_persisted
end
- context 'when FF ci_seed_block_run_before_workflow_rules is disabled' do
- before do
- stub_feature_flags(ci_seed_block_run_before_workflow_rules: false)
- end
-
- it 'does not create a pipeline' do
- expect(pipeline).not_to be_persisted
- end
- end
-
context 'when a job requires the same variable' do
let(:config) do
<<-EOY
@@ -2480,16 +2477,6 @@ RSpec.describe Ci::CreatePipelineService do
it 'does not create a pipeline' do
expect(pipeline).not_to be_persisted
end
-
- context 'when FF ci_seed_block_run_before_workflow_rules is disabled' do
- before do
- stub_feature_flags(ci_seed_block_run_before_workflow_rules: false)
- end
-
- it 'does not create a pipeline' do
- expect(pipeline).not_to be_persisted
- end
- end
end
end
end
diff --git a/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb b/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb
index c8d426ee657..1edcef2977b 100644
--- a/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb
+++ b/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
describe '.execute' do
subject { service.execute }
- let_it_be(:artifact, reload: true) do
+ let_it_be(:artifact, refind: true) do
create(:ci_job_artifact, expire_at: 1.day.ago)
end
@@ -30,14 +30,16 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
it 'performs the smallest number of queries for job_artifacts' do
log = ActiveRecord::QueryRecorder.new { subject }
- # SELECT expired ci_job_artifacts
+ # SELECT expired ci_job_artifacts - 3 queries from each_batch
# PRELOAD projects, routes, project_statistics
# BEGIN
# INSERT into ci_deleted_objects
# DELETE loaded ci_job_artifacts
# DELETE security_findings -- for EE
# COMMIT
- expect(log.count).to be_within(1).of(8)
+ # SELECT next expired ci_job_artifacts
+
+ expect(log.count).to be_within(1).of(11)
end
end
@@ -162,13 +164,21 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
end
context 'when timeout happens' do
+ let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
+
before do
- stub_const('Ci::DestroyExpiredJobArtifactsService::LOOP_TIMEOUT', 1.second)
- allow_any_instance_of(described_class).to receive(:destroy_artifacts_batch) { true }
+ stub_const('Ci::DestroyExpiredJobArtifactsService::LOOP_TIMEOUT', 0.seconds)
+ stub_const('Ci::DestroyExpiredJobArtifactsService::BATCH_SIZE', 1)
+
+ second_artifact.job.pipeline.unlocked!
+ end
+
+ it 'destroys one artifact' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
end
- it 'returns false and does not continue destroying' do
- is_expected.to be_falsy
+ it 'reports the number of destroyed artifacts' do
+ is_expected.to eq(1)
end
end
@@ -182,13 +192,13 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
- it 'raises an error and does not continue destroying' do
- is_expected.to be_falsy
- end
-
it 'destroys one artifact' do
expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
end
+
+ it 'reports the number of destroyed artifacts' do
+ is_expected.to eq(1)
+ end
end
context 'when there are no artifacts' do
@@ -199,6 +209,10 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
it 'does not raise error' do
expect { subject }.not_to raise_error
end
+
+ it 'reports the number of destroyed artifacts' do
+ is_expected.to eq(0)
+ end
end
context 'when there are artifacts more than batch sizes' do
@@ -213,33 +227,9 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
it 'destroys all expired artifacts' do
expect { subject }.to change { Ci::JobArtifact.count }.by(-2)
end
- end
-
- context 'when artifact is a pipeline artifact' do
- context 'when artifacts are expired' do
- let!(:pipeline_artifact_1) { create(:ci_pipeline_artifact, expire_at: 1.week.ago) }
- let!(:pipeline_artifact_2) { create(:ci_pipeline_artifact, expire_at: 1.week.ago) }
- before do
- [pipeline_artifact_1, pipeline_artifact_2].each { |pipeline_artifact| pipeline_artifact.pipeline.unlocked! }
- end
-
- it 'destroys pipeline artifacts' do
- expect { subject }.to change { Ci::PipelineArtifact.count }.by(-2)
- end
- end
-
- context 'when artifacts are not expired' do
- let!(:pipeline_artifact_1) { create(:ci_pipeline_artifact, expire_at: 2.days.from_now) }
- let!(:pipeline_artifact_2) { create(:ci_pipeline_artifact, expire_at: 2.days.from_now) }
-
- before do
- [pipeline_artifact_1, pipeline_artifact_2].each { |pipeline_artifact| pipeline_artifact.pipeline.unlocked! }
- end
-
- it 'does not destroy pipeline artifacts' do
- expect { subject }.not_to change { Ci::PipelineArtifact.count }
- end
+ it 'reports the number of destroyed artifacts' do
+ is_expected.to eq(2)
end
end
@@ -255,16 +245,4 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
end
end
end
-
- describe '.destroy_job_artifacts_batch' do
- it 'returns a falsy value without artifacts' do
- expect(service.send(:destroy_job_artifacts_batch)).to be_falsy
- end
- end
-
- describe '.destroy_pipeline_artifacts_batch' do
- it 'returns a falsy value without artifacts' do
- expect(service.send(:destroy_pipeline_artifacts_batch)).to be_falsy
- end
- end
end
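
The reworked expectations above describe a service that deletes expired artifacts in fixed-size batches, stops at a loop limit or timeout, and returns the number of destroyed records rather than a boolean. A framework-free sketch of that control flow; the class name, constants, and the Array#shift-based "deletion" are illustrative assumptions, not the real service:

class BatchedDestroyer
  BATCH_SIZE = 100      # assumed value, for illustration
  LOOP_LIMIT = 1000     # assumed value, for illustration
  LOOP_TIMEOUT = 300    # seconds, assumed

  # expired_records: any object responding to #shift(n), e.g. an Array
  def initialize(expired_records)
    @expired_records = expired_records
  end

  def execute
    destroyed = 0
    deadline = Time.now + LOOP_TIMEOUT

    LOOP_LIMIT.times do
      break if Time.now > deadline

      batch = @expired_records.shift(BATCH_SIZE)
      break if batch.empty?

      destroyed += batch.size # the real service deletes the rows here
    end

    destroyed # returned count, as in "reports the number of destroyed artifacts"
  end
end

BatchedDestroyer.new([1, 2, 3]).execute # => 3
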
diff --git a/spec/services/ci/pipelines/create_artifact_service_spec.rb b/spec/services/ci/pipeline_artifacts/coverage_report_service_spec.rb
index 4e9248d9d1a..b48ea70aa4c 100644
--- a/spec/services/ci/pipelines/create_artifact_service_spec.rb
+++ b/spec/services/ci/pipeline_artifacts/coverage_report_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ci::Pipelines::CreateArtifactService do
+RSpec.describe ::Ci::PipelineArtifacts::CoverageReportService do
describe '#execute' do
subject { described_class.new.execute(pipeline) }
diff --git a/spec/services/ci/pipeline_artifacts/destroy_expired_artifacts_service_spec.rb b/spec/services/ci/pipeline_artifacts/destroy_expired_artifacts_service_spec.rb
new file mode 100644
index 00000000000..ac1a590face
--- /dev/null
+++ b/spec/services/ci/pipeline_artifacts/destroy_expired_artifacts_service_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::PipelineArtifacts::DestroyExpiredArtifactsService do
+ let(:service) { described_class.new }
+
+ describe '.execute' do
+ subject { service.execute }
+
+ context 'when timeout happens' do
+ before do
+ stub_const('Ci::PipelineArtifacts::DestroyExpiredArtifactsService::LOOP_TIMEOUT', 0.1.seconds)
+ allow(service).to receive(:destroy_artifacts_batch) { true }
+ end
+
+ it 'returns 0 and does not continue destroying' do
+ is_expected.to eq(0)
+ end
+ end
+
+ context 'when there are no artifacts' do
+ it 'does not raise error' do
+ expect { subject }.not_to raise_error
+ end
+ end
+
+ context 'when the loop limit is reached' do
+ before do
+ stub_const('::Ci::PipelineArtifacts::DestroyExpiredArtifactsService::LOOP_LIMIT', 1)
+ stub_const('::Ci::PipelineArtifacts::DestroyExpiredArtifactsService::BATCH_SIZE', 1)
+
+ create_list(:ci_pipeline_artifact, 2, expire_at: 1.week.ago)
+ end
+
+ it 'destroys one artifact' do
+ expect { subject }.to change { Ci::PipelineArtifact.count }.by(-1)
+ end
+
+ it 'reports the number of destroyed artifacts' do
+ is_expected.to eq(1)
+ end
+ end
+
+ context 'when there are artifacts more than batch sizes' do
+ before do
+ stub_const('Ci::PipelineArtifacts::DestroyExpiredArtifactsService::BATCH_SIZE', 1)
+
+ create_list(:ci_pipeline_artifact, 2, expire_at: 1.week.ago)
+ end
+
+ it 'destroys all expired artifacts' do
+ expect { subject }.to change { Ci::PipelineArtifact.count }.by(-2)
+ end
+
+ it 'reports the number of destroyed artifacts' do
+ is_expected.to eq(2)
+ end
+ end
+
+ context 'when artifacts are not expired' do
+ before do
+ create(:ci_pipeline_artifact, expire_at: 2.days.from_now)
+ end
+
+ it 'does not destroy pipeline artifacts' do
+ expect { subject }.not_to change { Ci::PipelineArtifact.count }
+ end
+
+ it 'reports the number of destroyed artifacts' do
+ is_expected.to eq(0)
+ end
+ end
+ end
+
+ describe '.destroy_artifacts_batch' do
+ it 'returns a falsy value without artifacts' do
+ expect(service.send(:destroy_artifacts_batch)).to be_falsy
+ end
+ end
+end
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_deploy_is_delayed_and_needs_test.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_deploy_is_delayed_and_needs_test.yml
new file mode 100644
index 00000000000..b729efaeab2
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_deploy_is_delayed_and_needs_test.yml
@@ -0,0 +1,41 @@
+config:
+ build:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ when: delayed
+ start_in: 5 seconds
+ needs: [test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: failed
+ stages:
+ build: failed
+ test: skipped
+ deploy: skipped
+ jobs:
+ build: failed
+ test: skipped
+ deploy: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_deploy_is_manual_and_needs_test.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_deploy_is_manual_and_needs_test.yml
new file mode 100644
index 00000000000..479fc8fd72d
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_deploy_is_manual_and_needs_test.yml
@@ -0,0 +1,40 @@
+config:
+ build:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ when: manual
+ needs: [test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: failed
+ stages:
+ build: failed
+ test: skipped
+ deploy: skipped
+ jobs:
+ build: failed
+ test: skipped
+ deploy: skipped
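
Both new YAML fixtures above follow the same declarative shape: a config section with the pipeline definition, an init block with the expected initial statuses, and a transitions list of events plus the statuses expected afterwards. A small Ruby sketch that loads one of them and reads those expectations (the relative path assumes the working directory is the repository root):

require 'yaml'

path = 'spec/services/ci/pipeline_processing/test_cases/' \
       'dag_build_fails_deploy_is_manual_and_needs_test.yml'
test_case = YAML.safe_load(File.read(path))

test_case.fetch('init').dig('expect', 'pipeline')    # => "pending"

first_transition = test_case.fetch('transitions').first
first_transition.fetch('event')                      # => "drop"
first_transition.dig('expect', 'stages', 'deploy')   # => "skipped"
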
diff --git a/spec/services/ci/pipeline_trigger_service_spec.rb b/spec/services/ci/pipeline_trigger_service_spec.rb
index ac077e3c30e..0cc66e67b91 100644
--- a/spec/services/ci/pipeline_trigger_service_spec.rb
+++ b/spec/services/ci/pipeline_trigger_service_spec.rb
@@ -3,14 +3,16 @@
require 'spec_helper'
RSpec.describe Ci::PipelineTriggerService do
- let(:project) { create(:project, :repository) }
+ include AfterNextHelpers
+
+ let_it_be(:project) { create(:project, :repository) }
before do
stub_ci_pipeline_to_return_yaml_file
end
describe '#execute' do
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
let(:result) { described_class.new(project, user, params).execute }
before do
@@ -29,8 +31,8 @@ RSpec.describe Ci::PipelineTriggerService do
end
end
- context 'when params have an existsed trigger token' do
- context 'when params have an existsed ref' do
+ context 'when params have an existing trigger token' do
+ context 'when params have an existing ref' do
let(:params) { { token: trigger.token, ref: 'master', variables: nil } }
it 'triggers a pipeline' do
@@ -45,9 +47,7 @@ RSpec.describe Ci::PipelineTriggerService do
context 'when commit message has [ci skip]' do
before do
- allow_next_instance_of(Ci::Pipeline) do |instance|
- allow(instance).to receive(:git_commit_message) { '[ci skip]' }
- end
+ allow_next(Ci::Pipeline).to receive(:git_commit_message) { '[ci skip]' }
end
it 'ignores [ci skip] and create as general' do
diff --git a/spec/services/ci/play_build_service_spec.rb b/spec/services/ci/play_build_service_spec.rb
index c9ecbad3167..00c6de7681d 100644
--- a/spec/services/ci/play_build_service_spec.rb
+++ b/spec/services/ci/play_build_service_spec.rb
@@ -72,6 +72,31 @@ RSpec.describe Ci::PlayBuildService, '#execute' do
expect(build.reload.job_variables.map(&:key)).to contain_exactly('first', 'second')
end
+
+ context 'when user defined variables are restricted' do
+ before do
+ project.update!(restrict_user_defined_variables: true)
+ end
+
+ context 'when user is maintainer' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'assigns the variables to the build' do
+ service.execute(build, job_variables)
+
+ expect(build.reload.job_variables.map(&:key)).to contain_exactly('first', 'second')
+ end
+ end
+
+ context 'when user is developer' do
+ it 'raises an error' do
+ expect { service.execute(build, job_variables) }
+ .to raise_error Gitlab::Access::AccessDeniedError
+ end
+ end
+ end
end
end
diff --git a/spec/services/ci/process_build_service_spec.rb b/spec/services/ci/process_build_service_spec.rb
index a6e8732f5ff..6d2af81a6e8 100644
--- a/spec/services/ci/process_build_service_spec.rb
+++ b/spec/services/ci/process_build_service_spec.rb
@@ -124,24 +124,46 @@ RSpec.describe Ci::ProcessBuildService, '#execute' do
end
context 'when build is scheduled with DAG' do
+ using RSpec::Parameterized::TableSyntax
+
let(:pipeline) { create(:ci_pipeline, ref: 'master', project: project) }
- let!(:build) { create(:ci_build, :created, when: :on_success, pipeline: pipeline, scheduling_type: :dag) }
+ let!(:build) { create(:ci_build, :created, when: build_when, pipeline: pipeline, scheduling_type: :dag) }
let!(:other_build) { create(:ci_build, :created, when: :on_success, pipeline: pipeline) }
let!(:build_on_other_build) { create(:ci_build_need, build: build, name: other_build.name) }
- context 'when current status is success' do
- let(:current_status) { 'success' }
+ where(:build_when, :current_status, :after_status) do
+ :on_success | 'success' | 'pending'
+ :on_success | 'skipped' | 'skipped'
+ :manual | 'success' | 'manual'
+ :manual | 'skipped' | 'skipped'
+ :delayed | 'success' | 'manual'
+ :delayed | 'skipped' | 'skipped'
+ end
- it 'enqueues the build' do
- expect { subject }.to change { build.status }.to('pending')
+ with_them do
+ it 'transitions the build to the expected status' do
+ expect { subject }.to change { build.status }.to(after_status)
end
end
- context 'when current status is skipped' do
- let(:current_status) { 'skipped' }
+ context 'when FF skip_dag_manual_and_delayed_jobs is disabled' do
+ before do
+ stub_feature_flags(skip_dag_manual_and_delayed_jobs: false)
+ end
- it 'skips the build' do
- expect { subject }.to change { build.status }.to('skipped')
+ where(:build_when, :current_status, :after_status) do
+ :on_success | 'success' | 'pending'
+ :on_success | 'skipped' | 'skipped'
+ :manual | 'success' | 'manual'
+ :manual | 'skipped' | 'manual'
+ :delayed | 'success' | 'manual'
+ :delayed | 'skipped' | 'manual'
+ end
+
+ with_them do
+ it 'transitions the build to the expected status' do
+ expect { subject }.to change { build.status }.to(after_status)
+ end
end
end
end
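
The rewritten DAG examples above use RSpec::Parameterized::TableSyntax so one example body runs once per table row, with each column exposed as a method inside with_them. A minimal standalone illustration of that syntax; it assumes the rspec-parameterized gem is already loaded by the spec harness (as GitLab's spec_helper does), and the table values are arbitrary:

RSpec.describe 'table syntax illustration' do
  using RSpec::Parameterized::TableSyntax

  where(:left, :right, :sum) do
    1 | 2 | 3
    4 | 5 | 9
  end

  with_them do
    it 'adds the operands' do
      expect(left + right).to eq(sum)
    end
  end
end
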
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index 81d56a0e42a..bdf60bb3fdc 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -206,6 +206,22 @@ RSpec.describe Ci::RetryBuildService do
expect(subsequent_build.reload).to be_created
expect(subsequent_bridge.reload).to be_created
end
+
+ it 'updates ownership for subsequent builds' do
+ expect { service.execute(build) }.to change { subsequent_build.reload.user }.to(user)
+ end
+
+ it 'updates ownership for subsequent bridges' do
+ expect { service.execute(build) }.to change { subsequent_bridge.reload.user }.to(user)
+ end
+
+ it 'does not cause n+1 when updating build ownership' do
+ control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { service.execute(build) }.count
+
+ create_list(:ci_build, 2, :skipped, stage_idx: build.stage_idx + 1, pipeline: pipeline, stage: 'deploy')
+
+ expect { service.execute(build) }.not_to exceed_all_query_limit(control_count)
+ end
end
context 'when pipeline has other builds' do
diff --git a/spec/services/ci/retry_pipeline_service_spec.rb b/spec/services/ci/retry_pipeline_service_spec.rb
index 526c2f39b46..3c6a99efbf8 100644
--- a/spec/services/ci/retry_pipeline_service_spec.rb
+++ b/spec/services/ci/retry_pipeline_service_spec.rb
@@ -64,6 +64,18 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
expect(build('spinach 1')).to be_created
expect(pipeline.reload).to be_running
end
+
+ it 'changes ownership of subsequent builds' do
+ expect(build('rspec 2').user).not_to eq(user)
+ expect(build('rspec 3').user).not_to eq(user)
+ expect(build('spinach 1').user).not_to eq(user)
+
+ service.execute(pipeline)
+
+ expect(build('rspec 2').user).to eq(user)
+ expect(build('rspec 3').user).to eq(user)
+ expect(build('spinach 1').user).to eq(user)
+ end
end
context 'when there is failed build present which was run on failure' do
@@ -161,6 +173,16 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
expect(build('rspec 2')).to be_created
expect(pipeline.reload).to be_running
end
+
+ it 'changes ownership of subsequent builds' do
+ expect(build('staging').user).not_to eq(user)
+ expect(build('rspec 2').user).not_to eq(user)
+
+ service.execute(pipeline)
+
+ expect(build('staging').user).to eq(user)
+ expect(build('rspec 2').user).to eq(user)
+ end
end
end
diff --git a/spec/services/ci/test_failure_history_service_spec.rb b/spec/services/ci/test_failure_history_service_spec.rb
index e858c85490d..d9c1c8dc3fa 100644
--- a/spec/services/ci/test_failure_history_service_spec.rb
+++ b/spec/services/ci/test_failure_history_service_spec.rb
@@ -22,19 +22,6 @@ RSpec.describe Ci::TestFailureHistoryService, :aggregate_failures do
expect(Ci::TestCaseFailure.count).to eq(2)
end
- context 'when feature flag for test failure history is disabled' do
- before do
- stub_feature_flags(test_failure_history: false)
- end
-
- it 'does not persist data' do
- execute_service
-
- expect(Ci::TestCase.count).to eq(0)
- expect(Ci::TestCaseFailure.count).to eq(0)
- end
- end
-
context 'when pipeline is not for the default branch' do
before do
pipeline.update_column(:ref, 'new-feature')
@@ -136,14 +123,6 @@ RSpec.describe Ci::TestFailureHistoryService, :aggregate_failures do
it { is_expected.to eq(true) }
end
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(test_failure_history: false)
- end
-
- it { is_expected.to eq(false) }
- end
-
context 'when pipeline is not equal to the project default branch' do
before do
pipeline.update_column(:ref, 'some-other-branch')
diff --git a/spec/services/ci/update_build_state_service_spec.rb b/spec/services/ci/update_build_state_service_spec.rb
index 3112e5dda1b..63190cc5d49 100644
--- a/spec/services/ci/update_build_state_service_spec.rb
+++ b/spec/services/ci/update_build_state_service_spec.rb
@@ -82,8 +82,9 @@ RSpec.describe Ci::UpdateBuildStateService do
let(:params) do
{
output: { checksum: 'crc32:12345678', bytesize: 123 },
+ state: 'failed',
failure_reason: 'script_failure',
- state: 'failed'
+ exit_code: 42
}
end
@@ -95,6 +96,15 @@ RSpec.describe Ci::UpdateBuildStateService do
expect(result.status).to eq 200
end
+ it 'updates the allow_failure flag' do
+ expect(build)
+ .to receive(:drop_with_exit_code!)
+ .with('script_failure', 42)
+ .and_call_original
+
+ subject.execute
+ end
+
it 'does not increment invalid trace metric' do
execute_with_stubbed_metrics!
@@ -115,6 +125,15 @@ RSpec.describe Ci::UpdateBuildStateService do
expect(build).to be_failed
end
+ it 'updates the allow_failure flag' do
+ expect(build)
+ .to receive(:drop_with_exit_code!)
+ .with('script_failure', 42)
+ .and_call_original
+
+ subject.execute
+ end
+
it 'responds with 200 OK status' do
result = subject.execute
diff --git a/spec/services/container_expiration_policies/cleanup_service_spec.rb b/spec/services/container_expiration_policies/cleanup_service_spec.rb
index 8438073ceb0..34f69d24141 100644
--- a/spec/services/container_expiration_policies/cleanup_service_spec.rb
+++ b/spec/services/container_expiration_policies/cleanup_service_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe ContainerExpirationPolicies::CleanupService do
it 'completely clean up the repository' do
expect(Projects::ContainerRepository::CleanupTagsService)
- .to receive(:new).with(project, nil, cleanup_tags_service_params).and_return(cleanup_tags_service)
+ .to receive(:new).with(project, nil, cleanup_tags_service_params).and_return(cleanup_tags_service)
expect(cleanup_tags_service).to receive(:execute).with(repository).and_return(status: :success)
response = subject
@@ -34,10 +34,14 @@ RSpec.describe ContainerExpirationPolicies::CleanupService do
end
context 'without a successful cleanup tags service execution' do
- it 'partially clean up the repository' do
+ let(:cleanup_tags_service_response) { { status: :error, message: 'timeout' } }
+
+ before do
expect(Projects::ContainerRepository::CleanupTagsService)
- .to receive(:new).and_return(double(execute: { status: :error, message: 'timeout' }))
+ .to receive(:new).and_return(double(execute: cleanup_tags_service_response))
+ end
+ it 'partially cleans up the repository' do
response = subject
aggregate_failures "checking the response and container repositories" do
@@ -49,6 +53,39 @@ RSpec.describe ContainerExpirationPolicies::CleanupService do
expect(repository.expiration_policy_completed_at).to eq(nil)
end
end
+
+ context 'with a truncated cleanup tags service response' do
+ let(:cleanup_tags_service_response) do
+ {
+ status: :error,
+ original_size: 1000,
+ before_truncate_size: 800,
+ after_truncate_size: 200,
+ before_delete_size: 100
+ }
+ end
+
+ it 'partially cleans up the repository' do
+ response = subject
+
+ aggregate_failures "checking the response and container repositories" do
+ expect(response.success?).to eq(true)
+ expect(response.payload)
+ .to include(
+ cleanup_status: :unfinished,
+ container_repository_id: repository.id,
+ cleanup_tags_service_original_size: 1000,
+ cleanup_tags_service_before_truncate_size: 800,
+ cleanup_tags_service_after_truncate_size: 200,
+ cleanup_tags_service_before_delete_size: 100
+ )
+ expect(ContainerRepository.waiting_for_cleanup.count).to eq(1)
+ expect(repository.reload.cleanup_unfinished?).to be_truthy
+ expect(repository.expiration_policy_started_at).not_to eq(nil)
+ expect(repository.expiration_policy_completed_at).to eq(nil)
+ end
+ end
+ end
end
context 'with no repository' do
diff --git a/spec/services/draft_notes/create_service_spec.rb b/spec/services/draft_notes/create_service_spec.rb
index f0291067777..9e084dbed1c 100644
--- a/spec/services/draft_notes/create_service_spec.rb
+++ b/spec/services/draft_notes/create_service_spec.rb
@@ -20,6 +20,23 @@ RSpec.describe DraftNotes::CreateService do
expect(draft.discussion_id).to be_nil
end
+ it 'tracks the start event when the draft is persisted' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_create_review_note_action)
+ .with(user: user)
+
+ draft = create_draft(note: 'This is a test')
+ expect(draft).to be_persisted
+ end
+
+ it 'does not track the start event when the draft is not persisted' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .not_to receive(:track_create_review_note_action)
+
+ draft = create_draft(note: 'Not a reply!', resolve_discussion: true)
+ expect(draft).not_to be_persisted
+ end
+
it 'cannot resolve when there is nothing to resolve' do
draft = create_draft(note: 'Not a reply!', resolve_discussion: true)
diff --git a/spec/services/draft_notes/publish_service_spec.rb b/spec/services/draft_notes/publish_service_spec.rb
index ae0c8113904..f83e91b683f 100644
--- a/spec/services/draft_notes/publish_service_spec.rb
+++ b/spec/services/draft_notes/publish_service_spec.rb
@@ -43,6 +43,13 @@ RSpec.describe DraftNotes::PublishService do
expect(result[:status]).to eq(:success)
end
+ it 'does not track the publish event' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .not_to receive(:track_publish_review_action)
+
+ publish(draft: drafts.first)
+ end
+
context 'commit_id is set' do
let(:commit_id) { commit.id }
@@ -74,6 +81,13 @@ RSpec.describe DraftNotes::PublishService do
expect { publish }.not_to change { DraftNote.count }
end
+ it 'does not track the publish event' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .not_to receive(:track_publish_review_action)
+
+ publish
+ end
+
it 'returns an error' do
result = publish
@@ -105,6 +119,14 @@ RSpec.describe DraftNotes::PublishService do
publish
end
+ it 'tracks the publish event' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_publish_review_action)
+ .with(user: user)
+
+ publish
+ end
+
context 'commit_id is set' do
let(:commit_id) { commit.id }
diff --git a/spec/services/feature_flags/create_service_spec.rb b/spec/services/feature_flags/create_service_spec.rb
index 2cd19000f99..e115d8098c9 100644
--- a/spec/services/feature_flags/create_service_spec.rb
+++ b/spec/services/feature_flags/create_service_spec.rb
@@ -34,6 +34,12 @@ RSpec.describe FeatureFlags::CreateService do
it 'does not create audit log' do
expect { subject }.not_to change { AuditEvent.count }
end
+
+ it 'does not sync the feature flag to Jira' do
+ expect(::JiraConnect::SyncFeatureFlagsWorker).not_to receive(:perform_async)
+
+ subject
+ end
end
context 'when feature flag is saved correctly' do
@@ -54,6 +60,24 @@ RSpec.describe FeatureFlags::CreateService do
expect { subject }.to change { Operations::FeatureFlag.count }.by(1)
end
+ it 'syncs the feature flag to Jira' do
+ expect(::JiraConnect::SyncFeatureFlagsWorker).to receive(:perform_async).with(Integer, Integer)
+
+ subject
+ end
+
+ context 'the feature flag is disabled' do
+ before do
+ stub_feature_flags(jira_sync_feature_flags: false)
+ end
+
+ it 'does not sync the feature flag to Jira' do
+ expect(::JiraConnect::SyncFeatureFlagsWorker).not_to receive(:perform_async)
+
+ subject
+ end
+ end
+
it 'creates audit event' do
expected_message = 'Created feature flag <strong>feature_flag</strong> '\
'with description <strong>"description"</strong>. '\
diff --git a/spec/services/feature_flags/update_service_spec.rb b/spec/services/feature_flags/update_service_spec.rb
index 66a75a2c24e..8c4055ddd9e 100644
--- a/spec/services/feature_flags/update_service_spec.rb
+++ b/spec/services/feature_flags/update_service_spec.rb
@@ -26,6 +26,24 @@ RSpec.describe FeatureFlags::UpdateService do
expect(subject[:status]).to eq(:success)
end
+ context 'the feature flag is disabled' do
+ before do
+ stub_feature_flags(jira_sync_feature_flags: false)
+ end
+
+ it 'does not sync the feature flag to Jira' do
+ expect(::JiraConnect::SyncFeatureFlagsWorker).not_to receive(:perform_async)
+
+ subject
+ end
+ end
+
+ it 'syncs the feature flag to Jira' do
+ expect(::JiraConnect::SyncFeatureFlagsWorker).to receive(:perform_async).with(Integer, Integer)
+
+ subject
+ end
+
it 'creates audit event with correct message' do
name_was = feature_flag.name
@@ -52,6 +70,12 @@ RSpec.describe FeatureFlags::UpdateService do
it 'does not create audit event' do
expect { subject }.not_to change { AuditEvent.count }
end
+
+ it 'does not sync the feature flag to Jira' do
+ expect(::JiraConnect::SyncFeatureFlagsWorker).not_to receive(:perform_async)
+
+ subject
+ end
end
context 'when user is reporter' do
diff --git a/spec/services/git/branch_push_service_spec.rb b/spec/services/git/branch_push_service_spec.rb
index c7bf006dab0..cc3ba21f002 100644
--- a/spec/services/git/branch_push_service_spec.rb
+++ b/spec/services/git/branch_push_service_spec.rb
@@ -554,7 +554,7 @@ RSpec.describe Git::BranchPushService, services: true do
end
describe "housekeeping" do
- let(:housekeeping) { Projects::HousekeepingService.new(project) }
+ let(:housekeeping) { Repositories::HousekeepingService.new(project) }
before do
# Flush any raw key-value data stored by the housekeeping code.
@@ -562,7 +562,7 @@ RSpec.describe Git::BranchPushService, services: true do
Gitlab::Redis::Queues.with { |conn| conn.flushall }
Gitlab::Redis::SharedState.with { |conn| conn.flushall }
- allow(Projects::HousekeepingService).to receive(:new).and_return(housekeeping)
+ allow(Repositories::HousekeepingService).to receive(:new).and_return(housekeeping)
end
after do
diff --git a/spec/services/groups/create_service_spec.rb b/spec/services/groups/create_service_spec.rb
index 4f5bc3a3d5a..f0cd42c1948 100644
--- a/spec/services/groups/create_service_spec.rb
+++ b/spec/services/groups/create_service_spec.rb
@@ -63,6 +63,10 @@ RSpec.describe Groups::CreateService, '#execute' do
end
it { is_expected.to be_persisted }
+
+ it 'adds an onboarding progress record' do
+ expect { subject }.to change(OnboardingProgress, :count).from(0).to(1)
+ end
end
context 'when user can not create a group' do
@@ -84,6 +88,10 @@ RSpec.describe Groups::CreateService, '#execute' do
end
it { is_expected.to be_persisted }
+
+ it 'does not add an onboarding progress record' do
+ expect { subject }.not_to change(OnboardingProgress, :count).from(0)
+ end
end
context 'as guest' do
diff --git a/spec/services/groups/destroy_service_spec.rb b/spec/services/groups/destroy_service_spec.rb
index e06f09d0463..2f9bb72939a 100644
--- a/spec/services/groups/destroy_service_spec.rb
+++ b/spec/services/groups/destroy_service_spec.rb
@@ -135,51 +135,120 @@ RSpec.describe Groups::DestroyService do
end
describe 'authorization updates', :sidekiq_inline do
- context 'shared groups' do
+ context 'for solo groups' do
+ context 'group is deleted' do
+ it 'updates project authorization' do
+ expect { destroy_group(group, user, false) }.to(
+ change { user.can?(:read_project, project) }.from(true).to(false))
+ end
+
+ it 'does not make use of a specific service to update project_authorizations records' do
+ expect(UserProjectAccessChangedService)
+ .not_to receive(:new).with(group.user_ids_for_project_authorizations)
+
+ destroy_group(group, user, false)
+ end
+ end
+ end
+
+ context 'for shared groups within different hierarchies' do
+ let(:shared_with_group) { group }
let!(:shared_group) { create(:group, :private) }
let!(:shared_group_child) { create(:group, :private, parent: shared_group) }
+ let!(:shared_group_user) { create(:user) }
let!(:project) { create(:project, group: shared_group) }
let!(:project_child) { create(:project, group: shared_group_child) }
before do
- create(:group_group_link, shared_group: shared_group, shared_with_group: group)
- group.refresh_members_authorized_projects
+ shared_group.add_user(shared_group_user, Gitlab::Access::OWNER)
+
+ create(:group_group_link, shared_group: shared_group, shared_with_group: shared_with_group)
+ shared_with_group.refresh_members_authorized_projects
+ end
+
+ context 'the shared group is deleted' do
+ it 'updates project authorization' do
+ expect(shared_group_user.can?(:read_project, project)).to eq(true)
+ expect(shared_group_user.can?(:read_project, project_child)).to eq(true)
+
+ destroy_group(shared_group, shared_group_user, false)
+
+ expect(shared_group_user.can?(:read_project, project)).to eq(false)
+ expect(shared_group_user.can?(:read_project, project_child)).to eq(false)
+ end
+
+ it 'does not make use of a specific service to update project_authorizations records' do
+ expect(UserProjectAccessChangedService)
+ .not_to receive(:new).with(shared_group.user_ids_for_project_authorizations).and_call_original
+
+ destroy_group(shared_group, shared_group_user, false)
+ end
end
- it 'updates project authorization' do
- expect(user.can?(:read_project, project)).to eq(true)
- expect(user.can?(:read_project, project_child)).to eq(true)
+ context 'the shared_with group is deleted' do
+ it 'updates project authorization' do
+ expect(user.can?(:read_project, project)).to eq(true)
+ expect(user.can?(:read_project, project_child)).to eq(true)
- destroy_group(group, user, false)
+ destroy_group(shared_with_group, user, false)
- expect(user.can?(:read_project, project)).to eq(false)
- expect(user.can?(:read_project, project_child)).to eq(false)
+ expect(user.can?(:read_project, project)).to eq(false)
+ expect(user.can?(:read_project, project_child)).to eq(false)
+ end
+
+ it 'makes use of a specific service to update project_authorizations records' do
+ expect(UserProjectAccessChangedService)
+ .to receive(:new).with(shared_with_group.user_ids_for_project_authorizations).and_call_original
+
+ destroy_group(shared_with_group, user, false)
+ end
end
end
- context 'shared groups in the same group hierarchy' do
- let!(:subgroup) { create(:group, :private, parent: group) }
- let!(:subgroup_user) { create(:user) }
+ context 'for shared groups in the same group hierarchy' do
+ let(:shared_group) { group }
+ let(:shared_with_group) { nested_group }
+ let!(:shared_with_group_user) { create(:user) }
before do
- subgroup.add_user(subgroup_user, Gitlab::Access::MAINTAINER)
+ shared_with_group.add_user(shared_with_group_user, Gitlab::Access::MAINTAINER)
- create(:group_group_link, shared_group: group, shared_with_group: subgroup)
- subgroup.refresh_members_authorized_projects
+ create(:group_group_link, shared_group: shared_group, shared_with_group: shared_with_group)
+ shared_with_group.refresh_members_authorized_projects
end
- context 'group is deleted' do
+ context 'the shared group is deleted' do
it 'updates project authorization' do
- expect { destroy_group(group, user, false) }.to(
- change { subgroup_user.can?(:read_project, project) }.from(true).to(false))
+ expect { destroy_group(shared_group, user, false) }.to(
+ change { shared_with_group_user.can?(:read_project, project) }.from(true).to(false))
+ end
+
+ it 'does not make use of a specific service to update project authorizations' do
+ # Due to the recursive nature of `Groups::DestroyService`, `UserProjectAccessChangedService`
+ # will still be executed for the nested group, as it falls under the same hierarchy
+ # and hence we need to account for this scenario.
+ expect(UserProjectAccessChangedService)
+ .to receive(:new).with(shared_with_group.user_ids_for_project_authorizations).and_call_original
+
+ expect(UserProjectAccessChangedService)
+ .not_to receive(:new).with(shared_group.user_ids_for_project_authorizations)
+
+ destroy_group(shared_group, user, false)
end
end
- context 'subgroup is deleted' do
+ context 'the shared_with group is deleted' do
it 'updates project authorization' do
- expect { destroy_group(subgroup, user, false) }.to(
- change { subgroup_user.can?(:read_project, project) }.from(true).to(false))
+ expect { destroy_group(shared_with_group, user, false) }.to(
+ change { shared_with_group_user.can?(:read_project, project) }.from(true).to(false))
+ end
+
+ it 'makes use of a specific service to update project authorizations' do
+ expect(UserProjectAccessChangedService)
+ .to receive(:new).with(shared_with_group.user_ids_for_project_authorizations).and_call_original
+
+ destroy_group(shared_with_group, user, false)
end
end
end
diff --git a/spec/services/incident_management/pager_duty/process_webhook_service_spec.rb b/spec/services/incident_management/pager_duty/process_webhook_service_spec.rb
index 4c8aebe5fe2..0caffb16f42 100644
--- a/spec/services/incident_management/pager_duty/process_webhook_service_spec.rb
+++ b/spec/services/incident_management/pager_duty/process_webhook_service_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe IncidentManagement::PagerDuty::ProcessWebhookService do
let(:webhook_payload) { Gitlab::Json.parse(fixture_file('pager_duty/webhook_incident_trigger.json')) }
let(:token) { nil }
- subject(:execute) { described_class.new(project, nil, webhook_payload).execute(token) }
+ subject(:execute) { described_class.new(project, webhook_payload).execute(token) }
context 'when PagerDuty webhook setting is active' do
let_it_be(:incident_management_setting) { create(:project_incident_management_setting, project: project, pagerduty_active: true) }
diff --git a/spec/services/issuable/bulk_update_service_spec.rb b/spec/services/issuable/bulk_update_service_spec.rb
index f2bc4f717af..79543fe9f5d 100644
--- a/spec/services/issuable/bulk_update_service_spec.rb
+++ b/spec/services/issuable/bulk_update_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe Issuable::BulkUpdateService do
- let(:user) { create(:user) }
- let(:project) { create(:project, :repository, namespace: user.namespace) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository, namespace: user.namespace) }
def bulk_update(issuables, extra_params = {})
bulk_update_params = extra_params
@@ -31,6 +31,23 @@ RSpec.describe Issuable::BulkUpdateService do
end
end
+ shared_examples 'updates iterations' do
+ it 'succeeds' do
+ result = bulk_update(issuables, sprint_id: iteration.id)
+
+ expect(result.success?).to be_truthy
+ expect(result.payload[:count]).to eq(issuables.count)
+ end
+
+ it 'updates the iteration of the issuables' do
+ bulk_update(issuables, sprint_id: iteration.id)
+
+ issuables.each do |issuable|
+ expect(issuable.reload.iteration).to eq(iteration)
+ end
+ end
+ end
+
shared_examples 'updating labels' do
def create_issue_with_labels(labels)
create(:labeled_issue, project: project, labels: labels)
@@ -233,6 +250,21 @@ RSpec.describe Issuable::BulkUpdateService do
it_behaves_like 'updates milestones'
end
+ describe 'updating iterations' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:issuables) { [create(:issue, project: project)] }
+ let_it_be(:iteration) { create(:iteration, group: group) }
+
+ let(:parent) { project }
+
+ before do
+ group.add_reporter(user)
+ end
+
+ it_behaves_like 'updates iterations'
+ end
+
describe 'updating labels' do
let(:bug) { create(:label, project: project) }
let(:regression) { create(:label, project: project) }
@@ -283,7 +315,7 @@ RSpec.describe Issuable::BulkUpdateService do
end
context 'with issuables at a group level' do
- let(:group) { create(:group) }
+ let_it_be(:group) { create(:group) }
let(:parent) { group }
before do
@@ -315,6 +347,19 @@ RSpec.describe Issuable::BulkUpdateService do
end
end
+ describe 'updating iterations' do
+ let_it_be(:iteration) { create(:iteration, group: group) }
+ let_it_be(:project) { create(:project, :repository, group: group) }
+
+ context 'with issues' do
+ let_it_be(:issue1) { create(:issue, project: project) }
+ let_it_be(:issue2) { create(:issue, project: project) }
+ let_it_be(:issuables) { [issue1, issue2] }
+
+ it_behaves_like 'updates iterations'
+ end
+ end
+
describe 'updating labels' do
let(:project) { create(:project, :repository, group: group) }
let(:bug) { create(:group_label, group: group) }
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index 9076fb11c9b..dc545f57d23 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -112,10 +112,14 @@ RSpec.describe Issues::CloseService do
end
context "closed by a merge request", :sidekiq_might_not_need_inline do
- it 'mentions closure via a merge request' do
+ subject(:close_issue) do
perform_enqueued_jobs do
described_class.new(project, user).close_issue(issue, closed_via: closing_merge_request)
end
+ end
+
+ it 'mentions closure via a merge request' do
+ close_issue
email = ActionMailer::Base.deliveries.last
@@ -124,12 +128,15 @@ RSpec.describe Issues::CloseService do
expect(email.body.parts.map(&:body)).to all(include(closing_merge_request.to_reference))
end
+ it_behaves_like 'records an onboarding progress action', :issue_auto_closed do
+ let(:namespace) { project.namespace }
+ end
+
context 'when user cannot read merge request' do
it 'does not mention merge request' do
project.project_feature.update_attribute(:repository_access_level, ProjectFeature::DISABLED)
- perform_enqueued_jobs do
- described_class.new(project, user).close_issue(issue, closed_via: closing_merge_request)
- end
+
+ close_issue
email = ActionMailer::Base.deliveries.last
body_text = email.body.parts.map(&:body).join(" ")
@@ -141,13 +148,11 @@ RSpec.describe Issues::CloseService do
end
context 'updating `metrics.first_mentioned_in_commit_at`' do
- subject { described_class.new(project, user).close_issue(issue, closed_via: closing_merge_request) }
-
context 'when `metrics.first_mentioned_in_commit_at` is not set' do
it 'uses the first commit authored timestamp' do
expected = closing_merge_request.commits.first.authored_date
- subject
+ close_issue
expect(issue.metrics.first_mentioned_in_commit_at).to eq(expected)
end
@@ -159,7 +164,7 @@ RSpec.describe Issues::CloseService do
end
it 'does not update the metrics' do
- expect { subject }.not_to change { issue.metrics.first_mentioned_in_commit_at }
+ expect { close_issue }.not_to change { issue.metrics.first_mentioned_in_commit_at }
end
end
@@ -167,7 +172,7 @@ RSpec.describe Issues::CloseService do
let(:closing_merge_request) { create(:merge_request, :without_diffs, source_project: project) }
it 'does not update the metrics' do
- subject
+ close_issue
expect(issue.metrics.first_mentioned_in_commit_at).to be_nil
end
@@ -206,7 +211,7 @@ RSpec.describe Issues::CloseService do
end
context "valid params" do
- def close_issue
+ subject(:close_issue) do
perform_enqueued_jobs do
described_class.new(project, user).close_issue(issue)
end
@@ -290,6 +295,8 @@ RSpec.describe Issues::CloseService do
close_issue
end
+
+ it_behaves_like 'does not record an onboarding progress action'
end
context 'when issue is not confidential' do
diff --git a/spec/services/issues/export_csv_service_spec.rb b/spec/services/issues/export_csv_service_spec.rb
index 8072b7a478e..fd1bcf82ccd 100644
--- a/spec/services/issues/export_csv_service_spec.rb
+++ b/spec/services/issues/export_csv_service_spec.rb
@@ -20,7 +20,9 @@ RSpec.describe Issues::ExportCsvService do
end
it 'renders with a target filesize' do
- expect(subject.csv_builder).to receive(:render).with(described_class::TARGET_FILESIZE)
+ expect_next_instance_of(CsvBuilder) do |csv_builder|
+ expect(csv_builder).to receive(:render).with(described_class::TARGET_FILESIZE).once
+ end
subject.email(user)
end
diff --git a/spec/services/jira_connect/sync_service_spec.rb b/spec/services/jira_connect/sync_service_spec.rb
index 4b434348146..edd0bad70f5 100644
--- a/spec/services/jira_connect/sync_service_spec.rb
+++ b/spec/services/jira_connect/sync_service_spec.rb
@@ -45,11 +45,11 @@ RSpec.describe JiraConnect::SyncService do
it 'logs the response as an error' do
expect_next(client).to store_info([
{ 'errorMessages' => ['some error message'] },
- { 'rejectedBuilds' => ['x'] }
+ { 'errorMessages' => ['x'] }
])
expect_log(:error, { 'errorMessages' => ['some error message'] })
- expect_log(:error, { 'rejectedBuilds' => ['x'] })
+ expect_log(:error, { 'errorMessages' => ['x'] })
subject
end
diff --git a/spec/services/jira_connect_subscriptions/create_service_spec.rb b/spec/services/jira_connect_subscriptions/create_service_spec.rb
index 9750c671fa2..5f467a07a78 100644
--- a/spec/services/jira_connect_subscriptions/create_service_spec.rb
+++ b/spec/services/jira_connect_subscriptions/create_service_spec.rb
@@ -49,18 +49,6 @@ RSpec.describe JiraConnectSubscriptions::CreateService do
subject
end
-
- context 'when the jira_connect_full_namespace_sync feature flag is disabled' do
- before do
- stub_feature_flags(jira_connect_full_namespace_sync: false)
- end
-
- specify do
- expect(JiraConnect::SyncProjectWorker).not_to receive(:bulk_perform_in_with_contexts)
-
- subject
- end
- end
end
end
diff --git a/spec/services/members/create_service_spec.rb b/spec/services/members/create_service_spec.rb
index e8a4a798b20..50efee9f43c 100644
--- a/spec/services/members/create_service_spec.rb
+++ b/spec/services/members/create_service_spec.rb
@@ -2,26 +2,41 @@
require 'spec_helper'
-RSpec.describe Members::CreateService do
- let_it_be(:project) { create(:project) }
+RSpec.describe Members::CreateService, :clean_gitlab_redis_shared_state, :sidekiq_inline do
+ let_it_be(:source) { create(:project) }
let_it_be(:user) { create(:user) }
- let_it_be(:project_user) { create(:user) }
- let_it_be(:user_ids) { project_user.id.to_s }
+ let_it_be(:member) { create(:user) }
+ let_it_be(:user_ids) { member.id.to_s }
let_it_be(:access_level) { Gitlab::Access::GUEST }
let(:params) { { user_ids: user_ids, access_level: access_level } }
- subject(:execute_service) { described_class.new(user, params).execute(project) }
+ subject(:execute_service) { described_class.new(user, params).execute(source) }
before do
- project.add_maintainer(user)
- allow(Namespaces::OnboardingUserAddedWorker).to receive(:idempotent?).and_return(false)
+ if source.is_a?(Project)
+ source.add_maintainer(user)
+ OnboardingProgress.onboard(source.namespace)
+ else
+ source.add_owner(user)
+ OnboardingProgress.onboard(source)
+ end
end
context 'when passing valid parameters' do
it 'adds a user to members' do
expect(execute_service[:status]).to eq(:success)
- expect(project.users).to include project_user
- expect(Namespaces::OnboardingUserAddedWorker.jobs.last['args'][0]).to eq(project.id)
+ expect(source.users).to include member
+ expect(OnboardingProgress.completed?(source.namespace, :user_added)).to be(true)
+ end
+
+ context 'when executing on a group' do
+ let_it_be(:source) { create(:group) }
+
+ it 'adds a user to members' do
+ expect(execute_service[:status]).to eq(:success)
+ expect(source.users).to include member
+ expect(OnboardingProgress.completed?(source, :user_added)).to be(true)
+ end
end
end
@@ -31,8 +46,8 @@ RSpec.describe Members::CreateService do
it 'does not add a member' do
expect(execute_service[:status]).to eq(:error)
expect(execute_service[:message]).to be_present
- expect(project.users).not_to include project_user
- expect(Namespaces::OnboardingUserAddedWorker.jobs.size).to eq(0)
+ expect(source.users).not_to include member
+ expect(OnboardingProgress.completed?(source.namespace, :user_added)).to be(false)
end
end
@@ -42,8 +57,8 @@ RSpec.describe Members::CreateService do
it 'limits the number of users to 100' do
expect(execute_service[:status]).to eq(:error)
expect(execute_service[:message]).to be_present
- expect(project.users).not_to include project_user
- expect(Namespaces::OnboardingUserAddedWorker.jobs.size).to eq(0)
+ expect(source.users).not_to include member
+ expect(OnboardingProgress.completed?(source.namespace, :user_added)).to be(false)
end
end
@@ -52,19 +67,19 @@ RSpec.describe Members::CreateService do
it 'does not add a member' do
expect(execute_service[:status]).to eq(:error)
- expect(execute_service[:message]).to include("#{project_user.username}: Access level is not included in the list")
- expect(project.users).not_to include project_user
- expect(Namespaces::OnboardingUserAddedWorker.jobs.size).to eq(0)
+ expect(execute_service[:message]).to include("#{member.username}: Access level is not included in the list")
+ expect(source.users).not_to include member
+ expect(OnboardingProgress.completed?(source.namespace, :user_added)).to be(false)
end
end
context 'when passing an existing invite user id' do
- let(:user_ids) { create(:project_member, :invited, project: project).invite_email }
+ let(:user_ids) { create(:project_member, :invited, project: source).invite_email }
it 'does not add a member' do
expect(execute_service[:status]).to eq(:error)
expect(execute_service[:message]).to eq('Invite email has already been taken')
- expect(Namespaces::OnboardingUserAddedWorker.jobs.size).to eq(0)
+ expect(OnboardingProgress.completed?(source.namespace, :user_added)).to be(false)
end
end
end
diff --git a/spec/services/merge_requests/after_create_service_spec.rb b/spec/services/merge_requests/after_create_service_spec.rb
index 69bab3b1ea4..9ae310d8cee 100644
--- a/spec/services/merge_requests/after_create_service_spec.rb
+++ b/spec/services/merge_requests/after_create_service_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe MergeRequests::AfterCreateService do
+ include AfterNextHelpers
+
let_it_be(:merge_request) { create(:merge_request) }
subject(:after_create_service) do
@@ -27,6 +29,14 @@ RSpec.describe MergeRequests::AfterCreateService do
execute_service
end
+ it 'calls the merge request activity counter' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_create_mr_action)
+ .with(user: merge_request.author)
+
+ execute_service
+ end
+
it 'creates a new merge request notification' do
expect(notification_service)
.to receive(:new_merge_request).with(merge_request, merge_request.author)
@@ -56,11 +66,15 @@ RSpec.describe MergeRequests::AfterCreateService do
execute_service
end
- it 'records a namespace onboarding progress action' do
- expect(NamespaceOnboardingAction).to receive(:create_action)
- .with(merge_request.target_project.namespace, :merge_request_created).and_call_original
+ it 'registers an onboarding progress action' do
+ OnboardingProgress.onboard(merge_request.target_project.namespace)
+
+ expect_next(OnboardingProgressService, merge_request.target_project.namespace)
+ .to receive(:execute).with(action: :merge_request_created).and_call_original
+
+ execute_service
- expect { execute_service }.to change(NamespaceOnboardingAction, :count).by(1)
+ expect(OnboardingProgress.completed?(merge_request.target_project.namespace, :merge_request_created)).to be(true)
end
end
end
diff --git a/spec/services/merge_requests/cleanup_refs_service_spec.rb b/spec/services/merge_requests/cleanup_refs_service_spec.rb
index 38c0e204e54..a1822a4d5ba 100644
--- a/spec/services/merge_requests/cleanup_refs_service_spec.rb
+++ b/spec/services/merge_requests/cleanup_refs_service_spec.rb
@@ -91,6 +91,26 @@ RSpec.describe MergeRequests::CleanupRefsService do
it_behaves_like 'service that does not clean up merge request refs'
end
+ context 'when a git error is raised' do
+ context 'Gitlab::Git::Repository::GitError' do
+ before do
+ allow(merge_request.project.repository).to receive(:delete_refs).and_raise(Gitlab::Git::Repository::GitError)
+ end
+
+ it_behaves_like 'service that does not clean up merge request refs'
+ end
+
+ context 'Gitlab::Git::CommandError' do
+ before do
+ allow_next_instance_of(Gitlab::Git::KeepAround) do |keep_around|
+ expect(keep_around).to receive(:kept_around?).and_raise(Gitlab::Git::CommandError)
+ end
+ end
+
+ it_behaves_like 'service that does not clean up merge request refs'
+ end
+ end
+
context 'when cleanup schedule fails to update' do
before do
allow(merge_request.cleanup_schedule).to receive(:update).and_return(false)
diff --git a/spec/services/merge_requests/close_service_spec.rb b/spec/services/merge_requests/close_service_spec.rb
index 67fb4eaade5..48f56b3ec68 100644
--- a/spec/services/merge_requests/close_service_spec.rb
+++ b/spec/services/merge_requests/close_service_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe MergeRequests::CloseService do
describe '#execute' do
it_behaves_like 'cache counters invalidator'
+ it_behaves_like 'merge request reviewers cache counters invalidator'
context 'valid params' do
let(:service) { described_class.new(project, user, {}) }
@@ -75,6 +76,14 @@ RSpec.describe MergeRequests::CloseService do
described_class.new(project, user, {}).execute(merge_request)
end
+ it 'calls the merge request activity counter' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_close_mr_action)
+ .with(user: user)
+
+ described_class.new(project, user, {}).execute(merge_request)
+ end
+
it 'refreshes the number of open merge requests for a valid MR', :use_clean_rails_memory_store_caching do
service = described_class.new(project, user, {})
diff --git a/spec/services/merge_requests/create_from_issue_service_spec.rb b/spec/services/merge_requests/create_from_issue_service_spec.rb
index 86e49fe601c..be8c41bc4a1 100644
--- a/spec/services/merge_requests/create_from_issue_service_spec.rb
+++ b/spec/services/merge_requests/create_from_issue_service_spec.rb
@@ -66,10 +66,11 @@ RSpec.describe MergeRequests::CreateFromIssueService do
expect { service.execute }.to change(target_project.merge_requests, :count).by(1)
end
- it 'sets the merge request author to current user', :sidekiq_might_not_need_inline do
+ it 'sets the merge request author to current user and assigns them', :sidekiq_might_not_need_inline do
result = service.execute
expect(result[:merge_request].author).to eq(user)
+ expect(result[:merge_request].assignees).to eq([user])
end
it 'sets the merge request source branch to the new issue branch', :sidekiq_might_not_need_inline do
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index d042b318d02..4f47a22b07c 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -155,6 +155,12 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
expect(Todo.where(attributes).count).to eq 1
end
+
+ it 'invalidates counter cache for reviewers', :use_clean_rails_memory_store_caching do
+ expect { merge_request }
+ .to change { user2.review_requested_open_merge_requests_count }
+ .by(1)
+ end
end
context 'when head pipelines already exist for merge request source branch', :sidekiq_inline do
diff --git a/spec/services/merge_requests/export_csv_service_spec.rb b/spec/services/merge_requests/export_csv_service_spec.rb
index ecb17b3fe77..4ce032c396e 100644
--- a/spec/services/merge_requests/export_csv_service_spec.rb
+++ b/spec/services/merge_requests/export_csv_service_spec.rb
@@ -27,11 +27,11 @@ RSpec.describe MergeRequests::ExportCsvService do
let_it_be(:merge_request) { create(:merge_request, assignees: create_list(:user, 2)) }
it 'contains the names of assignees' do
- expect(csv['Assignees']).to eq(merge_request.assignees.map(&:name).join(', '))
+ expect(csv['Assignees'].split(', ')).to match_array(merge_request.assignees.map(&:name))
end
it 'contains the usernames of assignees' do
- expect(csv['Assignee Usernames']).to eq(merge_request.assignees.map(&:username).join(', '))
+ expect(csv['Assignee Usernames'].split(', ')).to match_array(merge_request.assignees.map(&:username))
end
end
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index d0e3102f157..dd37d87e3f5 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -37,6 +37,10 @@ RSpec.describe MergeRequests::MergeService do
expect(merge_request.in_progress_merge_commit_sha).to be_nil
end
+ it 'does not update squash_commit_sha if it is not a squash' do
+ expect(merge_request.squash_commit_sha).to be_nil
+ end
+
it 'sends email to user2 about merge of new merge_request' do
email = ActionMailer::Base.deliveries.last
expect(email.to.first).to eq(user2.email)
@@ -76,6 +80,12 @@ RSpec.describe MergeRequests::MergeService do
expect(merge_commit.message).to eq('Merge commit message')
expect(squash_commit.message).to eq("Squash commit message\n")
end
+
+ it 'persists squash_commit_sha' do
+ squash_commit = merge_request.merge_commit.parents.last
+
+ expect(merge_request.squash_commit_sha).to eq(squash_commit.id)
+ end
end
end
@@ -361,6 +371,7 @@ RSpec.describe MergeRequests::MergeService do
expect(merge_request).to be_open
expect(merge_request.merge_commit_sha).to be_nil
+ expect(merge_request.squash_commit_sha).to be_nil
expect(merge_request.merge_error).to include(error_message)
expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
end
@@ -381,6 +392,7 @@ RSpec.describe MergeRequests::MergeService do
expect(merge_request).to be_open
expect(merge_request.merge_commit_sha).to be_nil
+ expect(merge_request.squash_commit_sha).to be_nil
expect(merge_request.merge_error).to include(error_message)
expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
end
@@ -395,10 +407,27 @@ RSpec.describe MergeRequests::MergeService do
expect(merge_request).to be_open
expect(merge_request.merge_commit_sha).to be_nil
+ expect(merge_request.squash_commit_sha).to be_nil
expect(merge_request.merge_error).to include(error_message)
expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
end
+ it 'logs and saves error if there is a PreReceiveError exception' do
+ error_message = 'error message'
+
+ allow(service).to receive(:repository).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{error_message}")
+ allow(service).to receive(:execute_hooks)
+ merge_request.update!(squash: true)
+
+ service.execute(merge_request)
+
+ expect(merge_request).to be_open
+ expect(merge_request.merge_commit_sha).to be_nil
+ expect(merge_request.squash_commit_sha).to be_nil
+ expect(merge_request.merge_error).to include('Something went wrong during merge pre-receive hook')
+ expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
+ end
+
context 'when fast-forward merge is not allowed' do
before do
allow_any_instance_of(Repository).to receive(:ancestor?).and_return(nil)
@@ -415,6 +444,7 @@ RSpec.describe MergeRequests::MergeService do
expect(merge_request).to be_open
expect(merge_request.merge_commit_sha).to be_nil
+ expect(merge_request.squash_commit_sha).to be_nil
expect(merge_request.merge_error).to include(error_message)
expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
end
diff --git a/spec/services/merge_requests/mergeability_check_service_spec.rb b/spec/services/merge_requests/mergeability_check_service_spec.rb
index 725fc16fa7c..17bfa9d7368 100644
--- a/spec/services/merge_requests/mergeability_check_service_spec.rb
+++ b/spec/services/merge_requests/mergeability_check_service_spec.rb
@@ -124,14 +124,6 @@ RSpec.describe MergeRequests::MergeabilityCheckService, :clean_gitlab_redis_shar
it_behaves_like 'mergeable merge request'
- context 'when lock is disabled' do
- before do
- stub_feature_flags(merge_ref_auto_sync_lock: false)
- end
-
- it_behaves_like 'mergeable merge request'
- end
-
context 'when concurrent calls' do
it 'waits first lock and returns "cached" result in subsequent calls' do
threads = execute_within_threads(amount: 3)
@@ -167,25 +159,6 @@ RSpec.describe MergeRequests::MergeabilityCheckService, :clean_gitlab_redis_shar
end
end
- context 'disabled merge ref sync feature flag' do
- before do
- stub_feature_flags(merge_ref_auto_sync: false)
- end
-
- it 'returns error and no payload' do
- result = subject
-
- expect(result).to be_a(ServiceResponse)
- expect(result.error?).to be(true)
- expect(result.message).to eq('Merge ref is outdated due to disabled feature')
- expect(result.payload).to be_empty
- end
-
- it 'ignores merge-ref and updates merge status' do
- expect { subject }.to change(merge_request, :merge_status).from('unchecked').to('can_be_merged')
- end
- end
-
context 'when broken' do
before do
expect(merge_request).to receive(:broken?) { true }
@@ -305,28 +278,6 @@ RSpec.describe MergeRequests::MergeabilityCheckService, :clean_gitlab_redis_shar
context 'recheck enforced' do
subject { described_class.new(merge_request).execute(recheck: true) }
- context 'when MR is mergeable and merge-ref auto-sync is disabled' do
- before do
- stub_feature_flags(merge_ref_auto_sync: false)
- merge_request.mark_as_mergeable!
- end
-
- it 'returns ServiceResponse.error' do
- result = subject
-
- expect(result).to be_a(ServiceResponse)
- expect(result.error?).to be(true)
- expect(result.message).to eq('Merge ref is outdated due to disabled feature')
- expect(result.payload).to be_empty
- end
-
- it 'merge status is not changed' do
- subject
-
- expect(merge_request.merge_status).to eq('can_be_merged')
- end
- end
-
context 'when MR is marked as mergeable, but repo is not mergeable and MR is not opened' do
before do
# Making sure that we don't touch the merge-status after
diff --git a/spec/services/merge_requests/post_merge_service_spec.rb b/spec/services/merge_requests/post_merge_service_spec.rb
index 402f753c0af..6523b5a158c 100644
--- a/spec/services/merge_requests/post_merge_service_spec.rb
+++ b/spec/services/merge_requests/post_merge_service_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe MergeRequests::PostMergeService do
describe '#execute' do
it_behaves_like 'cache counters invalidator'
+ it_behaves_like 'merge request reviewers cache counters invalidator'
it 'refreshes the number of open merge requests for a valid MR', :use_clean_rails_memory_store_caching do
# Cache the counter before the MR changed state.
@@ -37,6 +38,14 @@ RSpec.describe MergeRequests::PostMergeService do
subject
end
+ it 'calls the merge request activity counter' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_merge_mr_action)
+ .with(user: user)
+
+ subject
+ end
+
it 'deletes non-latest diffs' do
diff_removal_service = instance_double(MergeRequests::DeleteNonLatestDiffsService, execute: nil)
diff --git a/spec/services/merge_requests/reopen_service_spec.rb b/spec/services/merge_requests/reopen_service_spec.rb
index 2bd83dc36a8..8541d597581 100644
--- a/spec/services/merge_requests/reopen_service_spec.rb
+++ b/spec/services/merge_requests/reopen_service_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe MergeRequests::ReopenService do
describe '#execute' do
it_behaves_like 'cache counters invalidator'
+ it_behaves_like 'merge request reviewers cache counters invalidator'
context 'valid params' do
let(:service) { described_class.new(project, user, {}) }
@@ -80,6 +81,14 @@ RSpec.describe MergeRequests::ReopenService do
described_class.new(project, user, {}).execute(merge_request)
end
+ it 'calls the merge request activity counter' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_reopen_mr_action)
+ .with(user: user)
+
+ described_class.new(project, user, {}).execute(merge_request)
+ end
+
it 'refreshes the number of open merge requests for a valid MR' do
service = described_class.new(project, user, {})
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index ed8872b71f7..9eb82dcd0ad 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -431,14 +431,6 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
describe 'merge' do
it_behaves_like 'correct merge behavior'
-
- context 'when merge_orchestration_service feature flag is disabled' do
- before do
- stub_feature_flags(merge_orchestration_service: false)
- end
-
- it_behaves_like 'correct merge behavior'
- end
end
context 'todos' do
@@ -529,6 +521,19 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
should_email(user2)
should_email(user3)
end
+
+ it 'updates open merge request counter for reviewers', :use_clean_rails_memory_store_caching do
+ merge_request.reviewers = [user3]
+
+ # Cache them to ensure the cache gets invalidated on update
+ expect(user2.review_requested_open_merge_requests_count).to eq(0)
+ expect(user3.review_requested_open_merge_requests_count).to eq(1)
+
+ update_merge_request(reviewer_ids: [user2.id])
+
+ expect(user2.review_requested_open_merge_requests_count).to eq(1)
+ expect(user3.review_requested_open_merge_requests_count).to eq(0)
+ end
end
context 'when the milestone is removed' do
@@ -837,20 +842,20 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
it 'does not allow a maintainer of the target project to set `allow_collaboration`' do
target_project.add_developer(user)
- update_merge_request(allow_collaboration: true, title: 'Updated title')
+ update_merge_request(allow_collaboration: false, title: 'Updated title')
expect(merge_request.title).to eq('Updated title')
- expect(merge_request.allow_collaboration).to be_falsy
+ expect(merge_request.allow_collaboration).to be_truthy
end
it 'is allowed by a user that can push to the source and can update the merge request' do
merge_request.update!(assignees: [user])
source_project.add_developer(user)
- update_merge_request(allow_collaboration: true, title: 'Updated title')
+ update_merge_request(allow_collaboration: false, title: 'Updated title')
expect(merge_request.title).to eq('Updated title')
- expect(merge_request.allow_collaboration).to be_truthy
+ expect(merge_request.allow_collaboration).to be_falsy
end
end
diff --git a/spec/services/namespaces/package_settings/update_service_spec.rb b/spec/services/namespaces/package_settings/update_service_spec.rb
new file mode 100644
index 00000000000..fa0c58e4c9b
--- /dev/null
+++ b/spec/services/namespaces/package_settings/update_service_spec.rb
@@ -0,0 +1,102 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Namespaces::PackageSettings::UpdateService do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be_with_reload(:namespace) { create(:group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:params) { {} }
+
+ describe '#execute' do
+ subject { described_class.new(container: namespace, current_user: user, params: params).execute }
+
+ shared_examples 'returning a success' do
+ it 'returns a success' do
+ result = subject
+
+ expect(result.payload[:package_settings]).to be_present
+ expect(result.success?).to be_truthy
+ end
+ end
+
+ shared_examples 'returning an error' do |message, http_status|
+ it 'returns an error' do
+ result = subject
+
+ expect(result.message).to eq(message)
+ expect(result.status).to eq(:error)
+ expect(result.http_status).to eq(http_status)
+ end
+ end
+
+ shared_examples 'updating the namespace package setting' do
+ it_behaves_like 'updating the namespace package setting attributes', from: { maven_duplicates_allowed: true, maven_duplicate_exception_regex: 'SNAPSHOT' }, to: { maven_duplicates_allowed: false, maven_duplicate_exception_regex: 'RELEASE' }
+
+ it_behaves_like 'returning a success'
+
+ context 'with invalid params' do
+ let_it_be(:params) { { maven_duplicates_allowed: nil } }
+
+ it_behaves_like 'not creating the namespace package setting'
+
+ it "doesn't update the maven_duplicates_allowed" do
+ expect { subject }
+ .not_to change { package_settings.reload.maven_duplicates_allowed }
+ end
+
+ it_behaves_like 'returning an error', 'Maven duplicates allowed is not included in the list', 400
+ end
+ end
+
+ shared_examples 'denying access to namespace package setting' do
+ context 'with existing namespace package setting' do
+ it_behaves_like 'not creating the namespace package setting'
+
+ it_behaves_like 'returning an error', 'Access Denied', 403
+ end
+ end
+
+ context 'with existing namespace package setting' do
+ let_it_be(:package_settings) { create(:namespace_package_setting, namespace: namespace) }
+ let_it_be(:params) { { maven_duplicates_allowed: false, maven_duplicate_exception_regex: 'RELEASE' } }
+
+ where(:user_role, :shared_examples_name) do
+ :maintainer | 'updating the namespace package setting'
+ :developer | 'updating the namespace package setting'
+ :reporter | 'denying access to namespace package setting'
+ :guest | 'denying access to namespace package setting'
+ :anonymous | 'denying access to namespace package setting'
+ end
+
+ with_them do
+ before do
+ namespace.send("add_#{user_role}", user) unless user_role == :anonymous
+ end
+
+ it_behaves_like params[:shared_examples_name]
+ end
+ end
+
+ context 'without existing namespace package setting' do
+ let_it_be(:package_settings) { namespace.package_settings }
+
+ where(:user_role, :shared_examples_name) do
+ :maintainer | 'creating the namespace package setting'
+ :developer | 'creating the namespace package setting'
+ :reporter | 'denying access to namespace package setting'
+ :guest | 'denying access to namespace package setting'
+ :anonymous | 'denying access to namespace package setting'
+ end
+
+ with_them do
+ before do
+ namespace.send("add_#{user_role}", user) unless user_role == :anonymous
+ end
+
+ it_behaves_like params[:shared_examples_name]
+ end
+ end
+ end
+end
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index 3118956951e..20f06619e02 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -78,6 +78,12 @@ RSpec.describe Notes::CreateService do
end.to change { counter.unique_events(event_names: event, start_date: 1.day.ago, end_date: 1.day.from_now) }.by(1)
end
+ it 'does not track merge request usage data' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter).not_to receive(:track_create_comment_action)
+
+ described_class.new(project, user, opts).execute
+ end
+
context 'in a merge request' do
let_it_be(:project_with_repo) { create(:project, :repository) }
let_it_be(:merge_request) do
@@ -85,18 +91,6 @@ RSpec.describe Notes::CreateService do
target_project: project_with_repo)
end
- context 'issue comment usage data' do
- let(:opts) do
- { note: 'Awesome comment', noteable_type: 'MergeRequest', noteable_id: merge_request.id }
- end
-
- it 'does not track' do
- expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).not_to receive(:track_issue_comment_added_action)
-
- described_class.new(project, user, opts).execute
- end
- end
-
context 'noteable highlight cache clearing' do
let(:position) do
Gitlab::Diff::Position.new(old_path: "files/ruby/popen.rb",
@@ -119,6 +113,18 @@ RSpec.describe Notes::CreateService do
.to receive(:unfolded_diff?) { true }
end
+ it 'does not track issue comment usage data' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).not_to receive(:track_issue_comment_added_action)
+
+ described_class.new(project_with_repo, user, new_opts).execute
+ end
+
+ it 'tracks merge request usage data' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter).to receive(:track_create_comment_action).with(note: kind_of(Note))
+
+ described_class.new(project_with_repo, user, new_opts).execute
+ end
+
it 'clears noteable diff cache when it was unfolded for the note position' do
expect_any_instance_of(Gitlab::Diff::HighlightCache).to receive(:clear)
diff --git a/spec/services/notes/destroy_service_spec.rb b/spec/services/notes/destroy_service_spec.rb
index 0859c28cbe7..eebbdcc33b8 100644
--- a/spec/services/notes/destroy_service_spec.rb
+++ b/spec/services/notes/destroy_service_spec.rb
@@ -35,6 +35,14 @@ RSpec.describe Notes::DestroyService do
end.to change { counter.unique_events(event_names: event, start_date: 1.day.ago, end_date: 1.day.from_now) }.by(1)
end
+ it 'tracks merge request usage data' do
+ mr = create(:merge_request, source_project: project)
+ note = create(:note, project: project, noteable: mr)
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter).to receive(:track_remove_comment_action).with(note: note)
+
+ described_class.new(project, user).execute(note)
+ end
+
context 'in a merge request' do
let_it_be(:repo_project) { create(:project, :repository) }
let_it_be(:merge_request) do
diff --git a/spec/services/notes/quick_actions_service_spec.rb b/spec/services/notes/quick_actions_service_spec.rb
index 64aa845841b..c098500b78a 100644
--- a/spec/services/notes/quick_actions_service_spec.rb
+++ b/spec/services/notes/quick_actions_service_spec.rb
@@ -153,8 +153,8 @@ RSpec.describe Notes::QuickActionsService do
expect(execute(note)).to be_empty
end
- it 'does not assign the milestone' do
- expect { execute(note) }.not_to change { issue.reload.milestone }
+ it 'assigns the milestone' do
+ expect { execute(note) }.to change { issue.reload.milestone }.from(nil).to(milestone)
end
end
@@ -195,8 +195,8 @@ RSpec.describe Notes::QuickActionsService do
expect(execute(note)).to be_empty
end
- it 'does not remove the milestone' do
- expect { execute(note) }.not_to change { issue.reload.milestone }
+ it 'removes the milestone' do
+ expect { execute(note) }.to change { issue.reload.milestone }.from(milestone).to(nil)
end
end
diff --git a/spec/services/notes/update_service_spec.rb b/spec/services/notes/update_service_spec.rb
index e2f51c9af67..902fd9958f8 100644
--- a/spec/services/notes/update_service_spec.rb
+++ b/spec/services/notes/update_service_spec.rb
@@ -49,11 +49,12 @@ RSpec.describe Notes::UpdateService do
it 'does not track usage data when params is blank' do
expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).not_to receive(:track_issue_comment_edited_action)
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter).not_to receive(:track_edit_comment_action)
update_note({})
end
- it 'tracks usage data', :clean_gitlab_redis_shared_state do
+ it 'tracks issue usage data', :clean_gitlab_redis_shared_state do
event = Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_EDITED
counter = Gitlab::UsageDataCounters::HLLRedisCounter
@@ -63,6 +64,17 @@ RSpec.describe Notes::UpdateService do
end.to change { counter.unique_events(event_names: event, start_date: 1.day.ago, end_date: 1.day.from_now) }.by(1)
end
+ context 'when the notable is a merge request' do
+ let(:merge_request) { create(:merge_request, source_project: project) }
+ let(:note) { create(:note, project: project, noteable: merge_request, author: user, note: "Old note #{user2.to_reference}") }
+
+ it 'tracks merge request usage data' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter).to receive(:track_edit_comment_action).with(note: note)
+
+ update_note(note: 'new text')
+ end
+ end
+
context 'with system note' do
before do
note.update_column(:system, true)
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 9431c023850..85234077b1f 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -2455,6 +2455,18 @@ RSpec.describe NotificationService, :mailer do
let(:notification_trigger) { group.add_guest(added_user) }
end
end
+
+ describe '#updated_group_member_expiration' do
+ let_it_be(:group_member) { create(:group_member) }
+
+ it 'emails the user that their group membership expiry has changed' do
+ expect_next_instance_of(NotificationService) do |notification|
+ allow(notification).to receive(:updated_group_member_expiration).with(group_member)
+ end
+
+ group_member.update!(expires_at: 5.days.from_now)
+ end
+ end
end
describe 'ProjectMember', :deliver_mails_inline do
diff --git a/spec/services/onboarding_progress_service_spec.rb b/spec/services/onboarding_progress_service_spec.rb
index 59b6083d38a..340face4ae8 100644
--- a/spec/services/onboarding_progress_service_spec.rb
+++ b/spec/services/onboarding_progress_service_spec.rb
@@ -5,28 +5,44 @@ require 'spec_helper'
RSpec.describe OnboardingProgressService do
describe '#execute' do
let(:namespace) { create(:namespace, parent: root_namespace) }
+ let(:root_namespace) { nil }
+ let(:action) { :namespace_action }
subject(:execute_service) { described_class.new(namespace).execute(action: :subscription_created) }
context 'when the namespace is a root' do
- let(:root_namespace) { nil }
+ before do
+ OnboardingProgress.onboard(namespace)
+ end
- it 'records a namespace onboarding progress action for the given namespace' do
- expect(NamespaceOnboardingAction).to receive(:create_action)
- .with(namespace, :subscription_created).and_call_original
+ it 'registers a namespace onboarding progress action for the given namespace' do
+ execute_service
- expect { execute_service }.to change(NamespaceOnboardingAction, :count).by(1)
+ expect(OnboardingProgress.completed?(namespace, :subscription_created)).to eq(true)
end
end
context 'when the namespace is not the root' do
- let_it_be(:root_namespace) { build(:namespace) }
+ let(:root_namespace) { build(:namespace) }
+
+ before do
+ OnboardingProgress.onboard(root_namespace)
+ end
+
+ it 'registers a namespace onboarding progress action for the root namespace' do
+ execute_service
+
+ expect(OnboardingProgress.completed?(root_namespace, :subscription_created)).to eq(true)
+ end
+ end
+
+ context 'when no namespace is passed' do
+ let(:namespace) { nil }
- it 'records a namespace onboarding progress action for the root namespace' do
- expect(NamespaceOnboardingAction).to receive(:create_action)
- .with(root_namespace, :subscription_created).and_call_original
+ it 'does not register a namespace onboarding progress action' do
+ execute_service
- expect { execute_service }.to change(NamespaceOnboardingAction, :count).by(1)
+ expect(OnboardingProgress.completed?(root_namespace, :subscription_created)).to be(nil)
end
end
end
diff --git a/spec/services/packages/create_event_service_spec.rb b/spec/services/packages/create_event_service_spec.rb
index f581d704087..f7bab0e5a9f 100644
--- a/spec/services/packages/create_event_service_spec.rb
+++ b/spec/services/packages/create_event_service_spec.rb
@@ -56,7 +56,7 @@ RSpec.describe Packages::CreateEventService do
end
end
- shared_examples 'redis package event creation' do |originator_type, expected_scope|
+ shared_examples 'redis package unique event creation' do |originator_type, expected_scope|
context 'with feature flag disabled' do
before do
stub_feature_flags(collect_package_events_redis: false)
@@ -70,29 +70,27 @@ RSpec.describe Packages::CreateEventService do
end
it 'tracks the event' do
- expect(::Gitlab::UsageDataCounters::GuestPackageEventCounter).not_to receive(:count)
- expect(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(user.id, Packages::Event.allowed_event_name(expected_scope, event_name, originator_type))
+ expect(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(/package/, values: user.id)
subject
end
end
- shared_examples 'redis package guest event creation' do |originator_type, expected_scope|
+ shared_examples 'redis package count event creation' do |originator_type, expected_scope|
context 'with feature flag disabled' do
before do
stub_feature_flags(collect_package_events_redis: false)
end
it 'does not track the event' do
- expect(::Gitlab::UsageDataCounters::GuestPackageEventCounter).not_to receive(:count)
+ expect(::Gitlab::UsageDataCounters::PackageEventCounter).not_to receive(:count)
subject
end
end
it 'tracks the event' do
- expect(::Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
- expect(::Gitlab::UsageDataCounters::GuestPackageEventCounter).to receive(:count).with(Packages::Event.allowed_event_name(expected_scope, event_name, originator_type))
+ expect(::Gitlab::UsageDataCounters::PackageEventCounter).to receive(:count).at_least(:once)
subject
end
@@ -102,21 +100,23 @@ RSpec.describe Packages::CreateEventService do
let(:user) { create(:user) }
it_behaves_like 'db package event creation', 'user', 'container'
- it_behaves_like 'redis package event creation', 'user', 'container'
+ it_behaves_like 'redis package unique event creation', 'user', 'container'
+ it_behaves_like 'redis package count event creation', 'user', 'container'
end
context 'with a deploy token' do
let(:user) { create(:deploy_token) }
it_behaves_like 'db package event creation', 'deploy_token', 'container'
- it_behaves_like 'redis package event creation', 'deploy_token', 'container'
+ it_behaves_like 'redis package unique event creation', 'deploy_token', 'container'
+ it_behaves_like 'redis package count event creation', 'deploy_token', 'container'
end
context 'with no user' do
let(:user) { nil }
it_behaves_like 'db package event creation', 'guest', 'container'
- it_behaves_like 'redis package guest event creation', 'guest', 'container'
+ it_behaves_like 'redis package count event creation', 'guest', 'container'
end
context 'with a package as scope' do
@@ -126,14 +126,15 @@ RSpec.describe Packages::CreateEventService do
let(:user) { nil }
it_behaves_like 'db package event creation', 'guest', 'npm'
- it_behaves_like 'redis package guest event creation', 'guest', 'npm'
+ it_behaves_like 'redis package count event creation', 'guest', 'npm'
end
context 'with user' do
let(:user) { create(:user) }
it_behaves_like 'db package event creation', 'user', 'npm'
- it_behaves_like 'redis package event creation', 'user', 'npm'
+ it_behaves_like 'redis package unique event creation', 'user', 'npm'
+ it_behaves_like 'redis package count event creation', 'user', 'npm'
end
end
end
diff --git a/spec/services/packages/debian/create_package_file_service_spec.rb b/spec/services/packages/debian/create_package_file_service_spec.rb
new file mode 100644
index 00000000000..74b97a4f941
--- /dev/null
+++ b/spec/services/packages/debian/create_package_file_service_spec.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::CreatePackageFileService do
+ include WorkhorseHelpers
+
+ let_it_be(:package) { create(:debian_incoming, without_package_files: true) }
+
+ describe '#execute' do
+ let(:file_name) { 'libsample0_1.2.3~alpha2_amd64.deb' }
+ let(:fixture_path) { "spec/fixtures/packages/debian/#{file_name}" }
+
+ let(:params) do
+ {
+ file: file,
+ file_name: file_name,
+ file_sha1: '54321',
+ file_md5: '12345'
+ }.with_indifferent_access
+ end
+
+ let(:service) { described_class.new(package, params) }
+
+ subject(:package_file) { service.execute }
+
+ shared_examples 'a valid deb' do
+ it 'creates a new package file', :aggregate_failures do
+ expect(package_file).to be_valid
+ expect(package_file.file.read).to start_with('!<arch>')
+ expect(package_file.size).to eq(1124)
+ expect(package_file.file_name).to eq(file_name)
+ expect(package_file.file_sha1).to eq('54321')
+ expect(package_file.file_sha256).to eq('543212345')
+ expect(package_file.file_md5).to eq('12345')
+ expect(package_file.debian_file_metadatum).to be_valid
+ expect(package_file.debian_file_metadatum.file_type).to eq('unknown')
+ expect(package_file.debian_file_metadatum.architecture).to be_nil
+ expect(package_file.debian_file_metadatum.fields).to be_nil
+ end
+ end
+
+ context 'with temp file' do
+ let!(:file) do
+ upload_path = ::Packages::PackageFileUploader.workhorse_local_upload_path
+ file_path = upload_path + '/' + file_name
+
+ FileUtils.mkdir_p(upload_path)
+ File.write(file_path, File.read(fixture_path))
+
+ UploadedFile.new(file_path, filename: File.basename(file_path), sha256: '543212345')
+ end
+
+ it_behaves_like 'a valid deb'
+ end
+
+ context 'with remote file' do
+ let!(:fog_connection) do
+ stub_package_file_object_storage(direct_upload: true)
+ end
+
+ before do
+ allow_next_instance_of(UploadedFile) do |uploaded_file|
+ allow(uploaded_file).to receive(:sha256).and_return('543212345')
+ end
+ end
+
+ let(:tmp_object) do
+ fog_connection.directories.new(key: 'packages').files.create( # rubocop:disable Rails/SaveBang
+ key: "tmp/uploads/#{file_name}",
+ body: File.read(fixture_path)
+ )
+ end
+
+ let!(:file) { fog_to_uploaded_file(tmp_object) }
+
+ it_behaves_like 'a valid deb'
+ end
+
+ context 'package is missing' do
+ let(:package) { nil }
+ let(:params) { {} }
+
+ it 'raises an error' do
+ expect { subject.execute }.to raise_error(ArgumentError, 'Invalid package')
+ end
+ end
+
+ context 'params is empty' do
+ let(:params) { {} }
+
+ it 'raises an error' do
+ expect { subject.execute }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+
+ context 'file is missing' do
+ let(:file_name) { 'libsample0_1.2.3~alpha2_amd64.deb' }
+ let(:file) { nil }
+
+ it 'raises an error' do
+ expect { subject.execute }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/debian/extract_metadata_service_spec.rb b/spec/services/packages/debian/extract_metadata_service_spec.rb
new file mode 100644
index 00000000000..0aa9a67b263
--- /dev/null
+++ b/spec/services/packages/debian/extract_metadata_service_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::ExtractMetadataService do
+ let(:service) { described_class.new(package_file) }
+
+ subject { service.execute }
+
+ RSpec.shared_context 'Debian ExtractMetadata Service' do |trait|
+ let(:package_file) { create(:debian_package_file, trait) }
+ end
+
+ RSpec.shared_examples 'Test Debian ExtractMetadata Service' do |expected_file_type, expected_architecture, expected_fields|
+ it "returns file_type #{expected_file_type.inspect}" do
+ expect(subject[:file_type]).to eq(expected_file_type)
+ end
+
+ it "returns architecture #{expected_architecture.inspect}" do
+ expect(subject[:architecture]).to eq(expected_architecture)
+ end
+
+ it "returns fields #{expected_fields.nil? ? '' : 'including '}#{expected_fields.inspect}" do
+ if expected_fields.nil?
+ expect(subject[:fields]).to be_nil
+ else
+ expect(subject[:fields]).to include(**expected_fields)
+ end
+ end
+ end
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(:case_name, :trait, :expected_file_type, :expected_architecture, :expected_fields) do
+ 'with invalid' | :invalid | :unknown | nil | nil
+ 'with source' | :source | :source | nil | nil
+ 'with dsc' | :dsc | :dsc | nil | { 'Binary': 'sample-dev, libsample0, sample-udeb' }
+ 'with deb' | :deb | :deb | 'amd64' | { 'Multi-Arch': 'same' }
+ 'with udeb' | :udeb | :udeb | 'amd64' | { 'Package': 'sample-udeb' }
+ 'with buildinfo' | :buildinfo | :buildinfo | nil | { 'Architecture': 'amd64 source', 'Build-Architecture': 'amd64' }
+ 'with changes' | :changes | :changes | nil | { 'Architecture': 'source amd64', 'Binary': 'libsample0 sample-dev sample-udeb' }
+ end
+
+ with_them do
+ include_context 'Debian ExtractMetadata Service', params[:trait] do
+ it_behaves_like 'Test Debian ExtractMetadata Service',
+ params[:expected_file_type],
+ params[:expected_architecture],
+ params[:expected_fields]
+ end
+ end
+
+ context 'with invalid package file' do
+ let(:package_file) { create(:conan_package_file) }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::ExtractionError, 'invalid package file')
+ end
+ end
+end
diff --git a/spec/services/packages/debian/get_or_create_incoming_service_spec.rb b/spec/services/packages/debian/get_or_create_incoming_service_spec.rb
new file mode 100644
index 00000000000..ab99b091246
--- /dev/null
+++ b/spec/services/packages/debian/get_or_create_incoming_service_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::GetOrCreateIncomingService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ subject(:service) { described_class.new(project, user) }
+
+ describe '#execute' do
+ subject(:package) { service.execute }
+
+ context 'run once' do
+ it 'creates a new package', :aggregate_failures do
+ expect(package).to be_valid
+ expect(package.project_id).to eq(project.id)
+ expect(package.creator_id).to eq(user.id)
+ expect(package.name).to eq('incoming')
+ expect(package.version).to be_nil
+ expect(package.package_type).to eq('debian')
+ expect(package.debian_incoming?).to be_truthy
+ end
+
+ it_behaves_like 'assigns the package creator'
+ end
+
+ context 'run twice' do
+ let!(:package2) { service.execute }
+
+ it 'returns the same object' do
+ expect(package2.id).to eq(package.id)
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/maven/find_or_create_package_service_spec.rb b/spec/services/packages/maven/find_or_create_package_service_spec.rb
index 2eaad7db445..82dffeefcde 100644
--- a/spec/services/packages/maven/find_or_create_package_service_spec.rb
+++ b/spec/services/packages/maven/find_or_create_package_service_spec.rb
@@ -11,29 +11,36 @@ RSpec.describe Packages::Maven::FindOrCreatePackageService do
let(:file_name) { 'test.jar' }
let(:param_path) { "#{path}/#{version}" }
let(:params) { { path: param_path, file_name: file_name } }
+ let(:service) { described_class.new(project, user, params) }
describe '#execute' do
using RSpec::Parameterized::TableSyntax
- subject { described_class.new(project, user, params).execute }
+ subject { service.execute }
- RSpec.shared_examples 'reuse existing package' do
- it { expect { subject}.not_to change { Packages::Package.count } }
+ shared_examples 'reuse existing package' do
+ it { expect { subject }.not_to change { Packages::Package.count } }
- it { is_expected.to eq(existing_package) }
+ it 'returns the existing package' do
+ expect(subject.payload).to eq(package: existing_package)
+ end
end
- RSpec.shared_examples 'create package' do
+ shared_examples 'create package' do
it { expect { subject }.to change { Packages::Package.count }.by(1) }
- it 'sets the proper name and version' do
- pkg = subject
+ it 'sets the proper name and version', :aggregate_failures do
+ pkg = subject.payload[:package]
expect(pkg.name).to eq(path)
expect(pkg.version).to eq(version)
end
- it_behaves_like 'assigns build to package'
+ context 'with a build' do
+ subject { service.execute.payload[:package] }
+
+ it_behaves_like 'assigns build to package'
+ end
end
context 'path with version' do
@@ -90,5 +97,36 @@ RSpec.describe Packages::Maven::FindOrCreatePackageService do
expect { subject }.to change { Packages::BuildInfo.count }.by(1)
end
end
+
+ context 'when package duplicates are not allowed' do
+ let_it_be_with_refind(:package_settings) { create(:namespace_package_setting, :group, maven_duplicates_allowed: false) }
+ let_it_be_with_refind(:group) { package_settings.namespace }
+ let_it_be_with_refind(:project) { create(:project, group: group) }
+ let!(:existing_package) { create(:maven_package, name: path, version: version, project: project) }
+
+ it { expect { subject }.not_to change { project.package_files.count } }
+
+ it 'returns an error', :aggregate_failures do
+ expect(subject.payload).to be_empty
+ expect(subject.errors).to include('Duplicate package is not allowed')
+ end
+
+ context 'when uploading different non-duplicate files to the same package' do
+ before do
+ package_file = existing_package.package_files.find_by(file_name: 'my-app-1.0-20180724.124855-1.jar')
+ package_file.destroy!
+ end
+
+ it_behaves_like 'reuse existing package'
+ end
+
+ context 'when the package name matches the exception regex' do
+ before do
+ package_settings.update!(maven_duplicate_exception_regex: '.*')
+ end
+
+ it_behaves_like 'reuse existing package'
+ end
+ end
end
end
diff --git a/spec/services/packages/nuget/search_service_spec.rb b/spec/services/packages/nuget/search_service_spec.rb
index d163e7087e4..db758dc6672 100644
--- a/spec/services/packages/nuget/search_service_spec.rb
+++ b/spec/services/packages/nuget/search_service_spec.rb
@@ -1,8 +1,12 @@
# frozen_string_literal: true
+
require 'spec_helper'
RSpec.describe Packages::Nuget::SearchService do
- let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:subgroup) { create(:group, parent: group) }
+ let_it_be(:project) { create(:project, namespace: subgroup) }
let_it_be(:package_a) { create(:nuget_package, project: project, name: 'DummyPackageA') }
let_it_be(:packages_b) { create_list(:nuget_package, 5, project: project, name: 'DummyPackageB') }
let_it_be(:packages_c) { create_list(:nuget_package, 5, project: project, name: 'DummyPackageC') }
@@ -16,94 +20,126 @@ RSpec.describe Packages::Nuget::SearchService do
let(:options) { { include_prerelease_versions: include_prerelease_versions, per_page: per_page, padding: padding } }
describe '#execute' do
- subject { described_class.new(project, search_term, options).execute }
+ subject { described_class.new(user, target, search_term, options).execute }
- it { expect_search_results 3, package_a, packages_b, packages_c }
+ shared_examples 'handling all the conditions' do
+ it { expect_search_results 3, package_a, packages_b, packages_c }
- context 'with a smaller per page count' do
- let(:per_page) { 2 }
+ context 'with a smaller per page count' do
+ let(:per_page) { 2 }
- it { expect_search_results 3, package_a, packages_b }
- end
+ it { expect_search_results 3, package_a, packages_b }
+ end
- context 'with 0 per page count' do
- let(:per_page) { 0 }
+ context 'with 0 per page count' do
+ let(:per_page) { 0 }
- it { expect_search_results 3, [] }
- end
+ it { expect_search_results 3, [] }
+ end
- context 'with a negative per page count' do
- let(:per_page) { -1 }
+ context 'with a negative per page count' do
+ let(:per_page) { -1 }
- it { expect { subject }.to raise_error(ArgumentError, 'negative per_page') }
- end
+ it { expect { subject }.to raise_error(ArgumentError, 'negative per_page') }
+ end
- context 'with a padding' do
- let(:padding) { 2 }
+ context 'with a padding' do
+ let(:padding) { 2 }
- it { expect_search_results 3, packages_c }
- end
+ it { expect_search_results 3, packages_c }
+ end
- context 'with a too big padding' do
- let(:padding) { 5 }
+ context 'with a too big padding' do
+ let(:padding) { 5 }
- it { expect_search_results 3, [] }
- end
+ it { expect_search_results 3, [] }
+ end
- context 'with a negative padding' do
- let(:padding) { -1 }
+ context 'with a negative padding' do
+ let(:padding) { -1 }
- it { expect { subject }.to raise_error(ArgumentError, 'negative padding') }
- end
+ it { expect { subject }.to raise_error(ArgumentError, 'negative padding') }
+ end
- context 'with search term' do
- let(:search_term) { 'umm' }
+ context 'with search term' do
+ let(:search_term) { 'umm' }
- it { expect_search_results 3, package_a, packages_b, packages_c }
- end
+ it { expect_search_results 3, package_a, packages_b, packages_c }
+ end
- context 'with nil search term' do
- let(:search_term) { nil }
+ context 'with nil search term' do
+ let(:search_term) { nil }
- it { expect_search_results 4, package_a, packages_b, packages_c, package_d }
- end
+ it { expect_search_results 4, package_a, packages_b, packages_c, package_d }
+ end
- context 'with empty search term' do
- let(:search_term) { '' }
+ context 'with empty search term' do
+ let(:search_term) { '' }
- it { expect_search_results 4, package_a, packages_b, packages_c, package_d }
- end
+ it { expect_search_results 4, package_a, packages_b, packages_c, package_d }
+ end
- context 'with prefix search term' do
- let(:search_term) { 'dummy' }
+ context 'with prefix search term' do
+ let(:search_term) { 'dummy' }
- it { expect_search_results 3, package_a, packages_b, packages_c }
- end
+ it { expect_search_results 3, package_a, packages_b, packages_c }
+ end
+
+ context 'with suffix search term' do
+ let(:search_term) { 'packagec' }
+
+ it { expect_search_results 1, packages_c }
+ end
+
+ context 'with pre release packages' do
+ let_it_be(:package_e) { create(:nuget_package, project: project, name: 'DummyPackageE', version: '3.2.1-alpha') }
+
+ context 'including them' do
+ it { expect_search_results 4, package_a, packages_b, packages_c, package_e }
+ end
+
+ context 'excluding them' do
+ let(:include_prerelease_versions) { false }
- context 'with suffix search term' do
- let(:search_term) { 'packagec' }
+ it { expect_search_results 3, package_a, packages_b, packages_c }
- it { expect_search_results 1, packages_c }
+ context 'when mixed with release versions' do
+ let_it_be(:package_e_release) { create(:nuget_package, project: project, name: 'DummyPackageE', version: '3.2.1') }
+
+ it { expect_search_results 4, package_a, packages_b, packages_c, package_e_release }
+ end
+ end
+ end
end
- context 'with pre release packages' do
- let_it_be(:package_e) { create(:nuget_package, project: project, name: 'DummyPackageE', version: '3.2.1-alpha') }
+ context 'with project' do
+ let(:target) { project }
- context 'including them' do
- it { expect_search_results 4, package_a, packages_b, packages_c, package_e }
+ before do
+ project.add_developer(user)
end
- context 'excluding them' do
- let(:include_prerelease_versions) { false }
+ it_behaves_like 'handling all the conditions'
+ end
- it { expect_search_results 3, package_a, packages_b, packages_c }
+ context 'with subgroup' do
+ let(:target) { subgroup }
- context 'when mixed with release versions' do
- let_it_be(:package_e_release) { create(:nuget_package, project: project, name: 'DummyPackageE', version: '3.2.1') }
+ before do
+ subgroup.add_developer(user)
+ end
- it { expect_search_results 4, package_a, packages_b, packages_c, package_e_release }
- end
+ it_behaves_like 'handling all the conditions'
+ end
+
+ context 'with group' do
+ let(:target) { group }
+
+ before do
+ group.add_developer(user)
end
+
+ it_behaves_like 'handling all the conditions'
end
def expect_search_results(total_count, *results)
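
For reference, the restructured spec above hinges on one RSpec shared-example group that is re-run against several target scopes (project, subgroup, group). A minimal standalone sketch of that pattern is below; the class names and lets are illustrative only, not taken from the change:

    # Minimal sketch of the shared-example pattern; names are illustrative.
    require 'rspec/autorun'

    RSpec.describe 'searching different target scopes' do
      # Stand-in for a search service that accepts any target scope.
      let(:search) { ->(target) { "packages visible in #{target}" } }

      shared_examples 'handling all the conditions' do
        it 'runs the same expectations for every target' do
          expect(search.call(target)).to include('packages')
        end
      end

      context 'with a project target' do
        let(:target) { 'the project' }

        it_behaves_like 'handling all the conditions'
      end

      context 'with a group target' do
        let(:target) { 'the group' }

        it_behaves_like 'handling all the conditions'
      end
    end
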
diff --git a/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb b/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb
new file mode 100644
index 00000000000..29023621413
--- /dev/null
+++ b/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb
@@ -0,0 +1,118 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Pages::MigrateLegacyStorageToDeploymentService do
+ let(:project) { create(:project, :repository) }
+ let(:service) { described_class.new(project) }
+
+ it 'marks pages as not deployed if public directory is absent' do
+ project.mark_pages_as_deployed
+
+ expect(project.pages_metadatum.reload.deployed).to eq(true)
+
+ expect(service.execute).to(
+ eq(status: :error,
+ message: "Can't create zip archive: Can not find valid public dir in #{project.pages_path}")
+ )
+
+ expect(project.pages_metadatum.reload.deployed).to eq(false)
+ end
+
+ it 'does not mark pages as not deployed if public directory is absent but pages_deployment exists' do
+ deployment = create(:pages_deployment, project: project)
+ project.update_pages_deployment!(deployment)
+ project.mark_pages_as_deployed
+
+ expect(project.pages_metadatum.reload.deployed).to eq(true)
+
+ expect(service.execute).to(
+ eq(status: :error,
+ message: "Can't create zip archive: Can not find valid public dir in #{project.pages_path}")
+ )
+
+ expect(project.pages_metadatum.reload.deployed).to eq(true)
+ end
+
+ it 'does not mark pages as not deployed if public directory is absent but feature is disabled' do
+ stub_feature_flags(pages_migration_mark_as_not_deployed: false)
+
+ project.mark_pages_as_deployed
+
+ expect(project.pages_metadatum.reload.deployed).to eq(true)
+
+ expect(service.execute).to(
+ eq(status: :error,
+ message: "Can't create zip archive: Can not find valid public dir in #{project.pages_path}")
+ )
+
+ expect(project.pages_metadatum.reload.deployed).to eq(true)
+ end
+
+  it 'removes pages archive when it cannot save the deployment' do
+ archive = fixture_file_upload("spec/fixtures/pages.zip")
+ expect_next_instance_of(::Pages::ZipDirectoryService) do |zip_service|
+ expect(zip_service).to receive(:execute).and_return(status: :success,
+ archive_path: archive.path,
+ entries_count: 3)
+ end
+
+ expect_next_instance_of(PagesDeployment) do |deployment|
+ expect(deployment).to receive(:save!).and_raise("Something")
+ end
+
+ expect do
+ service.execute
+ end.to raise_error("Something")
+
+ expect(File.exist?(archive.path)).to eq(false)
+ end
+
+ context 'when pages site is deployed to legacy storage' do
+ before do
+ FileUtils.mkdir_p File.join(project.pages_path, "public")
+ File.open(File.join(project.pages_path, "public/index.html"), "w") do |f|
+ f.write("Hello!")
+ end
+ end
+
+ it 'creates pages deployment' do
+ expect do
+ expect(described_class.new(project).execute).to eq(status: :success)
+ end.to change { project.reload.pages_deployments.count }.by(1)
+
+ deployment = project.pages_metadatum.pages_deployment
+
+ Zip::File.open(deployment.file.path) do |zip_file|
+ expect(zip_file.glob("public").first.ftype).to eq(:directory)
+ expect(zip_file.glob("public/index.html").first.get_input_stream.read).to eq("Hello!")
+ end
+
+ expect(deployment.file_count).to eq(2)
+ expect(deployment.file_sha256).to eq(Digest::SHA256.file(deployment.file.path).hexdigest)
+ end
+
+ it 'removes tmp pages archive' do
+ described_class.new(project).execute
+
+ expect(File.exist?(File.join(project.pages_path, '@migrated.zip'))).to eq(false)
+ end
+
+ it 'does not change pages deployment if it is set' do
+ old_deployment = create(:pages_deployment, project: project)
+ project.update_pages_deployment!(old_deployment)
+
+ expect do
+ described_class.new(project).execute
+ end.not_to change { project.pages_metadatum.reload.pages_deployment_id }.from(old_deployment.id)
+ end
+
+ it 'raises exception if exclusive lease is taken' do
+ described_class.new(project).try_obtain_lease do
+ expect do
+ described_class.new(project).execute
+ end.to raise_error(described_class::ExclusiveLeaseTakenError)
+ end
+ end
+ end
+end
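
The new spec above leans on `expect_next_instance_of` to set expectations on an instance that the service creates internally. A rough sketch of how such a helper can be built on stock rspec-mocks (`and_wrap_original`) follows; this is an illustrative re-implementation, not GitLab's actual helper:

    # Illustrative expect_next_instance_of-style helper using plain RSpec.
    require 'rspec/autorun'

    class Archiver
      def run
        'zipped'
      end
    end

    module NextInstanceHelper
      def expect_next_instance_of(klass)
        # Wrap .new so we can hand the freshly built instance to the caller's block.
        allow(klass).to receive(:new).and_wrap_original do |original, *args|
          original.call(*args).tap { |instance| yield instance }
        end
      end
    end

    RSpec.describe 'stubbing the next instance' do
      include NextInstanceHelper

      it 'intercepts the instance created inside the code under test' do
        expect_next_instance_of(Archiver) do |archiver|
          expect(archiver).to receive(:run).and_return('stubbed')
        end

        expect(Archiver.new.run).to eq('stubbed')
      end
    end
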
diff --git a/spec/services/pages/zip_directory_service_spec.rb b/spec/services/pages/zip_directory_service_spec.rb
index 1568103d102..dcab6b2dada 100644
--- a/spec/services/pages/zip_directory_service_spec.rb
+++ b/spec/services/pages/zip_directory_service_spec.rb
@@ -14,19 +14,35 @@ RSpec.describe Pages::ZipDirectoryService do
described_class.new(@work_dir).execute
end
- let(:archive) { result.first }
- let(:entries_count) { result.second }
+ let(:status) { result[:status] }
+ let(:message) { result[:message] }
+ let(:archive) { result[:archive_path] }
+ let(:entries_count) { result[:entries_count] }
+
+ it 'returns error if project pages dir does not exist' do
+ expect(
+ described_class.new("/tmp/not/existing/dir").execute
+ ).to eq(status: :error, message: "Can not find valid public dir in /tmp/not/existing/dir")
+ end
+
+ it 'returns nils if there is no public directory and does not leave archive' do
+ expect(status).to eq(:error)
+ expect(message).to eq("Can not find valid public dir in #{@work_dir}")
+ expect(archive).to eq(nil)
+ expect(entries_count).to eq(nil)
- it 'raises error if there is no public directory' do
- expect { archive }.to raise_error(described_class::InvalidArchiveError)
+ expect(File.exist?(File.join(@work_dir, '@migrated.zip'))).to eq(false)
end
- it 'raises error if public directory is a symlink' do
+ it 'returns nils if public directory is a symlink' do
create_dir('target')
create_file('./target/index.html', 'hello')
create_link("public", "./target")
- expect { archive }.to raise_error(described_class::InvalidArchiveError)
+ expect(status).to eq(:error)
+ expect(message).to eq("Can not find valid public dir in #{@work_dir}")
+ expect(archive).to eq(nil)
+ expect(entries_count).to eq(nil)
end
context 'when there is a public directory' do
diff --git a/spec/services/post_receive_service_spec.rb b/spec/services/post_receive_service_spec.rb
index 4e303bfc20a..6e2cd7edf04 100644
--- a/spec/services/post_receive_service_spec.rb
+++ b/spec/services/post_receive_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe PostReceiveService do
include Gitlab::Routing
+ include AfterNextHelpers
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository, :wiki_repo, namespace: user.namespace) }
@@ -46,8 +47,8 @@ RSpec.describe PostReceiveService do
expect(subject).to be_empty
end
- it 'does not record a namespace onboarding progress action' do
- expect(NamespaceOnboardingAction).not_to receive(:create_action)
+ it 'does not record an onboarding progress action' do
+ expect_next(OnboardingProgressService).not_to receive(:execute)
subject
end
@@ -87,9 +88,9 @@ RSpec.describe PostReceiveService do
expect(response.reference_counter_decreased).to be(true)
end
- it 'records a namespace onboarding progress action' do
- expect(NamespaceOnboardingAction).to receive(:create_action)
- .with(project.namespace, :git_write)
+ it 'records an onboarding progress action' do
+ expect_next(OnboardingProgressService, project.namespace)
+ .to receive(:execute).with(action: :git_write)
subject
end
diff --git a/spec/services/projects/after_import_service_spec.rb b/spec/services/projects/after_import_service_spec.rb
index a109348ea19..a16aec891a9 100644
--- a/spec/services/projects/after_import_service_spec.rb
+++ b/spec/services/projects/after_import_service_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Projects::AfterImportService do
describe '#execute' do
before do
- allow(Projects::HousekeepingService)
+ allow(Repositories::HousekeepingService)
.to receive(:new).with(project).and_return(housekeeping_service)
allow(housekeeping_service)
@@ -73,10 +73,10 @@ RSpec.describe Projects::AfterImportService do
end
context 'when housekeeping service lease is taken' do
- let(:exception) { Projects::HousekeepingService::LeaseTaken.new }
+ let(:exception) { Repositories::HousekeepingService::LeaseTaken.new }
it 'logs the error message' do
- allow_next_instance_of(Projects::HousekeepingService) do |instance|
+ allow_next_instance_of(Repositories::HousekeepingService) do |instance|
expect(instance).to receive(:execute).and_raise(exception)
end
diff --git a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
index 8ddcb8ce660..17c2f0f6c17 100644
--- a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
@@ -3,11 +3,14 @@
require 'spec_helper'
RSpec.describe Projects::ContainerRepository::CleanupTagsService do
+ using RSpec::Parameterized::TableSyntax
+
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, :private) }
let_it_be(:repository) { create(:container_repository, :root, project: project) }
let(:service) { described_class.new(project, user, params) }
+ let(:tags) { %w[latest A Ba Bb C D E] }
before do
project.add_maintainer(user)
@@ -16,7 +19,8 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
stub_container_registry_tags(
repository: repository.path,
- tags: %w(latest A Ba Bb C D E))
+ tags: tags
+ )
stub_tag_digest('latest', 'sha256:configA')
stub_tag_digest('A', 'sha256:configA')
@@ -30,6 +34,8 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
stub_digest_config('sha256:configB', 5.days.ago)
stub_digest_config('sha256:configC', 1.month.ago)
stub_digest_config('sha256:configD', nil)
+
+ stub_feature_flags(container_registry_expiration_policies_throttling: false)
end
describe '#execute' do
@@ -42,7 +48,7 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
expect_any_instance_of(Projects::ContainerRepository::DeleteTagsService)
.not_to receive(:execute)
- is_expected.to include(status: :success, deleted: [])
+ is_expected.to eq(expected_service_response(before_truncate_size: 0, after_truncate_size: 0, before_delete_size: 0))
end
end
@@ -51,7 +57,7 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
it 'does remove all tags except latest' do
expect_delete(%w(A Ba Bb C D E))
- is_expected.to include(status: :success, deleted: %w(A Ba Bb C D E))
+ is_expected.to eq(expected_service_response(deleted: %w(A Ba Bb C D E)))
end
end
@@ -78,12 +84,7 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
subject
end
- it 'returns an error' do
- response = subject
-
- expect(response[:status]).to eq(:error)
- expect(response[:message]).to eq('invalid regex')
- end
+ it { is_expected.to eq(status: :error, message: 'invalid regex') }
it 'calls error tracking service' do
expect(Gitlab::ErrorTracking).to receive(:log_exception).and_call_original
@@ -119,7 +120,7 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
it 'does remove C and D' do
expect_delete(%w(C D))
- is_expected.to include(status: :success, deleted: %w(C D))
+ is_expected.to eq(expected_service_response(deleted: %w(C D), before_truncate_size: 2, after_truncate_size: 2, before_delete_size: 2))
end
context 'with overriding allow regex' do
@@ -131,7 +132,7 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
it 'does not remove C' do
expect_delete(%w(D))
- is_expected.to include(status: :success, deleted: %w(D))
+ is_expected.to eq(expected_service_response(deleted: %w(D), before_truncate_size: 1, after_truncate_size: 1, before_delete_size: 1))
end
end
@@ -144,7 +145,7 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
it 'does not remove C' do
expect_delete(%w(D))
- is_expected.to include(status: :success, deleted: %w(D))
+ is_expected.to eq(expected_service_response(deleted: %w(D), before_truncate_size: 1, after_truncate_size: 1, before_delete_size: 1))
end
end
end
@@ -158,7 +159,7 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
it 'does not remove B*' do
expect_delete(%w(A C D E))
- is_expected.to include(status: :success, deleted: %w(A C D E))
+ is_expected.to eq(expected_service_response(deleted: %w(A C D E), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 4))
end
end
@@ -173,7 +174,7 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
expect(service).to receive(:order_by_date).and_call_original
- is_expected.to include(status: :success, deleted: %w(Bb Ba C))
+ is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 3))
end
end
@@ -187,7 +188,7 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
expect(service).not_to receive(:order_by_date)
- is_expected.to include(status: :success, deleted: %w(A Ba Bb C))
+ is_expected.to eq(expected_service_response(deleted: %w(A Ba Bb C), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 4))
end
end
@@ -200,7 +201,7 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
it 'does remove B* and C as they are the oldest' do
expect_delete(%w(Bb Ba C))
- is_expected.to include(status: :success, deleted: %w(Bb Ba C))
+ is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
end
end
@@ -213,7 +214,7 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
it 'does remove B* and C as they are older than 1 day' do
expect_delete(%w(Ba Bb C))
- is_expected.to include(status: :success, deleted: %w(Ba Bb C))
+ is_expected.to eq(expected_service_response(deleted: %w(Ba Bb C), before_delete_size: 3))
end
end
@@ -227,7 +228,7 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
it 'does remove B* and C' do
expect_delete(%w(Bb Ba C))
- is_expected.to include(status: :success, deleted: %w(Bb Ba C))
+ is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
end
end
@@ -245,7 +246,7 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
it 'succeeds without a user' do
expect_delete(%w(Bb Ba C), container_expiration_policy: true)
- is_expected.to include(status: :success, deleted: %w(Bb Ba C))
+ is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
end
end
@@ -257,10 +258,73 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
end
it 'fails' do
- is_expected.to include(status: :error, message: 'access denied')
+ is_expected.to eq(status: :error, message: 'access denied')
end
end
end
+
+ context 'truncating the tags list' do
+ let(:params) do
+ {
+ 'name_regex_delete' => '.*',
+ 'keep_n' => 1
+ }
+ end
+
+ shared_examples 'returning the response' do |status:, original_size:, before_truncate_size:, after_truncate_size:, before_delete_size:|
+ it 'returns the response' do
+ result = subject
+
+ service_response = expected_service_response(
+ status: status,
+ original_size: original_size,
+ before_truncate_size: before_truncate_size,
+ after_truncate_size: after_truncate_size,
+ before_delete_size: before_delete_size,
+ deleted: nil
+ )
+
+ expect(result).to eq(service_response.compact)
+ end
+ end
+
+ where(:feature_flag_enabled, :max_list_size, :delete_tags_service_status, :expected_status, :expected_truncated) do
+ false | 10 | :success | :success | false
+ false | 10 | :error | :error | false
+ false | 3 | :success | :success | false
+ false | 3 | :error | :error | false
+ false | 0 | :success | :success | false
+ false | 0 | :error | :error | false
+ true | 10 | :success | :success | false
+ true | 10 | :error | :error | false
+ true | 3 | :success | :error | true
+ true | 3 | :error | :error | true
+ true | 0 | :success | :success | false
+ true | 0 | :error | :error | false
+ end
+
+ with_them do
+ before do
+ stub_feature_flags(container_registry_expiration_policies_throttling: feature_flag_enabled)
+ stub_application_setting(container_registry_cleanup_tags_service_max_list_size: max_list_size)
+ allow_next_instance_of(Projects::ContainerRepository::DeleteTagsService) do |service|
+ expect(service).to receive(:execute).and_return(status: delete_tags_service_status)
+ end
+ end
+
+ original_size = 7
+ keep_n = 1
+
+ it_behaves_like(
+ 'returning the response',
+ status: params[:expected_status],
+ original_size: original_size,
+ before_truncate_size: original_size - keep_n,
+ after_truncate_size: params[:expected_truncated] ? params[:max_list_size] + keep_n : original_size - keep_n,
+ before_delete_size: params[:expected_truncated] ? params[:max_list_size] : original_size - keep_n - 1 # one tag is filtered out with older_than filter
+ )
+ end
+ end
end
private
@@ -295,4 +359,16 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
.to receive(:execute)
.with(repository) { { status: :success, deleted: tags } }
end
+
+  # all those -1 are because the default tags on L13 include a "latest" tag that is filtered out
+ def expected_service_response(status: :success, deleted: [], original_size: tags.size, before_truncate_size: tags.size - 1, after_truncate_size: tags.size - 1, before_delete_size: tags.size - 1)
+ {
+ status: status,
+ deleted: deleted,
+ original_size: original_size,
+ before_truncate_size: before_truncate_size,
+ after_truncate_size: after_truncate_size,
+ before_delete_size: before_delete_size
+ }.compact
+ end
end
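
The truncation matrix added above uses the rspec-parameterized gem's table syntax (`using RSpec::Parameterized::TableSyntax` with `where` / `with_them`). A small self-contained sketch of how that syntax expands rows into contexts, with made-up column names and a simplified truncation rule:

    # Minimal where/with_them sketch; requires the rspec-parameterized gem.
    require 'rspec/autorun'
    require 'rspec-parameterized'

    RSpec.describe 'truncation decision' do
      using RSpec::Parameterized::TableSyntax

      # Each row becomes its own example group, with the values bound as lets.
      where(:throttling_enabled, :list_size, :max_size, :expected_truncated) do
        false | 7 | 3 | false
        true  | 7 | 3 | true
        true  | 7 | 0 | false
      end

      with_them do
        it 'truncates only when throttling is on and a positive limit applies' do
          truncated = throttling_enabled && max_size > 0 && list_size > max_size

          expect(truncated).to eq(expected_truncated)
        end
      end
    end
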
diff --git a/spec/services/projects/container_repository/delete_tags_service_spec.rb b/spec/services/projects/container_repository/delete_tags_service_spec.rb
index 4da416d9698..94037d6de1e 100644
--- a/spec/services/projects/container_repository/delete_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/delete_tags_service_spec.rb
@@ -119,9 +119,9 @@ RSpec.describe Projects::ContainerRepository::DeleteTagsService do
end
end
- it { is_expected.to include(status: :error, message: 'timeout while deleting tags') }
+ it { is_expected.to include(status: :error, message: 'error while deleting tags') }
- it_behaves_like 'logging an error response', message: 'timeout while deleting tags', extra_log: { deleted_tags_count: 0 }
+ it_behaves_like 'logging an error response', message: 'error while deleting tags', extra_log: { deleted_tags_count: 0 }
end
end
end
diff --git a/spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb b/spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb
index 988971171fc..74f782538c5 100644
--- a/spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb
@@ -67,7 +67,7 @@ RSpec.describe Projects::ContainerRepository::Gitlab::DeleteTagsService do
stub_delete_reference_requests('A' => 200)
end
- it { is_expected.to eq(status: :error, message: 'timeout while deleting tags', deleted: ['A']) }
+ it { is_expected.to eq(status: :error, message: 'error while deleting tags', deleted: ['A'], exception_class_name: Projects::ContainerRepository::Gitlab::DeleteTagsService::TimeoutError.name) }
it 'tracks the exception' do
expect(::Gitlab::ErrorTracking)
@@ -89,6 +89,21 @@ RSpec.describe Projects::ContainerRepository::Gitlab::DeleteTagsService do
it_behaves_like 'deleting tags'
end
end
+
+ context 'with a network error' do
+ before do
+ expect(service).to receive(:delete_tags).and_raise(::Faraday::TimeoutError)
+ end
+
+ it { is_expected.to eq(status: :error, message: 'error while deleting tags', deleted: [], exception_class_name: ::Faraday::TimeoutError.name) }
+
+ it 'tracks the exception' do
+ expect(::Gitlab::ErrorTracking)
+ .to receive(:track_exception).with(::Faraday::TimeoutError, tags_count: tags.size, container_repository_id: repository.id)
+
+ subject
+ end
+ end
end
end
diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb
index 60dfee820ca..a11f16573f5 100644
--- a/spec/services/projects/fork_service_spec.rb
+++ b/spec/services/projects/fork_service_spec.rb
@@ -116,6 +116,24 @@ RSpec.describe Projects::ForkService do
expect(to_project.fork_network_member.forked_from_project).to eq(from_forked_project)
end
+ context 'when the forked project has higher visibility than the root project' do
+ let(:root_project) { create(:project, :public) }
+
+ it 'successfully creates a fork of the fork with correct visibility' do
+ forked_project = fork_project(root_project, @to_user, using_service: true)
+
+ root_project.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+
+ # Forked project visibility is not affected by root project visibility change
+ expect(forked_project).to have_attributes(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+
+ fork_of_the_fork = fork_project(forked_project, @to_user, namespace: other_namespace, using_service: true)
+
+ expect(fork_of_the_fork).to be_valid
+ expect(fork_of_the_fork).to have_attributes(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ end
+ end
+
it_behaves_like 'forks count cache refresh' do
let(:from_project) { from_forked_project }
let(:to_user) { @to_user }
diff --git a/spec/services/projects/housekeeping_service_spec.rb b/spec/services/projects/housekeeping_service_spec.rb
index 18871f010f8..0aa4a1cd312 100644
--- a/spec/services/projects/housekeeping_service_spec.rb
+++ b/spec/services/projects/housekeeping_service_spec.rb
@@ -2,127 +2,19 @@
require 'spec_helper'
+# This is a compatibility class to avoid calling a non-existent
+# class from Sidekiq during deployment.
+#
+# We're deploying the rename of the referenced class in 13.9. Nevertheless,
+# we cannot remove the class entirely because there can be jobs
+# referencing it. We still need these specs to ensure that the old
+# class still has the old behavior.
+#
+# We can get rid of this class in 13.10
+# https://gitlab.com/gitlab-org/gitlab/-/issues/297580
+#
RSpec.describe Projects::HousekeepingService do
- subject { described_class.new(project) }
-
- let_it_be(:project) { create(:project, :repository) }
-
- before do
- project.reset_pushes_since_gc
- end
-
- after do
- project.reset_pushes_since_gc
- end
-
- describe '#execute' do
- it 'enqueues a sidekiq job' do
- expect(subject).to receive(:try_obtain_lease).and_return(:the_uuid)
- expect(subject).to receive(:lease_key).and_return(:the_lease_key)
- expect(subject).to receive(:task).and_return(:incremental_repack)
- expect(GitGarbageCollectWorker).to receive(:perform_async).with(project.id, :incremental_repack, :the_lease_key, :the_uuid).and_call_original
-
- Sidekiq::Testing.fake! do
- expect { subject.execute }.to change(GitGarbageCollectWorker.jobs, :size).by(1)
- end
- end
-
- it 'yields the block if given' do
- expect do |block|
- subject.execute(&block)
- end.to yield_with_no_args
- end
-
- it 'resets counter after execution' do
- expect(subject).to receive(:try_obtain_lease).and_return(:the_uuid)
- allow(subject).to receive(:gc_period).and_return(1)
- project.increment_pushes_since_gc
-
- perform_enqueued_jobs do
- expect { subject.execute }.to change { project.pushes_since_gc }.to(0)
- end
- end
-
- context 'when no lease can be obtained' do
- before do
- expect(subject).to receive(:try_obtain_lease).and_return(false)
- end
-
- it 'does not enqueue a job' do
- expect(GitGarbageCollectWorker).not_to receive(:perform_async)
-
- expect { subject.execute }.to raise_error(Projects::HousekeepingService::LeaseTaken)
- end
-
- it 'does not reset pushes_since_gc' do
- expect do
- expect { subject.execute }.to raise_error(Projects::HousekeepingService::LeaseTaken)
- end.not_to change { project.pushes_since_gc }
- end
-
- it 'does not yield' do
- expect do |block|
- expect { subject.execute(&block) }
- .to raise_error(Projects::HousekeepingService::LeaseTaken)
- end.not_to yield_with_no_args
- end
- end
-
- context 'task type' do
- it 'goes through all three housekeeping tasks, executing only the highest task when there is overlap' do
- allow(subject).to receive(:try_obtain_lease).and_return(:the_uuid)
- allow(subject).to receive(:lease_key).and_return(:the_lease_key)
-
- # At push 200
- expect(GitGarbageCollectWorker).to receive(:perform_async).with(project.id, :gc, :the_lease_key, :the_uuid)
- .once
- # At push 50, 100, 150
- expect(GitGarbageCollectWorker).to receive(:perform_async).with(project.id, :full_repack, :the_lease_key, :the_uuid)
- .exactly(3).times
- # At push 10, 20, ... (except those above)
- expect(GitGarbageCollectWorker).to receive(:perform_async).with(project.id, :incremental_repack, :the_lease_key, :the_uuid)
- .exactly(16).times
- # At push 6, 12, 18, ... (except those above)
- expect(GitGarbageCollectWorker).to receive(:perform_async).with(project.id, :pack_refs, :the_lease_key, :the_uuid)
- .exactly(27).times
-
- 201.times do
- subject.increment!
- subject.execute if subject.needed?
- end
-
- expect(project.pushes_since_gc).to eq(1)
- end
- end
-
- it 'runs the task specifically requested' do
- housekeeping = described_class.new(project, :gc)
-
- allow(housekeeping).to receive(:try_obtain_lease).and_return(:gc_uuid)
- allow(housekeeping).to receive(:lease_key).and_return(:gc_lease_key)
-
- expect(GitGarbageCollectWorker).to receive(:perform_async).with(project.id, :gc, :gc_lease_key, :gc_uuid).twice
-
- 2.times do
- housekeeping.execute
- end
- end
- end
-
- describe '#needed?' do
- it 'when the count is low enough' do
- expect(subject.needed?).to eq(false)
- end
-
- it 'when the count is high enough' do
- allow(project).to receive(:pushes_since_gc).and_return(10)
- expect(subject.needed?).to eq(true)
- end
- end
-
- describe '#increment!' do
- it 'increments the pushes_since_gc counter' do
- expect { subject.increment! }.to change { project.pushes_since_gc }.by(1)
- end
+ it_behaves_like 'housekeeps repository' do
+ let_it_be(:resource) { create(:project, :repository) }
end
end
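
The comment above describes keeping `Projects::HousekeepingService` as a thin compatibility layer over `Repositories::HousekeepingService` so that already-enqueued Sidekiq jobs keep resolving the old constant. The application code is not part of this diff, so the following is only a hedged sketch of what such a shim could look like, assuming simple inheritance:

    # Hypothetical compatibility shim; the inheritance shown is an assumption.
    module Repositories
      class HousekeepingService
        def initialize(resource, task = nil)
          @resource = resource
          @task = task
        end

        def execute
          # ... real housekeeping logic would live here ...
          :housekeeping_done
        end
      end
    end

    module Projects
      # Old constant kept only so jobs enqueued before the deploy still resolve.
      class HousekeepingService < Repositories::HousekeepingService
      end
    end

    Projects::HousekeepingService.new(:some_project).execute # => :housekeeping_done
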
diff --git a/spec/services/projects/schedule_bulk_repository_shard_moves_service_spec.rb b/spec/services/projects/schedule_bulk_repository_shard_moves_service_spec.rb
index 5b76386bfab..15c9d1e5925 100644
--- a/spec/services/projects/schedule_bulk_repository_shard_moves_service_spec.rb
+++ b/spec/services/projects/schedule_bulk_repository_shard_moves_service_spec.rb
@@ -3,45 +3,10 @@
require 'spec_helper'
RSpec.describe Projects::ScheduleBulkRepositoryShardMovesService do
- before do
- stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/extra_storage' })
- end
-
- let!(:project) { create(:project, :repository).tap { |project| project.track_project_repository } }
- let(:source_storage_name) { 'default' }
- let(:destination_storage_name) { 'test_second_storage' }
-
- describe '#execute' do
- it 'schedules project repository storage moves' do
- expect { subject.execute(source_storage_name, destination_storage_name) }
- .to change(ProjectRepositoryStorageMove, :count).by(1)
-
- storage_move = project.repository_storage_moves.last!
-
- expect(storage_move).to have_attributes(
- source_storage_name: source_storage_name,
- destination_storage_name: destination_storage_name,
- state_name: :scheduled
- )
- end
-
- context 'read-only repository' do
- let!(:project) { create(:project, :repository, :read_only).tap { |project| project.track_project_repository } }
-
- it 'does not get scheduled' do
- expect(subject).to receive(:log_info)
- .with("Project #{project.full_path} (#{project.id}) was skipped: Project is read only")
- expect { subject.execute(source_storage_name, destination_storage_name) }
- .to change(ProjectRepositoryStorageMove, :count).by(0)
- end
- end
- end
-
- describe '.enqueue' do
- it 'defers to the worker' do
- expect(::ProjectScheduleBulkRepositoryShardMovesWorker).to receive(:perform_async).with(source_storage_name, destination_storage_name)
+ it_behaves_like 'moves repository shard in bulk' do
+ let_it_be_with_reload(:container) { create(:project, :repository).tap { |project| project.track_project_repository } }
- described_class.enqueue(source_storage_name, destination_storage_name)
- end
+ let(:move_service_klass) { ProjectRepositoryStorageMove }
+ let(:bulk_worker_klass) { ::ProjectScheduleBulkRepositoryShardMovesWorker }
end
end
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index a15f6bdbe2c..a6730c5de52 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -79,6 +79,19 @@ RSpec.describe Projects::UpdatePagesService do
end
end
+ it 'fails if sha on branch was updated before deployment was uploaded' do
+ expect(subject).to receive(:create_pages_deployment).and_wrap_original do |m, *args|
+ build.update!(ref: 'feature')
+ m.call(*args)
+ end
+
+ expect(execute).not_to eq(:success)
+ expect(project.pages_metadatum).not_to be_deployed
+
+ expect(deploy_status).to be_failed
+ expect(deploy_status.description).to eq('build SHA is outdated for this ref')
+ end
+
it 'does not fail if pages_metadata is absent' do
project.pages_metadatum.destroy!
project.reload
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index 760cd85bf71..a59b6adf346 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -15,13 +15,6 @@ RSpec.describe Projects::UpdateService do
let(:admin) { create(:admin) }
context 'when changing visibility level' do
- def expect_to_call_unlink_fork_service
- service = Projects::UnlinkForkService.new(project, user)
-
- expect(Projects::UnlinkForkService).to receive(:new).with(project, user).and_return(service)
- expect(service).to receive(:execute).and_call_original
- end
-
context 'when visibility_level changes to INTERNAL' do
it 'updates the project to internal' do
expect(TodosDestroyer::ProjectPrivateWorker).not_to receive(:perform_in)
@@ -31,18 +24,6 @@ RSpec.describe Projects::UpdateService do
expect(result).to eq({ status: :success })
expect(project).to be_internal
end
-
- context 'and project is PUBLIC' do
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
- end
-
- it 'unlinks project from fork network' do
- expect_to_call_unlink_fork_service
-
- update_project(project, user, visibility_level: Gitlab::VisibilityLevel::INTERNAL)
- end
- end
end
context 'when visibility_level changes to PUBLIC' do
@@ -78,30 +59,6 @@ RSpec.describe Projects::UpdateService do
expect(result).to eq({ status: :success })
expect(project).to be_private
end
-
- context 'and project is PUBLIC' do
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
- end
-
- it 'unlinks project from fork network' do
- expect_to_call_unlink_fork_service
-
- update_project(project, user, visibility_level: Gitlab::VisibilityLevel::PRIVATE)
- end
- end
-
- context 'and project is INTERNAL' do
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
- end
-
- it 'unlinks project from fork network' do
- expect_to_call_unlink_fork_service
-
- update_project(project, user, visibility_level: Gitlab::VisibilityLevel::PRIVATE)
- end
- end
end
context 'when visibility levels are restricted to PUBLIC only' do
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index e6d1d0e90a7..21e294418a1 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe QuickActions::InterpretService do
let_it_be(:project) { public_project }
let_it_be(:developer) { create(:user) }
let_it_be(:developer2) { create(:user) }
+ let_it_be(:developer3) { create(:user) }
let_it_be_with_reload(:issue) { create(:issue, project: project) }
let(:milestone) { create(:milestone, project: project, title: '9.10') }
let(:commit) { create(:commit, project: project) }
@@ -23,6 +24,7 @@ RSpec.describe QuickActions::InterpretService do
before do
stub_licensed_features(multiple_issue_assignees: false,
+ multiple_merge_request_reviewers: false,
multiple_merge_request_assignees: false)
end
@@ -665,6 +667,24 @@ RSpec.describe QuickActions::InterpretService do
end
end
+ shared_examples 'assign_reviewer command' do
+ it 'assigns a reviewer to a single user' do
+ _, updates, message = service.execute(content, issuable)
+
+ expect(updates).to eq(reviewer_ids: [developer.id])
+ expect(message).to eq("Assigned #{developer.to_reference} as reviewer.")
+ end
+ end
+
+ shared_examples 'unassign_reviewer command' do
+ it 'removes a single reviewer' do
+ _, updates, message = service.execute(content, issuable)
+
+ expect(updates).to eq(reviewer_ids: [])
+ expect(message).to eq("Removed reviewer #{developer.to_reference}.")
+ end
+ end
+
it_behaves_like 'reopen command' do
let(:content) { '/reopen' }
let(:issuable) { issue }
@@ -779,6 +799,11 @@ RSpec.describe QuickActions::InterpretService do
it_behaves_like 'assign command' do
let(:content) { "/assign @#{developer.username}" }
+ let(:issuable) { create(:incident, project: project) }
+ end
+
+ it_behaves_like 'assign command' do
+ let(:content) { "/assign @#{developer.username}" }
let(:issuable) { merge_request }
end
end
@@ -789,12 +814,32 @@ RSpec.describe QuickActions::InterpretService do
project.add_developer(developer2)
end
- it_behaves_like 'assign command' do
+ # There's no guarantee that the reference extractor will preserve
+ # the order of the mentioned users since this is dependent on the
+ # order in which rows are returned. We just ensure that at least
+ # one of the mentioned users is assigned.
+ shared_examples 'assigns to one of the two users' do
+ let(:content) { "/assign @#{developer.username} @#{developer2.username}" }
+
+ it 'assigns to a single user' do
+ _, updates, message = service.execute(content, issuable)
+
+ expect(updates[:assignee_ids].count).to eq(1)
+ assignee = updates[:assignee_ids].first
+ expect([developer.id, developer2.id]).to include(assignee)
+
+ user = assignee == developer.id ? developer : developer2
+
+ expect(message).to match("Assigned #{user.to_reference}.")
+ end
+ end
+
+ it_behaves_like 'assigns to one of the two users' do
let(:content) { "/assign @#{developer.username} @#{developer2.username}" }
let(:issuable) { issue }
end
- it_behaves_like 'assign command', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/27989' do
+ it_behaves_like 'assigns to one of the two users' do
let(:content) { "/assign @#{developer.username} @#{developer2.username}" }
let(:issuable) { merge_request }
end
@@ -834,6 +879,142 @@ RSpec.describe QuickActions::InterpretService do
let(:issuable) { issue }
end
+ context 'when the merge_request_reviewers flag is enabled' do
+ describe 'assign_reviewer command' do
+ let(:content) { "/assign_reviewer @#{developer.username}" }
+ let(:issuable) { merge_request }
+
+ context 'with one user' do
+ it_behaves_like 'assign_reviewer command'
+ end
+
+ context 'with an issue instead of a merge request' do
+ let(:issuable) { issue }
+
+ it_behaves_like 'empty command'
+ end
+
+ # CE does not have multiple reviewers
+ context 'assign command with multiple assignees' do
+ before do
+ project.add_developer(developer2)
+ end
+
+ # There's no guarantee that the reference extractor will preserve
+ # the order of the mentioned users since this is dependent on the
+ # order in which rows are returned. We just ensure that at least
+ # one of the mentioned users is assigned.
+ context 'assigns to one of the two users' do
+ let(:content) { "/assign_reviewer @#{developer.username} @#{developer2.username}" }
+
+ it 'assigns to a single reviewer' do
+ _, updates, message = service.execute(content, issuable)
+
+ expect(updates[:reviewer_ids].count).to eq(1)
+ reviewer = updates[:reviewer_ids].first
+ expect([developer.id, developer2.id]).to include(reviewer)
+
+ user = reviewer == developer.id ? developer : developer2
+
+ expect(message).to match("Assigned #{user.to_reference} as reviewer.")
+ end
+ end
+ end
+
+ context 'with "me" alias' do
+ let(:content) { '/assign_reviewer me' }
+
+ it_behaves_like 'assign_reviewer command'
+ end
+
+ context 'with an alias and whitespace' do
+ let(:content) { '/assign_reviewer me ' }
+
+ it_behaves_like 'assign_reviewer command'
+ end
+
+ context 'with an incorrect user' do
+ let(:content) { '/assign_reviewer @abcd1234' }
+
+ it_behaves_like 'empty command', "Failed to assign a reviewer because no user was found."
+ end
+
+ context 'with the "reviewer" alias' do
+ let(:content) { "/reviewer @#{developer.username}" }
+
+ it_behaves_like 'assign_reviewer command'
+ end
+
+ context 'with no user' do
+ let(:content) { '/assign_reviewer' }
+
+ it_behaves_like 'empty command', "Failed to assign a reviewer because no user was found."
+ end
+
+ context 'includes only the user reference with extra text' do
+ let(:content) { "/assign_reviewer @#{developer.username} do it!" }
+
+ it_behaves_like 'assign_reviewer command'
+ end
+ end
+
+ describe 'unassign_reviewer command' do
+ # CE does not have multiple reviewers, so basically anything
+ # after /unassign_reviewer (including whitespace) will remove
+ # all the current reviewers.
+ let(:issuable) { create(:merge_request, reviewers: [developer]) }
+ let(:content) { "/unassign_reviewer @#{developer.username}" }
+
+ context 'with one user' do
+ it_behaves_like 'unassign_reviewer command'
+ end
+
+ context 'with an issue instead of a merge request' do
+ let(:issuable) { issue }
+
+ it_behaves_like 'empty command'
+ end
+
+ context 'with anything after the command' do
+ let(:content) { '/unassign_reviewer supercalifragilisticexpialidocious' }
+
+ it_behaves_like 'unassign_reviewer command'
+ end
+
+ context 'with the "remove_reviewer" alias' do
+ let(:content) { "/remove_reviewer @#{developer.username}" }
+
+ it_behaves_like 'unassign_reviewer command'
+ end
+
+ context 'with no user' do
+ let(:content) { '/unassign_reviewer' }
+
+ it_behaves_like 'unassign_reviewer command'
+ end
+ end
+ end
+
+ context 'when the merge_request_reviewers flag is disabled' do
+ before do
+ stub_feature_flags(merge_request_reviewers: false)
+ end
+
+ describe 'assign_reviewer command' do
+ it_behaves_like 'empty command' do
+ let(:content) { "/assign_reviewer @#{developer.username}" }
+ let(:issuable) { merge_request }
+ end
+ end
+
+ describe 'unassign_reviewer command' do
+ it_behaves_like 'empty command' do
+ let(:content) { "/unassign_reviewer @#{developer.username}" }
+ let(:issuable) { merge_request }
+ end
+ end
+ end
+
context 'unassign command' do
let(:content) { '/unassign' }
@@ -1117,6 +1298,11 @@ RSpec.describe QuickActions::InterpretService do
let(:issuable) { issue }
end
+ it_behaves_like 'confidential command' do
+ let(:content) { '/confidential' }
+ let(:issuable) { create(:incident, project: project) }
+ end
+
it_behaves_like 'lock command' do
let(:content) { '/lock' }
let(:issuable) { issue }
@@ -1819,6 +2005,28 @@ RSpec.describe QuickActions::InterpretService do
end
end
+ describe 'unassign_reviewer command' do
+ let(:content) { '/unassign_reviewer' }
+ let(:merge_request) { create(:merge_request, source_project: project, reviewers: [developer]) }
+
+    it 'includes current reviewer reference' do
+ _, explanations = service.explain(content, merge_request)
+
+ expect(explanations).to eq(["Removes reviewer @#{developer.username}."])
+ end
+ end
+
+ describe 'assign_reviewer command' do
+ let(:content) { "/assign_reviewer #{developer.to_reference}" }
+ let(:merge_request) { create(:merge_request, source_project: project, assignees: [developer]) }
+
+ it 'includes only the user reference' do
+ _, explanations = service.explain(content, merge_request)
+
+ expect(explanations).to eq(["Assigns #{developer.to_reference} as reviewer."])
+ end
+ end
+
describe 'milestone command' do
let(:content) { '/milestone %wrong-milestone' }
let!(:milestone) { create(:milestone, project: project, title: '9.10') }
diff --git a/spec/services/repositories/housekeeping_service_spec.rb b/spec/services/repositories/housekeeping_service_spec.rb
new file mode 100644
index 00000000000..fbd9affb33c
--- /dev/null
+++ b/spec/services/repositories/housekeeping_service_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Repositories::HousekeepingService do
+ it_behaves_like 'housekeeps repository' do
+ let_it_be(:resource) { create(:project, :repository) }
+ end
+
+ it_behaves_like 'housekeeps repository' do
+ let_it_be(:project) { create(:project, :wiki_repo) }
+ let_it_be(:resource) { project.wiki }
+ end
+end
diff --git a/spec/services/resource_events/change_state_service_spec.rb b/spec/services/resource_events/change_state_service_spec.rb
index 5b5379b241b..255ee9eca57 100644
--- a/spec/services/resource_events/change_state_service_spec.rb
+++ b/spec/services/resource_events/change_state_service_spec.rb
@@ -30,6 +30,15 @@ RSpec.describe ResourceEvents::ChangeStateService do
expect_event_source(event, source)
end
+
+ it "sets the created_at timestamp from the system_note_timestamp" do
+ resource.system_note_timestamp = Time.at(43).utc
+
+ described_class.new(user: user, resource: resource).execute(status: state, mentionable_source: source)
+ event = resource.resource_state_events.last
+
+ expect(event.created_at).to eq(Time.at(43).utc)
+ end
end
end
diff --git a/spec/services/service_desk_settings/update_service_spec.rb b/spec/services/service_desk_settings/update_service_spec.rb
index fbef587365d..72134af1369 100644
--- a/spec/services/service_desk_settings/update_service_spec.rb
+++ b/spec/services/service_desk_settings/update_service_spec.rb
@@ -16,19 +16,6 @@ RSpec.describe ServiceDeskSettings::UpdateService do
expect(settings.reload.outgoing_name).to eq 'some name'
expect(settings.reload.project_key).to eq 'foo'
end
-
- context 'when service_desk_custom_address is disabled' do
- before do
- stub_feature_flags(service_desk_custom_address: false)
- end
-
- it 'ignores project_key parameter' do
- result = described_class.new(settings.project, user, params).execute
-
- expect(result[:status]).to eq :success
- expect(settings.reload.project_key).to be_nil
- end
- end
end
context 'when project_key is an empty string' do
diff --git a/spec/services/snippets/schedule_bulk_repository_shard_moves_service_spec.rb b/spec/services/snippets/schedule_bulk_repository_shard_moves_service_spec.rb
new file mode 100644
index 00000000000..764c7f94a46
--- /dev/null
+++ b/spec/services/snippets/schedule_bulk_repository_shard_moves_service_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Snippets::ScheduleBulkRepositoryShardMovesService do
+ it_behaves_like 'moves repository shard in bulk' do
+ let_it_be_with_reload(:container) { create(:snippet, :repository) }
+
+ let(:move_service_klass) { SnippetRepositoryStorageMove }
+ let(:bulk_worker_klass) { ::SnippetScheduleBulkRepositoryShardMovesWorker }
+ end
+end
diff --git a/spec/services/snippets/update_repository_storage_service_spec.rb b/spec/services/snippets/update_repository_storage_service_spec.rb
new file mode 100644
index 00000000000..b2bcd620d76
--- /dev/null
+++ b/spec/services/snippets/update_repository_storage_service_spec.rb
@@ -0,0 +1,137 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Snippets::UpdateRepositoryStorageService do
+ include Gitlab::ShellAdapter
+
+ subject { described_class.new(repository_storage_move) }
+
+ describe "#execute" do
+ let_it_be_with_reload(:snippet) { create(:snippet, :repository) }
+ let_it_be(:destination) { 'test_second_storage' }
+ let_it_be(:checksum) { snippet.repository.checksum }
+
+ let(:repository_storage_move_state) { :scheduled }
+ let(:repository_storage_move) { create(:snippet_repository_storage_move, repository_storage_move_state, container: snippet, destination_storage_name: destination) }
+ let(:snippet_repository_double) { double(:repository) }
+ let(:original_snippet_repository_double) { double(:repository) }
+
+ before do
+ allow(Gitlab.config.repositories.storages).to receive(:keys).and_return(%w[default test_second_storage])
+ allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('default').and_call_original
+ allow(Gitlab::GitalyClient).to receive(:filesystem_id).with(destination).and_return(SecureRandom.uuid)
+ allow(Gitlab::Git::Repository).to receive(:new).and_call_original
+ allow(Gitlab::Git::Repository).to receive(:new)
+ .with(destination, snippet.repository.raw.relative_path, snippet.repository.gl_repository, snippet.repository.full_path)
+ .and_return(snippet_repository_double)
+ allow(Gitlab::Git::Repository).to receive(:new)
+ .with('default', snippet.repository.raw.relative_path, nil, nil)
+ .and_return(original_snippet_repository_double)
+ end
+
+ context 'when the move succeeds' do
+ it 'moves the repository to the new storage and unmarks the repository as read only' do
+ old_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
+ snippet.repository.path_to_repo
+ end
+
+ expect(snippet_repository_double).to receive(:replicate)
+ .with(snippet.repository.raw)
+ expect(snippet_repository_double).to receive(:checksum)
+ .and_return(checksum)
+ expect(original_snippet_repository_double).to receive(:remove)
+
+ result = subject.execute
+ snippet.reload
+
+ expect(result).to be_success
+ expect(snippet).not_to be_repository_read_only
+ expect(snippet.repository_storage).to eq(destination)
+ expect(gitlab_shell.repository_exists?('default', old_path)).to be(false)
+ expect(snippet.snippet_repository.shard_name).to eq(destination)
+ end
+ end
+
+ context 'when the filesystems are the same' do
+ let(:destination) { snippet.repository_storage }
+
+ it 'bails out and does nothing' do
+ result = subject.execute
+
+ expect(result).to be_error
+ expect(result.message).to match(/SameFilesystemError/)
+ end
+ end
+
+ context 'when the move fails' do
+ it 'unmarks the repository as read-only without updating the repository storage' do
+ expect(snippet_repository_double).to receive(:replicate)
+ .with(snippet.repository.raw)
+ .and_raise(Gitlab::Git::CommandError)
+
+ result = subject.execute
+
+ expect(result).to be_error
+ expect(snippet).not_to be_repository_read_only
+ expect(snippet.repository_storage).to eq('default')
+ expect(repository_storage_move).to be_failed
+ end
+ end
+
+ context 'when the cleanup fails' do
+ it 'sets the correct state' do
+ expect(snippet_repository_double).to receive(:replicate)
+ .with(snippet.repository.raw)
+ expect(snippet_repository_double).to receive(:checksum)
+ .and_return(checksum)
+ expect(original_snippet_repository_double).to receive(:remove)
+ .and_raise(Gitlab::Git::CommandError)
+
+ result = subject.execute
+
+ expect(result).to be_error
+ expect(repository_storage_move).to be_cleanup_failed
+ end
+ end
+
+ context 'when the checksum does not match' do
+ it 'unmarks the repository as read-only without updating the repository storage' do
+ expect(snippet_repository_double).to receive(:replicate)
+ .with(snippet.repository.raw)
+ expect(snippet_repository_double).to receive(:checksum)
+ .and_return('not matching checksum')
+
+ result = subject.execute
+
+ expect(result).to be_error
+ expect(snippet).not_to be_repository_read_only
+ expect(snippet.repository_storage).to eq('default')
+ end
+ end
+
+ context 'when the repository move is finished' do
+ let(:repository_storage_move_state) { :finished }
+
+ it 'is idempotent' do
+ expect do
+ result = subject.execute
+
+ expect(result).to be_success
+ end.not_to change(repository_storage_move, :state)
+ end
+ end
+
+ context 'when the repository move is failed' do
+ let(:repository_storage_move_state) { :failed }
+
+ it 'is idempotent' do
+ expect do
+ result = subject.execute
+
+ expect(result).to be_success
+ end.not_to change(repository_storage_move, :state)
+ end
+ end
+ end
+end
diff --git a/spec/services/todo_service_spec.rb b/spec/services/todo_service_spec.rb
index 90325c564bc..83d233a8112 100644
--- a/spec/services/todo_service_spec.rb
+++ b/spec/services/todo_service_spec.rb
@@ -100,17 +100,18 @@ RSpec.describe TodoService do
end
describe 'Issues' do
- let(:issue) { create(:issue, project: project, assignees: [john_doe], author: author, description: "- [ ] Task 1\n- [ ] Task 2 #{mentions}") }
- let(:addressed_issue) { create(:issue, project: project, assignees: [john_doe], author: author, description: "#{directly_addressed}\n- [ ] Task 1\n- [ ] Task 2") }
+ let(:issue) { create(:issue, project: project, author: author, description: "- [ ] Task 1\n- [ ] Task 2 #{mentions}") }
+ let(:addressed_issue) { create(:issue, project: project, author: author, description: "#{directly_addressed}\n- [ ] Task 1\n- [ ] Task 2") }
+ let(:assigned_issue) { create(:issue, project: project, assignees: [john_doe]) }
let(:unassigned_issue) { create(:issue, project: project, assignees: []) }
let(:confidential_issue) { create(:issue, :confidential, project: project, author: author, assignees: [assignee], description: mentions) }
let(:addressed_confident_issue) { create(:issue, :confidential, project: project, author: author, assignees: [assignee], description: directly_addressed) }
describe '#new_issue' do
it 'creates a todo if assigned' do
- service.new_issue(issue, author)
+ service.new_issue(assigned_issue, author)
- should_create_todo(user: john_doe, target: issue, action: Todo::ASSIGNED)
+ should_create_todo(user: john_doe, target: assigned_issue, action: Todo::ASSIGNED)
end
it 'does not create a todo if unassigned' do
@@ -130,7 +131,7 @@ RSpec.describe TodoService do
should_create_todo(user: member, target: issue, action: Todo::MENTIONED)
should_create_todo(user: guest, target: issue, action: Todo::MENTIONED)
should_create_todo(user: author, target: issue, action: Todo::MENTIONED)
- should_not_create_todo(user: john_doe, target: issue, action: Todo::MENTIONED)
+ should_create_todo(user: john_doe, target: issue, action: Todo::MENTIONED)
should_not_create_todo(user: non_member, target: issue, action: Todo::MENTIONED)
end
@@ -140,7 +141,7 @@ RSpec.describe TodoService do
should_create_todo(user: member, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: guest, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: author, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
- should_not_create_todo(user: john_doe, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
+ should_create_todo(user: john_doe, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: non_member, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
end
@@ -244,6 +245,8 @@ RSpec.describe TodoService do
end
it 'does not create a todo if user was already mentioned and todo is pending' do
+ stub_feature_flags(multiple_todos: false)
+
create(:todo, :mentioned, user: member, project: project, target: issue, author: author)
expect { service.update_issue(issue, author, skip_users) }.not_to change(member.todos, :count)
@@ -256,6 +259,8 @@ RSpec.describe TodoService do
end
it 'does not create a directly addressed todo if user was already mentioned or addressed and todo is pending' do
+ stub_feature_flags(multiple_todos: false)
+
create(:todo, :directly_addressed, user: member, project: project, target: addressed_issue, author: author)
expect { service.update_issue(addressed_issue, author, skip_users) }.not_to change(member.todos, :count)
@@ -622,6 +627,26 @@ RSpec.describe TodoService do
expect(service.todo_exist?(unassigned_issue, author)).to be_truthy
end
end
+
+ context 'when multiple_todos are enabled' do
+ before do
+ stub_feature_flags(multiple_todos: true)
+ end
+
+ it 'creates a todo even if user already has a pending todo' do
+ create(:todo, :mentioned, user: member, project: project, target: issue, author: author)
+
+ expect { service.update_issue(issue, author) }.to change(member.todos, :count)
+ end
+
+ it 'creates multiple todos if a user is assigned and mentioned in a new issue' do
+ assigned_issue.description = mentions
+ service.new_issue(assigned_issue, author)
+
+ should_create_todo(user: john_doe, target: assigned_issue, action: Todo::ASSIGNED)
+ should_create_todo(user: john_doe, target: assigned_issue, action: Todo::MENTIONED)
+ end
+ end
end
describe '#reassigned_assignable' do
@@ -664,154 +689,161 @@ RSpec.describe TodoService do
end
describe 'Merge Requests' do
- let(:mr_assigned) { create(:merge_request, source_project: project, author: author, assignees: [john_doe], description: "- [ ] Task 1\n- [ ] Task 2 #{mentions}") }
- let(:addressed_mr_assigned) { create(:merge_request, source_project: project, author: author, assignees: [john_doe], description: "#{directly_addressed}\n- [ ] Task 1\n- [ ] Task 2") }
- let(:mr_unassigned) { create(:merge_request, source_project: project, author: author, assignees: []) }
+ let(:mentioned_mr) { create(:merge_request, source_project: project, author: author, description: "- [ ] Task 1\n- [ ] Task 2 #{mentions}") }
+ let(:addressed_mr) { create(:merge_request, source_project: project, author: author, description: "#{directly_addressed}\n- [ ] Task 1\n- [ ] Task 2") }
+ let(:assigned_mr) { create(:merge_request, source_project: project, author: author, assignees: [john_doe]) }
+ let(:unassigned_mr) { create(:merge_request, source_project: project, author: author, assignees: []) }
describe '#new_merge_request' do
it 'creates a pending todo if assigned' do
- service.new_merge_request(mr_assigned, author)
+ service.new_merge_request(assigned_mr, author)
- should_create_todo(user: john_doe, target: mr_assigned, action: Todo::ASSIGNED)
+ should_create_todo(user: john_doe, target: assigned_mr, action: Todo::ASSIGNED)
end
it 'does not create a todo if unassigned' do
- should_not_create_any_todo { service.new_merge_request(mr_unassigned, author) }
+ should_not_create_any_todo { service.new_merge_request(unassigned_mr, author) }
end
- it 'does not create a todo if assignee is the current user' do
- should_not_create_any_todo { service.new_merge_request(mr_unassigned, john_doe) }
+ it 'creates a todo if assignee is the current user' do
+ service.new_merge_request(assigned_mr, john_doe)
+
+ should_create_todo(user: john_doe, target: assigned_mr, author: john_doe, action: Todo::ASSIGNED)
end
it 'creates a todo for each valid mentioned user' do
- service.new_merge_request(mr_assigned, author)
+ service.new_merge_request(mentioned_mr, author)
- should_create_todo(user: member, target: mr_assigned, action: Todo::MENTIONED)
- should_not_create_todo(user: guest, target: mr_assigned, action: Todo::MENTIONED)
- should_create_todo(user: author, target: mr_assigned, action: Todo::MENTIONED)
- should_not_create_todo(user: john_doe, target: mr_assigned, action: Todo::MENTIONED)
- should_not_create_todo(user: non_member, target: mr_assigned, action: Todo::MENTIONED)
+ should_create_todo(user: member, target: mentioned_mr, action: Todo::MENTIONED)
+ should_not_create_todo(user: guest, target: mentioned_mr, action: Todo::MENTIONED)
+ should_create_todo(user: author, target: mentioned_mr, action: Todo::MENTIONED)
+ should_create_todo(user: john_doe, target: mentioned_mr, action: Todo::MENTIONED)
+ should_not_create_todo(user: non_member, target: mentioned_mr, action: Todo::MENTIONED)
end
it 'creates a todo for each valid user based on the type of mention' do
- mr_assigned.update!(description: directly_addressed_and_mentioned)
+ mentioned_mr.update!(description: directly_addressed_and_mentioned)
- service.new_merge_request(mr_assigned, author)
+ service.new_merge_request(mentioned_mr, author)
- should_create_todo(user: member, target: mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
- should_not_create_todo(user: admin, target: mr_assigned, action: Todo::MENTIONED)
+ should_create_todo(user: member, target: mentioned_mr, action: Todo::DIRECTLY_ADDRESSED)
+ should_not_create_todo(user: admin, target: mentioned_mr, action: Todo::MENTIONED)
end
it 'creates a directly addressed todo for each valid addressed user' do
- service.new_merge_request(addressed_mr_assigned, author)
+ service.new_merge_request(addressed_mr, author)
- should_create_todo(user: member, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
- should_not_create_todo(user: guest, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
- should_create_todo(user: author, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
- should_not_create_todo(user: john_doe, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
- should_not_create_todo(user: non_member, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
+ should_create_todo(user: member, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
+ should_not_create_todo(user: guest, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
+ should_create_todo(user: author, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
+ should_create_todo(user: john_doe, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
+ should_not_create_todo(user: non_member, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
end
end
describe '#update_merge_request' do
it 'creates a todo for each valid mentioned user not included in skip_users' do
- service.update_merge_request(mr_assigned, author, skip_users)
+ service.update_merge_request(mentioned_mr, author, skip_users)
- should_create_todo(user: member, target: mr_assigned, action: Todo::MENTIONED)
- should_not_create_todo(user: guest, target: mr_assigned, action: Todo::MENTIONED)
- should_create_todo(user: john_doe, target: mr_assigned, action: Todo::MENTIONED)
- should_create_todo(user: author, target: mr_assigned, action: Todo::MENTIONED)
- should_not_create_todo(user: non_member, target: mr_assigned, action: Todo::MENTIONED)
- should_not_create_todo(user: skipped, target: mr_assigned, action: Todo::MENTIONED)
+ should_create_todo(user: member, target: mentioned_mr, action: Todo::MENTIONED)
+ should_not_create_todo(user: guest, target: mentioned_mr, action: Todo::MENTIONED)
+ should_create_todo(user: john_doe, target: mentioned_mr, action: Todo::MENTIONED)
+ should_create_todo(user: author, target: mentioned_mr, action: Todo::MENTIONED)
+ should_not_create_todo(user: non_member, target: mentioned_mr, action: Todo::MENTIONED)
+ should_not_create_todo(user: skipped, target: mentioned_mr, action: Todo::MENTIONED)
end
it 'creates a todo for each valid user not included in skip_users based on the type of mention' do
- mr_assigned.update!(description: directly_addressed_and_mentioned)
+ mentioned_mr.update!(description: directly_addressed_and_mentioned)
- service.update_merge_request(mr_assigned, author, skip_users)
+ service.update_merge_request(mentioned_mr, author, skip_users)
- should_create_todo(user: member, target: mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
- should_not_create_todo(user: admin, target: mr_assigned, action: Todo::MENTIONED)
- should_not_create_todo(user: skipped, target: mr_assigned)
+ should_create_todo(user: member, target: mentioned_mr, action: Todo::DIRECTLY_ADDRESSED)
+ should_not_create_todo(user: admin, target: mentioned_mr, action: Todo::MENTIONED)
+ should_not_create_todo(user: skipped, target: mentioned_mr)
end
it 'creates a directly addressed todo for each valid addressed user not included in skip_users' do
- service.update_merge_request(addressed_mr_assigned, author, skip_users)
+ service.update_merge_request(addressed_mr, author, skip_users)
- should_create_todo(user: member, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
- should_not_create_todo(user: guest, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
- should_create_todo(user: john_doe, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
- should_create_todo(user: author, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
- should_not_create_todo(user: non_member, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
- should_not_create_todo(user: skipped, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
+ should_create_todo(user: member, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
+ should_not_create_todo(user: guest, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
+ should_create_todo(user: john_doe, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
+ should_create_todo(user: author, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
+ should_not_create_todo(user: non_member, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
+ should_not_create_todo(user: skipped, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
end
it 'does not create a todo if user was already mentioned and todo is pending' do
- create(:todo, :mentioned, user: member, project: project, target: mr_assigned, author: author)
+ stub_feature_flags(multiple_todos: false)
+
+ create(:todo, :mentioned, user: member, project: project, target: mentioned_mr, author: author)
- expect { service.update_merge_request(mr_assigned, author) }.not_to change(member.todos, :count)
+ expect { service.update_merge_request(mentioned_mr, author) }.not_to change(member.todos, :count)
end
it 'does not create a todo if user was already mentioned and todo is done' do
- create(:todo, :mentioned, :done, user: skipped, project: project, target: mr_assigned, author: author)
+ create(:todo, :mentioned, :done, user: skipped, project: project, target: mentioned_mr, author: author)
- expect { service.update_merge_request(mr_assigned, author, skip_users) }.not_to change(skipped.todos, :count)
+ expect { service.update_merge_request(mentioned_mr, author, skip_users) }.not_to change(skipped.todos, :count)
end
it 'does not create a directly addressed todo if user was already mentioned or addressed and todo is pending' do
- create(:todo, :directly_addressed, user: member, project: project, target: addressed_mr_assigned, author: author)
+ stub_feature_flags(multiple_todos: false)
+
+ create(:todo, :directly_addressed, user: member, project: project, target: addressed_mr, author: author)
- expect { service.update_merge_request(addressed_mr_assigned, author) }.not_to change(member.todos, :count)
+ expect { service.update_merge_request(addressed_mr, author) }.not_to change(member.todos, :count)
end
it 'does not create a directly addressed todo if user was already mentioned or addressed and todo is done' do
- create(:todo, :directly_addressed, user: skipped, project: project, target: addressed_mr_assigned, author: author)
+ create(:todo, :directly_addressed, user: skipped, project: project, target: addressed_mr, author: author)
- expect { service.update_merge_request(addressed_mr_assigned, author, skip_users) }.not_to change(skipped.todos, :count)
+ expect { service.update_merge_request(addressed_mr, author, skip_users) }.not_to change(skipped.todos, :count)
end
context 'with a task list' do
it 'does not create todo when tasks are marked as completed' do
- mr_assigned.update!(description: "- [x] Task 1\n- [X] Task 2 #{mentions}")
+ mentioned_mr.update!(description: "- [x] Task 1\n- [X] Task 2 #{mentions}")
- service.update_merge_request(mr_assigned, author)
+ service.update_merge_request(mentioned_mr, author)
- should_not_create_todo(user: admin, target: mr_assigned, action: Todo::MENTIONED)
- should_not_create_todo(user: assignee, target: mr_assigned, action: Todo::MENTIONED)
- should_not_create_todo(user: author, target: mr_assigned, action: Todo::MENTIONED)
- should_not_create_todo(user: john_doe, target: mr_assigned, action: Todo::MENTIONED)
- should_not_create_todo(user: member, target: mr_assigned, action: Todo::MENTIONED)
- should_not_create_todo(user: non_member, target: mr_assigned, action: Todo::MENTIONED)
- should_not_create_todo(user: guest, target: mr_assigned, action: Todo::MENTIONED)
+ should_not_create_todo(user: admin, target: mentioned_mr, action: Todo::MENTIONED)
+ should_not_create_todo(user: assignee, target: mentioned_mr, action: Todo::MENTIONED)
+ should_not_create_todo(user: author, target: mentioned_mr, action: Todo::MENTIONED)
+ should_not_create_todo(user: john_doe, target: mentioned_mr, action: Todo::MENTIONED)
+ should_not_create_todo(user: member, target: mentioned_mr, action: Todo::MENTIONED)
+ should_not_create_todo(user: non_member, target: mentioned_mr, action: Todo::MENTIONED)
+ should_not_create_todo(user: guest, target: mentioned_mr, action: Todo::MENTIONED)
end
it 'does not create directly addressed todo when tasks are marked as completed' do
- addressed_mr_assigned.update!(description: "#{directly_addressed}\n- [x] Task 1\n- [X] Task 2")
+ addressed_mr.update!(description: "#{directly_addressed}\n- [x] Task 1\n- [X] Task 2")
- service.update_merge_request(addressed_mr_assigned, author)
+ service.update_merge_request(addressed_mr, author)
- should_not_create_todo(user: admin, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
- should_not_create_todo(user: assignee, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
- should_not_create_todo(user: author, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
- should_not_create_todo(user: john_doe, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
- should_not_create_todo(user: member, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
- should_not_create_todo(user: non_member, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
- should_not_create_todo(user: guest, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
+ should_not_create_todo(user: admin, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
+ should_not_create_todo(user: assignee, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
+ should_not_create_todo(user: author, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
+ should_not_create_todo(user: john_doe, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
+ should_not_create_todo(user: member, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
+ should_not_create_todo(user: non_member, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
+ should_not_create_todo(user: guest, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
end
it 'does not raise an error when description not change' do
- mr_assigned.update!(title: 'Sample')
+ mentioned_mr.update!(title: 'Sample')
- expect { service.update_merge_request(mr_assigned, author) }.not_to raise_error
+ expect { service.update_merge_request(mentioned_mr, author) }.not_to raise_error
end
end
end
describe '#close_merge_request' do
it 'marks related pending todos to the target for the user as done' do
- first_todo = create(:todo, :assigned, user: john_doe, project: project, target: mr_assigned, author: author)
- second_todo = create(:todo, :assigned, user: john_doe, project: project, target: mr_assigned, author: author)
- service.close_merge_request(mr_assigned, john_doe)
+ first_todo = create(:todo, :assigned, user: john_doe, project: project, target: mentioned_mr, author: author)
+ second_todo = create(:todo, :assigned, user: john_doe, project: project, target: mentioned_mr, author: author)
+ service.close_merge_request(mentioned_mr, john_doe)
expect(first_todo.reload).to be_done
expect(second_todo.reload).to be_done
@@ -820,55 +852,55 @@ RSpec.describe TodoService do
describe '#merge_merge_request' do
it 'marks related pending todos to the target for the user as done' do
- first_todo = create(:todo, :assigned, user: john_doe, project: project, target: mr_assigned, author: author)
- second_todo = create(:todo, :assigned, user: john_doe, project: project, target: mr_assigned, author: author)
- service.merge_merge_request(mr_assigned, john_doe)
+ first_todo = create(:todo, :assigned, user: john_doe, project: project, target: mentioned_mr, author: author)
+ second_todo = create(:todo, :assigned, user: john_doe, project: project, target: mentioned_mr, author: author)
+ service.merge_merge_request(mentioned_mr, john_doe)
expect(first_todo.reload).to be_done
expect(second_todo.reload).to be_done
end
it 'does not create todo for guests' do
- service.merge_merge_request(mr_assigned, john_doe)
- should_not_create_todo(user: guest, target: mr_assigned, action: Todo::MENTIONED)
+ service.merge_merge_request(mentioned_mr, john_doe)
+ should_not_create_todo(user: guest, target: mentioned_mr, action: Todo::MENTIONED)
end
it 'does not create directly addressed todo for guests' do
- service.merge_merge_request(addressed_mr_assigned, john_doe)
- should_not_create_todo(user: guest, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
+ service.merge_merge_request(addressed_mr, john_doe)
+ should_not_create_todo(user: guest, target: addressed_mr, action: Todo::DIRECTLY_ADDRESSED)
end
end
describe '#new_award_emoji' do
it 'marks related pending todos to the target for the user as done' do
- todo = create(:todo, user: john_doe, project: project, target: mr_assigned, author: author)
- service.new_award_emoji(mr_assigned, john_doe)
+ todo = create(:todo, user: john_doe, project: project, target: mentioned_mr, author: author)
+ service.new_award_emoji(mentioned_mr, john_doe)
expect(todo.reload).to be_done
end
end
describe '#merge_request_build_failed' do
- let(:merge_participants) { [mr_unassigned.author, admin] }
+ let(:merge_participants) { [unassigned_mr.author, admin] }
before do
- allow(mr_unassigned).to receive(:merge_participants).and_return(merge_participants)
+ allow(unassigned_mr).to receive(:merge_participants).and_return(merge_participants)
end
it 'creates a pending todo for each merge_participant' do
- service.merge_request_build_failed(mr_unassigned)
+ service.merge_request_build_failed(unassigned_mr)
merge_participants.each do |participant|
- should_create_todo(user: participant, author: participant, target: mr_unassigned, action: Todo::BUILD_FAILED)
+ should_create_todo(user: participant, author: participant, target: unassigned_mr, action: Todo::BUILD_FAILED)
end
end
end
describe '#merge_request_push' do
it 'marks related pending todos to the target for the user as done' do
- first_todo = create(:todo, :build_failed, user: author, project: project, target: mr_assigned, author: john_doe)
- second_todo = create(:todo, :build_failed, user: john_doe, project: project, target: mr_assigned, author: john_doe)
- service.merge_request_push(mr_assigned, author)
+ first_todo = create(:todo, :build_failed, user: author, project: project, target: mentioned_mr, author: john_doe)
+ second_todo = create(:todo, :build_failed, user: john_doe, project: project, target: mentioned_mr, author: john_doe)
+ service.merge_request_push(mentioned_mr, author)
expect(first_todo.reload).to be_done
expect(second_todo.reload).not_to be_done
@@ -879,24 +911,24 @@ RSpec.describe TodoService do
let(:merge_participants) { [admin, create(:user)] }
before do
- allow(mr_unassigned).to receive(:merge_participants).and_return(merge_participants)
+ allow(unassigned_mr).to receive(:merge_participants).and_return(merge_participants)
end
it 'creates a pending todo for each merge_participant' do
- mr_unassigned.update!(merge_when_pipeline_succeeds: true, merge_user: admin)
- service.merge_request_became_unmergeable(mr_unassigned)
+ unassigned_mr.update!(merge_when_pipeline_succeeds: true, merge_user: admin)
+ service.merge_request_became_unmergeable(unassigned_mr)
merge_participants.each do |participant|
- should_create_todo(user: participant, author: participant, target: mr_unassigned, action: Todo::UNMERGEABLE)
+ should_create_todo(user: participant, author: participant, target: unassigned_mr, action: Todo::UNMERGEABLE)
end
end
end
describe '#mark_todo' do
it 'creates a todo from a merge request' do
- service.mark_todo(mr_unassigned, author)
+ service.mark_todo(unassigned_mr, author)
- should_create_todo(user: author, target: mr_unassigned, action: Todo::MARKED)
+ should_create_todo(user: author, target: unassigned_mr, action: Todo::MARKED)
end
end
@@ -913,33 +945,33 @@ RSpec.describe TodoService do
end
let(:mention) { john_doe.to_reference }
- let(:diff_note_on_merge_request) { create(:diff_note_on_merge_request, project: project, noteable: mr_unassigned, author: author, note: "Hey #{mention}") }
- let(:addressed_diff_note_on_merge_request) { create(:diff_note_on_merge_request, project: project, noteable: mr_unassigned, author: author, note: "#{mention}, hey!") }
- let(:legacy_diff_note_on_merge_request) { create(:legacy_diff_note_on_merge_request, project: project, noteable: mr_unassigned, author: author, note: "Hey #{mention}") }
+ let(:diff_note_on_merge_request) { create(:diff_note_on_merge_request, project: project, noteable: unassigned_mr, author: author, note: "Hey #{mention}") }
+ let(:addressed_diff_note_on_merge_request) { create(:diff_note_on_merge_request, project: project, noteable: unassigned_mr, author: author, note: "#{mention}, hey!") }
+ let(:legacy_diff_note_on_merge_request) { create(:legacy_diff_note_on_merge_request, project: project, noteable: unassigned_mr, author: author, note: "Hey #{mention}") }
it 'creates a todo for mentioned user on new diff note' do
service.new_note(diff_note_on_merge_request, author)
- should_create_todo(user: john_doe, target: mr_unassigned, author: author, action: Todo::MENTIONED, note: diff_note_on_merge_request)
+ should_create_todo(user: john_doe, target: unassigned_mr, author: author, action: Todo::MENTIONED, note: diff_note_on_merge_request)
end
it 'creates a directly addressed todo for addressed user on new diff note' do
service.new_note(addressed_diff_note_on_merge_request, author)
- should_create_todo(user: john_doe, target: mr_unassigned, author: author, action: Todo::DIRECTLY_ADDRESSED, note: addressed_diff_note_on_merge_request)
+ should_create_todo(user: john_doe, target: unassigned_mr, author: author, action: Todo::DIRECTLY_ADDRESSED, note: addressed_diff_note_on_merge_request)
end
it 'creates a todo for mentioned user on legacy diff note' do
service.new_note(legacy_diff_note_on_merge_request, author)
- should_create_todo(user: john_doe, target: mr_unassigned, author: author, action: Todo::MENTIONED, note: legacy_diff_note_on_merge_request)
+ should_create_todo(user: john_doe, target: unassigned_mr, author: author, action: Todo::MENTIONED, note: legacy_diff_note_on_merge_request)
end
it 'does not create todo for guests' do
- note_on_merge_request = create :note_on_merge_request, project: project, noteable: mr_assigned, note: mentions
+ note_on_merge_request = create :note_on_merge_request, project: project, noteable: mentioned_mr, note: mentions
service.new_note(note_on_merge_request, author)
- should_not_create_todo(user: guest, target: mr_assigned, action: Todo::MENTIONED)
+ should_not_create_todo(user: guest, target: mentioned_mr, action: Todo::MENTIONED)
end
end
end
@@ -1013,6 +1045,8 @@ RSpec.describe TodoService do
end
it 'does not create a todo if user was already mentioned and todo is pending' do
+ stub_feature_flags(multiple_todos: false)
+
create(:todo, :mentioned, user: member, project: project, target: noteable, author: author)
expect { service.update_note(note, author, skip_users) }.not_to change(member.todos, :count)
@@ -1025,6 +1059,8 @@ RSpec.describe TodoService do
end
it 'does not create a directly addressed todo if user was already mentioned or addressed and todo is pending' do
+ stub_feature_flags(multiple_todos: false)
+
create(:todo, :directly_addressed, user: member, project: project, target: noteable, author: author)
expect { service.update_note(addressed_note, author, skip_users) }.not_to change(member.todos, :count)
@@ -1038,7 +1074,7 @@ RSpec.describe TodoService do
end
it 'updates cached counts when a todo is created' do
- issue = create(:issue, project: project, assignees: [john_doe], author: author, description: mentions)
+ issue = create(:issue, project: project, assignees: [john_doe], author: author)
expect(john_doe.todos_pending_count).to eq(0)
expect(john_doe).to receive(:update_todos_count_cache).and_call_original
diff --git a/spec/services/users/update_service_spec.rb b/spec/services/users/update_service_spec.rb
index 274c44394f3..b30b7e6eb56 100644
--- a/spec/services/users/update_service_spec.rb
+++ b/spec/services/users/update_service_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Users::UpdateService do
result = update_user(user, { username: 'taken' })
end.not_to change { user.reload.username }
expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Username has already been taken')
+ expect(result[:message]).to eq('A user, alias, or group already exists with that username.')
end
it 'updates the status if status params were given' do
diff --git a/spec/services/web_hook_service_spec.rb b/spec/services/web_hook_service_spec.rb
index a607a6734b0..2fe72ab31c2 100644
--- a/spec/services/web_hook_service_spec.rb
+++ b/spec/services/web_hook_service_spec.rb
@@ -131,6 +131,15 @@ RSpec.describe WebHookService do
end
end
+ context 'when url is not encoded' do
+ let(:project_hook) { create(:project_hook, url: 'http://server.com/my path/') }
+
+ it 'handles exceptions' do
+ expect(service_instance.execute).to eq(status: :error, message: 'bad URI(is not URI?): "http://server.com/my path/"')
+ expect { service_instance.execute }.not_to raise_error
+ end
+ end
+
context 'when request body size is too big' do
it 'does not perform the request' do
stub_const("#{described_class}::REQUEST_BODY_SIZE_LIMIT", 10.bytes)
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index c19c26f9a0b..9fac6c8e192 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -272,41 +272,6 @@ RSpec.configure do |config|
Sidekiq::Worker.clear_all
- # Temporary patch to force admin mode to be active by default in tests when
- # using the feature flag :user_mode_in_session, since this will require
- # modifying a significant number of specs to test both states for admin
- # mode enabled / disabled.
- #
- # This will only be applied to specs below dirs in `admin_mode_mock_dirs`
- #
- # See ongoing migration: https://gitlab.com/gitlab-org/gitlab/-/issues/31511
- #
- # Until the migration is finished, if it is required to have the real
- # behaviour in any of the mocked dirs specs that an admin is signed in
- # with normal user mode and needs to switch to admin mode, it is possible to
- # mark such tests with the `do_not_mock_admin_mode` metadata tag, e.g:
- #
- # context 'some test in mocked dir', :do_not_mock_admin_mode do ... end
- admin_mode_mock_dirs = %w(
- ./ee/spec/elastic_integration
- ./ee/spec/finders
- ./ee/spec/lib
- ./ee/spec/serializers
- ./ee/spec/support/shared_examples/finders/geo
- ./ee/spec/support/shared_examples/graphql/geo
- ./spec/finders
- ./spec/lib
- ./spec/serializers
- ./spec/support/shared_examples/lib/gitlab
- ./spec/workers
- )
-
- if !example.metadata[:do_not_mock_admin_mode] && example.metadata[:file_path].start_with?(*admin_mode_mock_dirs)
- allow_any_instance_of(Gitlab::Auth::CurrentUserMode).to receive(:admin_mode?) do |current_user_mode|
- current_user_mode.send(:user)&.admin?
- end
- end
-
# Administrators have to re-authenticate in order to access administrative
# functionality when feature flag :user_mode_in_session is active. Any spec
# that requires administrative access can use the tag :enable_admin_mode
@@ -314,6 +279,10 @@ RSpec.configure do |config|
#
# context 'some test that requires admin mode', :enable_admin_mode do ... end
#
+ # Some specs do get admin mode enabled automatically (e.g. `spec/controllers/admin`).
+ # In this case, specs that need to test both admin mode states can use the
+ # :do_not_mock_admin_mode tag to disable auto admin mode.
+ #
# See also spec/support/helpers/admin_mode_helpers.rb
if example.metadata[:enable_admin_mode] && !example.metadata[:do_not_mock_admin_mode]
allow_any_instance_of(Gitlab::Auth::CurrentUserMode).to receive(:admin_mode?) do |current_user_mode|
@@ -400,8 +369,5 @@ end
# Prevent Rugged from picking up local developer gitconfig.
Rugged::Settings['search_path_global'] = Rails.root.join('tmp/tests').to_s
-# Disable timestamp checks for invisible_captcha
-InvisibleCaptcha.timestamp_enabled = false
-
# Initialize FactoryDefault to use create_default helper
TestProf::FactoryDefault.init
diff --git a/spec/support/atlassian/jira_connect/schemata.rb b/spec/support/atlassian/jira_connect/schemata.rb
index 91f8fe0bb41..d056c7cacf3 100644
--- a/spec/support/atlassian/jira_connect/schemata.rb
+++ b/spec/support/atlassian/jira_connect/schemata.rb
@@ -2,82 +2,291 @@
module Atlassian
module Schemata
- def self.build_info
- {
- 'type' => 'object',
- 'required' => %w(schemaVersion pipelineId buildNumber updateSequenceNumber displayName url state issueKeys testInfo references),
- 'properties' => {
- 'schemaVersion' => { 'type' => 'string', 'pattern' => '1.0' },
- 'pipelineId' => { 'type' => 'string' },
- 'buildNumber' => { 'type' => 'integer' },
- 'updateSequenceNumber' => { 'type' => 'integer' },
- 'displayName' => { 'type' => 'string' },
- 'url' => { 'type' => 'string' },
- 'state' => {
- 'type' => 'string',
- 'pattern' => '(pending|in_progress|successful|failed|cancelled)'
- },
- 'issueKeys' => {
- 'type' => 'array',
- 'items' => { 'type' => 'string' },
- 'minItems' => 1
- },
- 'testInfo' => {
- 'type' => 'object',
- 'required' => %w(totalNumber numberPassed numberFailed numberSkipped),
- 'properties' => {
- 'totalNumber' => { 'type' => 'integer' },
- 'numberFailed' => { 'type' => 'integer' },
- 'numberPassed' => { 'type' => 'integer' },
- 'numberSkipped' => { 'type' => 'integer' }
- }
- },
- 'references' => {
- 'type' => 'array',
- 'items' => {
+ class << self
+ def build_info
+ {
+ 'type' => 'object',
+ 'additionalProperties' => false,
+ 'required' => %w(
+ schemaVersion pipelineId buildNumber updateSequenceNumber
+ displayName url state issueKeys testInfo references
+ lastUpdated
+ ),
+ 'properties' => {
+ 'schemaVersion' => schema_version_type,
+ 'pipelineId' => { 'type' => 'string' },
+ 'buildNumber' => { 'type' => 'integer' },
+ 'updateSequenceNumber' => { 'type' => 'integer' },
+ 'displayName' => { 'type' => 'string' },
+ 'lastUpdated' => iso8601_type,
+ 'url' => { 'type' => 'string' },
+ 'state' => state_type,
+ 'issueKeys' => issue_keys_type,
+ 'testInfo' => {
'type' => 'object',
- 'required' => %w(commit ref),
+ 'required' => %w(totalNumber numberPassed numberFailed numberSkipped),
'properties' => {
- 'commit' => {
- 'type' => 'object',
- 'required' => %w(id repositoryUri),
- 'properties' => {
- 'id' => { 'type' => 'string' },
- 'repositoryUri' => { 'type' => 'string' }
+ 'totalNumber' => { 'type' => 'integer' },
+ 'numberFailed' => { 'type' => 'integer' },
+ 'numberPassed' => { 'type' => 'integer' },
+ 'numberSkipped' => { 'type' => 'integer' }
+ }
+ },
+ 'references' => {
+ 'type' => 'array',
+ 'items' => {
+ 'type' => 'object',
+ 'required' => %w(commit ref),
+ 'properties' => {
+ 'commit' => {
+ 'type' => 'object',
+ 'required' => %w(id repositoryUri),
+ 'properties' => {
+ 'id' => { 'type' => 'string' },
+ 'repositoryUri' => { 'type' => 'string' }
+ }
+ },
+ 'ref' => {
+ 'type' => 'object',
+ 'required' => %w(name uri),
+ 'properties' => {
+ 'name' => { 'type' => 'string' },
+ 'uri' => { 'type' => 'string' }
+ }
}
- },
- 'ref' => {
- 'type' => 'object',
- 'required' => %w(name uri),
- 'properties' => {
- 'name' => { 'type' => 'string' },
- 'uri' => { 'type' => 'string' }
+ }
+ }
+ }
+ }
+ }
+ end
+
+ def deployment_info
+ {
+ 'type' => 'object',
+ 'additionalProperties' => false,
+ 'required' => %w(
+ deploymentSequenceNumber updateSequenceNumber
+ associations displayName url description lastUpdated
+ state pipeline environment
+ ),
+ 'properties' => {
+ 'deploymentSequenceNumber' => { 'type' => 'integer' },
+ 'updateSequenceNumber' => { 'type' => 'integer' },
+ 'associations' => {
+ 'type' => 'array',
+ 'items' => association_type,
+ 'minItems' => 1
+ },
+ 'displayName' => { 'type' => 'string' },
+ 'description' => { 'type' => 'string' },
+ 'label' => { 'type' => 'string' },
+ 'url' => { 'type' => 'string' },
+ 'lastUpdated' => iso8601_type,
+ 'state' => state_type,
+ 'pipeline' => pipeline_type,
+ 'environment' => environment_type,
+ 'schemaVersion' => schema_version_type
+ }
+ }
+ end
+
+ def feature_flag_info
+ {
+ 'type' => 'object',
+ 'additionalProperties' => false,
+ 'required' => %w(
+ updateSequenceId id key issueKeys summary details
+ ),
+ 'properties' => {
+ 'id' => { 'type' => 'string' },
+ 'key' => { 'type' => 'string' },
+ 'displayName' => { 'type' => 'string' },
+ 'issueKeys' => issue_keys_type,
+ 'summary' => summary_type,
+ 'details' => details_type,
+ 'updateSequenceId' => { 'type' => 'integer' },
+ 'schemaVersion' => schema_version_type
+ }
+ }
+ end
+
+ def details_type
+ {
+ 'type' => 'array',
+ 'items' => combine(summary_type, {
+ 'required' => ['environment'],
+ 'properties' => {
+ 'environment' => {
+ 'type' => 'object',
+ 'additionalProperties' => false,
+ 'required' => %w(name),
+ 'properties' => {
+ 'name' => { 'type' => 'string' },
+ 'type' => {
+ 'type' => 'string',
+ 'pattern' => '^(development|testing|staging|production)$'
}
}
}
}
+ })
+ }
+ end
+
+ def combine(map_a, map_b)
+ map_a.merge(map_b) do |k, a, b|
+ a.respond_to?(:merge) ? a.merge(b) : a + b
+ end
+ end
+
+ def summary_type
+ {
+ 'type' => 'object',
+ 'additionalProperties' => false,
+ 'required' => %w(url status lastUpdated),
+ 'properties' => {
+ 'lastUpdated' => iso8601_type,
+ 'url' => { 'type' => 'string' },
+ 'status' => feature_status_type
}
}
- }
- end
+ end
- def self.build_info_payload
- {
- 'type' => 'object',
- 'required' => %w(providerMetadata builds),
- 'properties' => {
- 'providerMetadata' => provider_metadata,
- 'builds' => { 'type' => 'array', 'items' => build_info }
+ def feature_status_type
+ {
+ 'type' => 'object',
+ 'additionalProperties' => false,
+ 'required' => %w(enabled),
+ 'properties' => {
+ 'enabled' => { 'type' => 'boolean' },
+ 'defaultValue' => { 'type' => 'string' },
+ 'rollout' => rollout_type
+ }
}
- }
- end
+ end
+
+ def rollout_type
+ {
+ 'type' => 'object',
+ 'additionalProperties' => false,
+ 'properties' => {
+ 'percentage' => { 'type' => 'number' },
+ 'text' => { 'type' => 'string' },
+ 'rules' => { 'type' => 'number' }
+ }
+ }
+ end
- def self.provider_metadata
- {
- 'type' => 'object',
- 'required' => %w(product),
- 'properties' => { 'product' => { 'type' => 'string' } }
- }
+ def environment_type
+ {
+ 'type' => 'object',
+ 'additionalProperties' => false,
+ 'required' => %w(id displayName type),
+ 'properties' => {
+ 'id' => { 'type' => 'string', 'maxLength' => 255 },
+ 'displayName' => { 'type' => 'string', 'maxLength' => 255 },
+ 'type' => {
+ 'type' => 'string',
+ 'pattern' => '(unmapped|development|testing|staging|production)'
+ }
+ }
+ }
+ end
+
+ def pipeline_type
+ {
+ 'type' => 'object',
+ 'additionalProperties' => false,
+ 'required' => %w(id displayName url),
+ 'properties' => {
+ 'id' => { 'type' => 'string', 'maxLength' => 255 },
+ 'displayName' => { 'type' => 'string', 'maxLength' => 255 },
+ 'url' => { 'type' => 'string', 'maxLength' => 2000 }
+ }
+ }
+ end
+
+ def schema_version_type
+ { 'type' => 'string', 'pattern' => '1.0' }
+ end
+
+ def state_type
+ {
+ 'type' => 'string',
+ 'pattern' => '(pending|in_progress|successful|failed|cancelled)'
+ }
+ end
+
+ def association_type
+ {
+ 'type' => 'object',
+ 'additionalProperties' => false,
+ 'required' => %w(associationType values),
+ 'properties' => {
+ 'associationType' => {
+ 'type' => 'string',
+ 'pattern' => '(issueKeys|issueIdOrKeys)'
+ },
+ 'values' => issue_keys_type
+ }
+ }
+ end
+
+ def issue_keys_type
+ {
+ 'type' => 'array',
+ 'items' => { 'type' => 'string' },
+ 'minItems' => 1,
+ 'maxItems' => 100
+ }
+ end
+
+ def deploy_info_payload
+ payload('deployments', deployment_info)
+ end
+
+ def build_info_payload
+ payload('builds', build_info)
+ end
+
+ def ff_info_payload
+ pl = payload('flags', feature_flag_info)
+ pl['properties']['properties'] = {
+ 'type' => 'object',
+ 'additionalProperties' => { 'type' => 'string' },
+ 'maxProperties' => 5,
+ 'propertyNames' => { 'pattern' => '^[^_][^:]+$' }
+ }
+ pl
+ end
+
+ def payload(key, schema)
+ {
+ 'type' => 'object',
+ 'additionalProperties' => false,
+ 'required' => ['providerMetadata', key],
+ 'properties' => {
+ 'providerMetadata' => provider_metadata,
+ key => { 'type' => 'array', 'items' => schema }
+ }
+ }
+ end
+
+ def provider_metadata
+ {
+ 'type' => 'object',
+ 'required' => %w(product),
+ 'properties' => { 'product' => { 'type' => 'string' } }
+ }
+ end
+
+ def iso8601_type
+ {
+ 'type' => 'string',
+ 'pattern' => '^-?([1-9][0-9]*)?[0-9]{4}-(1[0-2]|0[1-9])-(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9]):([0-5][0-9]):([0-5][0-9])(\.[0-9]+)?(Z|[+-](?:2[0-3]|[01][0-9]):[0-5][0-9])?$'
+ }
+ end
end
end
end
diff --git a/spec/support/capybara.rb b/spec/support/capybara.rb
index ab55cf97ab4..db198ac9808 100644
--- a/spec/support/capybara.rb
+++ b/spec/support/capybara.rb
@@ -33,7 +33,7 @@ Capybara.register_server :puma_via_workhorse do |app, port, host, **options|
socket_path = file.path
file.close! # We just want the filename
- TestEnv.with_workhorse(TestEnv.workhorse_dir, host, port, socket_path) do
+ TestEnv.with_workhorse(host, port, socket_path) do
Capybara.servers[:puma].call(app, nil, socket_path, **options)
end
end
diff --git a/spec/support/gitlab_stubs/gitlab_ci_for_sast.yml b/spec/support/gitlab_stubs/gitlab_ci_for_sast.yml
index c4f3c3aace2..d20078c8904 100644
--- a/spec/support/gitlab_stubs/gitlab_ci_for_sast.yml
+++ b/spec/support/gitlab_stubs/gitlab_ci_for_sast.yml
@@ -4,7 +4,8 @@ include:
variables:
SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/gitlab-org/security-products/analyzers2"
SAST_EXCLUDED_PATHS: "spec, executables"
- SAST_DEFAULT_ANALYZERS: "bandit, gosec"
+ SAST_DEFAULT_ANALYZERS: "bandit, brakeman"
+ SAST_EXCLUDED_ANALYZERS: "brakeman"
stages:
- our_custom_security_stage
diff --git a/spec/support/gitlab_stubs/gitlab_ci_for_sast_default_analyzers.yml b/spec/support/gitlab_stubs/gitlab_ci_for_sast_default_analyzers.yml
new file mode 100644
index 00000000000..c4f3c3aace2
--- /dev/null
+++ b/spec/support/gitlab_stubs/gitlab_ci_for_sast_default_analyzers.yml
@@ -0,0 +1,15 @@
+include:
+ - template: SAST.gitlab-ci.yml
+
+variables:
+ SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/gitlab-org/security-products/analyzers2"
+ SAST_EXCLUDED_PATHS: "spec, executables"
+ SAST_DEFAULT_ANALYZERS: "bandit, gosec"
+
+stages:
+ - our_custom_security_stage
+sast:
+ stage: our_custom_security_stage
+ variables:
+ SEARCH_MAX_DEPTH: 8
+ SAST_BRAKEMAN_LEVEL: 2
diff --git a/spec/support/gitlab_stubs/gitlab_ci_for_sast_excluded_analyzers.yml b/spec/support/gitlab_stubs/gitlab_ci_for_sast_excluded_analyzers.yml
new file mode 100644
index 00000000000..b665de5f982
--- /dev/null
+++ b/spec/support/gitlab_stubs/gitlab_ci_for_sast_excluded_analyzers.yml
@@ -0,0 +1,14 @@
+include:
+ - template: SAST.gitlab-ci.yml
+
+variables:
+ SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/gitlab-org/security-products/analyzers2"
+ SAST_EXCLUDED_PATHS: "spec, executables"
+ SAST_EXCLUDED_ANALYZERS: "brakeman"
+
+stages:
+ - our_custom_security_stage
+sast:
+ stage: our_custom_security_stage
+ variables:
+ SEARCH_MAX_DEPTH: 8
diff --git a/spec/support/gitlab_stubs/gitlab_ci_includes.yml b/spec/support/gitlab_stubs/gitlab_ci_includes.yml
index e74773ce23e..1029fa1ea86 100644
--- a/spec/support/gitlab_stubs/gitlab_ci_includes.yml
+++ b/spec/support/gitlab_stubs/gitlab_ci_includes.yml
@@ -1,19 +1,45 @@
+before_script:
+ - bundle install
+ - bundle exec rake db:create
+
rspec 0 1:
stage: build
script: 'rake spec'
needs: []
+ tags:
+ - ruby
+ - postgres
+ only:
+ - branches
+ - master
rspec 0 2:
stage: build
+ allow_failure: true
script: 'rake spec'
+ when: on_failure
needs: []
spinach:
stage: build
script: 'rake spinach'
needs: []
+ except:
+ - tags
+deploy_job:
+ stage: deploy
+ script:
+ - echo 'done'
+ environment:
+ name: production
docker:
stage: test
script: 'curl http://dockerhub/URL'
needs: [spinach, rspec 0 1]
+ when: manual
+ except:
+ - branches
+
+after_script:
+ - echo 'run this after'
diff --git a/spec/support/helpers/database/database_helpers.rb b/spec/support/helpers/database/database_helpers.rb
new file mode 100644
index 00000000000..b8d7ea3662f
--- /dev/null
+++ b/spec/support/helpers/database/database_helpers.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module Database
+ module DatabaseHelpers
+ # In order to directly work with views using factories,
+ # we can swapout the view for a table of identical structure.
+ def swapout_view_for_table(view)
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ CREATE TABLE #{view}_copy (LIKE #{view});
+ DROP VIEW #{view};
+ ALTER TABLE #{view}_copy RENAME TO #{view};
+ SQL
+ end
+ end
+end
diff --git a/spec/support/helpers/database/partitioning_helpers.rb b/spec/support/helpers/database/partitioning_helpers.rb
new file mode 100644
index 00000000000..80b31fe0603
--- /dev/null
+++ b/spec/support/helpers/database/partitioning_helpers.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+module Database
+ module PartitioningHelpers
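+ # Asserts that `table` is partitioned by the given key `columns` using the
+ # `part_type` strategy (:range, :list, or :hash).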
+ def expect_table_partitioned_by(table, columns, part_type: :range)
+ columns_with_part_type = columns.map { |c| [part_type.to_s, c] }
+ actual_columns = find_partitioned_columns(table)
+
+ expect(columns_with_part_type).to match_array(actual_columns)
+ end
+
+ def expect_range_partition_of(partition_name, table_name, min_value, max_value)
+ definition = find_partition_definition(partition_name, schema: Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA)
+
+ expect(definition).not_to be_nil
+ expect(definition['base_table']).to eq(table_name.to_s)
+ expect(definition['condition']).to eq("FOR VALUES FROM (#{min_value}) TO (#{max_value})")
+ end
+
+ def expect_total_partitions(table_name, count, schema: Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA)
+ partitions = find_partitions(table_name, schema: schema)
+
+ expect(partitions.size).to eq(count)
+ end
+
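+ # `partitions` maps a partition name suffix to its [min_value, max_value]
+ # bounds, e.g. { '000000' => %w(MINVALUE 100) } (hypothetical values).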
+ def expect_range_partitions_for(table_name, partitions)
+ partitions.each do |suffix, (min_value, max_value)|
+ partition_name = "#{table_name}_#{suffix}"
+ expect_range_partition_of(partition_name, table_name, min_value, max_value)
+ end
+
+ expect_total_partitions(table_name, partitions.size, schema: Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA)
+ end
+
+ def expect_hash_partition_of(partition_name, table_name, modulus, remainder)
+ definition = find_partition_definition(partition_name, schema: Gitlab::Database::STATIC_PARTITIONS_SCHEMA)
+
+ expect(definition).not_to be_nil
+ expect(definition['base_table']).to eq(table_name.to_s)
+ expect(definition['condition']).to eq("FOR VALUES WITH (modulus #{modulus}, remainder #{remainder})")
+ end
+
+ private
+
+ def find_partitioned_columns(table)
+ connection.select_rows(<<~SQL)
+ select
+ case partstrat
+ when 'l' then 'list'
+ when 'r' then 'range'
+ when 'h' then 'hash'
+ end as partstrat,
+ cols.column_name
+ from (
+ select partrelid, partstrat, unnest(partattrs) as col_pos
+ from pg_partitioned_table
+ ) pg_part
+ inner join pg_class
+ on pg_part.partrelid = pg_class.oid
+ inner join information_schema.columns cols
+ on cols.table_name = pg_class.relname
+ and cols.ordinal_position = pg_part.col_pos
+ where pg_class.relname = '#{table}';
+ SQL
+ end
+
+ def find_partition_definition(partition, schema:)
+ connection.select_one(<<~SQL)
+ select
+ parent_class.relname as base_table,
+ pg_get_expr(pg_class.relpartbound, inhrelid) as condition
+ from pg_class
+ inner join pg_inherits i on pg_class.oid = inhrelid
+ inner join pg_class parent_class on parent_class.oid = inhparent
+ inner join pg_namespace ON pg_namespace.oid = pg_class.relnamespace
+ where pg_namespace.nspname = '#{schema}'
+ and pg_class.relname = '#{partition}'
+ and pg_class.relispartition
+ SQL
+ end
+
+ def find_partitions(partition, schema: Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA)
+ connection.select_rows(<<~SQL)
+ select
+ pg_class.relname
+ from pg_class
+ inner join pg_inherits i on pg_class.oid = inhrelid
+ inner join pg_class parent_class on parent_class.oid = inhparent
+ inner join pg_namespace ON pg_namespace.oid = pg_class.relnamespace
+ where pg_namespace.nspname = '#{schema}'
+ and parent_class.relname = '#{partition}'
+ and pg_class.relispartition
+ SQL
+ end
+ end
+end
diff --git a/spec/support/helpers/database/table_schema_helpers.rb b/spec/support/helpers/database/table_schema_helpers.rb
new file mode 100644
index 00000000000..48d33442110
--- /dev/null
+++ b/spec/support/helpers/database/table_schema_helpers.rb
@@ -0,0 +1,149 @@
+# frozen_string_literal: true
+
+module Database
+ module TableSchemaHelpers
+ def connection
+ ActiveRecord::Base.connection
+ end
+
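+ # The given block is expected to perform the swap; table OIDs are captured
+ # before the block and compared afterwards to verify the rename and archival.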
+ def expect_table_to_be_replaced(original_table:, replacement_table:, archived_table:)
+ original_oid = table_oid(original_table)
+ replacement_oid = table_oid(replacement_table)
+
+ yield
+
+ expect(table_oid(original_table)).to eq(replacement_oid)
+ expect(table_oid(archived_table)).to eq(original_oid)
+ expect(table_oid(replacement_table)).to be_nil
+ end
+
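+ # `expected_column_attributes` is an ordered array of hashes mapping column
+ # attribute names (e.g. :name, :sql_type) to their expected values, checked
+ # positionally against the actual columns.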
+ def expect_table_columns_to_match(expected_column_attributes, table_name)
+ expect(connection.table_exists?(table_name)).to eq(true)
+
+ actual_columns = connection.columns(table_name)
+ expect(actual_columns.size).to eq(expected_column_attributes.size)
+
+ expected_column_attributes.each_with_index do |attributes, i|
+ actual_column = actual_columns[i]
+
+ attributes.each do |name, value|
+ actual_value = actual_column.public_send(name)
+ message = "expected #{actual_column.name}.#{name} to be #{value}, but got #{actual_value}"
+
+ expect(actual_value).to eq(value), message
+ end
+ end
+ end
+
+ def expect_index_to_exist(name, schema: nil)
+ expect(index_exists_by_name(name, schema: schema)).to eq(true)
+ end
+
+ def expect_index_not_to_exist(name, schema: nil)
+ expect(index_exists_by_name(name, schema: schema)).to be_nil
+ end
+
+ def expect_check_constraint(table_name, name, definition, schema: nil)
+ expect(check_constraint_definition(table_name, name, schema: schema)).to eq("CHECK ((#{definition}))")
+ end
+
+ def expect_primary_keys_after_tables(tables, schema: nil)
+ tables.each do |table|
+ primary_key = primary_key_constraint_name(table, schema: schema)
+
+ expect(primary_key).to eq("#{table}_pkey")
+ end
+ end
+
+ def table_oid(name)
+ connection.select_value(<<~SQL)
+ SELECT oid
+ FROM pg_catalog.pg_class
+ WHERE relname = '#{name}'
+ SQL
+ end
+
+ def table_type(name)
+ connection.select_value(<<~SQL)
+ SELECT
+ CASE class.relkind
+ WHEN 'r' THEN 'normal'
+ WHEN 'p' THEN 'partitioned'
+ ELSE 'other'
+ END as table_type
+ FROM pg_catalog.pg_class class
+ WHERE class.relname = '#{name}'
+ SQL
+ end
+
+ def sequence_owned_by(table_name, column_name)
+ connection.select_value(<<~SQL)
+ SELECT
+ sequence.relname as name
+ FROM pg_catalog.pg_class as sequence
+ INNER JOIN pg_catalog.pg_depend depend
+ ON depend.objid = sequence.oid
+ INNER JOIN pg_catalog.pg_class class
+ ON class.oid = depend.refobjid
+ INNER JOIN pg_catalog.pg_attribute attribute
+ ON attribute.attnum = depend.refobjsubid
+ AND attribute.attrelid = depend.refobjid
+ WHERE class.relname = '#{table_name}'
+ AND attribute.attname = '#{column_name}'
+ SQL
+ end
+
+ def default_expression_for(table_name, column_name)
+ connection.select_value(<<~SQL)
+ SELECT
+ pg_get_expr(attrdef.adbin, attrdef.adrelid) AS default_value
+ FROM pg_catalog.pg_attribute attribute
+ INNER JOIN pg_catalog.pg_attrdef attrdef
+ ON attribute.attrelid = attrdef.adrelid
+ AND attribute.attnum = attrdef.adnum
+ WHERE attribute.attrelid = '#{table_name}'::regclass
+ AND attribute.attname = '#{column_name}'
+ SQL
+ end
+
+ def primary_key_constraint_name(table_name, schema: nil)
+ table_name = schema ? "#{schema}.#{table_name}" : table_name
+
+ connection.select_value(<<~SQL)
+ SELECT
+ conname AS constraint_name
+ FROM pg_catalog.pg_constraint
+ WHERE pg_constraint.conrelid = '#{table_name}'::regclass
+ AND pg_constraint.contype = 'p'
+ SQL
+ end
+
+ def index_exists_by_name(index, schema: nil)
+ schema = schema ? "'#{schema}'" : 'current_schema'
+
+ connection.select_value(<<~SQL)
+ SELECT true
+ FROM pg_catalog.pg_index i
+ INNER JOIN pg_catalog.pg_class c
+ ON c.oid = i.indexrelid
+ INNER JOIN pg_catalog.pg_namespace n
+ ON c.relnamespace = n.oid
+ WHERE c.relname = '#{index}'
+ AND n.nspname = #{schema}
+ SQL
+ end
+
+ def check_constraint_definition(table_name, constraint_name, schema: nil)
+ table_name = schema ? "#{schema}.#{table_name}" : table_name
+
+ connection.select_value(<<~SQL)
+ SELECT
+ pg_get_constraintdef(oid) AS constraint_definition
+ FROM pg_catalog.pg_constraint
+ WHERE pg_constraint.conrelid = '#{table_name}'::regclass
+ AND pg_constraint.contype = 'c'
+ AND pg_constraint.conname = '#{constraint_name}'
+ SQL
+ end
+ end
+end
diff --git a/spec/support/helpers/database/trigger_helpers.rb b/spec/support/helpers/database/trigger_helpers.rb
new file mode 100644
index 00000000000..9ec03e68413
--- /dev/null
+++ b/spec/support/helpers/database/trigger_helpers.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+module Database
+ module TriggerHelpers
+ def expect_function_to_exist(name)
+ expect(find_function_def(name)).not_to be_nil
+ end
+
+ def expect_function_not_to_exist(name)
+ expect(find_function_def(name)).to be_nil
+ end
+
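+ # Asserts that the function body ends with `RETURN OLD` and that its remaining
+ # statements match exactly the given `statements` (lowercased, whitespace-normalised).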
+ def expect_function_to_contain(name, *statements)
+ return_stmt, *body_stmts = parsed_function_statements(name).reverse
+
+ expect(return_stmt).to eq('return old')
+ expect(body_stmts).to contain_exactly(*statements)
+ end
+
+ def expect_trigger_not_to_exist(table_name, name)
+ expect(find_trigger_def(table_name, name)).to be_nil
+ end
+
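+ # `fires_on` is a single-entry hash of timing => event(s),
+ # e.g. { before: %w(insert update) } (hypothetical example).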
+ def expect_valid_function_trigger(table_name, name, fn_name, fires_on)
+ events, timing, definition = cleaned_trigger_def(table_name, name)
+
+ events = events&.split(',')
+ expected_timing, expected_events = fires_on.first
+ expect(timing).to eq(expected_timing.to_s)
+ expect(events).to match_array(Array.wrap(expected_events))
+
+ expect(definition).to match(%r{execute (?:procedure|function) #{fn_name}()})
+ end
+
+ private
+
+ def parsed_function_statements(name)
+ cleaned_definition = find_function_def(name)['body'].downcase.gsub(/\s+/, ' ')
+ statements = cleaned_definition.sub(/\A\s*begin\s*(.*)\s*end\s*\Z/, "\\1")
+ statements.split(';').map! { |stmt| stmt.strip.presence }.compact!
+ end
+
+ def find_function_def(name)
+ connection.select_one(<<~SQL)
+ SELECT prosrc AS body
+ FROM pg_proc
+ WHERE proname = '#{name}'
+ SQL
+ end
+
+ def cleaned_trigger_def(table_name, name)
+ find_trigger_def(table_name, name).values_at('event', 'action_timing', 'action_statement').map!(&:downcase)
+ end
+
+ def find_trigger_def(table_name, name)
+ connection.select_one(<<~SQL)
+ SELECT
+ string_agg(event_manipulation, ',') AS event,
+ action_timing,
+ action_statement
+ FROM information_schema.triggers
+ WHERE event_object_table = '#{table_name}'
+ AND trigger_name = '#{name}'
+ GROUP BY 2, 3
+ SQL
+ end
+ end
+end
diff --git a/spec/support/helpers/database_helpers.rb b/spec/support/helpers/database_helpers.rb
deleted file mode 100644
index e9f0a74a8d1..00000000000
--- a/spec/support/helpers/database_helpers.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-
-module DatabaseHelpers
- # In order to directly work with views using factories,
- # we can swapout the view for a table of identical structure.
- def swapout_view_for_table(view)
- ActiveRecord::Base.connection.execute(<<~SQL)
- CREATE TABLE #{view}_copy (LIKE #{view});
- DROP VIEW #{view};
- ALTER TABLE #{view}_copy RENAME TO #{view};
- SQL
- end
-end
diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb
index b20801bd3c4..35c298a4d48 100644
--- a/spec/support/helpers/graphql_helpers.rb
+++ b/spec/support/helpers/graphql_helpers.rb
@@ -67,14 +67,16 @@ module GraphqlHelpers
end
end
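+ # Ensures a BatchLoader::Executor exists for the duration of the block and
+ # clears it afterwards, so batched state does not leak between callers.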
+ def with_clean_batchloader_executor(&block)
+ BatchLoader::Executor.ensure_current
+ yield
+ ensure
+ BatchLoader::Executor.clear_current
+ end
+
# Runs a block inside a BatchLoader::Executor wrapper
def batch(max_queries: nil, &blk)
- wrapper = proc do
- BatchLoader::Executor.ensure_current
- yield
- ensure
- BatchLoader::Executor.clear_current
- end
+ wrapper = -> { with_clean_batchloader_executor(&blk) }
if max_queries
result = nil
@@ -85,6 +87,32 @@ module GraphqlHelpers
end
end
+ # Use this when writing N+1 tests.
+ #
+ # It does not use the controller, so it avoids confounding factors due to
+ # authentication (token set-up, license checks).
+ # It clears the request store, rails cache, and BatchLoader Executor between runs.
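+ #
+ # Usage sketch (hypothetical query and context):
+ #   run_with_clean_state('{ currentUser { username } }', context: { current_user: user })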
+ def run_with_clean_state(query, **args)
+ ::Gitlab::WithRequestStore.with_request_store do
+ with_clean_rails_cache do
+ with_clean_batchloader_executor do
+ ::GitlabSchema.execute(query, **args)
+ end
+ end
+ end
+ end
+
+ # Basically a combination of use_sql_query_cache and use_clean_rails_memory_store_caching,
+ # but more fine-grained, suitable for comparing two runs in the same example.
+ def with_clean_rails_cache(&blk)
+ caching_store = Rails.cache
+ Rails.cache = ActiveSupport::Cache::MemoryStore.new
+
+ ActiveRecord::Base.cache(&blk)
+ ensure
+ Rails.cache = caching_store
+ end
+
# BatchLoader::GraphQL returns a wrapper, so we need to :sync in order
# to get the actual values
def batch_sync(max_queries: nil, &blk)
@@ -245,7 +273,7 @@ module GraphqlHelpers
return if max_depth <= 0
allow_unlimited_graphql_complexity
- allow_unlimited_graphql_depth
+ allow_unlimited_graphql_depth if max_depth > 1
allow_high_graphql_recursion
allow_high_graphql_transaction_threshold
diff --git a/spec/support/helpers/multipart_helpers.rb b/spec/support/helpers/multipart_helpers.rb
index bcb184f84c5..8438a83aa8a 100644
--- a/spec/support/helpers/multipart_helpers.rb
+++ b/spec/support/helpers/multipart_helpers.rb
@@ -13,29 +13,23 @@ module MultipartHelpers
)
end
- # This function assumes a `mode` variable to be set
- def upload_parameters_for(filepath: nil, key: nil, filename: 'filename', remote_id: 'remote_id')
+ def upload_parameters_for(filepath: nil, key: nil, mode: nil, filename: 'filename', remote_id: 'remote_id')
result = {
- "#{key}.name" => filename,
- "#{key}.type" => "application/octet-stream",
- "#{key}.sha256" => "1234567890"
+ "name" => filename,
+ "type" => "application/octet-stream",
+ "sha256" => "1234567890"
}
case mode
when :local
- result["#{key}.path"] = filepath
+ result["path"] = filepath
when :remote
- result["#{key}.remote_id"] = remote_id
- result["#{key}.size"] = 3.megabytes
+ result["remote_id"] = remote_id
+ result["size"] = 3.megabytes
else
raise ArgumentError, "can't handle #{mode} mode"
end
- return result if ::Feature.disabled?(:upload_middleware_jwt_params_handler, default_enabled: true)
-
- # the HandlerForJWTParams expects a jwt token with the upload parameters
- # *without* the "#{key}." prefix
- result.deep_transform_keys! { |k| k.remove("#{key}.") }
{
"#{key}.gitlab-workhorse-upload" => jwt_token(data: { 'upload' => result })
}
diff --git a/spec/support/helpers/partitioning_helpers.rb b/spec/support/helpers/partitioning_helpers.rb
deleted file mode 100644
index 8981fea04d5..00000000000
--- a/spec/support/helpers/partitioning_helpers.rb
+++ /dev/null
@@ -1,94 +0,0 @@
-# frozen_string_literal: true
-
-module PartitioningHelpers
- def expect_table_partitioned_by(table, columns, part_type: :range)
- columns_with_part_type = columns.map { |c| [part_type.to_s, c] }
- actual_columns = find_partitioned_columns(table)
-
- expect(columns_with_part_type).to match_array(actual_columns)
- end
-
- def expect_range_partition_of(partition_name, table_name, min_value, max_value)
- definition = find_partition_definition(partition_name, schema: Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA)
-
- expect(definition).not_to be_nil
- expect(definition['base_table']).to eq(table_name.to_s)
- expect(definition['condition']).to eq("FOR VALUES FROM (#{min_value}) TO (#{max_value})")
- end
-
- def expect_total_partitions(table_name, count, schema: Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA)
- partitions = find_partitions(table_name, schema: schema)
-
- expect(partitions.size).to eq(count)
- end
-
- def expect_range_partitions_for(table_name, partitions)
- partitions.each do |suffix, (min_value, max_value)|
- partition_name = "#{table_name}_#{suffix}"
- expect_range_partition_of(partition_name, table_name, min_value, max_value)
- end
-
- expect_total_partitions(table_name, partitions.size, schema: Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA)
- end
-
- def expect_hash_partition_of(partition_name, table_name, modulus, remainder)
- definition = find_partition_definition(partition_name, schema: Gitlab::Database::STATIC_PARTITIONS_SCHEMA)
-
- expect(definition).not_to be_nil
- expect(definition['base_table']).to eq(table_name.to_s)
- expect(definition['condition']).to eq("FOR VALUES WITH (modulus #{modulus}, remainder #{remainder})")
- end
-
- private
-
- def find_partitioned_columns(table)
- connection.select_rows(<<~SQL)
- select
- case partstrat
- when 'l' then 'list'
- when 'r' then 'range'
- when 'h' then 'hash'
- end as partstrat,
- cols.column_name
- from (
- select partrelid, partstrat, unnest(partattrs) as col_pos
- from pg_partitioned_table
- ) pg_part
- inner join pg_class
- on pg_part.partrelid = pg_class.oid
- inner join information_schema.columns cols
- on cols.table_name = pg_class.relname
- and cols.ordinal_position = pg_part.col_pos
- where pg_class.relname = '#{table}';
- SQL
- end
-
- def find_partition_definition(partition, schema: )
- connection.select_one(<<~SQL)
- select
- parent_class.relname as base_table,
- pg_get_expr(pg_class.relpartbound, inhrelid) as condition
- from pg_class
- inner join pg_inherits i on pg_class.oid = inhrelid
- inner join pg_class parent_class on parent_class.oid = inhparent
- inner join pg_namespace ON pg_namespace.oid = pg_class.relnamespace
- where pg_namespace.nspname = '#{schema}'
- and pg_class.relname = '#{partition}'
- and pg_class.relispartition
- SQL
- end
-
- def find_partitions(partition, schema: Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA)
- connection.select_rows(<<~SQL)
- select
- pg_class.relname
- from pg_class
- inner join pg_inherits i on pg_class.oid = inhrelid
- inner join pg_class parent_class on parent_class.oid = inhparent
- inner join pg_namespace ON pg_namespace.oid = pg_class.relnamespace
- where pg_namespace.nspname = '#{schema}'
- and parent_class.relname = '#{partition}'
- and pg_class.relispartition
- SQL
- end
-end
diff --git a/spec/support/helpers/rack_attack_spec_helpers.rb b/spec/support/helpers/rack_attack_spec_helpers.rb
index a8ae69885d8..d50a6382a40 100644
--- a/spec/support/helpers/rack_attack_spec_helpers.rb
+++ b/spec/support/helpers/rack_attack_spec_helpers.rb
@@ -21,10 +21,31 @@ module RackAttackSpecHelpers
{ 'AUTHORIZATION' => "Bearer #{oauth_access_token.token}" }
end
+ def basic_auth_headers(user, personal_access_token)
+ encoded_login = ["#{user.username}:#{personal_access_token.token}"].pack('m0')
+ { 'AUTHORIZATION' => "Basic #{encoded_login}" }
+ end
+
def expect_rejection(&block)
yield
expect(response).to have_gitlab_http_status(:too_many_requests)
+
+ expect(response.headers.to_h).to include(
+ 'RateLimit-Limit' => a_string_matching(/^\d+$/),
+ 'RateLimit-Name' => a_string_matching(/^throttle_.*$/),
+ 'RateLimit-Observed' => a_string_matching(/^\d+$/),
+ 'RateLimit-Remaining' => a_string_matching(/^\d+$/),
+ 'Retry-After' => a_string_matching(/^\d+$/)
+ )
+ expect(response).to have_header('RateLimit-Reset')
+ expect do
+ DateTime.strptime(response.headers['RateLimit-Reset'], '%s')
+ end.not_to raise_error
+ expect(response).to have_header('RateLimit-ResetTime')
+ expect do
+ Time.httpdate(response.headers['RateLimit-ResetTime'])
+ end.not_to raise_error
end
def expect_ok(&block)
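
For context on the additions above: `pack('m0')` Base64-encodes the `username:token` pair without a trailing newline, which is what the HTTP Basic scheme expects, and `expect_rejection` now also asserts the full set of RateLimit-* and Retry-After headers. A minimal sketch of how a throttling spec might use these helpers; the request path, factories and overall shape are illustrative, not taken from this commit:

    # Hypothetical Rack::Attack throttling spec using the helpers above.
    RSpec.describe 'API request throttling' do
      include RackAttackSpecHelpers

      let(:user) { create(:user) }
      let(:personal_access_token) { create(:personal_access_token, user: user) }

      it 'returns 429 with rate-limit headers once the throttle kicks in' do
        headers = basic_auth_headers(user, personal_access_token)

        expect_rejection do
          # The request expected to exceed the configured threshold;
          # expect_rejection then checks the status and the headers.
          get api('/projects'), headers: headers
        end
      end
    end
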
diff --git a/spec/support/helpers/stub_experiments.rb b/spec/support/helpers/stub_experiments.rb
index 247692d83ee..408d16a7c08 100644
--- a/spec/support/helpers/stub_experiments.rb
+++ b/spec/support/helpers/stub_experiments.rb
@@ -11,6 +11,7 @@ module StubExperiments
allow(Gitlab::Experimentation).to receive(:active?).and_call_original
experiments.each do |experiment_key, enabled|
+ Feature.persist_used!("#{experiment_key}#{feature_flag_suffix}")
allow(Gitlab::Experimentation).to receive(:active?).with(experiment_key) { enabled }
end
end
@@ -25,7 +26,14 @@ module StubExperiments
allow(Gitlab::Experimentation).to receive(:in_experiment_group?).and_call_original
experiments.each do |experiment_key, enabled|
+ Feature.persist_used!("#{experiment_key}#{feature_flag_suffix}")
allow(Gitlab::Experimentation).to receive(:in_experiment_group?).with(experiment_key, anything) { enabled }
end
end
+
+ private
+
+ def feature_flag_suffix
+ Gitlab::Experimentation::Experiment::FEATURE_FLAG_SUFFIX
+ end
end
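
The new `Feature.persist_used!` calls mark the experiment's backing feature flag (experiment key plus `FEATURE_FLAG_SUFFIX`) as used before the stub is installed, so flag-usage tracking stays consistent in tests. A small sketch of how a spec might drive these helpers; the experiment key and the public helper names are assumptions, since only the stubbed internals appear in these hunks:

    # Hypothetical usage; :signup_flow is an illustrative experiment key.
    describe 'something behind an experiment' do
      include StubExperiments

      before do
        stub_experiment(signup_flow: true)              # Experimentation.active? => true
        stub_experiment_for_subject(signup_flow: true)  # in_experiment_group? => true
      end

      it 'sees the experiment as active' do
        expect(Gitlab::Experimentation.active?(:signup_flow)).to be(true)
      end
    end
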
diff --git a/spec/support/helpers/stub_feature_flags.rb b/spec/support/helpers/stub_feature_flags.rb
index 7f30a2a70cd..77f31169ecb 100644
--- a/spec/support/helpers/stub_feature_flags.rb
+++ b/spec/support/helpers/stub_feature_flags.rb
@@ -66,4 +66,8 @@ module StubFeatureFlags
def skip_feature_flags_yaml_validation
allow(Feature::Definition).to receive(:valid_usage!)
end
+
+ def skip_default_enabled_yaml_check
+ allow(Feature::Definition).to receive(:default_enabled?).and_return(false)
+ end
end
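
`skip_default_enabled_yaml_check` stubs `Feature::Definition.default_enabled?` to `false`, silencing the check that compares `default_enabled:` usage against the flag's YAML definition. A minimal sketch, assuming an illustrative flag name:

    # Hypothetical spec setup; :my_flag stands in for a default-enabled flag.
    before do
      skip_default_enabled_yaml_check
      stub_feature_flags(my_flag: false)
    end
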
diff --git a/spec/support/helpers/table_schema_helpers.rb b/spec/support/helpers/table_schema_helpers.rb
deleted file mode 100644
index 28794211190..00000000000
--- a/spec/support/helpers/table_schema_helpers.rb
+++ /dev/null
@@ -1,112 +0,0 @@
-# frozen_string_literal: true
-
-module TableSchemaHelpers
- def connection
- ActiveRecord::Base.connection
- end
-
- def expect_table_to_be_replaced(original_table:, replacement_table:, archived_table:)
- original_oid = table_oid(original_table)
- replacement_oid = table_oid(replacement_table)
-
- yield
-
- expect(table_oid(original_table)).to eq(replacement_oid)
- expect(table_oid(archived_table)).to eq(original_oid)
- expect(table_oid(replacement_table)).to be_nil
- end
-
- def expect_index_to_exist(name, schema: nil)
- expect(index_exists_by_name(name, schema: schema)).to eq(true)
- end
-
- def expect_index_not_to_exist(name, schema: nil)
- expect(index_exists_by_name(name, schema: schema)).to be_nil
- end
-
- def expect_primary_keys_after_tables(tables, schema: nil)
- tables.each do |table|
- primary_key = primary_key_constraint_name(table, schema: schema)
-
- expect(primary_key).to eq("#{table}_pkey")
- end
- end
-
- def table_oid(name)
- connection.select_value(<<~SQL)
- SELECT oid
- FROM pg_catalog.pg_class
- WHERE relname = '#{name}'
- SQL
- end
-
- def table_type(name)
- connection.select_value(<<~SQL)
- SELECT
- CASE class.relkind
- WHEN 'r' THEN 'normal'
- WHEN 'p' THEN 'partitioned'
- ELSE 'other'
- END as table_type
- FROM pg_catalog.pg_class class
- WHERE class.relname = '#{name}'
- SQL
- end
-
- def sequence_owned_by(table_name, column_name)
- connection.select_value(<<~SQL)
- SELECT
- sequence.relname as name
- FROM pg_catalog.pg_class as sequence
- INNER JOIN pg_catalog.pg_depend depend
- ON depend.objid = sequence.oid
- INNER JOIN pg_catalog.pg_class class
- ON class.oid = depend.refobjid
- INNER JOIN pg_catalog.pg_attribute attribute
- ON attribute.attnum = depend.refobjsubid
- AND attribute.attrelid = depend.refobjid
- WHERE class.relname = '#{table_name}'
- AND attribute.attname = '#{column_name}'
- SQL
- end
-
- def default_expression_for(table_name, column_name)
- connection.select_value(<<~SQL)
- SELECT
- pg_get_expr(attrdef.adbin, attrdef.adrelid) AS default_value
- FROM pg_catalog.pg_attribute attribute
- INNER JOIN pg_catalog.pg_attrdef attrdef
- ON attribute.attrelid = attrdef.adrelid
- AND attribute.attnum = attrdef.adnum
- WHERE attribute.attrelid = '#{table_name}'::regclass
- AND attribute.attname = '#{column_name}'
- SQL
- end
-
- def primary_key_constraint_name(table_name, schema: nil)
- table_name = schema ? "#{schema}.#{table_name}" : table_name
-
- connection.select_value(<<~SQL)
- SELECT
- conname AS constraint_name
- FROM pg_catalog.pg_constraint
- WHERE pg_constraint.conrelid = '#{table_name}'::regclass
- AND pg_constraint.contype = 'p'
- SQL
- end
-
- def index_exists_by_name(index, schema: nil)
- schema = schema ? "'#{schema}'" : 'current_schema'
-
- connection.select_value(<<~SQL)
- SELECT true
- FROM pg_catalog.pg_index i
- INNER JOIN pg_catalog.pg_class c
- ON c.oid = i.indexrelid
- INNER JOIN pg_catalog.pg_namespace n
- ON c.relnamespace = n.oid
- WHERE c.relname = '#{index}'
- AND n.nspname = #{schema}
- SQL
- end
-end
diff --git a/spec/support/helpers/test_env.rb b/spec/support/helpers/test_env.rb
index 01571277a1d..cb25f5f9429 100644
--- a/spec/support/helpers/test_env.rb
+++ b/spec/support/helpers/test_env.rb
@@ -203,10 +203,13 @@ module TestEnv
end
gitaly_pid = Integer(File.read(TMP_TEST_PATH.join('gitaly.pid')))
+ gitaly2_pid = Integer(File.read(TMP_TEST_PATH.join('gitaly2.pid')))
praefect_pid = Integer(File.read(TMP_TEST_PATH.join('praefect.pid')))
- Kernel.at_exit { stop(gitaly_pid) }
- Kernel.at_exit { stop(praefect_pid) }
+ Kernel.at_exit do
+ pids = [gitaly_pid, gitaly2_pid, praefect_pid]
+ pids.each { |pid| stop(pid) }
+ end
wait('gitaly')
wait('praefect')
@@ -284,7 +287,7 @@ module TestEnv
@workhorse_path ||= File.join('tmp', 'tests', 'gitlab-workhorse')
end
- def with_workhorse(workhorse_dir, host, port, upstream, &blk)
+ def with_workhorse(host, port, upstream, &blk)
host = "[#{host}]" if host.include?(':')
listen_addr = [host, port].join(':')
diff --git a/spec/support/helpers/trigger_helpers.rb b/spec/support/helpers/trigger_helpers.rb
deleted file mode 100644
index dd6d8ff5bb5..00000000000
--- a/spec/support/helpers/trigger_helpers.rb
+++ /dev/null
@@ -1,66 +0,0 @@
-# frozen_string_literal: true
-
-module TriggerHelpers
- def expect_function_to_exist(name)
- expect(find_function_def(name)).not_to be_nil
- end
-
- def expect_function_not_to_exist(name)
- expect(find_function_def(name)).to be_nil
- end
-
- def expect_function_to_contain(name, *statements)
- return_stmt, *body_stmts = parsed_function_statements(name).reverse
-
- expect(return_stmt).to eq('return old')
- expect(body_stmts).to contain_exactly(*statements)
- end
-
- def expect_trigger_not_to_exist(table_name, name)
- expect(find_trigger_def(table_name, name)).to be_nil
- end
-
- def expect_valid_function_trigger(table_name, name, fn_name, fires_on)
- events, timing, definition = cleaned_trigger_def(table_name, name)
-
- events = events&.split(',')
- expected_timing, expected_events = fires_on.first
- expect(timing).to eq(expected_timing.to_s)
- expect(events).to match_array(Array.wrap(expected_events))
-
- expect(definition).to match(%r{execute (?:procedure|function) #{fn_name}()})
- end
-
- private
-
- def parsed_function_statements(name)
- cleaned_definition = find_function_def(name)['body'].downcase.gsub(/\s+/, ' ')
- statements = cleaned_definition.sub(/\A\s*begin\s*(.*)\s*end\s*\Z/, "\\1")
- statements.split(';').map! { |stmt| stmt.strip.presence }.compact!
- end
-
- def find_function_def(name)
- connection.select_one(<<~SQL)
- SELECT prosrc AS body
- FROM pg_proc
- WHERE proname = '#{name}'
- SQL
- end
-
- def cleaned_trigger_def(table_name, name)
- find_trigger_def(table_name, name).values_at('event', 'action_timing', 'action_statement').map!(&:downcase)
- end
-
- def find_trigger_def(table_name, name)
- connection.select_one(<<~SQL)
- SELECT
- string_agg(event_manipulation, ',') AS event,
- action_timing,
- action_statement
- FROM information_schema.triggers
- WHERE event_object_table = '#{table_name}'
- AND trigger_name = '#{name}'
- GROUP BY 2, 3
- SQL
- end
-end
diff --git a/spec/support/matchers/be_sorted.rb b/spec/support/matchers/be_sorted.rb
index 1455060fe71..b0ab93efbb2 100644
--- a/spec/support/matchers/be_sorted.rb
+++ b/spec/support/matchers/be_sorted.rb
@@ -4,18 +4,75 @@
#
# By default, this checks that the collection is sorted ascending
# but you can check order by specific field and order by passing
-# them, eg:
+# them, either as arguments, or using the fluent interface, eg:
#
# ```
+# # Usage examples:
+# expect(collection).to be_sorted
+# expect(collection).to be_sorted(:field)
# expect(collection).to be_sorted(:field, :desc)
+# expect(collection).to be_sorted.asc
+# expect(collection).to be_sorted.desc.by(&:field)
+# expect(collection).to be_sorted.by(&:field).desc
+# expect(collection).to be_sorted.by { |x| [x.foo, x.bar] }
# ```
-RSpec::Matchers.define :be_sorted do |by, order = :asc|
+RSpec::Matchers.define :be_sorted do |on = :itself, order = :asc|
+ def by(&block)
+ @comparator = block
+ self
+ end
+
+ def asc
+ @direction = :asc
+ self
+ end
+
+ def desc
+ @direction = :desc
+ self
+ end
+
+ def format_with(proc)
+ @format_with = proc
+ self
+ end
+
+ define_method :comparator do
+ @comparator || on
+ end
+
+ define_method :descending? do
+ (@direction || order.to_sym) == :desc
+ end
+
+ def order(items)
+ descending? ? items.reverse : items
+ end
+
+ def sort(items)
+ items.sort_by(&comparator)
+ end
+
match do |actual|
- next true unless actual.present? # emtpy collection is sorted
+ next true unless actual.present? # empty collection is sorted
+
+ actual = actual.to_a if actual.respond_to?(:to_a) && !actual.respond_to?(:sort_by)
+
+ @got = actual
+ @expected = order(sort(actual))
+
+ @expected == actual
+ end
+
+ def failure_message
+ "Expected #{show(@expected)}, got #{show(@got)}"
+ end
- actual
- .then { |collection| by ? collection.sort_by(&by) : collection.sort }
- .then { |sorted_collection| order.to_sym == :desc ? sorted_collection.reverse : sorted_collection }
- .then { |sorted_collection| sorted_collection == actual }
+ def show(things)
+ if @format_with
+ things.map(&@format_with)
+ else
+ things
+ end
end
end
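
The usage list in the header comment covers the positional and fluent forms; `format_with` is the one addition not shown there, and it only affects how failures are reported. An illustrative use, assuming a collection of issue-like records:

    # Sorted newest-first; on failure, print only the iids instead of full records.
    expect(issues).to be_sorted.desc.by(&:created_at).format_with(->(issue) { issue.iid })
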
diff --git a/spec/support/matchers/be_valid_json.rb b/spec/support/matchers/be_valid_json.rb
index f46c35c7198..228c1fc986e 100644
--- a/spec/support/matchers/be_valid_json.rb
+++ b/spec/support/matchers/be_valid_json.rb
@@ -1,20 +1,8 @@
# frozen_string_literal: true
RSpec::Matchers.define :be_valid_json do
- def according_to_schema(schema)
- @schema = schema
- self
- end
-
match do |actual|
- data = Gitlab::Json.parse(actual)
-
- if @schema.present?
- @validation_errors = JSON::Validator.fully_validate(@schema, data)
- @validation_errors.empty?
- else
- data.present?
- end
+ Gitlab::Json.parse(actual).present?
rescue JSON::ParserError => e
@error = e
false
@@ -23,8 +11,6 @@ RSpec::Matchers.define :be_valid_json do
def failure_message
if @error
"Parse failed with error: #{@error}"
- elsif @validation_errors.present?
- "Validation failed because #{@validation_errors.join(', and ')}"
else
"Parsing did not return any data"
end
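
With `according_to_schema` removed, `be_valid_json` only asserts that the string parses to present (non-empty) data; schema assertions belong to the schema matchers in the next file. Two minimal illustrations of the remaining behaviour:

    expect('{"status":"ok"}').to be_valid_json   # parses to a non-empty hash
    expect('[]').not_to be_valid_json            # parses, but to empty data
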
diff --git a/spec/support/matchers/schema_matcher.rb b/spec/support/matchers/schema_matcher.rb
index ebbd57c8115..f0e7a52c51e 100644
--- a/spec/support/matchers/schema_matcher.rb
+++ b/spec/support/matchers/schema_matcher.rb
@@ -2,6 +2,8 @@
module SchemaPath
def self.expand(schema, dir = nil)
+ return schema unless schema.is_a?(String)
+
if Gitlab.ee? && dir.nil?
ee_path = expand(schema, 'ee')
@@ -35,7 +37,13 @@ RSpec::Matchers.define :match_schema do |schema, dir: nil, **options|
end
failure_message do |response|
- "didn't match the schema defined by #{SchemaPath.expand(schema, dir)}" \
+ "didn't match the schema defined by #{schema_name(schema, dir)}" \
" The validation errors were:\n#{@errors.join("\n")}"
end
+
+ def schema_name(schema, dir)
+ return 'provided schema' unless schema.is_a?(String)
+
+ SchemaPath.expand(schema, dir)
+ end
end
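
`SchemaPath.expand` now passes non-string schemas straight through, so a matcher call can hand over an inline schema object rather than a fixture name, and failures report it as 'provided schema'. A sketch of that calling style, assuming the match block (outside the shown hunks) accepts the passed-through object unchanged:

    # Hypothetical inline schema instead of a fixture path.
    inline_schema = {
      'type' => 'object',
      'required' => %w[id],
      'properties' => { 'id' => { 'type' => 'integer' } }
    }

    expect(payload.to_json).to match_schema(inline_schema)
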
diff --git a/spec/support/rspec.rb b/spec/support/rspec.rb
index 32f738faa9b..00b9aac7bf4 100644
--- a/spec/support/rspec.rb
+++ b/spec/support/rspec.rb
@@ -25,4 +25,8 @@ RSpec.configure do |config|
config.include FastRailsRoot
config.include RuboCop::RSpec::ExpectOffense, type: :rubocop
+
+ config.define_derived_metadata(file_path: %r{spec/rubocop}) do |metadata|
+ metadata[:type] = :rubocop
+ end
end
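
The derived-metadata rule tags every file under spec/rubocop with `type: :rubocop`, so those specs pick up `RuboCop::RSpec::ExpectOffense` without an explicit tag. A hypothetical cop spec relying on that; the cop name and require paths are placeholders:

    # spec/rubocop/cop/example_cop_spec.rb (illustrative path)
    require 'fast_spec_helper'
    require_relative '../../../rubocop/cop/example_cop'

    RSpec.describe RuboCop::Cop::ExampleCop do
      subject(:cop) { described_class.new }

      it 'registers an offense' do
        expect_offense(<<~RUBY)
          bad_call
          ^^^^^^^^ Message the cop is expected to produce.
        RUBY
      end
    end
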
diff --git a/spec/support/shared_contexts/email_shared_context.rb b/spec/support/shared_contexts/email_shared_context.rb
index 298e03162c4..9dffea7c94e 100644
--- a/spec/support/shared_contexts/email_shared_context.rb
+++ b/spec/support/shared_contexts/email_shared_context.rb
@@ -1,16 +1,16 @@
# frozen_string_literal: true
RSpec.shared_context :email_shared_context do
- let(:mail_key) { "59d8df8370b7e95c5a49fbf86aeb2c93" }
+ let(:mail_key) { '59d8df8370b7e95c5a49fbf86aeb2c93' }
let(:receiver) { Gitlab::Email::Receiver.new(email_raw) }
- let(:markdown) { "![image](uploads/image.png)" }
+ let(:markdown) { '![image](uploads/image.png)' }
def setup_attachment
allow_any_instance_of(Gitlab::Email::AttachmentUploader).to receive(:execute).and_return(
[
{
- url: "uploads/image.png",
- alt: "image",
+ url: 'uploads/image.png',
+ alt: 'image',
markdown: markdown
}
]
@@ -19,23 +19,252 @@ RSpec.shared_context :email_shared_context do
end
RSpec.shared_examples :reply_processing_shared_examples do
- context "when the user could not be found" do
+ context 'when the user could not be found' do
before do
user.destroy!
end
- it "raises a UserNotFoundError" do
+ it 'raises a UserNotFoundError' do
expect { receiver.execute }.to raise_error(Gitlab::Email::UserNotFoundError)
end
end
- context "when the user is not authorized to the project" do
+ context 'when the user is not authorized to the project' do
before do
project.update_attribute(:visibility_level, Project::PRIVATE)
end
- it "raises a ProjectNotFound" do
+ it 'raises a ProjectNotFound' do
expect { receiver.execute }.to raise_error(Gitlab::Email::ProjectNotFound)
end
end
end
+
+RSpec.shared_examples :checks_permissions_on_noteable_examples do
+ context 'when user has access' do
+ before do
+ project.add_reporter(user)
+ end
+
+ it 'creates a comment' do
+ expect { receiver.execute }.to change { noteable.notes.count }.by(1)
+ end
+ end
+
+ context 'when user does not have access' do
+ it 'raises UserNotAuthorizedError' do
+ expect { receiver.execute }.to raise_error(Gitlab::Email::UserNotAuthorizedError)
+ end
+ end
+end
+
+RSpec.shared_examples :note_handler_shared_examples do |forwardable|
+ context 'when the noteable could not be found' do
+ before do
+ noteable.destroy!
+ end
+
+ it 'raises a NoteableNotFoundError' do
+ expect { receiver.execute }.to raise_error(Gitlab::Email::NoteableNotFoundError)
+ end
+ end
+
+ context 'when the note could not be saved' do
+ before do
+ allow_any_instance_of(Note).to receive(:persisted?).and_return(false)
+ end
+
+ it 'raises an InvalidNoteError' do
+ expect { receiver.execute }.to raise_error(Gitlab::Email::InvalidNoteError)
+ end
+
+ context 'because the note was update commands only' do
+ let!(:email_raw) { update_commands_only }
+
+ context 'and current user cannot update noteable' do
+ it 'raises a CommandsOnlyNoteError' do
+ expect { receiver.execute }.to raise_error(Gitlab::Email::InvalidNoteError)
+ end
+ end
+
+ context 'and current user can update noteable' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'does not raise an error', unless: forwardable do
+ expect { receiver.execute }.to change { noteable.resource_state_events.count }.by(1)
+
+ expect(noteable.reload).to be_closed
+ end
+
+ it 'raises an InvalidNoteError', if: forwardable do
+ expect { receiver.execute }.to raise_error(Gitlab::Email::InvalidNoteError)
+ end
+ end
+ end
+ end
+
+ context 'when the note contains quick actions' do
+ let!(:email_raw) { commands_in_reply }
+
+ context 'and current user cannot update the noteable' do
+ it 'only executes the commands that the user can perform' do
+ expect { receiver.execute }
+ .to change { noteable.notes.user.count }.by(1)
+ .and change { user.todos_pending_count }.from(0).to(1)
+
+ expect(noteable.reload).to be_open
+ end
+ end
+
+ context 'and current user can update noteable' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'posts a note and updates the noteable' do
+ expect(TodoService.new.todo_exist?(noteable, user)).to be_falsy
+
+ expect { receiver.execute }
+ .to change { noteable.notes.user.count }.by(1)
+ .and change { user.todos_pending_count }.from(0).to(1)
+
+ expect(noteable.reload).to be_closed
+ end
+ end
+ end
+
+ context 'when the reply is blank' do
+ let!(:email_raw) { no_content }
+
+ it 'raises an EmptyEmailError', unless: forwardable do
+ expect { receiver.execute }.to raise_error(Gitlab::Email::EmptyEmailError)
+ end
+
+ it 'allows email to only have quoted text', if: forwardable do
+ expect { receiver.execute }.not_to raise_error(Gitlab::Email::EmptyEmailError)
+ end
+ end
+
+ context 'when discussion is locked' do
+ before do
+ noteable.update_attribute(:discussion_locked, true)
+ end
+
+ it_behaves_like :checks_permissions_on_noteable_examples
+ end
+
+ context 'when everything is fine' do
+ before do
+ setup_attachment
+ end
+
+ it 'adds all attachments' do
+ expect_next_instance_of(Gitlab::Email::AttachmentUploader) do |uploader|
+ expect(uploader).to receive(:execute).with(upload_parent: project, uploader_class: FileUploader).and_return(
+ [
+ {
+ url: 'uploads/image.png',
+ alt: 'image',
+ markdown: markdown
+ }
+ ]
+ )
+ end
+
+ receiver.execute
+
+ note = noteable.notes.last
+ expect(note.note).to include(markdown)
+ expect(note.note).to include('Jake out')
+ end
+ end
+
+ context 'when the service desk' do
+ let(:project) { create(:project, :public, service_desk_enabled: true) }
+ let(:support_bot) { User.support_bot }
+ let(:noteable) { create(:issue, project: project, author: support_bot, title: 'service desk issue') }
+ let!(:note) { create(:note, project: project, noteable: noteable) }
+ let(:email_raw) { with_quick_actions }
+
+ let!(:sent_notification) do
+ SentNotification.record_note(note, support_bot.id, mail_key)
+ end
+
+ context 'is enabled' do
+ before do
+ allow(Gitlab::ServiceDesk).to receive(:enabled?).with(project: project).and_return(true)
+ project.project_feature.update!(issues_access_level: issues_access_level)
+ end
+
+ context 'when issues are enabled for everyone' do
+ let(:issues_access_level) { ProjectFeature::ENABLED }
+
+ it 'creates a comment' do
+ expect { receiver.execute }.to change { noteable.notes.count }.by(1)
+ end
+
+ context 'when quick actions are present' do
+ before do
+ receiver.execute
+ noteable.reload
+ end
+
+ context 'when author is a support_bot', unless: forwardable do
+ it 'encloses quick actions with code span markdown' do
+ note = Note.last
+ expect(note.note).to include("Jake out\n\n`/close`\n`/title test`")
+ expect(noteable.title).to eq('service desk issue')
+ expect(noteable).to be_opened
+ end
+ end
+
+ context 'when author is a normal user', if: forwardable do
+ it 'extracted the quick actions' do
+ note = Note.last
+ expect(note.note).to include('Jake out')
+ expect(note.note).not_to include("`/close`\n`/title test`")
+ end
+ end
+ end
+ end
+
+ context 'when issues are protected members only' do
+ let(:issues_access_level) { ProjectFeature::PRIVATE }
+
+ before do
+ if recipient.support_bot?
+ @changed_by = 1
+ else
+ @changed_by = 2
+ project.add_developer(recipient)
+ end
+ end
+
+ it 'creates a comment' do
+ expect { receiver.execute }.to change { noteable.notes.count }.by(@changed_by)
+ end
+ end
+
+ context 'when issues are disabled' do
+ let(:issues_access_level) { ProjectFeature::DISABLED }
+
+ it 'does not create a comment' do
+ expect { receiver.execute }.to raise_error(Gitlab::Email::UserNotAuthorizedError)
+ end
+ end
+ end
+
+ context 'is disabled', unless: forwardable do
+ before do
+ allow(Gitlab::ServiceDesk).to receive(:enabled?).and_return(false)
+ allow(Gitlab::ServiceDesk).to receive(:enabled?).with(project: project).and_return(false)
+ end
+
+ it 'does not create a comment' do
+ expect { receiver.execute }.to raise_error(Gitlab::Email::ProjectNotFound)
+ end
+ end
+ end
+end
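
The `:note_handler_shared_examples` group is parameterised on `forwardable` and assumes the including spec provides `noteable`, `user`, `project`, `recipient`, `email_raw` and raw-email lets such as `commands_in_reply`, `update_commands_only`, `no_content` and `with_quick_actions`; the shared context above already supplies `receiver` and `mail_key`. A condensed sketch of an including handler spec; the fixture names are placeholders:

    RSpec.describe Gitlab::Email::Handler::CreateNoteHandler do
      include_context :email_shared_context

      let(:user)      { create(:user) }
      let(:recipient) { user }
      let(:project)   { create(:project, :public, :repository) }
      let(:noteable)  { create(:issue, project: project) }
      let(:email_raw) { fixture_file('emails/valid_reply.eml') }

      # Referenced by the shared examples; names here are illustrative.
      let(:commands_in_reply)    { fixture_file('emails/commands_in_reply.eml') }
      let(:update_commands_only) { fixture_file('emails/update_commands_only.eml') }
      let(:no_content)           { fixture_file('emails/no_content_reply.eml') }
      let(:with_quick_actions)   { fixture_file('emails/valid_reply_with_quick_actions.eml') }

      it_behaves_like :reply_processing_shared_examples
      it_behaves_like :note_handler_shared_examples, false
    end
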
diff --git a/spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb b/spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb
index 9ffde54c84a..d9cbea58406 100644
--- a/spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb
+++ b/spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb
@@ -8,13 +8,53 @@ RSpec.shared_context 'IssuesFinder context' do
let_it_be(:project1, reload: true) { create(:project, group: group) }
let_it_be(:project2, reload: true) { create(:project) }
let_it_be(:project3, reload: true) { create(:project, group: subgroup) }
- let_it_be(:milestone) { create(:milestone, project: project1) }
+ let_it_be(:release) { create(:release, project: project1, tag: 'v1.0.0') }
+ let_it_be(:milestone) { create(:milestone, project: project1, releases: [release]) }
let_it_be(:label) { create(:label, project: project2) }
let_it_be(:label2) { create(:label, project: project2) }
- let_it_be(:issue1, reload: true) { create(:issue, author: user, assignees: [user], project: project1, milestone: milestone, title: 'gitlab', created_at: 1.week.ago, updated_at: 1.week.ago) }
- let_it_be(:issue2, reload: true) { create(:issue, author: user, assignees: [user], project: project2, description: 'gitlab', created_at: 1.week.from_now, updated_at: 1.week.from_now) }
- let_it_be(:issue3, reload: true) { create(:issue, author: user2, assignees: [user2], project: project2, title: 'tanuki', description: 'tanuki', created_at: 2.weeks.from_now, updated_at: 2.weeks.from_now) }
+ let_it_be(:issue1, reload: true) do
+ create(:issue,
+ author: user,
+ assignees: [user],
+ project: project1,
+ milestone: milestone,
+ title: 'gitlab',
+ created_at: 1.week.ago,
+ updated_at: 1.week.ago)
+ end
+
+ let_it_be(:issue2, reload: true) do
+ create(:issue,
+ author: user,
+ assignees: [user],
+ project: project2,
+ description: 'gitlab',
+ created_at: 1.week.from_now,
+ updated_at: 1.week.from_now)
+ end
+
+ let_it_be(:issue3, reload: true) do
+ create(:issue,
+ author: user2,
+ assignees: [user2],
+ project: project2,
+ title: 'tanuki',
+ description: 'tanuki',
+ created_at: 2.weeks.from_now,
+ updated_at: 2.weeks.from_now)
+ end
+
let_it_be(:issue4, reload: true) { create(:issue, project: project3) }
+ let_it_be(:issue5, reload: true) do
+ create(:issue,
+ author: user,
+ assignees: [user],
+ project: project1,
+ title: 'wotnot',
+ created_at: 3.days.ago,
+ updated_at: 3.days.ago)
+ end
+
let_it_be(:award_emoji1) { create(:award_emoji, name: 'thumbsup', user: user, awardable: issue1) }
let_it_be(:award_emoji2) { create(:award_emoji, name: 'thumbsup', user: user2, awardable: issue2) }
let_it_be(:award_emoji3) { create(:award_emoji, name: 'thumbsdown', user: user, awardable: issue3) }
diff --git a/spec/support/shared_contexts/finders/users_finder_shared_contexts.rb b/spec/support/shared_contexts/finders/users_finder_shared_contexts.rb
index 54022aeb494..6a09497a497 100644
--- a/spec/support/shared_contexts/finders/users_finder_shared_contexts.rb
+++ b/spec/support/shared_contexts/finders/users_finder_shared_contexts.rb
@@ -2,6 +2,7 @@
RSpec.shared_context 'UsersFinder#execute filter by project context' do
let_it_be(:normal_user) { create(:user, username: 'johndoe') }
+ let_it_be(:admin_user) { create(:user, :admin, username: 'iamadmin') }
let_it_be(:blocked_user) { create(:user, :blocked, username: 'notsorandom') }
let_it_be(:external_user) { create(:user, :external) }
let_it_be(:omniauth_user) { create(:omniauth_user, provider: 'twitter', extern_uid: '123456') }
diff --git a/spec/support/shared_contexts/navbar_structure_context.rb b/spec/support/shared_contexts/navbar_structure_context.rb
index 549dc1cff1d..57d8320b76a 100644
--- a/spec/support/shared_contexts/navbar_structure_context.rb
+++ b/spec/support/shared_contexts/navbar_structure_context.rb
@@ -137,6 +137,7 @@ RSpec.shared_context 'group navbar structure' do
_('Projects'),
_('Repository'),
_('CI / CD'),
+ _('Packages & Registries'),
_('Webhooks')
]
}
diff --git a/spec/support/shared_contexts/policies/group_policy_shared_context.rb b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
index e0e2a18cdd2..e7bc1450601 100644
--- a/spec/support/shared_contexts/policies/group_policy_shared_context.rb
+++ b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
@@ -19,8 +19,29 @@ RSpec.shared_context 'GroupPolicy context' do
end
let(:read_group_permissions) { %i[read_label read_list read_milestone read_board] }
- let(:reporter_permissions) { %i[admin_label read_container_image read_metrics_dashboard_annotation read_prometheus] }
- let(:developer_permissions) { %i[admin_milestone create_metrics_dashboard_annotation delete_metrics_dashboard_annotation update_metrics_dashboard_annotation] }
+
+ let(:reporter_permissions) do
+ %i[
+ admin_label
+ admin_board
+ read_container_image
+ read_metrics_dashboard_annotation
+ read_prometheus
+ read_package_settings
+ ]
+ end
+
+ let(:developer_permissions) do
+ %i[
+ admin_milestone
+ create_metrics_dashboard_annotation
+ delete_metrics_dashboard_annotation
+ update_metrics_dashboard_annotation
+ create_custom_emoji
+ create_package_settings
+ ]
+ end
+
let(:maintainer_permissions) do
%i[
create_projects
diff --git a/spec/support/shared_contexts/read_ci_configuration_shared_context.rb b/spec/support/shared_contexts/read_ci_configuration_shared_context.rb
index f8f33e2a745..04c50171766 100644
--- a/spec/support/shared_contexts/read_ci_configuration_shared_context.rb
+++ b/spec/support/shared_contexts/read_ci_configuration_shared_context.rb
@@ -5,5 +5,13 @@ RSpec.shared_context 'read ci configuration for sast enabled project' do
File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci_for_sast.yml'))
end
+ let_it_be(:gitlab_ci_yml_default_analyzers_content) do
+ File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci_for_sast_default_analyzers.yml'))
+ end
+
+ let_it_be(:gitlab_ci_yml_excluded_analyzers_content) do
+ File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci_for_sast_excluded_analyzers.yml'))
+ end
+
let_it_be(:project) { create(:project, :repository) }
end
diff --git a/spec/support/shared_contexts/requests/api/nuget_packages_shared_context.rb b/spec/support/shared_contexts/requests/api/nuget_packages_shared_context.rb
new file mode 100644
index 00000000000..f877d6299bd
--- /dev/null
+++ b/spec/support/shared_contexts/requests/api/nuget_packages_shared_context.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'nuget api setup' do
+ include WorkhorseHelpers
+ include PackagesManagerApiSpecHelpers
+ include HttpBasicAuthHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
+end
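
A request spec would pull this context in and lean on the helpers it includes; a hypothetical example, where the endpoint path and the auth helper call are assumptions:

    RSpec.describe API::NugetProjectPackages do
      include_context 'nuget api setup'

      let_it_be(:project) { create(:project, :public) }

      it 'accepts the personal access token over basic auth' do
        get api("/projects/#{project.id}/packages/nuget/index.json"),
            headers: basic_auth_header(user.username, personal_access_token.token)

        expect(response).to have_gitlab_http_status(:ok)
      end
    end
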
diff --git a/spec/support/shared_examples/controllers/repositories/git_http_controller_shared_examples.rb b/spec/support/shared_examples/controllers/repositories/git_http_controller_shared_examples.rb
index 9b738a4b002..00a0fb7e4c5 100644
--- a/spec/support/shared_examples/controllers/repositories/git_http_controller_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/repositories/git_http_controller_shared_examples.rb
@@ -77,30 +77,6 @@ RSpec.shared_examples Repositories::GitHttpController do
end
end
end
-
- context 'with exceptions' do
- before do
- allow(controller).to receive(:authenticate_user).and_return(true)
- allow(controller).to receive(:verify_workhorse_api!).and_return(true)
- end
-
- it 'returns 503 with GRPC Unavailable' do
- allow(controller).to receive(:access_check).and_raise(GRPC::Unavailable)
-
- get :info_refs, params: params
-
- expect(response).to have_gitlab_http_status(:service_unavailable)
- end
-
- it 'returns 503 with timeout error' do
- allow(controller).to receive(:access_check).and_raise(Gitlab::GitAccess::TimeoutError)
-
- get :info_refs, params: params
-
- expect(response).to have_gitlab_http_status(:service_unavailable)
- expect(response.body).to eq 'Gitlab::GitAccess::TimeoutError'
- end
- end
end
describe 'POST #git_upload_pack' do
diff --git a/spec/support/shared_examples/controllers/sessionless_auth_controller_shared_examples.rb b/spec/support/shared_examples/controllers/sessionless_auth_controller_shared_examples.rb
index b67eb0d99fd..041695d8111 100644
--- a/spec/support/shared_examples/controllers/sessionless_auth_controller_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/sessionless_auth_controller_shared_examples.rb
@@ -1,5 +1,9 @@
# frozen_string_literal: true
+# This controller shared examples will be migrated to
+# spec/support/shared_examples/requests/sessionless_auth_request_shared_examples.rb
+# See also https://gitlab.com/groups/gitlab-org/-/epics/5076
+
RSpec.shared_examples 'authenticates sessionless user' do |path, format, params|
params ||= {}
diff --git a/spec/support/shared_examples/controllers/unique_hll_events_examples.rb b/spec/support/shared_examples/controllers/unique_hll_events_examples.rb
index cf7ee17ea13..c5d65743810 100644
--- a/spec/support/shared_examples/controllers/unique_hll_events_examples.rb
+++ b/spec/support/shared_examples/controllers/unique_hll_events_examples.rb
@@ -7,7 +7,7 @@
RSpec.shared_examples 'tracking unique hll events' do |feature_flag|
it 'tracks unique event' do
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(expected_type, target_id)
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(target_id, values: expected_type)
request
end
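
Note the argument order in these examples: `target_id` carries the event name and `expected_type` the unique value, so the expectation now matches the keyword form `track_event(event_name, values: value)`. A sketch of a tracking call that satisfies it; the event name is illustrative:

    # Hypothetical caller-side invocation matching the updated expectation.
    Gitlab::UsageDataCounters::HLLRedisCounter.track_event(
      'i_example_unique_event',   # the spec's `target_id`
      values: current_user.id     # the spec's `expected_type`
    )
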
diff --git a/spec/support/shared_examples/features/file_uploads_shared_examples.rb b/spec/support/shared_examples/features/file_uploads_shared_examples.rb
index ea8c8d44501..d586bf03b59 100644
--- a/spec/support/shared_examples/features/file_uploads_shared_examples.rb
+++ b/spec/support/shared_examples/features/file_uploads_shared_examples.rb
@@ -2,28 +2,6 @@
RSpec.shared_examples 'handling file uploads' do |shared_examples_name|
context 'with object storage disabled' do
- context 'with upload_middleware_jwt_params_handler disabled' do
- before do
- stub_feature_flags(upload_middleware_jwt_params_handler: false)
-
- expect_next_instance_of(Gitlab::Middleware::Multipart::Handler) do |handler|
- expect(handler).to receive(:with_open_files).and_call_original
- end
- end
-
- it_behaves_like shared_examples_name
- end
-
- context 'with upload_middleware_jwt_params_handler enabled' do
- before do
- stub_feature_flags(upload_middleware_jwt_params_handler: true)
-
- expect_next_instance_of(Gitlab::Middleware::Multipart::HandlerForJWTParams) do |handler|
- expect(handler).to receive(:with_open_files).and_call_original
- end
- end
-
- it_behaves_like shared_examples_name
- end
+ it_behaves_like shared_examples_name
end
end
diff --git a/spec/support/shared_examples/features/master_manages_access_requests_shared_example.rb b/spec/support/shared_examples/features/master_manages_access_requests_shared_example.rb
index 1dbaace1c89..c2dc87b0fb0 100644
--- a/spec/support/shared_examples/features/master_manages_access_requests_shared_example.rb
+++ b/spec/support/shared_examples/features/master_manages_access_requests_shared_example.rb
@@ -12,9 +12,7 @@ RSpec.shared_examples 'Maintainer manages access requests' do
sign_in(maintainer)
visit members_page_path
- if has_tabs
- click_on 'Access requests'
- end
+ click_on 'Access requests'
end
it 'maintainer can see access requests', :js do
@@ -48,11 +46,7 @@ RSpec.shared_examples 'Maintainer manages access requests' do
end
def expect_visible_access_request(entity, user)
- if has_tabs
- expect(page).to have_content "Access requests 1"
- else
- expect(page).to have_content "Users requesting access to #{entity.name} 1"
- end
+ expect(page).to have_content "Access requests 1"
expect(page).to have_content user.name
end
diff --git a/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
index 44d82d2e753..2f8ebd0d264 100644
--- a/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
@@ -20,15 +20,25 @@ RSpec.shared_examples 'User creates wiki page' do
click_link "Create your first page"
end
- it "shows validation error message" do
+ it "shows validation error message if the form is force submitted", :js do
page.within(".wiki-form") do
fill_in(:wiki_content, with: "")
- click_on("Create page")
+ page.execute_script("window.onbeforeunload = null")
+ page.execute_script("document.querySelector('.wiki-form').submit()")
end
expect(page).to have_content("The form contains the following error:").and have_content("Content can't be blank")
+ end
+
+ it "disables the submit button", :js do
+ page.within(".wiki-form") do
+ fill_in(:wiki_content, with: "")
+ expect(page).to have_button('Create page', disabled: true)
+ end
+ end
+ it "makes sure links to unknown pages work correctly", :js do
page.within(".wiki-form") do
fill_in(:wiki_content, with: "[link test](test)")
@@ -42,7 +52,7 @@ RSpec.shared_examples 'User creates wiki page' do
expect(page).to have_content("Create New Page")
end
- it "shows non-escaped link in the pages list" do
+ it "shows non-escaped link in the pages list", :js do
fill_in(:wiki_title, with: "one/two/three-test")
page.within(".wiki-form") do
@@ -61,7 +71,7 @@ RSpec.shared_examples 'User creates wiki page' do
expect(page).to have_field("wiki[message]", with: "Create home")
end
- it "creates a page from the home page" do
+ it "creates a page from the home page", :js do
fill_in(:wiki_content, with: "[test](test)\n[GitLab API doc](api)\n[Rake tasks](raketasks)\n# Wiki header\n")
fill_in(:wiki_message, with: "Adding links to wiki")
@@ -79,7 +89,7 @@ RSpec.shared_examples 'User creates wiki page' do
expect(current_path).to eq(wiki_page_path(wiki, "test"))
- page.within(:css, ".nav-text") do
+ page.within(:css, ".wiki-page-header") do
expect(page).to have_content("Create New Page")
end
@@ -91,7 +101,7 @@ RSpec.shared_examples 'User creates wiki page' do
expect(current_path).to eq(wiki_page_path(wiki, "api"))
- page.within(:css, ".nav-text") do
+ page.within(:css, ".wiki-page-header") do
expect(page).to have_content("Create")
end
@@ -103,7 +113,7 @@ RSpec.shared_examples 'User creates wiki page' do
expect(current_path).to eq(wiki_page_path(wiki, "raketasks"))
- page.within(:css, ".nav-text") do
+ page.within(:css, ".wiki-page-header") do
expect(page).to have_content("Create")
end
end
@@ -142,7 +152,7 @@ RSpec.shared_examples 'User creates wiki page' do
end
end
- it 'creates a wiki page with Org markup', :aggregate_failures do
+ it 'creates a wiki page with Org markup', :aggregate_failures, :js do
org_content = <<~ORG
* Heading
** Subheading
@@ -170,7 +180,7 @@ RSpec.shared_examples 'User creates wiki page' do
visit wiki_path(wiki)
end
- context "via the `new wiki page` page" do
+ context "via the `new wiki page` page", :js do
it "creates a page with a single word" do
click_link("New page")
@@ -189,7 +199,7 @@ RSpec.shared_examples 'User creates wiki page' do
.and have_content("My awesome wiki!")
end
- it "creates a page with spaces in the name" do
+ it "creates a page with spaces in the name", :js do
click_link("New page")
page.within(".wiki-form") do
@@ -207,7 +217,7 @@ RSpec.shared_examples 'User creates wiki page' do
.and have_content("My awesome wiki!")
end
- it "creates a page with hyphens in the name" do
+ it "creates a page with hyphens in the name", :js do
click_link("New page")
page.within(".wiki-form") do
diff --git a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
index 3350e54a8a7..1e325535e81 100644
--- a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
@@ -90,9 +90,11 @@ RSpec.shared_examples 'User updates wiki page' do
expect(page).to have_field('wiki[message]', with: 'Update Wiki title')
end
- it 'shows a validation error message' do
+ it 'shows a validation error message if the form is force submitted', :js do
fill_in(:wiki_content, with: '')
- click_button('Save changes')
+
+ page.execute_script("window.onbeforeunload = null")
+ page.execute_script("document.querySelector('.wiki-form').submit()")
expect(page).to have_selector('.wiki-form')
expect(page).to have_content('Edit Page')
@@ -101,6 +103,13 @@ RSpec.shared_examples 'User updates wiki page' do
expect(find('textarea#wiki_content').value).to eq('')
end
+ it "disables the submit button", :js do
+ page.within(".wiki-form") do
+ fill_in(:wiki_content, with: "")
+ expect(page).to have_button('Save changes', disabled: true)
+ end
+ end
+
it 'shows the emoji autocompletion dropdown', :js do
find('#wiki_content').native.send_keys('')
fill_in(:wiki_content, with: ':')
@@ -108,7 +117,7 @@ RSpec.shared_examples 'User updates wiki page' do
expect(page).to have_selector('.atwho-view')
end
- it 'shows the error message' do
+ it 'shows the error message', :js do
wiki_page.update(content: 'Update') # rubocop:disable Rails/SaveBang
click_button('Save changes')
@@ -116,13 +125,17 @@ RSpec.shared_examples 'User updates wiki page' do
expect(page).to have_content('Someone edited the page the same time you did.')
end
- it 'updates a page' do
+ it 'updates a page', :js do
fill_in('Content', with: 'Updated Wiki Content')
click_on('Save changes')
expect(page).to have_content('Updated Wiki Content')
end
+ it 'focuses on the content field', :js do
+ expect(page).to have_selector '.note-textarea:focus'
+ end
+
it 'cancels editing of a page' do
page.within(:css, '.wiki-form .form-actions') do
click_on('Cancel')
@@ -143,7 +156,7 @@ RSpec.shared_examples 'User updates wiki page' do
visit wiki_page_path(wiki, wiki_page, action: :edit)
end
- it 'moves the page to the root folder' do
+ it 'moves the page to the root folder', :js do
fill_in(:wiki_title, with: "/#{page_name}")
click_button('Save changes')
@@ -151,7 +164,7 @@ RSpec.shared_examples 'User updates wiki page' do
expect(current_path).to eq(wiki_page_path(wiki, page_name))
end
- it 'moves the page to other dir' do
+ it 'moves the page to other dir', :js do
new_page_dir = "foo1/bar1/#{page_name}"
fill_in(:wiki_title, with: new_page_dir)
@@ -161,7 +174,7 @@ RSpec.shared_examples 'User updates wiki page' do
expect(current_path).to eq(wiki_page_path(wiki, new_page_dir))
end
- it 'remains in the same place if title has not changed' do
+ it 'remains in the same place if title has not changed', :js do
original_path = wiki_page_path(wiki, wiki_page)
fill_in(:wiki_title, with: page_name)
@@ -171,7 +184,7 @@ RSpec.shared_examples 'User updates wiki page' do
expect(current_path).to eq(original_path)
end
- it 'can be moved to a different dir with a different name' do
+ it 'can be moved to a different dir with a different name', :js do
new_page_dir = "foo1/bar1/new_page_name"
fill_in(:wiki_title, with: new_page_dir)
@@ -181,7 +194,7 @@ RSpec.shared_examples 'User updates wiki page' do
expect(current_path).to eq(wiki_page_path(wiki, new_page_dir))
end
- it 'can be renamed and moved to the root folder' do
+ it 'can be renamed and moved to the root folder', :js do
new_name = 'new_page_name'
fill_in(:wiki_title, with: "/#{new_name}")
@@ -191,7 +204,7 @@ RSpec.shared_examples 'User updates wiki page' do
expect(current_path).to eq(wiki_page_path(wiki, new_name))
end
- it 'squishes the title before creating the page' do
+ it 'squishes the title before creating the page', :js do
new_page_dir = " foo1 / bar1 / #{page_name} "
fill_in(:wiki_title, with: new_page_dir)
@@ -220,7 +233,7 @@ RSpec.shared_examples 'User updates wiki page' do
expect(page).to have_content('Wiki page was successfully updated.')
end
- it 'shows a validation error when trying to change the content' do
+ it 'shows a validation error when trying to change the content', :js do
fill_in 'Content', with: 'new content'
click_on 'Save changes'
diff --git a/spec/support/shared_examples/features/wiki/user_uses_wiki_shortcuts_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_uses_wiki_shortcuts_shared_examples.rb
index 759cfaf6b1f..857d923785f 100644
--- a/spec/support/shared_examples/features/wiki/user_uses_wiki_shortcuts_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_uses_wiki_shortcuts_shared_examples.rb
@@ -15,6 +15,6 @@ RSpec.shared_examples 'User uses wiki shortcuts' do
it 'visit edit wiki page using "e" keyboard shortcut', :js do
find('body').native.send_key('e')
- expect(find('.wiki-page-title')).to have_content('Edit Page')
+ expect(find('.page-title')).to have_content('Edit Page')
end
end
diff --git a/spec/support/shared_examples/features/wiki/user_views_wiki_empty_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_views_wiki_empty_shared_examples.rb
index d7f5b485a82..14180d503df 100644
--- a/spec/support/shared_examples/features/wiki/user_views_wiki_empty_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_views_wiki_empty_shared_examples.rb
@@ -53,7 +53,7 @@ RSpec.shared_examples 'User views empty wiki' do
if writable
element.click_link 'Create your first page'
- expect(page).to have_button('Create page')
+ expect(page).to have_button('Create page', disabled: true)
else
expect(element).not_to have_link('Create your first page')
end
diff --git a/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb
index af769be6d4b..61feeff57bb 100644
--- a/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb
@@ -44,7 +44,7 @@ RSpec.shared_examples 'User views a wiki page' do
expect(current_path).to include('one/two/three-test')
- page.within(:css, '.nav-text') do
+ page.within(:css, '.wiki-page-header') do
expect(page).to have_content('History')
end
end
@@ -69,7 +69,7 @@ RSpec.shared_examples 'User views a wiki page' do
click_on('Page history')
- within('.nav-text') do
+ within('.wiki-page-header') do
expect(page).to have_content('History')
end
diff --git a/spec/support/shared_examples/features/wiki/user_views_wiki_sidebar_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_views_wiki_sidebar_shared_examples.rb
index a7ba7a8ad07..639eb3f2b99 100644
--- a/spec/support/shared_examples/features/wiki/user_views_wiki_sidebar_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_views_wiki_sidebar_shared_examples.rb
@@ -17,23 +17,55 @@ RSpec.shared_examples 'User views wiki sidebar' do
create(:wiki_page, wiki: wiki, title: 'another', content: 'another')
end
- it 'renders a default sidebar when there is no customized sidebar' do
- visit wiki_path(wiki)
+ context 'when there is no custom sidebar' do
+ before do
+ visit wiki_path(wiki)
+ end
- expect(page).to have_content('another')
- expect(page).not_to have_link('View All Pages')
+ it 'renders a default sidebar' do
+ within('.right-sidebar') do
+ expect(page).to have_content('another')
+ expect(page).not_to have_link('View All Pages')
+ end
+ end
+
+ it 'can create a custom sidebar', :js do
+ click_on 'Edit sidebar'
+ fill_in :wiki_content, with: 'My custom sidebar'
+ click_on 'Create page'
+
+ within('.right-sidebar') do
+ expect(page).to have_content('My custom sidebar')
+ expect(page).not_to have_content('another')
+ end
+ end
end
- context 'when there is a customized sidebar' do
+ context 'when there is a custom sidebar' do
before do
- create(:wiki_page, wiki: wiki, title: '_sidebar', content: 'My customized sidebar')
- end
+ create(:wiki_page, wiki: wiki, title: '_sidebar', content: 'My custom sidebar')
- it 'renders my customized sidebar instead of the default one' do
visit wiki_path(wiki)
+ end
+
+ it 'renders the custom sidebar instead of the default one' do
+ within('.right-sidebar') do
+ expect(page).to have_content('My custom sidebar')
+ expect(page).not_to have_content('another')
+ end
+ end
+
+ it 'can edit the custom sidebar', :js do
+ click_on 'Edit sidebar'
+
+ expect(page).to have_field(:wiki_content, with: 'My custom sidebar')
+
+ fill_in :wiki_content, with: 'My other custom sidebar'
+ click_on 'Save changes'
- expect(page).to have_content('My customized sidebar')
- expect(page).not_to have_content('Another')
+ within('.right-sidebar') do
+ expect(page).to have_content('My other custom sidebar')
+ end
end
end
end
diff --git a/spec/support/shared_examples/finders/packages/debian/distributions_finder_shared_examples.rb b/spec/support/shared_examples/finders/packages/debian/distributions_finder_shared_examples.rb
new file mode 100644
index 00000000000..2700d29bf0e
--- /dev/null
+++ b/spec/support/shared_examples/finders/packages/debian/distributions_finder_shared_examples.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.shared_examples 'Debian Distributions Finder' do |factory, can_freeze|
+ let_it_be(:distribution_with_suite, freeze: can_freeze) { create(factory, suite: 'mysuite') }
+ let_it_be(:container) { distribution_with_suite.container }
+ let_it_be(:distribution_with_same_container, freeze: can_freeze) { create(factory, container: container ) }
+ let_it_be(:distribution_with_same_codename, freeze: can_freeze) { create(factory, codename: distribution_with_suite.codename ) }
+ let_it_be(:distribution_with_same_suite, freeze: can_freeze) { create(factory, suite: distribution_with_suite.suite ) }
+ let_it_be(:distribution_with_codename_and_suite_flipped, freeze: can_freeze) { create(factory, codename: distribution_with_suite.suite, suite: distribution_with_suite.codename) }
+
+ let(:params) { {} }
+ let(:service) { described_class.new(container, params) }
+
+ subject { service.execute.to_a }
+
+ context 'by codename' do
+ context 'with existing codename' do
+ let(:params) { { codename: distribution_with_suite.codename } }
+
+ it 'finds distributions by codename' do
+ is_expected.to contain_exactly(distribution_with_suite)
+ end
+ end
+
+ context 'with non-existing codename' do
+ let(:params) { { codename: 'does_not_exists' } }
+
+ it 'finds nothing' do
+ is_expected.to be_empty
+ end
+ end
+ end
+
+ context 'by suite' do
+ context 'with existing suite' do
+ let(:params) { { suite: 'mysuite' } }
+
+ it 'finds distribution by suite' do
+ is_expected.to contain_exactly(distribution_with_suite)
+ end
+ end
+
+ context 'with non-existing suite' do
+ let(:params) { { suite: 'does_not_exists' } }
+
+ it 'finds nothing' do
+ is_expected.to be_empty
+ end
+ end
+ end
+
+ context 'by codename_or_suite' do
+ context 'with existing codename' do
+ let(:params) { { codename_or_suite: distribution_with_suite.codename } }
+
+ it 'finds distribution by codename' do
+ is_expected.to contain_exactly(distribution_with_suite)
+ end
+ end
+
+ context 'with existing suite' do
+ let(:params) { { codename_or_suite: 'mysuite' } }
+
+ it 'finds distribution by suite' do
+ is_expected.to contain_exactly(distribution_with_suite)
+ end
+ end
+
+ context 'with non-existing suite' do
+ let(:params) { { codename_or_suite: 'does_not_exists' } }
+
+ it 'finds nothing' do
+ is_expected.to be_empty
+ end
+ end
+ end
+end
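
The examples expect `described_class` to be a finder taking `(container, params)` and are driven by a factory name plus a `can_freeze` flag. A hypothetical inclusion; the finder and factory names are assumptions based on the Debian factories added elsewhere in this commit:

    # spec/finders/packages/debian/distributions_finder_spec.rb (illustrative)
    RSpec.describe Packages::Debian::DistributionsFinder do
      it_behaves_like 'Debian Distributions Finder', :debian_project_distribution, true
      it_behaves_like 'Debian Distributions Finder', :debian_group_distribution, false
    end
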
diff --git a/spec/support/shared_examples/finders/packages_shared_examples.rb b/spec/support/shared_examples/finders/packages_shared_examples.rb
new file mode 100644
index 00000000000..52976565b21
--- /dev/null
+++ b/spec/support/shared_examples/finders/packages_shared_examples.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'concerning versionless param' do
+ let_it_be(:versionless_package) { create(:maven_package, project: project, version: nil) }
+
+ it { is_expected.not_to include(versionless_package) }
+
+ context 'with valid include_versionless param' do
+ let(:params) { { include_versionless: true } }
+
+ it { is_expected.to include(versionless_package) }
+ end
+
+ context 'with empty include_versionless param' do
+ let(:params) { { include_versionless: '' } }
+
+ it { is_expected.not_to include(versionless_package) }
+ end
+end
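
These examples assume the including spec defines `project`, a `params` let they can override, and a subject that runs a packages finder with those params. A minimal sketch; the finder class is an assumption:

    RSpec.describe ::Packages::PackagesFinder do
      let_it_be(:project) { create(:project) }

      let(:params) { {} }

      subject { described_class.new(project, params).execute }

      it_behaves_like 'concerning versionless param'
    end
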
diff --git a/spec/support/shared_examples/graphql/mutations/http_integrations_shared_examples.rb b/spec/support/shared_examples/graphql/mutations/http_integrations_shared_examples.rb
new file mode 100644
index 00000000000..0338eb43f8d
--- /dev/null
+++ b/spec/support/shared_examples/graphql/mutations/http_integrations_shared_examples.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'creating a new HTTP integration' do
+ it 'creates a new integration' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ new_integration = ::AlertManagement::HttpIntegration.last!
+ integration_response = mutation_response['integration']
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(integration_response['id']).to eq(GitlabSchema.id_from_object(new_integration).to_s)
+ expect(integration_response['type']).to eq('HTTP')
+ expect(integration_response['name']).to eq(new_integration.name)
+ expect(integration_response['active']).to eq(new_integration.active)
+ expect(integration_response['token']).to eq(new_integration.token)
+ expect(integration_response['url']).to eq(new_integration.url)
+ expect(integration_response['apiUrl']).to eq(nil)
+ end
+end
diff --git a/spec/support/shared_examples/graphql/projects/merge_request_n_plus_one_query_examples.rb b/spec/support/shared_examples/graphql/projects/merge_request_n_plus_one_query_examples.rb
index 397e22ace28..738edd43c92 100644
--- a/spec/support/shared_examples/graphql/projects/merge_request_n_plus_one_query_examples.rb
+++ b/spec/support/shared_examples/graphql/projects/merge_request_n_plus_one_query_examples.rb
@@ -2,10 +2,12 @@
shared_examples 'N+1 query check' do
it 'prevents N+1 queries' do
execute_query # "warm up" to prevent undeterministic counts
+ expect(graphql_errors).to be_blank # Sanity check - ex falso quodlibet!
- control_count = ActiveRecord::QueryRecorder.new { execute_query }.count
+ control = ActiveRecord::QueryRecorder.new { execute_query }
+ expect(control.count).to be > 0
search_params[:iids] << extra_iid_for_second_query
- expect { execute_query }.not_to exceed_query_limit(control_count)
+ expect { execute_query }.not_to exceed_query_limit(control)
end
end
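
Passing the whole `QueryRecorder` (rather than its bare count) to `exceed_query_limit` lets the matcher show the differing queries on failure, and the two added expectations guard against a control run that silently errored or issued no queries at all. The general shape of the pattern, with illustrative names:

    control = ActiveRecord::QueryRecorder.new { execute_query }  # warm control run
    expect(control.count).to be > 0                              # the control must do real work

    add_more_records                                             # e.g. create extra issues/MRs

    expect { execute_query }.not_to exceed_query_limit(control)  # no per-record query growth
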
diff --git a/spec/support/shared_examples/lib/banzai/filters/sanitization_filter_shared_examples.rb b/spec/support/shared_examples/lib/banzai/filters/sanitization_filter_shared_examples.rb
index 134e38833cf..b5c07f45d59 100644
--- a/spec/support/shared_examples/lib/banzai/filters/sanitization_filter_shared_examples.rb
+++ b/spec/support/shared_examples/lib/banzai/filters/sanitization_filter_shared_examples.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
-RSpec.shared_examples 'default whitelist' do
- it 'sanitizes tags that are not whitelisted' do
+RSpec.shared_examples 'default allowlist' do
+ it 'sanitizes tags that are not allowed' do
act = %q{<textarea>no inputs</textarea> and <blink>no blinks</blink>}
exp = 'no inputs and no blinks'
expect(filter(act).to_html).to eq exp
diff --git a/spec/support/shared_examples/lib/gitlab/cycle_analytics/base_stage_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/cycle_analytics/base_stage_shared_examples.rb
deleted file mode 100644
index d76089d56dd..00000000000
--- a/spec/support/shared_examples/lib/gitlab/cycle_analytics/base_stage_shared_examples.rb
+++ /dev/null
@@ -1,74 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-ISSUES_MEDIAN = 30.minutes.to_i
-
-RSpec.shared_examples 'base stage' do
- let(:stage) { described_class.new(options: { project: double }) }
-
- before do
- allow(stage).to receive(:project_median).and_return(1.12)
- allow_next_instance_of(Gitlab::CycleAnalytics::BaseEventFetcher) do |instance|
- allow(instance).to receive(:event_result).and_return({})
- end
- end
-
- it 'has the median data value' do
- expect(stage.as_json[:value]).not_to be_nil
- end
-
- it 'has the median data stage' do
- expect(stage.as_json[:title]).not_to be_nil
- end
-
- it 'has the median data description' do
- expect(stage.as_json[:description]).not_to be_nil
- end
-
- it 'has the title' do
- expect(stage.title).to eq(stage_name.to_s.capitalize)
- end
-
- it 'has the events' do
- expect(stage.events).not_to be_nil
- end
-end
-
-RSpec.shared_examples 'calculate #median with date range' do
- context 'when valid date range is given' do
- before do
- stage_options[:from] = 5.days.ago
- stage_options[:to] = 5.days.from_now
- end
-
- it { expect(stage.project_median).to eq(ISSUES_MEDIAN) }
- end
-
- context 'when records are out of the date range' do
- before do
- stage_options[:from] = 2.years.ago
- stage_options[:to] = 1.year.ago
- end
-
- it { expect(stage.project_median).to eq(nil) }
- end
-end
-
-RSpec.shared_examples 'Gitlab::Analytics::CycleAnalytics::DataCollector backend examples' do
- let(:stage_params) { Gitlab::Analytics::CycleAnalytics::DefaultStages.send("params_for_#{stage_name}_stage").merge(project: project) }
- let(:stage) { Analytics::CycleAnalytics::ProjectStage.new(stage_params) }
- let(:data_collector) { Gitlab::Analytics::CycleAnalytics::DataCollector.new(stage: stage, params: { from: stage_options[:from], current_user: project.creator }) }
- let(:attribute_to_verify) { :title }
-
- context 'provides the same results as the old implementation' do
- it 'for the median' do
- expect(data_collector.median.seconds).to be_within(0.5).of(ISSUES_MEDIAN)
- end
-
- it 'for the list of event records' do
- records = data_collector.records_fetcher.serialized_records
- expect(records.map { |event| event[attribute_to_verify] }).to eq(expected_ordered_attribute_values)
- end
- end
-end
diff --git a/spec/support/shared_examples/lib/gitlab/cycle_analytics/default_query_config_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/cycle_analytics/default_query_config_shared_examples.rb
deleted file mode 100644
index 4f648b27ea2..00000000000
--- a/spec/support/shared_examples/lib/gitlab/cycle_analytics/default_query_config_shared_examples.rb
+++ /dev/null
@@ -1,16 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.shared_examples 'default query config' do
- let(:project) { create(:project) }
- let(:event) { described_class.new(stage: stage_name, options: { from: 1.day.ago, project: project }) }
-
- it 'has the stage attribute' do
- expect(event.stage).not_to be_nil
- end
-
- it 'has the projection attributes' do
- expect(event.projections).not_to be_nil
- end
-end
diff --git a/spec/support/shared_examples/lib/gitlab/middleware/multipart_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/middleware/multipart_shared_examples.rb
index 6327367fcc2..40deaa27955 100644
--- a/spec/support/shared_examples/lib/gitlab/middleware/multipart_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/middleware/multipart_shared_examples.rb
@@ -5,7 +5,7 @@ RSpec.shared_examples 'handling all upload parameters conditions' do
include_context 'with one temporary file for multipart'
let(:rewritten_fields) { rewritten_fields_hash('file' => uploaded_filepath) }
- let(:params) { upload_parameters_for(filepath: uploaded_filepath, key: 'file', filename: filename, remote_id: remote_id) }
+ let(:params) { upload_parameters_for(filepath: uploaded_filepath, key: 'file', mode: mode, filename: filename, remote_id: remote_id) }
it 'builds an UploadedFile' do
expect_uploaded_files(filepath: uploaded_filepath, original_filename: filename, remote_id: remote_id, size: uploaded_file.size, params_path: %w(file))
@@ -19,8 +19,8 @@ RSpec.shared_examples 'handling all upload parameters conditions' do
let(:rewritten_fields) { rewritten_fields_hash('file1' => uploaded_filepath, 'file2' => uploaded_filepath2) }
let(:params) do
- upload_parameters_for(filepath: uploaded_filepath, key: 'file1', filename: filename, remote_id: remote_id).merge(
- upload_parameters_for(filepath: uploaded_filepath2, key: 'file2', filename: filename2, remote_id: remote_id2)
+ upload_parameters_for(filepath: uploaded_filepath, key: 'file1', mode: mode, filename: filename, remote_id: remote_id).merge(
+ upload_parameters_for(filepath: uploaded_filepath2, key: 'file2', mode: mode, filename: filename2, remote_id: remote_id2)
)
end
@@ -38,7 +38,7 @@ RSpec.shared_examples 'handling all upload parameters conditions' do
include_context 'with one temporary file for multipart'
let(:rewritten_fields) { rewritten_fields_hash('user[avatar]' => uploaded_filepath) }
- let(:params) { { 'user' => { 'avatar' => upload_parameters_for(filepath: uploaded_filepath, filename: filename, remote_id: remote_id) } } }
+ let(:params) { { 'user' => { 'avatar' => upload_parameters_for(filepath: uploaded_filepath, mode: mode, filename: filename, remote_id: remote_id) } } }
it 'builds an UploadedFile' do
expect_uploaded_files(filepath: uploaded_filepath, original_filename: filename, remote_id: remote_id, size: uploaded_file.size, params_path: %w(user avatar))
@@ -54,8 +54,8 @@ RSpec.shared_examples 'handling all upload parameters conditions' do
let(:params) do
{
'user' => {
- 'avatar' => upload_parameters_for(filepath: uploaded_filepath, filename: filename, remote_id: remote_id),
- 'screenshot' => upload_parameters_for(filepath: uploaded_filepath2, filename: filename2, remote_id: remote_id2)
+ 'avatar' => upload_parameters_for(filepath: uploaded_filepath, mode: mode, filename: filename, remote_id: remote_id),
+ 'screenshot' => upload_parameters_for(filepath: uploaded_filepath2, mode: mode, filename: filename2, remote_id: remote_id2)
}
}
end
@@ -74,7 +74,7 @@ RSpec.shared_examples 'handling all upload parameters conditions' do
include_context 'with one temporary file for multipart'
let(:rewritten_fields) { rewritten_fields_hash('user[avatar][bananas]' => uploaded_filepath) }
- let(:params) { { 'user' => { 'avatar' => { 'bananas' => upload_parameters_for(filepath: uploaded_filepath, filename: filename, remote_id: remote_id) } } } }
+ let(:params) { { 'user' => { 'avatar' => { 'bananas' => upload_parameters_for(filepath: uploaded_filepath, mode: mode, filename: filename, remote_id: remote_id) } } } }
it 'builds an UploadedFile' do
expect_uploaded_files(filepath: uploaded_file, original_filename: filename, remote_id: remote_id, size: uploaded_file.size, params_path: %w(user avatar bananas))
@@ -91,10 +91,10 @@ RSpec.shared_examples 'handling all upload parameters conditions' do
{
'user' => {
'avatar' => {
- 'bananas' => upload_parameters_for(filepath: uploaded_filepath, filename: filename, remote_id: remote_id)
+ 'bananas' => upload_parameters_for(filepath: uploaded_filepath, mode: mode, filename: filename, remote_id: remote_id)
},
'friend' => {
- 'ananas' => upload_parameters_for(filepath: uploaded_filepath2, filename: filename2, remote_id: remote_id2)
+ 'ananas' => upload_parameters_for(filepath: uploaded_filepath2, mode: mode, filename: filename2, remote_id: remote_id2)
}
}
}
@@ -122,11 +122,11 @@ RSpec.shared_examples 'handling all upload parameters conditions' do
end
let(:params) do
- upload_parameters_for(filepath: uploaded_filepath, filename: filename, key: 'file', remote_id: remote_id).merge(
+ upload_parameters_for(filepath: uploaded_filepath, filename: filename, key: 'file', mode: mode, remote_id: remote_id).merge(
'user' => {
- 'avatar' => upload_parameters_for(filepath: uploaded_filepath2, filename: filename2, remote_id: remote_id2),
+ 'avatar' => upload_parameters_for(filepath: uploaded_filepath2, mode: mode, filename: filename2, remote_id: remote_id2),
'friend' => {
- 'avatar' => upload_parameters_for(filepath: uploaded_filepath3, filename: filename3, remote_id: remote_id3)
+ 'avatar' => upload_parameters_for(filepath: uploaded_filepath3, mode: mode, filename: filename3, remote_id: remote_id3)
}
}
)
diff --git a/spec/support/shared_examples/lib/gitlab/project_search_results_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/project_search_results_shared_examples.rb
index 94ef41ce5a5..f83fecee4ea 100644
--- a/spec/support/shared_examples/lib/gitlab/project_search_results_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/project_search_results_shared_examples.rb
@@ -54,7 +54,7 @@ RSpec.shared_examples 'access restricted confidential issues' do
end
end
- context 'when the user is a developper' do
+ context 'when the user is a developer' do
let(:user) do
create(:user) { |user| project.add_developer(user) }
end
@@ -70,10 +70,19 @@ RSpec.shared_examples 'access restricted confidential issues' do
context 'when the user is admin', :request_store do
let(:user) { create(:user, admin: true) }
- it 'lists all project issues' do
- expect(objects).to contain_exactly(issue,
- security_issue_1,
- security_issue_2)
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'lists all project issues' do
+ expect(objects).to contain_exactly(issue,
+ security_issue_1,
+ security_issue_2)
+ end
+ end
+
+ context 'when admin mode is disabled' do
+ it 'does not list project confidential issues' do
+ expect(objects).to contain_exactly(issue)
+ expect(results.limited_issues_count).to eq 1
+ end
end
end
end
diff --git a/spec/support/shared_examples/lib/gitlab/usage_data_counters/incident_management_activity_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/usage_data_counters/incident_management_activity_shared_examples.rb
index 788c35dd5bf..88bc8e8d0c1 100644
--- a/spec/support/shared_examples/lib/gitlab/usage_data_counters/incident_management_activity_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/usage_data_counters/incident_management_activity_shared_examples.rb
@@ -13,7 +13,7 @@ RSpec.shared_examples 'an incident management tracked event' do |event|
expect(Gitlab::UsageDataCounters::HLLRedisCounter)
.to receive(:track_event)
- .with(current_user.id, event.to_s)
+ .with(event.to_s, values: current_user.id)
.and_call_original
expect { subject }
diff --git a/spec/support/shared_examples/metrics/sampler_shared_examples.rb b/spec/support/shared_examples/metrics/sampler_shared_examples.rb
new file mode 100644
index 00000000000..ebf199c3a8d
--- /dev/null
+++ b/spec/support/shared_examples/metrics/sampler_shared_examples.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'metrics sampler' do |env_prefix|
+ context 'when sampling interval is passed explicitly' do
+ subject { described_class.new(42) }
+
+ specify { expect(subject.interval).to eq(42) }
+ end
+
+ context 'when sampling interval is passed through the environment' do
+ subject { described_class.new }
+
+ before do
+ stub_env("#{env_prefix}_INTERVAL_SECONDS", '42')
+ end
+
+ specify { expect(subject.interval).to eq(42) }
+ end
+
+ context 'when no sampling interval is passed anywhere' do
+ subject { described_class.new }
+
+ it 'uses the hardcoded default' do
+ expect(subject.interval).to eq(described_class::DEFAULT_SAMPLING_INTERVAL_SECONDS)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/boards/listable_shared_examples.rb b/spec/support/shared_examples/models/boards/listable_shared_examples.rb
new file mode 100644
index 00000000000..e733a5488fb
--- /dev/null
+++ b/spec/support/shared_examples/models/boards/listable_shared_examples.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'boards listable model' do |list_factory|
+ subject { build(list_factory) }
+
+  describe 'validations' do
+ it { is_expected.to validate_presence_of(:position) }
+ it { is_expected.to validate_numericality_of(:position).only_integer.is_greater_than_or_equal_to(0) }
+
+ context 'when list_type is set to closed' do
+ subject { build(list_factory, list_type: :closed) }
+
+ it { is_expected.not_to validate_presence_of(:label) }
+ it { is_expected.not_to validate_presence_of(:position) }
+ end
+ end
+
+ describe 'scopes' do
+ describe '.ordered' do
+ it 'returns lists ordered by type and position' do
+ # rubocop:disable Rails/SaveBang
+ lists = [
+ create(list_factory, list_type: :backlog),
+ create(list_factory, list_type: :closed),
+ create(list_factory, position: 1),
+ create(list_factory, position: 2)
+ ]
+ # rubocop:enable Rails/SaveBang
+
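+        # Backlog list first, then label lists ordered by position, then the closed list.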
+ expect(described_class.where(id: lists).ordered).to eq([lists[0], lists[2], lists[3], lists[1]])
+ end
+ end
+ end
+
+ describe '#destroyable?' do
+ it 'returns true when list_type is set to label' do
+ subject.list_type = :label
+
+ expect(subject).to be_destroyable
+ end
+
+ it 'returns false when list_type is set to closed' do
+ subject.list_type = :closed
+
+ expect(subject).not_to be_destroyable
+ end
+ end
+
+ describe '#movable?' do
+ it 'returns true when list_type is set to label' do
+ subject.list_type = :label
+
+ expect(subject).to be_movable
+ end
+
+ it 'returns false when list_type is set to closed' do
+ subject.list_type = :closed
+
+ expect(subject).not_to be_movable
+ end
+ end
+
+ describe '#title' do
+ it 'returns label name when list_type is set to label' do
+ subject.list_type = :label
+ subject.label = Label.new(name: 'Development')
+
+ expect(subject.title).to eq 'Development'
+ end
+
+ it 'returns Open when list_type is set to backlog' do
+ subject.list_type = :backlog
+
+ expect(subject.title).to eq 'Open'
+ end
+
+ it 'returns Closed when list_type is set to closed' do
+ subject.list_type = :closed
+
+ expect(subject.title).to eq 'Closed'
+ end
+ end
+
+ describe '#destroy' do
+ it 'can be destroyed when list_type is set to label' do
+ subject = create(list_factory) # rubocop:disable Rails/SaveBang
+
+ expect(subject.destroy).to be_truthy
+ end
+
+    it 'cannot be destroyed when list_type is set to closed' do
+ subject = create(list_factory, list_type: :closed) # rubocop:disable Rails/SaveBang
+
+ expect(subject.destroy).to be_falsey
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/concerns/can_housekeep_repository_shared_examples.rb b/spec/support/shared_examples/models/concerns/can_housekeep_repository_shared_examples.rb
new file mode 100644
index 00000000000..2f0b95427d2
--- /dev/null
+++ b/spec/support/shared_examples/models/concerns/can_housekeep_repository_shared_examples.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'can housekeep repository' do
+ context 'with a clean redis state', :clean_gitlab_redis_shared_state do
+ describe '#pushes_since_gc' do
+ context 'without any pushes' do
+ it 'returns 0' do
+ expect(resource.pushes_since_gc).to eq(0)
+ end
+ end
+
+ context 'with a number of pushes' do
+ it 'returns the number of pushes' do
+ 3.times { resource.increment_pushes_since_gc }
+
+ expect(resource.pushes_since_gc).to eq(3)
+ end
+ end
+ end
+
+ describe '#increment_pushes_since_gc' do
+ it 'increments the number of pushes since the last GC' do
+ 3.times { resource.increment_pushes_since_gc }
+
+ expect(resource.pushes_since_gc).to eq(3)
+ end
+ end
+
+ describe '#reset_pushes_since_gc' do
+ it 'resets the number of pushes since the last GC' do
+ 3.times { resource.increment_pushes_since_gc }
+
+ resource.reset_pushes_since_gc
+
+ expect(resource.pushes_since_gc).to eq(0)
+ end
+ end
+
+ describe '#pushes_since_gc_redis_shared_state_key' do
+ it 'returns the proper redis key format' do
+ expect(resource.send(:pushes_since_gc_redis_shared_state_key)).to eq("#{resource_key}/#{resource.id}/pushes_since_gc")
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/concerns/repositories/can_housekeep_repository_shared_examples.rb b/spec/support/shared_examples/models/concerns/repositories/can_housekeep_repository_shared_examples.rb
new file mode 100644
index 00000000000..2f0b95427d2
--- /dev/null
+++ b/spec/support/shared_examples/models/concerns/repositories/can_housekeep_repository_shared_examples.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'can housekeep repository' do
+ context 'with a clean redis state', :clean_gitlab_redis_shared_state do
+ describe '#pushes_since_gc' do
+ context 'without any pushes' do
+ it 'returns 0' do
+ expect(resource.pushes_since_gc).to eq(0)
+ end
+ end
+
+ context 'with a number of pushes' do
+ it 'returns the number of pushes' do
+ 3.times { resource.increment_pushes_since_gc }
+
+ expect(resource.pushes_since_gc).to eq(3)
+ end
+ end
+ end
+
+ describe '#increment_pushes_since_gc' do
+ it 'increments the number of pushes since the last GC' do
+ 3.times { resource.increment_pushes_since_gc }
+
+ expect(resource.pushes_since_gc).to eq(3)
+ end
+ end
+
+ describe '#reset_pushes_since_gc' do
+ it 'resets the number of pushes since the last GC' do
+ 3.times { resource.increment_pushes_since_gc }
+
+ resource.reset_pushes_since_gc
+
+ expect(resource.pushes_since_gc).to eq(0)
+ end
+ end
+
+ describe '#pushes_since_gc_redis_shared_state_key' do
+ it 'returns the proper redis key format' do
+ expect(resource.send(:pushes_since_gc_redis_shared_state_key)).to eq("#{resource_key}/#{resource.id}/pushes_since_gc")
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb b/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb
index 5a8388d01df..4c617f3ba46 100644
--- a/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb
@@ -63,7 +63,6 @@ RSpec.shared_examples 'handles repository moves' do
context 'and transits to scheduled' do
it 'triggers the corresponding repository storage worker' do
- skip unless repository_storage_worker # TODO remove after https://gitlab.com/gitlab-org/gitlab/-/issues/218991 is implemented
expect(repository_storage_worker).to receive(:perform_async).with(container.id, 'test_second_storage', storage_move.id)
storage_move.schedule!
@@ -72,8 +71,7 @@ RSpec.shared_examples 'handles repository moves' do
end
context 'when the transition fails' do
- it 'does not trigger ProjectUpdateRepositoryStorageWorker and adds an error' do
- skip unless repository_storage_worker # TODO remove after https://gitlab.com/gitlab-org/gitlab/-/issues/218991 is implemented
+ it 'does not trigger the corresponding repository storage worker and adds an error' do
allow(storage_move.container).to receive(:set_repository_read_only!).and_raise(StandardError, 'foobar')
expect(repository_storage_worker).not_to receive(:perform_async)
diff --git a/spec/support/shared_examples/models/packages/debian/architecture_shared_examples.rb b/spec/support/shared_examples/models/packages/debian/architecture_shared_examples.rb
new file mode 100644
index 00000000000..38983f752f4
--- /dev/null
+++ b/spec/support/shared_examples/models/packages/debian/architecture_shared_examples.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.shared_examples 'Debian Distribution Architecture' do |factory, container, can_freeze|
+ let_it_be_with_refind(:architecture) { create(factory) } # rubocop:disable Rails/SaveBang
+ let_it_be(:architecture_same_distribution, freeze: can_freeze) { create(factory, distribution: architecture.distribution) }
+ let_it_be(:architecture_same_name, freeze: can_freeze) { create(factory, name: architecture.name) }
+
+ subject { architecture }
+
+ describe 'relationships' do
+ it { is_expected.to belong_to(:distribution).class_name("Packages::Debian::#{container.capitalize}Distribution").inverse_of(:architectures) }
+ end
+
+ describe 'validations' do
+ describe "#distribution" do
+ it { is_expected.to validate_presence_of(:distribution) }
+ end
+
+ describe '#name' do
+ it { is_expected.to validate_presence_of(:name) }
+
+ it { is_expected.to allow_value('amd64').for(:name) }
+ it { is_expected.to allow_value('kfreebsd-i386').for(:name) }
+ it { is_expected.not_to allow_value('-a').for(:name) }
+ it { is_expected.not_to allow_value('AMD64').for(:name) }
+ end
+ end
+
+ describe 'scopes' do
+ describe '.with_distribution' do
+ subject { described_class.with_distribution(architecture.distribution) }
+
+      it 'does not return architectures from other distributions' do
+ expect(subject.to_a).to eq([architecture, architecture_same_distribution])
+ end
+ end
+
+ describe '.with_name' do
+ subject { described_class.with_name(architecture.name) }
+
+      it 'does not return architectures with other names' do
+ expect(subject.to_a).to eq([architecture, architecture_same_name])
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb b/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb
new file mode 100644
index 00000000000..af87d30099f
--- /dev/null
+++ b/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb
@@ -0,0 +1,225 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.shared_examples 'Debian Distribution' do |factory, container, can_freeze|
+ let_it_be(:distribution_with_suite, freeze: can_freeze) { create(factory, suite: 'mysuite') }
+  let_it_be(:distribution_with_same_container, freeze: can_freeze) { create(factory, container: distribution_with_suite.container) }
+  let_it_be(:distribution_with_same_codename, freeze: can_freeze) { create(factory, codename: distribution_with_suite.codename) }
+  let_it_be(:distribution_with_same_suite, freeze: can_freeze) { create(factory, suite: distribution_with_suite.suite) }
+ let_it_be(:distribution_with_codename_and_suite_flipped, freeze: can_freeze) { create(factory, codename: distribution_with_suite.suite, suite: distribution_with_suite.codename) }
+
+  let_it_be_with_refind(:distribution) { create(factory, container: distribution_with_suite.container) }
+
+ subject { distribution }
+
+ describe 'relationships' do
+ it { is_expected.to belong_to(container) }
+ it { is_expected.to belong_to(:creator).class_name('User') }
+
+ it { is_expected.to have_many(:architectures).class_name("Packages::Debian::#{container.capitalize}Architecture").inverse_of(:distribution) }
+ end
+
+ describe 'validations' do
+ describe "##{container}" do
+ it { is_expected.to validate_presence_of(container) }
+ end
+
+ describe "#creator" do
+ it { is_expected.not_to validate_presence_of(:creator) }
+ end
+
+ describe '#codename' do
+ it { is_expected.to validate_presence_of(:codename) }
+
+ it { is_expected.to allow_value('buster').for(:codename) }
+ it { is_expected.to allow_value('buster-updates').for(:codename) }
+ it { is_expected.to allow_value('Debian10.5').for(:codename) }
+ it { is_expected.not_to allow_value('jessie/updates').for(:codename) }
+ it { is_expected.not_to allow_value('hé').for(:codename) }
+ end
+
+ describe '#suite' do
+ it { is_expected.to allow_value(nil).for(:suite) }
+ it { is_expected.to allow_value('testing').for(:suite) }
+ it { is_expected.not_to allow_value('hé').for(:suite) }
+ end
+
+ describe '#unique_debian_suite_and_codename' do
+ using RSpec::Parameterized::TableSyntax
+
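+      # :keep leaves the new distribution's attribute at its factory value;
+      # :codename and :suite copy the corresponding value from the existing distribution.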
+ where(:with_existing_suite, :suite, :codename, :errors) do
+ false | nil | :keep | nil
+ false | 'testing' | :keep | nil
+ false | nil | :codename | ["Codename has already been taken"]
+ false | :codename | :keep | ["Suite has already been taken as Codename"]
+ false | :codename | :codename | ["Codename has already been taken", "Suite has already been taken as Codename"]
+ true | nil | :keep | nil
+ true | 'testing' | :keep | nil
+ true | nil | :codename | ["Codename has already been taken"]
+ true | :codename | :keep | ["Suite has already been taken as Codename"]
+ true | :codename | :codename | ["Codename has already been taken", "Suite has already been taken as Codename"]
+ true | nil | :suite | ["Codename has already been taken as Suite"]
+ true | :suite | :keep | ["Suite has already been taken"]
+ true | :suite | :suite | ["Suite has already been taken", "Codename has already been taken as Suite"]
+ end
+
+ with_them do
+ context factory do
+ let(:new_distribution) { build(factory, container: distribution.container) }
+
+ before do
+ distribution.update_column(:suite, 'suite-' + distribution.codename) if with_existing_suite
+
+ if suite.is_a?(Symbol)
+ new_distribution.suite = distribution.send suite unless suite == :keep
+ else
+ new_distribution.suite = suite
+ end
+
+ if codename.is_a?(Symbol)
+ new_distribution.codename = distribution.send codename unless codename == :keep
+ else
+ new_distribution.codename = codename
+ end
+ end
+
+ it do
+ if errors
+ expect(new_distribution).not_to be_valid
+ expect(new_distribution.errors.to_a).to eq(errors)
+ else
+ expect(new_distribution).to be_valid
+ end
+ end
+ end
+ end
+ end
+
+ describe '#origin' do
+ it { is_expected.to allow_value(nil).for(:origin) }
+ it { is_expected.to allow_value('Debian').for(:origin) }
+ it { is_expected.not_to allow_value('hé').for(:origin) }
+ end
+
+ describe '#label' do
+ it { is_expected.to allow_value(nil).for(:label) }
+ it { is_expected.to allow_value('Debian').for(:label) }
+ it { is_expected.not_to allow_value('hé').for(:label) }
+ end
+
+ describe '#version' do
+ it { is_expected.to allow_value(nil).for(:version) }
+ it { is_expected.to allow_value('10.6').for(:version) }
+ it { is_expected.not_to allow_value('hé').for(:version) }
+ end
+
+ describe '#description' do
+ it { is_expected.to allow_value(nil).for(:description) }
+ it { is_expected.to allow_value('Debian 10.6 Released 26 September 2020').for(:description) }
+ it { is_expected.to allow_value('Hé !').for(:description) }
+ end
+
+ describe '#valid_time_duration_seconds' do
+ it { is_expected.to allow_value(nil).for(:valid_time_duration_seconds) }
+ it { is_expected.to allow_value(24.hours.to_i).for(:valid_time_duration_seconds) }
+ it { is_expected.not_to allow_value(12.hours.to_i).for(:valid_time_duration_seconds) }
+ end
+
+ describe '#signing_keys' do
+ it { is_expected.to validate_absence_of(:signing_keys) }
+ end
+
+ describe '#file' do
+ it { is_expected.not_to validate_presence_of(:file) }
+ end
+
+ describe '#file_store' do
+ it { is_expected.to validate_presence_of(:file_store) }
+ end
+
+ describe '#file_signature' do
+ it { is_expected.to validate_absence_of(:file_signature) }
+ end
+ end
+
+ describe 'scopes' do
+ describe '.with_container' do
+ subject { described_class.with_container(distribution_with_suite.container) }
+
+ it 'does not return other distributions' do
+ expect(subject).to match_array([distribution_with_suite, distribution, distribution_with_same_container])
+ end
+ end
+
+ describe '.with_codename' do
+ subject { described_class.with_codename(distribution_with_suite.codename) }
+
+ it 'does not return other distributions' do
+ expect(subject).to match_array([distribution_with_suite, distribution_with_same_codename])
+ end
+ end
+
+ describe '.with_suite' do
+ subject { described_class.with_suite(distribution_with_suite.suite) }
+
+ it 'does not return other distributions' do
+ expect(subject).to match_array([distribution_with_suite, distribution_with_same_suite])
+ end
+ end
+
+ describe '.with_codename_or_suite' do
+ describe 'passing codename' do
+ subject { described_class.with_codename_or_suite(distribution_with_suite.codename) }
+
+ it 'does not return other distributions' do
+ expect(subject.to_a).to eq([distribution_with_suite, distribution_with_same_codename, distribution_with_codename_and_suite_flipped])
+ end
+ end
+
+ describe 'passing suite' do
+ subject { described_class.with_codename_or_suite(distribution_with_suite.suite) }
+
+ it 'does not return other distributions' do
+ expect(subject.to_a).to eq([distribution_with_suite, distribution_with_same_suite, distribution_with_codename_and_suite_flipped])
+ end
+ end
+ end
+ end
+
+ describe '#needs_update?' do
+ subject { distribution.needs_update? }
+
+ context 'with new distribution' do
+ let(:distribution) { create(factory, container: distribution_with_suite.container) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'with file' do
+ context 'without valid_time_duration_seconds' do
+ let(:distribution) { create(factory, :with_file, container: distribution_with_suite.container) }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'with valid_time_duration_seconds' do
+ let(:distribution) { create(factory, :with_file, container: distribution_with_suite.container, valid_time_duration_seconds: 2.days.to_i) }
+
+ context 'when not yet expired' do
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when expired' do
+ it do
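+          # Evaluate the distribution (and its file) now, before travelling, so the 2-day validity window has lapsed 4 days later.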
+ distribution
+
+ travel_to(4.days.from_now) do
+ is_expected.to be_truthy
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/quick_actions/merge_request/rebase_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/merge_request/rebase_quick_action_shared_examples.rb
new file mode 100644
index 00000000000..28decb4011d
--- /dev/null
+++ b/spec/support/shared_examples/quick_actions/merge_request/rebase_quick_action_shared_examples.rb
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'rebase quick action' do
+ context 'when updating the description' do
+ before do
+ sign_in(user)
+ visit edit_project_merge_request_path(project, merge_request)
+ end
+
+ it 'rebases the MR', :sidekiq_inline do
+ fill_in('Description', with: '/rebase')
+ click_button('Save changes')
+
+ expect(page).not_to have_content('commit behind the target branch')
+ expect(merge_request.reload).not_to be_merged
+ end
+
+ it 'ignores /merge if /rebase is specified', :sidekiq_inline do
+ fill_in('Description', with: "/merge\n/rebase")
+ click_button('Save changes')
+
+ expect(page).not_to have_content('commit behind the target branch')
+ expect(merge_request.reload).not_to be_merged
+ end
+ end
+
+ context 'when creating a new note' do
+ context 'when the current user can rebase the MR' do
+ before do
+ sign_in(user)
+ visit project_merge_request_path(project, merge_request)
+ end
+
+      it 'rebases the MR', :sidekiq_inline do
+ add_note("/rebase")
+
+ expect(page).to have_content "Scheduled a rebase of branch #{merge_request.source_branch}."
+ end
+
+ context 'when the merge request is closed' do
+ before do
+ merge_request.close!
+ end
+
+ it 'does not rebase the MR', :sidekiq_inline do
+ add_note("/rebase")
+
+ expect(page).not_to have_content 'Scheduled a rebase'
+ end
+ end
+
+ context 'when a rebase is in progress', :sidekiq_inline, :clean_gitlab_redis_shared_state do
+ before do
+ jid = SecureRandom.hex
+ merge_request.update!(rebase_jid: jid)
+ Gitlab::SidekiqStatus.set(jid)
+ end
+
+ it 'tells the user a rebase is in progress' do
+ add_note('/rebase')
+
+ expect(page).to have_content 'A rebase is already in progress.'
+ expect(page).not_to have_content 'Scheduled a rebase'
+ end
+ end
+
+ context 'when there are conflicts in the merge request' do
+ let!(:merge_request) { create(:merge_request, source_project: project, target_project: project, source_branch: 'conflict-missing-side', target_branch: 'conflict-start', merge_status: :cannot_be_merged) }
+
+ it 'does not rebase the MR' do
+ add_note("/rebase")
+
+ expect(page).to have_content 'This merge request cannot be rebased while there are conflicts.'
+ end
+ end
+ end
+
+ context 'when the current user cannot rebase the MR' do
+ before do
+ project.add_guest(guest)
+ sign_in(guest)
+ visit project_merge_request_path(project, merge_request)
+ end
+
+ it 'does not rebase the MR' do
+ add_note("/rebase")
+
+ expect(page).not_to have_content 'Scheduled a rebase'
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/boards_shared_examples.rb b/spec/support/shared_examples/requests/api/boards_shared_examples.rb
index 0096aab55e3..8e8edd61ef9 100644
--- a/spec/support/shared_examples/requests/api/boards_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/boards_shared_examples.rb
@@ -44,16 +44,35 @@ RSpec.shared_examples 'group and project boards' do |route_definition, ee = fals
expect_schema_match_for(response, 'public_api/v4/boards', ee)
end
+ end
+ end
- describe "GET #{route_definition}/:board_id" do
- let(:url) { "#{root_url}/#{board.id}" }
+ describe "GET #{route_definition}/:board_id" do
+ let(:url) { "#{root_url}/#{board.id}" }
- it 'get a single board by id' do
- get api(url, user)
+  it 'gets a single board by id' do
+ get api(url, user)
- expect_schema_match_for(response, 'public_api/v4/board', ee)
- end
- end
+ expect_schema_match_for(response, 'public_api/v4/board', ee)
+ end
+ end
+
+ describe "PUT #{route_definition}/:board_id" do
+ let(:url) { "#{root_url}/#{board.id}" }
+
+ it 'updates the board name' do
+ put api(url, user), params: { name: 'changed board name' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['name']).to eq('changed board name')
+ end
+
+ it 'updates the issue board booleans' do
+ put api(url, user), params: { hide_backlog_list: true, hide_closed_list: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['hide_backlog_list']).to eq(true)
+ expect(json_response['hide_closed_list']).to eq(true)
end
end
diff --git a/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
index f55043fe64f..83ba72c12aa 100644
--- a/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
@@ -37,9 +37,9 @@ RSpec.shared_context 'Debian repository shared context' do |object_type|
let(:params) { workhorse_params }
let(:auth_headers) { {} }
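+  # JWT signed with Gitlab::Workhorse.secret, proving the request was routed through gitlab-workhorse.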
+ let(:workhorse_token) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
let(:workhorse_headers) do
if method == :put
- workhorse_token = JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256')
{ 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => workhorse_token }
else
{}
@@ -117,12 +117,13 @@ RSpec.shared_examples 'Debian project repository PUT request' do |user_role, add
and_body = body.nil? ? '' : ' and expected body'
if status == :created
- it 'creates package files' do
+ it 'creates package files', :aggregate_failures do
pending "Debian package creation not implemented"
expect { subject }
.to change { project.packages.debian.count }.by(1)
expect(response).to have_gitlab_http_status(status)
+ expect(response.media_type).to eq('text/plain')
unless body.nil?
expect(response.body).to eq(body)
@@ -130,7 +131,59 @@ RSpec.shared_examples 'Debian project repository PUT request' do |user_role, add
end
it_behaves_like 'a package tracking event', described_class.name, 'push_package'
else
- it "returns #{status}#{and_body}" do
+ it "returns #{status}#{and_body}", :aggregate_failures do
+ subject
+
+ expect(response).to have_gitlab_http_status(status)
+
+ unless body.nil?
+ expect(response.body).to eq(body)
+ end
+ end
+ end
+ end
+end
+
+RSpec.shared_examples 'Debian project repository PUT authorize request' do |user_role, add_member, status, body, is_authorize|
+ context "for user type #{user_role}" do
+ before do
+ project.send("add_#{user_role}", user) if add_member && user_role != :anonymous
+ end
+
+ and_body = body.nil? ? '' : ' and expected body'
+
+ if status == :created
+ it 'authorizes package file upload', :aggregate_failures do
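+      # A local TempPath with no RemoteObject means Workhorse buffers the upload on disk rather than using object storage direct upload.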
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ expect(json_response['TempPath']).to eq(Packages::PackageFileUploader.workhorse_local_upload_path)
+ expect(json_response['RemoteObject']).to be_nil
+ expect(json_response['MaximumSize']).to be_nil
+ end
+
+ context 'without a valid token' do
+ let(:workhorse_token) { 'invalid' }
+
+ it 'rejects request' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+    context 'when bypassing gitlab-workhorse' do
+ let(:workhorse_headers) { {} }
+
+ it 'rejects request' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ else
+ it "returns #{status}#{and_body}", :aggregate_failures do
subject
expect(response).to have_gitlab_http_status(status)
@@ -194,7 +247,7 @@ RSpec.shared_examples 'Debian project repository GET endpoint' do |success_statu
it_behaves_like 'rejects Debian access with unknown project id'
end
-RSpec.shared_examples 'Debian project repository PUT endpoint' do |success_status, success_body|
+RSpec.shared_examples 'Debian project repository PUT endpoint' do |success_status, success_body, is_authorize = false|
context 'with valid project' do
using RSpec::Parameterized::TableSyntax
@@ -221,7 +274,13 @@ RSpec.shared_examples 'Debian project repository PUT endpoint' do |success_statu
with_them do
include_context 'Debian repository project access', params[:project_visibility_level], params[:user_role], params[:user_token], :basic do
- it_behaves_like 'Debian project repository PUT request', params[:user_role], params[:member], params[:expected_status], params[:expected_body]
+ desired_behavior = if is_authorize
+ 'Debian project repository PUT authorize request'
+ else
+ 'Debian project repository PUT request'
+ end
+
+ it_behaves_like desired_behavior, params[:user_role], params[:member], params[:expected_status], params[:expected_body]
end
end
end
diff --git a/spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb b/spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb
index f808d12baf4..7b7d2a33e8c 100644
--- a/spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb
@@ -1,31 +1,31 @@
# frozen_string_literal: true
-RSpec.shared_examples 'handling nuget service requests' do
+RSpec.shared_examples 'handling nuget service requests' do |anonymous_requests_example_name: 'process nuget service index request', anonymous_requests_status: :success|
subject { get api(url) }
- context 'with valid project' do
+ context 'with valid target' do
using RSpec::Parameterized::TableSyntax
context 'personal token' do
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'process nuget service index request' | :success
- 'PUBLIC' | :guest | true | true | 'process nuget service index request' | :success
- 'PUBLIC' | :developer | true | false | 'process nuget service index request' | :success
- 'PUBLIC' | :guest | true | false | 'process nuget service index request' | :success
- 'PUBLIC' | :developer | false | true | 'process nuget service index request' | :success
- 'PUBLIC' | :guest | false | true | 'process nuget service index request' | :success
- 'PUBLIC' | :developer | false | false | 'process nuget service index request' | :success
- 'PUBLIC' | :guest | false | false | 'process nuget service index request' | :success
- 'PUBLIC' | :anonymous | false | true | 'process nuget service index request' | :success
- 'PRIVATE' | :developer | true | true | 'process nuget service index request' | :success
- 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
- 'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :guest | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
+ where(:visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'process nuget service index request' | :success
+ 'PUBLIC' | :guest | true | true | 'process nuget service index request' | :success
+ 'PUBLIC' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :developer | false | true | 'process nuget service index request' | :success
+ 'PUBLIC' | :guest | false | true | 'process nuget service index request' | :success
+ 'PUBLIC' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :anonymous | false | true | anonymous_requests_example_name | anonymous_requests_status
+ 'PRIVATE' | :developer | true | true | 'process nuget service index request' | :success
+ 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
+ 'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :guest | false | true | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
end
with_them do
@@ -35,7 +35,7 @@ RSpec.shared_examples 'handling nuget service requests' do
subject { get api(url), headers: headers }
before do
- project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
+ update_visibility_to(Gitlab::VisibilityLevel.const_get(visibility_level, false))
end
it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
@@ -43,7 +43,7 @@ RSpec.shared_examples 'handling nuget service requests' do
end
context 'with job token' do
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ where(:visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
'PUBLIC' | :developer | true | true | 'process nuget service index request' | :success
'PUBLIC' | :guest | true | true | 'process nuget service index request' | :success
'PUBLIC' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
@@ -52,7 +52,7 @@ RSpec.shared_examples 'handling nuget service requests' do
'PUBLIC' | :guest | false | true | 'process nuget service index request' | :success
'PUBLIC' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :anonymous | false | true | 'process nuget service index request' | :success
+ 'PUBLIC' | :anonymous | false | true | anonymous_requests_example_name | anonymous_requests_status
'PRIVATE' | :developer | true | true | 'process nuget service index request' | :success
'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
@@ -71,7 +71,7 @@ RSpec.shared_examples 'handling nuget service requests' do
subject { get api(url), headers: headers }
before do
- project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
+ update_visibility_to(Gitlab::VisibilityLevel.const_get(visibility_level, false))
end
it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
@@ -79,14 +79,18 @@ RSpec.shared_examples 'handling nuget service requests' do
end
end
- it_behaves_like 'deploy token for package GET requests'
+ it_behaves_like 'deploy token for package GET requests' do
+ before do
+ update_visibility_to(Gitlab::VisibilityLevel::PRIVATE)
+ end
+ end
- it_behaves_like 'rejects nuget access with unknown project id'
+ it_behaves_like 'rejects nuget access with unknown target id'
- it_behaves_like 'rejects nuget access with invalid project id'
+ it_behaves_like 'rejects nuget access with invalid target id'
end
-RSpec.shared_examples 'handling nuget metadata requests with package name' do
+RSpec.shared_examples 'handling nuget metadata requests with package name' do |anonymous_requests_example_name: 'process nuget metadata request at package name level', anonymous_requests_status: :success|
include_context 'with expected presenters dependency groups'
let_it_be(:package_name) { 'Dummy.Package' }
@@ -99,19 +103,19 @@ RSpec.shared_examples 'handling nuget metadata requests with package name' do
packages.each { |pkg| create_dependencies_for(pkg) }
end
- context 'with valid project' do
+ context 'with valid target' do
using RSpec::Parameterized::TableSyntax
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ where(:visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
'PUBLIC' | :developer | true | true | 'process nuget metadata request at package name level' | :success
'PUBLIC' | :guest | true | true | 'process nuget metadata request at package name level' | :success
- 'PUBLIC' | :developer | true | false | 'process nuget metadata request at package name level' | :success
- 'PUBLIC' | :guest | true | false | 'process nuget metadata request at package name level' | :success
+ 'PUBLIC' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
'PUBLIC' | :developer | false | true | 'process nuget metadata request at package name level' | :success
'PUBLIC' | :guest | false | true | 'process nuget metadata request at package name level' | :success
- 'PUBLIC' | :developer | false | false | 'process nuget metadata request at package name level' | :success
- 'PUBLIC' | :guest | false | false | 'process nuget metadata request at package name level' | :success
- 'PUBLIC' | :anonymous | false | true | 'process nuget metadata request at package name level' | :success
+ 'PUBLIC' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :anonymous | false | true | anonymous_requests_example_name | anonymous_requests_status
'PRIVATE' | :developer | true | true | 'process nuget metadata request at package name level' | :success
'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
@@ -130,21 +134,25 @@ RSpec.shared_examples 'handling nuget metadata requests with package name' do
subject { get api(url), headers: headers }
before do
- project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
+ update_visibility_to(Gitlab::VisibilityLevel.const_get(visibility_level, false))
end
it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
- it_behaves_like 'deploy token for package GET requests'
+ it_behaves_like 'deploy token for package GET requests' do
+ before do
+ update_visibility_to(Gitlab::VisibilityLevel::PRIVATE)
+ end
+ end
- it_behaves_like 'rejects nuget access with unknown project id'
+ it_behaves_like 'rejects nuget access with unknown target id'
- it_behaves_like 'rejects nuget access with invalid project id'
+ it_behaves_like 'rejects nuget access with invalid target id'
end
end
-RSpec.shared_examples 'handling nuget metadata requests with package name and package version' do
+RSpec.shared_examples 'handling nuget metadata requests with package name and package version' do |anonymous_requests_example_name: 'process nuget metadata request at package name and package version level', anonymous_requests_status: :success|
include_context 'with expected presenters dependency groups'
let_it_be(:package_name) { 'Dummy.Package' }
@@ -157,19 +165,19 @@ RSpec.shared_examples 'handling nuget metadata requests with package name and pa
create_dependencies_for(package)
end
- context 'with valid project' do
+ context 'with valid target' do
using RSpec::Parameterized::TableSyntax
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ where(:visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
'PUBLIC' | :developer | true | true | 'process nuget metadata request at package name and package version level' | :success
'PUBLIC' | :guest | true | true | 'process nuget metadata request at package name and package version level' | :success
- 'PUBLIC' | :developer | true | false | 'process nuget metadata request at package name and package version level' | :success
- 'PUBLIC' | :guest | true | false | 'process nuget metadata request at package name and package version level' | :success
+ 'PUBLIC' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
'PUBLIC' | :developer | false | true | 'process nuget metadata request at package name and package version level' | :success
'PUBLIC' | :guest | false | true | 'process nuget metadata request at package name and package version level' | :success
- 'PUBLIC' | :developer | false | false | 'process nuget metadata request at package name and package version level' | :success
- 'PUBLIC' | :guest | false | false | 'process nuget metadata request at package name and package version level' | :success
- 'PUBLIC' | :anonymous | false | true | 'process nuget metadata request at package name and package version level' | :success
+ 'PUBLIC' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :anonymous | false | true | anonymous_requests_example_name | anonymous_requests_status
'PRIVATE' | :developer | true | true | 'process nuget metadata request at package name and package version level' | :success
'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
@@ -188,23 +196,25 @@ RSpec.shared_examples 'handling nuget metadata requests with package name and pa
subject { get api(url), headers: headers }
before do
- project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
+ update_visibility_to(Gitlab::VisibilityLevel.const_get(visibility_level, false))
end
it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
end
- it_behaves_like 'deploy token for package GET requests'
+ it_behaves_like 'deploy token for package GET requests' do
+ before do
+ update_visibility_to(Gitlab::VisibilityLevel::PRIVATE)
+ end
+ end
- context 'with invalid package name' do
- let_it_be(:package_name) { 'Unkown' }
+ it_behaves_like 'rejects nuget access with unknown target id'
- it_behaves_like 'rejects nuget packages access', :developer, :not_found
- end
+ it_behaves_like 'rejects nuget access with invalid target id'
end
-RSpec.shared_examples 'handling nuget search requests' do
+RSpec.shared_examples 'handling nuget search requests' do |anonymous_requests_example_name: 'process nuget search request', anonymous_requests_status: :success|
let_it_be(:package_a) { create(:nuget_package, :with_metadatum, name: 'Dummy.PackageA', project: project) }
let_it_be(:tag) { create(:packages_tag, package: package_a, name: 'test') }
let_it_be(:packages_b) { create_list(:nuget_package, 5, name: 'Dummy.PackageB', project: project) }
@@ -219,19 +229,19 @@ RSpec.shared_examples 'handling nuget search requests' do
subject { get api(url) }
- context 'with valid project' do
+ context 'with valid target' do
using RSpec::Parameterized::TableSyntax
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ where(:visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
'PUBLIC' | :developer | true | true | 'process nuget search request' | :success
'PUBLIC' | :guest | true | true | 'process nuget search request' | :success
- 'PUBLIC' | :developer | true | false | 'process nuget search request' | :success
- 'PUBLIC' | :guest | true | false | 'process nuget search request' | :success
+ 'PUBLIC' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
'PUBLIC' | :developer | false | true | 'process nuget search request' | :success
'PUBLIC' | :guest | false | true | 'process nuget search request' | :success
- 'PUBLIC' | :developer | false | false | 'process nuget search request' | :success
- 'PUBLIC' | :guest | false | false | 'process nuget search request' | :success
- 'PUBLIC' | :anonymous | false | true | 'process nuget search request' | :success
+ 'PUBLIC' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :anonymous | false | true | anonymous_requests_example_name | anonymous_requests_status
'PRIVATE' | :developer | true | true | 'process nuget search request' | :success
'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
@@ -250,16 +260,20 @@ RSpec.shared_examples 'handling nuget search requests' do
subject { get api(url), headers: headers }
before do
- project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
+ update_visibility_to(Gitlab::VisibilityLevel.const_get(visibility_level, false))
end
it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
end
- it_behaves_like 'deploy token for package GET requests'
+ it_behaves_like 'deploy token for package GET requests' do
+ before do
+ update_visibility_to(Gitlab::VisibilityLevel::PRIVATE)
+ end
+ end
- it_behaves_like 'rejects nuget access with unknown project id'
+ it_behaves_like 'rejects nuget access with unknown target id'
- it_behaves_like 'rejects nuget access with invalid project id'
+ it_behaves_like 'rejects nuget access with invalid target id'
end
diff --git a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
index dc6ac5f0371..8b60857cdaf 100644
--- a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
@@ -3,7 +3,7 @@
RSpec.shared_examples 'rejects nuget packages access' do |user_type, status, add_member = true|
context "for user type #{user_type}" do
before do
- project.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ target.send("add_#{user_type}", user) if add_member && user_type != :anonymous
end
it_behaves_like 'returning response status', status
@@ -21,7 +21,7 @@ end
RSpec.shared_examples 'process nuget service index request' do |user_type, status, add_member = true|
context "for user type #{user_type}" do
before do
- project.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ target.send("add_#{user_type}", user) if add_member && user_type != :anonymous
end
it_behaves_like 'returning response status', status
@@ -37,7 +37,7 @@ RSpec.shared_examples 'process nuget service index request' do |user_type, statu
end
context 'with invalid format' do
- let(:url) { "/projects/#{project.id}/packages/nuget/index.xls" }
+ let(:url) { "/#{target_type}/#{target.id}/packages/nuget/index.xls" }
it_behaves_like 'rejects nuget packages access', :anonymous, :not_found
end
@@ -57,7 +57,7 @@ end
RSpec.shared_examples 'process nuget metadata request at package name level' do |user_type, status, add_member = true|
context "for user type #{user_type}" do
before do
- project.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ target.send("add_#{user_type}", user) if add_member && user_type != :anonymous
end
it_behaves_like 'returning response status', status
@@ -65,7 +65,7 @@ RSpec.shared_examples 'process nuget metadata request at package name level' do
it_behaves_like 'returning nuget metadata json response with json schema', 'public_api/v4/packages/nuget/packages_metadata'
context 'with invalid format' do
- let(:url) { "/projects/#{project.id}/packages/nuget/metadata/#{package_name}/index.xls" }
+ let(:url) { "/#{target_type}/#{target.id}/packages/nuget/metadata/#{package_name}/index.xls" }
it_behaves_like 'rejects nuget packages access', :anonymous, :not_found
end
@@ -83,7 +83,7 @@ end
RSpec.shared_examples 'process nuget metadata request at package name and package version level' do |user_type, status, add_member = true|
context "for user type #{user_type}" do
before do
- project.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ target.send("add_#{user_type}", user) if add_member && user_type != :anonymous
end
it_behaves_like 'returning response status', status
@@ -91,7 +91,7 @@ RSpec.shared_examples 'process nuget metadata request at package name and packag
it_behaves_like 'returning nuget metadata json response with json schema', 'public_api/v4/packages/nuget/package_metadata'
context 'with invalid format' do
- let(:url) { "/projects/#{project.id}/packages/nuget/metadata/#{package_name}/#{package.version}.xls" }
+ let(:url) { "/#{target_type}/#{target.id}/packages/nuget/metadata/#{package_name}/#{package.version}.xls" }
it_behaves_like 'rejects nuget packages access', :anonymous, :not_found
end
@@ -109,7 +109,7 @@ end
RSpec.shared_examples 'process nuget workhorse authorization' do |user_type, status, add_member = true|
context "for user type #{user_type}" do
before do
- project.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ target.send("add_#{user_type}", user) if add_member && user_type != :anonymous
end
it_behaves_like 'returning response status', status
@@ -128,7 +128,7 @@ RSpec.shared_examples 'process nuget workhorse authorization' do |user_type, sta
end
before do
- project.add_maintainer(user)
+ target.add_maintainer(user)
end
it_behaves_like 'returning response status', :forbidden
@@ -141,18 +141,18 @@ RSpec.shared_examples 'process nuget upload' do |user_type, status, add_member =
it 'creates package files' do
expect(::Packages::Nuget::ExtractionWorker).to receive(:perform_async).once
expect { subject }
- .to change { project.packages.count }.by(1)
+ .to change { target.packages.count }.by(1)
.and change { Packages::PackageFile.count }.by(1)
expect(response).to have_gitlab_http_status(status)
- package_file = project.packages.last.package_files.reload.last
+ package_file = target.packages.last.package_files.reload.last
expect(package_file.file_name).to eq('package.nupkg')
end
end
context "for user type #{user_type}" do
before do
- project.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ target.send("add_#{user_type}", user) if add_member && user_type != :anonymous
end
context 'with object storage disabled' do
@@ -206,7 +206,7 @@ RSpec.shared_examples 'process nuget upload' do |user_type, status, add_member =
context 'with crafted package.path param' do
let(:crafted_file) { Tempfile.new('nuget.crafted.package.path') }
- let(:url) { "/projects/#{project.id}/packages/nuget?package.path=#{crafted_file.path}" }
+ let(:url) { "/#{target_type}/#{target.id}/packages/nuget?package.path=#{crafted_file.path}" }
let(:params) { { file: temp_file(file_name) } }
let(:file_key) { :file }
@@ -255,7 +255,7 @@ RSpec.shared_examples 'process nuget download versions request' do |user_type, s
context "for user type #{user_type}" do
before do
- project.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ target.send("add_#{user_type}", user) if add_member && user_type != :anonymous
end
it_behaves_like 'returning response status', status
@@ -263,7 +263,7 @@ RSpec.shared_examples 'process nuget download versions request' do |user_type, s
it_behaves_like 'returns a valid nuget download versions json response'
context 'with invalid format' do
- let(:url) { "/projects/#{project.id}/packages/nuget/download/#{package_name}/index.xls" }
+ let(:url) { "/#{target_type}/#{target.id}/packages/nuget/download/#{package_name}/index.xls" }
it_behaves_like 'rejects nuget packages access', :anonymous, :not_found
end
@@ -281,7 +281,7 @@ end
RSpec.shared_examples 'process nuget download content request' do |user_type, status, add_member = true|
context "for user type #{user_type}" do
before do
- project.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ target.send("add_#{user_type}", user) if add_member && user_type != :anonymous
end
it_behaves_like 'returning response status', status
@@ -295,7 +295,7 @@ RSpec.shared_examples 'process nuget download content request' do |user_type, st
end
context 'with invalid format' do
- let(:url) { "/projects/#{project.id}/packages/nuget/download/#{package.name}/#{package.version}/#{package.name}.#{package.version}.xls" }
+ let(:url) { "/#{target_type}/#{target.id}/packages/nuget/download/#{package.name}/#{package.version}/#{package.name}.#{package.version}.xls" }
it_behaves_like 'rejects nuget packages access', :anonymous, :not_found
end
@@ -331,7 +331,7 @@ RSpec.shared_examples 'process nuget search request' do |user_type, status, add_
context "for user type #{user_type}" do
before do
- project.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ target.send("add_#{user_type}", user) if add_member && user_type != :anonymous
end
it_behaves_like 'returns a valid json search response', status, 4, [1, 5, 5, 1]
@@ -370,20 +370,20 @@ RSpec.shared_examples 'process nuget search request' do |user_type, status, add_
end
end
-RSpec.shared_examples 'rejects nuget access with invalid project id' do
- context 'with a project id with invalid integers' do
+RSpec.shared_examples 'rejects nuget access with invalid target id' do
+ context 'with a target id with invalid integers' do
using RSpec::Parameterized::TableSyntax
- let(:project) { OpenStruct.new(id: id) }
+ let(:target) { OpenStruct.new(id: id) }
where(:id, :status) do
- '/../' | :unauthorized
+ '/../' | :bad_request
'' | :not_found
- '%20' | :unauthorized
- '%2e%2e%2f' | :unauthorized
- 'NaN' | :unauthorized
+ '%20' | :bad_request
+ '%2e%2e%2f' | :bad_request
+ 'NaN' | :bad_request
00002345 | :unauthorized
- 'anything25' | :unauthorized
+ 'anything25' | :bad_request
end
with_them do
@@ -392,9 +392,9 @@ RSpec.shared_examples 'rejects nuget access with invalid project id' do
end
end
-RSpec.shared_examples 'rejects nuget access with unknown project id' do
- context 'with an unknown project' do
- let(:project) { OpenStruct.new(id: 1234567890) }
+RSpec.shared_examples 'rejects nuget access with unknown target id' do
+ context 'with an unknown target' do
+ let(:target) { OpenStruct.new(id: 1234567890) }
context 'as anonymous' do
it_behaves_like 'rejects nuget packages access', :anonymous, :unauthorized
diff --git a/spec/support/shared_examples/requests/api/repository_storage_moves_shared_examples.rb b/spec/support/shared_examples/requests/api/repository_storage_moves_shared_examples.rb
new file mode 100644
index 00000000000..b2970fd265d
--- /dev/null
+++ b/spec/support/shared_examples/requests/api/repository_storage_moves_shared_examples.rb
@@ -0,0 +1,219 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'repository_storage_moves API' do |container_type|
+ include AccessMatchersForRequest
+
+ let_it_be(:user) { create(:admin) }
+
+ shared_examples 'get single container repository storage move' do
+ let(:repository_storage_move_id) { storage_move.id }
+
+ def get_container_repository_storage_move
+ get api(url, user)
+ end
+
+ it 'returns a container repository storage move', :aggregate_failures do
+ get_container_repository_storage_move
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema("public_api/v4/#{container_type.singularize}_repository_storage_move")
+ expect(json_response['id']).to eq(storage_move.id)
+ expect(json_response['state']).to eq(storage_move.human_state_name)
+ end
+
+ context 'non-existent container repository storage move' do
+ let(:repository_storage_move_id) { non_existing_record_id }
+
+ it 'returns not found' do
+ get_container_repository_storage_move
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ describe 'permissions' do
+ it { expect { get_container_repository_storage_move }.to be_allowed_for(:admin) }
+ it { expect { get_container_repository_storage_move }.to be_denied_for(:user) }
+ end
+ end
+
+ shared_examples 'get container repository storage move list' do
+ def get_container_repository_storage_moves
+ get api(url, user)
+ end
+
+ it 'returns container repository storage moves', :aggregate_failures do
+ get_container_repository_storage_moves
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(response).to match_response_schema("public_api/v4/#{container_type.singularize}_repository_storage_moves")
+ expect(json_response.size).to eq(1)
+ expect(json_response.first['id']).to eq(storage_move.id)
+ expect(json_response.first['state']).to eq(storage_move.human_state_name)
+ end
+
+ it 'avoids N+1 queries', :request_store do
+ # prevent `let` from polluting the control
+ get_container_repository_storage_moves
+
+ control = ActiveRecord::QueryRecorder.new { get_container_repository_storage_moves }
+
+ create(repository_storage_move_factory, :scheduled, container: container)
+
+ expect { get_container_repository_storage_moves }.not_to exceed_query_limit(control)
+ end
+
+ it 'returns the most recently created first' do
+ storage_move_oldest = create(repository_storage_move_factory, :scheduled, container: container, created_at: 2.days.ago)
+ storage_move_middle = create(repository_storage_move_factory, :scheduled, container: container, created_at: 1.day.ago)
+
+ get_container_repository_storage_moves
+
+ json_ids = json_response.map {|storage_move| storage_move['id'] }
+ expect(json_ids).to eq([
+ storage_move.id,
+ storage_move_middle.id,
+ storage_move_oldest.id
+ ])
+ end
+
+ describe 'permissions' do
+ it { expect { get_container_repository_storage_moves }.to be_allowed_for(:admin) }
+ it { expect { get_container_repository_storage_moves }.to be_denied_for(:user) }
+ end
+ end
+
+ describe "GET /#{container_type}/:id/repository_storage_moves" do
+ it_behaves_like 'get container repository storage move list' do
+ let(:url) { "/#{container_type}/#{container.id}/repository_storage_moves" }
+ end
+ end
+
+ describe "GET /#{container_type}/:id/repository_storage_moves/:repository_storage_move_id" do
+ it_behaves_like 'get single container repository storage move' do
+ let(:url) { "/#{container_type}/#{container.id}/repository_storage_moves/#{repository_storage_move_id}" }
+ end
+ end
+
+ describe "GET /#{container_type.singularize}_repository_storage_moves" do
+ it_behaves_like 'get container repository storage move list' do
+ let(:url) { "/#{container_type.singularize}_repository_storage_moves" }
+ end
+ end
+
+ describe "GET /#{container_type.singularize}_repository_storage_moves/:repository_storage_move_id" do
+ it_behaves_like 'get single container repository storage move' do
+ let(:url) { "/#{container_type.singularize}_repository_storage_moves/#{repository_storage_move_id}" }
+ end
+ end
+
+ describe "POST /#{container_type}/:id/repository_storage_moves" do
+ let(:url) { "/#{container_type}/#{container.id}/repository_storage_moves" }
+ let(:destination_storage_name) { 'test_second_storage' }
+
+ def create_container_repository_storage_move
+ post api(url, user), params: { destination_storage_name: destination_storage_name }
+ end
+
+ before do
+ stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/extra_storage' })
+ end
+
+ it 'schedules a container repository storage move', :aggregate_failures do
+ create_container_repository_storage_move
+
+ storage_move = container.repository_storage_moves.last
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response).to match_response_schema("public_api/v4/#{container_type.singularize}_repository_storage_move")
+ expect(json_response['id']).to eq(storage_move.id)
+ expect(json_response['state']).to eq('scheduled')
+ expect(json_response['source_storage_name']).to eq('default')
+ expect(json_response['destination_storage_name']).to eq(destination_storage_name)
+ end
+
+ describe 'permissions' do
+ it { expect { create_container_repository_storage_move }.to be_allowed_for(:admin) }
+ it { expect { create_container_repository_storage_move }.to be_denied_for(:user) }
+ end
+
+ context 'destination_storage_name is missing', :aggregate_failures do
+ let(:destination_storage_name) { nil }
+
+ it 'schedules a container repository storage move' do
+ create_container_repository_storage_move
+
+ storage_move = container.repository_storage_moves.last
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response).to match_response_schema("public_api/v4/#{container_type.singularize}_repository_storage_move")
+ expect(json_response['id']).to eq(storage_move.id)
+ expect(json_response['state']).to eq('scheduled')
+ expect(json_response['source_storage_name']).to eq('default')
+ expect(json_response['destination_storage_name']).to be_present
+ end
+ end
+ end
+
+ describe "POST /#{container_type.singularize}_repository_storage_moves" do
+ let(:url) { "/#{container_type.singularize}_repository_storage_moves" }
+ let(:source_storage_name) { 'default' }
+ let(:destination_storage_name) { 'test_second_storage' }
+
+ def create_container_repository_storage_moves
+ post api(url, user), params: {
+ source_storage_name: source_storage_name,
+ destination_storage_name: destination_storage_name
+ }
+ end
+
+ before do
+ stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/extra_storage' })
+ end
+
+ it 'schedules the worker' do
+ expect(bulk_worker_klass).to receive(:perform_async).with(source_storage_name, destination_storage_name)
+
+ create_container_repository_storage_moves
+
+ expect(response).to have_gitlab_http_status(:accepted)
+ end
+
+ context 'source_storage_name is invalid' do
+ let(:source_storage_name) { 'not-a-real-storage' }
+
+ it 'gives an error' do
+ create_container_repository_storage_moves
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'destination_storage_name is missing' do
+ let(:destination_storage_name) { nil }
+
+ it 'schedules the worker' do
+ expect(bulk_worker_klass).to receive(:perform_async).with(source_storage_name, destination_storage_name)
+
+ create_container_repository_storage_moves
+
+ expect(response).to have_gitlab_http_status(:accepted)
+ end
+ end
+
+ context 'destination_storage_name is invalid' do
+ let(:destination_storage_name) { 'not-a-real-storage' }
+
+ it 'gives an error' do
+ create_container_repository_storage_moves
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ describe 'normal user' do
+ it { expect { create_container_repository_storage_moves }.to be_denied_for(:user) }
+ end
+ end
+end
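
These parameterized shared examples leave `container`, `storage_move`, `repository_storage_move_factory`, and `bulk_worker_klass` to the including spec. A minimal sketch of a project-level consumer, assuming factory and worker names that are not shown in this diff:

  # Hypothetical consumer spec -- the factory and worker names are illustrative assumptions.
  RSpec.describe API::ProjectRepositoryStorageMoves do
    it_behaves_like 'repository_storage_moves API', 'projects' do
      let_it_be(:container) { create(:project, :repository) }
      let_it_be(:storage_move) { create(:project_repository_storage_move, :scheduled, container: container) }

      let(:repository_storage_move_factory) { :project_repository_storage_move }
      let(:bulk_worker_klass) { Projects::ScheduleBulkRepositoryShardMovesWorker }
    end
  end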
diff --git a/spec/support/shared_examples/requests/api/resolvable_discussions_shared_examples.rb b/spec/support/shared_examples/requests/api/resolvable_discussions_shared_examples.rb
index 5748e873fd4..460e8d57a2b 100644
--- a/spec/support/shared_examples/requests/api/resolvable_discussions_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/resolvable_discussions_shared_examples.rb
@@ -9,6 +9,7 @@ RSpec.shared_examples 'resolvable discussions API' do |parent_type, noteable_typ
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['notes'].size).to eq(1)
expect(json_response['notes'][0]['resolved']).to eq(true)
+ expect(Time.parse(json_response['notes'][0]['resolved_at'])).to be_like_time(note.reload.resolved_at)
end
it "unresolves discussion if resolved is false" do
@@ -18,6 +19,7 @@ RSpec.shared_examples 'resolvable discussions API' do |parent_type, noteable_typ
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['notes'].size).to eq(1)
expect(json_response['notes'][0]['resolved']).to eq(false)
+ expect(json_response['notes'][0]['resolved_at']).to be_nil
end
it "returns a 400 bad request error if resolved parameter is not passed" do
diff --git a/spec/support/shared_examples/requests/rack_attack_shared_examples.rb b/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
index 5d300d38e4a..3b039049ca9 100644
--- a/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
+++ b/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
@@ -154,10 +154,11 @@ RSpec.shared_examples 'rate-limited token-authenticated requests' do
end
def make_request(args)
+ path, options = args
if request_method == 'POST'
- post(*args)
+ post(path, **options)
else
- get(*args)
+ get(path, **options)
end
end
end
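
The change stops splatting a `[path, options]` pair into `get`/`post`: under Ruby 2.7+ keyword-argument separation, the options hash must be passed with `**` so the Rails request helpers receive `params:`/`headers:` as keywords. A standalone sketch of the behavior the change works around (not the spec's actual helper):

  # Sketch of keyword-argument separation, using a stand-in helper method.
  def helper(path, params: {}, headers: {})
    [path, params, headers]
  end

  args = ['/api/v4/projects', { params: { page: 1 } }]
  # helper(*args)         # bare splat: Ruby 2.7 warns, Ruby 3 raises -- the hash is no
  #                       # longer auto-converted into keyword arguments
  path, options = args
  helper(path, **options) # the hash is explicitly expanded into keywords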
diff --git a/spec/support/shared_examples/requests/releases_shared_examples.rb b/spec/support/shared_examples/requests/releases_shared_examples.rb
new file mode 100644
index 00000000000..b835947e497
--- /dev/null
+++ b/spec/support/shared_examples/requests/releases_shared_examples.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'correct release milestone order' do
+ let_it_be_with_reload(:milestone_1) { create(:milestone, project: project) }
+ let_it_be_with_reload(:milestone_2) { create(:milestone, project: project) }
+
+ shared_examples 'correct sort order' do
+ it 'sorts milestone_1 before milestone_2' do
+ freeze_time do
+ expect(actual_milestone_title_order).to eq([milestone_1.title, milestone_2.title])
+ end
+ end
+ end
+
+ context 'due_date' do
+ before do
+ milestone_1.update!(due_date: Time.zone.now, start_date: 1.day.ago, title: 'z')
+ milestone_2.update!(due_date: 1.day.from_now, start_date: 2.days.ago, title: 'a')
+ end
+
+ context 'when both milestones have a due_date' do
+ it_behaves_like 'correct sort order'
+ end
+
+ context 'when one milestone does not have a due_date' do
+ before do
+ milestone_2.update!(due_date: nil)
+ end
+
+ it_behaves_like 'correct sort order'
+ end
+ end
+
+ context 'start_date' do
+ before do
+ milestone_1.update!(due_date: 1.day.from_now, start_date: 1.day.ago, title: 'z')
+ milestone_2.update!(due_date: 1.day.from_now, start_date: milestone_2_start_date, title: 'a')
+ end
+
+ context 'when both milestones have a start_date' do
+ let(:milestone_2_start_date) { Time.zone.now }
+
+ it_behaves_like 'correct sort order'
+ end
+
+ context 'when one milestone does not have a start_date' do
+ let(:milestone_2_start_date) { nil }
+
+ it_behaves_like 'correct sort order'
+ end
+ end
+
+ context 'title' do
+ before do
+ milestone_1.update!(due_date: 1.day.from_now, start_date: Time.zone.now, title: 'a')
+ milestone_2.update!(due_date: 1.day.from_now, start_date: Time.zone.now, title: 'z')
+ end
+
+ it_behaves_like 'correct sort order'
+ end
+end
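
The shared example leaves `actual_milestone_title_order` to the including spec. A hedged sketch of a request-spec consumer, where the endpoint and JSON shape are assumptions for illustration only:

  # Hypothetical consumer -- URL and response shape are illustrative, not taken from this diff.
  it_behaves_like 'correct release milestone order' do
    let(:actual_milestone_title_order) do
      get api("/projects/#{project.id}/releases/#{release.tag}", maintainer)

      json_response['milestones'].map { |milestone| milestone['title'] }
    end
  end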
diff --git a/spec/support/shared_examples/requests/sessionless_auth_request_shared_examples.rb b/spec/support/shared_examples/requests/sessionless_auth_request_shared_examples.rb
new file mode 100644
index 00000000000..d82da1b01e1
--- /dev/null
+++ b/spec/support/shared_examples/requests/sessionless_auth_request_shared_examples.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'authenticates sessionless user for the request spec' do |params|
+ params ||= {}
+
+ before do
+ stub_authentication_activity_metrics(debug: false)
+ end
+
+ let(:user) { create(:user) }
+ let(:personal_access_token) { create(:personal_access_token, user: user) }
+ let(:default_params) { params.except(:public) || {} }
+
+ context "when the 'personal_access_token' param is populated with the personal access token" do
+ it 'logs the user in' do
+ expect(authentication_metrics)
+ .to increment(:user_authenticated_counter)
+ .and increment(:user_session_override_counter)
+ .and increment(:user_sessionless_authentication_counter)
+
+ get url, params: default_params.merge(private_token: personal_access_token.token)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(controller.current_user).to eq(user)
+ end
+
+ it 'does not log the user in if page is public', if: params[:public] do
+ get url, params: default_params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(controller.current_user).to be_nil
+ end
+ end
+
+ context 'when the personal access token has no api scope', unless: params[:public] do
+ it 'does not log the user in' do
+ # Some specs that include these shared examples route the request
+ # through ApplicationController#route_not_found, which bypasses the
+ # usual Devise auth code and therefore does not increment the
+ # :user_unauthenticated_counter
+ #
+ unless params[:ignore_incrementing]
+ expect(authentication_metrics)
+ .to increment(:user_unauthenticated_counter)
+ end
+
+ personal_access_token.update!(scopes: [:read_user])
+
+ get url, params: default_params.merge(private_token: personal_access_token.token)
+
+ expect(response).not_to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context "when the 'PERSONAL_ACCESS_TOKEN' header is populated with the personal access token" do
+ it 'logs the user in' do
+ expect(authentication_metrics)
+ .to increment(:user_authenticated_counter)
+ .and increment(:user_session_override_counter)
+ .and increment(:user_sessionless_authentication_counter)
+
+ headers = { 'PRIVATE-TOKEN': personal_access_token.token }
+ get url, params: default_params, headers: headers
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ it "doesn't log the user in otherwise", unless: params[:public] do
+ # Some specs that include these shared examples route the request
+ # through ApplicationController#route_not_found, which bypasses the
+ # usual Devise auth code and therefore does not increment the
+ # :user_unauthenticated_counter
+ #
+ unless params[:ignore_incrementing]
+ expect(authentication_metrics)
+ .to increment(:user_unauthenticated_counter)
+ end
+
+ get url, params: default_params.merge(private_token: 'token')
+
+ expect(response).not_to have_gitlab_http_status(:ok)
+ end
+end
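
Including specs supply `url` and can pass `public: true` (or `ignore_incrementing: true`) as parameters. A minimal usage sketch in which the route is an assumption:

  # Hypothetical usage -- the URL is illustrative only.
  describe 'GET /:username.keys' do
    it_behaves_like 'authenticates sessionless user for the request spec', public: true do
      let(:url) { "/#{user.username}.keys" }
    end
  end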
diff --git a/spec/support/shared_examples/routing/legacy_path_redirect_shared_examples.rb b/spec/support/shared_examples/routing/legacy_path_redirect_shared_examples.rb
index 808336db7b1..ae3f6425b5e 100644
--- a/spec/support/shared_examples/routing/legacy_path_redirect_shared_examples.rb
+++ b/spec/support/shared_examples/routing/legacy_path_redirect_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-RSpec.shared_examples 'redirecting a legacy project path' do |source, target|
+RSpec.shared_examples 'redirecting a legacy path' do |source, target|
include RSpec::Rails::RequestExampleGroup
it "redirects #{source} to #{target}" do
diff --git a/spec/support/shared_examples/serializers/pipeline_artifacts_shared_example.rb b/spec/support/shared_examples/serializers/pipeline_artifacts_shared_example.rb
new file mode 100644
index 00000000000..d5ffd5e7510
--- /dev/null
+++ b/spec/support/shared_examples/serializers/pipeline_artifacts_shared_example.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+RSpec.shared_examples 'public artifacts' do
+ let_it_be(:project) { create(:project, :public) }
+ let(:pipeline) { create(:ci_empty_pipeline, status: :success, project: project) }
+
+ context 'that has artifacts' do
+ let!(:build) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
+
+ it 'contains information about artifacts' do
+ expect(subject[:details][:artifacts].length).to eq(1)
+ end
+ end
+
+ context 'that has non public artifacts' do
+ let!(:build) { create(:ci_build, :success, :artifacts, :non_public_artifacts, pipeline: pipeline) }
+
+ it 'does not contain information about artifacts' do
+ expect(subject[:details][:artifacts].length).to eq(0)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/services/boards/issues_list_service_shared_examples.rb b/spec/support/shared_examples/services/boards/issues_list_service_shared_examples.rb
index 06e2b715e6d..197b0694741 100644
--- a/spec/support/shared_examples/services/boards/issues_list_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/boards/issues_list_service_shared_examples.rb
@@ -19,78 +19,12 @@ RSpec.shared_examples 'issues list service' do
end
end
- it 'avoids N+1' do
- params = { board_id: board.id }
- control = ActiveRecord::QueryRecorder.new { described_class.new(parent, user, params).execute }
-
- create(:list, board: board)
-
- expect { described_class.new(parent, user, params).execute }.not_to exceed_query_limit(control)
- end
-
- context 'issues are ordered by priority' do
- it 'returns opened issues when list_id is missing' do
- params = { board_id: board.id }
-
- issues = described_class.new(parent, user, params).execute
-
- expect(issues).to eq [opened_issue2, reopened_issue1, opened_issue1]
- end
-
- it 'returns opened issues when listing issues from Backlog' do
- params = { board_id: board.id, id: backlog.id }
-
- issues = described_class.new(parent, user, params).execute
-
- expect(issues).to eq [opened_issue2, reopened_issue1, opened_issue1]
- end
-
- it 'returns opened issues that have label list applied when listing issues from a label list' do
- params = { board_id: board.id, id: list1.id }
-
- issues = described_class.new(parent, user, params).execute
-
- expect(issues).to eq [list1_issue3, list1_issue1, list1_issue2]
- end
- end
-
- context 'issues are ordered by date of closing' do
- it 'returns closed issues when listing issues from Closed' do
- params = { board_id: board.id, id: closed.id }
-
- issues = described_class.new(parent, user, params).execute
-
- expect(issues).to eq [closed_issue1, closed_issue2, closed_issue3, closed_issue4, closed_issue5]
- end
- end
-
- context 'with list that does not belong to the board' do
- it 'raises an error' do
- list = create(:list)
- service = described_class.new(parent, user, board_id: board.id, id: list.id)
-
- expect { service.execute }.to raise_error(ActiveRecord::RecordNotFound)
- end
- end
-
- context 'with invalid list id' do
- it 'raises an error' do
- service = described_class.new(parent, user, board_id: board.id, id: nil)
-
- expect { service.execute }.to raise_error(ActiveRecord::RecordNotFound)
- end
- end
-
- context 'when :all_lists is used' do
- it 'returns issues from all lists' do
- params = { board_id: board.id, all_lists: true }
-
- issues = described_class.new(parent, user, params).execute
-
- expected = [opened_issue2, reopened_issue1, opened_issue1, list1_issue1,
- list1_issue2, list1_issue3, list2_issue1, closed_issue1,
- closed_issue2, closed_issue3, closed_issue4, closed_issue5]
- expect(issues).to match_array(expected)
- end
+ it_behaves_like 'items list service' do
+ let(:backlog_items) { [opened_issue2, reopened_issue1, opened_issue1] }
+ let(:list1_items) { [list1_issue3, list1_issue1, list1_issue2] }
+ let(:closed_items) { [closed_issue1, closed_issue2, closed_issue3, closed_issue4, closed_issue5] }
+ let(:all_items) { backlog_items + list1_items + closed_items + [list2_issue1] }
+ let(:list_factory) { :list }
+ let(:new_list) { create(:list, board: board) }
end
end
diff --git a/spec/support/shared_examples/services/boards/items_list_service_shared_examples.rb b/spec/support/shared_examples/services/boards/items_list_service_shared_examples.rb
new file mode 100644
index 00000000000..9a3a0cc9cc8
--- /dev/null
+++ b/spec/support/shared_examples/services/boards/items_list_service_shared_examples.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'items list service' do
+ it 'avoids N+1' do
+ params = { board_id: board.id }
+ control = ActiveRecord::QueryRecorder.new { described_class.new(parent, user, params).execute }
+
+ new_list
+
+ expect { described_class.new(parent, user, params).execute }.not_to exceed_query_limit(control)
+ end
+
+ it 'returns opened items when list_id is missing' do
+ params = { board_id: board.id }
+
+ items = described_class.new(parent, user, params).execute
+
+ expect(items).to match_array(backlog_items)
+ end
+
+ it 'returns opened items when listing items from Backlog' do
+ params = { board_id: board.id, id: backlog.id }
+
+ items = described_class.new(parent, user, params).execute
+
+ expect(items).to match_array(backlog_items)
+ end
+
+ it 'returns opened items that have label list applied when listing items from a label list' do
+ params = { board_id: board.id, id: list1.id }
+
+ items = described_class.new(parent, user, params).execute
+
+ expect(items).to match_array(list1_items)
+ end
+
+ it 'returns closed items when listing items from Closed sorted by closed_at in descending order' do
+ params = { board_id: board.id, id: closed.id }
+
+ items = described_class.new(parent, user, params).execute
+
+ expect(items).to eq(closed_items)
+ end
+
+ it 'raises an error if the list does not belong to the board' do
+ list = create(list_factory) # rubocop:disable Rails/SaveBang
+ service = described_class.new(parent, user, board_id: board.id, id: list.id)
+
+ expect { service.execute }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+ it 'raises an error if list id is invalid' do
+ service = described_class.new(parent, user, board_id: board.id, id: nil)
+
+ expect { service.execute }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+ it 'returns items from all lists if :all_lists is used' do
+ params = { board_id: board.id, all_lists: true }
+
+ items = described_class.new(parent, user, params).execute
+
+ expect(items).to match_array(all_items)
+ end
+end
diff --git a/spec/support/shared_examples/services/merge_request_shared_examples.rb b/spec/support/shared_examples/services/merge_request_shared_examples.rb
index 2bd06ac3e9c..56179b6cd00 100644
--- a/spec/support/shared_examples/services/merge_request_shared_examples.rb
+++ b/spec/support/shared_examples/services/merge_request_shared_examples.rb
@@ -58,3 +58,18 @@ RSpec.shared_examples 'reviewer_ids filter' do
end
end
end
+
+RSpec.shared_examples 'merge request reviewers cache counters invalidator' do
+ let(:reviewer_1) { create(:user) }
+ let(:reviewer_2) { create(:user) }
+
+ before do
+ merge_request.update!(reviewers: [reviewer_1, reviewer_2])
+ end
+
+ it 'invalidates counter cache for reviewers' do
+ expect(merge_request.reviewers).to all(receive(:invalidate_merge_request_cache_counts))
+
+ described_class.new(project, user, {}).execute(merge_request)
+ end
+end
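
The new shared example assumes the including service spec defines `project`, `user`, and `merge_request`, and that `described_class` is a merge request service whose `execute` touches the reviewers. A sketch under those assumptions:

  # Hypothetical consumer -- the concrete service class is an assumption.
  RSpec.describe MergeRequests::CloseService do
    let(:project) { create(:project, :repository) }
    let(:user) { create(:user) }
    let(:merge_request) { create(:merge_request, source_project: project) }

    it_behaves_like 'merge request reviewers cache counters invalidator'
  end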
diff --git a/spec/support/shared_examples/services/namespace_package_settings_shared_examples.rb b/spec/support/shared_examples/services/namespace_package_settings_shared_examples.rb
new file mode 100644
index 00000000000..8398dd3c453
--- /dev/null
+++ b/spec/support/shared_examples/services/namespace_package_settings_shared_examples.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'updating the namespace package setting attributes' do |from: {}, to:|
+ it_behaves_like 'not creating the namespace package setting'
+
+ it 'updates the namespace package setting' do
+ expect { subject }
+ .to change { namespace.package_settings.reload.maven_duplicates_allowed }.from(from[:maven_duplicates_allowed]).to(to[:maven_duplicates_allowed])
+ .and change { namespace.package_settings.reload.maven_duplicate_exception_regex }.from(from[:maven_duplicate_exception_regex]).to(to[:maven_duplicate_exception_regex])
+ end
+end
+
+RSpec.shared_examples 'not creating the namespace package setting' do
+ it "doesn't create the namespace package setting" do
+ expect { subject }.not_to change { Namespace::PackageSetting.count }
+ end
+end
+
+RSpec.shared_examples 'creating the namespace package setting' do
+ it 'creates a new package setting' do
+ expect { subject }.to change { Namespace::PackageSetting.count }.by(1)
+ end
+
+ it 'saves the settings', :aggregate_failures do
+ subject
+
+ expect(namespace.package_setting_relation.maven_duplicates_allowed).to eq(package_settings[:maven_duplicates_allowed])
+ expect(namespace.package_setting_relation.maven_duplicate_exception_regex).to eq(package_settings[:maven_duplicate_exception_regex])
+ end
+
+ it_behaves_like 'returning a success'
+end
diff --git a/spec/support/shared_examples/services/onboarding_progress_shared_examples.rb b/spec/support/shared_examples/services/onboarding_progress_shared_examples.rb
new file mode 100644
index 00000000000..8c6c2271af3
--- /dev/null
+++ b/spec/support/shared_examples/services/onboarding_progress_shared_examples.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'records an onboarding progress action' do |action|
+ include AfterNextHelpers
+
+ it do
+ expect_next(OnboardingProgressService, namespace)
+ .to receive(:execute).with(action: action).and_call_original
+
+ subject
+ end
+end
+
+RSpec.shared_examples 'does not record an onboarding progress action' do
+ it do
+ expect(OnboardingProgressService).not_to receive(:new)
+
+ subject
+ end
+end
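
Consumers provide `namespace` and a `subject` that should (or should not) record the onboarding action; the action symbol is passed as a parameter. A sketch with a hypothetical action name:

  # Hypothetical usage -- :subscription_created is an illustrative action name.
  it_behaves_like 'records an onboarding progress action', :subscription_created do
    let(:namespace) { group }

    subject { described_class.new(group, user).execute }
  end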
diff --git a/spec/support/shared_examples/services/packages_shared_examples.rb b/spec/support/shared_examples/services/packages_shared_examples.rb
index 70d29b4bc99..fa307d2a9a6 100644
--- a/spec/support/shared_examples/services/packages_shared_examples.rb
+++ b/spec/support/shared_examples/services/packages_shared_examples.rb
@@ -220,3 +220,45 @@ RSpec.shared_examples 'package workhorse uploads' do
end
end
end
+
+RSpec.shared_examples 'with versionless packages' do
+ context 'with versionless package' do
+ let!(:versionless_package) { create(:maven_package, project: project, version: nil) }
+
+ shared_examples 'not including the package' do
+ it 'does not return the package' do
+ subject
+
+ expect(json_response.map { |package| package['id'] }).not_to include(versionless_package.id)
+ end
+ end
+
+ it_behaves_like 'not including the package'
+
+ context 'with include_versionless param' do
+ context 'with truthy include_versionless param' do
+ [true, 'true', 1, '1'].each do |param|
+ context "for param #{param}" do
+ let(:params) { super().merge(include_versionless: param) }
+
+ it 'returns the package' do
+ subject
+
+ expect(json_response.map { |package| package['id'] }).to include(versionless_package.id)
+ end
+ end
+ end
+ end
+
+ context 'with falsy include_versionless param' do
+ [false, '', nil, 'false', 0, '0'].each do |param|
+ context "for param #{param}" do
+ let(:params) { super().merge(include_versionless: param) }
+
+ it_behaves_like 'not including the package'
+ end
+ end
+ end
+ end
+ end
+end
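
The example expects the including API spec to define `project`, `params`, and a `subject` that performs a request returning a JSON package list. A sketch in which the endpoint is an assumption for illustration:

  # Hypothetical consumer -- the endpoint is illustrative only.
  describe 'GET /projects/:id/packages' do
    let(:params) { {} }

    subject { get api("/projects/#{project.id}/packages", user), params: params }

    it_behaves_like 'with versionless packages'
  end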
diff --git a/spec/support/shared_examples/services/repositories/housekeeping_shared_examples.rb b/spec/support/shared_examples/services/repositories/housekeeping_shared_examples.rb
new file mode 100644
index 00000000000..a174ae94b75
--- /dev/null
+++ b/spec/support/shared_examples/services/repositories/housekeeping_shared_examples.rb
@@ -0,0 +1,118 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'housekeeps repository' do
+ subject { described_class.new(resource) }
+
+ context 'with a clean redis state', :clean_gitlab_redis_shared_state do
+ describe '#execute' do
+ it 'enqueues a sidekiq job' do
+ expect(subject).to receive(:try_obtain_lease).and_return(:the_uuid)
+ expect(subject).to receive(:lease_key).and_return(:the_lease_key)
+ expect(subject).to receive(:task).and_return(:incremental_repack)
+ expect(GitGarbageCollectWorker).to receive(:perform_async).with(resource.id, :incremental_repack, :the_lease_key, :the_uuid).and_call_original
+
+ Sidekiq::Testing.fake! do
+ expect { subject.execute }.to change(GitGarbageCollectWorker.jobs, :size).by(1)
+ end
+ end
+
+ it 'yields the block if given' do
+ expect do |block|
+ subject.execute(&block)
+ end.to yield_with_no_args
+ end
+
+ it 'resets counter after execution' do
+ expect(subject).to receive(:try_obtain_lease).and_return(:the_uuid)
+ allow(subject).to receive(:gc_period).and_return(1)
+ resource.increment_pushes_since_gc
+
+ perform_enqueued_jobs do
+ expect { subject.execute }.to change { resource.pushes_since_gc }.to(0)
+ end
+ end
+
+ context 'when no lease can be obtained' do
+ before do
+ expect(subject).to receive(:try_obtain_lease).and_return(false)
+ end
+
+ it 'does not enqueue a job' do
+ expect(GitGarbageCollectWorker).not_to receive(:perform_async)
+
+ expect { subject.execute }.to raise_error(Repositories::HousekeepingService::LeaseTaken)
+ end
+
+ it 'does not reset pushes_since_gc' do
+ expect do
+ expect { subject.execute }.to raise_error(Repositories::HousekeepingService::LeaseTaken)
+ end.not_to change { resource.pushes_since_gc }
+ end
+
+ it 'does not yield' do
+ expect do |block|
+ expect { subject.execute(&block) }
+ .to raise_error(Repositories::HousekeepingService::LeaseTaken)
+ end.not_to yield_with_no_args
+ end
+ end
+
+ context 'task type' do
+ it 'goes through all four housekeeping tasks, executing only the highest task when there is overlap' do
+ allow(subject).to receive(:try_obtain_lease).and_return(:the_uuid)
+ allow(subject).to receive(:lease_key).and_return(:the_lease_key)
+
+ # At push 200
+ expect(GitGarbageCollectWorker).to receive(:perform_async).with(resource.id, :gc, :the_lease_key, :the_uuid)
+ .once
+ # At push 50, 100, 150
+ expect(GitGarbageCollectWorker).to receive(:perform_async).with(resource.id, :full_repack, :the_lease_key, :the_uuid)
+ .exactly(3).times
+ # At push 10, 20, ... (except those above)
+ expect(GitGarbageCollectWorker).to receive(:perform_async).with(resource.id, :incremental_repack, :the_lease_key, :the_uuid)
+ .exactly(16).times
+ # At push 6, 12, 18, ... (except those above)
+ expect(GitGarbageCollectWorker).to receive(:perform_async).with(resource.id, :pack_refs, :the_lease_key, :the_uuid)
+ .exactly(27).times
+
+ 201.times do
+ subject.increment!
+ subject.execute if subject.needed?
+ end
+
+ expect(resource.pushes_since_gc).to eq(1)
+ end
+ end
+
+ it 'runs the task specifically requested' do
+ housekeeping = described_class.new(resource, :gc)
+
+ allow(housekeeping).to receive(:try_obtain_lease).and_return(:gc_uuid)
+ allow(housekeeping).to receive(:lease_key).and_return(:gc_lease_key)
+
+ expect(GitGarbageCollectWorker).to receive(:perform_async).with(resource.id, :gc, :gc_lease_key, :gc_uuid).twice
+
+ 2.times do
+ housekeeping.execute
+ end
+ end
+ end
+
+ describe '#needed?' do
+ it 'returns false when the count is low enough' do
+ expect(subject.needed?).to eq(false)
+ end
+
+ it 'returns true when the count is high enough' do
+ allow(resource).to receive(:pushes_since_gc).and_return(10)
+ expect(subject.needed?).to eq(true)
+ end
+ end
+
+ describe '#increment!' do
+ it 'increments the pushes_since_gc counter' do
+ expect { subject.increment! }.to change { resource.pushes_since_gc }.by(1)
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/services/schedule_bulk_repository_shard_moves_shared_examples.rb b/spec/support/shared_examples/services/schedule_bulk_repository_shard_moves_shared_examples.rb
new file mode 100644
index 00000000000..e67fc4ab04a
--- /dev/null
+++ b/spec/support/shared_examples/services/schedule_bulk_repository_shard_moves_shared_examples.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'moves repository shard in bulk' do
+ let(:source_storage_name) { 'default' }
+ let(:destination_storage_name) { 'test_second_storage' }
+
+ before do
+ stub_storage_settings(destination_storage_name => { 'path' => 'tmp/tests/extra_storage' })
+ end
+
+ describe '#execute' do
+ it 'schedules container repository storage moves' do
+ expect { subject.execute(source_storage_name, destination_storage_name) }
+ .to change(move_service_klass, :count).by(1)
+
+ storage_move = container.repository_storage_moves.last!
+
+ expect(storage_move).to have_attributes(
+ source_storage_name: source_storage_name,
+ destination_storage_name: destination_storage_name,
+ state_name: :scheduled
+ )
+ end
+
+ context 'read-only repository' do
+ it 'does not get scheduled' do
+ container.set_repository_read_only!
+
+ expect(subject).to receive(:log_info)
+ .with(/Container #{container.full_path} \(#{container.id}\) was skipped: #{container.class} is read only/)
+ expect { subject.execute(source_storage_name, destination_storage_name) }
+ .to change(move_service_klass, :count).by(0)
+ end
+ end
+ end
+
+ describe '.enqueue' do
+ it 'defers to the worker' do
+ expect(bulk_worker_klass).to receive(:perform_async).with(source_storage_name, destination_storage_name)
+
+ described_class.enqueue(source_storage_name, destination_storage_name)
+ end
+ end
+end
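
The including service spec supplies `container`, `move_service_klass`, and `bulk_worker_klass`; `subject` defaults to an instance of `described_class`. A sketch using assumed project-level classes:

  # Hypothetical consumer -- the class and factory names are assumptions for illustration.
  RSpec.describe Projects::ScheduleBulkRepositoryShardMovesService do
    it_behaves_like 'moves repository shard in bulk' do
      let_it_be_with_reload(:container) { create(:project, :repository) }

      let(:move_service_klass) { Projects::RepositoryStorageMove }
      let(:bulk_worker_klass) { Projects::ScheduleBulkRepositoryShardMovesWorker }
    end
  end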
diff --git a/spec/support/shared_examples/workers/schedule_bulk_repository_shard_moves_shared_examples.rb b/spec/support/shared_examples/workers/schedule_bulk_repository_shard_moves_shared_examples.rb
new file mode 100644
index 00000000000..465aca63148
--- /dev/null
+++ b/spec/support/shared_examples/workers/schedule_bulk_repository_shard_moves_shared_examples.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'schedules bulk repository shard moves' do
+ let(:source_storage_name) { 'default' }
+ let(:destination_storage_name) { 'test_second_storage' }
+
+ describe "#perform" do
+ before do
+ stub_storage_settings(destination_storage_name => { 'path' => 'tmp/tests/extra_storage' })
+
+ allow(worker_klass).to receive(:perform_async)
+ end
+
+ include_examples 'an idempotent worker' do
+ let(:job_args) { [source_storage_name, destination_storage_name] }
+
+ it 'schedules container repository storage moves' do
+ expect { subject }.to change(move_service_klass, :count).by(1)
+
+ storage_move = container.repository_storage_moves.last!
+
+ expect(storage_move).to have_attributes(
+ source_storage_name: source_storage_name,
+ destination_storage_name: destination_storage_name,
+ state_name: :scheduled
+ )
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/workers/update_repository_move_shared_examples.rb b/spec/support/shared_examples/workers/update_repository_move_shared_examples.rb
new file mode 100644
index 00000000000..babd7cfbbeb
--- /dev/null
+++ b/spec/support/shared_examples/workers/update_repository_move_shared_examples.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'an update storage move worker' do
+ describe '#perform' do
+ let(:service) { double(:update_repository_storage_service) }
+
+ before do
+ allow(Gitlab.config.repositories.storages).to receive(:keys).and_return(%w[default test_second_storage])
+ end
+
+ context 'without repository storage move' do
+ it 'calls the update repository storage service' do
+ expect(service_klass).to receive(:new).and_return(service)
+ expect(service).to receive(:execute)
+
+ expect do
+ subject.perform(container.id, 'test_second_storage')
+ end.to change(repository_storage_move_klass, :count).by(1)
+
+ storage_move = container.repository_storage_moves.last
+ expect(storage_move).to have_attributes(
+ source_storage_name: 'default',
+ destination_storage_name: 'test_second_storage'
+ )
+ end
+ end
+
+ context 'with repository storage move' do
+ it 'calls the update repository storage service' do
+ expect(service_klass).to receive(:new).and_return(service)
+ expect(service).to receive(:execute)
+
+ expect do
+ subject.perform(nil, nil, repository_storage_move.id)
+ end.not_to change(repository_storage_move_klass, :count)
+ end
+ end
+ end
+end
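
Worker specs that include this example define `container`, `repository_storage_move`, `service_klass`, and `repository_storage_move_klass`. A sketch under assumed project-level names:

  # Hypothetical consumer -- the concrete worker, service, and factory names are assumptions.
  RSpec.describe Projects::UpdateRepositoryStorageWorker do
    subject { described_class.new }

    it_behaves_like 'an update storage move worker' do
      let_it_be(:container) { create(:project, :repository) }
      let_it_be(:repository_storage_move) { create(:project_repository_storage_move) }

      let(:service_klass) { Projects::UpdateRepositoryStorageService }
      let(:repository_storage_move_klass) { Projects::RepositoryStorageMove }
    end
  end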
diff --git a/spec/support_specs/matchers/be_sorted_spec.rb b/spec/support_specs/matchers/be_sorted_spec.rb
new file mode 100644
index 00000000000..e62bc9b36b3
--- /dev/null
+++ b/spec/support_specs/matchers/be_sorted_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+load File.expand_path('../../../spec/support/matchers/be_sorted.rb', __dir__)
+
+RSpec.describe 'be_sorted' do
+ it 'matches empty collections, regardless of arguments' do
+ expect([])
+ .to be_sorted
+ .and be_sorted.asc
+ .and be_sorted.desc
+ .and be_sorted(:foo)
+ .and be_sorted(:bar)
+
+ expect([].to_set).to be_sorted
+ expect({}).to be_sorted
+ end
+
+ it 'matches in both directions' do
+ expect([1, 2, 3]).to be_sorted.asc
+ expect([3, 2, 1]).to be_sorted.desc
+ end
+
+ it 'can match on a projection' do
+ xs = [['a', 10], ['b', 7], ['c', 4]]
+
+ expect(xs).to be_sorted.asc.by(&:first)
+ expect(xs).to be_sorted(:first, :asc)
+ expect(xs).to be_sorted.desc.by(&:second)
+ expect(xs).to be_sorted(:second, :desc)
+ end
+end
diff --git a/spec/tasks/gitlab/git_rake_spec.rb b/spec/tasks/gitlab/git_rake_spec.rb
index 4bc1b460d9b..4d89b126c9e 100644
--- a/spec/tasks/gitlab/git_rake_spec.rb
+++ b/spec/tasks/gitlab/git_rake_spec.rb
@@ -19,4 +19,38 @@ RSpec.describe 'gitlab:git rake tasks' do
expect { run_rake_task('gitlab:git:fsck') }.to output(/Performed integrity check for/).to_stdout
end
end
+
+ describe 'checksum_projects' do
+ it 'outputs the checksum for a repo' do
+ expected = /#{project.id},#{project.repository.checksum}/
+
+ expect { run_rake_task('gitlab:git:checksum_projects') }.to output(expected).to_stdout
+ end
+
+ it 'outputs blank checksum for no repo' do
+ no_repo = create(:project)
+
+ expected = /#{no_repo.id},$/
+
+ expect { run_rake_task('gitlab:git:checksum_projects') }.to output(expected).to_stdout
+ end
+
+ it 'outputs zeroes for empty repo' do
+ empty_repo = create(:project, :empty_repo)
+
+ expected = /#{empty_repo.id},0000000000000000000000000000000000000000/
+
+ expect { run_rake_task('gitlab:git:checksum_projects') }.to output(expected).to_stdout
+ end
+
+ it 'outputs errors' do
+ allow_next_found_instance_of(Project) do |project|
+ allow(project).to receive(:repo_exists?).and_raise('foo')
+ end
+
+ expected = /#{project.id},Ignored error: foo/
+
+ expect { run_rake_task('gitlab:git:checksum_projects') }.to output(expected).to_stdout
+ end
+ end
end
diff --git a/spec/tasks/gitlab/packages/events_rake_spec.rb b/spec/tasks/gitlab/packages/events_rake_spec.rb
index a485dc2ce58..d6aa6deac1e 100644
--- a/spec/tasks/gitlab/packages/events_rake_spec.rb
+++ b/spec/tasks/gitlab/packages/events_rake_spec.rb
@@ -38,8 +38,8 @@ RSpec.describe 'gitlab:packages:events namespace rake task' do
end
end
- describe 'generate_guest' do
- let(:task) { 'generate_guest' }
+ describe 'generate_counts' do
+ let(:task) { 'generate_counts' }
Packages::Event::EVENT_SCOPES.keys.each do |event_scope|
it "includes `#{event_scope}` scope" do
diff --git a/spec/tasks/gitlab/pages_rake_spec.rb b/spec/tasks/gitlab/pages_rake_spec.rb
new file mode 100644
index 00000000000..76808f52890
--- /dev/null
+++ b/spec/tasks/gitlab/pages_rake_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'rake_helper'
+
+RSpec.describe 'gitlab:pages:migrate_legacy_storage rake task' do
+ before(:context) do
+ Rake.application.rake_require 'tasks/gitlab/pages'
+ end
+
+ subject { run_rake_task('gitlab:pages:migrate_legacy_storage') }
+
+ let(:project) { create(:project) }
+
+ it 'does not try to migrate pages if pages are not deployed' do
+ expect(::Pages::MigrateLegacyStorageToDeploymentService).not_to receive(:new)
+
+ subject
+ end
+
+ context 'when pages are marked as deployed' do
+ before do
+ project.mark_pages_as_deployed
+ end
+
+ context 'when pages directory does not exist' do
+ it 'tries to migrate the project, but does not crash' do
+ expect_next_instance_of(::Pages::MigrateLegacyStorageToDeploymentService, project) do |service|
+ expect(service).to receive(:execute).and_call_original
+ end
+
+ subject
+ end
+ end
+
+ context 'when pages directory exists on disk' do
+ before do
+ FileUtils.mkdir_p File.join(project.pages_path, "public")
+ File.open(File.join(project.pages_path, "public/index.html"), "w") do |f|
+ f.write("Hello!")
+ end
+ end
+
+ it 'migrates pages projects without deployments' do
+ expect_next_instance_of(::Pages::MigrateLegacyStorageToDeploymentService, project) do |service|
+ expect(service).to receive(:execute).and_call_original
+ end
+
+ expect do
+ subject
+ end.to change { project.pages_metadatum.reload.pages_deployment }.from(nil)
+ end
+
+ context 'when a deployment already exists for the project' do
+ before do
+ deployment = create(:pages_deployment, project: project)
+ project.set_first_pages_deployment!(deployment)
+ end
+
+ it 'does not try to migrate project' do
+ expect(::Pages::MigrateLegacyStorageToDeploymentService).not_to receive(:new)
+
+ subject
+ end
+ end
+ end
+ end
+end
diff --git a/spec/tooling/lib/tooling/kubernetes_client_spec.rb b/spec/tooling/lib/tooling/kubernetes_client_spec.rb
index fdd56aa0189..2511295206c 100644
--- a/spec/tooling/lib/tooling/kubernetes_client_spec.rb
+++ b/spec/tooling/lib/tooling/kubernetes_client_spec.rb
@@ -17,84 +17,111 @@ RSpec.describe Tooling::KubernetesClient do
end
end
- describe '#cleanup' do
+ describe '#cleanup_by_release' do
before do
allow(subject).to receive(:raw_resource_names).and_return(raw_resource_names)
end
+ shared_examples 'a kubectl command to delete resources' do
+ let(:wait) { true }
+ let(:release_names_in_command) { release_name.respond_to?(:join) ? %(-l 'release in (#{release_name.join(', ')})') : %(-l release="#{release_name}") }
+
+ specify do
+ expect(Gitlab::Popen).to receive(:popen_with_detail)
+ .with(["kubectl delete #{described_class::RESOURCE_LIST} " +
+ %(--namespace "#{namespace}" --now --ignore-not-found --include-uninitialized --wait=#{wait} #{release_names_in_command})])
+ .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true)))
+
+ expect(Gitlab::Popen).to receive(:popen_with_detail)
+ .with([%(kubectl delete --namespace "#{namespace}" --ignore-not-found #{pod_for_release})])
+ .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true)))
+
+ # We're not verifying the output here, just silencing it
+ expect { subject.cleanup_by_release(release_name: release_name) }.to output.to_stdout
+ end
+ end
+
it 'raises an error if the Kubernetes command fails' do
expect(Gitlab::Popen).to receive(:popen_with_detail)
.with(["kubectl delete #{described_class::RESOURCE_LIST} " +
%(--namespace "#{namespace}" --now --ignore-not-found --include-uninitialized --wait=true -l release="#{release_name}")])
.and_return(Gitlab::Popen::Result.new([], '', '', double(success?: false)))
- expect { subject.cleanup(release_name: release_name) }.to raise_error(described_class::CommandFailedError)
+ expect { subject.cleanup_by_release(release_name: release_name) }.to raise_error(described_class::CommandFailedError)
end
- it 'calls kubectl with the correct arguments' do
- expect(Gitlab::Popen).to receive(:popen_with_detail)
- .with(["kubectl delete #{described_class::RESOURCE_LIST} " +
- %(--namespace "#{namespace}" --now --ignore-not-found --include-uninitialized --wait=true -l release="#{release_name}")])
- .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true)))
+ it_behaves_like 'a kubectl command to delete resources'
- expect(Gitlab::Popen).to receive(:popen_with_detail)
- .with([%(kubectl delete --namespace "#{namespace}" --ignore-not-found #{pod_for_release})])
- .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true)))
+ context 'with multiple releases' do
+ let(:release_name) { %w[my-release my-release-2] }
- # We're not verifying the output here, just silencing it
- expect { subject.cleanup(release_name: release_name) }.to output.to_stdout
+ it_behaves_like 'a kubectl command to delete resources'
end
- context 'with multiple releases' do
- let(:release_name) { %w[my-release my-release-2] }
+ context 'with `wait: false`' do
+ let(:wait) { false }
- it 'raises an error if the Kubernetes command fails' do
- expect(Gitlab::Popen).to receive(:popen_with_detail)
- .with(["kubectl delete #{described_class::RESOURCE_LIST} " +
- %(--namespace "#{namespace}" --now --ignore-not-found --include-uninitialized --wait=true -l 'release in (#{release_name.join(', ')})')])
- .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: false)))
+ it_behaves_like 'a kubectl command to delete resources'
+ end
+ end
- expect { subject.cleanup(release_name: release_name) }.to raise_error(described_class::CommandFailedError)
- end
+ describe '#cleanup_by_created_at' do
+ let(:two_days_ago) { Time.now - 3600 * 24 * 2 }
+ let(:resource_type) { 'pvc' }
+ let(:resource_names) { [pod_for_release] }
- it 'calls kubectl with the correct arguments' do
- expect(Gitlab::Popen).to receive(:popen_with_detail)
- .with(["kubectl delete #{described_class::RESOURCE_LIST} " +
- %(--namespace "#{namespace}" --now --ignore-not-found --include-uninitialized --wait=true -l 'release in (#{release_name.join(', ')})')])
- .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true)))
+ before do
+ allow(subject).to receive(:resource_names_created_before).with(resource_type: resource_type, created_before: two_days_ago).and_return(resource_names)
+ end
+
+ shared_examples 'a kubectl command to delete resources by older than given creation time' do
+ let(:wait) { true }
+ let(:release_names_in_command) { resource_names.join(' ') }
+ specify do
expect(Gitlab::Popen).to receive(:popen_with_detail)
- .with([%(kubectl delete --namespace "#{namespace}" --ignore-not-found #{pod_for_release})])
- .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true)))
+ .with(["kubectl delete #{resource_type} ".squeeze(' ') +
+ %(--namespace "#{namespace}" --now --ignore-not-found --include-uninitialized --wait=#{wait} #{release_names_in_command})])
+ .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true)))
# We're not verifying the output here, just silencing it
- expect { subject.cleanup(release_name: release_name) }.to output.to_stdout
+ expect { subject.cleanup_by_created_at(resource_type: resource_type, created_before: two_days_ago) }.to output.to_stdout
end
end
+ it 'raises an error if the Kubernetes command fails' do
+ expect(Gitlab::Popen).to receive(:popen_with_detail)
+ .with(["kubectl delete #{resource_type} " +
+ %(--namespace "#{namespace}" --now --ignore-not-found --include-uninitialized --wait=true #{pod_for_release})])
+ .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: false)))
+
+ expect { subject.cleanup_by_created_at(resource_type: resource_type, created_before: two_days_ago) }.to raise_error(described_class::CommandFailedError)
+ end
+
+ it_behaves_like 'a kubectl command to delete resources by older than given creation time'
+
+ context 'with multiple resource names' do
+ let(:resource_names) { %w[pod-1 pod-2] }
+
+ it_behaves_like 'a kubectl command to delete resources by older than given creation time'
+ end
+
context 'with `wait: false`' do
- it 'raises an error if the Kubernetes command fails' do
- expect(Gitlab::Popen).to receive(:popen_with_detail)
- .with(["kubectl delete #{described_class::RESOURCE_LIST} " +
- %(--namespace "#{namespace}" --now --ignore-not-found --include-uninitialized --wait=false -l release="#{release_name}")])
- .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: false)))
+ let(:wait) { false }
- expect { subject.cleanup(release_name: release_name, wait: false) }.to raise_error(described_class::CommandFailedError)
- end
+ it_behaves_like 'a kubectl command to delete resources by older than given creation time'
+ end
- it 'calls kubectl with the correct arguments' do
- expect(Gitlab::Popen).to receive(:popen_with_detail)
- .with(["kubectl delete #{described_class::RESOURCE_LIST} " +
- %(--namespace "#{namespace}" --now --ignore-not-found --include-uninitialized --wait=false -l release="#{release_name}")])
- .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true)))
+ context 'with no resource_type given' do
+ let(:resource_type) { nil }
- expect(Gitlab::Popen).to receive(:popen_with_detail)
- .with([%(kubectl delete --namespace "#{namespace}" --ignore-not-found #{pod_for_release})])
- .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true)))
+ it_behaves_like 'a kubectl command to delete resources by older than given creation time'
+ end
- # We're not verifying the output here, just silencing it
- expect { subject.cleanup(release_name: release_name, wait: false) }.to output.to_stdout
- end
+ context 'with multiple resource_type given' do
+ let(:resource_type) { 'pvc,service' }
+
+ it_behaves_like 'a kubectl command to delete resources by older than given creation time'
end
end
@@ -108,4 +135,59 @@ RSpec.describe Tooling::KubernetesClient do
expect(subject.__send__(:raw_resource_names)).to eq(raw_resource_names)
end
end
+
+ describe '#resource_names_created_before' do
+ let(:three_days_ago) { Time.now - 3600 * 24 * 3 }
+ let(:two_days_ago) { Time.now - 3600 * 24 * 2 }
+ let(:pvc_created_three_days_ago) { 'pvc-created-three-days-ago' }
+ let(:resource_type) { 'pvc' }
+ let(:raw_resources) do
+ {
+ items: [
+ {
+ apiVersion: "v1",
+ kind: "PersistentVolumeClaim",
+ metadata: {
+ creationTimestamp: three_days_ago,
+ name: pvc_created_three_days_ago
+ }
+ },
+ {
+ apiVersion: "v1",
+ kind: "PersistentVolumeClaim",
+ metadata: {
+ creationTimestamp: Time.now,
+ name: 'another-pvc'
+ }
+ }
+ ]
+ }.to_json
+ end
+
+ shared_examples 'a kubectl command to retrieve resource names sorted by creationTimestamp' do
+ specify do
+ expect(Gitlab::Popen).to receive(:popen_with_detail)
+ .with(["kubectl get #{resource_type} ".squeeze(' ') +
+ %(--namespace "#{namespace}" ) +
+ "--sort-by='{.metadata.creationTimestamp}' -o json"])
+ .and_return(Gitlab::Popen::Result.new([], raw_resources, '', double(success?: true)))
+
+ expect(subject.__send__(:resource_names_created_before, resource_type: resource_type, created_before: two_days_ago)).to contain_exactly(pvc_created_three_days_ago)
+ end
+ end
+
+ it_behaves_like 'a kubectl command to retrieve resource names sorted by creationTimestamp'
+
+ context 'with no resource_type given' do
+ let(:resource_type) { nil }
+
+ it_behaves_like 'a kubectl command to retrieve resource names sorted by creationTimestamp'
+ end
+
+ context 'with multiple resource_type given' do
+ let(:resource_type) { 'pvc,service' }
+
+ it_behaves_like 'a kubectl command to retrieve resource names sorted by creationTimestamp'
+ end
+ end
end
diff --git a/spec/tooling/lib/tooling/test_file_finder_spec.rb b/spec/tooling/lib/tooling/test_file_finder_spec.rb
deleted file mode 100644
index 683bc647b8a..00000000000
--- a/spec/tooling/lib/tooling/test_file_finder_spec.rb
+++ /dev/null
@@ -1,175 +0,0 @@
-# frozen_string_literal: true
-
-require_relative '../../../../tooling/lib/tooling/test_file_finder'
-
-RSpec.describe Tooling::TestFileFinder do
- subject { described_class.new(file) }
-
- describe '#test_files' do
- context 'when given non .rb files' do
- let(:file) { 'app/assets/images/emoji.png' }
-
- it 'does not return a test file' do
- expect(subject.test_files).to be_empty
- end
- end
-
- context 'when given file in app/' do
- let(:file) { 'app/finders/admin/projects_finder.rb' }
-
- it 'returns the matching app spec file' do
- expect(subject.test_files).to contain_exactly('spec/finders/admin/projects_finder_spec.rb')
- end
- end
-
- context 'when given file in lib/' do
- let(:file) { 'lib/banzai/color_parser.rb' }
-
- it 'returns the matching app spec file' do
- expect(subject.test_files).to contain_exactly('spec/lib/banzai/color_parser_spec.rb')
- end
- end
-
- context 'when given a file in tooling/' do
- let(:file) { 'tooling/lib/tooling/test_file_finder.rb' }
-
- it 'returns the matching tooling test' do
- expect(subject.test_files).to contain_exactly('spec/tooling/lib/tooling/test_file_finder_spec.rb')
- end
- end
-
- context 'when given a test file' do
- let(:file) { 'spec/lib/banzai/color_parser_spec.rb' }
-
- it 'returns the matching test file itself' do
- expect(subject.test_files).to contain_exactly('spec/lib/banzai/color_parser_spec.rb')
- end
- end
-
- context 'when given an app file in ee/' do
- let(:file) { 'ee/app/models/analytics/cycle_analytics/group_level.rb' }
-
- it 'returns the matching ee/ test file' do
- expect(subject.test_files).to contain_exactly('ee/spec/models/analytics/cycle_analytics/group_level_spec.rb')
- end
- end
-
- context 'when given an ee extension module file' do
- let(:file) { 'ee/app/models/ee/user.rb' }
-
- it 'returns the matching ee/ class test file, ee extension module test file and the foss class test file' do
- test_files = ['ee/spec/models/user_spec.rb', 'ee/spec/models/ee/user_spec.rb', 'spec/app/models/user_spec.rb']
- expect(subject.test_files).to contain_exactly(*test_files)
- end
- end
-
- context 'when given a test file in ee/' do
- let(:file) { 'ee/spec/models/container_registry/event_spec.rb' }
-
- it 'returns the test file itself' do
- expect(subject.test_files).to contain_exactly('ee/spec/models/container_registry/event_spec.rb')
- end
- end
-
- context 'when given a module test file in ee/' do
- let(:file) { 'ee/spec/models/ee/appearance_spec.rb' }
-
- it 'returns the matching module test file itself and the corresponding spec model test file' do
- test_files = ['ee/spec/models/ee/appearance_spec.rb', 'spec/models/appearance_spec.rb']
- expect(subject.test_files).to contain_exactly(*test_files)
- end
- end
-
- context 'when given a factory file' do
- let(:file) { 'spec/factories/users.rb' }
-
- it 'returns spec/factories_spec.rb file' do
- expect(subject.test_files).to contain_exactly('spec/factories_spec.rb')
- end
- end
-
- context 'when given an ee factory file' do
- let(:file) { 'ee/spec/factories/users.rb' }
-
- it 'returns spec/factories_spec.rb file' do
- expect(subject.test_files).to contain_exactly('spec/factories_spec.rb')
- end
- end
-
- context 'when given db/structure.sql' do
- let(:file) { 'db/structure.sql' }
-
- it 'returns spec/db/schema_spec.rb' do
- expect(subject.test_files).to contain_exactly('spec/db/schema_spec.rb')
- end
- end
-
- context 'when given an initializer' do
- let(:file) { 'config/initializers/action_mailer_hooks.rb' }
-
- it 'returns the matching initializer spec' do
- expect(subject.test_files).to contain_exactly('spec/initializers/action_mailer_hooks_spec.rb')
- end
- end
-
- context 'when given a haml view' do
- let(:file) { 'app/views/admin/users/_user.html.haml' }
-
- it 'returns the matching view spec' do
- expect(subject.test_files).to contain_exactly('spec/views/admin/users/_user.html.haml_spec.rb')
- end
- end
-
- context 'when given a haml view in ee/' do
- let(:file) { 'ee/app/views/admin/users/_user.html.haml' }
-
- it 'returns the matching view spec' do
- expect(subject.test_files).to contain_exactly('ee/spec/views/admin/users/_user.html.haml_spec.rb')
- end
- end
-
- context 'when given a migration file' do
- let(:file) { 'db/migrate/20191023152913_add_default_and_free_plans.rb' }
-
- it 'returns the matching migration spec' do
- test_files = %w[
- spec/migrations/add_default_and_free_plans_spec.rb
- spec/migrations/20191023152913_add_default_and_free_plans_spec.rb
- ]
- expect(subject.test_files).to contain_exactly(*test_files)
- end
- end
-
- context 'when given a post-migration file' do
- let(:file) { 'db/post_migrate/20200608072931_backfill_imported_snippet_repositories.rb' }
-
- it 'returns the matching migration spec' do
- test_files = %w[
- spec/migrations/backfill_imported_snippet_repositories_spec.rb
- spec/migrations/20200608072931_backfill_imported_snippet_repositories_spec.rb
- ]
- expect(subject.test_files).to contain_exactly(*test_files)
- end
- end
-
- context 'with foss_test_only: true' do
- subject { Tooling::TestFileFinder.new(file, foss_test_only: true) }
-
- context 'when given a module file in ee/' do
- let(:file) { 'ee/app/models/ee/user.rb' }
-
- it 'returns only the corresponding spec model test file in foss' do
- expect(subject.test_files).to contain_exactly('spec/app/models/user_spec.rb')
- end
- end
-
- context 'when given an app file in ee/' do
- let(:file) { 'ee/app/models/approval.rb' }
-
- it 'returns no test file in foss' do
- expect(subject.test_files).to be_empty
- end
- end
- end
- end
-end
diff --git a/spec/uploaders/packages/debian/distribution_release_file_uploader_spec.rb b/spec/uploaders/packages/debian/distribution_release_file_uploader_spec.rb
new file mode 100644
index 00000000000..d36bfac4de8
--- /dev/null
+++ b/spec/uploaders/packages/debian/distribution_release_file_uploader_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::DistributionReleaseFileUploader do
+ [:project, :group].each do |container_type|
+ context "Packages::Debian::#{container_type.capitalize}Distribution" do
+ let(:factory) { "debian_#{container_type}_distribution" }
+ let(:distribution) { create(factory, :with_file) }
+ let(:uploader) { described_class.new(distribution, :file) }
+ let(:path) { Gitlab.config.packages.storage_path }
+
+ subject { uploader }
+
+ it_behaves_like "builds correct paths",
+ store_dir: %r[^\h{2}/\h{2}/\h{64}/debian_#{container_type}_distribution/\d+$],
+ cache_dir: %r[/packages/tmp/cache$],
+ work_dir: %r[/packages/tmp/work$]
+
+ context 'object store is remote' do
+ before do
+ stub_package_file_object_storage
+ end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like "builds correct paths",
+ store_dir: %r[^\h{2}/\h{2}/\h{64}/debian_#{container_type}_distribution/\d+$],
+ cache_dir: %r[/packages/tmp/cache$],
+ work_dir: %r[/packages/tmp/work$]
+ end
+
+ describe 'remote file' do
+ let(:distribution) { create(factory, :with_file, :object_storage) }
+
+ context 'with object storage enabled' do
+ before do
+ stub_package_file_object_storage
+ end
+
+ it 'can store file remotely' do
+ allow(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async)
+
+ distribution
+
+ expect(distribution.file_store).to eq(described_class::Store::REMOTE)
+ expect(distribution.file.path).not_to be_blank
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/uploaders/packages/package_file_uploader_spec.rb b/spec/uploaders/packages/package_file_uploader_spec.rb
index f4e72892c8f..e8f4cae7b04 100644
--- a/spec/uploaders/packages/package_file_uploader_spec.rb
+++ b/spec/uploaders/packages/package_file_uploader_spec.rb
@@ -2,43 +2,50 @@
require 'spec_helper'
RSpec.describe Packages::PackageFileUploader do
- let(:package_file) { create(:package_file, :xml) }
- let(:uploader) { described_class.new(package_file, :file) }
- let(:path) { Gitlab.config.packages.storage_path }
-
- subject { uploader }
-
- it_behaves_like "builds correct paths",
- store_dir: %r[\h{2}/\h{2}],
- cache_dir: %r[/packages/tmp/cache],
- work_dir: %r[/packages/tmp/work]
-
- context 'object store is remote' do
- before do
- stub_package_file_object_storage
- end
-
- include_context 'with storage', described_class::Store::REMOTE
-
- it_behaves_like "builds correct paths",
- store_dir: %r[\h{2}/\h{2}]
- end
+ {
+ package_file: %r[^\h{2}/\h{2}/\h{64}/packages/\d+/files/\d+$],
+ debian_package_file: %r[^\h{2}/\h{2}/\h{64}/packages/debian/files/\d+$]
+ }.each do |factory, store_dir_regex|
+ context factory.to_s do
+ let(:package_file) { create(factory) } # rubocop:disable Rails/SaveBang
+ let(:uploader) { described_class.new(package_file, :file) }
+ let(:path) { Gitlab.config.packages.storage_path }
+
+ subject { uploader }
+
+ it_behaves_like "builds correct paths",
+ store_dir: store_dir_regex,
+ cache_dir: %r[/packages/tmp/cache],
+ work_dir: %r[/packages/tmp/work]
+
+ context 'object store is remote' do
+ before do
+ stub_package_file_object_storage
+ end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like "builds correct paths",
+ store_dir: store_dir_regex
+ end
- describe 'remote file' do
- let(:package_file) { create(:package_file, :object_storage, :xml) }
+ describe 'remote file' do
+ let(:package_file) { create(factory, :object_storage) }
- context 'with object storage enabled' do
- before do
- stub_package_file_object_storage
- end
+ context 'with object storage enabled' do
+ before do
+ stub_package_file_object_storage
+ end
- it 'can store file remotely' do
- allow(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async)
+ it 'can store file remotely' do
+ allow(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async)
- package_file
+ package_file
- expect(package_file.file_store).to eq(described_class::Store::REMOTE)
- expect(package_file.file.path).not_to be_blank
+ expect(package_file.file_store).to eq(described_class::Store::REMOTE)
+ expect(package_file.file.path).not_to be_blank
+ end
+ end
end
end
end
diff --git a/spec/views/layouts/_head.html.haml_spec.rb b/spec/views/layouts/_head.html.haml_spec.rb
index d8748873f64..15fdfaaaa65 100644
--- a/spec/views/layouts/_head.html.haml_spec.rb
+++ b/spec/views/layouts/_head.html.haml_spec.rb
@@ -59,7 +59,7 @@ RSpec.describe 'layouts/_head' do
render
- expect(rendered).to match('<link rel="stylesheet" media="all" href="/stylesheets/highlight/themes/solarised-light.css" />')
+ expect(rendered).to match('<link rel="stylesheet" media="print" href="/stylesheets/highlight/themes/solarised-light.css" />')
end
context 'when an asset_host is set and snowplow url is set' do
diff --git a/spec/views/layouts/header/_new_dropdown.haml_spec.rb b/spec/views/layouts/header/_new_dropdown.haml_spec.rb
index cf33ec9884b..01892e72c97 100644
--- a/spec/views/layouts/header/_new_dropdown.haml_spec.rb
+++ b/spec/views/layouts/header/_new_dropdown.haml_spec.rb
@@ -3,10 +3,42 @@
require 'spec_helper'
RSpec.describe 'layouts/header/_new_dropdown' do
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
+
+ shared_examples_for 'invite member quick link' do
+ context 'when an experiment is active' do
+ before do
+ allow(Gitlab::Experimentation).to receive(:active?).and_return(true)
+ allow(view).to receive(:experiment_tracking_category_and_group)
+ allow(view).to receive(:tracking_label).with(user)
+ end
+
+ context 'with ability to invite members' do
+ it { is_expected.to have_link('Invite members', href: href) }
+
+ it 'records the experiment' do
+ subject
+
+ expect(view).to have_received(:experiment_tracking_category_and_group)
+ .with(:invite_members_new_dropdown, subject: user)
+ expect(view).to have_received(:tracking_label).with(user)
+ end
+ end
+
+ context 'without ability to invite members' do
+ let(:invite_member) { false }
+
+ it { is_expected.not_to have_link('Invite members') }
+ end
+ end
+
+ context 'when an experiment is not active' do
+ it { is_expected.not_to have_link('Invite members') }
+ end
+ end
context 'group-specific links' do
- let(:group) { create(:group) }
+ let_it_be(:group) { create(:group) }
before do
stub_current_user(user)
@@ -22,25 +54,39 @@ RSpec.describe 'layouts/header/_new_dropdown' do
it 'has a "New project" link' do
render
- expect(rendered).to have_link(
- 'New project',
- href: new_project_path(namespace_id: group.id)
- )
+ expect(rendered).to have_link('New project', href: new_project_path(namespace_id: group.id))
end
it 'has a "New subgroup" link' do
render
- expect(rendered).to have_link(
- 'New subgroup',
- href: new_group_path(parent_id: group.id)
- )
+ expect(rendered).to have_link('New subgroup', href: new_group_path(parent_id: group.id))
end
end
+
+ describe 'invite members quick link' do
+ let(:href) { group_group_members_path(group) }
+ let(:invite_member) { true }
+
+ before do
+ allow(view).to receive(:can?).with(user, :create_projects, group).and_return(true)
+ allow(view).to receive(:can?).with(user, :admin_group_member, group).and_return(invite_member)
+ allow(view).to receive(:can_import_members?).and_return(invite_member)
+ allow(view).to receive(:experiment_enabled?)
+ end
+
+ subject do
+ render
+
+ rendered
+ end
+
+ it_behaves_like 'invite member quick link'
+ end
end
context 'project-specific links' do
- let(:project) { create(:project, creator: user, namespace: user.namespace) }
+ let_it_be(:project) { create(:project, creator: user, namespace: user.namespace) }
before do
assign(:project, project)
@@ -54,33 +100,24 @@ RSpec.describe 'layouts/header/_new_dropdown' do
it 'has a "New issue" link' do
render
- expect(rendered).to have_link(
- 'New issue',
- href: new_project_issue_path(project)
- )
+ expect(rendered).to have_link('New issue', href: new_project_issue_path(project))
end
it 'has a "New merge request" link' do
render
- expect(rendered).to have_link(
- 'New merge request',
- href: project_new_merge_request_path(project)
- )
+ expect(rendered).to have_link('New merge request', href: project_new_merge_request_path(project))
end
it 'has a "New snippet" link' do
render
- expect(rendered).to have_link(
- 'New snippet',
- href: new_project_snippet_path(project)
- )
+ expect(rendered).to have_link('New snippet', href: new_project_snippet_path(project))
end
end
context 'as a Project guest' do
- let(:guest) { create(:user) }
+ let_it_be(:guest) { create(:user) }
before do
stub_current_user(guest)
@@ -96,12 +133,28 @@ RSpec.describe 'layouts/header/_new_dropdown' do
it 'has no "New snippet" link' do
render
- expect(rendered).not_to have_link(
- 'New snippet',
- href: new_project_snippet_path(project)
- )
+ expect(rendered).not_to have_link('New snippet', href: new_project_snippet_path(project))
end
end
+
+ describe 'invite members quick link' do
+ let(:invite_member) { true }
+ let(:href) { project_project_members_path(project) }
+
+ before do
+ allow(view).to receive(:can_import_members?).and_return(invite_member)
+ stub_current_user(user)
+ allow(view).to receive(:experiment_enabled?)
+ end
+
+ subject do
+ render
+
+ rendered
+ end
+
+ it_behaves_like 'invite member quick link'
+ end
end
context 'global links' do
@@ -128,7 +181,7 @@ RSpec.describe 'layouts/header/_new_dropdown' do
end
context 'when the user is not allowed to create snippets' do
- let(:user) { create(:user, :external)}
+ let(:user) { create(:user, :external) }
it 'has no "New snippet" link' do
render
diff --git a/spec/views/notify/pipeline_failed_email.html.haml_spec.rb b/spec/views/notify/pipeline_failed_email.html.haml_spec.rb
index b41933f9c36..defd8190eda 100644
--- a/spec/views/notify/pipeline_failed_email.html.haml_spec.rb
+++ b/spec/views/notify/pipeline_failed_email.html.haml_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'notify/pipeline_failed_email.html.haml' do
it_behaves_like 'pipeline status changes email' do
- let(:title) { 'Your pipeline has failed' }
+ let(:title) { "Pipeline ##{pipeline.id} has failed!" }
let(:status) { :failed }
end
end
diff --git a/spec/views/notify/pipeline_failed_email.text.erb_spec.rb b/spec/views/notify/pipeline_failed_email.text.erb_spec.rb
index a5c31632b0d..dd637af5137 100644
--- a/spec/views/notify/pipeline_failed_email.text.erb_spec.rb
+++ b/spec/views/notify/pipeline_failed_email.text.erb_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe 'notify/pipeline_failed_email.text.erb' do
it 'renders the email correctly' do
render
- expect(rendered).to have_content('Your pipeline has failed')
+ expect(rendered).to have_content("Pipeline ##{pipeline.id} has failed!")
expect(rendered).to have_content(pipeline.project.name)
expect(rendered).to have_content(pipeline.git_commit_message.truncate(50).gsub(/\s+/, ' '))
expect(rendered).to have_content(pipeline.commit.author_name)
diff --git a/spec/views/notify/pipeline_fixed_email.html.haml_spec.rb b/spec/views/notify/pipeline_fixed_email.html.haml_spec.rb
index 2b3b08f8e8c..bdfc8fb5f6b 100644
--- a/spec/views/notify/pipeline_fixed_email.html.haml_spec.rb
+++ b/spec/views/notify/pipeline_fixed_email.html.haml_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'notify/pipeline_fixed_email.html.haml' do
it_behaves_like 'pipeline status changes email' do
- let(:title) { 'Your pipeline has been fixed!' }
+ let(:title) { "Pipeline has been fixed and ##{pipeline.id} has passed!" }
let(:status) { :success }
end
end
diff --git a/spec/views/notify/pipeline_fixed_email.text.erb_spec.rb b/spec/views/notify/pipeline_fixed_email.text.erb_spec.rb
index 8640998acaa..d0bc110f95c 100644
--- a/spec/views/notify/pipeline_fixed_email.text.erb_spec.rb
+++ b/spec/views/notify/pipeline_fixed_email.text.erb_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'notify/pipeline_fixed_email.text.erb' do
it_behaves_like 'pipeline status changes email' do
- let(:title) { 'Your pipeline has been fixed!' }
+ let(:title) { "Pipeline has been fixed and ##{pipeline.id} has passed!" }
let(:status) { :success }
end
end
diff --git a/spec/views/notify/pipeline_success_email.html.haml_spec.rb b/spec/views/notify/pipeline_success_email.html.haml_spec.rb
index 51ea7ef5066..ce03f672700 100644
--- a/spec/views/notify/pipeline_success_email.html.haml_spec.rb
+++ b/spec/views/notify/pipeline_success_email.html.haml_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'notify/pipeline_success_email.html.haml' do
it_behaves_like 'pipeline status changes email' do
- let(:title) { 'Your pipeline has passed' }
+ let(:title) { "Pipeline ##{pipeline.id} has passed!" }
let(:status) { :success }
end
end
diff --git a/spec/views/notify/pipeline_success_email.text.erb_spec.rb b/spec/views/notify/pipeline_success_email.text.erb_spec.rb
index 3acf4dd3e26..02334a48fa3 100644
--- a/spec/views/notify/pipeline_success_email.text.erb_spec.rb
+++ b/spec/views/notify/pipeline_success_email.text.erb_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'notify/pipeline_success_email.text.erb' do
it_behaves_like 'pipeline status changes email' do
- let(:title) { 'Your pipeline has passed' }
+ let(:title) { "Pipeline ##{pipeline.id} has passed!" }
let(:status) { :success }
end
end
diff --git a/spec/views/projects/settings/ci_cd/_autodevops_form.html.haml_spec.rb b/spec/views/projects/settings/ci_cd/_autodevops_form.html.haml_spec.rb
index b3f59a59ac8..95c51be9867 100644
--- a/spec/views/projects/settings/ci_cd/_autodevops_form.html.haml_spec.rb
+++ b/spec/views/projects/settings/ci_cd/_autodevops_form.html.haml_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe 'projects/settings/ci_cd/_autodevops_form' do
it 'shows a warning message about Kubernetes cluster' do
render
- expect(rendered).to have_text('Add a Kubernetes cluster integration with a domain or create an AUTO_DEVOPS_PLATFORM_TARGET CI variable')
+ expect(rendered).to have_text('Add a Kubernetes cluster integration with a domain, or create an AUTO_DEVOPS_PLATFORM_TARGET CI variable.')
end
context 'when the project has an available kubernetes cluster' do
@@ -28,7 +28,7 @@ RSpec.describe 'projects/settings/ci_cd/_autodevops_form' do
it 'shows a warning message about base domain' do
render
- expect(rendered).to have_text('You must add a base domain to your Kubernetes cluster in order for your deployment strategy to work.')
+ expect(rendered).to have_text('Add a base domain to your Kubernetes cluster for your deployment strategy to work.')
end
end
end
diff --git a/spec/views/projects/settings/operations/show.html.haml_spec.rb b/spec/views/projects/settings/operations/show.html.haml_spec.rb
index facb4e2016d..a22853d40d8 100644
--- a/spec/views/projects/settings/operations/show.html.haml_spec.rb
+++ b/spec/views/projects/settings/operations/show.html.haml_spec.rb
@@ -15,14 +15,6 @@ RSpec.describe 'projects/settings/operations/show' do
end
let_it_be(:prometheus_service) { create(:prometheus_service, project: project) }
- let_it_be(:alerts_service) { create(:alerts_service, project: project) }
-
- let(:operations_show_locals) do
- {
- prometheus_service: prometheus_service,
- alerts_service: alerts_service
- }
- end
before_all do
project.add_maintainer(user)
@@ -35,13 +27,24 @@ RSpec.describe 'projects/settings/operations/show' do
.and_return(error_tracking_setting)
allow(view).to receive(:tracing_setting)
.and_return(tracing_setting)
+ allow(view).to receive(:prometheus_service)
+ .and_return(prometheus_service)
allow(view).to receive(:current_user).and_return(user)
end
+ describe 'Operations > Alerts' do
+ it 'renders the Operations Settings page' do
+ render
+
+ expect(rendered).to have_content _('Alerts')
+ expect(rendered).to have_content _('Display alerts from all your monitoring tools directly within GitLab.')
+ end
+ end
+
describe 'Operations > Error Tracking' do
context 'Settings page ' do
it 'renders the Operations Settings page' do
- render template: 'projects/settings/operations/show', locals: operations_show_locals
+ render
expect(rendered).to have_content _('Error tracking')
expect(rendered).to have_content _('To link Sentry to GitLab, enter your Sentry URL and Auth Token')
@@ -49,10 +52,34 @@ RSpec.describe 'projects/settings/operations/show' do
end
end
+ describe 'Operations > Prometheus' do
+ context 'when settings_operations_prometheus_service flag is enabled' do
+ it 'renders the Operations Settings page' do
+ render
+
+ expect(rendered).to have_content _('Prometheus')
+ expect(rendered).to have_content _('Link Prometheus monitoring to GitLab.')
+ expect(rendered).to have_content _('To enable the installation of Prometheus on your clusters, deactivate the manual configuration below')
+ end
+ end
+
+ context 'when settings_operations_prometheus_service is disabled' do
+ before do
+ stub_feature_flags(settings_operations_prometheus_service: false)
+ end
+
+ it 'renders the Operations Settings page' do
+ render
+
+ expect(rendered).not_to have_content _('Select the Active checkbox to override the Auto Configuration with custom settings. If unchecked, Auto Configuration settings are used.')
+ end
+ end
+ end
+
describe 'Operations > Tracing' do
context 'with project.tracing_external_url' do
it 'links to project.tracing_external_url' do
- render template: 'projects/settings/operations/show', locals: operations_show_locals
+ render
expect(rendered).to have_link('Tracing', href: tracing_setting.external_url)
end
@@ -66,7 +93,7 @@ RSpec.describe 'projects/settings/operations/show' do
end
it 'sanitizes external_url' do
- render template: 'projects/settings/operations/show', locals: operations_show_locals
+ render
expect(tracing_setting.external_url).to eq(malicious_tracing_url)
expect(rendered).to have_link('Tracing', href: cleaned_url)
@@ -82,7 +109,7 @@ RSpec.describe 'projects/settings/operations/show' do
end
it 'links to Tracing page' do
- render template: 'projects/settings/operations/show', locals: operations_show_locals
+ render
expect(rendered).to have_link('Tracing', href: project_tracing_path(project))
end
diff --git a/spec/views/shared/wikis/_sidebar.html.haml_spec.rb b/spec/views/shared/wikis/_sidebar.html.haml_spec.rb
index 3e691862937..70991369506 100644
--- a/spec/views/shared/wikis/_sidebar.html.haml_spec.rb
+++ b/spec/views/shared/wikis/_sidebar.html.haml_spec.rb
@@ -80,4 +80,28 @@ RSpec.describe 'shared/wikis/_sidebar.html.haml' do
end
end
end
+
+ describe 'link to edit the sidebar' do
+ before do
+ allow(view).to receive(:can?).with(anything, :create_wiki, anything).and_return(can_edit)
+
+ render
+ end
+
+ context 'when the user has edit permission' do
+ let(:can_edit) { true }
+
+ it 'renders the link' do
+ expect(rendered).to have_link('Edit sidebar', href: wiki_page_path(wiki, Wiki::SIDEBAR, action: :edit))
+ end
+ end
+
+ context 'when the user does not have edit permission' do
+ let(:can_edit) { false }
+
+ it 'does not render the link' do
+ expect(rendered).not_to have_link('Edit sidebar')
+ end
+ end
+ end
end
diff --git a/spec/workers/bulk_import_worker_spec.rb b/spec/workers/bulk_import_worker_spec.rb
index 12783f40528..d3a4144d606 100644
--- a/spec/workers/bulk_import_worker_spec.rb
+++ b/spec/workers/bulk_import_worker_spec.rb
@@ -4,13 +4,74 @@ require 'spec_helper'
RSpec.describe BulkImportWorker do
describe '#perform' do
- it 'executes Group Importer' do
- bulk_import_id = 1
+ before do
+ stub_const("#{described_class}::DEFAULT_BATCH_SIZE", 1)
+ end
+
+ context 'when no bulk import is found' do
+ it 'does nothing' do
+ expect(described_class).not_to receive(:perform_in)
+
+ subject.perform(non_existing_record_id)
+ end
+ end
+
+ context 'when bulk import is finished' do
+ it 'does nothing' do
+ bulk_import = create(:bulk_import, :finished)
+
+ expect(described_class).not_to receive(:perform_in)
+
+ subject.perform(bulk_import.id)
+ end
+ end
+
+ context 'when all entities are processed' do
+ it 'marks bulk import as finished' do
+ bulk_import = create(:bulk_import, :started)
+ create(:bulk_import_entity, :finished, bulk_import: bulk_import)
+ create(:bulk_import_entity, :failed, bulk_import: bulk_import)
+
+ subject.perform(bulk_import.id)
+
+ expect(bulk_import.reload.finished?).to eq(true)
+ end
+ end
+
+ context 'when the maximum allowed number of import entities is in progress' do
+ it 'reenqueues itself' do
+ bulk_import = create(:bulk_import, :started)
+ (described_class::DEFAULT_BATCH_SIZE + 1).times { |_| create(:bulk_import_entity, :started, bulk_import: bulk_import) }
+
+ expect(described_class).to receive(:perform_in).with(described_class::PERFORM_DELAY, bulk_import.id)
+
+ subject.perform(bulk_import.id)
+ end
+ end
+
+ context 'when bulk import is created' do
+ it 'marks bulk import as started' do
+ bulk_import = create(:bulk_import, :created)
+ create(:bulk_import_entity, :created, bulk_import: bulk_import)
+
+ subject.perform(bulk_import.id)
+
+ expect(bulk_import.reload.started?).to eq(true)
+ end
+
+ context 'when there are created entities to process' do
+ it 'marks a batch of entities as started, enqueues BulkImports::EntityWorker and reenqueues' do
+ bulk_import = create(:bulk_import, :created)
+ (described_class::DEFAULT_BATCH_SIZE + 1).times { |_| create(:bulk_import_entity, :created, bulk_import: bulk_import) }
+
+ expect(described_class).to receive(:perform_in).with(described_class::PERFORM_DELAY, bulk_import.id)
+ expect(BulkImports::EntityWorker).to receive(:perform_async)
- expect(BulkImports::Importers::GroupsImporter)
- .to receive(:new).with(bulk_import_id).and_return(double(execute: true))
+ subject.perform(bulk_import.id)
- described_class.new.perform(bulk_import_id)
+ expect(bulk_import.entities.map(&:status_name)).to contain_exactly(:created, :started)
+ end
+ end
end
end
end
diff --git a/spec/workers/bulk_imports/entity_worker_spec.rb b/spec/workers/bulk_imports/entity_worker_spec.rb
new file mode 100644
index 00000000000..31515b31947
--- /dev/null
+++ b/spec/workers/bulk_imports/entity_worker_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::EntityWorker do
+ describe '#execute' do
+ let(:bulk_import) { create(:bulk_import) }
+
+ context 'when started entity exists' do
+ let(:entity) { create(:bulk_import_entity, :started, bulk_import: bulk_import) }
+
+ it 'executes BulkImports::Importers::GroupImporter' do
+ expect(BulkImports::Importers::GroupImporter).to receive(:new).with(entity).and_call_original
+
+ subject.perform(entity.id)
+ end
+
+ it 'sets jid' do
+ jid = 'jid'
+
+ allow(subject).to receive(:jid).and_return(jid)
+
+ subject.perform(entity.id)
+
+ expect(entity.reload.jid).to eq(jid)
+ end
+ end
+
+ context 'when started entity does not exist' do
+ it 'does not execute BulkImports::Importers::GroupImporter' do
+ entity = create(:bulk_import_entity, bulk_import: bulk_import)
+
+ expect(BulkImports::Importers::GroupImporter).not_to receive(:new)
+
+ subject.perform(entity.id)
+ end
+ end
+ end
+end
diff --git a/spec/workers/ci/pipelines/create_artifact_worker_spec.rb b/spec/workers/ci/pipeline_artifacts/coverage_report_worker_spec.rb
index 31d2c4e9559..000eda055af 100644
--- a/spec/workers/ci/pipelines/create_artifact_worker_spec.rb
+++ b/spec/workers/ci/pipeline_artifacts/coverage_report_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ci::Pipelines::CreateArtifactWorker do
+RSpec.describe ::Ci::PipelineArtifacts::CoverageReportWorker do
describe '#perform' do
subject { described_class.new.perform(pipeline_id) }
@@ -11,7 +11,7 @@ RSpec.describe ::Ci::Pipelines::CreateArtifactWorker do
let(:pipeline_id) { pipeline.id }
it 'calls pipeline report result service' do
- expect_next_instance_of(::Ci::Pipelines::CreateArtifactService) do |create_artifact_service|
+ expect_next_instance_of(::Ci::PipelineArtifacts::CoverageReportService) do |create_artifact_service|
expect(create_artifact_service).to receive(:execute)
end
@@ -23,7 +23,7 @@ RSpec.describe ::Ci::Pipelines::CreateArtifactWorker do
let(:pipeline_id) { non_existing_record_id }
it 'does not call pipeline create artifact service' do
- expect(Ci::Pipelines::CreateArtifactService).not_to receive(:execute)
+ expect(Ci::PipelineArtifacts::CoverageReportService).not_to receive(:execute)
subject
end
diff --git a/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb b/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb
new file mode 100644
index 00000000000..9e9aa962b63
--- /dev/null
+++ b/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::PipelineArtifacts::ExpireArtifactsWorker do
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ let_it_be(:pipeline_artifact) do
+ create(:ci_pipeline_artifact, expire_at: 1.week.ago)
+ end
+
+ it 'executes a service' do
+ expect_next_instance_of(::Ci::PipelineArtifacts::DestroyExpiredArtifactsService) do |instance|
+ expect(instance).to receive(:execute)
+ end
+
+ worker.perform
+ end
+
+ include_examples 'an idempotent worker' do
+ subject do
+ perform_multiple(worker: worker)
+ end
+
+ it 'removes the artifact only once' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:destroyed_pipeline_artifacts_count, 1)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:destroyed_pipeline_artifacts_count, 0)
+
+ subject
+
+ expect { pipeline_artifact.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+ end
+end
diff --git a/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb b/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
index d98ea1b6ab2..e6592f7f204 100644
--- a/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
+++ b/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
+ using RSpec::Parameterized::TableSyntax
+
let_it_be(:repository, reload: true) { create(:container_repository, :cleanup_scheduled) }
let_it_be(:project) { repository.project }
let_it_be(:policy) { project.container_expiration_policy }
@@ -19,23 +21,55 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
RSpec.shared_examples 'handling all repository conditions' do
it 'sends the repository for cleaning' do
+ service_response = cleanup_service_response(repository: repository)
expect(ContainerExpirationPolicies::CleanupService)
- .to receive(:new).with(repository).and_return(double(execute: cleanup_service_response(repository: repository)))
- expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_status, :finished)
- expect(worker).to receive(:log_extra_metadata_on_done).with(:container_repository_id, repository.id)
+ .to receive(:new).with(repository).and_return(double(execute: service_response))
+ expect_log_extra_metadata(service_response: service_response)
subject
end
context 'with unfinished cleanup' do
it 'logs an unfinished cleanup' do
+ service_response = cleanup_service_response(status: :unfinished, repository: repository)
expect(ContainerExpirationPolicies::CleanupService)
- .to receive(:new).with(repository).and_return(double(execute: cleanup_service_response(status: :unfinished, repository: repository)))
- expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_status, :unfinished)
- expect(worker).to receive(:log_extra_metadata_on_done).with(:container_repository_id, repository.id)
+ .to receive(:new).with(repository).and_return(double(execute: service_response))
+ expect_log_extra_metadata(service_response: service_response, cleanup_status: :unfinished)
subject
end
+
+ context 'with a truncated list of tags to delete' do
+ it 'logs an unfinished cleanup' do
+ service_response = cleanup_service_response(status: :unfinished, repository: repository, cleanup_tags_service_after_truncate_size: 10, cleanup_tags_service_before_delete_size: 5)
+ expect(ContainerExpirationPolicies::CleanupService)
+ .to receive(:new).with(repository).and_return(double(execute: service_response))
+ expect_log_extra_metadata(service_response: service_response, cleanup_status: :unfinished, truncated: true)
+
+ subject
+ end
+ end
+
+ context 'the truncated log field' do
+ where(:before_truncate_size, :after_truncate_size, :truncated) do
+ 100 | 100 | false
+ 100 | 80 | true
+ nil | 100 | false
+ 100 | nil | false
+ nil | nil | false
+ end
+
+ with_them do
+ it 'is logged properly' do
+ service_response = cleanup_service_response(status: :unfinished, repository: repository, cleanup_tags_service_after_truncate_size: after_truncate_size, cleanup_tags_service_before_truncate_size: before_truncate_size)
+ expect(ContainerExpirationPolicies::CleanupService)
+ .to receive(:new).with(repository).and_return(double(execute: service_response))
+ expect_log_extra_metadata(service_response: service_response, cleanup_status: :unfinished, truncated: truncated)
+
+ subject
+ end
+ end
+ end
end
context 'with policy running shortly' do
@@ -87,10 +121,10 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
let_it_be(:another_repository) { create(:container_repository, :cleanup_unfinished) }
it 'process the cleanup scheduled repository first' do
+ service_response = cleanup_service_response(repository: repository)
expect(ContainerExpirationPolicies::CleanupService)
- .to receive(:new).with(repository).and_return(double(execute: cleanup_service_response(repository: repository)))
- expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_status, :finished)
- expect(worker).to receive(:log_extra_metadata_on_done).with(:container_repository_id, repository.id)
+ .to receive(:new).with(repository).and_return(double(execute: service_response))
+ expect_log_extra_metadata(service_response: service_response)
subject
end
@@ -105,10 +139,10 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
end
it 'process the repository with the oldest expiration_policy_started_at' do
+ service_response = cleanup_service_response(repository: repository)
expect(ContainerExpirationPolicies::CleanupService)
- .to receive(:new).with(repository).and_return(double(execute: cleanup_service_response(repository: repository)))
- expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_status, :finished)
- expect(worker).to receive(:log_extra_metadata_on_done).with(:container_repository_id, repository.id)
+ .to receive(:new).with(repository).and_return(double(execute: service_response))
+ expect_log_extra_metadata(service_response: service_response)
subject
end
@@ -164,8 +198,28 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
end
end
- def cleanup_service_response(status: :finished, repository:)
- ServiceResponse.success(message: "cleanup #{status}", payload: { cleanup_status: status, container_repository_id: repository.id })
+ def cleanup_service_response(status: :finished, repository:, cleanup_tags_service_original_size: 100, cleanup_tags_service_before_truncate_size: 80, cleanup_tags_service_after_truncate_size: 80, cleanup_tags_service_before_delete_size: 50)
+ ServiceResponse.success(
+ message: "cleanup #{status}",
+ payload: {
+ cleanup_status: status,
+ container_repository_id: repository.id,
+ cleanup_tags_service_original_size: cleanup_tags_service_original_size,
+ cleanup_tags_service_before_truncate_size: cleanup_tags_service_before_truncate_size,
+ cleanup_tags_service_after_truncate_size: cleanup_tags_service_after_truncate_size,
+ cleanup_tags_service_before_delete_size: cleanup_tags_service_before_delete_size
+ }.compact
+ )
+ end
+
+ def expect_log_extra_metadata(service_response:, cleanup_status: :finished, truncated: false)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_status, cleanup_status)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:container_repository_id, repository.id)
+ %i[cleanup_tags_service_original_size cleanup_tags_service_before_truncate_size cleanup_tags_service_after_truncate_size cleanup_tags_service_before_delete_size].each do |field|
+ value = service_response.payload[field]
+ expect(worker).to receive(:log_extra_metadata_on_done).with(field, value) unless value.nil?
+ end
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_tags_service_truncated, truncated)
end
end
diff --git a/spec/workers/experiments/record_conversion_event_worker_spec.rb b/spec/workers/experiments/record_conversion_event_worker_spec.rb
new file mode 100644
index 00000000000..05e4ebc13ba
--- /dev/null
+++ b/spec/workers/experiments/record_conversion_event_worker_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Experiments::RecordConversionEventWorker, '#perform' do
+ subject(:perform) { described_class.new.perform(:experiment_key, 1234) }
+
+ before do
+ stub_experiment(experiment_key: experiment_active)
+ end
+
+ context 'when the experiment is active' do
+ let(:experiment_active) { true }
+
+ include_examples 'an idempotent worker' do
+ subject { perform }
+
+ it 'records the event' do
+ expect(Experiment).to receive(:record_conversion_event).with(:experiment_key, 1234)
+
+ perform
+ end
+ end
+ end
+
+ context 'when the experiment is not active' do
+ let(:experiment_active) { false }
+
+ it 'does not record the event' do
+ expect(Experiment).not_to receive(:record_conversion_event)
+
+ perform
+ end
+ end
+end
diff --git a/spec/workers/expire_build_artifacts_worker_spec.rb b/spec/workers/expire_build_artifacts_worker_spec.rb
index 995f37daf17..6d73d715d21 100644
--- a/spec/workers/expire_build_artifacts_worker_spec.rb
+++ b/spec/workers/expire_build_artifacts_worker_spec.rb
@@ -8,9 +8,11 @@ RSpec.describe ExpireBuildArtifactsWorker do
describe '#perform' do
it 'executes a service' do
expect_next_instance_of(Ci::DestroyExpiredJobArtifactsService) do |instance|
- expect(instance).to receive(:execute)
+ expect(instance).to receive(:execute).and_call_original
end
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:destroyed_job_artifacts_count, 0)
+
worker.perform
end
end
diff --git a/spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb
index 7acf1a338d3..75d4d2dff2e 100644
--- a/spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb
@@ -9,23 +9,7 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportPullRequestsReviewsWorker do
let(:client) { double(:client) }
describe '#import' do
- it 'does not import with the feature disabled' do
- stub_feature_flags(github_import_pull_request_reviews: false)
-
- expect(Gitlab::JobWaiter)
- .to receive(:new)
- .and_return(double(key: '123', jobs_remaining: 0))
-
- expect(Gitlab::GithubImport::AdvanceStageWorker)
- .to receive(:perform_async)
- .with(project.id, { '123' => 0 }, :issues_and_diff_notes)
-
- worker.import(client, project)
- end
-
it 'imports all the pull request reviews' do
- stub_feature_flags(github_import_pull_request_reviews: true)
-
importer = double(:importer)
waiter = Gitlab::JobWaiter.new(2, '123')
diff --git a/spec/workers/gitlab_performance_bar_stats_worker_spec.rb b/spec/workers/gitlab_performance_bar_stats_worker_spec.rb
index 367003dd1ad..3638add1524 100644
--- a/spec/workers/gitlab_performance_bar_stats_worker_spec.rb
+++ b/spec/workers/gitlab_performance_bar_stats_worker_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe GitlabPerformanceBarStatsWorker do
let(:uuid) { 1 }
before do
- expect(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis)
+ expect(Gitlab::Redis::Cache).to receive(:with).and_yield(redis)
expect_to_cancel_exclusive_lease(GitlabPerformanceBarStatsWorker::LEASE_KEY, uuid)
end
diff --git a/spec/workers/group_destroy_worker_spec.rb b/spec/workers/group_destroy_worker_spec.rb
index ab3dd19dec1..82ae9010a24 100644
--- a/spec/workers/group_destroy_worker_spec.rb
+++ b/spec/workers/group_destroy_worker_spec.rb
@@ -4,8 +4,12 @@ require 'spec_helper'
RSpec.describe GroupDestroyWorker do
let(:group) { create(:group) }
- let(:user) { create(:admin) }
let!(:project) { create(:project, namespace: group) }
+ let(:user) { create(:user) }
+
+ before do
+ group.add_owner(user)
+ end
subject { described_class.new }
diff --git a/spec/workers/jira_connect/sync_branch_worker_spec.rb b/spec/workers/jira_connect/sync_branch_worker_spec.rb
index c8453064b0d..7c715f36fb4 100644
--- a/spec/workers/jira_connect/sync_branch_worker_spec.rb
+++ b/spec/workers/jira_connect/sync_branch_worker_spec.rb
@@ -13,82 +13,67 @@ RSpec.describe JiraConnect::SyncBranchWorker do
let(:project_id) { project.id }
let(:branch_name) { 'master' }
let(:commit_shas) { %w(b83d6e3 5a62481) }
-
- subject { described_class.new.perform(project_id, branch_name, commit_shas) }
+ let(:update_sequence_id) { 1 }
def expect_jira_sync_service_execute(args)
- expect_next_instance_of(JiraConnect::SyncService) do |instance|
- expect(instance).to receive(:execute).with(args.merge(update_sequence_id: nil))
+ expect_next_instances_of(JiraConnect::SyncService, IdempotentWorkerHelper::WORKER_EXEC_TIMES) do |instance|
+ expect(instance).to receive(:execute).with(args)
end
end
- it 'calls JiraConnect::SyncService#execute' do
- expect_jira_sync_service_execute(
- branches: [instance_of(Gitlab::Git::Branch)],
- commits: project.commits_by(oids: commit_shas)
- )
-
- subject
- end
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [project_id, branch_name, commit_shas, update_sequence_id] }
- context 'without branch name' do
- let(:branch_name) { nil }
-
- it 'calls JiraConnect::SyncService#execute' do
- expect_jira_sync_service_execute(
- branches: nil,
- commits: project.commits_by(oids: commit_shas)
- )
-
- subject
+ before do
+ stub_request(:post, 'https://sample.atlassian.net/rest/devinfo/0.10/bulk').to_return(status: 200, body: '', headers: {})
end
- end
-
- context 'without commits' do
- let(:commit_shas) { nil }
it 'calls JiraConnect::SyncService#execute' do
expect_jira_sync_service_execute(
branches: [instance_of(Gitlab::Git::Branch)],
- commits: nil
+ commits: project.commits_by(oids: commit_shas),
+ update_sequence_id: update_sequence_id
)
subject
end
- end
- context 'when project no longer exists' do
- let(:project_id) { non_existing_record_id }
+ context 'without branch name' do
+ let(:branch_name) { nil }
- it 'does not call JiraConnect::SyncService' do
- expect(JiraConnect::SyncService).not_to receive(:new)
+ it 'calls JiraConnect::SyncService#execute' do
+ expect_jira_sync_service_execute(
+ branches: nil,
+ commits: project.commits_by(oids: commit_shas),
+ update_sequence_id: update_sequence_id
+ )
- subject
+ subject
+ end
end
- end
- context 'with update_sequence_id' do
- let(:update_sequence_id) { 1 }
- let(:request_path) { '/rest/devinfo/0.10/bulk' }
- let(:request_body) do
- {
- repositories: [
- Atlassian::JiraConnect::Serializers::RepositoryEntity.represent(
- project,
- commits: project.commits_by(oids: commit_shas),
- branches: [project.repository.find_branch(branch_name)],
- update_sequence_id: update_sequence_id
- )
- ]
- }
+ context 'without commits' do
+ let(:commit_shas) { nil }
+
+ it 'calls JiraConnect::SyncService#execute' do
+ expect_jira_sync_service_execute(
+ branches: [instance_of(Gitlab::Git::Branch)],
+ commits: nil,
+ update_sequence_id: update_sequence_id
+ )
+
+ subject
+ end
end
- subject { described_class.new.perform(project_id, branch_name, commit_shas, update_sequence_id) }
+ context 'when project no longer exists' do
+ let(:project_id) { non_existing_record_id }
- it 'sends the reqeust with custom update_sequence_id' do
- expect_next(Atlassian::JiraConnect::Client).to receive(:post).with(request_path, request_body)
+ it 'does not call JiraConnect::SyncService' do
+ expect(JiraConnect::SyncService).not_to receive(:new)
- subject
+ subject
+ end
end
end
end
diff --git a/spec/workers/jira_connect/sync_deployments_worker_spec.rb b/spec/workers/jira_connect/sync_deployments_worker_spec.rb
new file mode 100644
index 00000000000..9485f4cd3a7
--- /dev/null
+++ b/spec/workers/jira_connect/sync_deployments_worker_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::JiraConnect::SyncDeploymentsWorker do
+ include AfterNextHelpers
+ include ServicesHelper
+
+ describe '#perform' do
+ let_it_be(:deployment) { create(:deployment) }
+
+ let(:sequence_id) { Random.random_number(1..10_000) }
+ let(:object_id) { deployment.id }
+
+ subject { described_class.new.perform(object_id, sequence_id) }
+
+ context 'when the object exists' do
+ it 'calls the Jira sync service' do
+ expect_next(::JiraConnect::SyncService, deployment.project)
+ .to receive(:execute).with(deployments: contain_exactly(deployment), update_sequence_id: sequence_id)
+
+ subject
+ end
+ end
+
+ context 'when the object does not exist' do
+ let(:object_id) { non_existing_record_id }
+
+ it 'does not call the sync service' do
+ expect_next(::JiraConnect::SyncService).not_to receive(:execute)
+
+ subject
+ end
+ end
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(jira_sync_deployments: false)
+ end
+
+ it 'does not call the sync service' do
+ expect_next(::JiraConnect::SyncService).not_to receive(:execute)
+
+ subject
+ end
+ end
+
+ context 'when the feature flag is enabled for this project' do
+ before do
+ stub_feature_flags(jira_sync_deployments: deployment.project)
+ end
+
+ it 'calls the sync service' do
+ expect_next(::JiraConnect::SyncService).to receive(:execute)
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/workers/jira_connect/sync_feature_flags_worker_spec.rb b/spec/workers/jira_connect/sync_feature_flags_worker_spec.rb
new file mode 100644
index 00000000000..035f4ebdd3c
--- /dev/null
+++ b/spec/workers/jira_connect/sync_feature_flags_worker_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::JiraConnect::SyncFeatureFlagsWorker do
+ include AfterNextHelpers
+ include ServicesHelper
+
+ describe '#perform' do
+ let_it_be(:feature_flag) { create(:operations_feature_flag) }
+
+ let(:sequence_id) { Random.random_number(1..10_000) }
+ let(:feature_flag_id) { feature_flag.id }
+
+ subject { described_class.new.perform(feature_flag_id, sequence_id) }
+
+ context 'when object exists' do
+ it 'calls the Jira sync service' do
+ expect_next(::JiraConnect::SyncService, feature_flag.project)
+ .to receive(:execute).with(feature_flags: contain_exactly(feature_flag), update_sequence_id: sequence_id)
+
+ subject
+ end
+ end
+
+ context 'when object does not exist' do
+ let(:feature_flag_id) { non_existing_record_id }
+
+ it 'does not call the sync service' do
+ expect_next(::JiraConnect::SyncService).not_to receive(:execute)
+
+ subject
+ end
+ end
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(jira_sync_feature_flags: false)
+ end
+
+ it 'does not call the sync service' do
+ expect_next(::JiraConnect::SyncService).not_to receive(:execute)
+
+ subject
+ end
+ end
+
+ context 'when the feature flag is enabled for this project' do
+ before do
+ stub_feature_flags(jira_sync_feature_flags: feature_flag.project)
+ end
+
+ it 'calls the sync service' do
+ expect_next(::JiraConnect::SyncService).to receive(:execute)
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/workers/jira_connect/sync_merge_request_worker_spec.rb b/spec/workers/jira_connect/sync_merge_request_worker_spec.rb
index 1a40aa2b3ad..6a0a0744f6f 100644
--- a/spec/workers/jira_connect/sync_merge_request_worker_spec.rb
+++ b/spec/workers/jira_connect/sync_merge_request_worker_spec.rb
@@ -12,48 +12,31 @@ RSpec.describe JiraConnect::SyncMergeRequestWorker do
let_it_be(:merge_request) { create(:merge_request, source_project: project) }
let(:merge_request_id) { merge_request.id }
+ let(:update_sequence_id) { 1 }
- subject { described_class.new.perform(merge_request_id) }
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [merge_request_id, update_sequence_id] }
- it 'calls JiraConnect::SyncService#execute' do
- expect_next_instance_of(JiraConnect::SyncService) do |service|
- expect(service).to receive(:execute).with(merge_requests: [merge_request], update_sequence_id: nil)
+ before do
+ stub_request(:post, 'https://sample.atlassian.net/rest/devinfo/0.10/bulk').to_return(status: 200, body: '', headers: {})
end
- subject
- end
-
- context 'when MR no longer exists' do
- let(:merge_request_id) { non_existing_record_id }
-
- it 'does not call JiraConnect::SyncService' do
- expect(JiraConnect::SyncService).not_to receive(:new)
+ it 'calls JiraConnect::SyncService#execute' do
+ expect_next_instances_of(JiraConnect::SyncService, IdempotentWorkerHelper::WORKER_EXEC_TIMES) do |service|
+ expect(service).to receive(:execute).with(merge_requests: [merge_request], update_sequence_id: update_sequence_id)
+ end
subject
end
- end
- context 'with update_sequence_id' do
- let(:update_sequence_id) { 1 }
- let(:request_path) { '/rest/devinfo/0.10/bulk' }
- let(:request_body) do
- {
- repositories: [
- Atlassian::JiraConnect::Serializers::RepositoryEntity.represent(
- project,
- merge_requests: [merge_request],
- update_sequence_id: update_sequence_id
- )
- ]
- }
- end
-
- subject { described_class.new.perform(merge_request_id, update_sequence_id) }
+ context 'when MR no longer exists' do
+ let(:merge_request_id) { non_existing_record_id }
- it 'sends the request with custom update_sequence_id' do
- expect_next(Atlassian::JiraConnect::Client).to receive(:post).with(request_path, request_body)
+ it 'does not call JiraConnect::SyncService' do
+ expect(JiraConnect::SyncService).not_to receive(:new)
- subject
+ subject
+ end
end
end
end
diff --git a/spec/workers/namespaces/onboarding_pipeline_created_worker_spec.rb b/spec/workers/namespaces/onboarding_pipeline_created_worker_spec.rb
new file mode 100644
index 00000000000..f1789fa8fbd
--- /dev/null
+++ b/spec/workers/namespaces/onboarding_pipeline_created_worker_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Namespaces::OnboardingPipelineCreatedWorker, '#perform' do
+ include AfterNextHelpers
+
+ let_it_be(:ci_pipeline) { create(:ci_pipeline) }
+
+ before do
+ OnboardingProgress.onboard(ci_pipeline.project.namespace)
+ end
+
+ it 'registers an onboarding progress action' do
+ expect_next(OnboardingProgressService, ci_pipeline.project.namespace)
+ .to receive(:execute).with(action: :pipeline_created).and_call_original
+
+ subject.perform(ci_pipeline.project.namespace_id)
+
+ expect(OnboardingProgress.completed?(ci_pipeline.project.namespace, :pipeline_created)).to eq(true)
+ end
+
+ context "when a namespace doesn't exist" do
+ it 'does not register an onboarding progress action' do
+ expect_next(OnboardingProgressService, ci_pipeline.project.namespace).not_to receive(:execute)
+
+ subject.perform(nil)
+
+ expect(OnboardingProgress.completed?(ci_pipeline.project.namespace, :pipeline_created)).to eq(false)
+ end
+ end
+end
diff --git a/spec/workers/namespaces/onboarding_user_added_worker_spec.rb b/spec/workers/namespaces/onboarding_user_added_worker_spec.rb
index 03c668259f8..a773e160fab 100644
--- a/spec/workers/namespaces/onboarding_user_added_worker_spec.rb
+++ b/spec/workers/namespaces/onboarding_user_added_worker_spec.rb
@@ -7,10 +7,16 @@ RSpec.describe Namespaces::OnboardingUserAddedWorker, '#perform' do
let_it_be(:group) { create(:group) }
- it 'records the event' do
+ before do
+ OnboardingProgress.onboard(group)
+ end
+
+ it 'registers an onboarding progress action' do
expect_next(OnboardingProgressService, group)
.to receive(:execute).with(action: :user_added).and_call_original
- expect { subject.perform(group.id) }.to change(NamespaceOnboardingAction, :count).by(1)
+ subject.perform(group.id)
+
+ expect(OnboardingProgress.completed?(group, :user_added)).to be(true)
end
end
diff --git a/spec/workers/project_schedule_bulk_repository_shard_moves_worker_spec.rb b/spec/workers/project_schedule_bulk_repository_shard_moves_worker_spec.rb
index aadfae51906..fb762593d75 100644
--- a/spec/workers/project_schedule_bulk_repository_shard_moves_worker_spec.rb
+++ b/spec/workers/project_schedule_bulk_repository_shard_moves_worker_spec.rb
@@ -3,31 +3,10 @@
require 'spec_helper'
RSpec.describe ProjectScheduleBulkRepositoryShardMovesWorker do
- describe "#perform" do
- before do
- stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/extra_storage' })
+ it_behaves_like 'schedules bulk repository shard moves' do
+ let_it_be_with_reload(:container) { create(:project, :repository).tap { |project| project.track_project_repository } }
- allow(ProjectUpdateRepositoryStorageWorker).to receive(:perform_async)
- end
-
- let!(:project) { create(:project, :repository).tap { |project| project.track_project_repository } }
- let(:source_storage_name) { 'default' }
- let(:destination_storage_name) { 'test_second_storage' }
-
- include_examples 'an idempotent worker' do
- let(:job_args) { [source_storage_name, destination_storage_name] }
-
- it 'schedules project repository storage moves' do
- expect { subject }.to change(ProjectRepositoryStorageMove, :count).by(1)
-
- storage_move = project.repository_storage_moves.last!
-
- expect(storage_move).to have_attributes(
- source_storage_name: source_storage_name,
- destination_storage_name: destination_storage_name,
- state_name: :scheduled
- )
- end
- end
+ let(:move_service_klass) { ProjectRepositoryStorageMove }
+ let(:worker_klass) { ProjectUpdateRepositoryStorageWorker }
end
end
diff --git a/spec/workers/project_update_repository_storage_worker_spec.rb b/spec/workers/project_update_repository_storage_worker_spec.rb
index f75bb3d1642..490f1f5a2ad 100644
--- a/spec/workers/project_update_repository_storage_worker_spec.rb
+++ b/spec/workers/project_update_repository_storage_worker_spec.rb
@@ -3,45 +3,13 @@
require 'spec_helper'
RSpec.describe ProjectUpdateRepositoryStorageWorker do
- let(:project) { create(:project, :repository) }
-
subject { described_class.new }
- describe "#perform" do
- let(:service) { double(:update_repository_storage_service) }
-
- before do
- allow(Gitlab.config.repositories.storages).to receive(:keys).and_return(%w[default test_second_storage])
- end
-
- context 'without repository storage move' do
- it "calls the update repository storage service" do
- expect(Projects::UpdateRepositoryStorageService).to receive(:new).and_return(service)
- expect(service).to receive(:execute)
-
- expect do
- subject.perform(project.id, 'test_second_storage')
- end.to change(ProjectRepositoryStorageMove, :count).by(1)
-
- storage_move = project.repository_storage_moves.last
- expect(storage_move).to have_attributes(
- source_storage_name: "default",
- destination_storage_name: "test_second_storage"
- )
- end
- end
-
- context 'with repository storage move' do
- let!(:repository_storage_move) { create(:project_repository_storage_move) }
-
- it "calls the update repository storage service" do
- expect(Projects::UpdateRepositoryStorageService).to receive(:new).and_return(service)
- expect(service).to receive(:execute)
+ it_behaves_like 'an update storage move worker' do
+ let_it_be_with_refind(:container) { create(:project, :repository) }
+ let_it_be(:repository_storage_move) { create(:project_repository_storage_move) }
- expect do
- subject.perform(nil, nil, repository_storage_move.id)
- end.not_to change(ProjectRepositoryStorageMove, :count)
- end
- end
+ let(:service_klass) { Projects::UpdateRepositoryStorageService }
+ let(:repository_storage_move_klass) { ProjectRepositoryStorageMove }
end
end
diff --git a/spec/workers/snippet_schedule_bulk_repository_shard_moves_worker_spec.rb b/spec/workers/snippet_schedule_bulk_repository_shard_moves_worker_spec.rb
new file mode 100644
index 00000000000..3a09b6ce449
--- /dev/null
+++ b/spec/workers/snippet_schedule_bulk_repository_shard_moves_worker_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe SnippetScheduleBulkRepositoryShardMovesWorker do
+ it_behaves_like 'schedules bulk repository shard moves' do
+ let_it_be_with_reload(:container) { create(:snippet, :repository).tap { |snippet| snippet.create_repository } }
+
+ let(:move_service_klass) { SnippetRepositoryStorageMove }
+ let(:worker_klass) { SnippetUpdateRepositoryStorageWorker }
+ end
+end
diff --git a/spec/workers/snippet_update_repository_storage_worker_spec.rb b/spec/workers/snippet_update_repository_storage_worker_spec.rb
new file mode 100644
index 00000000000..a48abe4abf7
--- /dev/null
+++ b/spec/workers/snippet_update_repository_storage_worker_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe SnippetUpdateRepositoryStorageWorker do
+ subject { described_class.new }
+
+ it_behaves_like 'an update storage move worker' do
+ let_it_be_with_refind(:container) { create(:snippet, :repository) }
+ let_it_be(:repository_storage_move) { create(:snippet_repository_storage_move) }
+
+ let(:service_klass) { Snippets::UpdateRepositoryStorageService }
+ let(:repository_storage_move_klass) { SnippetRepositoryStorageMove }
+ end
+end