Diffstat (limited to 'spec'). Columns: file mode, path (renames noted in parentheses), lines changed.
-rw-r--r--spec/benchmarks/banzai_benchmark.rb114
-rw-r--r--spec/config/object_store_settings_spec.rb69
-rw-r--r--spec/controllers/admin/cohorts_controller_spec.rb34
-rw-r--r--spec/controllers/admin/runners_controller_spec.rb3
-rw-r--r--spec/controllers/admin/users_controller_spec.rb7
-rw-r--r--spec/controllers/chaos_controller_spec.rb19
-rw-r--r--spec/controllers/concerns/spammable_actions_spec.rb99
-rw-r--r--spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb16
-rw-r--r--spec/controllers/groups/group_members_controller_spec.rb12
-rw-r--r--spec/controllers/groups_controller_spec.rb60
-rw-r--r--spec/controllers/import/bulk_imports_controller_spec.rb20
-rw-r--r--spec/controllers/invites_controller_spec.rb43
-rw-r--r--spec/controllers/projects/discussions_controller_spec.rb7
-rw-r--r--spec/controllers/projects/forks_controller_spec.rb22
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb57
-rw-r--r--spec/controllers/projects/merge_requests/diffs_controller_spec.rb8
-rw-r--r--spec/controllers/projects/merge_requests_controller_spec.rb102
-rw-r--r--spec/controllers/projects/notes_controller_spec.rb4
-rw-r--r--spec/controllers/projects/pipelines/tests_controller_spec.rb24
-rw-r--r--spec/controllers/projects/project_members_controller_spec.rb12
-rw-r--r--spec/controllers/projects/security/configuration_controller_spec.rb53
-rw-r--r--spec/controllers/registrations/experience_levels_controller_spec.rb107
-rw-r--r--spec/controllers/registrations_controller_spec.rb12
-rw-r--r--spec/controllers/repositories/git_http_controller_spec.rb8
-rw-r--r--spec/controllers/search_controller_spec.rb392
-rw-r--r--spec/db/schema_spec.rb1
-rw-r--r--spec/deprecation_toolkit_env.rb99
-rw-r--r--spec/experiments/application_experiment_spec.rb80
-rw-r--r--spec/experiments/members/invite_email_experiment_spec.rb41
-rw-r--r--spec/factories/audit_events.rb15
-rw-r--r--spec/factories/ci/bridge.rb9
-rw-r--r--spec/factories/ci/builds.rb6
-rw-r--r--spec/factories/ci/pipeline_artifacts.rb33
-rw-r--r--spec/factories/ci/pipelines.rb16
-rw-r--r--spec/factories/ci/reports/codequality_degradations.rb98
-rw-r--r--spec/factories/ci/resource.rb2
-rw-r--r--spec/factories/dependency_proxy.rb3
-rw-r--r--spec/factories/merge_request_diffs.rb6
-rw-r--r--spec/factories/merge_requests.rb12
-rw-r--r--spec/factories/packages.rb18
-rw-r--r--spec/factories/packages/debian/group_component.rb9
-rw-r--r--spec/factories/packages/debian/project_component.rb9
-rw-r--r--spec/factories/token_with_ivs.rb9
-rw-r--r--spec/factories/u2f_registrations.rb2
-rw-r--r--spec/features/admin/admin_cohorts_spec.rb33
-rw-r--r--spec/features/admin/admin_disables_git_access_protocol_spec.rb15
-rw-r--r--spec/features/admin/admin_groups_spec.rb2
-rw-r--r--spec/features/admin/admin_projects_spec.rb3
-rw-r--r--spec/features/admin/admin_settings_spec.rb73
-rw-r--r--spec/features/admin/admin_users_spec.rb70
-rw-r--r--spec/features/alert_management/alert_details_spec.rb2
-rw-r--r--spec/features/alerts_settings/user_views_alerts_settings_spec.rb3
-rw-r--r--spec/features/boards/boards_spec.rb2
-rw-r--r--spec/features/boards/sidebar_spec.rb8
-rw-r--r--spec/features/commits_spec.rb3
-rw-r--r--spec/features/discussion_comments/issue_spec.rb2
-rw-r--r--spec/features/discussion_comments/merge_request_spec.rb1
-rw-r--r--spec/features/discussion_comments/snippets_spec.rb29
-rw-r--r--spec/features/groups/import_export/connect_instance_spec.rb16
-rw-r--r--spec/features/groups/navbar_spec.rb8
-rw-r--r--spec/features/groups/settings/packages_and_registries_spec.rb7
-rw-r--r--spec/features/groups/show_spec.rb1
-rw-r--r--spec/features/issuables/issuable_list_spec.rb2
-rw-r--r--spec/features/issues/gfm_autocomplete_spec.rb10
-rw-r--r--spec/features/issues/issue_sidebar_spec.rb35
-rw-r--r--spec/features/issues/issue_state_spec.rb26
-rw-r--r--spec/features/issues/user_toggles_subscription_spec.rb6
-rw-r--r--spec/features/labels_hierarchy_spec.rb5
-rw-r--r--spec/features/markdown/mermaid_spec.rb2
-rw-r--r--spec/features/merge_request/user_closes_reopens_merge_request_state_spec.rb37
-rw-r--r--spec/features/merge_request/user_manages_subscription_spec.rb2
-rw-r--r--spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb4
-rw-r--r--spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb8
-rw-r--r--spec/features/merge_request/user_posts_notes_spec.rb2
-rw-r--r--spec/features/merge_request/user_reverts_merge_request_spec.rb41
-rw-r--r--spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb4
-rw-r--r--spec/features/merge_request/user_sees_merge_widget_spec.rb4
-rw-r--r--spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb234
-rw-r--r--spec/features/merge_request/user_suggests_changes_on_diff_spec.rb17
-rw-r--r--spec/features/merge_request/user_views_open_merge_request_spec.rb16
-rw-r--r--spec/features/profiles/user_visits_notifications_tab_spec.rb1
-rw-r--r--spec/features/projects/commit/cherry_pick_spec.rb160
-rw-r--r--spec/features/projects/commit/user_reverts_commit_spec.rb91
-rw-r--r--spec/features/projects/commits/user_browses_commits_spec.rb9
-rw-r--r--spec/features/projects/compare_spec.rb23
-rw-r--r--spec/features/projects/files/dockerfile_dropdown_spec.rb8
-rw-r--r--spec/features/projects/files/gitignore_dropdown_spec.rb10
-rw-r--r--spec/features/projects/files/gitlab_ci_syntax_yml_dropdown_spec.rb7
-rw-r--r--spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb10
-rw-r--r--spec/features/projects/jobs_spec.rb16
-rw-r--r--spec/features/projects/members/anonymous_user_sees_members_spec.rb2
-rw-r--r--spec/features/projects/members/group_members_spec.rb2
-rw-r--r--spec/features/projects/members/groups_with_access_list_spec.rb2
-rw-r--r--spec/features/projects/members/invite_group_spec.rb5
-rw-r--r--spec/features/projects/members/list_spec.rb8
-rw-r--r--spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb4
-rw-r--r--spec/features/projects/members/sorting_spec.rb2
-rw-r--r--spec/features/projects/members/tabs_spec.rb1
-rw-r--r--spec/features/projects/navbar_spec.rb19
-rw-r--r--spec/features/projects/pages/user_adds_domain_spec.rb185
-rw-r--r--spec/features/projects/pages/user_edits_lets_encrypt_settings_spec.rb (renamed from spec/features/projects/pages_lets_encrypt_spec.rb)2
-rw-r--r--spec/features/projects/pages/user_edits_settings_spec.rb201
-rw-r--r--spec/features/projects/pages_spec.rb411
-rw-r--r--spec/features/projects/pipelines/pipeline_spec.rb15
-rw-r--r--spec/features/projects/pipelines/pipelines_spec.rb106
-rw-r--r--spec/features/projects/services/user_activates_slack_slash_command_spec.rb4
-rw-r--r--spec/features/projects/settings/pipelines_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/repository_settings_spec.rb6
-rw-r--r--spec/features/projects/settings/user_manages_project_members_spec.rb4
-rw-r--r--spec/features/projects/show/user_manages_notifications_spec.rb1
-rw-r--r--spec/features/projects/show/user_sees_git_instructions_spec.rb7
-rw-r--r--spec/features/search/user_searches_for_commits_spec.rb4
-rw-r--r--spec/features/search/user_searches_for_issues_spec.rb2
-rw-r--r--spec/features/search/user_searches_for_merge_requests_spec.rb34
-rw-r--r--spec/features/search/user_searches_for_projects_spec.rb2
-rw-r--r--spec/features/task_lists_spec.rb8
-rw-r--r--spec/features/u2f_spec.rb10
-rw-r--r--spec/features/user_sees_revert_modal_spec.rb27
-rw-r--r--spec/features/webauthn_spec.rb4
-rw-r--r--spec/features/whats_new_spec.rb35
-rw-r--r--spec/finders/autocomplete/users_finder_spec.rb5
-rw-r--r--spec/finders/merge_request/metrics_finder_spec.rb76
-rw-r--r--spec/finders/merge_requests/oldest_per_commit_finder_spec.rb46
-rw-r--r--spec/finders/repositories/commits_with_trailer_finder_spec.rb38
-rw-r--r--spec/finders/terraform/states_finder_spec.rb45
-rw-r--r--spec/finders/user_recent_events_finder_spec.rb14
-rw-r--r--spec/fixtures/api/schemas/entities/codequality_mr_diff_report.json21
-rw-r--r--spec/fixtures/api/schemas/entities/member.json4
-rw-r--r--spec/fixtures/api/schemas/entities/member_user.json1
-rw-r--r--spec/fixtures/api/schemas/graphql/packages/package_composer_details.json12
-rw-r--r--spec/fixtures/api/schemas/graphql/packages/package_composer_metadata.json21
-rw-r--r--spec/fixtures/api/schemas/graphql/packages/package_details.json38
-rw-r--r--spec/fixtures/api/schemas/group_group_links.json6
-rw-r--r--spec/fixtures/api/schemas/group_link/group_group_link.json16
-rw-r--r--spec/fixtures/api/schemas/group_link/group_group_links.json6
-rw-r--r--spec/fixtures/api/schemas/group_link/group_link.json (renamed from spec/fixtures/api/schemas/entities/group_group_link.json)10
-rw-r--r--spec/fixtures/api/schemas/group_link/project_group_link.json16
-rw-r--r--spec/fixtures/api/schemas/group_link/project_group_links.json6
-rw-r--r--spec/fixtures/dependency_proxy/manifest44
-rw-r--r--spec/fixtures/markdown.md.erb8
-rw-r--r--spec/fixtures/packages/composer/package.json1
-rw-r--r--spec/fixtures/pipeline_artifacts/code_quality_mr_diff.json23
-rw-r--r--spec/fixtures/whats_new/invalid.yml2
-rw-r--r--spec/fixtures/whats_new/valid.yml2
-rw-r--r--spec/frontend/__helpers__/graphql_helpers.js14
-rw-r--r--spec/frontend/__helpers__/graphql_helpers_spec.js23
-rw-r--r--spec/frontend/__helpers__/stub_component.js25
-rw-r--r--spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js2
-rw-r--r--spec/frontend/admin/users/components/user_actions_spec.js138
-rw-r--r--spec/frontend/admin/users/components/user_avatar_spec.js64
-rw-r--r--spec/frontend/admin/users/components/user_date_spec.js34
-rw-r--r--spec/frontend/admin/users/components/users_table_spec.js26
-rw-r--r--spec/frontend/admin/users/index_spec.js4
-rw-r--r--spec/frontend/admin/users/mock_data.js1
-rw-r--r--spec/frontend/alert_management/components/alert_management_table_spec.js2
-rw-r--r--spec/frontend/alerts_settings/__snapshots__/alerts_settings_form_spec.js.snap98
-rw-r--r--spec/frontend/alerts_settings/components/__snapshots__/alerts_settings_form_spec.js.snap406
-rw-r--r--spec/frontend/alerts_settings/components/alert_mapping_builder_spec.js (renamed from spec/frontend/alerts_settings/alert_mapping_builder_spec.js)49
-rw-r--r--spec/frontend/alerts_settings/components/alerts_integrations_list_spec.js (renamed from spec/frontend/alerts_settings/alerts_integrations_list_spec.js)0
-rw-r--r--spec/frontend/alerts_settings/components/alerts_settings_form_spec.js (renamed from spec/frontend/alerts_settings/alerts_settings_form_spec.js)240
-rw-r--r--spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js (renamed from spec/frontend/alerts_settings/alerts_settings_wrapper_spec.js)2
-rw-r--r--spec/frontend/alerts_settings/components/mocks/apollo_mock.js (renamed from spec/frontend/alerts_settings/mocks/apollo_mock.js)0
-rw-r--r--spec/frontend/alerts_settings/components/mocks/integrations.json (renamed from spec/frontend/alerts_settings/mocks/integrations.json)0
-rw-r--r--spec/frontend/alerts_settings/components/util.js (renamed from spec/frontend/alerts_settings/util.js)0
-rw-r--r--spec/frontend/alerts_settings/mocks/alertFields.json123
-rw-r--r--spec/frontend/alerts_settings/utils/mapping_transformations_spec.js81
-rw-r--r--spec/frontend/api/api_utils_spec.js4
-rw-r--r--spec/frontend/api_spec.js22
-rw-r--r--spec/frontend/behaviors/autosize_spec.js32
-rw-r--r--spec/frontend/blob/components/__snapshots__/blob_header_spec.js.snap2
-rw-r--r--spec/frontend/blob/components/blob_content_spec.js2
-rw-r--r--spec/frontend/blob/components/blob_header_filepath_spec.js2
-rw-r--r--spec/frontend/blob/suggest_gitlab_ci_yml/components/popover_spec.js2
-rw-r--r--spec/frontend/boards/board_list_deprecated_spec.js2
-rw-r--r--spec/frontend/boards/board_list_helper.js2
-rw-r--r--spec/frontend/boards/board_list_spec.js4
-rw-r--r--spec/frontend/boards/boards_store_spec.js2
-rw-r--r--spec/frontend/boards/boards_util_spec.js17
-rw-r--r--spec/frontend/boards/components/board_assignee_dropdown_spec.js17
-rw-r--r--spec/frontend/boards/components/board_card_layout_deprecated_spec.js158
-rw-r--r--spec/frontend/boards/components/board_card_layout_spec.js65
-rw-r--r--spec/frontend/boards/components/board_configuration_options_spec.js15
-rw-r--r--spec/frontend/boards/components/board_content_spec.js2
-rw-r--r--spec/frontend/boards/components/board_form_spec.js2
-rw-r--r--spec/frontend/boards/components/board_list_header_deprecated_spec.js8
-rw-r--r--spec/frontend/boards/components/board_list_header_spec.js10
-rw-r--r--spec/frontend/boards/components/sidebar/board_sidebar_milestone_select_spec.js14
-rw-r--r--spec/frontend/boards/components/sidebar/board_sidebar_subscription_spec.js2
-rw-r--r--spec/frontend/boards/issue_card_deprecated_spec.js2
-rw-r--r--spec/frontend/boards/issue_card_inner_spec.js2
-rw-r--r--spec/frontend/boards/issue_spec.js2
-rw-r--r--spec/frontend/boards/mock_data.js4
-rw-r--r--spec/frontend/boards/stores/actions_spec.js81
-rw-r--r--spec/frontend/boards/stores/getters_spec.js18
-rw-r--r--spec/frontend/boards/stores/mutations_spec.js42
-rw-r--r--spec/frontend/captcha/init_recaptcha_script_spec.js49
-rw-r--r--spec/frontend/ci_variable_list/store/actions_spec.js2
-rw-r--r--spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap8
-rw-r--r--spec/frontend/clusters/components/applications_spec.js58
-rw-r--r--spec/frontend/clusters_list/store/actions_spec.js2
-rw-r--r--spec/frontend/clusters_list/store/mutations_spec.js2
-rw-r--r--spec/frontend/create_cluster/eks_cluster/store/actions_spec.js2
-rw-r--r--spec/frontend/create_cluster/gke_cluster/components/gke_machine_type_dropdown_spec.js2
-rw-r--r--spec/frontend/create_cluster/gke_cluster/components/gke_project_id_dropdown_spec.js2
-rw-r--r--spec/frontend/deploy_keys/components/key_spec.js2
-rw-r--r--spec/frontend/design_management/components/design_notes/design_discussion_spec.js2
-rw-r--r--spec/frontend/design_management/components/design_overlay_spec.js2
-rw-r--r--spec/frontend/design_management/components/design_sidebar_spec.js2
-rw-r--r--spec/frontend/design_management/components/design_todo_button_spec.js2
-rw-r--r--spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap10
-rw-r--r--spec/frontend/design_management/components/list/item_spec.js50
-rw-r--r--spec/frontend/design_management/pages/design/index_spec.js8
-rw-r--r--spec/frontend/design_management/pages/index_spec.js10
-rw-r--r--spec/frontend/design_management/utils/cache_update_spec.js2
-rw-r--r--spec/frontend/diffs/components/app_spec.js6
-rw-r--r--spec/frontend/diffs/components/compare_versions_spec.js2
-rw-r--r--spec/frontend/diffs/components/diff_content_spec.js2
-rw-r--r--spec/frontend/diffs/components/diff_file_header_spec.js296
-rw-r--r--spec/frontend/diffs/components/diff_file_spec.js63
-rw-r--r--spec/frontend/diffs/components/diff_row_spec.js2
-rw-r--r--spec/frontend/diffs/components/diff_row_utils_spec.js11
-rw-r--r--spec/frontend/diffs/components/diff_view_spec.js12
-rw-r--r--spec/frontend/diffs/components/inline_diff_table_row_spec.js2
-rw-r--r--spec/frontend/diffs/components/parallel_diff_table_row_spec.js2
-rw-r--r--spec/frontend/diffs/store/actions_spec.js2
-rw-r--r--spec/frontend/diffs/store/getters_spec.js22
-rw-r--r--spec/frontend/diffs/store/mutations_spec.js2
-rw-r--r--spec/frontend/diffs/utils/diff_file_spec.js13
-rw-r--r--spec/frontend/diffs/utils/file_reviews_spec.js48
-rw-r--r--spec/frontend/editor/editor_lite_spec.js298
-rw-r--r--spec/frontend/environments/environment_actions_spec.js2
-rw-r--r--spec/frontend/error_tracking/components/error_tracking_list_spec.js2
-rw-r--r--spec/frontend/feature_flags/components/edit_feature_flag_spec.js9
-rw-r--r--spec/frontend/feature_flags/components/form_spec.js43
-rw-r--r--spec/frontend/feature_flags/components/new_feature_flag_spec.js12
-rw-r--r--spec/frontend/filtered_search/recent_searches_root_spec.js49
-rw-r--r--spec/frontend/frequent_items/components/app_spec.js2
-rw-r--r--spec/frontend/frequent_items/components/frequent_items_search_input_spec.js34
-rw-r--r--spec/frontend/frequent_items/store/actions_spec.js2
-rw-r--r--spec/frontend/gfm_auto_complete_spec.js13
-rw-r--r--spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap6
-rw-r--r--spec/frontend/groups/components/app_spec.js4
-rw-r--r--spec/frontend/groups/components/group_folder_spec.js2
-rw-r--r--spec/frontend/groups/components/group_item_spec.js2
-rw-r--r--spec/frontend/groups/components/groups_spec.js4
-rw-r--r--spec/frontend/groups/components/item_actions_spec.js16
-rw-r--r--spec/frontend/groups/members/mock_data.js33
-rw-r--r--spec/frontend/groups/members/utils_spec.js45
-rw-r--r--spec/frontend/ide/components/activity_bar_spec.js17
-rw-r--r--spec/frontend/ide/components/commit_sidebar/actions_spec.js25
-rw-r--r--spec/frontend/ide/components/commit_sidebar/form_spec.js391
-rw-r--r--spec/frontend/ide/components/commit_sidebar/new_merge_request_option_spec.js9
-rw-r--r--spec/frontend/ide/components/ide_sidebar_nav_spec.js3
-rw-r--r--spec/frontend/ide/components/ide_spec.js56
-rw-r--r--spec/frontend/ide/components/preview/navigator_spec.js2
-rw-r--r--spec/frontend/ide/lib/decorations/controller_spec.js2
-rw-r--r--spec/frontend/ide/lib/editor_spec.js3
-rw-r--r--spec/frontend/ide/stores/actions_spec.js2
-rw-r--r--spec/frontend/ide/stores/getters_spec.js5
-rw-r--r--spec/frontend/ide/stores/modules/commit/actions_spec.js17
-rw-r--r--spec/frontend/ide/stores/modules/commit/getters_spec.js9
-rw-r--r--spec/frontend/import_entities/import_groups/components/import_table_spec.js156
-rw-r--r--spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js103
-rw-r--r--spec/frontend/import_entities/import_groups/graphql/services/status_poller_spec.js88
-rw-r--r--spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap4
-rw-r--r--spec/frontend/incidents_settings/components/pagerduty_form_spec.js2
-rw-r--r--spec/frontend/integrations/edit/components/integration_form_spec.js157
-rw-r--r--spec/frontend/integrations/edit/components/jira_issues_fields_spec.js69
-rw-r--r--spec/frontend/integrations/edit/store/actions_spec.js33
-rw-r--r--spec/frontend/integrations/edit/store/mutations_spec.js26
-rw-r--r--spec/frontend/integrations/edit/store/state_spec.js3
-rw-r--r--spec/frontend/integrations/integration_settings_form_spec.js79
-rw-r--r--spec/frontend/issuable_list/components/issuable_item_spec.js17
-rw-r--r--spec/frontend/issue_show/components/app_spec.js8
-rw-r--r--spec/frontend/issue_show/components/incidents/incident_tabs_spec.js2
-rw-r--r--spec/frontend/issue_show/issue_spec.js2
-rw-r--r--spec/frontend/issues_list/components/issuable_spec.js2
-rw-r--r--spec/frontend/jira_connect/api_spec.js2
-rw-r--r--spec/frontend/jira_connect/components/app_spec.js36
-rw-r--r--spec/frontend/jira_connect/components/groups_list_item_spec.js109
-rw-r--r--spec/frontend/jira_connect/components/groups_list_spec.js30
-rw-r--r--spec/frontend/jira_connect/index_spec.js56
-rw-r--r--spec/frontend/jira_connect/mock_data.js2
-rw-r--r--spec/frontend/jira_import/components/jira_import_form_spec.js79
-rw-r--r--spec/frontend/jobs/components/erased_block_spec.js4
-rw-r--r--spec/frontend/jobs/components/job_app_spec.js1
-rw-r--r--spec/frontend/jobs/components/job_sidebar_details_container_spec.js9
-rw-r--r--spec/frontend/jobs/components/job_sidebar_retry_button_spec.js2
-rw-r--r--spec/frontend/jobs/components/trigger_block_spec.js116
-rw-r--r--spec/frontend/lib/utils/array_utility_spec.js32
-rw-r--r--spec/frontend/lib/utils/color_utils_spec.js17
-rw-r--r--spec/frontend/lib/utils/common_utils_spec.js8
-rw-r--r--spec/frontend/lib/utils/datetime_utility_spec.js343
-rw-r--r--spec/frontend/lib/utils/unit_format/formatter_factory_spec.js21
-rw-r--r--spec/frontend/logs/components/log_advanced_filters_spec.js3
-rw-r--r--spec/frontend/logs/components/log_simple_filters_spec.js3
-rw-r--r--spec/frontend/logs/stores/actions_spec.js2
-rw-r--r--spec/frontend/members/components/app_spec.js (renamed from spec/frontend/groups/members/components/app_spec.js)8
-rw-r--r--spec/frontend/members/components/avatars/group_avatar_spec.js2
-rw-r--r--spec/frontend/members/components/avatars/invite_avatar_spec.js2
-rw-r--r--spec/frontend/members/components/avatars/user_avatar_spec.js2
-rw-r--r--spec/frontend/members/components/table/member_action_buttons_spec.js2
-rw-r--r--spec/frontend/members/components/table/member_avatar_spec.js2
-rw-r--r--spec/frontend/members/components/table/members_table_cell_spec.js22
-rw-r--r--spec/frontend/members/components/table/members_table_spec.js9
-rw-r--r--spec/frontend/members/components/table/role_dropdown_spec.js3
-rw-r--r--spec/frontend/members/index_spec.js (renamed from spec/frontend/groups/members/index_spec.js)20
-rw-r--r--spec/frontend/members/mock_data.js6
-rw-r--r--spec/frontend/members/store/actions_spec.js12
-rw-r--r--spec/frontend/members/store/mutations_spec.js64
-rw-r--r--spec/frontend/members/utils_spec.js98
-rw-r--r--spec/frontend/merge_request/components/status_box_spec.js6
-rw-r--r--spec/frontend/milestones/milestone_combobox_spec.js2
-rw-r--r--spec/frontend/monitoring/components/charts/anomaly_spec.js2
-rw-r--r--spec/frontend/monitoring/components/charts/single_stat_spec.js11
-rw-r--r--spec/frontend/monitoring/components/charts/time_series_spec.js4
-rw-r--r--spec/frontend/monitoring/components/dashboard_actions_menu_spec.js4
-rw-r--r--spec/frontend/monitoring/components/dashboard_header_spec.js2
-rw-r--r--spec/frontend/monitoring/components/dashboard_panel_builder_spec.js5
-rw-r--r--spec/frontend/monitoring/components/dashboard_panel_spec.js32
-rw-r--r--spec/frontend/monitoring/components/dashboard_spec.js2
-rw-r--r--spec/frontend/monitoring/components/dashboard_url_time_spec.js4
-rw-r--r--spec/frontend/monitoring/csv_export_spec.js2
-rw-r--r--spec/frontend/monitoring/mock_data.js2
-rw-r--r--spec/frontend/monitoring/requests/index_spec.js2
-rw-r--r--spec/frontend/monitoring/store/utils_spec.js2
-rw-r--r--spec/frontend/monitoring/store/variable_mapping_spec.js2
-rw-r--r--spec/frontend/notes/components/comment_form_spec.js9
-rw-r--r--spec/frontend/notes/components/discussion_actions_spec.js2
-rw-r--r--spec/frontend/notes/components/discussion_counter_spec.js2
-rw-r--r--spec/frontend/notes/components/note_actions_spec.js7
-rw-r--r--spec/frontend/notes/components/note_form_spec.js28
-rw-r--r--spec/frontend/notes/components/notes_app_spec.js2
-rw-r--r--spec/frontend/notes/stores/actions_spec.js63
-rw-r--r--spec/frontend/notes/stores/mutation_spec.js13
-rw-r--r--spec/frontend/notifications/components/notifications_dropdown_spec.js240
-rw-r--r--spec/frontend/onboarding_issues/index_spec.js137
-rw-r--r--spec/frontend/packages/details/store/getters_spec.js2
-rw-r--r--spec/frontend/packages/list/components/__snapshots__/packages_filter_spec.js.snap14
-rw-r--r--spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap555
-rw-r--r--spec/frontend/packages/list/components/packages_filter_spec.js50
-rw-r--r--spec/frontend/packages/list/components/packages_list_app_spec.js49
-rw-r--r--spec/frontend/packages/list/components/packages_search_spec.js145
-rw-r--r--spec/frontend/packages/list/components/packages_sort_spec.js90
-rw-r--r--spec/frontend/packages/list/components/tokens/package_type_token_spec.js48
-rw-r--r--spec/frontend/packages/list/stores/actions_spec.js12
-rw-r--r--spec/frontend/packages/list/stores/mutations_spec.js9
-rw-r--r--spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap10
-rw-r--r--spec/frontend/packages/shared/components/packages_list_loader_spec.js4
-rw-r--r--spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js99
-rw-r--r--spec/frontend/packages_and_registries/settings/group/mock_data.js12
-rw-r--r--spec/frontend/pages/projects/edit/mount_search_settings_spec.js25
-rw-r--r--spec/frontend/pages/projects/graphs/code_coverage_spec.js2
-rw-r--r--spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js8
-rw-r--r--spec/frontend/pipeline_editor/components/commit/commit_form_spec.js6
-rw-r--r--spec/frontend/pipeline_editor/components/commit/commit_section_spec.js223
-rw-r--r--spec/frontend/pipeline_editor/components/header/pipeline_editor_header_spec.js34
-rw-r--r--spec/frontend/pipeline_editor/components/header/validation_segment_spec.js (renamed from spec/frontend/pipeline_editor/components/info/validation_segment_spec.js)9
-rw-r--r--spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js129
-rw-r--r--spec/frontend/pipeline_editor/components/text_editor_spec.js93
-rw-r--r--spec/frontend/pipeline_editor/components/ui/confirm_unsaved_changes_dialog_spec.js42
-rw-r--r--spec/frontend/pipeline_editor/graphql/resolvers_spec.js6
-rw-r--r--spec/frontend/pipeline_editor/pipeline_editor_app_spec.js424
-rw-r--r--spec/frontend/pipeline_editor/pipeline_editor_home_spec.js50
-rw-r--r--spec/frontend/pipeline_new/components/pipeline_new_form_spec.js113
-rw-r--r--spec/frontend/pipelines/blank_state_spec.js21
-rw-r--r--spec/frontend/pipelines/graph/graph_component_legacy_spec.js2
-rw-r--r--spec/frontend/pipelines/graph/graph_component_spec.js12
-rw-r--r--spec/frontend/pipelines/graph/linked_pipeline_spec.js7
-rw-r--r--spec/frontend/pipelines/graph/stage_column_component_spec.js4
-rw-r--r--spec/frontend/pipelines/graph_shared/__snapshots__/links_inner_spec.js.snap23
-rw-r--r--spec/frontend/pipelines/graph_shared/links_inner_spec.js197
-rw-r--r--spec/frontend/pipelines/graph_shared/links_layer_spec.js (renamed from spec/frontend/pipelines/shared/links_layer_spec.js)0
-rw-r--r--spec/frontend/pipelines/header_component_spec.js8
-rw-r--r--spec/frontend/pipelines/legacy_header_component_spec.js116
-rw-r--r--spec/frontend/pipelines/pipeline_graph/mock_data.js135
-rw-r--r--spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js2
-rw-r--r--spec/frontend/pipelines/pipeline_url_spec.js28
-rw-r--r--spec/frontend/pipelines/pipelines_actions_spec.js64
-rw-r--r--spec/frontend/pipelines/pipelines_spec.js6
-rw-r--r--spec/frontend/pipelines/pipelines_table_row_spec.js4
-rw-r--r--spec/frontend/pipelines/stage_spec.js278
-rw-r--r--spec/frontend/pipelines/test_reports/stores/actions_spec.js5
-rw-r--r--spec/frontend/pipelines/test_reports/test_case_details_spec.js35
-rw-r--r--spec/frontend/pipelines/test_reports/test_suite_table_spec.js2
-rw-r--r--spec/frontend/pipelines/tokens/pipeline_trigger_author_token_spec.js2
-rw-r--r--spec/frontend/profile/account/components/delete_account_modal_spec.js2
-rw-r--r--spec/frontend/projects/commit/components/branches_dropdown_spec.js2
-rw-r--r--spec/frontend/projects/commit/components/form_modal_spec.js5
-rw-r--r--spec/frontend/projects/commit/store/actions_spec.js2
-rw-r--r--spec/frontend/projects/commit_box/info/load_branches_spec.js2
-rw-r--r--spec/frontend/projects/compare/components/app_spec.js116
-rw-r--r--spec/frontend/projects/compare/components/revision_dropdown_spec.js92
-rw-r--r--spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap4
-rw-r--r--spec/frontend/projects/members/utils_spec.js14
-rw-r--r--spec/frontend/projects/pipelines/charts/components/app_spec.js118
-rw-r--r--spec/frontend/projects/pipelines/charts/components/pipeline_charts_spec.js51
-rw-r--r--spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js236
-rw-r--r--spec/frontend/projects/settings_service_desk/components/service_desk_setting_spec.js190
-rw-r--r--spec/frontend/projects/settings_service_desk/services/service_desk_service_spec.js111
-rw-r--r--spec/frontend/registry/explorer/components/delete_image_spec.js153
-rw-r--r--spec/frontend/registry/explorer/components/details_page/empty_state_spec.js54
-rw-r--r--spec/frontend/registry/explorer/components/details_page/empty_tags_state_spec.js43
-rw-r--r--spec/frontend/registry/explorer/components/list_page/group_empty_state_spec.js2
-rw-r--r--spec/frontend/registry/explorer/components/list_page/project_empty_state_spec.js2
-rw-r--r--spec/frontend/registry/explorer/mock_data.js6
-rw-r--r--spec/frontend/registry/explorer/pages/details_spec.js58
-rw-r--r--spec/frontend/registry/explorer/pages/list_spec.js49
-rw-r--r--spec/frontend/releases/components/app_index_spec.js2
-rw-r--r--spec/frontend/releases/components/release_block_footer_spec.js2
-rw-r--r--spec/frontend/releases/stores/modules/detail/actions_spec.js2
-rw-r--r--spec/frontend/releases/stores/modules/list/actions_spec.js2
-rw-r--r--spec/frontend/releases/stores/modules/list/mutations_spec.js2
-rw-r--r--spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js5
-rw-r--r--spec/frontend/reports/codequality_report/mock_data.js50
-rw-r--r--spec/frontend/reports/codequality_report/store/actions_spec.js169
-rw-r--r--spec/frontend/reports/codequality_report/store/mutations_spec.js14
-rw-r--r--spec/frontend/reports/codequality_report/store/utils/codequality_comparison_spec.js18
-rw-r--r--spec/frontend/reports/components/grouped_test_reports_app_spec.js3
-rw-r--r--spec/frontend/reports/components/summary_row_spec.js4
-rw-r--r--spec/frontend/repository/components/last_commit_spec.js96
-rw-r--r--spec/frontend/search/highlight_blob_search_result_spec.js3
-rw-r--r--spec/frontend/search/index_spec.js3
-rw-r--r--spec/frontend/search/mock_data.js22
-rw-r--r--spec/frontend/search/sort/components/app_spec.js168
-rw-r--r--spec/frontend/search/topbar/components/app_spec.js113
-rw-r--r--spec/frontend/search/topbar/components/project_filter_spec.js2
-rw-r--r--spec/frontend/search_settings/index_spec.js37
-rw-r--r--spec/frontend/search_settings/mount_spec.js36
-rw-r--r--spec/frontend/search_spec.js23
-rw-r--r--spec/frontend/serverless/components/environment_row_spec.js2
-rw-r--r--spec/frontend/serverless/store/actions_spec.js2
-rw-r--r--spec/frontend/sidebar/__snapshots__/todo_spec.js.snap2
-rw-r--r--spec/frontend/sidebar/assignees_realtime_spec.js2
-rw-r--r--spec/frontend/sidebar/assignees_spec.js2
-rw-r--r--spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js2
-rw-r--r--spec/frontend/sidebar/reviewers_spec.js9
-rw-r--r--spec/frontend/sidebar/subscriptions_spec.js25
-rw-r--r--spec/frontend/sidebar/todo_spec.js4
-rw-r--r--spec/frontend/snippets/components/show_spec.js2
-rw-r--r--spec/frontend/static_site_editor/components/edit_meta_modal_spec.js2
-rw-r--r--spec/frontend/static_site_editor/pages/success_spec.js2
-rw-r--r--spec/frontend/static_site_editor/services/front_matterify_spec.js3
-rw-r--r--spec/frontend/static_site_editor/services/parse_source_file_spec.js3
-rw-r--r--spec/frontend/terraform/components/states_table_actions_spec.js89
-rw-r--r--spec/frontend/terraform/components/states_table_spec.js19
-rw-r--r--spec/frontend/terraform/components/terraform_list_spec.js25
-rw-r--r--spec/frontend/user_lists/components/edit_user_list_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_pipeline_container_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_status_icon_spec.js46
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js4
-rw-r--r--spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_pipeline_failed_spec.js.snap24
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js6
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js10
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js16
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js26
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_pipeline_failed_spec.js39
-rw-r--r--spec/frontend/vue_mr_widget/components/terraform/mr_widget_terraform_container_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/components/terraform/terraform_plan_spec.js8
-rw-r--r--spec/frontend/vue_mr_widget/mr_widget_options_spec.js75
-rw-r--r--spec/frontend/vue_shared/alert_details/alert_details_spec.js (renamed from spec/frontend/alert_management/components/alert_details_spec.js)49
-rw-r--r--spec/frontend/vue_shared/alert_details/alert_management_sidebar_todo_spec.js (renamed from spec/frontend/alert_management/components/alert_management_sidebar_todo_spec.js)8
-rw-r--r--spec/frontend/vue_shared/alert_details/alert_metrics_spec.js (renamed from spec/frontend/alert_management/components/alert_metrics_spec.js)4
-rw-r--r--spec/frontend/vue_shared/alert_details/alert_status_spec.js (renamed from spec/frontend/alert_management/components/alert_status_spec.js)27
-rw-r--r--spec/frontend/vue_shared/alert_details/alert_summary_row_spec.js (renamed from spec/frontend/alert_management/components/alert_summary_row_spec.js)2
-rw-r--r--spec/frontend/vue_shared/alert_details/mocks/alerts.json (renamed from spec/frontend/alert_management/mocks/alerts.json)0
-rw-r--r--spec/frontend/vue_shared/alert_details/sidebar/alert_managment_sidebar_assignees_spec.js (renamed from spec/frontend/alert_management/components/sidebar/alert_managment_sidebar_assignees_spec.js)8
-rw-r--r--spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_spec.js (renamed from spec/frontend/alert_management/components/sidebar/alert_sidebar_spec.js)6
-rw-r--r--spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_status_spec.js (renamed from spec/frontend/alert_management/components/sidebar/alert_sidebar_status_spec.js)33
-rw-r--r--spec/frontend/vue_shared/alert_details/system_notes/alert_management_system_note_spec.js (renamed from spec/frontend/alert_management/components/system_notes/alert_management_system_note_spec.js)4
-rw-r--r--spec/frontend/vue_shared/components/color_picker/color_picker_spec.js39
-rw-r--r--spec/frontend/vue_shared/components/editor_lite_spec.js9
-rw-r--r--spec/frontend/vue_shared/components/file_row_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/gfm_autocomplete/__snapshots__/utils_spec.js.snap8
-rw-r--r--spec/frontend/vue_shared/components/gl_countdown_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/gl_modal_vuex_spec.js5
-rw-r--r--spec/frontend/vue_shared/components/help_popover_spec.js65
-rw-r--r--spec/frontend/vue_shared/components/issue/issue_milestone_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/markdown/field_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/modal_copy_button_spec.js3
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/toolbar_item_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/runner_instructions/mock_data.js107
-rw-r--r--spec/frontend/vue_shared/components/runner_instructions/runner_instructions_spec.js113
-rw-r--r--spec/frontend/vue_shared/components/settings/__snapshots__/settings_block_spec.js.snap43
-rw-r--r--spec/frontend/vue_shared/components/settings/settings_block_spec.js86
-rw-r--r--spec/frontend/vue_shared/components/todo_button_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js2
-rw-r--r--spec/frontend/vue_shared/directives/track_event_spec.js2
-rw-r--r--spec/frontend/vue_shared/security_reports/mock_data.js17
-rw-r--r--spec/frontend/vue_shared/security_reports/security_reports_app_spec.js368
-rw-r--r--spec/frontend/whats_new/store/actions_spec.js2
-rw-r--r--spec/frontend_integration/ide/helpers/start.js2
-rw-r--r--spec/frontend_integration/ide/user_opens_file_spec.js2
-rw-r--r--spec/frontend_integration/ide/user_opens_ide_spec.js2
-rw-r--r--spec/frontend_integration/test_helpers/fixtures.js8
-rw-r--r--spec/graphql/mutations/concerns/mutations/can_mutate_spammable_spec.rb46
-rw-r--r--spec/graphql/mutations/security/ci_configuration/configure_sast_spec.rb120
-rw-r--r--spec/graphql/resolvers/base_resolver_spec.rb4
-rw-r--r--spec/graphql/resolvers/board_list_issues_resolver_spec.rb6
-rw-r--r--spec/graphql/resolvers/board_lists_resolver_spec.rb12
-rw-r--r--spec/graphql/resolvers/issues_resolver_spec.rb12
-rw-r--r--spec/graphql/resolvers/merge_requests_resolver_spec.rb62
-rw-r--r--spec/graphql/resolvers/package_details_resolver_spec.rb5
-rw-r--r--spec/graphql/resolvers/release_milestones_resolver_spec.rb6
-rw-r--r--spec/graphql/resolvers/terraform/states_resolver_spec.rb20
-rw-r--r--spec/graphql/types/ci_configuration/sast/analyzers_entity_input_type_spec.rb9
-rw-r--r--spec/graphql/types/ci_configuration/sast/analyzers_entity_type_spec.rb11
-rw-r--r--spec/graphql/types/ci_configuration/sast/entity_input_type_spec.rb9
-rw-r--r--spec/graphql/types/ci_configuration/sast/entity_type_spec.rb11
-rw-r--r--spec/graphql/types/ci_configuration/sast/input_type_spec.rb9
-rw-r--r--spec/graphql/types/ci_configuration/sast/options_entity_spec.rb11
-rw-r--r--spec/graphql/types/ci_configuration/sast/type_spec.rb11
-rw-r--r--spec/graphql/types/ci_configuration/sast/ui_component_size_enum_spec.rb11
-rw-r--r--spec/graphql/types/packages/composer/details_type_spec.rb23
-rw-r--r--spec/graphql/types/packages/composer/metadatum_type_spec.rb4
-rw-r--r--spec/graphql/types/packages/package_type_spec.rb7
-rw-r--r--spec/graphql/types/packages/package_without_versions_type_spec.rb13
-rw-r--r--spec/graphql/types/project_type_spec.rb168
-rw-r--r--spec/graphql/types/query_type_spec.rb6
-rw-r--r--spec/helpers/commits_helper_spec.rb31
-rw-r--r--spec/helpers/container_registry_helper_spec.rb4
-rw-r--r--spec/helpers/diff_helper_spec.rb24
-rw-r--r--spec/helpers/enable_search_settings_helper_spec.rb21
-rw-r--r--spec/helpers/groups/group_members_helper_spec.rb16
-rw-r--r--spec/helpers/groups_helper_spec.rb78
-rw-r--r--spec/helpers/invite_members_helper_spec.rb87
-rw-r--r--spec/helpers/issuables_helper_spec.rb46
-rw-r--r--spec/helpers/issues_helper_spec.rb27
-rw-r--r--spec/helpers/jira_connect_helper_spec.rb41
-rw-r--r--spec/helpers/notes_helper_spec.rb11
-rw-r--r--spec/helpers/operations_helper_spec.rb2
-rw-r--r--spec/helpers/projects/project_members_helper_spec.rb56
-rw-r--r--spec/helpers/projects_helper_spec.rb98
-rw-r--r--spec/helpers/search_helper_spec.rb21
-rw-r--r--spec/helpers/sorting_helper_spec.rb18
-rw-r--r--spec/helpers/stat_anchors_helper_spec.rb4
-rw-r--r--spec/helpers/tree_helper_spec.rb88
-rw-r--r--spec/helpers/user_callouts_helper_spec.rb20
-rw-r--r--spec/initializers/validate_puma_spec.rb92
-rw-r--r--spec/javascripts/test_bundle.js8
-rw-r--r--spec/lib/api/entities/user_spec.rb12
-rw-r--r--spec/lib/atlassian/jira_connect/client_spec.rb66
-rw-r--r--spec/lib/atlassian/jira_connect/serializers/feature_flag_entity_spec.rb5
-rw-r--r--spec/lib/backup/files_spec.rb10
-rw-r--r--spec/lib/banzai/filter/asset_proxy_filter_spec.rb16
-rw-r--r--spec/lib/banzai/filter/truncate_source_filter_spec.rb2
-rw-r--r--spec/lib/banzai/pipeline/full_pipeline_spec.rb12
-rw-r--r--spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb118
-rw-r--r--spec/lib/banzai/pipeline/pre_process_pipeline_spec.rb2
-rw-r--r--spec/lib/bulk_imports/common/extractors/graphql_extractor_spec.rb66
-rw-r--r--spec/lib/bulk_imports/common/loaders/entity_loader_spec.rb2
-rw-r--r--spec/lib/bulk_imports/common/transformers/hash_key_digger_spec.rb28
-rw-r--r--spec/lib/bulk_imports/common/transformers/underscorify_keys_transformer_spec.rb27
-rw-r--r--spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb24
-rw-r--r--spec/lib/bulk_imports/groups/graphql/get_group_query_spec.rb31
-rw-r--r--spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb31
-rw-r--r--spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb17
-rw-r--r--spec/lib/bulk_imports/groups/loaders/labels_loader_spec.rb30
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb49
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb116
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb8
-rw-r--r--spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb24
-rw-r--r--spec/lib/bulk_imports/importers/group_importer_spec.rb13
-rw-r--r--spec/lib/bulk_imports/pipeline/context_spec.rb38
-rw-r--r--spec/lib/bulk_imports/pipeline/extracted_data_spec.rb53
-rw-r--r--spec/lib/bulk_imports/pipeline/runner_spec.rb76
-rw-r--r--spec/lib/gitlab/access/branch_protection_spec.rb10
-rw-r--r--spec/lib/gitlab/auth/ip_rate_limiter_spec.rb3
-rw-r--r--spec/lib/gitlab/auth/u2f_webauthn_converter_spec.rb29
-rw-r--r--spec/lib/gitlab/background_migration/remove_duplicate_vulnerabilities_findings_spec.rb135
-rw-r--r--spec/lib/gitlab/background_migration_spec.rb2
-rw-r--r--spec/lib/gitlab/changelog/committer_spec.rb128
-rw-r--r--spec/lib/gitlab/changelog/config_spec.rb96
-rw-r--r--spec/lib/gitlab/changelog/generator_spec.rb164
-rw-r--r--spec/lib/gitlab/changelog/release_spec.rb107
-rw-r--r--spec/lib/gitlab/changelog/template/compiler_spec.rb136
-rw-r--r--spec/lib/gitlab/ci/badge/coverage/metadata_spec.rb (renamed from spec/lib/gitlab/badge/coverage/metadata_spec.rb)4
-rw-r--r--spec/lib/gitlab/ci/badge/coverage/report_spec.rb (renamed from spec/lib/gitlab/badge/coverage/report_spec.rb)2
-rw-r--r--spec/lib/gitlab/ci/badge/coverage/template_spec.rb (renamed from spec/lib/gitlab/badge/coverage/template_spec.rb)2
-rw-r--r--spec/lib/gitlab/ci/badge/pipeline/metadata_spec.rb (renamed from spec/lib/gitlab/badge/pipeline/metadata_spec.rb)4
-rw-r--r--spec/lib/gitlab/ci/badge/pipeline/status_spec.rb (renamed from spec/lib/gitlab/badge/pipeline/status_spec.rb)2
-rw-r--r--spec/lib/gitlab/ci/badge/pipeline/template_spec.rb (renamed from spec/lib/gitlab/badge/pipeline/template_spec.rb)2
-rw-r--r--spec/lib/gitlab/ci/badge/shared/metadata.rb (renamed from spec/lib/gitlab/badge/shared/metadata.rb)0
-rw-r--r--spec/lib/gitlab/ci/build/credentials/registry/dependency_proxy_spec.rb43
-rw-r--r--spec/lib/gitlab/ci/build/credentials/registry/gitlab_registry_spec.rb (renamed from spec/lib/gitlab/ci/build/credentials/registry_spec.rb)2
-rw-r--r--spec/lib/gitlab/ci/build/rules_spec.rb29
-rw-r--r--spec/lib/gitlab/ci/charts_spec.rb82
-rw-r--r--spec/lib/gitlab/ci/config/entry/cache_spec.rb10
-rw-r--r--spec/lib/gitlab/ci/config/entry/job_spec.rb10
-rw-r--r--spec/lib/gitlab/ci/config/entry/processable_spec.rb29
-rw-r--r--spec/lib/gitlab/ci/config/external/mapper_spec.rb14
-rw-r--r--spec/lib/gitlab/ci/cron_parser_spec.rb11
-rw-r--r--spec/lib/gitlab/ci/parsers/instrumentation_spec.rb27
-rw-r--r--spec/lib/gitlab/ci/parsers_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb14
-rw-r--r--spec/lib/gitlab/ci/reports/codequality_mr_diff_spec.rb58
-rw-r--r--spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb58
-rw-r--r--spec/lib/gitlab/ci/reports/codequality_reports_spec.rb58
-rw-r--r--spec/lib/gitlab/ci/trace/chunked_io_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/variables/helpers_spec.rb103
-rw-r--r--spec/lib/gitlab/cleanup/orphan_job_artifact_files_spec.rb3
-rw-r--r--spec/lib/gitlab/cluster/lifecycle_events_spec.rb85
-rw-r--r--spec/lib/gitlab/composer/cache_spec.rb133
-rw-r--r--spec/lib/gitlab/composer/version_index_spec.rb16
-rw-r--r--spec/lib/gitlab/conan_token_spec.rb2
-rw-r--r--spec/lib/gitlab/crypto_helper_spec.rb78
-rw-r--r--spec/lib/gitlab/current_settings_spec.rb28
-rw-r--r--spec/lib/gitlab/data_builder/build_spec.rb4
-rw-r--r--spec/lib/gitlab/data_builder/pipeline_spec.rb4
-rw-r--r--spec/lib/gitlab/database/migration_helpers/v2_spec.rb221
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb253
-rw-r--r--spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb3
-rw-r--r--spec/lib/gitlab/database/with_lock_retries_spec.rb4
-rw-r--r--spec/lib/gitlab/diff/char_diff_spec.rb77
-rw-r--r--spec/lib/gitlab/diff/file_collection_sorter_spec.rb10
-rw-r--r--spec/lib/gitlab/diff/highlight_cache_spec.rb18
-rw-r--r--spec/lib/gitlab/diff/inline_diff_spec.rb27
-rw-r--r--spec/lib/gitlab/experimentation/controller_concern_spec.rb27
-rw-r--r--spec/lib/gitlab/experimentation/experiment_spec.rb3
-rw-r--r--spec/lib/gitlab/experimentation_spec.rb65
-rw-r--r--spec/lib/gitlab/file_finder_spec.rb8
-rw-r--r--spec/lib/gitlab/git/commit_spec.rb3
-rw-r--r--spec/lib/gitlab/git/diff_spec.rb7
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb5
-rw-r--r--spec/lib/gitlab/graphql/pagination/connections_spec.rb97
-rw-r--r--spec/lib/gitlab/hook_data/group_builder_spec.rb68
-rw-r--r--spec/lib/gitlab/hook_data/subgroup_builder_spec.rb52
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml5
-rw-r--r--spec/lib/gitlab/import_export/design_repo_restorer_spec.rb4
-rw-r--r--spec/lib/gitlab/import_export/design_repo_saver_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/fork_spec.rb4
-rw-r--r--spec/lib/gitlab/import_export/group/tree_restorer_spec.rb1
-rw-r--r--spec/lib/gitlab/import_export/importer_spec.rb4
-rw-r--r--spec/lib/gitlab/import_export/repo_restorer_spec.rb10
-rw-r--r--spec/lib/gitlab/import_export/repo_saver_spec.rb10
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml4
-rw-r--r--spec/lib/gitlab/import_export/saver_spec.rb12
-rw-r--r--spec/lib/gitlab/import_export/wiki_repo_saver_spec.rb2
-rw-r--r--spec/lib/gitlab/instrumentation/redis_cluster_validator_spec.rb1
-rw-r--r--spec/lib/gitlab/instrumentation_helper_spec.rb6
-rw-r--r--spec/lib/gitlab/kas_spec.rb44
-rw-r--r--spec/lib/gitlab/metrics/subscribers/external_http_spec.rb172
-rw-r--r--spec/lib/gitlab/metrics/subscribers/rack_attack_spec.rb203
-rw-r--r--spec/lib/gitlab/patch/prependable_spec.rb18
-rw-r--r--spec/lib/gitlab/performance_bar/stats_spec.rb8
-rw-r--r--spec/lib/gitlab/rack_attack/instrumented_cache_store_spec.rb89
-rw-r--r--spec/lib/gitlab/rack_attack_spec.rb3
-rw-r--r--spec/lib/gitlab/repository_cache_adapter_spec.rb7
-rw-r--r--spec/lib/gitlab/search/query_spec.rb18
-rw-r--r--spec/lib/gitlab/search_results_spec.rb7
-rw-r--r--spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb3
-rw-r--r--spec/lib/gitlab/suggestions/commit_message_spec.rb11
-rw-r--r--spec/lib/gitlab/template/finders/global_template_finder_spec.rb23
-rw-r--r--spec/lib/gitlab/terraform/state_migration_helper_spec.rb21
-rw-r--r--spec/lib/gitlab/tracking/standard_context_spec.rb50
-rw-r--r--spec/lib/gitlab/url_blocker_spec.rb46
-rw-r--r--spec/lib/gitlab/url_blockers/url_allowlist_spec.rb28
-rw-r--r--spec/lib/gitlab/usage/docs/renderer_spec.rb21
-rw-r--r--spec/lib/gitlab/usage/docs/value_formatter_spec.rb26
-rw-r--r--spec/lib/gitlab/usage/metric_definition_spec.rb23
-rw-r--r--spec/lib/gitlab/usage/metric_spec.rb8
-rw-r--r--spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb256
-rw-r--r--spec/lib/gitlab/usage_data_counters/aggregated_metrics_spec.rb4
-rw-r--r--spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb195
-rw-r--r--spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb48
-rw-r--r--spec/lib/gitlab/usage_data_counters/quick_action_activity_unique_counter_spec.rb163
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb144
-rw-r--r--spec/lib/gitlab/utils/markdown_spec.rb44
-rw-r--r--spec/lib/gitlab/utils/override_spec.rb67
-rw-r--r--spec/lib/gitlab/utils/usage_data_spec.rb103
-rw-r--r--spec/lib/gitlab/utils_spec.rb8
-rw-r--r--spec/lib/gitlab_spec.rb24
-rw-r--r--spec/lib/peek/views/external_http_spec.rb191
-rw-r--r--spec/lib/release_highlights/validator/entry_spec.rb2
-rw-r--r--spec/lib/release_highlights/validator_spec.rb2
-rw-r--r--spec/lib/security/ci_configuration/sast_build_actions_spec.rb539
-rw-r--r--spec/mailers/notify_spec.rb13
-rw-r--r--spec/migrations/20201112130710_schedule_remove_duplicate_vulnerabilities_findings_spec.rb140
-rw-r--r--spec/migrations/20210119122354_alter_vsa_issue_first_mentioned_in_commit_value_spec.rb30
-rw-r--r--spec/migrations/add_has_external_issue_tracker_trigger_spec.rb164
-rw-r--r--spec/migrations/encrypt_feature_flags_clients_tokens_spec.rb2
-rw-r--r--spec/migrations/schedule_migrate_security_scans_spec.rb11
-rw-r--r--spec/models/active_session_spec.rb2
-rw-r--r--spec/models/application_setting_spec.rb18
-rw-r--r--spec/models/bulk_imports/entity_spec.rb31
-rw-r--r--spec/models/ci/build_dependencies_spec.rb28
-rw-r--r--spec/models/ci/build_spec.rb66
-rw-r--r--spec/models/ci/build_trace_chunk_spec.rb2
-rw-r--r--spec/models/ci/build_trace_chunks/fog_spec.rb21
-rw-r--r--spec/models/ci/pipeline_artifact_spec.rb100
-rw-r--r--spec/models/ci/pipeline_spec.rb107
-rw-r--r--spec/models/ci/processable_spec.rb54
-rw-r--r--spec/models/ci/resource_group_spec.rb12
-rw-r--r--spec/models/ci/resource_spec.rb2
-rw-r--r--spec/models/ci/stage_spec.rb3
-rw-r--r--spec/models/commit_spec.rb13
-rw-r--r--spec/models/commit_status_spec.rb17
-rw-r--r--spec/models/concerns/atomic_internal_id_spec.rb152
-rw-r--r--spec/models/concerns/bulk_insert_safe_spec.rb7
-rw-r--r--spec/models/concerns/featurable_spec.rb16
-rw-r--r--spec/models/concerns/issuable_spec.rb19
-rw-r--r--spec/models/concerns/spammable_spec.rb55
-rw-r--r--spec/models/concerns/token_authenticatable_spec.rb4
-rw-r--r--spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb8
-rw-r--r--spec/models/dependency_proxy/manifest_spec.rb20
-rw-r--r--spec/models/deployment_spec.rb20
-rw-r--r--spec/models/experiment_spec.rb57
-rw-r--r--spec/models/group_spec.rb24
-rw-r--r--spec/models/issue_link_spec.rb2
-rw-r--r--spec/models/merge_request/metrics_spec.rb11
-rw-r--r--spec/models/merge_request_diff_commit_spec.rb9
-rw-r--r--spec/models/merge_request_diff_spec.rb45
-rw-r--r--spec/models/merge_request_spec.rb143
-rw-r--r--spec/models/onboarding_progress_spec.rb67
-rw-r--r--spec/models/packages/composer/cache_file_spec.rb32
-rw-r--r--spec/models/packages/composer/metadatum_spec.rb16
-rw-r--r--spec/models/packages/debian/group_component_spec.rb7
-rw-r--r--spec/models/packages/debian/project_component_spec.rb7
-rw-r--r--spec/models/pages/lookup_path_spec.rb47
-rw-r--r--spec/models/pages/virtual_domain_spec.rb21
-rw-r--r--spec/models/project_services/chat_notification_service_spec.rb33
-rw-r--r--spec/models/project_services/confluence_service_spec.rb6
-rw-r--r--spec/models/project_services/datadog_service_spec.rb36
-rw-r--r--spec/models/project_spec.rb174
-rw-r--r--spec/models/project_wiki_spec.rb1
-rw-r--r--spec/models/prometheus_metric_spec.rb10
-rw-r--r--spec/models/readme_blob_spec.rb17
-rw-r--r--spec/models/release_spec.rb57
-rw-r--r--spec/models/repository_spec.rb74
-rw-r--r--spec/models/service_spec.rb78
-rw-r--r--spec/models/snippet_spec.rb6
-rw-r--r--spec/models/terraform/state_spec.rb12
-rw-r--r--spec/models/terraform/state_version_spec.rb18
-rw-r--r--spec/models/token_with_iv_spec.rb29
-rw-r--r--spec/models/u2f_registration_spec.rb37
-rw-r--r--spec/models/user_spec.rb2
-rw-r--r--spec/policies/project_policy_spec.rb65
-rw-r--r--spec/presenters/ci/build_runner_presenter_spec.rb10
-rw-r--r--spec/presenters/ci/pipeline_artifacts/code_quality_mr_diff_presenter_spec.rb66
-rw-r--r--spec/presenters/gitlab/whats_new/item_presenter_spec.rb29
-rw-r--r--spec/presenters/project_presenter_spec.rb2
-rw-r--r--spec/requests/api/api_spec.rb36
-rw-r--r--spec/requests/api/applications_spec.rb2
-rw-r--r--spec/requests/api/debian_group_packages_spec.rb20
-rw-r--r--spec/requests/api/debian_project_packages_spec.rb28
-rw-r--r--spec/requests/api/deploy_tokens_spec.rb16
-rw-r--r--spec/requests/api/events_spec.rb6
-rw-r--r--spec/requests/api/generic_packages_spec.rb11
-rw-r--r--spec/requests/api/graphql/issue/issue_spec.rb14
-rw-r--r--spec/requests/api/graphql/mutations/alert_management/http_integration/update_spec.rb16
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/reviewer_rereview_spec.rb65
-rw-r--r--spec/requests/api/graphql/mutations/releases/create_spec.rb8
-rw-r--r--spec/requests/api/graphql/mutations/snippets/create_spec.rb36
-rw-r--r--spec/requests/api/graphql/mutations/snippets/update_spec.rb34
-rw-r--r--spec/requests/api/graphql/packages/package_composer_details_spec.rb39
-rw-r--r--spec/requests/api/graphql/packages/package_spec.rb80
-rw-r--r--spec/requests/api/graphql/project/issue/designs/notes_spec.rb19
-rw-r--r--spec/requests/api/graphql/project/merge_requests_spec.rb94
-rw-r--r--spec/requests/api/graphql/project/packages_spec.rb31
-rw-r--r--spec/requests/api/graphql/project/release_spec.rb4
-rw-r--r--spec/requests/api/graphql/project/terraform/state_spec.rb83
-rw-r--r--spec/requests/api/group_labels_spec.rb54
-rw-r--r--spec/requests/api/groups_spec.rb7
-rw-r--r--spec/requests/api/internal/base_spec.rb98
-rw-r--r--spec/requests/api/internal/kubernetes_spec.rb68
-rw-r--r--spec/requests/api/labels_spec.rb59
-rw-r--r--spec/requests/api/maven_packages_spec.rb11
-rw-r--r--spec/requests/api/merge_requests_spec.rb62
-rw-r--r--spec/requests/api/oauth_tokens_spec.rb15
-rw-r--r--spec/requests/api/projects_spec.rb35
-rw-r--r--spec/requests/api/pypi_packages_spec.rb9
-rw-r--r--spec/requests/api/repositories_spec.rb98
-rw-r--r--spec/requests/api/resource_access_tokens_spec.rb293
-rw-r--r--spec/requests/api/settings_spec.rb18
-rw-r--r--spec/requests/api/suggestions_spec.rb27
-rw-r--r--spec/requests/api/templates_spec.rb6
-rw-r--r--spec/requests/api/users_spec.rb8
-rw-r--r--spec/requests/api/version_spec.rb12
-rw-r--r--spec/requests/groups/email_campaigns_controller_spec.rb86
-rw-r--r--spec/requests/oauth/tokens_controller_spec.rb14
-rw-r--r--spec/requests/projects/ci/promeheus_metrics/histograms_controller_spec.rb46
-rw-r--r--spec/requests/projects/noteable_notes_spec.rb11
-rw-r--r--spec/requests/rack_attack_global_spec.rb6
-rw-r--r--spec/requests/users_controller_spec.rb8
-rw-r--r--spec/requests/whats_new_controller_spec.rb60
-rw-r--r--spec/routing/admin_routing_spec.rb7
-rw-r--r--spec/routing/projects/security/configuration_controller_routing_spec.rb15
-rw-r--r--spec/rubocop/code_reuse_helpers_spec.rb56
-rw-r--r--spec/rubocop/cop/active_record_association_reload_spec.rb2
-rw-r--r--spec/rubocop/cop/api/base_spec.rb5
-rw-r--r--spec/rubocop/cop/api/grape_array_missing_coerce_spec.rb28
-rw-r--r--spec/rubocop/cop/avoid_becomes_spec.rb30
-rw-r--r--spec/rubocop/cop/avoid_break_from_strong_memoize_spec.rb6
-rw-r--r--spec/rubocop/cop/avoid_keyword_arguments_in_sidekiq_workers_spec.rb7
-rw-r--r--spec/rubocop/cop/ban_catch_throw_spec.rb26
-rw-r--r--spec/rubocop/cop/code_reuse/finder_spec.rb5
-rw-r--r--spec/rubocop/cop/code_reuse/presenter_spec.rb3
-rw-r--r--spec/rubocop/cop/code_reuse/serializer_spec.rb3
-rw-r--r--spec/rubocop/cop/code_reuse/service_class_spec.rb3
-rw-r--r--spec/rubocop/cop/code_reuse/worker_spec.rb3
-rw-r--r--spec/rubocop/cop/default_scope_spec.rb39
-rw-r--r--spec/rubocop/cop/gitlab/avoid_uploaded_file_from_params_spec.rb9
-rw-r--r--spec/rubocop/cop/gitlab/bulk_insert_spec.rb11
-rw-r--r--spec/rubocop/cop/gitlab/change_timezone_spec.rb5
-rw-r--r--spec/rubocop/cop/gitlab/const_get_inherit_false_spec.rb47
-rw-r--r--spec/rubocop/cop/gitlab/duplicate_spec_location_spec.rb2
-rw-r--r--spec/rubocop/cop/gitlab/except_spec.rb3
-rw-r--r--spec/rubocop/cop/gitlab/finder_with_find_by_spec.rb49
-rw-r--r--spec/rubocop/cop/gitlab/httparty_spec.rb34
-rw-r--r--spec/rubocop/cop/gitlab/intersect_spec.rb3
-rw-r--r--spec/rubocop/cop/gitlab/json_spec.rb33
-rw-r--r--spec/rubocop/cop/gitlab/module_with_instance_variables_spec.rb35
-rw-r--r--spec/rubocop/cop/gitlab/namespaced_class_spec.rb73
-rw-r--r--spec/rubocop/cop/gitlab/policy_rule_boolean_spec.rb3
-rw-r--r--spec/rubocop/cop/gitlab/predicate_memoization_spec.rb74
-rw-r--r--spec/rubocop/cop/gitlab/rails_logger_spec.rb22
-rw-r--r--spec/rubocop/cop/gitlab/union_spec.rb3
-rw-r--r--spec/rubocop/cop/graphql/authorize_types_spec.rb7
-rw-r--r--spec/rubocop/cop/graphql/descriptions_spec.rb22
-rw-r--r--spec/rubocop/cop/graphql/gid_expected_type_spec.rb7
-rw-r--r--spec/rubocop/cop/graphql/id_type_spec.rb7
-rw-r--r--spec/rubocop/cop/graphql/json_type_spec.rb24
-rw-r--r--spec/rubocop/cop/graphql/resolver_type_spec.rb18
-rw-r--r--spec/rubocop/cop/group_public_or_visible_to_user_spec.rb23
-rw-r--r--spec/rubocop/cop/inject_enterprise_edition_module_spec.rb24
-rw-r--r--spec/rubocop/cop/lint/last_keyword_argument_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb18
-rw-r--r--spec/rubocop/cop/put_project_routes_under_scope_spec.rb2
-rw-r--r--spec/rubocop/cop/qa/ambiguous_page_object_name_spec.rb4
-rw-r--r--spec/rubocop/cop/qa/element_with_pattern_spec.rb4
-rw-r--r--spec/rubocop/cop/rspec/factories_in_migration_specs_spec.rb2
-rw-r--r--spec/rubocop/cop/ruby_interpolation_in_translation_spec.rb67
-rw-r--r--spec/rubocop/cop/static_translation_definition_spec.rb98
-rw-r--r--spec/rubocop/cop/usage_data/distinct_count_by_large_foreign_key_spec.rb42
-rw-r--r--spec/rubocop/cop/usage_data/large_table_spec.rb56
-rw-r--r--spec/rubocop/qa_helpers_spec.rb6
-rw-r--r--spec/serializers/admin/user_entity_spec.rb1
-rw-r--r--spec/serializers/admin/user_serializer_spec.rb1
-rw-r--r--spec/serializers/ci/codequality_mr_diff_entity_spec.rb27
-rw-r--r--spec/serializers/ci/codequality_mr_diff_report_serializer_spec.rb32
-rw-r--r--spec/serializers/ci/pipeline_entity_spec.rb261
-rw-r--r--spec/serializers/codequality_degradation_entity_spec.rb63
-rw-r--r--spec/serializers/codequality_reports_comparer_entity_spec.rb58
-rw-r--r--spec/serializers/codequality_reports_comparer_serializer_spec.rb58
-rw-r--r--spec/serializers/diff_file_metadata_entity_spec.rb27
-rw-r--r--spec/serializers/diffs_entity_spec.rb15
-rw-r--r--spec/serializers/group_link/group_group_link_entity_spec.rb (renamed from spec/serializers/group_group_link_entity_spec.rb)8
-rw-r--r--spec/serializers/group_link/group_group_link_serializer_spec.rb (renamed from spec/serializers/group_group_link_serializer_spec.rb)4
-rw-r--r--spec/serializers/group_link/group_link_entity_spec.rb25
-rw-r--r--spec/serializers/group_link/project_group_link_entity_spec.rb30
-rw-r--r--spec/serializers/group_link/project_group_link_serializer_spec.rb13
-rw-r--r--spec/serializers/member_entity_spec.rb36
-rw-r--r--spec/serializers/member_serializer_spec.rb4
-rw-r--r--spec/serializers/merge_request_user_entity_spec.rb18
-rw-r--r--spec/serializers/merge_request_widget_entity_spec.rb6
-rw-r--r--spec/serializers/pipeline_details_entity_spec.rb4
-rw-r--r--spec/serializers/pipeline_entity_spec.rb264
-rw-r--r--spec/serializers/user_serializer_spec.rb2
-rw-r--r--spec/services/alert_management/process_prometheus_alert_service_spec.rb2
-rw-r--r--spec/services/application_settings/update_service_spec.rb2
-rw-r--r--spec/services/authorized_project_update/recalculate_for_user_range_service_spec.rb2
-rw-r--r--spec/services/bulk_create_integration_service_spec.rb51
-rw-r--r--spec/services/captcha/captcha_verification_service_spec.rb39
-rw-r--r--spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb25
-rw-r--r--spec/services/ci/create_pipeline_service/rules_spec.rb14
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb4
-rw-r--r--spec/services/ci/generate_codequality_mr_diff_report_service_spec.rb51
-rw-r--r--spec/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service_spec.rb62
-rw-r--r--spec/services/ci/process_build_service_spec.rb6
-rw-r--r--spec/services/ci/process_pipeline_service_spec.rb29
-rw-r--r--spec/services/ci/prometheus_metrics/observe_histograms_service_spec.rb86
-rw-r--r--spec/services/ci/register_job_service_spec.rb6
-rw-r--r--spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb46
-rw-r--r--spec/services/dependency_proxy/head_manifest_service_spec.rb9
-rw-r--r--spec/services/dependency_proxy/pull_manifest_service_spec.rb6
-rw-r--r--spec/services/deployments/create_service_spec.rb21
-rw-r--r--spec/services/discussions/resolve_service_spec.rb14
-rw-r--r--spec/services/discussions/unresolve_service_spec.rb32
-rw-r--r--spec/services/feature_flags/create_service_spec.rb12
-rw-r--r--spec/services/feature_flags/update_service_spec.rb12
-rw-r--r--spec/services/git/branch_hooks_service_spec.rb83
-rw-r--r--spec/services/git/wiki_push_service_spec.rb50
-rw-r--r--spec/services/groups/import_export/export_service_spec.rb14
-rw-r--r--spec/services/groups/open_issues_count_service_spec.rb106
-rw-r--r--spec/services/integrations/test/project_service_spec.rb96
-rw-r--r--spec/services/issue_rebalancing_service_spec.rb108
-rw-r--r--spec/services/issues/close_service_spec.rb3
-rw-r--r--spec/services/issues/create_service_spec.rb176
-rw-r--r--spec/services/issues/update_service_spec.rb2
-rw-r--r--spec/services/members/update_service_spec.rb24
-rw-r--r--spec/services/merge_requests/after_create_service_spec.rb13
-rw-r--r--spec/services/merge_requests/build_service_spec.rb2
-rw-r--r--spec/services/merge_requests/delete_non_latest_diffs_service_spec.rb2
-rw-r--r--spec/services/merge_requests/mark_reviewer_reviewed_service_spec.rb45
-rw-r--r--spec/services/merge_requests/mergeability_check_service_spec.rb28
-rw-r--r--spec/services/merge_requests/push_options_handler_service_spec.rb45
-rw-r--r--spec/services/merge_requests/reload_merge_head_diff_service_spec.rb61
-rw-r--r--spec/services/merge_requests/request_review_service_spec.rb69
-rw-r--r--spec/services/merge_requests/update_service_spec.rb25
-rw-r--r--spec/services/namespaces/in_product_marketing_emails_service_spec.rb159
-rw-r--r--spec/services/notification_recipients/build_service_spec.rb24
-rw-r--r--spec/services/notification_service_spec.rb40
-rw-r--r--spec/services/packages/debian/destroy_distribution_service_spec.rb71
-rw-r--r--spec/services/packages/debian/update_distribution_service_spec.rb144
-rw-r--r--spec/services/packages/maven/find_or_create_package_service_spec.rb7
-rw-r--r--spec/services/pages/migrate_from_legacy_storage_service_spec.rb92
-rw-r--r--spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb2
-rw-r--r--spec/services/post_receive_service_spec.rb14
-rw-r--r--spec/services/projects/cleanup_service_spec.rb2
-rw-r--r--spec/services/projects/create_service_spec.rb44
-rw-r--r--spec/services/projects/fork_service_spec.rb44
-rw-r--r--spec/services/projects/prometheus/alerts/notify_service_spec.rb7
-rw-r--r--spec/services/projects/update_pages_service_spec.rb9
-rw-r--r--spec/services/projects/update_repository_storage_service_spec.rb13
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb24
-rw-r--r--spec/services/repositories/changelog_service_spec.rb74
-rw-r--r--spec/services/resource_access_tokens/create_service_spec.rb10
-rw-r--r--spec/services/resource_access_tokens/revoke_service_spec.rb8
-rw-r--r--spec/services/resource_events/change_milestone_service_spec.rb4
-rw-r--r--spec/services/security/ci_configuration/sast_create_service_spec.rb69
-rw-r--r--spec/services/security/ci_configuration/sast_parser_service_spec.rb76
-rw-r--r--spec/services/snippets/create_service_spec.rb5
-rw-r--r--spec/services/snippets/update_repository_storage_service_spec.rb13
-rw-r--r--spec/services/snippets/update_service_spec.rb13
-rw-r--r--spec/services/spam/spam_action_service_spec.rb182
-rw-r--r--spec/services/suggestions/apply_service_spec.rb104
-rw-r--r--spec/services/suggestions/create_service_spec.rb33
-rw-r--r--spec/services/system_hooks_service_spec.rb9
-rw-r--r--spec/services/system_notes/issuables_service_spec.rb2
-rw-r--r--spec/services/test_hooks/project_service_spec.rb93
-rw-r--r--spec/services/todo_service_spec.rb11
-rw-r--r--spec/services/users/approve_service_spec.rb24
-rw-r--r--spec/services/users/build_service_spec.rb6
-rw-r--r--spec/services/users/refresh_authorized_projects_service_spec.rb15
-rw-r--r--spec/services/users/reject_service_spec.rb20
-rw-r--r--spec/spec_helper.rb13
-rw-r--r--spec/support/gitlab_experiment.rb12
-rw-r--r--spec/support/graphql/field_selection.rb2
-rw-r--r--spec/support/helpers/dependency_proxy_helpers.rb4
-rw-r--r--spec/support/helpers/graphql_helpers.rb12
-rw-r--r--spec/support/helpers/next_found_instance_of.rb16
-rw-r--r--spec/support/helpers/smime_helper.rb2
-rw-r--r--spec/support/helpers/snowplow_helpers.rb10
-rw-r--r--spec/support/helpers/sorting_helper.rb31
-rw-r--r--spec/support/helpers/stub_configuration.rb6
-rw-r--r--spec/support/helpers/stub_object_storage.rb7
-rw-r--r--spec/support/helpers/wait_for_requests.rb2
-rw-r--r--spec/support/matchers/pushed_frontend_feature_flags_matcher.rb23
-rw-r--r--spec/support/matchers/track_self_describing_event_matcher.rb12
-rw-r--r--spec/support/shared_contexts/navbar_structure_context.rb7
-rw-r--r--spec/support/shared_contexts/requests/api/conan_packages_shared_context.rb2
-rw-r--r--spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/comment_and_close_button_shared_examples.rb29
-rw-r--r--spec/support/shared_examples/features/discussion_comments_shared_example.rb3
-rw-r--r--spec/support/shared_examples/features/editable_merge_request_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/multiple_reviewers_mr_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/graphql/mutations/can_mutate_spammable_examples.rb36
-rw-r--r--spec/support/shared_examples/graphql/mutations/http_integrations_shared_examples.rb75
-rw-r--r--spec/support/shared_examples/graphql/mutations/spammable_mutation_fields_examples.rb50
-rw-r--r--spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb95
-rw-r--r--spec/support/shared_examples/models/concerns/can_housekeep_repository_shared_examples.rb45
-rw-r--r--spec/support/shared_examples/models/concerns/can_move_repository_storage_shared_examples.rb11
-rw-r--r--spec/support/shared_examples/models/concerns/has_repository_shared_examples.rb32
-rw-r--r--spec/support/shared_examples/models/concerns/repositories/can_housekeep_repository_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/models/packages/debian/component_shared_examples.rb50
-rw-r--r--spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb1
-rw-r--r--spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb34
-rw-r--r--spec/support/shared_examples/requests/api/graphql/noteable_shared_examples.rb62
-rw-r--r--spec/support/shared_examples/requests/api/read_user_shared_examples.rb32
-rw-r--r--spec/support/shared_examples/requests/api/repository_storage_moves_shared_examples.rb44
-rw-r--r--spec/support/shared_examples/requests/api/resolvable_discussions_shared_examples.rb3
-rw-r--r--spec/support/shared_examples/requests/rack_attack_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb19
-rw-r--r--spec/support/shared_examples/services/repositories/housekeeping_shared_examples.rb18
-rw-r--r--spec/support/shared_examples/services/resource_events/change_milestone_service_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/services/snippets_shared_examples.rb68
-rw-r--r--spec/support/shared_examples/workers/concerns/git_garbage_collect_methods_shared_examples.rb320
-rw-r--r--spec/support/snowplow.rb1
-rw-r--r--spec/tasks/gitlab/cleanup_rake_spec.rb22
-rw-r--r--spec/tasks/gitlab/pages_rake_spec.rb60
-rw-r--r--spec/tasks/gitlab/password_rake_spec.rb76
-rw-r--r--spec/tasks/gitlab/terraform/migrate_rake_spec.rb45
-rw-r--r--spec/tooling/danger/base_linter_spec.rb (renamed from spec/lib/gitlab/danger/base_linter_spec.rb)5
-rw-r--r--spec/tooling/danger/changelog_spec.rb (renamed from spec/lib/gitlab/danger/changelog_spec.rb)9
-rw-r--r--spec/tooling/danger/commit_linter_spec.rb (renamed from spec/lib/gitlab/danger/commit_linter_spec.rb)5
-rw-r--r--spec/tooling/danger/danger_spec_helper.rb (renamed from spec/lib/gitlab/danger/danger_spec_helper.rb)0
-rw-r--r--spec/tooling/danger/emoji_checker_spec.rb (renamed from spec/lib/gitlab/danger/emoji_checker_spec.rb)5
-rw-r--r--spec/tooling/danger/feature_flag_spec.rb157
-rw-r--r--spec/tooling/danger/helper_spec.rb (renamed from spec/lib/gitlab/danger/helper_spec.rb)120
-rw-r--r--spec/tooling/danger/merge_request_linter_spec.rb (renamed from spec/lib/gitlab/danger/merge_request_linter_spec.rb)5
-rw-r--r--spec/tooling/danger/roulette_spec.rb (renamed from spec/lib/gitlab/danger/roulette_spec.rb)48
-rw-r--r--spec/tooling/danger/sidekiq_queues_spec.rb (renamed from spec/lib/gitlab/danger/sidekiq_queues_spec.rb)5
-rw-r--r--spec/tooling/danger/teammate_spec.rb (renamed from spec/lib/gitlab/danger/teammate_spec.rb)15
-rw-r--r--spec/tooling/danger/title_linting_spec.rb (renamed from spec/lib/gitlab/danger/title_linting_spec.rb)41
-rw-r--r--spec/tooling/danger/weightage/maintainers_spec.rb (renamed from spec/lib/gitlab/danger/weightage/maintainers_spec.rb)10
-rw-r--r--spec/tooling/danger/weightage/reviewers_spec.rb (renamed from spec/lib/gitlab/danger/weightage/reviewers_spec.rb)14
-rw-r--r--spec/tooling/gitlab_danger_spec.rb (renamed from spec/lib/gitlab_danger_spec.rb)2
-rw-r--r--spec/uploaders/packages/composer/cache_uploader_spec.rb45
-rw-r--r--spec/validators/nested_attributes_duplicates_validator_spec.rb (renamed from spec/validators/variable_duplicates_validator_spec.rb)50
-rw-r--r--spec/views/groups/show.html.haml_spec.rb52
-rw-r--r--spec/views/layouts/_head.html.haml_spec.rb38
-rw-r--r--spec/views/layouts/header/_new_dropdown.haml_spec.rb6
-rw-r--r--spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb2
-rw-r--r--spec/views/layouts/nav/sidebar/_project_security_link.html.haml_spec.rb29
-rw-r--r--spec/views/projects/_home_panel.html.haml_spec.rb1
-rw-r--r--spec/views/projects/empty.html.haml_spec.rb37
-rw-r--r--spec/views/projects/show.html.haml_spec.rb51
-rw-r--r--spec/views/projects/tree/_tree_row.html.haml_spec.rb43
-rw-r--r--spec/views/search/_filter.html.haml_spec.rb17
-rw-r--r--spec/views/search/_form.html.haml_spec.rb14
-rw-r--r--spec/views/shared/ssh_keys/_key_details.html.haml_spec.rb20
-rw-r--r--spec/workers/bulk_import_worker_spec.rb15
-rw-r--r--spec/workers/bulk_imports/entity_worker_spec.rb14
-rw-r--r--spec/workers/ci/pipeline_artifacts/create_quality_report_worker_spec.rb40
-rw-r--r--spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb2
-rw-r--r--spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb5
-rw-r--r--spec/workers/git_garbage_collect_worker_spec.rb346
-rw-r--r--spec/workers/jira_connect/sync_builds_worker_spec.rb24
-rw-r--r--spec/workers/jira_connect/sync_deployments_worker_spec.rb24
-rw-r--r--spec/workers/jira_connect/sync_feature_flags_worker_spec.rb24
-rw-r--r--spec/workers/merge_request_cleanup_refs_worker_spec.rb12
-rw-r--r--spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb29
-rw-r--r--spec/workers/namespaces/onboarding_pipeline_created_worker_spec.rb25
-rw-r--r--spec/workers/namespaces/onboarding_user_added_worker_spec.rb17
-rw-r--r--spec/workers/projects/git_garbage_collect_worker_spec.rb78
-rw-r--r--spec/workers/schedule_merge_request_cleanup_refs_worker_spec.rb12
-rw-r--r--spec/workers/wikis/git_garbage_collect_worker_spec.rb14
1028 files changed, 26491 insertions, 10584 deletions
diff --git a/spec/benchmarks/banzai_benchmark.rb b/spec/benchmarks/banzai_benchmark.rb
new file mode 100644
index 00000000000..e489237a2f2
--- /dev/null
+++ b/spec/benchmarks/banzai_benchmark.rb
@@ -0,0 +1,114 @@
+# frozen_string_literal: true
+
+if ENV.key?('BENCHMARK')
+ require 'spec_helper'
+ require 'erb'
+ require 'benchmark/ips'
+
+ # This benchmarks some of the Banzai pipelines and filters.
+ # They are not definitive, but can be used by a developer to
+ # get a rough idea of how changing or adding a new filter
+ # will affect performance.
+ #
+ # Run by:
+ # BENCHMARK=1 rspec spec/benchmarks/banzai_benchmark.rb
+ # or
+ # rake benchmark:banzai
+ #
+ RSpec.describe 'GitLab Markdown Benchmark', :aggregate_failures do
+ include MarkupHelper
+
+ let_it_be(:feature) { MarkdownFeature.new }
+ let_it_be(:project) { feature.project }
+ let_it_be(:group) { feature.group }
+ let_it_be(:wiki) { feature.wiki }
+ let_it_be(:wiki_page) { feature.wiki_page }
+ let_it_be(:markdown_text) { feature.raw_markdown }
+
+ let!(:render_context) { Banzai::RenderContext.new(project, current_user) }
+
+ before do
+ stub_application_setting(asset_proxy_enabled: true)
+ stub_application_setting(asset_proxy_secret_key: 'shared-secret')
+ stub_application_setting(asset_proxy_url: 'https://assets.example.com')
+ stub_application_setting(asset_proxy_whitelist: %w(gitlab.com *.mydomain.com))
+
+ Banzai::Filter::AssetProxyFilter.initialize_settings
+ end
+
+ context 'pipelines' do
+ it 'benchmarks several pipelines' do
+ path = 'images/example.jpg'
+ gitaly_wiki_file = Gitlab::GitalyClient::WikiFile.new(path: path)
+ allow(wiki).to receive(:find_file).with(path).and_return(Gitlab::Git::WikiFile.new(gitaly_wiki_file))
+ allow(wiki).to receive(:wiki_base_path) { '/namespace1/gitlabhq/wikis' }
+
+ puts "\n--> Benchmarking Full, Wiki, and Plain pipelines\n"
+
+ Benchmark.ips do |x|
+ x.config(time: 10, warmup: 2)
+
+ x.report('Full pipeline') { markdown(markdown_text, { pipeline: :full }) }
+ x.report('Wiki pipeline') { markdown(markdown_text, { pipeline: :wiki, wiki: wiki, page_slug: wiki_page.slug }) }
+ x.report('Plain pipeline') { markdown(markdown_text, { pipeline: :plain_markdown }) }
+
+ x.compare!
+ end
+ end
+ end
+
+ context 'filters' do
+ let(:context) do
+ tmp = { project: project, current_user: current_user, render_context: render_context }
+ Banzai::Filter::AssetProxyFilter.transform_context(tmp)
+ end
+
+ it 'benchmarks all filters in the FullPipeline' do
+ benchmark_pipeline_filters(:full)
+ end
+
+ it 'benchmarks all filters in the PlainMarkdownPipeline' do
+ benchmark_pipeline_filters(:plain_markdown)
+ end
+ end
+
+ # build up the source text for each filter
+ def build_filter_text(pipeline, initial_text)
+ filter_source = {}
+ input_text = initial_text
+
+ pipeline.filters.each do |filter_klass|
+ filter_source[filter_klass] = input_text
+
+ output = filter_klass.call(input_text, context)
+ input_text = output
+ end
+
+ filter_source
+ end
+
+ def benchmark_pipeline_filters(pipeline_type)
+ pipeline = Banzai::Pipeline[pipeline_type]
+ filter_source = build_filter_text(pipeline, markdown_text)
+
+ puts "\n--> Benchmarking #{pipeline.name.demodulize} filters\n"
+
+ Benchmark.ips do |x|
+ x.config(time: 10, warmup: 2)
+
+ pipeline.filters.each do |filter_klass|
+ label = filter_klass.name.demodulize.delete_suffix('Filter').truncate(20)
+
+ x.report(label) { filter_klass.call(filter_source[filter_klass], context) }
+ end
+
+ x.compare!
+ end
+ end
+
+ # Fake a `current_user` helper
+ def current_user
+ feature.user
+ end
+ end
+end
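Note: the new benchmark file above follows the standard benchmark-ips pattern. As a point of reference only (this sketch is illustrative, not part of the diff; the labels and blocks are placeholders rather than GitLab code), the core usage it relies on is:

    require 'benchmark/ips'

    # x.report registers a labelled block, and x.compare! prints a relative
    # iterations-per-second summary for all registered reports.
    Benchmark.ips do |x|
      x.config(time: 10, warmup: 2)

      x.report('gsub variant')   { 'some *markdown*'.gsub('*', '') }
      x.report('delete variant') { 'some *markdown*'.delete('*') }

      x.compare!
    end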
diff --git a/spec/config/object_store_settings_spec.rb b/spec/config/object_store_settings_spec.rb
index 9e7dfa043c3..68b37197ca7 100644
--- a/spec/config/object_store_settings_spec.rb
+++ b/spec/config/object_store_settings_spec.rb
@@ -49,6 +49,20 @@ RSpec.describe ObjectStoreSettings do
}
end
+ shared_examples 'consolidated settings for objects accelerated by Workhorse' do
+ it 'consolidates active object storage settings' do
+ described_class::WORKHORSE_ACCELERATED_TYPES.each do |object_type|
+ # Use to_h to avoid https://gitlab.com/gitlab-org/gitlab/-/issues/286873
+ section = subject.try(object_type).to_h
+
+ next unless section.dig('object_store', 'enabled')
+
+ expect(section['object_store']['connection']).to eq(connection)
+ expect(section['object_store']['consolidated_settings']).to be true
+ end
+ end
+ end
+
it 'sets correct default values' do
subject
@@ -77,9 +91,7 @@ RSpec.describe ObjectStoreSettings do
expect(settings.pages['object_store']['consolidated_settings']).to be true
expect(settings.external_diffs['enabled']).to be false
- expect(settings.external_diffs['object_store']['enabled']).to be false
- expect(settings.external_diffs['object_store']['remote_directory']).to eq('external_diffs')
- expect(settings.external_diffs['object_store']['consolidated_settings']).to be true
+ expect(settings.external_diffs['object_store']).to be_nil
end
it 'raises an error when a bucket is missing' do
@@ -95,29 +107,50 @@ RSpec.describe ObjectStoreSettings do
expect(settings.pages['object_store']).to eq(nil)
end
- it 'allows pages to define its own connection' do
- pages_connection = { 'provider' => 'Google', 'google_application_default' => true }
- config['pages'] = {
- 'enabled' => true,
- 'object_store' => {
+ context 'GitLab Pages' do
+ let(:pages_connection) { { 'provider' => 'Google', 'google_application_default' => true } }
+
+ before do
+ config['pages'] = {
'enabled' => true,
- 'connection' => pages_connection
+ 'object_store' => {
+ 'enabled' => true,
+ 'connection' => pages_connection
+ }
}
- }
+ end
- expect { subject }.not_to raise_error
+ it_behaves_like 'consolidated settings for objects accelerated by Workhorse'
- described_class::WORKHORSE_ACCELERATED_TYPES.each do |object_type|
- section = settings.try(object_type)
+ it 'allows pages to define its own connection' do
+ expect { subject }.not_to raise_error
- next unless section
+ expect(settings.pages['object_store']['connection']).to eq(pages_connection)
+ expect(settings.pages['object_store']['consolidated_settings']).to be_falsey
+ end
+ end
- expect(section['object_store']['connection']).to eq(connection)
- expect(section['object_store']['consolidated_settings']).to be true
+ context 'when object storage is disabled for artifacts with no bucket' do
+ before do
+ config['artifacts'] = {
+ 'enabled' => true,
+ 'object_store' => {}
+ }
+ config['object_store']['objects']['artifacts'] = {
+ 'enabled' => false
+ }
end
- expect(settings.pages['object_store']['connection']).to eq(pages_connection)
- expect(settings.pages['object_store']['consolidated_settings']).to be_falsey
+ it_behaves_like 'consolidated settings for objects accelerated by Workhorse'
+
+ it 'does not enable consolidated settings for artifacts' do
+ subject
+
+ expect(settings.artifacts['enabled']).to be true
+ expect(settings.artifacts['object_store']['remote_directory']).to be_nil
+ expect(settings.artifacts['object_store']['enabled']).to be_falsey
+ expect(settings.artifacts['object_store']['consolidated_settings']).to be_falsey
+ end
end
context 'with legacy config' do
diff --git a/spec/controllers/admin/cohorts_controller_spec.rb b/spec/controllers/admin/cohorts_controller_spec.rb
index 9eb2a713517..77a9c8eb223 100644
--- a/spec/controllers/admin/cohorts_controller_spec.rb
+++ b/spec/controllers/admin/cohorts_controller_spec.rb
@@ -3,37 +3,15 @@
require 'spec_helper'
RSpec.describe Admin::CohortsController do
- context 'as admin' do
- let(:user) { create(:admin) }
+ let(:user) { create(:admin) }
- before do
- sign_in(user)
- end
-
- it 'renders 200' do
- get :index
-
- expect(response).to have_gitlab_http_status(:success)
- end
-
- describe 'GET #index' do
- it_behaves_like 'tracking unique visits', :index do
- let(:target_id) { 'i_analytics_cohorts' }
- end
- end
+ before do
+ sign_in(user)
end
- context 'as normal user' do
- let(:user) { create(:user) }
-
- before do
- sign_in(user)
- end
-
- it 'renders a 404' do
- get :index
+ it 'redirects to Overview->Users' do
+ get :index
- expect(response).to have_gitlab_http_status(:not_found)
- end
+ expect(response).to redirect_to(admin_users_path(tab: 'cohorts'))
end
end
diff --git a/spec/controllers/admin/runners_controller_spec.rb b/spec/controllers/admin/runners_controller_spec.rb
index 3fffc50475c..cba25dbff95 100644
--- a/spec/controllers/admin/runners_controller_spec.rb
+++ b/spec/controllers/admin/runners_controller_spec.rb
@@ -27,7 +27,8 @@ RSpec.describe Admin::RunnersController do
# There is still an N+1 query for `runner.builds.count`
# We also need to add 1 because it takes 2 queries to preload tags
- expect { get :index }.not_to exceed_query_limit(control_count + 6)
+ # Also, looking up the token nonce requires database queries
+ expect { get :index }.not_to exceed_query_limit(control_count + 16)
expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to have_content('tag1')
diff --git a/spec/controllers/admin/users_controller_spec.rb b/spec/controllers/admin/users_controller_spec.rb
index f902a3d2541..6faec315eb6 100644
--- a/spec/controllers/admin/users_controller_spec.rb
+++ b/spec/controllers/admin/users_controller_spec.rb
@@ -29,6 +29,11 @@ RSpec.describe Admin::UsersController do
expect(assigns(:users).first.association(:authorized_projects)).to be_loaded
end
+
+ it_behaves_like 'tracking unique visits', :index do
+ let(:target_id) { 'i_analytics_cohorts' }
+ let(:request_params) { { tab: 'cohorts' } }
+ end
end
describe 'GET :id' do
@@ -180,7 +185,7 @@ RSpec.describe Admin::UsersController do
it 'displays the error' do
subject
- expect(flash[:alert]).to eq('The user you are trying to approve is not pending an approval')
+ expect(flash[:alert]).to eq('The user you are trying to approve is not pending approval')
end
it 'does not activate the user' do
diff --git a/spec/controllers/chaos_controller_spec.rb b/spec/controllers/chaos_controller_spec.rb
index 550303d292a..cb4f12ff829 100644
--- a/spec/controllers/chaos_controller_spec.rb
+++ b/spec/controllers/chaos_controller_spec.rb
@@ -124,4 +124,23 @@ RSpec.describe ChaosController do
expect(response).to have_gitlab_http_status(:ok)
end
end
+
+ describe '#gc' do
+ let(:gc_stat) { GC.stat.stringify_keys }
+
+ it 'runs a full GC on the current web worker' do
+ expect(Prometheus::PidProvider).to receive(:worker_id).and_return('worker-0')
+ expect(Gitlab::Chaos).to receive(:run_gc).and_return(gc_stat)
+
+ post :gc
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response_json['worker_id']).to eq('worker-0')
+ expect(response_json['gc_stat']).to eq(gc_stat)
+ end
+ end
+
+ def response_json
+ Gitlab::Json.parse(response.body)
+ end
end
diff --git a/spec/controllers/concerns/spammable_actions_spec.rb b/spec/controllers/concerns/spammable_actions_spec.rb
index 3b5b4d11a9b..25d5398c9da 100644
--- a/spec/controllers/concerns/spammable_actions_spec.rb
+++ b/spec/controllers/concerns/spammable_actions_spec.rb
@@ -6,21 +6,8 @@ RSpec.describe SpammableActions do
controller(ActionController::Base) do
include SpammableActions
- # #create is used to test spammable_params
- # for testing purposes
- def create
- spam_params = spammable_params
-
- # replace the actual request with a string in the JSON response, all we care is that it got set
- spam_params[:request] = 'this is the request' if spam_params[:request]
-
- # just return the params in the response so they can be verified in this fake controller spec.
- # Normally, they are processed further by the controller action
- render json: spam_params.to_json, status: :ok
- end
-
- # #update is used to test recaptcha_check_with_fallback
- # for testing purposes
+ # #update is used here to test #recaptcha_check_with_fallback, but it could be invoked
+ # from #create or any other action which mutates a spammable via a controller.
def update
should_redirect = params[:should_redirect] == 'true'
@@ -35,80 +22,7 @@ RSpec.describe SpammableActions do
end
before do
- # Ordinarily we would not stub a method on the class under test, but :ensure_spam_config_loaded!
- # returns false in the test environment, and is also strong_memoized, so we need to stub it
- allow(controller).to receive(:ensure_spam_config_loaded!) { true }
- end
-
- describe '#spammable_params' do
- subject { post :create, format: :json, params: params }
-
- shared_examples 'expects request param only' do
- it do
- subject
-
- expect(response).to be_successful
- expect(json_response).to eq({ 'request' => 'this is the request' })
- end
- end
-
- shared_examples 'expects all spammable params' do
- it do
- subject
-
- expect(response).to be_successful
- expect(json_response['request']).to eq('this is the request')
- expect(json_response['recaptcha_verified']).to eq(true)
- expect(json_response['spam_log_id']).to eq('1')
- end
- end
-
- let(:recaptcha_response) { nil }
- let(:spam_log_id) { nil }
-
- context 'when recaptcha response is not present' do
- let(:params) do
- {
- spam_log_id: spam_log_id
- }
- end
-
- it_behaves_like 'expects request param only'
- end
-
- context 'when recaptcha response is present' do
- let(:recaptcha_response) { 'abd123' }
- let(:params) do
- {
- 'g-recaptcha-response': recaptcha_response,
- spam_log_id: spam_log_id
- }
- end
-
- context 'when verify_recaptcha returns falsey' do
- before do
- expect(controller).to receive(:verify_recaptcha).with(
- {
- response: recaptcha_response
- }) { false }
- end
-
- it_behaves_like 'expects request param only'
- end
-
- context 'when verify_recaptcha returns truthy' do
- let(:spam_log_id) { 1 }
-
- before do
- expect(controller).to receive(:verify_recaptcha).with(
- {
- response: recaptcha_response
- }) { true }
- end
-
- it_behaves_like 'expects all spammable params'
- end
- end
+ allow(Gitlab::Recaptcha).to receive(:load_configurations!) { true }
end
describe '#recaptcha_check_with_fallback' do
@@ -154,12 +68,9 @@ RSpec.describe SpammableActions do
allow(spammable).to receive(:valid?) { false }
end
- # NOTE: Not adding coverage of details of render_recaptcha?, the plan is to refactor it out
- # of this module anyway as part of adding support for the GraphQL reCAPTCHA flow.
-
- context 'when render_recaptcha? is true' do
+ context 'when spammable.render_recaptcha? is true' do
before do
- expect(controller).to receive(:render_recaptcha?) { true }
+ expect(spammable).to receive(:render_recaptcha?) { true }
end
context 'when format is :html' do
diff --git a/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb b/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
index 39cbdfb9123..2056feb6434 100644
--- a/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
+++ b/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
@@ -130,7 +130,7 @@ RSpec.describe Groups::DependencyProxyForContainersController do
}
end
- it 'proxies status from the remote token request' do
+ it 'proxies status from the remote token request', :aggregate_failures do
subject
expect(response).to have_gitlab_http_status(:service_unavailable)
@@ -147,7 +147,7 @@ RSpec.describe Groups::DependencyProxyForContainersController do
}
end
- it 'proxies status from the remote manifest request' do
+ it 'proxies status from the remote manifest request', :aggregate_failures do
subject
expect(response).to have_gitlab_http_status(:bad_request)
@@ -156,15 +156,19 @@ RSpec.describe Groups::DependencyProxyForContainersController do
end
it 'sends a file' do
- expect(controller).to receive(:send_file).with(manifest.file.path, {})
+ expect(controller).to receive(:send_file).with(manifest.file.path, type: manifest.content_type)
subject
end
- it 'returns Content-Disposition: attachment' do
+ it 'returns Content-Disposition: attachment', :aggregate_failures do
subject
expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers['Docker-Content-Digest']).to eq(manifest.digest)
+ expect(response.headers['Content-Length']).to eq(manifest.size)
+ expect(response.headers['Docker-Distribution-Api-Version']).to eq(DependencyProxy::DISTRIBUTION_API_VERSION)
+ expect(response.headers['Etag']).to eq("\"#{manifest.digest}\"")
expect(response.headers['Content-Disposition']).to match(/^attachment/)
end
end
@@ -207,7 +211,7 @@ RSpec.describe Groups::DependencyProxyForContainersController do
}
end
- it 'proxies status from the remote blob request' do
+ it 'proxies status from the remote blob request', :aggregate_failures do
subject
expect(response).to have_gitlab_http_status(:bad_request)
@@ -221,7 +225,7 @@ RSpec.describe Groups::DependencyProxyForContainersController do
subject
end
- it 'returns Content-Disposition: attachment' do
+ it 'returns Content-Disposition: attachment', :aggregate_failures do
subject
expect(response).to have_gitlab_http_status(:ok)
diff --git a/spec/controllers/groups/group_members_controller_spec.rb b/spec/controllers/groups/group_members_controller_spec.rb
index 4d78419e8eb..ff7a7f55863 100644
--- a/spec/controllers/groups/group_members_controller_spec.rb
+++ b/spec/controllers/groups/group_members_controller_spec.rb
@@ -225,6 +225,18 @@ RSpec.describe Groups::GroupMembersController do
expect(requester.reload.expires_at).not_to eq(expires_at.to_date)
end
+
+ it 'returns error status' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+
+ it 'returns error message' do
+ subject
+
+ expect(json_response).to eq({ 'message' => 'Expires at cannot be a date in the past' })
+ end
end
context 'when set to a date in the future' do
diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb
index 939c36a98b2..9e5f68820d9 100644
--- a/spec/controllers/groups_controller_spec.rb
+++ b/spec/controllers/groups_controller_spec.rb
@@ -306,66 +306,6 @@ RSpec.describe GroupsController, factory_default: :keep do
end
end
end
-
- describe 'tracking group creation for onboarding issues experiment' do
- before do
- sign_in(user)
- end
-
- subject(:create_namespace) { post :create, params: { group: { name: 'new_group', path: 'new_group' } } }
-
- context 'experiment disabled' do
- before do
- stub_experiment(onboarding_issues: false)
- end
-
- it 'does not track anything', :snowplow do
- create_namespace
-
- expect_no_snowplow_event
- end
- end
-
- context 'experiment enabled' do
- before do
- stub_experiment(onboarding_issues: true)
- end
-
- context 'and the user is part of the control group' do
- before do
- stub_experiment_for_subject(onboarding_issues: false)
- end
-
- it 'tracks the event with the "created_namespace" action with the "control_group" property', :snowplow do
- create_namespace
-
- expect_snowplow_event(
- category: 'Growth::Conversion::Experiment::OnboardingIssues',
- action: 'created_namespace',
- label: anything,
- property: 'control_group'
- )
- end
- end
-
- context 'and the user is part of the experimental group' do
- before do
- stub_experiment_for_subject(onboarding_issues: true)
- end
-
- it 'tracks the event with the "created_namespace" action with the "experimental_group" property', :snowplow do
- create_namespace
-
- expect_snowplow_event(
- category: 'Growth::Conversion::Experiment::OnboardingIssues',
- action: 'created_namespace',
- label: anything,
- property: 'experimental_group'
- )
- end
- end
- end
- end
end
describe 'GET #index' do
diff --git a/spec/controllers/import/bulk_imports_controller_spec.rb b/spec/controllers/import/bulk_imports_controller_spec.rb
index d1c138617bb..9927ef0903a 100644
--- a/spec/controllers/import/bulk_imports_controller_spec.rb
+++ b/spec/controllers/import/bulk_imports_controller_spec.rb
@@ -59,7 +59,14 @@ RSpec.describe Import::BulkImportsController do
parsed_response: [
{ 'id' => 1, 'full_name' => 'group1', 'full_path' => 'full/path/group1', 'web_url' => 'http://demo.host/full/path/group1' },
{ 'id' => 2, 'full_name' => 'group2', 'full_path' => 'full/path/group2', 'web_url' => 'http://demo.host/full/path/group1' }
- ]
+ ],
+ headers: {
+ 'x-next-page' => '2',
+ 'x-page' => '1',
+ 'x-per-page' => '20',
+ 'x-total' => '37',
+ 'x-total-pages' => '2'
+ }
)
end
@@ -81,6 +88,17 @@ RSpec.describe Import::BulkImportsController do
expect(json_response).to eq({ importable_data: client_response.parsed_response }.as_json)
end
+ it 'forwards pagination headers' do
+ get :status, format: :json
+
+ expect(response.headers['x-per-page']).to eq client_response.headers['x-per-page']
+ expect(response.headers['x-page']).to eq client_response.headers['x-page']
+ expect(response.headers['x-next-page']).to eq client_response.headers['x-next-page']
+ expect(response.headers['x-prev-page']).to eq client_response.headers['x-prev-page']
+ expect(response.headers['x-total']).to eq client_response.headers['x-total']
+ expect(response.headers['x-total-pages']).to eq client_response.headers['x-total-pages']
+ end
+
context 'when filtering' do
it 'returns filtered result' do
filter = 'test'
diff --git a/spec/controllers/invites_controller_spec.rb b/spec/controllers/invites_controller_spec.rb
index e863f5ef2fc..a8d38d12f23 100644
--- a/spec/controllers/invites_controller_spec.rb
+++ b/spec/controllers/invites_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe InvitesController, :snowplow do
+RSpec.describe InvitesController do
let_it_be(:user) { create(:user) }
let(:member) { create(:project_member, :invited, invite_email: user.email) }
let(:raw_invite_token) { member.raw_invite_token }
@@ -51,6 +51,28 @@ RSpec.describe InvitesController, :snowplow do
end
it_behaves_like 'invalid token'
+
+ context 'when invite comes from the initial email invite' do
+ let(:params) { { id: raw_invite_token, invite_type: Members::InviteEmailExperiment::INVITE_TYPE } }
+
+ it 'tracks via experiment', :aggregate_failures do
+ experiment = double(track: true)
+ allow(controller).to receive(:experiment).and_return(experiment)
+
+ request
+
+ expect(experiment).to have_received(:track).with(:opened)
+ expect(experiment).to have_received(:track).with(:accepted)
+ end
+ end
+
+ context 'when invite does not come from initial email invite' do
+ it 'does not track via experiment' do
+ expect(controller).not_to receive(:experiment)
+
+ request
+ end
+ end
end
context 'when not logged in' do
@@ -82,6 +104,25 @@ RSpec.describe InvitesController, :snowplow do
subject(:request) { post :accept, params: params }
it_behaves_like 'invalid token'
+
+ context 'when invite comes from the initial email invite' do
+ it 'tracks via experiment' do
+ experiment = double(track: true)
+ allow(controller).to receive(:experiment).and_return(experiment)
+
+ post :accept, params: params, session: { invite_type: Members::InviteEmailExperiment::INVITE_TYPE }
+
+ expect(experiment).to have_received(:track).with(:accepted)
+ end
+ end
+
+ context 'when invite does not come from initial email invite' do
+ it 'does not track via experiment' do
+ expect(controller).not_to receive(:experiment)
+
+ request
+ end
+ end
end
describe 'POST #decline for link in UI' do
diff --git a/spec/controllers/projects/discussions_controller_spec.rb b/spec/controllers/projects/discussions_controller_spec.rb
index f9d16e761cb..8a793e29bfa 100644
--- a/spec/controllers/projects/discussions_controller_spec.rb
+++ b/spec/controllers/projects/discussions_controller_spec.rb
@@ -186,6 +186,13 @@ RSpec.describe Projects::DiscussionsController do
expect(Note.find(note.id).discussion.resolved?).to be false
end
+ it "tracks thread unresolve usage data" do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_unresolve_thread_action).with(user: user)
+
+ delete :unresolve, params: request_params
+ end
+
it "returns status 200" do
delete :unresolve, params: request_params
diff --git a/spec/controllers/projects/forks_controller_spec.rb b/spec/controllers/projects/forks_controller_spec.rb
index e8b30294cdd..7da3d403b53 100644
--- a/spec/controllers/projects/forks_controller_spec.rb
+++ b/spec/controllers/projects/forks_controller_spec.rb
@@ -209,6 +209,13 @@ RSpec.describe Projects::ForksController do
}
end
+ let(:created_project) do
+ Namespace
+ .find_by_id(params[:namespace_key])
+ .projects
+ .find_by_path(params.fetch(:path, project.path))
+ end
+
subject do
post :create, params: params
end
@@ -260,6 +267,21 @@ RSpec.describe Projects::ForksController do
expect(response).to redirect_to(namespace_project_import_path(user.namespace, project, continue: continue_params))
end
end
+
+ context 'custom attributes set' do
+ let(:params) { super().merge(path: 'something_custom', name: 'Something Custom', description: 'Something Custom', visibility: 'private') }
+
+ it 'creates a project with custom values' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(namespace_project_import_path(user.namespace, params[:path]))
+ expect(created_project.path).to eq(params[:path])
+ expect(created_project.name).to eq(params[:name])
+ expect(created_project.description).to eq(params[:description])
+ expect(created_project.visibility).to eq(params[:visibility])
+ end
+ end
end
context 'when user is not signed in' do
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index d3bdf1baaae..81ffd2c4512 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -63,53 +63,20 @@ RSpec.describe Projects::IssuesController do
end
end
- describe 'the null hypothesis experiment', :snowplow do
- it 'defines the expected before actions' do
- expect(controller).to use_before_action(:run_null_hypothesis_experiment)
- end
-
- context 'when rolled out to 100%' do
- it 'assigns the candidate experience and tracks the event' do
- get :index, params: { namespace_id: project.namespace, project_id: project }
-
- expect_snowplow_event(
- category: 'null_hypothesis',
- action: 'index',
- context: [{
- schema: 'iglu:com.gitlab/gitlab_experiment/jsonschema/0-3-0',
- data: { variant: 'candidate', experiment: 'null_hypothesis', key: anything }
- }]
- )
- end
+ describe 'the null hypothesis experiment', :experiment do
+ before do
+ stub_experiments(null_hypothesis: :candidate)
end
- context 'when not rolled out' do
- before do
- stub_feature_flags(null_hypothesis: false)
- end
-
- it 'assigns the control experience and tracks the event' do
- get :index, params: { namespace_id: project.namespace, project_id: project }
-
- expect_snowplow_event(
- category: 'null_hypothesis',
- action: 'index',
- context: [{
- schema: 'iglu:com.gitlab/gitlab_experiment/jsonschema/0-3-0',
- data: { variant: 'control', experiment: 'null_hypothesis', key: anything }
- }]
- )
- end
+ it 'defines the expected before actions' do
+ expect(controller).to use_before_action(:run_null_hypothesis_experiment)
end
- context 'when gitlab_experiments is disabled' do
- it 'does not run the experiment at all' do
- stub_feature_flags(gitlab_experiments: false)
+ it 'assigns the candidate experience and tracks the event' do
+ expect(experiment(:null_hypothesis)).to track('index').on_any_instance.for(:candidate)
+ .with_context(project: project)
- expect(controller).not_to receive(:run_null_hypothesis_experiment)
-
- get :index, params: { namespace_id: project.namespace, project_id: project }
- end
+ get :index, params: { namespace_id: project.namespace, project_id: project }
end
end
end
@@ -1314,11 +1281,13 @@ RSpec.describe Projects::IssuesController do
let!(:last_spam_log) { spam_logs.last }
def post_verified_issue
- post_new_issue({}, { spam_log_id: last_spam_log.id, 'g-recaptcha-response': true } )
+ post_new_issue({}, { spam_log_id: last_spam_log.id, 'g-recaptcha-response': 'abc123' } )
end
before do
- expect(controller).to receive_messages(verify_recaptcha: true)
+ expect_next_instance_of(Captcha::CaptchaVerificationService) do |instance|
+ expect(instance).to receive(:execute) { true }
+ end
end
it 'accepts an issue after reCAPTCHA is verified' do
diff --git a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
index f54a07de853..50f8942d9d5 100644
--- a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
@@ -226,11 +226,7 @@ RSpec.describe Projects::MergeRequests::DiffsController do
let(:diffable_merge_ref) { true }
it 'compares diffs with the head' do
- MergeRequests::MergeToRefService.new(project, merge_request.author).execute(merge_request)
-
- expect(CompareService).to receive(:new).with(
- project, merge_request.merge_ref_head.sha
- ).and_call_original
+ create(:merge_request_diff, :merge_head, merge_request: merge_request)
go(diff_head: true)
@@ -242,8 +238,6 @@ RSpec.describe Projects::MergeRequests::DiffsController do
let(:diffable_merge_ref) { false }
it 'compares diffs with the base' do
- expect(CompareService).not_to receive(:new)
-
go(diff_head: true)
expect(response).to have_gitlab_http_status(:ok)
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index cf8b4c564c4..c53dd1265e6 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -1118,6 +1118,108 @@ RSpec.describe Projects::MergeRequestsController do
end
end
+ describe 'GET codequality_mr_diff_reports' do
+ let_it_be(:merge_request) do
+ create(:merge_request,
+ :with_merge_request_pipeline,
+ target_project: project,
+ source_project: project)
+ end
+
+ let(:pipeline) do
+ create(:ci_pipeline,
+ :success,
+ project: merge_request.source_project,
+ ref: merge_request.source_branch,
+ sha: merge_request.diff_head_sha)
+ end
+
+ before do
+ allow_any_instance_of(MergeRequest)
+ .to receive(:find_codequality_mr_diff_reports)
+ .and_return(report)
+
+ allow_any_instance_of(MergeRequest)
+ .to receive(:actual_head_pipeline)
+ .and_return(pipeline)
+ end
+
+ subject(:get_codequality_mr_diff_reports) do
+ get :codequality_mr_diff_reports, params: {
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: merge_request.iid
+ },
+ format: :json
+ end
+
+ context 'permissions on a public project with private CI/CD' do
+ let(:project) { create :project, :repository, :public, :builds_private }
+ let(:report) { { status: :parsed, data: { 'files' => {} } } }
+
+ context 'while signed out' do
+ before do
+ sign_out(user)
+ end
+
+ it 'responds with a 404' do
+ get_codequality_mr_diff_reports
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(response.body).to be_blank
+ end
+ end
+
+ context 'while signed in as an unrelated user' do
+ before do
+ sign_in(create(:user))
+ end
+
+ it 'responds with a 404' do
+ get_codequality_mr_diff_reports
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(response.body).to be_blank
+ end
+ end
+ end
+
+ context 'when pipeline has jobs with codequality mr diff report' do
+ before do
+ allow_any_instance_of(MergeRequest)
+ .to receive(:has_codequality_mr_diff_report?)
+ .and_return(true)
+ end
+
+ context 'when processing codequality mr diff report is in progress' do
+ let(:report) { { status: :parsing } }
+
+ it 'sends polling interval' do
+ expect(Gitlab::PollingInterval).to receive(:set_header)
+
+ get_codequality_mr_diff_reports
+ end
+
+ it 'returns 204 HTTP status' do
+ get_codequality_mr_diff_reports
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+
+ context 'when processing codequality mr diff report is completed' do
+ let(:report) { { status: :parsed, data: { 'files' => {} } } }
+
+ it 'returns codequality mr diff report' do
+ get_codequality_mr_diff_reports
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq({ 'files' => {} })
+ end
+ end
+ end
+ end
+
describe 'GET terraform_reports' do
let_it_be(:merge_request) do
create(:merge_request,
diff --git a/spec/controllers/projects/notes_controller_spec.rb b/spec/controllers/projects/notes_controller_spec.rb
index e96113c0133..6b77794c66d 100644
--- a/spec/controllers/projects/notes_controller_spec.rb
+++ b/spec/controllers/projects/notes_controller_spec.rb
@@ -150,7 +150,7 @@ RSpec.describe Projects::NotesController do
end
it 'returns an empty page of notes' do
- expect(Gitlab::EtagCaching::Middleware).not_to receive(:skip!)
+ expect(Gitlab::EtagCaching::Middleware).to receive(:skip!)
request.headers['X-Last-Fetched-At'] = microseconds(Time.zone.now)
@@ -169,6 +169,8 @@ RSpec.describe Projects::NotesController do
end
it 'returns all notes' do
+ expect(Gitlab::EtagCaching::Middleware).to receive(:skip!)
+
get :index, params: request_params
expect(json_response['notes'].count).to eq((page_1 + page_2 + page_3).size + 1)
diff --git a/spec/controllers/projects/pipelines/tests_controller_spec.rb b/spec/controllers/projects/pipelines/tests_controller_spec.rb
index 61118487e20..e6ff3a487ac 100644
--- a/spec/controllers/projects/pipelines/tests_controller_spec.rb
+++ b/spec/controllers/projects/pipelines/tests_controller_spec.rb
@@ -34,20 +34,38 @@ RSpec.describe Projects::Pipelines::TestsController do
end
describe 'GET #show.json' do
- context 'when pipeline has build report results' do
- let(:pipeline) { create(:ci_pipeline, :with_report_results, project: project) }
+ context 'when pipeline has builds with test reports' do
+ let(:main_pipeline) { create(:ci_pipeline, :with_test_reports_with_three_failures, project: project) }
+ let(:pipeline) { create(:ci_pipeline, :with_test_reports_with_three_failures, project: project, ref: 'new-feature') }
let(:suite_name) { 'test' }
let(:build_ids) { pipeline.latest_builds.pluck(:id) }
+ before do
+ build = main_pipeline.builds.last
+ build.update_column(:finished_at, 1.day.ago) # Just to be sure we are included in the report window
+
+ # The JUnit fixture for the given build has 3 failures.
+ # This service will create 1 test case failure record for each.
+ Ci::TestFailureHistoryService.new(main_pipeline).execute
+ end
+
it 'renders test suite data' do
get_tests_show_json(build_ids)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq('test')
+
+ # Each test failure in this pipeline has a matching failure in the default branch
+ recent_failures = json_response['test_cases'].map { |tc| tc['recent_failures'] }
+ expect(recent_failures).to eq([
+ { 'count' => 1, 'base_branch' => 'master' },
+ { 'count' => 1, 'base_branch' => 'master' },
+ { 'count' => 1, 'base_branch' => 'master' }
+ ])
end
end
- context 'when pipeline does not have build report results' do
+ context 'when pipeline has no builds that matches the given build_ids' do
let(:pipeline) { create(:ci_empty_pipeline) }
let(:suite_name) { 'test' }
diff --git a/spec/controllers/projects/project_members_controller_spec.rb b/spec/controllers/projects/project_members_controller_spec.rb
index d30cc8cbfd9..53a7c2ca069 100644
--- a/spec/controllers/projects/project_members_controller_spec.rb
+++ b/spec/controllers/projects/project_members_controller_spec.rb
@@ -325,6 +325,18 @@ RSpec.describe Projects::ProjectMembersController do
expect(requester.reload.expires_at).not_to eq(expires_at.to_date)
end
+
+ it 'returns error status' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+
+ it 'returns error message' do
+ subject
+
+ expect(json_response).to eq({ 'message' => 'Expires at cannot be a date in the past' })
+ end
end
context 'when set to a date in the future' do
diff --git a/spec/controllers/projects/security/configuration_controller_spec.rb b/spec/controllers/projects/security/configuration_controller_spec.rb
new file mode 100644
index 00000000000..afbebbad3d1
--- /dev/null
+++ b/spec/controllers/projects/security/configuration_controller_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::Security::ConfigurationController do
+ let(:project) { create(:project, :public) }
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ describe 'GET show' do
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(secure_security_and_compliance_configuration_page_on_ce: false)
+ end
+
+ it 'renders not found' do
+ get :show, params: { namespace_id: project.namespace, project_id: project }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when feature flag is enabled' do
+ context 'when user has guest access' do
+ before do
+ project.add_guest(user)
+ end
+
+ it 'denies access' do
+ get :show, params: { namespace_id: project.namespace, project_id: project }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when user has developer access' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'grants access' do
+ get :show, params: { namespace_id: project.namespace, project_id: project }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:show)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/controllers/registrations/experience_levels_controller_spec.rb b/spec/controllers/registrations/experience_levels_controller_spec.rb
index 015daba8682..a84c3257774 100644
--- a/spec/controllers/registrations/experience_levels_controller_spec.rb
+++ b/spec/controllers/registrations/experience_levels_controller_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Registrations::ExperienceLevelsController do
+ include AfterNextHelpers
+
let_it_be(:namespace) { create(:group, path: 'group-path' ) }
let_it_be(:user) { create(:user) }
@@ -45,6 +47,9 @@ RSpec.describe Registrations::ExperienceLevelsController do
end
context 'with an authenticated user' do
+ let_it_be(:project) { build(:project, namespace: namespace, creator: user, path: 'project-path') }
+ let_it_be(:issues_board) { build(:board, id: 123, project: project) }
+
before do
sign_in(user)
stub_experiment_for_subject(onboarding_issues: true)
@@ -85,91 +90,57 @@ RSpec.describe Registrations::ExperienceLevelsController do
end
end
- describe 'redirection' do
- let(:project) { build(:project, namespace: namespace, creator: user, path: 'project-path') }
- let(:issues_board) { build(:board, id: 123, project: project) }
+ context 'when "Learn GitLab" project exists' do
+ let(:learn_gitlab_available?) { true }
before do
- stub_experiment_for_subject(
- onboarding_issues: true,
- default_to_issues_board: default_to_issues_board_xp?
- )
allow_next_instance_of(LearnGitlab) do |learn_gitlab|
allow(learn_gitlab).to receive(:available?).and_return(learn_gitlab_available?)
allow(learn_gitlab).to receive(:project).and_return(project)
allow(learn_gitlab).to receive(:board).and_return(issues_board)
+ allow(learn_gitlab).to receive(:label).and_return(double(id: 1))
end
end
- context 'when namespace_path param is missing' do
- let(:params) { super().merge(namespace_path: nil) }
-
- where(
- default_to_issues_board_xp?: [true, false],
- learn_gitlab_available?: [true, false]
- )
-
- with_them do
- it { is_expected.to redirect_to('/') }
- end
- end
-
- context 'when we have a namespace_path param' do
- using RSpec::Parameterized::TableSyntax
+ context 'redirection' do
+ context 'when namespace_path param is missing' do
+ let(:params) { super().merge(namespace_path: nil) }
- where(:default_to_issues_board_xp?, :learn_gitlab_available?, :path) do
- true | true | '/group-path/project-path/-/boards/123'
- true | false | '/group-path'
- false | true | '/group-path'
- false | false | '/group-path'
- end
+ where(
+ learn_gitlab_available?: [true, false]
+ )
- with_them do
- it { is_expected.to redirect_to(path) }
- end
- end
- end
-
- describe 'applying the chosen level' do
- context 'when a "Learn GitLab" project is available' do
- before do
- allow_next_instance_of(LearnGitlab) do |learn_gitlab|
- allow(learn_gitlab).to receive(:available?).and_return(true)
- allow(learn_gitlab).to receive(:label).and_return(double(id: 1))
+ with_them do
+ it { is_expected.to redirect_to('/') }
end
end
- context 'when novice' do
- let(:params) { super().merge(experience_level: :novice) }
+ context 'when we have a namespace_path param' do
+ using RSpec::Parameterized::TableSyntax
- it 'adds a BoardLabel' do
- expect_next_instance_of(Boards::UpdateService) do |service|
- expect(service).to receive(:execute)
- end
-
- subject
+ where(:learn_gitlab_available?, :path) do
+ true | '/group-path/project-path/-/boards/123'
+ false | '/group-path'
end
- end
-
- context 'when experienced' do
- let(:params) { super().merge(experience_level: :experienced) }
- it 'does not add a BoardLabel' do
- expect(Boards::UpdateService).not_to receive(:new)
-
- subject
+ with_them do
+ it { is_expected.to redirect_to(path) }
end
end
end
- context 'when no "Learn GitLab" project exists' do
+ context 'when novice' do
let(:params) { super().merge(experience_level: :novice) }
- before do
- allow_next_instance_of(LearnGitlab) do |learn_gitlab|
- allow(learn_gitlab).to receive(:available?).and_return(false)
- end
+ it 'adds a BoardLabel' do
+ expect_next(Boards::UpdateService).to receive(:execute)
+
+ subject
end
+ end
+
+ context 'when experienced' do
+ let(:params) { super().merge(experience_level: :experienced) }
it 'does not add a BoardLabel' do
expect(Boards::UpdateService).not_to receive(:new)
@@ -178,6 +149,20 @@ RSpec.describe Registrations::ExperienceLevelsController do
end
end
end
+
+ context 'when no "Learn GitLab" project exists' do
+ let(:params) { super().merge(experience_level: :novice) }
+
+ before do
+ allow_next(LearnGitlab).to receive(:available?).and_return(false)
+ end
+
+ it 'does not add a BoardLabel' do
+ expect(Boards::UpdateService).not_to receive(:new)
+
+ subject
+ end
+ end
end
context 'when user update fails' do
diff --git a/spec/controllers/registrations_controller_spec.rb b/spec/controllers/registrations_controller_spec.rb
index 737ec4f95c5..aac7c10d878 100644
--- a/spec/controllers/registrations_controller_spec.rb
+++ b/spec/controllers/registrations_controller_spec.rb
@@ -73,6 +73,18 @@ RSpec.describe RegistrationsController do
end
end
end
+
+ context 'audit events' do
+ context 'when not licensed' do
+ before do
+ stub_licensed_features(admin_audit_log: false)
+ end
+
+ it 'does not log any audit event' do
+ expect { subject }.not_to change(AuditEvent, :count)
+ end
+ end
+ end
end
context 'when the `require_admin_approval_after_user_signup` setting is turned off' do
diff --git a/spec/controllers/repositories/git_http_controller_spec.rb b/spec/controllers/repositories/git_http_controller_spec.rb
index 34052496871..bd5d07fda71 100644
--- a/spec/controllers/repositories/git_http_controller_spec.rb
+++ b/spec/controllers/repositories/git_http_controller_spec.rb
@@ -52,12 +52,10 @@ RSpec.describe Repositories::GitHttpController do
}.from(0).to(1)
end
- it 'records an onboarding progress action' do
- expect_next_instance_of(OnboardingProgressService) do |service|
- expect(service).to receive(:execute).with(action: :git_read)
- end
+ it_behaves_like 'records an onboarding progress action', :git_read do
+ let(:namespace) { project.namespace }
- send_request
+ subject { send_request }
end
end
end
diff --git a/spec/controllers/search_controller_spec.rb b/spec/controllers/search_controller_spec.rb
index bbd39fd4c83..c531c699e98 100644
--- a/spec/controllers/search_controller_spec.rb
+++ b/spec/controllers/search_controller_spec.rb
@@ -5,278 +5,296 @@ require 'spec_helper'
RSpec.describe SearchController do
include ExternalAuthorizationServiceHelpers
- let(:user) { create(:user) }
+ context 'authorized user' do
+ let(:user) { create(:user) }
- before do
- sign_in(user)
- end
-
- shared_examples_for 'when the user cannot read cross project' do |action, params|
before do
- allow(Ability).to receive(:allowed?).and_call_original
- allow(Ability).to receive(:allowed?)
- .with(user, :read_cross_project, :global) { false }
+ sign_in(user)
end
- it 'blocks access without a project_id' do
- get action, params: params
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
+ shared_examples_for 'when the user cannot read cross project' do |action, params|
+ before do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?)
+ .with(user, :read_cross_project, :global) { false }
+ end
- it 'allows access with a project_id' do
- get action, params: params.merge(project_id: create(:project, :public).id)
+ it 'blocks access without a project_id' do
+ get action, params: params
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
- shared_examples_for 'with external authorization service enabled' do |action, params|
- let(:project) { create(:project, namespace: user.namespace) }
- let(:note) { create(:note_on_issue, project: project) }
+ it 'allows access with a project_id' do
+ get action, params: params.merge(project_id: create(:project, :public).id)
- before do
- enable_external_authorization_service_check
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
- it 'renders a 403 when no project is given' do
- get action, params: params
+ shared_examples_for 'with external authorization service enabled' do |action, params|
+ let(:project) { create(:project, namespace: user.namespace) }
+ let(:note) { create(:note_on_issue, project: project) }
- expect(response).to have_gitlab_http_status(:forbidden)
- end
+ before do
+ enable_external_authorization_service_check
+ end
- it 'renders a 200 when a project was set' do
- get action, params: params.merge(project_id: project.id)
+ it 'renders a 403 when no project is given' do
+ get action, params: params
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
- describe 'GET #show' do
- it_behaves_like 'when the user cannot read cross project', :show, { search: 'hello' } do
- it 'still allows accessing the search page' do
- get :show
+ it 'renders a 200 when a project was set' do
+ get action, params: params.merge(project_id: project.id)
expect(response).to have_gitlab_http_status(:ok)
end
end
- it_behaves_like 'with external authorization service enabled', :show, { search: 'hello' }
+ describe 'GET #show' do
+ it_behaves_like 'when the user cannot read cross project', :show, { search: 'hello' } do
+ it 'still allows accessing the search page' do
+ get :show
- context 'uses the right partials depending on scope' do
- using RSpec::Parameterized::TableSyntax
- render_views
-
- let_it_be(:project) { create(:project, :public, :repository, :wiki_repo) }
-
- before do
- expect(::Gitlab::GitalyClient).to receive(:allow_ref_name_caching).and_call_original
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
- subject { get(:show, params: { project_id: project.id, scope: scope, search: 'merge' }) }
+ it_behaves_like 'with external authorization service enabled', :show, { search: 'hello' }
- where(:partial, :scope) do
- '_blob' | :blobs
- '_wiki_blob' | :wiki_blobs
- '_commit' | :commits
- end
+ context 'uses the right partials depending on scope' do
+ using RSpec::Parameterized::TableSyntax
+ render_views
- with_them do
- it do
- project_wiki = create(:project_wiki, project: project, user: user)
- create(:wiki_page, wiki: project_wiki, title: 'merge', content: 'merge')
+ let_it_be(:project) { create(:project, :public, :repository, :wiki_repo) }
- expect(subject).to render_template("search/results/#{partial}")
+ before do
+ expect(::Gitlab::GitalyClient).to receive(:allow_ref_name_caching).and_call_original
end
- end
- end
- context 'global search' do
- using RSpec::Parameterized::TableSyntax
- render_views
+ subject { get(:show, params: { project_id: project.id, scope: scope, search: 'merge' }) }
- context 'when block_anonymous_global_searches is disabled' do
- before do
- stub_feature_flags(block_anonymous_global_searches: false)
+ where(:partial, :scope) do
+ '_blob' | :blobs
+ '_wiki_blob' | :wiki_blobs
+ '_commit' | :commits
end
- it 'omits pipeline status from load' do
- project = create(:project, :public)
- expect(Gitlab::Cache::Ci::ProjectPipelineStatus).not_to receive(:load_in_batch_for_projects)
+ with_them do
+ it do
+ project_wiki = create(:project_wiki, project: project, user: user)
+ create(:wiki_page, wiki: project_wiki, title: 'merge', content: 'merge')
- get :show, params: { scope: 'projects', search: project.name }
-
- expect(assigns[:search_objects].first).to eq project
+ expect(subject).to render_template("search/results/#{partial}")
+ end
end
+ end
- context 'check search term length' do
- let(:search_queries) do
- char_limit = SearchService::SEARCH_CHAR_LIMIT
- term_limit = SearchService::SEARCH_TERM_LIMIT
- {
- chars_under_limit: ('a' * (char_limit - 1)),
- chars_over_limit: ('a' * (char_limit + 1)),
- terms_under_limit: ('abc ' * (term_limit - 1)),
- terms_over_limit: ('abc ' * (term_limit + 1))
- }
+ context 'global search' do
+ using RSpec::Parameterized::TableSyntax
+ render_views
+
+ context 'when block_anonymous_global_searches is disabled' do
+ before do
+ stub_feature_flags(block_anonymous_global_searches: false)
end
- where(:string_name, :expectation) do
- :chars_under_limit | :not_to_set_flash
- :chars_over_limit | :set_chars_flash
- :terms_under_limit | :not_to_set_flash
- :terms_over_limit | :set_terms_flash
+ it 'omits pipeline status from load' do
+ project = create(:project, :public)
+ expect(Gitlab::Cache::Ci::ProjectPipelineStatus).not_to receive(:load_in_batch_for_projects)
+
+ get :show, params: { scope: 'projects', search: project.name }
+
+ expect(assigns[:search_objects].first).to eq project
end
- with_them do
- it do
- get :show, params: { scope: 'projects', search: search_queries[string_name] }
-
- case expectation
- when :not_to_set_flash
- expect(controller).not_to set_flash[:alert]
- when :set_chars_flash
- expect(controller).to set_flash[:alert].to(/characters/)
- when :set_terms_flash
- expect(controller).to set_flash[:alert].to(/terms/)
+ context 'check search term length' do
+ let(:search_queries) do
+ char_limit = SearchService::SEARCH_CHAR_LIMIT
+ term_limit = SearchService::SEARCH_TERM_LIMIT
+ {
+ chars_under_limit: ('a' * (char_limit - 1)),
+ chars_over_limit: ('a' * (char_limit + 1)),
+ terms_under_limit: ('abc ' * (term_limit - 1)),
+ terms_over_limit: ('abc ' * (term_limit + 1))
+ }
+ end
+
+ where(:string_name, :expectation) do
+ :chars_under_limit | :not_to_set_flash
+ :chars_over_limit | :set_chars_flash
+ :terms_under_limit | :not_to_set_flash
+ :terms_over_limit | :set_terms_flash
+ end
+
+ with_them do
+ it do
+ get :show, params: { scope: 'projects', search: search_queries[string_name] }
+
+ case expectation
+ when :not_to_set_flash
+ expect(controller).not_to set_flash[:alert]
+ when :set_chars_flash
+ expect(controller).to set_flash[:alert].to(/characters/)
+ when :set_terms_flash
+ expect(controller).to set_flash[:alert].to(/terms/)
+ end
end
end
end
end
- end
- context 'when block_anonymous_global_searches is enabled' do
- context 'for unauthenticated user' do
- before do
- sign_out(user)
- end
+ context 'when block_anonymous_global_searches is enabled' do
+ context 'for unauthenticated user' do
+ before do
+ sign_out(user)
+ end
- it 'redirects to login page' do
- get :show, params: { scope: 'projects', search: '*' }
+ it 'redirects to login page' do
+ get :show, params: { scope: 'projects', search: '*' }
- expect(response).to redirect_to new_user_session_path
+ expect(response).to redirect_to new_user_session_path
+ end
end
- end
- context 'for authenticated user' do
- it 'succeeds' do
- get :show, params: { scope: 'projects', search: '*' }
+ context 'for authenticated user' do
+ it 'succeeds' do
+ get :show, params: { scope: 'projects', search: '*' }
- expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
end
end
- end
- it 'finds issue comments' do
- project = create(:project, :public)
- note = create(:note_on_issue, project: project)
+ it 'finds issue comments' do
+ project = create(:project, :public)
+ note = create(:note_on_issue, project: project)
- get :show, params: { project_id: project.id, scope: 'notes', search: note.note }
-
- expect(assigns[:search_objects].first).to eq note
- end
+ get :show, params: { project_id: project.id, scope: 'notes', search: note.note }
- context 'unique users tracking' do
- before do
- allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
+ expect(assigns[:search_objects].first).to eq note
end
- it_behaves_like 'tracking unique hll events', :search_track_unique_users do
- subject(:request) { get :show, params: { scope: 'projects', search: 'term' } }
+ context 'unique users tracking' do
+ before do
+ allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
+ end
+
+ it_behaves_like 'tracking unique hll events', :search_track_unique_users do
+ subject(:request) { get :show, params: { scope: 'projects', search: 'term' } }
- let(:target_id) { 'i_search_total' }
- let(:expected_type) { instance_of(String) }
+ let(:target_id) { 'i_search_total' }
+ let(:expected_type) { instance_of(String) }
+ end
end
- end
- context 'on restricted projects' do
- context 'when signed out' do
- before do
- sign_out(user)
+ context 'on restricted projects' do
+ context 'when signed out' do
+ before do
+ sign_out(user)
+ end
+
+ it "doesn't expose comments on issues" do
+ project = create(:project, :public, :issues_private)
+ note = create(:note_on_issue, project: project)
+
+ get :show, params: { project_id: project.id, scope: 'notes', search: note.note }
+
+ expect(assigns[:search_objects].count).to eq(0)
+ end
end
- it "doesn't expose comments on issues" do
- project = create(:project, :public, :issues_private)
- note = create(:note_on_issue, project: project)
+ it "doesn't expose comments on merge_requests" do
+ project = create(:project, :public, :merge_requests_private)
+ note = create(:note_on_merge_request, project: project)
get :show, params: { project_id: project.id, scope: 'notes', search: note.note }
expect(assigns[:search_objects].count).to eq(0)
end
- end
- it "doesn't expose comments on merge_requests" do
- project = create(:project, :public, :merge_requests_private)
- note = create(:note_on_merge_request, project: project)
+ it "doesn't expose comments on snippets" do
+ project = create(:project, :public, :snippets_private)
+ note = create(:note_on_project_snippet, project: project)
- get :show, params: { project_id: project.id, scope: 'notes', search: note.note }
+ get :show, params: { project_id: project.id, scope: 'notes', search: note.note }
- expect(assigns[:search_objects].count).to eq(0)
+ expect(assigns[:search_objects].count).to eq(0)
+ end
end
+ end
- it "doesn't expose comments on snippets" do
- project = create(:project, :public, :snippets_private)
- note = create(:note_on_project_snippet, project: project)
-
- get :show, params: { project_id: project.id, scope: 'notes', search: note.note }
+ describe 'GET #count' do
+ it_behaves_like 'when the user cannot read cross project', :count, { search: 'hello', scope: 'projects' }
+ it_behaves_like 'with external authorization service enabled', :count, { search: 'hello', scope: 'projects' }
- expect(assigns[:search_objects].count).to eq(0)
- end
- end
- end
+ it 'returns the result count for the given term and scope' do
+ create(:project, :public, name: 'hello world')
+ create(:project, :public, name: 'foo bar')
- describe 'GET #count' do
- it_behaves_like 'when the user cannot read cross project', :count, { search: 'hello', scope: 'projects' }
- it_behaves_like 'with external authorization service enabled', :count, { search: 'hello', scope: 'projects' }
+ get :count, params: { search: 'hello', scope: 'projects' }
- it 'returns the result count for the given term and scope' do
- create(:project, :public, name: 'hello world')
- create(:project, :public, name: 'foo bar')
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq({ 'count' => '1' })
+ end
- get :count, params: { search: 'hello', scope: 'projects' }
+ it 'raises an error if search term is missing' do
+ expect do
+ get :count, params: { scope: 'projects' }
+ end.to raise_error(ActionController::ParameterMissing)
+ end
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to eq({ 'count' => '1' })
+ it 'raises an error if search scope is missing' do
+ expect do
+ get :count, params: { search: 'hello' }
+ end.to raise_error(ActionController::ParameterMissing)
+ end
end
- it 'raises an error if search term is missing' do
- expect do
- get :count, params: { scope: 'projects' }
- end.to raise_error(ActionController::ParameterMissing)
+ describe 'GET #autocomplete' do
+ it_behaves_like 'when the user cannot read cross project', :autocomplete, { term: 'hello' }
+ it_behaves_like 'with external authorization service enabled', :autocomplete, { term: 'hello' }
end
- it 'raises an error if search scope is missing' do
- expect do
- get :count, params: { search: 'hello' }
- end.to raise_error(ActionController::ParameterMissing)
- end
- end
+ describe '#append_info_to_payload' do
+ it 'appends search metadata for logging' do
+ last_payload = nil
+ original_append_info_to_payload = controller.method(:append_info_to_payload)
- describe 'GET #autocomplete' do
- it_behaves_like 'when the user cannot read cross project', :autocomplete, { term: 'hello' }
- it_behaves_like 'with external authorization service enabled', :autocomplete, { term: 'hello' }
- end
+ expect(controller).to receive(:append_info_to_payload) do |payload|
+ original_append_info_to_payload.call(payload)
+ last_payload = payload
+ end
- describe '#append_info_to_payload' do
- it 'appends search metadata for logging' do
- last_payload = nil
- original_append_info_to_payload = controller.method(:append_info_to_payload)
+ get :show, params: { scope: 'issues', search: 'hello world', group_id: '123', project_id: '456', confidential: true, state: true, force_search_results: true }
- expect(controller).to receive(:append_info_to_payload) do |payload|
- original_append_info_to_payload.call(payload)
- last_payload = payload
+ expect(last_payload[:metadata]['meta.search.group_id']).to eq('123')
+ expect(last_payload[:metadata]['meta.search.project_id']).to eq('456')
+ expect(last_payload[:metadata]).not_to have_key('meta.search.search')
+ expect(last_payload[:metadata]['meta.search.scope']).to eq('issues')
+ expect(last_payload[:metadata]['meta.search.force_search_results']).to eq('true')
+ expect(last_payload[:metadata]['meta.search.filters.confidential']).to eq('true')
+ expect(last_payload[:metadata]['meta.search.filters.state']).to eq('true')
end
+ end
+ end
- get :show, params: { scope: 'issues', search: 'hello world', group_id: '123', project_id: '456', confidential: true, state: true, force_search_results: true }
+ context 'unauthorized user' do
+ describe 'GET #opensearch' do
+ render_views
+
+ it 'renders xml' do
+ get :opensearch, format: :xml
+
+ doc = Nokogiri::XML.parse(response.body)
- expect(last_payload[:metadata]['meta.search.group_id']).to eq('123')
- expect(last_payload[:metadata]['meta.search.project_id']).to eq('456')
- expect(last_payload[:metadata]).not_to have_key('meta.search.search')
- expect(last_payload[:metadata]['meta.search.scope']).to eq('issues')
- expect(last_payload[:metadata]['meta.search.force_search_results']).to eq('true')
- expect(last_payload[:metadata]['meta.search.filters.confidential']).to eq('true')
- expect(last_payload[:metadata]['meta.search.filters.state']).to eq('true')
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(doc.css('OpenSearchDescription ShortName').text).to eq('GitLab')
+ expect(doc.css('OpenSearchDescription *').map(&:name)).to eq(%w[ShortName Description InputEncoding Image Url SearchForm])
+ end
end
end
end
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index 3087beb1326..efbf6e7baab 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -184,6 +184,7 @@ RSpec.describe 'Database schema' do
"ApplicationSetting" => %w[repository_storages_weighted],
"AlertManagement::Alert" => %w[payload],
"Ci::BuildMetadata" => %w[config_options config_variables],
+ "ExperimentSubject" => %w[context],
"ExperimentUser" => %w[context],
"Geo::Event" => %w[payload],
"GeoNodeStatus" => %w[status],
diff --git a/spec/deprecation_toolkit_env.rb b/spec/deprecation_toolkit_env.rb
index bc90f67f0db..f66458cfbfb 100644
--- a/spec/deprecation_toolkit_env.rb
+++ b/spec/deprecation_toolkit_env.rb
@@ -1,18 +1,89 @@
# frozen_string_literal: true
-if ENV.key?('RECORD_DEPRECATIONS')
- require 'deprecation_toolkit'
- require 'deprecation_toolkit/rspec'
- DeprecationToolkit::Configuration.test_runner = :rspec
- DeprecationToolkit::Configuration.deprecation_path = 'deprecations'
- DeprecationToolkit::Configuration.behavior = DeprecationToolkit::Behaviors::Record
-
- # Enable ruby deprecations for keywords, it's suppressed by default in Ruby 2.7.2
- Warning[:deprecated] = true
-
- kwargs_warnings = [
- # Taken from https://github.com/jeremyevans/ruby-warning/blob/1.1.0/lib/warning.rb#L18
+require 'deprecation_toolkit'
+require 'deprecation_toolkit/rspec'
+
+module DeprecationToolkitEnv
+ module DeprecationBehaviors
+ class SelectiveRaise
+ attr_reader :disallowed_deprecations_proc
+
+ class RaiseDisallowedDeprecation < StandardError
+ def initialize(test, current_deprecations)
+ message = <<~EOF
+ Disallowed deprecations detected while running test #{test}:
+
+ #{current_deprecations.deprecations.join("\n")}
+ EOF
+
+ super(message)
+ end
+ end
+
+ def initialize(disallowed_deprecations_proc)
+ @disallowed_deprecations_proc = disallowed_deprecations_proc
+ end
+
+ # Note: trigger does not get called if the current_deprecations matches recorded_deprecations
+ # See https://github.com/Shopify/deprecation_toolkit/blob/2398f38acb62220fb79a6cd720f61d9cea26bc06/lib/deprecation_toolkit/test_triggerer.rb#L8-L11
+ def trigger(test, current_deprecations, recorded_deprecations)
+ if selected_for_raise?(current_deprecations)
+ raise RaiseDisallowedDeprecation.new(test, current_deprecations)
+ elsif ENV['RECORD_DEPRECATIONS']
+ record(test, current_deprecations, recorded_deprecations)
+ end
+ end
+
+ private
+
+ def selected_for_raise?(current_deprecations)
+ disallowed_deprecations_proc.call(current_deprecations.deprecations_without_stacktrace)
+ end
+
+ def record(test, current_deprecations, recorded_deprecations)
+ ::DeprecationToolkit::Behaviors::Record.trigger(test, current_deprecations, recorded_deprecations)
+ end
+ end
+ end
+
+ # Taken from https://github.com/jeremyevans/ruby-warning/blob/1.1.0/lib/warning.rb#L18
+ def self.kwargs_warning
%r{warning: (?:Using the last argument (?:for `.+' )?as keyword parameters is deprecated; maybe \*\* should be added to the call|Passing the keyword argument (?:for `.+' )?as the last hash parameter is deprecated|Splitting the last argument (?:for `.+' )?into positional and keyword parameters is deprecated|The called method (?:`.+' )?is defined here)\n\z}
- ]
- DeprecationToolkit::Configuration.warnings_treated_as_deprecation = kwargs_warnings
+ end
+
+ # Allow these Gem paths to trigger keyword warnings as we upgrade these gems
+ # one by one
+ def self.allowed_kwarg_warning_paths
+ %w[
+ spec/support/gitlab_experiment.rb
+ activerecord-6.0.3.4/lib/active_record/migration.rb
+ devise-4.7.3/lib/devise/test/controller_helpers.rb
+ grape-1.5.1/lib/grape/middleware/stack.rb
+ grape-1.5.1/lib/grape/validations/validators/coerce.rb
+ grape_logging-1.8.3/lib/grape_logging/middleware/request_logger.rb
+ activesupport-6.0.3.4/lib/active_support/cache.rb
+ factory_bot-6.1.0/lib/factory_bot/decorator.rb
+ batch-loader-1.4.0/lib/batch_loader/graphql.rb
+ carrierwave-1.3.1/lib/carrierwave/sanitized_file.rb
+ activerecord-6.0.3.4/lib/active_record/relation.rb
+ ]
+ end
+
+ def self.configure!
+    # Enable Ruby deprecation warnings for keywords; they're suppressed by default in Ruby 2.7.2
+ Warning[:deprecated] = true
+
+ DeprecationToolkit::Configuration.test_runner = :rspec
+ DeprecationToolkit::Configuration.deprecation_path = 'deprecations'
+ DeprecationToolkit::Configuration.warnings_treated_as_deprecation = [kwargs_warning]
+
+ disallowed_deprecations = -> (deprecations) do
+ deprecations.any? do |deprecation|
+ kwargs_warning.match?(deprecation) &&
+ allowed_kwarg_warning_paths.none? { |path| deprecation.include?(path) }
+ end
+ end
+
+ DeprecationToolkit::Configuration.behavior = DeprecationBehaviors::SelectiveRaise.new(disallowed_deprecations)
+ end
end
diff --git a/spec/experiments/application_experiment_spec.rb b/spec/experiments/application_experiment_spec.rb
index ece52d37351..6b4a3ece59e 100644
--- a/spec/experiments/application_experiment_spec.rb
+++ b/spec/experiments/application_experiment_spec.rb
@@ -2,8 +2,60 @@
require 'spec_helper'
-RSpec.describe ApplicationExperiment do
- subject { described_class.new(:stub) }
+RSpec.describe ApplicationExperiment, :experiment do
+ subject { described_class.new('namespaced/stub') }
+
+ let(:feature_definition) do
+ { name: 'namespaced_stub', type: 'experiment', group: 'group::adoption', default_enabled: false }
+ end
+
+ around do |example|
+ Feature::Definition.definitions[:namespaced_stub] = Feature::Definition.new('namespaced_stub.yml', feature_definition)
+ example.run
+ Feature::Definition.definitions.delete(:namespaced_stub)
+ end
+
+ before do
+ allow(subject).to receive(:enabled?).and_return(true)
+ end
+
+ it "naively assumes a 1x1 relationship to feature flags for tests" do
+ expect(Feature).to receive(:persist_used!).with('namespaced_stub')
+
+ described_class.new('namespaced/stub')
+ end
+
+ describe "enabled" do
+ before do
+ allow(subject).to receive(:enabled?).and_call_original
+
+ allow(Feature::Definition).to receive(:get).and_return('_instance_')
+ allow(Gitlab).to receive(:dev_env_or_com?).and_return(true)
+ allow(Feature).to receive(:get).and_return(double(state: :on))
+ end
+
+ it "is enabled when all criteria are met" do
+ expect(subject).to be_enabled
+ end
+
+ it "isn't enabled if the feature definition doesn't exist" do
+ expect(Feature::Definition).to receive(:get).with('namespaced_stub').and_return(nil)
+
+ expect(subject).not_to be_enabled
+ end
+
+ it "isn't enabled if we're not in dev or dotcom environments" do
+ expect(Gitlab).to receive(:dev_env_or_com?).and_return(false)
+
+ expect(subject).not_to be_enabled
+ end
+
+ it "isn't enabled if the feature flag state is :off" do
+ expect(Feature).to receive(:get).with('namespaced_stub').and_return(double(state: :off))
+
+ expect(subject).not_to be_enabled
+ end
+ end
describe "publishing results" do
it "tracks the assignment" do
@@ -16,9 +68,9 @@ RSpec.describe ApplicationExperiment do
expect(Gon.global).to receive(:push).with(
{
experiment: {
- 'stub' => { # string key because it can be namespaced
- experiment: 'stub',
- key: 'e8f65fd8d973f9985dc7ea3cf1614ae1',
+ 'namespaced/stub' => { # string key because it can be namespaced
+ experiment: 'namespaced/stub',
+ key: '86208ac54ca798e11f127e8b23ec396a',
variant: 'control'
}
}
@@ -31,8 +83,8 @@ RSpec.describe ApplicationExperiment do
end
describe "tracking events", :snowplow do
- it "doesn't track if excluded" do
- subject.exclude { true }
+ it "doesn't track if we shouldn't track" do
+ allow(subject).to receive(:should_track?).and_return(false)
subject.track(:action)
@@ -45,7 +97,7 @@ RSpec.describe ApplicationExperiment do
])
expect_snowplow_event(
- category: 'stub',
+ category: 'namespaced/stub',
action: 'action',
property: '_property_',
context: [
@@ -55,7 +107,7 @@ RSpec.describe ApplicationExperiment do
},
{
schema: 'iglu:com.gitlab/gitlab_experiment/jsonschema/0-3-0',
- data: { experiment: 'stub', key: 'e8f65fd8d973f9985dc7ea3cf1614ae1', variant: 'control' }
+ data: { experiment: 'namespaced/stub', key: '86208ac54ca798e11f127e8b23ec396a', variant: 'control' }
}
]
)
@@ -63,8 +115,14 @@ RSpec.describe ApplicationExperiment do
end
describe "variant resolution" do
+ it "uses the default value as specified in the yaml" do
+ expect(Feature).to receive(:enabled?).with('namespaced_stub', subject, type: :experiment, default_enabled: :yaml)
+
+ expect(subject.variant.name).to eq('control')
+ end
+
it "returns nil when not rolled out" do
- stub_feature_flags(stub: false)
+ stub_feature_flags(namespaced_stub: false)
expect(subject.variant.name).to eq('control')
end
@@ -105,7 +163,7 @@ RSpec.describe ApplicationExperiment do
# every control variant assigned, we'd inflate the cache size and
# wouldn't be able to roll out to subjects that we'd already assigned to
# the control.
- stub_feature_flags(stub: false) # simulate being not rolled out
+ stub_feature_flags(namespaced_stub: false) # simulate being not rolled out
expect(subject.variant.name).to eq('control') # if we ask, it should be control
diff --git a/spec/experiments/members/invite_email_experiment_spec.rb b/spec/experiments/members/invite_email_experiment_spec.rb
new file mode 100644
index 00000000000..b8ef502382c
--- /dev/null
+++ b/spec/experiments/members/invite_email_experiment_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Members::InviteEmailExperiment do
+ subject do
+ experiment('members/invite_email', actor: double('Member', created_by: double('User', avatar_url: '_avatar_url_')))
+ end
+
+ before do
+ allow(subject).to receive(:enabled?).and_return(true)
+ end
+
+ describe "variant resolution" do
+ it "returns nil when not rolled out" do
+ stub_feature_flags(members_invite_email: false)
+
+ expect(subject.variant.name).to eq('control')
+ end
+
+ context "when rolled out to 100%" do
+ it "returns the first variant name" do
+ subject.try(:avatar) {}
+
+ expect(subject.variant.name).to eq('avatar')
+ end
+ end
+ end
+
+ describe "exclusions", :experiment do
+ it "excludes when created by is nil" do
+ expect(experiment('members/invite_email')).to exclude(actor: double(created_by: nil))
+ end
+
+ it "excludes when avatar_url is nil" do
+ member_without_avatar_url = double('Member', created_by: double('User', avatar_url: nil))
+
+ expect(experiment('members/invite_email')).to exclude(actor: member_without_avatar_url)
+ end
+ end
+end
diff --git a/spec/factories/audit_events.rb b/spec/factories/audit_events.rb
index 4e72976a9e5..05b86d2f13b 100644
--- a/spec/factories/audit_events.rb
+++ b/spec/factories/audit_events.rb
@@ -49,6 +49,21 @@ FactoryBot.define do
end
end
+ trait :unauthenticated do
+ author_id { -1 }
+ details do
+ {
+ custom_message: 'Custom action',
+ author_name: 'An unauthenticated user',
+ target_id: target_project.id,
+ target_type: 'Project',
+ target_details: target_project.name,
+ ip_address: '127.0.0.1',
+ entity_path: target_project.full_path
+ }
+ end
+ end
+
trait :group_event do
transient { target_group { association(:group) } }
diff --git a/spec/factories/ci/bridge.rb b/spec/factories/ci/bridge.rb
index 7727a468633..7258b3367d3 100644
--- a/spec/factories/ci/bridge.rb
+++ b/spec/factories/ci/bridge.rb
@@ -53,6 +53,11 @@ FactoryBot.define do
finished_at { '2013-10-29 09:53:28 CET' }
end
+ trait :success do
+ finished
+ status { 'success' }
+ end
+
trait :failed do
finished
status { 'failed' }
@@ -75,5 +80,9 @@ FactoryBot.define do
trait :playable do
manual
end
+
+ trait :allowed_to_fail do
+ allow_failure { true }
+ end
end
end
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index 24abad66530..c85918a3187 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -308,12 +308,6 @@ FactoryBot.define do
end
end
- trait :codequality_report do
- after(:build) do |build|
- build.job_artifacts << create(:ci_job_artifact, :codequality, job: build)
- end
- end
-
trait :test_reports do
after(:build) do |build|
build.job_artifacts << create(:ci_job_artifact, :junit, job: build)
diff --git a/spec/factories/ci/pipeline_artifacts.rb b/spec/factories/ci/pipeline_artifacts.rb
index fa33609dd6c..05ff7afed7c 100644
--- a/spec/factories/ci/pipeline_artifacts.rb
+++ b/spec/factories/ci/pipeline_artifacts.rb
@@ -4,18 +4,30 @@ FactoryBot.define do
factory :ci_pipeline_artifact, class: 'Ci::PipelineArtifact' do
pipeline factory: :ci_pipeline
project { pipeline.project }
- file_type { :code_coverage }
file_format { :raw }
file_store { ObjectStorage::SUPPORTED_STORES.first }
- size { 1.megabytes }
-
+ size { 1.megabyte }
+ file_type { :code_coverage }
after(:build) do |artifact, _evaluator|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/pipeline_artifacts/code_coverage.json'), 'application/json')
end
- trait :with_multibyte_characters do
+ trait :with_coverage_report do
+ file_type { :code_coverage }
+
+ after(:build) do |artifact, _evaluator|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/pipeline_artifacts/code_coverage.json'), 'application/json')
+ end
+
+ size { file.size }
+ end
+
+ trait :with_coverage_multibyte_characters do
+ file_type { :code_coverage }
size { { "utf8" => "✓" }.to_json.bytesize }
+
after(:build) do |artifact, _evaluator|
artifact.file = CarrierWaveStringFile.new_file(
file_content: { "utf8" => "✓" }.to_json,
@@ -26,12 +38,25 @@ FactoryBot.define do
end
trait :with_code_coverage_with_multiple_files do
+ file_type { :code_coverage }
+
after(:build) do |artifact, _evaluator|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/pipeline_artifacts/code_coverage_with_multiple_files.json'), 'application/json'
)
end
+ size { 1.megabyte }
+ end
+
+ trait :with_codequality_mr_diff_report do
+ file_type { :code_quality_mr_diff }
+
+ after(:build) do |artifact, _evaluator|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/pipeline_artifacts/code_quality_mr_diff.json'), 'application/json')
+ end
+
size { file.size }
end
end
diff --git a/spec/factories/ci/pipelines.rb b/spec/factories/ci/pipelines.rb
index 86a8b008e48..530bb0cd25b 100644
--- a/spec/factories/ci/pipelines.rb
+++ b/spec/factories/ci/pipelines.rb
@@ -93,14 +93,6 @@ FactoryBot.define do
end
end
- trait :with_codequality_report do
- status { :success }
-
- after(:build) do |pipeline, evaluator|
- pipeline.builds << build(:ci_build, :codequality_report, pipeline: pipeline, project: pipeline.project)
- end
- end
-
trait :with_test_reports do
status { :success }
@@ -159,7 +151,13 @@ FactoryBot.define do
trait :with_coverage_report_artifact do
after(:build) do |pipeline, evaluator|
- pipeline.pipeline_artifacts << build(:ci_pipeline_artifact, pipeline: pipeline, project: pipeline.project)
+ pipeline.pipeline_artifacts << build(:ci_pipeline_artifact, :with_coverage_report, pipeline: pipeline, project: pipeline.project)
+ end
+ end
+
+ trait :with_codequality_mr_diff_report do
+ after(:build) do |pipeline, evaluator|
+ pipeline.pipeline_artifacts << build(:ci_pipeline_artifact, :with_codequality_mr_diff_report, pipeline: pipeline, project: pipeline.project)
end
end
diff --git a/spec/factories/ci/reports/codequality_degradations.rb b/spec/factories/ci/reports/codequality_degradations.rb
new file mode 100644
index 00000000000..d82157b457a
--- /dev/null
+++ b/spec/factories/ci/reports/codequality_degradations.rb
@@ -0,0 +1,98 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :codequality_degradation_1, class: Hash do
+ skip_create
+
+ initialize_with do
+ {
+ "categories": [
+ "Complexity"
+ ],
+ "check_name": "argument_count",
+ "content": {
+ "body": ""
+ },
+ "description": "Avoid parameter lists longer than 5 parameters. [12/5]",
+ "fingerprint": "15cdb5c53afd42bc22f8ca366a08d547",
+ "location": {
+ "path": "file_a.rb",
+ "lines": {
+ "begin": 10,
+ "end": 10
+ }
+ },
+ "other_locations": [],
+ "remediation_points": 900000,
+ "severity": "major",
+ "type": "issue",
+ "engine_name": "structure"
+ }.with_indifferent_access
+ end
+ end
+
+ factory :codequality_degradation_2, class: Hash do
+ skip_create
+
+ initialize_with do
+ {
+ "categories": [
+ "Complexity"
+ ],
+ "check_name": "argument_count",
+ "content": {
+ "body": ""
+ },
+ "description": "Method `new_array` has 12 arguments (exceeds 4 allowed). Consider refactoring.",
+ "fingerprint": "f3bdc1e8c102ba5fbd9e7f6cda51c95e",
+ "location": {
+ "path": "file_a.rb",
+ "lines": {
+ "begin": 10,
+ "end": 10
+ }
+ },
+ "other_locations": [],
+ "remediation_points": 900000,
+ "severity": "major",
+ "type": "issue",
+ "engine_name": "structure"
+ }.with_indifferent_access
+ end
+ end
+
+ factory :codequality_degradation_3, class: Hash do
+ skip_create
+
+ initialize_with do
+ {
+ "type": "Issue",
+ "check_name": "Rubocop/Metrics/ParameterLists",
+ "description": "Avoid parameter lists longer than 5 parameters. [12/5]",
+ "categories": [
+ "Complexity"
+ ],
+ "remediation_points": 550000,
+ "location": {
+ "path": "file_b.rb",
+ "positions": {
+ "begin": {
+ "column": 14,
+ "line": 10
+ },
+ "end": {
+ "column": 39,
+ "line": 10
+ }
+ }
+ },
+ "content": {
+ "body": "This cop checks for methods with too many parameters.\nThe maximum number of parameters is configurable.\nKeyword arguments can optionally be excluded from the total count."
+ },
+ "engine_name": "rubocop",
+ "fingerprint": "ab5f8b935886b942d621399f5a2ca16e",
+ "severity": "minor"
+ }.with_indifferent_access
+ end
+ end
+end
diff --git a/spec/factories/ci/resource.rb b/spec/factories/ci/resource.rb
index 515329506e5..dec26013a25 100644
--- a/spec/factories/ci/resource.rb
+++ b/spec/factories/ci/resource.rb
@@ -5,7 +5,7 @@ FactoryBot.define do
resource_group factory: :ci_resource_group
trait(:retained) do
- build factory: :ci_build
+ processable factory: :ci_build
end
end
end
diff --git a/spec/factories/dependency_proxy.rb b/spec/factories/dependency_proxy.rb
index de95df19876..94a7986a8fa 100644
--- a/spec/factories/dependency_proxy.rb
+++ b/spec/factories/dependency_proxy.rb
@@ -10,7 +10,8 @@ FactoryBot.define do
factory :dependency_proxy_manifest, class: 'DependencyProxy::Manifest' do
group
file { fixture_file_upload('spec/fixtures/dependency_proxy/manifest') }
- digest { 'sha256:5ab5a6872b264fe4fd35d63991b9b7d8425f4bc79e7cf4d563c10956581170c9' }
+ digest { 'sha256:d0710affa17fad5f466a70159cc458227bd25d4afb39514ef662ead3e6c99515' }
file_name { 'alpine:latest.json' }
+ content_type { 'application/vnd.docker.distribution.manifest.v2+json' }
end
end
diff --git a/spec/factories/merge_request_diffs.rb b/spec/factories/merge_request_diffs.rb
index 481cabdae6d..f93f3f22109 100644
--- a/spec/factories/merge_request_diffs.rb
+++ b/spec/factories/merge_request_diffs.rb
@@ -10,12 +10,18 @@ FactoryBot.define do
head_commit_sha { Digest::SHA1.hexdigest(SecureRandom.hex) }
start_commit_sha { Digest::SHA1.hexdigest(SecureRandom.hex) }
+ diff_type { :regular }
+
trait :external do
external_diff { fixture_file_upload("spec/fixtures/doc_sample.txt", "plain/txt") }
stored_externally { true }
importing { true } # this avoids setting the state to 'empty'
end
+ trait :merge_head do
+ diff_type { :merge_head }
+ end
+
factory :external_merge_request_diff, traits: [:external]
end
end
diff --git a/spec/factories/merge_requests.rb b/spec/factories/merge_requests.rb
index e69743122cc..dba98593a03 100644
--- a/spec/factories/merge_requests.rb
+++ b/spec/factories/merge_requests.rb
@@ -200,6 +200,18 @@ FactoryBot.define do
end
end
+ trait :with_codequality_mr_diff_reports do
+ after(:build) do |merge_request|
+ merge_request.head_pipeline = build(
+ :ci_pipeline,
+ :success,
+ :with_codequality_mr_diff_report,
+ project: merge_request.source_project,
+ ref: merge_request.source_branch,
+ sha: merge_request.diff_head_sha)
+ end
+ end
+
trait :with_terraform_reports do
after(:build) do |merge_request|
merge_request.head_pipeline = build(
diff --git a/spec/factories/packages.rb b/spec/factories/packages.rb
index 31f1aabe5dd..ca38793ac08 100644
--- a/spec/factories/packages.rb
+++ b/spec/factories/packages.rb
@@ -176,6 +176,24 @@ FactoryBot.define do
composer_json { { name: 'foo' } }
end
+ factory :composer_cache_file, class: 'Packages::Composer::CacheFile' do
+ group
+
+ file_sha256 { '1' * 64 }
+
+ transient do
+ file_fixture { 'spec/fixtures/packages/composer/package.json' }
+ end
+
+ after(:build) do |cache_file, evaluator|
+ cache_file.file = fixture_file_upload(evaluator.file_fixture)
+ end
+
+ trait(:object_storage) do
+ file_store { Packages::Composer::CacheUploader::Store::REMOTE }
+ end
+ end
+
factory :maven_metadatum, class: 'Packages::Maven::Metadatum' do
association :package, package_type: :maven
path { 'my/company/app/my-app/1.0-SNAPSHOT' }
diff --git a/spec/factories/packages/debian/group_component.rb b/spec/factories/packages/debian/group_component.rb
new file mode 100644
index 00000000000..92d438be389
--- /dev/null
+++ b/spec/factories/packages/debian/group_component.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :debian_group_component, class: 'Packages::Debian::GroupComponent' do
+ distribution { association(:debian_group_distribution) }
+
+ sequence(:name) { |n| "group-component-#{n}" }
+ end
+end
diff --git a/spec/factories/packages/debian/project_component.rb b/spec/factories/packages/debian/project_component.rb
new file mode 100644
index 00000000000..a56aec4cef0
--- /dev/null
+++ b/spec/factories/packages/debian/project_component.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :debian_project_component, class: 'Packages::Debian::ProjectComponent' do
+ distribution { association(:debian_project_distribution) }
+
+ sequence(:name) { |n| "project-component-#{n}" }
+ end
+end
diff --git a/spec/factories/token_with_ivs.rb b/spec/factories/token_with_ivs.rb
new file mode 100644
index 00000000000..68989f6c5bc
--- /dev/null
+++ b/spec/factories/token_with_ivs.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :token_with_iv do
+ hashed_token { ::Digest::SHA256.digest(SecureRandom.hex(50)) }
+ iv { ::Digest::SHA256.digest(SecureRandom.hex(50)) }
+ hashed_plaintext_token { ::Digest::SHA256.digest(SecureRandom.hex(50)) }
+ end
+end
diff --git a/spec/factories/u2f_registrations.rb b/spec/factories/u2f_registrations.rb
index 7017b0ee9e7..40ad221415c 100644
--- a/spec/factories/u2f_registrations.rb
+++ b/spec/factories/u2f_registrations.rb
@@ -2,6 +2,8 @@
FactoryBot.define do
factory :u2f_registration do
+ user
+
certificate { FFaker::BaconIpsum.characters(728) }
key_handle { FFaker::BaconIpsum.characters(86) }
public_key { FFaker::BaconIpsum.characters(88) }
diff --git a/spec/features/admin/admin_cohorts_spec.rb b/spec/features/admin/admin_cohorts_spec.rb
deleted file mode 100644
index 982a9333275..00000000000
--- a/spec/features/admin/admin_cohorts_spec.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Cohorts page' do
- before do
- admin = create(:admin)
- sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
- end
-
- context 'with usage ping enabled' do
- it 'shows users count per month' do
- stub_application_setting(usage_ping_enabled: true)
-
- create_list(:user, 2)
-
- visit admin_cohorts_path
-
- expect(page).to have_content("#{Time.now.strftime('%b %Y')} 3 0")
- end
- end
-
- context 'with usage ping disabled' do
- it 'shows empty state', :js do
- stub_application_setting(usage_ping_enabled: false)
-
- visit admin_cohorts_path
-
- expect(page).to have_selector(".js-empty-state")
- end
- end
-end
diff --git a/spec/features/admin/admin_disables_git_access_protocol_spec.rb b/spec/features/admin/admin_disables_git_access_protocol_spec.rb
index f7f0592a315..b370b779afe 100644
--- a/spec/features/admin/admin_disables_git_access_protocol_spec.rb
+++ b/spec/features/admin/admin_disables_git_access_protocol_spec.rb
@@ -37,7 +37,10 @@ RSpec.describe 'Admin disables Git access protocol', :js do
it 'shows only the SSH clone information' do
resize_screen_xs
visit_project
- find('.dropdown-toggle').click
+
+ within('.js-mobile-git-clone') do
+ find('.dropdown-toggle').click
+ end
expect(page).to have_content('Copy SSH clone URL')
expect(page).not_to have_content('Copy HTTP clone URL')
@@ -66,7 +69,10 @@ RSpec.describe 'Admin disables Git access protocol', :js do
it 'shows only the HTTP clone information' do
resize_screen_xs
visit_project
- find('.dropdown-toggle').click
+
+ within('.js-mobile-git-clone') do
+ find('.dropdown-toggle').click
+ end
expect(page).to have_content('Copy HTTP clone URL')
expect(page).not_to have_content('Copy SSH clone URL')
@@ -97,7 +103,10 @@ RSpec.describe 'Admin disables Git access protocol', :js do
it 'shows both SSH and HTTP clone information' do
resize_screen_xs
visit_project
- find('.dropdown-toggle').click
+
+ within('.js-mobile-git-clone') do
+ find('.dropdown-toggle').click
+ end
expect(page).to have_content('Copy HTTP clone URL')
expect(page).to have_content('Copy SSH clone URL')
diff --git a/spec/features/admin/admin_groups_spec.rb b/spec/features/admin/admin_groups_spec.rb
index a8e18385bd2..bbdf2f7f4a9 100644
--- a/spec/features/admin/admin_groups_spec.rb
+++ b/spec/features/admin/admin_groups_spec.rb
@@ -54,7 +54,7 @@ RSpec.describe 'Admin Groups' do
click_button "Create group"
expect(current_path).to eq admin_group_path(Group.find_by(path: path_component))
- content = page.find('div#content-body')
+ content = page.find('#content-body')
h3_texts = content.all('h3').collect(&:text).join("\n")
expect(h3_texts).to match group_name
li_texts = content.all('li').collect(&:text).join("\n")
diff --git a/spec/features/admin/admin_projects_spec.rb b/spec/features/admin/admin_projects_spec.rb
index ff4e592234b..74e6aac8845 100644
--- a/spec/features/admin/admin_projects_spec.rb
+++ b/spec/features/admin/admin_projects_spec.rb
@@ -10,6 +10,8 @@ RSpec.describe "Admin::Projects" do
let(:current_user) { create(:admin) }
before do
+ stub_feature_flags(vue_project_members_list: false)
+
sign_in(current_user)
gitlab_enable_admin_mode_sign_in(current_user)
end
@@ -94,6 +96,7 @@ RSpec.describe "Admin::Projects" do
describe 'admin adding themselves to a project' do
before do
project.add_maintainer(user)
+ stub_feature_flags(invite_members_group_modal: false)
end
it 'adds admin to a project as developer', :js do
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index 0c66775c323..78e8ce91c10 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -315,50 +315,59 @@ RSpec.describe 'Admin updates settings' do
end
context 'Container Registry' do
- context 'delete tags service execution timeout' do
- let(:feature_flag_enabled) { true }
- let(:client_support) { true }
-
- before do
- stub_container_registry_config(enabled: true)
- stub_feature_flags(container_registry_expiration_policies_throttling: feature_flag_enabled)
- allow(ContainerRegistry::Client).to receive(:supports_tag_delete?).and_return(client_support)
- end
+ let(:feature_flag_enabled) { true }
+ let(:client_support) { true }
+ let(:settings_titles) do
+ {
+ container_registry_delete_tags_service_timeout: 'Container Registry delete tags service execution timeout',
+ container_registry_expiration_policies_worker_capacity: 'Cleanup policy maximum workers running concurrently',
+ container_registry_cleanup_tags_service_max_list_size: 'Cleanup policy maximum number of tags to be deleted'
+ }
+ end
+
+ before do
+ stub_container_registry_config(enabled: true)
+ stub_feature_flags(container_registry_expiration_policies_throttling: feature_flag_enabled)
+ allow(ContainerRegistry::Client).to receive(:supports_tag_delete?).and_return(client_support)
+ end
- RSpec.shared_examples 'not having service timeout settings' do
- it 'lacks the timeout settings' do
- visit ci_cd_admin_application_settings_path
+ shared_examples 'not having container registry setting' do |registry_setting|
+ it "lacks the container setting #{registry_setting}" do
+ visit ci_cd_admin_application_settings_path
- expect(page).not_to have_content "Container Registry delete tags service execution timeout"
- end
+ expect(page).not_to have_content(settings_titles[registry_setting])
end
+ end
- context 'with feature flag enabled' do
- context 'with client supporting tag delete' do
- it 'changes the timeout' do
- visit ci_cd_admin_application_settings_path
+ %i[container_registry_delete_tags_service_timeout container_registry_expiration_policies_worker_capacity container_registry_cleanup_tags_service_max_list_size].each do |setting|
+ context "for container registry setting #{setting}" do
+ context 'with feature flag enabled' do
+ context 'with client supporting tag delete' do
+ it 'changes the setting' do
+ visit ci_cd_admin_application_settings_path
- page.within('.as-registry') do
- fill_in 'application_setting_container_registry_delete_tags_service_timeout', with: 400
- click_button 'Save changes'
- end
+ page.within('.as-registry') do
+ fill_in "application_setting_#{setting}", with: 400
+ click_button 'Save changes'
+ end
- expect(current_settings.container_registry_delete_tags_service_timeout).to eq(400)
- expect(page).to have_content "Application settings saved successfully"
+ expect(current_settings.public_send(setting)).to eq(400)
+ expect(page).to have_content "Application settings saved successfully"
+ end
end
- end
- context 'with client not supporting tag delete' do
- let(:client_support) { false }
+ context 'with client not supporting tag delete' do
+ let(:client_support) { false }
- it_behaves_like 'not having service timeout settings'
+ it_behaves_like 'not having container registry setting', setting
+ end
end
- end
- context 'with feature flag disabled' do
- let(:feature_flag_enabled) { false }
+ context 'with feature flag disabled' do
+ let(:feature_flag_enabled) { false }
- it_behaves_like 'not having service timeout settings'
+ it_behaves_like 'not having container registry setting', setting
+ end
end
end
end
diff --git a/spec/features/admin/admin_users_spec.rb b/spec/features/admin/admin_users_spec.rb
new file mode 100644
index 00000000000..4fc60d17886
--- /dev/null
+++ b/spec/features/admin/admin_users_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe "Admin::Users" do
+ let(:current_user) { create(:admin) }
+
+ before do
+ sign_in(current_user)
+ gitlab_enable_admin_mode_sign_in(current_user)
+ end
+
+ describe 'Tabs', :js do
+ let(:tabs_selector) { '.js-users-tabs' }
+ let(:active_tab_selector) { '.nav-link.active' }
+
+ it 'does not add the tab param when the Users tab is selected' do
+ visit admin_users_path
+
+ within tabs_selector do
+ click_link 'Users'
+ end
+
+ expect(page).to have_current_path(admin_users_path)
+ end
+
+ it 'adds the ?tab=cohorts param when the Cohorts tab is selected' do
+ visit admin_users_path
+
+ within tabs_selector do
+ click_link 'Cohorts'
+ end
+
+ expect(page).to have_current_path(admin_users_path(tab: 'cohorts'))
+ end
+
+ it 'shows the cohorts tab when the tab param is set' do
+ visit admin_users_path(tab: 'cohorts')
+
+ within tabs_selector do
+ expect(page).to have_selector active_tab_selector, text: 'Cohorts'
+ end
+ end
+ end
+
+ describe 'Cohorts tab content' do
+ context 'with usage ping enabled' do
+ it 'shows users count per month' do
+ stub_application_setting(usage_ping_enabled: true)
+
+ create_list(:user, 2)
+
+ visit admin_users_path(tab: 'cohorts')
+
+ expect(page).to have_content("#{Time.now.strftime('%b %Y')} 3 0")
+ end
+ end
+
+ context 'with usage ping disabled' do
+ it 'shows empty state', :js do
+ stub_application_setting(usage_ping_enabled: false)
+
+ visit admin_users_path(tab: 'cohorts')
+
+ expect(page).to have_selector(".js-empty-state")
+ expect(page).to have_content("Activate user activity analysis")
+ end
+ end
+ end
+end
diff --git a/spec/features/alert_management/alert_details_spec.rb b/spec/features/alert_management/alert_details_spec.rb
index d190e4b6939..ce82b5adf8d 100644
--- a/spec/features/alert_management/alert_details_spec.rb
+++ b/spec/features/alert_management/alert_details_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe 'Alert details', :js do
expect(page).to have_selector('[data-testid="alert-todo-button"]')
todo_button = find('[data-testid="alert-todo-button"]')
- expect(todo_button).to have_content('Add a To-Do')
+ expect(todo_button).to have_content('Add a to do')
find('[data-testid="alert-todo-button"]').click
wait_for_requests
diff --git a/spec/features/alerts_settings/user_views_alerts_settings_spec.rb b/spec/features/alerts_settings/user_views_alerts_settings_spec.rb
index 698a36d3f76..07c87f98eb6 100644
--- a/spec/features/alerts_settings/user_views_alerts_settings_spec.rb
+++ b/spec/features/alerts_settings/user_views_alerts_settings_spec.rb
@@ -19,13 +19,14 @@ RSpec.describe 'Alert integrations settings form', :js do
describe 'when viewing alert integrations as a maintainer' do
context 'with the default page permissions' do
before do
+ stub_feature_flags(multiple_http_integrations_custom_mapping: false)
visit project_settings_operations_path(project, anchor: 'js-alert-management-settings')
wait_for_requests
end
it 'shows the alerts setting form title' do
page.within('#js-alert-management-settings') do
- expect(find('h3')).to have_content('Alerts')
+ expect(find('h4')).to have_content('Alerts')
end
end
diff --git a/spec/features/boards/boards_spec.rb b/spec/features/boards/boards_spec.rb
index b3cc2eb418d..bcc653991c9 100644
--- a/spec/features/boards/boards_spec.rb
+++ b/spec/features/boards/boards_spec.rb
@@ -13,6 +13,8 @@ RSpec.describe 'Issue Boards', :js do
let_it_be(:user2) { create(:user) }
before do
+ stub_feature_flags(board_new_list: false)
+
project.add_maintainer(user)
project.add_maintainer(user2)
diff --git a/spec/features/boards/sidebar_spec.rb b/spec/features/boards/sidebar_spec.rb
index 2af5b787a78..cf7554b3646 100644
--- a/spec/features/boards/sidebar_spec.rb
+++ b/spec/features/boards/sidebar_spec.rb
@@ -411,10 +411,10 @@ RSpec.describe 'Issue Boards', :js do
wait_for_requests
page.within('.subscriptions') do
- find('.js-issuable-subscribe-button button:not(.is-checked)').click
+ find('[data-testid="subscription-toggle"] button:not(.is-checked)').click
wait_for_requests
- expect(page).to have_css('.js-issuable-subscribe-button button.is-checked')
+ expect(page).to have_css('[data-testid="subscription-toggle"] button.is-checked')
end
end
@@ -427,10 +427,10 @@ RSpec.describe 'Issue Boards', :js do
wait_for_requests
page.within('.subscriptions') do
- find('.js-issuable-subscribe-button button.is-checked').click
+ find('[data-testid="subscription-toggle"] button.is-checked').click
wait_for_requests
- expect(page).to have_css('.js-issuable-subscribe-button button:not(.is-checked)')
+ expect(page).to have_css('[data-testid="subscription-toggle"] button:not(.is-checked)')
end
end
end
diff --git a/spec/features/commits_spec.rb b/spec/features/commits_spec.rb
index f8e84043c1b..1622979812d 100644
--- a/spec/features/commits_spec.rb
+++ b/spec/features/commits_spec.rb
@@ -138,9 +138,8 @@ RSpec.describe 'Commits' do
end
end
- context 'when accessing internal project with disallowed access', :js do
+ context 'when accessing internal project with disallowed access', :js, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/299575' do
before do
- stub_feature_flags(graphql_pipeline_header: false)
project.update(
visibility_level: Gitlab::VisibilityLevel::INTERNAL,
public_builds: false)
diff --git a/spec/features/discussion_comments/issue_spec.rb b/spec/features/discussion_comments/issue_spec.rb
index 2ad77a2884c..86743e31fbd 100644
--- a/spec/features/discussion_comments/issue_spec.rb
+++ b/spec/features/discussion_comments/issue_spec.rb
@@ -8,6 +8,8 @@ RSpec.describe 'Thread Comments Issue', :js do
let(:issue) { create(:issue, project: project) }
before do
+ stub_feature_flags(remove_comment_close_reopen: false)
+
project.add_maintainer(user)
sign_in(user)
diff --git a/spec/features/discussion_comments/merge_request_spec.rb b/spec/features/discussion_comments/merge_request_spec.rb
index 761cc7ae796..82dcdf9f918 100644
--- a/spec/features/discussion_comments/merge_request_spec.rb
+++ b/spec/features/discussion_comments/merge_request_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe 'Thread Comments Merge Request', :js do
before do
stub_feature_flags(remove_resolve_note: false)
+ stub_feature_flags(remove_comment_close_reopen: false)
project.add_maintainer(user)
sign_in(user)
diff --git a/spec/features/discussion_comments/snippets_spec.rb b/spec/features/discussion_comments/snippets_spec.rb
index b2d3fbf4b5d..42053e571e9 100644
--- a/spec/features/discussion_comments/snippets_spec.rb
+++ b/spec/features/discussion_comments/snippets_spec.rb
@@ -4,15 +4,34 @@ require 'spec_helper'
RSpec.describe 'Thread Comments Snippet', :js do
let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project) }
- let_it_be(:snippet) { create(:project_snippet, :private, :repository, project: project, author: user) }
before do
- project.add_maintainer(user)
sign_in(user)
+ end
+
+ context 'with project snippets' do
+ let_it_be(:project) do
+ create(:project).tap do |p|
+ p.add_maintainer(user)
+ end
+ end
+
+ let_it_be(:snippet) { create(:project_snippet, :private, :repository, project: project, author: user) }
+
+ before do
+ visit project_snippet_path(project, snippet)
+ end
- visit project_snippet_path(project, snippet)
+ it_behaves_like 'thread comments', 'snippet'
end
- it_behaves_like 'thread comments', 'snippet'
+ context 'with personal snippets' do
+ let_it_be(:snippet) { create(:personal_snippet, :private, :repository, author: user) }
+
+ before do
+ visit snippet_path(snippet)
+ end
+
+ it_behaves_like 'thread comments', 'snippet'
+ end
end
diff --git a/spec/features/groups/import_export/connect_instance_spec.rb b/spec/features/groups/import_export/connect_instance_spec.rb
index 2e1bf27ba8b..73de49101ea 100644
--- a/spec/features/groups/import_export/connect_instance_spec.rb
+++ b/spec/features/groups/import_export/connect_instance_spec.rb
@@ -23,8 +23,8 @@ RSpec.describe 'Import/Export - Connect to another instance', :js do
source_url = 'https://gitlab.com'
pat = 'demo-pat'
stub_path = 'stub-group'
-
- stub_request(:get, "%{url}/api/v4/groups?page=1&per_page=30&top_level_only=true&min_access_level=40" % { url: source_url }).to_return(
+ total = 37
+ stub_request(:get, "%{url}/api/v4/groups?page=1&per_page=20&top_level_only=true&min_access_level=40&search=" % { url: source_url }).to_return(
body: [{
id: 2595438,
web_url: 'https://gitlab.com/groups/auto-breakfast',
@@ -33,17 +33,25 @@ RSpec.describe 'Import/Export - Connect to another instance', :js do
full_name: 'Stub',
full_path: stub_path
}].to_json,
- headers: { 'Content-Type' => 'application/json' }
+ headers: {
+ 'Content-Type' => 'application/json',
+ 'X-Next-Page' => 2,
+ 'X-Page' => 1,
+ 'X-Per-Page' => 20,
+ 'X-Total' => total,
+ 'X-Total-Pages' => 2
+ }
)
expect(page).to have_content 'Import groups from another instance of GitLab'
+ expect(page).to have_content 'Not all related objects are migrated'
fill_in :bulk_import_gitlab_url, with: source_url
fill_in :bulk_import_gitlab_access_token, with: pat
click_on 'Connect instance'
- expect(page).to have_content 'Importing groups from %{url}' % { url: source_url }
+ expect(page).to have_content 'Showing 1-1 of %{total} groups from %{url}' % { url: source_url, total: total }
expect(page).to have_content stub_path
end
end
diff --git a/spec/features/groups/navbar_spec.rb b/spec/features/groups/navbar_spec.rb
index a4c450c9a2c..7025874a4ff 100644
--- a/spec/features/groups/navbar_spec.rb
+++ b/spec/features/groups/navbar_spec.rb
@@ -87,12 +87,4 @@ RSpec.describe 'Group navbar' do
it_behaves_like 'verified navigation bar'
end
-
- context 'when invite team members is not available' do
- it 'does not display the js-invite-members-trigger' do
- visit group_path(group)
-
- expect(page).not_to have_selector('.js-invite-members-trigger')
- end
- end
end
diff --git a/spec/features/groups/settings/packages_and_registries_spec.rb b/spec/features/groups/settings/packages_and_registries_spec.rb
index b8ffd73335d..09b0707492f 100644
--- a/spec/features/groups/settings/packages_and_registries_spec.rb
+++ b/spec/features/groups/settings/packages_and_registries_spec.rb
@@ -47,6 +47,13 @@ RSpec.describe 'Group Packages & Registries settings' do
sidebar = find('.nav-sidebar')
expect(sidebar).to have_link _('Packages & Registries')
end
+
+ it 'has a Package Registry section', :js do
+ visit_settings_page
+
+ expect(page).to have_content('Package Registry')
+ expect(page).to have_button('Collapse')
+ end
end
def find_settings_menu
diff --git a/spec/features/groups/show_spec.rb b/spec/features/groups/show_spec.rb
index 3a42fd508b4..5067f11be67 100644
--- a/spec/features/groups/show_spec.rb
+++ b/spec/features/groups/show_spec.rb
@@ -163,6 +163,7 @@ RSpec.describe 'Group show page' do
let!(:project) { create(:project, namespace: group) }
before do
+ stub_feature_flags(vue_notification_dropdown: false)
group.add_maintainer(maintainer)
sign_in(maintainer)
end
diff --git a/spec/features/issuables/issuable_list_spec.rb b/spec/features/issuables/issuable_list_spec.rb
index 3f00bdc478d..a0786d36fdf 100644
--- a/spec/features/issuables/issuable_list_spec.rb
+++ b/spec/features/issuables/issuable_list_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe 'issuable list', :js do
visit_issuable_list(:issue)
- expect(page).to have_text('Open ? Closed ? All ?')
+ expect(page).to have_text('Open Closed All')
end
it "counts merge requests closing issues icons for each issue" do
diff --git a/spec/features/issues/gfm_autocomplete_spec.rb b/spec/features/issues/gfm_autocomplete_spec.rb
index 9b2a11c4b0e..e2087868035 100644
--- a/spec/features/issues/gfm_autocomplete_spec.rb
+++ b/spec/features/issues/gfm_autocomplete_spec.rb
@@ -183,6 +183,16 @@ RSpec.describe 'GFM autocomplete', :js do
expect(find('#at-view-users')).to have_content(user.name)
end
+ it 'searches across full name for assignees' do
+ page.within '.timeline-content-form' do
+ find('#note-body').native.send_keys('@speciąlsome')
+ end
+
+ wait_for_requests
+
+ expect(find('.atwho-view li', visible: true)).to have_content(user.name)
+ end
+
it 'selects the first item for non-assignee dropdowns if a query is entered' do
page.within '.timeline-content-form' do
find('#note-body').native.send_keys(':1')
diff --git a/spec/features/issues/issue_sidebar_spec.rb b/spec/features/issues/issue_sidebar_spec.rb
index 59fba5f65e0..ca44978d223 100644
--- a/spec/features/issues/issue_sidebar_spec.rb
+++ b/spec/features/issues/issue_sidebar_spec.rb
@@ -11,6 +11,11 @@ RSpec.describe 'Issue Sidebar' do
let!(:label) { create(:label, project: project, title: 'bug') }
let(:issue) { create(:labeled_issue, project: project, labels: [label]) }
let!(:xss_label) { create(:label, project: project, title: '&lt;script&gt;alert("xss");&lt;&#x2F;script&gt;') }
+ let!(:milestone_expired) { create(:milestone, project: project, due_date: 5.days.ago) }
+ let!(:milestone_no_duedate) { create(:milestone, project: project, title: 'Foo - No due date') }
+ let!(:milestone1) { create(:milestone, project: project, title: 'Milestone-1', due_date: 20.days.from_now) }
+ let!(:milestone2) { create(:milestone, project: project, title: 'Milestone-2', due_date: 15.days.from_now) }
+ let!(:milestone3) { create(:milestone, project: project, title: 'Milestone-3', due_date: 10.days.from_now) }
before do
stub_incoming_email_setting(enabled: true, address: "p+%{key}@gl.ab")
@@ -134,6 +139,36 @@ RSpec.describe 'Issue Sidebar' do
end
end
+ context 'editing issue milestone', :js do
+ before do
+ page.within('.block.milestone > .title') do
+ click_on 'Edit'
+ end
+ end
+
+ it 'shows the milestones list in the dropdown' do
+ page.within('.block.milestone .dropdown-content') do
+ # 5 milestones + "No milestone" = 6 items
+ expect(page.find('ul')).to have_selector('li[data-milestone-id]', count: 6)
+ end
+ end
+
+ it 'shows expired milestone at the bottom of the list' do
+ page.within('.block.milestone .dropdown-content ul') do
+ expect(page.find('li:last-child')).to have_content milestone_expired.title
+ end
+ end
+
+ it 'shows milestone due earliest at the top of the list' do
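+ # Dated milestones are listed by due date ascending (Milestone-3, -2, -1), followed by the one without a due date.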
+ page.within('.block.milestone .dropdown-content ul') do
+ expect(page.all('li[data-milestone-id]')[1]).to have_content milestone3.title
+ expect(page.all('li[data-milestone-id]')[2]).to have_content milestone2.title
+ expect(page.all('li[data-milestone-id]')[3]).to have_content milestone1.title
+ expect(page.all('li[data-milestone-id]')[4]).to have_content milestone_no_duedate.title
+ end
+ end
+ end
+
context 'editing issue labels', :js do
before do
issue.update(labels: [label])
diff --git a/spec/features/issues/issue_state_spec.rb b/spec/features/issues/issue_state_spec.rb
index d5a115433aa..409f498798b 100644
--- a/spec/features/issues/issue_state_spec.rb
+++ b/spec/features/issues/issue_state_spec.rb
@@ -42,9 +42,15 @@ RSpec.describe 'issue state', :js do
end
describe 'when open', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/297348' do
- context 'when clicking the top `Close issue` button', :aggregate_failures do
- let(:open_issue) { create(:issue, project: project) }
+ let(:open_issue) { create(:issue, project: project) }
+ it_behaves_like 'page with comment and close button', 'Close issue' do
+ def setup
+ visit project_issue_path(project, open_issue)
+ end
+ end
+
+ context 'when clicking the top `Close issue` button', :aggregate_failures do
before do
visit project_issue_path(project, open_issue)
end
@@ -53,9 +59,8 @@ RSpec.describe 'issue state', :js do
end
context 'when clicking the bottom `Close issue` button', :aggregate_failures do
- let(:open_issue) { create(:issue, project: project) }
-
before do
+ stub_feature_flags(remove_comment_close_reopen: false)
visit project_issue_path(project, open_issue)
end
@@ -64,9 +69,15 @@ RSpec.describe 'issue state', :js do
end
describe 'when closed', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/297201' do
- context 'when clicking the top `Reopen issue` button', :aggregate_failures do
- let(:closed_issue) { create(:issue, project: project, state: 'closed') }
+ let(:closed_issue) { create(:issue, project: project, state: 'closed') }
+ it_behaves_like 'page with comment and close button', 'Reopen issue' do
+ def setup
+ visit project_issue_path(project, closed_issue)
+ end
+ end
+
+ context 'when clicking the top `Reopen issue` button', :aggregate_failures do
before do
visit project_issue_path(project, closed_issue)
end
@@ -75,9 +86,8 @@ RSpec.describe 'issue state', :js do
end
context 'when clicking the bottom `Reopen issue` button', :aggregate_failures do
- let(:closed_issue) { create(:issue, project: project, state: 'closed') }
-
before do
+ stub_feature_flags(remove_comment_close_reopen: false)
visit project_issue_path(project, closed_issue)
end
diff --git a/spec/features/issues/user_toggles_subscription_spec.rb b/spec/features/issues/user_toggles_subscription_spec.rb
index 971c8a3b431..d91c187c840 100644
--- a/spec/features/issues/user_toggles_subscription_spec.rb
+++ b/spec/features/issues/user_toggles_subscription_spec.rb
@@ -15,13 +15,13 @@ RSpec.describe "User toggles subscription", :js do
end
it "unsubscribes from issue" do
- subscription_button = find(".js-issuable-subscribe-button")
+ subscription_button = find('[data-testid="subscription-toggle"]')
# Check we're subscribed.
expect(subscription_button).to have_css("button.is-checked")
# Toggle subscription.
- find(".js-issuable-subscribe-button button").click
+ find('[data-testid="subscription-toggle"]').click
wait_for_requests
# Check we're unsubscribed.
@@ -33,7 +33,7 @@ RSpec.describe "User toggles subscription", :js do
it 'is disabled' do
expect(page).to have_content('Notifications have been disabled by the project or group owner')
- expect(page).not_to have_selector('.js-issuable-subscribe-button')
+ expect(page).not_to have_selector('[data-testid="subscription-toggle"]')
end
end
end
diff --git a/spec/features/labels_hierarchy_spec.rb b/spec/features/labels_hierarchy_spec.rb
index 5d141580874..aeb42cc2edb 100644
--- a/spec/features/labels_hierarchy_spec.rb
+++ b/spec/features/labels_hierarchy_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe 'Labels Hierarchy', :js do
before do
stub_feature_flags(graphql_board_lists: false)
+ stub_feature_flags(board_new_list: false)
grandparent.add_owner(user)
sign_in(user)
@@ -270,6 +271,10 @@ RSpec.describe 'Labels Hierarchy', :js do
end
context 'creating boards lists' do
+ before do
+ stub_feature_flags(board_new_list: false)
+ end
+
context 'on project boards' do
let(:board) { create(:board, project: project_1) }
diff --git a/spec/features/markdown/mermaid_spec.rb b/spec/features/markdown/mermaid_spec.rb
index 23cdd9d2ce5..207678e07c3 100644
--- a/spec/features/markdown/mermaid_spec.rb
+++ b/spec/features/markdown/mermaid_spec.rb
@@ -108,7 +108,7 @@ RSpec.describe 'Mermaid rendering', :js do
expect(svg[:style]).to match(/max-width/)
expect(svg[:width].to_i).to eq(100)
- expect(svg[:height].to_i).to eq(0)
+ expect(svg[:height].to_i).to be_within(5).of(220)
end
end
diff --git a/spec/features/merge_request/user_closes_reopens_merge_request_state_spec.rb b/spec/features/merge_request/user_closes_reopens_merge_request_state_spec.rb
index 2b94c072c8b..ab3ef7c1ac0 100644
--- a/spec/features/merge_request/user_closes_reopens_merge_request_state_spec.rb
+++ b/spec/features/merge_request/user_closes_reopens_merge_request_state_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User closes/reopens a merge request', :js do
+RSpec.describe 'User closes/reopens a merge request', :js, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/297500' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
@@ -12,9 +12,15 @@ RSpec.describe 'User closes/reopens a merge request', :js do
end
describe 'when open' do
- context 'when clicking the top `Close merge request` link', :aggregate_failures do
- let(:open_merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let(:open_merge_request) { create(:merge_request, source_project: project, target_project: project) }
+
+ it_behaves_like 'page with comment and close button', 'Close merge request' do
+ def setup
+ visit merge_request_path(open_merge_request)
+ end
+ end
+ context 'when clicking the top `Close merge request` link', :aggregate_failures do
before do
visit merge_request_path(open_merge_request)
end
@@ -34,9 +40,8 @@ RSpec.describe 'User closes/reopens a merge request', :js do
end
context 'when clicking the bottom `Close merge request` button', :aggregate_failures do
- let(:open_merge_request) { create(:merge_request, source_project: project, target_project: project) }
-
before do
+ stub_feature_flags(remove_comment_close_reopen: false)
visit merge_request_path(open_merge_request)
end
@@ -55,10 +60,23 @@ RSpec.describe 'User closes/reopens a merge request', :js do
end
end
- describe 'when closed', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/297500' do
- context 'when clicking the top `Reopen merge request` link', :aggregate_failures do
- let(:closed_merge_request) { create(:merge_request, source_project: project, target_project: project, state: 'closed') }
+ describe 'when closed' do
+ let(:closed_merge_request) { create(:merge_request, source_project: project, target_project: project, state: 'closed') }
+ it_behaves_like 'page with comment and close button', 'Close merge request' do
+ def setup
+ visit merge_request_path(closed_merge_request)
+
+ within '.detail-page-header' do
+ click_button 'Toggle dropdown'
+ click_link 'Reopen merge request'
+ end
+
+ wait_for_requests
+ end
+ end
+
+ context 'when clicking the top `Reopen merge request` link', :aggregate_failures do
before do
visit merge_request_path(closed_merge_request)
end
@@ -78,9 +96,8 @@ RSpec.describe 'User closes/reopens a merge request', :js do
end
context 'when clicking the bottom `Reopen merge request` button', :aggregate_failures do
- let(:closed_merge_request) { create(:merge_request, source_project: project, target_project: project, state: 'closed') }
-
before do
+ stub_feature_flags(remove_comment_close_reopen: false)
visit merge_request_path(closed_merge_request)
end
diff --git a/spec/features/merge_request/user_manages_subscription_spec.rb b/spec/features/merge_request/user_manages_subscription_spec.rb
index 9ed5b67fa0e..3cdb22000f6 100644
--- a/spec/features/merge_request/user_manages_subscription_spec.rb
+++ b/spec/features/merge_request/user_manages_subscription_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'User manages subscription', :js do
end
it 'toggles subscription' do
- page.within('.js-issuable-subscribe-button') do
+ page.within('[data-testid="subscription-toggle"]') do
wait_for_requests
expect(page).to have_css 'button:not(.is-checked)'
diff --git a/spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb b/spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb
index ea3e90a4508..05873d8655e 100644
--- a/spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb
+++ b/spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb
@@ -57,7 +57,7 @@ RSpec.describe 'Merge request > User merges only if pipeline succeeds', :js do
wait_for_requests
expect(page).to have_css('button[disabled="disabled"]', text: 'Merge')
- expect(page).to have_content('Please retry the job or push a new commit to fix the failure')
+ expect(page).to have_content('The pipeline for this merge request did not complete. Push a new commit to fix the failure or check the troubleshooting documentation to see other possible actions.')
end
end
@@ -70,7 +70,7 @@ RSpec.describe 'Merge request > User merges only if pipeline succeeds', :js do
wait_for_requests
expect(page).not_to have_button 'Merge'
- expect(page).to have_content('Please retry the job or push a new commit to fix the failure')
+ expect(page).to have_content('The pipeline for this merge request did not complete. Push a new commit to fix the failure or check the troubleshooting documentation to see other possible actions.')
end
end
diff --git a/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb b/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
index 5e99383e4a1..63b463a2c5f 100644
--- a/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
+++ b/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
@@ -68,7 +68,7 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js do
wait_for_requests
- expect(page).to have_content 'Merge when pipeline succeeds', wait: 0
+ expect(page).to have_content 'Merge when pipeline succeeds'
end
it_behaves_like 'Merge when pipeline succeeds activator'
@@ -145,7 +145,7 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js do
before do
merge_request.update!(
merge_user: merge_request.author,
- merge_error: 'Something went wrong.'
+ merge_error: 'Something went wrong'
)
refresh
end
@@ -155,7 +155,7 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js do
wait_for_requests
page.within('.mr-section-container') do
- expect(page).to have_content('Merge failed: Something went wrong. Please try again.')
+ expect(page).to have_content('Something went wrong. Try again.')
end
end
end
@@ -174,7 +174,7 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js do
wait_for_requests
page.within('.mr-section-container') do
- expect(page).to have_content('Merge failed: Something went wrong. Please try again.')
+ expect(page).to have_content('Something went wrong. Try again.')
end
end
end
diff --git a/spec/features/merge_request/user_posts_notes_spec.rb b/spec/features/merge_request/user_posts_notes_spec.rb
index 489582521b5..e629bc0dc53 100644
--- a/spec/features/merge_request/user_posts_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_notes_spec.rb
@@ -161,7 +161,7 @@ RSpec.describe 'Merge request > User posts notes', :js do
fill_in 'note[note]', with: 'Some new content'
accept_confirm do
- find('.btn-cancel').click
+ find('[data-testid="cancel"]').click
end
end
expect(find('.js-note-text').text).to eq ''
diff --git a/spec/features/merge_request/user_reverts_merge_request_spec.rb b/spec/features/merge_request/user_reverts_merge_request_spec.rb
index 5e9611de460..d40c9f0ce6f 100644
--- a/spec/features/merge_request/user_reverts_merge_request_spec.rb
+++ b/spec/features/merge_request/user_reverts_merge_request_spec.rb
@@ -17,46 +17,28 @@ RSpec.describe 'User reverts a merge request', :js do
wait_for_requests
- visit(merge_request_path(merge_request))
+ # Do not reload the page by visiting it again; let JavaScript update it. This validates that the modal
+ # code has loaded correctly once the page update adds the `revert` button.
end
it 'reverts a merge request', :sidekiq_might_not_need_inline do
- find("a[href='#modal-revert-commit']").click
+ revert_commit
- page.within('#modal-revert-commit') do
- uncheck('create_merge_request')
- click_button('Revert')
- end
+ wait_for_requests
expect(page).to have_content('The merge request has been successfully reverted.')
-
- wait_for_requests
end
it 'does not revert a merge request that was previously reverted', :sidekiq_might_not_need_inline do
- find("a[href='#modal-revert-commit']").click
-
- page.within('#modal-revert-commit') do
- uncheck('create_merge_request')
- click_button('Revert')
- end
-
- find("a[href='#modal-revert-commit']").click
+ revert_commit
- page.within('#modal-revert-commit') do
- uncheck('create_merge_request')
- click_button('Revert')
- end
+ revert_commit
expect(page).to have_content('Sorry, we cannot revert this merge request automatically.')
end
it 'reverts a merge request in a new merge request', :sidekiq_might_not_need_inline do
- find("a[href='#modal-revert-commit']").click
-
- page.within('#modal-revert-commit') do
- click_button('Revert')
- end
+ revert_commit(create_merge_request: true)
expect(page).to have_content('The merge request has been successfully reverted. You can now submit a merge request to get this change into the original branch.')
end
@@ -68,4 +50,13 @@ RSpec.describe 'User reverts a merge request', :js do
expect(page).not_to have_link('Revert')
end
+
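+ # Clicks the page-level Revert button, then confirms in the modal; the "create merge request" box stays checked only when requested.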
+ def revert_commit(create_merge_request: false)
+ click_button('Revert')
+
+ page.within('[data-testid="modal-commit"]') do
+ uncheck('create_merge_request') unless create_merge_request
+ click_button('Revert')
+ end
+ end
end
diff --git a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
index d9743f6f330..708ce53b4fe 100644
--- a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
@@ -160,7 +160,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'merges the merge request' do
expect(page).to have_content('Merged by')
- expect(page).to have_link('Revert')
+ expect(page).to have_button('Revert')
end
end
@@ -357,7 +357,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'merges the merge request' do
expect(page).to have_content('Merged by')
- expect(page).to have_link('Revert')
+ expect(page).to have_button('Revert')
end
end
diff --git a/spec/features/merge_request/user_sees_merge_widget_spec.rb b/spec/features/merge_request/user_sees_merge_widget_spec.rb
index c2b2ada47be..9bd6cd8863f 100644
--- a/spec/features/merge_request/user_sees_merge_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_widget_spec.rb
@@ -319,7 +319,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
wait_for_requests
page.within('.mr-section-container') do
- expect(page).to have_content('Merge failed: Something went wrong')
+ expect(page).to have_content('Something went wrong.')
end
end
end
@@ -340,7 +340,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
wait_for_requests
page.within('.mr-section-container') do
- expect(page).to have_content('Merge failed: Something went wrong')
+ expect(page).to have_content('Something went wrong.')
end
end
end
diff --git a/spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb b/spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb
index 04d8c52df61..1ef6d2a1068 100644
--- a/spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb
+++ b/spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb
@@ -9,152 +9,166 @@ RSpec.describe 'Merge request < User sees mini pipeline graph', :js do
let(:pipeline) { create(:ci_empty_pipeline, project: project, ref: 'master', status: 'running', sha: project.commit.id) }
let(:build) { create(:ci_build, pipeline: pipeline, stage: 'test') }
- before do
- build.run
- build.trace.set('hello')
- sign_in(user)
- visit_merge_request
- end
-
- def visit_merge_request(format: :html, serializer: nil)
- visit project_merge_request_path(project, merge_request, format: format, serializer: serializer)
- end
-
- it 'displays a mini pipeline graph' do
- expect(page).to have_selector('.mr-widget-pipeline-graph')
- end
-
- context 'as json' do
- let(:artifacts_file1) { fixture_file_upload(File.join('spec/fixtures/banana_sample.gif'), 'image/gif') }
- let(:artifacts_file2) { fixture_file_upload(File.join('spec/fixtures/dk.png'), 'image/png') }
-
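+ # Shared examples exercised twice at the bottom of the file, once for each state of the ci_mini_pipeline_gl_dropdown feature flag.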
+ shared_examples 'mini pipeline renders' do |ci_mini_pipeline_gl_dropdown_enabled|
before do
- job = create(:ci_build, :success, :trace_artifact, pipeline: pipeline)
- create(:ci_job_artifact, :archive, file: artifacts_file1, job: job)
- create(:ci_build, :manual, pipeline: pipeline, when: 'manual')
+ build.run
+ build.trace.set('hello')
+ sign_in(user)
+ stub_feature_flags(ci_mini_pipeline_gl_dropdown: ci_mini_pipeline_gl_dropdown_enabled)
+ visit_merge_request
end
- # TODO: https://gitlab.com/gitlab-org/gitlab-foss/issues/48034
- xit 'avoids repeated database queries' do
- before = ActiveRecord::QueryRecorder.new { visit_merge_request(format: :json, serializer: 'widget') }
-
- job = create(:ci_build, :success, :trace_artifact, pipeline: pipeline)
- create(:ci_job_artifact, :archive, file: artifacts_file2, job: job)
- create(:ci_build, :manual, pipeline: pipeline, when: 'manual')
-
- after = ActiveRecord::QueryRecorder.new { visit_merge_request(format: :json, serializer: 'widget') }
+ let_it_be(:dropdown_toggle_selector) do
+ if ci_mini_pipeline_gl_dropdown_enabled
+ '[data-testid="mini-pipeline-graph-dropdown"] .dropdown-toggle'
+ else
+ '[data-testid="mini-pipeline-graph-dropdown-toggle"]'
+ end
+ end
- expect(before.count).to eq(after.count)
- expect(before.cached_count).to eq(after.cached_count)
+ def visit_merge_request(format: :html, serializer: nil)
+ visit project_merge_request_path(project, merge_request, format: format, serializer: serializer)
end
- end
- describe 'build list toggle' do
- let(:toggle) do
- find('.mini-pipeline-graph-dropdown-toggle')
- first('.mini-pipeline-graph-dropdown-toggle')
+ it 'displays a mini pipeline graph' do
+ expect(page).to have_selector('.mr-widget-pipeline-graph')
end
- # Status icon button styles should update as described in
- # https://gitlab.com/gitlab-org/gitlab-foss/issues/42769
- it 'has unique styles for default, :hover, :active, and :focus states' do
- find('.mini-pipeline-graph-dropdown-toggle')
- default_background_color = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible').css('background-color');")
- default_foreground_color = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible svg').css('fill');")
- default_box_shadow = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible').css('box-shadow');")
+ context 'as json' do
+ let(:artifacts_file1) { fixture_file_upload(File.join('spec/fixtures/banana_sample.gif'), 'image/gif') }
+ let(:artifacts_file2) { fixture_file_upload(File.join('spec/fixtures/dk.png'), 'image/png') }
- toggle.hover
+ before do
+ job = create(:ci_build, :success, :trace_artifact, pipeline: pipeline)
+ create(:ci_job_artifact, :archive, file: artifacts_file1, job: job)
+ create(:ci_build, :manual, pipeline: pipeline, when: 'manual')
+ end
- find('.mini-pipeline-graph-dropdown-toggle')
- hover_background_color = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible').css('background-color');")
- hover_foreground_color = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible svg').css('fill');")
- hover_box_shadow = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible').css('box-shadow');")
+ # TODO: https://gitlab.com/gitlab-org/gitlab-foss/issues/48034
+ xit 'avoids repeated database queries' do
+ before = ActiveRecord::QueryRecorder.new { visit_merge_request(format: :json, serializer: 'widget') }
- page.driver.browser.action.click_and_hold(toggle.native).perform
+ job = create(:ci_build, :success, :trace_artifact, pipeline: pipeline)
+ create(:ci_job_artifact, :archive, file: artifacts_file2, job: job)
+ create(:ci_build, :manual, pipeline: pipeline, when: 'manual')
- find('.mini-pipeline-graph-dropdown-toggle')
- active_background_color = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible').css('background-color');")
- active_foreground_color = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible svg').css('fill');")
- active_box_shadow = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible').css('box-shadow');")
+ after = ActiveRecord::QueryRecorder.new { visit_merge_request(format: :json, serializer: 'widget') }
- page.driver.browser.action.release(toggle.native)
- .move_by(100, 100)
- .perform
+ expect(before.count).to eq(after.count)
+ expect(before.cached_count).to eq(after.cached_count)
+ end
+ end
- find('.mini-pipeline-graph-dropdown-toggle')
- focus_background_color = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible').css('background-color');")
- focus_foreground_color = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible svg').css('fill');")
- focus_box_shadow = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible').css('box-shadow');")
+ describe 'build list toggle' do
+ let(:toggle) do
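+ # find waits for the toggle to appear; first then returns the first match even if several render.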
+ find(dropdown_toggle_selector)
+ first(dropdown_toggle_selector)
+ end
- expect(default_background_color).not_to eq(hover_background_color)
- expect(hover_background_color).not_to eq(active_background_color)
- expect(default_background_color).not_to eq(active_background_color)
+ # Status icon button styles should update as described in
+ # https://gitlab.com/gitlab-org/gitlab-foss/issues/42769
+ it 'has unique styles for default, :hover, :active, and :focus states' do
+ default_background_color, default_foreground_color, default_box_shadow = get_toggle_colors(dropdown_toggle_selector)
- expect(default_foreground_color).not_to eq(hover_foreground_color)
- expect(hover_foreground_color).not_to eq(active_foreground_color)
- expect(default_foreground_color).not_to eq(active_foreground_color)
+ toggle.hover
+ hover_background_color, hover_foreground_color, hover_box_shadow = get_toggle_colors(dropdown_toggle_selector)
- expect(focus_background_color).to eq(hover_background_color)
- expect(focus_foreground_color).to eq(hover_foreground_color)
+ page.driver.browser.action.click_and_hold(toggle.native).perform
+ active_background_color, active_foreground_color, active_box_shadow = get_toggle_colors(dropdown_toggle_selector)
+ page.driver.browser.action.release(toggle.native).perform
- expect(default_box_shadow).to eq('none')
- expect(hover_box_shadow).to eq('none')
- expect(active_box_shadow).not_to eq('none')
- expect(focus_box_shadow).not_to eq('none')
- end
+ page.driver.browser.action.click(toggle.native).move_by(100, 100).perform
+ focus_background_color, focus_foreground_color, focus_box_shadow = get_toggle_colors(dropdown_toggle_selector)
- it 'shows tooltip when hovered' do
- toggle.hover
+ expect(default_background_color).not_to eq(hover_background_color)
+ expect(hover_background_color).not_to eq(active_background_color)
+ expect(default_background_color).not_to eq(active_background_color)
- expect(page).to have_selector('.tooltip')
- end
- end
+ expect(default_foreground_color).not_to eq(hover_foreground_color)
+ expect(hover_foreground_color).not_to eq(active_foreground_color)
+ expect(default_foreground_color).not_to eq(active_foreground_color)
- describe 'builds list menu' do
- let(:toggle) do
- find('.mini-pipeline-graph-dropdown-toggle')
- first('.mini-pipeline-graph-dropdown-toggle')
- end
+ expect(focus_background_color).to eq(hover_background_color)
+ expect(focus_foreground_color).to eq(hover_foreground_color)
- before do
- toggle.click
- wait_for_requests
- end
+ expect(default_box_shadow).to eq('none')
+ expect(hover_box_shadow).to eq('none')
+ expect(active_box_shadow).not_to eq('none')
+ expect(focus_box_shadow).not_to eq('none')
+ end
+
+ it 'shows tooltip when hovered' do
+ toggle.hover
- it 'pens when toggle is clicked' do
- expect(toggle.find(:xpath, '..')).to have_selector('.mini-pipeline-graph-dropdown-menu')
+ expect(page).to have_selector('.tooltip')
+ end
end
- it 'closes when toggle is clicked again' do
- toggle.click
+ describe 'builds list menu' do
+ let(:toggle) do
+ find(dropdown_toggle_selector)
+ first(dropdown_toggle_selector)
+ end
- expect(toggle.find(:xpath, '..')).not_to have_selector('.mini-pipeline-graph-dropdown-menu')
- end
+ before do
+ toggle.click
+ wait_for_requests
+ end
- it 'closes when clicking somewhere else' do
- find('body').click
+ it 'opens when toggle is clicked' do
+ expect(toggle.find(:xpath, '..')).to have_selector('.mini-pipeline-graph-dropdown-menu')
+ end
- expect(toggle.find(:xpath, '..')).not_to have_selector('.mini-pipeline-graph-dropdown-menu')
- end
+ it 'closes when toggle is clicked again' do
+ toggle.click
- describe 'build list build item' do
- let(:build_item) do
- find('.mini-pipeline-graph-dropdown-item')
- first('.mini-pipeline-graph-dropdown-item')
+ expect(toggle.find(:xpath, '..')).not_to have_selector('.mini-pipeline-graph-dropdown-menu')
end
- it 'visits the build page when clicked' do
- build_item.click
- find('.build-page')
+ it 'closes when clicking somewhere else' do
+ find('body').click
- expect(current_path).to eql(project_job_path(project, build))
+ expect(toggle.find(:xpath, '..')).not_to have_selector('.mini-pipeline-graph-dropdown-menu')
end
- it 'shows tooltip when hovered' do
- build_item.hover
+ describe 'build list build item' do
+ let(:build_item) do
+ find('.mini-pipeline-graph-dropdown-item')
+ first('.mini-pipeline-graph-dropdown-item')
+ end
- expect(page).to have_selector('.tooltip')
+ it 'visits the build page when clicked' do
+ build_item.click
+ find('.build-page')
+
+ expect(current_path).to eql(project_job_path(project, build))
+ end
+
+ it 'shows tooltip when hovered' do
+ build_item.hover
+
+ expect(page).to have_selector('.tooltip')
+ end
end
end
end
+
+ context 'with ci_mini_pipeline_gl_dropdown disabled' do
+ it_behaves_like "mini pipeline renders", false
+ end
+
+ context 'with ci_mini_pipeline_gl_dropdown enabled' do
+ it_behaves_like "mini pipeline renders", true
+ end
+
+ private
+
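+ # Reads the toggle's computed background-color, SVG fill and box-shadow with jQuery via evaluate_script.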
+ def get_toggle_colors(selector)
+ find(selector)
+ [
+ evaluate_script("$('#{selector}:visible').css('background-color');"),
+ evaluate_script("$('#{selector}:visible svg').css('fill');"),
+ evaluate_script("$('#{selector}:visible').css('box-shadow');")
+ ]
+ end
end
diff --git a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
index a2ec34335ec..bbeb91bbd19 100644
--- a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
+++ b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
@@ -73,6 +73,23 @@ RSpec.describe 'User comments on a diff', :js do
end
end
+ it 'allows suggestions in replies' do
+ click_diff_line(find("[id='#{sample_compare.changes[1][:line_code]}']"))
+
+ page.within('.js-discussion-note-form') do
+ fill_in('note_note', with: "```suggestion\n# change to a comment\n```")
+ click_button('Add comment now')
+ end
+
+ wait_for_requests
+
+ click_button 'Reply...'
+
+ find('.js-suggestion-btn').click
+
+ expect(find('.js-vue-issue-note-form').value).to include("url = https://github.com/gitlabhq/gitlab-shell.git")
+ end
+
it 'suggestion is appliable' do
click_diff_line(find("[id='#{sample_compare.changes[1][:line_code]}']"))
diff --git a/spec/features/merge_request/user_views_open_merge_request_spec.rb b/spec/features/merge_request/user_views_open_merge_request_spec.rb
index e8998f9457a..fdf29d32836 100644
--- a/spec/features/merge_request/user_views_open_merge_request_spec.rb
+++ b/spec/features/merge_request/user_views_open_merge_request_spec.rb
@@ -107,5 +107,21 @@ RSpec.describe 'User views an open merge request' do
end
end
end
+
+ context 'when the assignee\'s availability is set' do
+ before do
+ merge_request.author.create_status(availability: 'busy')
+ merge_request.assignees << merge_request.author
+
+ visit(merge_request_path(merge_request))
+ end
+
+ it 'exposes the availability in the data-availability attribute' do
+ assignees_data = find_all("input[name='merge_request[assignee_ids][]']", visible: false)
+
+ expect(assignees_data.size).to eq(1)
+ expect(assignees_data.first['data-availability']).to eq('busy')
+ end
+ end
end
end
diff --git a/spec/features/profiles/user_visits_notifications_tab_spec.rb b/spec/features/profiles/user_visits_notifications_tab_spec.rb
index 997cc8e3c4b..289fbff0404 100644
--- a/spec/features/profiles/user_visits_notifications_tab_spec.rb
+++ b/spec/features/profiles/user_visits_notifications_tab_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'User visits the notifications tab', :js do
let(:user) { create(:user) }
before do
+ stub_feature_flags(vue_notification_dropdown: false)
project.add_maintainer(user)
sign_in(user)
visit(profile_notifications_path)
diff --git a/spec/features/projects/commit/cherry_pick_spec.rb b/spec/features/projects/commit/cherry_pick_spec.rb
index 9fe3f4cd63e..489a90cc8fc 100644
--- a/spec/features/projects/commit/cherry_pick_spec.rb
+++ b/spec/features/projects/commit/cherry_pick_spec.rb
@@ -2,108 +2,126 @@
require 'spec_helper'
-RSpec.describe 'Cherry-pick Commits' do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:project) { create(:project, :repository, namespace: group) }
- let(:master_pickable_commit) { project.commit('7d3b0f7cff5f37573aea97cebfd5692ea1689924') }
- let(:master_pickable_merge) { project.commit('e56497bb5f03a90a51293fc6d516788730953899') }
+RSpec.describe 'Cherry-pick Commits', :js do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:sha) { '7d3b0f7cff5f37573aea97cebfd5692ea1689924' }
+ let!(:project) { create_default(:project, :repository, namespace: user.namespace) }
+ let(:master_pickable_commit) { project.commit(sha) }
before do
sign_in(user)
- project.add_maintainer(user)
- visit project_commit_path(project, master_pickable_commit.id)
end
- context "I cherry-pick a commit" do
- it do
- find("a[href='#modal-cherry-pick-commit']").click
- expect(page).not_to have_content('v1.0.0') # Only branches, not tags
- page.within('#modal-cherry-pick-commit') do
- uncheck 'create_merge_request'
- click_button 'Cherry-pick'
- end
- expect(page).to have_content('The commit has been successfully cherry-picked into master.')
- end
- end
+ context 'when clicking cherry-pick from the dropdown for a commit on pipelines tab' do
+ it 'launches the modal form' do
+ create(:ci_empty_pipeline, sha: sha)
+ visit project_commit_path(project, master_pickable_commit.id)
+ click_link 'Pipelines'
- context "I cherry-pick a merge commit" do
- it do
- find("a[href='#modal-cherry-pick-commit']").click
- page.within('#modal-cherry-pick-commit') do
- uncheck 'create_merge_request'
- click_button 'Cherry-pick'
+ open_modal
+
+ page.within(modal_selector) do
+ expect(page).to have_content('Cherry-pick this commit')
end
- expect(page).to have_content('The commit has been successfully cherry-picked into master.')
end
end
- context "I cherry-pick a commit that was previously cherry-picked" do
- it do
- find("a[href='#modal-cherry-pick-commit']").click
- page.within('#modal-cherry-pick-commit') do
- uncheck 'create_merge_request'
- click_button 'Cherry-pick'
- end
+ context 'when starting from the commit tab' do
+ before do
visit project_commit_path(project, master_pickable_commit.id)
- find("a[href='#modal-cherry-pick-commit']").click
- page.within('#modal-cherry-pick-commit') do
- uncheck 'create_merge_request'
- click_button 'Cherry-pick'
- end
- expect(page).to have_content('Sorry, we cannot cherry-pick this commit automatically.')
end
- end
- context "I cherry-pick a commit in a new merge request", :js do
- it do
- find('.header-action-buttons a.dropdown-toggle').click
- find("a[href='#modal-cherry-pick-commit']").click
- page.within('#modal-cherry-pick-commit') do
- click_button 'Cherry-pick'
+ context 'when cherry-picking a commit' do
+ specify do
+ cherry_pick_commit
+
+ expect(page).to have_content('The commit has been successfully cherry-picked into master.')
end
+ end
- wait_for_requests
+ context 'when cherry-picking a merge commit' do
+ specify do
+ cherry_pick_commit
- expect(page).to have_content("The commit has been successfully cherry-picked into cherry-pick-#{master_pickable_commit.short_id}. You can now submit a merge request to get this change into the original branch.")
- expect(page).to have_content("From cherry-pick-#{master_pickable_commit.short_id} into master")
+ expect(page).to have_content('The commit has been successfully cherry-picked into master.')
+ end
end
- end
- context "I cherry-pick a commit from a different branch", :js do
- it do
- find('.header-action-buttons a.dropdown-toggle').click
- find(:css, "a[href='#modal-cherry-pick-commit']").click
+ context 'when cherry-picking a commit that was previously cherry-picked' do
+ specify do
+ cherry_pick_commit
- page.within('#modal-cherry-pick-commit') do
- click_button 'master'
+ visit project_commit_path(project, master_pickable_commit.id)
+
+ cherry_pick_commit
+
+ expect(page).to have_content('Sorry, we cannot cherry-pick this commit automatically.')
end
+ end
- wait_for_requests
+ context 'when cherry-picking a commit in a new merge request' do
+ specify do
+ cherry_pick_commit(create_merge_request: true)
- page.within('#modal-cherry-pick-commit .dropdown-menu') do
- find('.dropdown-input input').set('feature')
- wait_for_requests
- click_link "feature"
+ expect(page).to have_content("The commit has been successfully cherry-picked into cherry-pick-#{master_pickable_commit.short_id}. You can now submit a merge request to get this change into the original branch.")
+ expect(page).to have_content("From cherry-pick-#{master_pickable_commit.short_id} into master")
end
+ end
- page.within('#modal-cherry-pick-commit') do
- uncheck 'create_merge_request'
- click_button 'Cherry-pick'
+ context 'when cherry-picking a commit from a different branch' do
+ specify do
+ open_modal
+
+ page.within(modal_selector) do
+ click_button 'master'
+ end
+
+ page.within("#{modal_selector} .dropdown-menu") do
+ find('[data-testid="dropdown-search-box"]').set('feature')
+ wait_for_requests
+ click_button 'feature'
+ end
+
+ submit_cherry_pick
+
+ expect(page).to have_content('The commit has been successfully cherry-picked into feature.')
end
+ end
+
+ context 'when the project is archived' do
+ let(:project) { create(:project, :repository, :archived, namespace: user.namespace) }
- expect(page).to have_content('The commit has been successfully cherry-picked into feature.')
+ it 'does not show the cherry-pick link' do
+ open_dropdown
+
+ expect(page).not_to have_text("Cherry-pick")
+ end
end
end
- context 'when the project is archived' do
- let(:project) { create(:project, :repository, :archived, namespace: group) }
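+ # Opens the cherry-pick modal from the commit header dropdown, then submits it.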
+ def cherry_pick_commit(create_merge_request: false)
+ open_modal
- it 'does not show the cherry-pick link' do
- find('.header-action-buttons a.dropdown-toggle').click
+ submit_cherry_pick(create_merge_request: create_merge_request)
+ end
+
+ def open_dropdown
+ find('.header-action-buttons .dropdown').click
+ end
- expect(page).not_to have_text("Cherry-pick")
- expect(page).not_to have_css("a[href='#modal-cherry-pick-commit']")
+ def open_modal
+ open_dropdown
+ find('[data-testid="cherry-pick-commit-link"]').click
+ end
+
+ def submit_cherry_pick(create_merge_request: false)
+ page.within(modal_selector) do
+ uncheck('create_merge_request') unless create_merge_request
+ click_button('Cherry-pick')
end
end
+
+ def modal_selector
+ '[data-testid="modal-commit"]'
+ end
end
diff --git a/spec/features/projects/commit/user_reverts_commit_spec.rb b/spec/features/projects/commit/user_reverts_commit_spec.rb
index f3c364dab97..72c639a027e 100644
--- a/spec/features/projects/commit/user_reverts_commit_spec.rb
+++ b/spec/features/projects/commit/user_reverts_commit_spec.rb
@@ -6,58 +6,89 @@ RSpec.describe 'User reverts a commit', :js do
include RepoHelpers
let_it_be(:user) { create(:user) }
- let(:project) { create(:project, :repository, namespace: user.namespace) }
+ let!(:project) { create_default(:project, :repository, namespace: user.namespace) }
before do
sign_in(user)
-
- visit(project_commit_path(project, sample_commit.id))
end
- def revert_commit(create_merge_request: false)
- find('.header-action-buttons .dropdown').click
- find('[data-testid="revert-commit-link"]').click
+ context 'when clicking revert from the dropdown for a commit on pipelines tab' do
+ it 'launches the modal form' do
+ sha = '7d3b0f7cff5f37573aea97cebfd5692ea1689924'
+ create(:ci_empty_pipeline, sha: sha)
+ visit project_commit_path(project, project.commit(sha).id)
+ click_link 'Pipelines'
- page.within('[data-testid="modal-commit"]') do
- uncheck('create_merge_request') unless create_merge_request
- click_button('Revert')
+ open_modal
+
+ page.within(modal_selector) do
+ expect(page).to have_content('Revert this commit')
+ end
end
end
- context 'without creating a new merge request' do
- it 'reverts a commit' do
- revert_commit
+ context 'when starting from the commit tab' do
+ before do
+ visit project_commit_path(project, sample_commit.id)
+ end
+
+ context 'without creating a new merge request' do
+ it 'reverts a commit' do
+ revert_commit
+
+ expect(page).to have_content('The commit has been successfully reverted.')
+ end
+
+ it 'does not revert a previously reverted commit' do
+ revert_commit
+ # Visit the commit again once it has been reverted.
+ visit project_commit_path(project, sample_commit.id)
+
+ revert_commit
- expect(page).to have_content('The commit has been successfully reverted.')
+ expect(page).to have_content('Sorry, we cannot revert this commit automatically.')
+ end
end
- it 'does not revert a previously reverted commit' do
- revert_commit
- # Visit the comment again once it was reverted.
- visit project_commit_path(project, sample_commit.id)
+ context 'with creating a new merge request' do
+ it 'reverts a commit' do
+ revert_commit(create_merge_request: true)
+
+ expect(page).to have_content('The commit has been successfully reverted. You can now submit a merge request to get this change into the original branch.')
+ expect(page).to have_content("From revert-#{Commit.truncate_sha(sample_commit.id)} into master")
+ end
+ end
- revert_commit
+ context 'when the project is archived' do
+ let(:project) { create(:project, :repository, :archived, namespace: user.namespace) }
- expect(page).to have_content('Sorry, we cannot revert this commit automatically.')
+ it 'does not show the revert link' do
+ open_dropdown
+
+ expect(page).not_to have_link('Revert')
+ end
end
end
- context 'with creating a new merge request' do
- it 'reverts a commit' do
- revert_commit(create_merge_request: true)
+ def revert_commit(create_merge_request: false)
+ open_modal
- expect(page).to have_content('The commit has been successfully reverted. You can now submit a merge request to get this change into the original branch.')
- expect(page).to have_content("From revert-#{Commit.truncate_sha(sample_commit.id)} into master")
+ page.within(modal_selector) do
+ uncheck('create_merge_request') unless create_merge_request
+ click_button('Revert')
end
end
- context 'when the project is archived' do
- let(:project) { create(:project, :repository, :archived, namespace: user.namespace) }
+ def open_dropdown
+ find('.header-action-buttons .dropdown').click
+ end
- it 'does not show the revert link' do
- find('.header-action-buttons .dropdown').click
+ def open_modal
+ open_dropdown
+ find('[data-testid="revert-commit-link"]').click
+ end
- expect(page).not_to have_link('Revert')
- end
+ def modal_selector
+ '[data-testid="modal-commit"]'
end
end
diff --git a/spec/features/projects/commits/user_browses_commits_spec.rb b/spec/features/projects/commits/user_browses_commits_spec.rb
index 596b4773716..4894e2b7f3e 100644
--- a/spec/features/projects/commits/user_browses_commits_spec.rb
+++ b/spec/features/projects/commits/user_browses_commits_spec.rb
@@ -203,10 +203,11 @@ RSpec.describe 'User browses commits' do
context 'when click the compare tab' do
before do
+ wait_for_requests
click_link('Compare')
end
- it 'does not render create merge request button' do
+ it 'does not render create merge request button', :js do
expect(page).not_to have_link 'Create merge request'
end
end
@@ -236,10 +237,11 @@ RSpec.describe 'User browses commits' do
context 'when click the compare tab' do
before do
+ wait_for_requests
click_link('Compare')
end
- it 'renders create merge request button' do
+ it 'renders create merge request button', :js do
expect(page).to have_link 'Create merge request'
end
end
@@ -276,10 +278,11 @@ RSpec.describe 'User browses commits' do
context 'when click the compare tab' do
before do
+ wait_for_requests
click_link('Compare')
end
- it 'renders button to the merge request' do
+ it 'renders button to the merge request', :js do
expect(page).not_to have_link 'Create merge request'
expect(page).to have_link 'View open merge request', href: project_merge_request_path(project, merge_request)
end
diff --git a/spec/features/projects/compare_spec.rb b/spec/features/projects/compare_spec.rb
index e387ea4d473..64e9968061c 100644
--- a/spec/features/projects/compare_spec.rb
+++ b/spec/features/projects/compare_spec.rb
@@ -17,10 +17,10 @@ RSpec.describe "Compare", :js do
visit project_compare_index_path(project, from: 'master', to: 'master')
select_using_dropdown 'from', 'feature'
- expect(find('.js-compare-from-dropdown .dropdown-toggle-text')).to have_content('feature')
+ expect(find('.js-compare-from-dropdown .gl-new-dropdown-button-text')).to have_content('feature')
select_using_dropdown 'to', 'binary-encoding'
- expect(find('.js-compare-to-dropdown .dropdown-toggle-text')).to have_content('binary-encoding')
+ expect(find('.js-compare-to-dropdown .gl-new-dropdown-button-text')).to have_content('binary-encoding')
click_button 'Compare'
@@ -32,8 +32,8 @@ RSpec.describe "Compare", :js do
it "pre-populates fields" do
visit project_compare_index_path(project, from: "master", to: "master")
- expect(find(".js-compare-from-dropdown .dropdown-toggle-text")).to have_content("master")
- expect(find(".js-compare-to-dropdown .dropdown-toggle-text")).to have_content("master")
+ expect(find(".js-compare-from-dropdown .gl-new-dropdown-button-text")).to have_content("master")
+ expect(find(".js-compare-to-dropdown .gl-new-dropdown-button-text")).to have_content("master")
end
it_behaves_like 'compares branches'
@@ -99,7 +99,7 @@ RSpec.describe "Compare", :js do
find(".js-compare-from-dropdown .compare-dropdown-toggle").click
- expect(find(".js-compare-from-dropdown .dropdown-content")).to have_selector("li", count: 3)
+ expect(find(".js-compare-from-dropdown .gl-new-dropdown-contents")).to have_selector('li.gl-new-dropdown-item', count: 1)
end
context 'when commit has overflow', :js do
@@ -125,10 +125,10 @@ RSpec.describe "Compare", :js do
visit project_compare_index_path(project, from: "master", to: "master")
select_using_dropdown "from", "v1.0.0"
- expect(find(".js-compare-from-dropdown .dropdown-toggle-text")).to have_content("v1.0.0")
+ expect(find(".js-compare-from-dropdown .gl-new-dropdown-button-text")).to have_content("v1.0.0")
select_using_dropdown "to", "v1.1.0"
- expect(find(".js-compare-to-dropdown .dropdown-toggle-text")).to have_content("v1.1.0")
+ expect(find(".js-compare-to-dropdown .gl-new-dropdown-button-text")).to have_content("v1.1.0")
click_button "Compare"
expect(page).to have_content "Commits"
@@ -136,19 +136,22 @@ RSpec.describe "Compare", :js do
end
def select_using_dropdown(dropdown_type, selection, commit: false)
+ wait_for_requests
+
dropdown = find(".js-compare-#{dropdown_type}-dropdown")
dropdown.find(".compare-dropdown-toggle").click
# find input before using to wait for the inputs visibility
dropdown.find('.dropdown-menu')
dropdown.fill_in("Filter by Git revision", with: selection)
+
wait_for_requests
if commit
- dropdown.find('input[type="search"]').send_keys(:return)
+ dropdown.find('.gl-search-box-by-type-input').send_keys(:return)
else
# find before all to wait for the items visibility
- dropdown.find("a[data-ref=\"#{selection}\"]", match: :first)
- dropdown.all("a[data-ref=\"#{selection}\"]").last.click
+ dropdown.find(".js-compare-#{dropdown_type}-dropdown .dropdown-item", text: selection, match: :first)
+ dropdown.all(".js-compare-#{dropdown_type}-dropdown .dropdown-item", text: selection).first.click
end
end
end
diff --git a/spec/features/projects/files/dockerfile_dropdown_spec.rb b/spec/features/projects/files/dockerfile_dropdown_spec.rb
index 17258f7042f..40d19a94b42 100644
--- a/spec/features/projects/files/dockerfile_dropdown_spec.rb
+++ b/spec/features/projects/files/dockerfile_dropdown_spec.rb
@@ -2,14 +2,16 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User wants to add a Dockerfile file', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/297400' do
+RSpec.describe 'Projects > Files > User wants to add a Dockerfile file', :js do
+ include Spec::Support::Helpers::Features::EditorLiteSpecHelpers
+
before do
project = create(:project, :repository)
sign_in project.owner
visit project_new_blob_path(project, 'master', file_name: 'Dockerfile')
end
- it 'user can pick a Dockerfile file from the dropdown', :js do
+ it 'user can pick a Dockerfile file from the dropdown' do
expect(page).to have_css('.dockerfile-selector')
find('.js-dockerfile-selector').click
@@ -24,6 +26,6 @@ RSpec.describe 'Projects > Files > User wants to add a Dockerfile file', quarant
wait_for_requests
expect(page).to have_css('.dockerfile-selector .dropdown-toggle-text', text: 'Apply a template')
- expect(page).to have_content('COPY ./ /usr/local/apache2/htdocs/')
+ expect(editor_get_value).to have_content('COPY ./ /usr/local/apache2/htdocs/')
end
end
diff --git a/spec/features/projects/files/gitignore_dropdown_spec.rb b/spec/features/projects/files/gitignore_dropdown_spec.rb
index 5a39f2bcd98..a9f2463ecf6 100644
--- a/spec/features/projects/files/gitignore_dropdown_spec.rb
+++ b/spec/features/projects/files/gitignore_dropdown_spec.rb
@@ -2,14 +2,16 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User wants to add a .gitignore file' do
+RSpec.describe 'Projects > Files > User wants to add a .gitignore file', :js do
+ include Spec::Support::Helpers::Features::EditorLiteSpecHelpers
+
before do
project = create(:project, :repository)
sign_in project.owner
visit project_new_blob_path(project, 'master', file_name: '.gitignore')
end
- it 'user can pick a .gitignore file from the dropdown', :js do
+ it 'user can pick a .gitignore file from the dropdown' do
expect(page).to have_css('.gitignore-selector')
find('.js-gitignore-selector').click
@@ -24,7 +26,7 @@ RSpec.describe 'Projects > Files > User wants to add a .gitignore file' do
wait_for_requests
expect(page).to have_css('.gitignore-selector .dropdown-toggle-text', text: 'Apply a template')
- expect(page).to have_content('/.bundle')
- expect(page).to have_content('config/initializers/secret_token.rb')
+ expect(editor_get_value).to have_content('/.bundle')
+ expect(editor_get_value).to have_content('config/initializers/secret_token.rb')
end
end
diff --git a/spec/features/projects/files/gitlab_ci_syntax_yml_dropdown_spec.rb b/spec/features/projects/files/gitlab_ci_syntax_yml_dropdown_spec.rb
index 6308acb41f5..ca6f03472dd 100644
--- a/spec/features/projects/files/gitlab_ci_syntax_yml_dropdown_spec.rb
+++ b/spec/features/projects/files/gitlab_ci_syntax_yml_dropdown_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Projects > Files > User wants to add a .gitlab-ci.yml file' do
+ include Spec::Support::Helpers::Features::EditorLiteSpecHelpers
+
before do
project = create(:project, :repository)
sign_in project.owner
@@ -34,8 +36,7 @@ RSpec.describe 'Projects > Files > User wants to add a .gitlab-ci.yml file' do
let(:experiment_active) { true }
let(:in_experiment_group) { true }
- it 'allows the user to pick a "Learn CI/CD syntax" template from the dropdown', :js,
- { quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/297347' } } do
+ it 'allows the user to pick a "Learn CI/CD syntax" template from the dropdown', :js do
expect(page).to have_css('.gitlab-ci-syntax-yml-selector')
find('.js-gitlab-ci-syntax-yml-selector').click
@@ -50,7 +51,7 @@ RSpec.describe 'Projects > Files > User wants to add a .gitlab-ci.yml file' do
wait_for_requests
expect(page).to have_css('.gitlab-ci-syntax-yml-selector .dropdown-toggle-text', text: 'Learn CI/CD syntax')
- expect(page).to have_content('You can use artifacts to pass data to jobs in later stages.')
+ expect(editor_get_value).to have_content('You can use artifacts to pass data to jobs in later stages.')
end
end
end
diff --git a/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb b/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb
index 879cb6a65c8..55b9f38d8e7 100644
--- a/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb
+++ b/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb
@@ -2,14 +2,16 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User wants to add a .gitlab-ci.yml file' do
+RSpec.describe 'Projects > Files > User wants to add a .gitlab-ci.yml file', :js do
+ include Spec::Support::Helpers::Features::EditorLiteSpecHelpers
+
before do
project = create(:project, :repository)
sign_in project.owner
visit project_new_blob_path(project, 'master', file_name: '.gitlab-ci.yml')
end
- it 'user can pick a template from the dropdown', :js do
+ it 'user can pick a template from the dropdown' do
expect(page).to have_css('.gitlab-ci-yml-selector')
find('.js-gitlab-ci-yml-selector').click
@@ -24,7 +26,7 @@ RSpec.describe 'Projects > Files > User wants to add a .gitlab-ci.yml file' do
wait_for_requests
expect(page).to have_css('.gitlab-ci-yml-selector .dropdown-toggle-text', text: 'Apply a template')
- expect(page).to have_content('This file is a template, and might need editing before it works on your project')
- expect(page).to have_content('jekyll build -d test')
+ expect(editor_get_value).to have_content('This file is a template, and might need editing before it works on your project')
+ expect(editor_get_value).to have_content('jekyll build -d test')
end
end
diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb
index 1557a8a2d72..adf664f26af 100644
--- a/spec/features/projects/jobs_spec.rb
+++ b/spec/features/projects/jobs_spec.rb
@@ -533,10 +533,10 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
expect(page).to have_content('Trigger token')
expect(page).to have_content('Trigger variables')
- expect(page).not_to have_css('.js-reveal-variables')
+ expect(page).not_to have_selector('[data-testid="trigger-reveal-values-button"]')
- expect(page).to have_selector('.js-build-variable', text: 'TRIGGER_KEY_1')
- expect(page).to have_selector('.js-build-value', text: '••••••')
+ expect(page).to have_selector('[data-testid="trigger-build-key"]', text: 'TRIGGER_KEY_1')
+ expect(page).to have_selector('[data-testid="trigger-build-value"]', text: '••••••')
end
end
@@ -571,17 +571,17 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
expect(page).to have_content('Trigger token')
expect(page).to have_content('Trigger variables')
- expect(page).to have_css('.js-reveal-variables')
+ expect(page).to have_selector('[data-testid="trigger-reveal-values-button"]')
- expect(page).to have_selector('.js-build-variable', text: 'TRIGGER_KEY_1')
- expect(page).to have_selector('.js-build-value', text: '••••••')
+ expect(page).to have_selector('[data-testid="trigger-build-key"]', text: 'TRIGGER_KEY_1')
+ expect(page).to have_selector('[data-testid="trigger-build-value"]', text: '••••••')
end
it 'reveals values on button click', :js do
click_button 'Reveal values'
- expect(page).to have_selector('.js-build-variable', text: 'TRIGGER_KEY_1')
- expect(page).to have_selector('.js-build-value', text: 'TRIGGER_VALUE_1')
+ expect(page).to have_selector('[data-testid="trigger-build-key"]', text: 'TRIGGER_KEY_1')
+ expect(page).to have_selector('[data-testid="trigger-build-value"]', text: 'TRIGGER_VALUE_1')
end
end
diff --git a/spec/features/projects/members/anonymous_user_sees_members_spec.rb b/spec/features/projects/members/anonymous_user_sees_members_spec.rb
index 3b0f00c5494..c0849cc7330 100644
--- a/spec/features/projects/members/anonymous_user_sees_members_spec.rb
+++ b/spec/features/projects/members/anonymous_user_sees_members_spec.rb
@@ -8,6 +8,8 @@ RSpec.describe 'Projects > Members > Anonymous user sees members' do
let(:project) { create(:project, :public) }
before do
+ stub_feature_flags(vue_project_members_list: false)
+
project.add_maintainer(user)
create(:project_group_link, project: project, group: group)
end
diff --git a/spec/features/projects/members/group_members_spec.rb b/spec/features/projects/members/group_members_spec.rb
index aa15f04bf24..b6a5fbf5584 100644
--- a/spec/features/projects/members/group_members_spec.rb
+++ b/spec/features/projects/members/group_members_spec.rb
@@ -13,6 +13,8 @@ RSpec.describe 'Projects members', :js do
let(:group_requester) { create(:user) }
before do
+ stub_feature_flags(vue_project_members_list: false)
+
project.add_developer(developer)
group.add_owner(user)
sign_in(user)
diff --git a/spec/features/projects/members/groups_with_access_list_spec.rb b/spec/features/projects/members/groups_with_access_list_spec.rb
index 686d86b1783..de27692b535 100644
--- a/spec/features/projects/members/groups_with_access_list_spec.rb
+++ b/spec/features/projects/members/groups_with_access_list_spec.rb
@@ -11,6 +11,8 @@ RSpec.describe 'Projects > Members > Groups with access list', :js do
let!(:group_link) { create(:project_group_link, project: project, group: group, **additional_link_attrs) }
before do
+ stub_feature_flags(vue_project_members_list: false)
+
travel_to Time.now.utc.beginning_of_day
project.add_maintainer(user)
diff --git a/spec/features/projects/members/invite_group_spec.rb b/spec/features/projects/members/invite_group_spec.rb
index bb56ae348fb..8e956c4a7cd 100644
--- a/spec/features/projects/members/invite_group_spec.rb
+++ b/spec/features/projects/members/invite_group_spec.rb
@@ -8,6 +8,11 @@ RSpec.describe 'Project > Members > Invite group', :js do
let(:maintainer) { create(:user) }
+ before do
+ stub_feature_flags(invite_members_group_modal: false)
+ stub_feature_flags(vue_project_members_list: false)
+ end
+
describe 'Share with group lock' do
shared_examples 'the project can be shared with groups' do
it 'the "Invite group" tab exists' do
diff --git a/spec/features/projects/members/list_spec.rb b/spec/features/projects/members/list_spec.rb
index 62115f2dce6..a62ccfab244 100644
--- a/spec/features/projects/members/list_spec.rb
+++ b/spec/features/projects/members/list_spec.rb
@@ -13,10 +13,18 @@ RSpec.describe 'Project members list' do
before do
stub_feature_flags(invite_members_group_modal: false)
+ stub_feature_flags(vue_project_members_list: false)
+
sign_in(user1)
group.add_owner(user1)
end
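+ # `have_pushed_frontend_feature_flags` is assumed to be a custom matcher from
+ # the spec support code; it presumably verifies that the flag state has been
+ # pushed to the frontend (for example via `gon`).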
+ it 'pushes `vue_project_members_list` feature flag to the frontend' do
+ visit_members_page
+
+ expect(page).to have_pushed_frontend_feature_flags(vueProjectMembersList: false)
+ end
+
it 'show members from project and group' do
project.add_developer(user2)
diff --git a/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb b/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb
index d69c3f2652c..e7970b28e37 100644
--- a/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb
+++ b/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb
@@ -11,6 +11,8 @@ RSpec.describe 'Projects > Members > Maintainer adds member with expiration date
let(:new_member) { create(:user) }
before do
+ stub_feature_flags(vue_project_members_list: false)
+
travel_to Time.now.utc.beginning_of_day
project.add_maintainer(maintainer)
@@ -18,6 +20,8 @@ RSpec.describe 'Projects > Members > Maintainer adds member with expiration date
end
it 'expiration date is displayed in the members list' do
+ stub_feature_flags(invite_members_group_modal: false)
+
visit project_project_members_path(project)
page.within '.invite-users-form' do
diff --git a/spec/features/projects/members/sorting_spec.rb b/spec/features/projects/members/sorting_spec.rb
index be27cbc0d66..9b9f1f26d66 100644
--- a/spec/features/projects/members/sorting_spec.rb
+++ b/spec/features/projects/members/sorting_spec.rb
@@ -8,6 +8,8 @@ RSpec.describe 'Projects > Members > Sorting' do
let(:project) { create(:project, namespace: maintainer.namespace, creator: maintainer) }
before do
+ stub_feature_flags(vue_project_members_list: false)
+
create(:project_member, :developer, user: developer, project: project, created_at: 3.days.ago)
sign_in(maintainer)
diff --git a/spec/features/projects/members/tabs_spec.rb b/spec/features/projects/members/tabs_spec.rb
index bdcf02c82a4..7c72ba31029 100644
--- a/spec/features/projects/members/tabs_spec.rb
+++ b/spec/features/projects/members/tabs_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe 'Projects > Members > Tabs' do
end
before do
+ stub_feature_flags(vue_project_members_list: false)
allow(Kaminari.config).to receive(:default_per_page).and_return(1)
sign_in(user)
diff --git a/spec/features/projects/navbar_spec.rb b/spec/features/projects/navbar_spec.rb
index 25791b393bc..4ff3827b240 100644
--- a/spec/features/projects/navbar_spec.rb
+++ b/spec/features/projects/navbar_spec.rb
@@ -67,23 +67,4 @@ RSpec.describe 'Project navbar' do
it_behaves_like 'verified navigation bar'
end
-
- context 'when invite team members is not available' do
- it 'does not display the js-invite-members-trigger' do
- visit project_path(project)
-
- expect(page).not_to have_selector('.js-invite-members-trigger')
- end
- end
-
- context 'when invite team members is available' do
- it 'includes the div for js-invite-members-trigger' do
- stub_feature_flags(invite_members_group_modal: true)
- allow_any_instance_of(InviteMembersHelper).to receive(:invite_members_allowed?).and_return(true)
-
- visit project_path(project)
-
- expect(page).to have_selector('.js-invite-members-trigger')
- end
- end
end
diff --git a/spec/features/projects/pages/user_adds_domain_spec.rb b/spec/features/projects/pages/user_adds_domain_spec.rb
new file mode 100644
index 00000000000..24c9edb79e5
--- /dev/null
+++ b/spec/features/projects/pages/user_adds_domain_spec.rb
@@ -0,0 +1,185 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'User adds pages domain', :js do
+ include LetsEncryptHelpers
+
+ let_it_be(:project) { create(:project, pages_https_only: false) }
+ let(:user) { create(:user) }
+
+ before do
+ allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
+
+ project.add_maintainer(user)
+
+ sign_in(user)
+ end
+
+ context 'when pages are exposed on external HTTP address', :http_pages_enabled do
+ let(:project) { create(:project, pages_https_only: false) }
+
+ shared_examples 'adds new domain' do
+ it 'adds new domain' do
+ visit new_project_pages_domain_path(project)
+
+ fill_in 'Domain', with: 'my.test.domain.com'
+ click_button 'Create New Domain'
+
+ expect(page).to have_content('my.test.domain.com')
+ end
+ end
+
+ it 'allows adding a new domain' do
+ visit project_pages_path(project)
+
+ expect(page).to have_content('New Domain')
+ end
+
+ it_behaves_like 'adds new domain'
+
+ context 'when project in group namespace' do
+ it_behaves_like 'adds new domain' do
+ let(:group) { create :group }
+ let(:project) { create(:project, namespace: group, pages_https_only: false) }
+ end
+ end
+
+ context 'when pages domain is added' do
+ before do
+ create(:pages_domain, project: project, domain: 'my.test.domain.com')
+
+ visit new_project_pages_domain_path(project)
+ end
+
+ it 'renders that support for custom certificates is disabled' do
+ expect(page).to have_content('Support for custom certificates is disabled')
+ end
+
+ it 'does not add a new domain and renders an error message' do
+ fill_in 'Domain', with: 'my.test.domain.com'
+ click_button 'Create New Domain'
+
+ expect(page).to have_content('Domain has already been taken')
+ end
+ end
+ end
+
+ context 'when pages are exposed on external HTTPS address', :https_pages_enabled, :js do
+ let(:certificate_pem) do
+ attributes_for(:pages_domain)[:certificate]
+ end
+
+ let(:certificate_key) do
+ attributes_for(:pages_domain)[:key]
+ end
+
+ it 'adds new domain with certificate' do
+ visit new_project_pages_domain_path(project)
+
+ fill_in 'Domain', with: 'my.test.domain.com'
+
+ fill_in 'Certificate (PEM)', with: certificate_pem
+ fill_in 'Key (PEM)', with: certificate_key
+ click_button 'Create New Domain'
+
+ expect(page).to have_content('my.test.domain.com')
+ end
+
+ it "adds new domain with certificate if Let's Encrypt is enabled" do
+ stub_lets_encrypt_settings
+
+ visit new_project_pages_domain_path(project)
+
+ fill_in 'Domain', with: 'my.test.domain.com'
+
+ find('.js-auto-ssl-toggle-container .project-feature-toggle').click
+
+ fill_in 'Certificate (PEM)', with: certificate_pem
+ fill_in 'Key (PEM)', with: certificate_key
+ click_button 'Create New Domain'
+
+ expect(page).to have_content('my.test.domain.com')
+ end
+
+ it 'shows validation error if domain is duplicated' do
+ project.pages_domains.create!(domain: 'my.test.domain.com')
+
+ visit new_project_pages_domain_path(project)
+
+ fill_in 'Domain', with: 'my.test.domain.com'
+ click_button 'Create New Domain'
+
+ expect(page).to have_content('Domain has already been taken')
+ end
+
+ describe 'with dns verification enabled' do
+ before do
+ stub_application_setting(pages_domain_verification_enabled: true)
+ end
+
+ it 'shows the DNS verification record' do
+ domain = create(:pages_domain, project: project)
+
+ visit project_pages_path(project)
+
+ within('#content-body') { click_link 'Edit' }
+ expect(page).to have_field :domain_verification, with: "#{domain.verification_domain} TXT #{domain.keyed_verification_code}"
+ end
+ end
+
+ describe 'updating the certificate for an existing domain' do
+ let!(:domain) do
+ create(:pages_domain, project: project, auto_ssl_enabled: false)
+ end
+
+ it 'allows the certificate to be updated' do
+ visit project_pages_path(project)
+
+ within('#content-body') { click_link 'Edit' }
+ click_button 'Save Changes'
+
+ expect(page).to have_content('Domain was updated')
+ end
+
+ context 'when the certificate is invalid' do
+ let!(:domain) do
+ create(:pages_domain, :without_certificate, :without_key, project: project)
+ end
+
+ it 'tells the user what the problem is' do
+ visit project_pages_path(project)
+
+ within('#content-body') { click_link 'Edit' }
+
+ fill_in 'Certificate (PEM)', with: 'invalid data'
+ click_button 'Save Changes'
+
+ expect(page).to have_content('Certificate must be a valid PEM certificate')
+ expect(page).to have_content('Certificate misses intermediates')
+ expect(page).to have_content("Key doesn't match the certificate")
+ end
+ end
+
+ it 'allows the certificate to be removed', :js do
+ visit project_pages_path(project)
+
+ within('#content-body') { click_link 'Edit' }
+
+ accept_confirm { click_link 'Remove' }
+
+ expect(page).to have_field('Certificate (PEM)', with: '')
+ expect(page).to have_field('Key (PEM)', with: '')
+ domain.reload
+ expect(domain.certificate).to be_nil
+ expect(domain.key).to be_nil
+ end
+
+ it 'shows the DNS CNAME record' do
+ visit project_pages_path(project)
+
+ within('#content-body') { click_link 'Edit' }
+ expect(page).to have_field :domain_dns, with: "#{domain.domain} CNAME #{domain.project.pages_subdomain}.#{Settings.pages.host}."
+ end
+ end
+ end
+end
diff --git a/spec/features/projects/pages_lets_encrypt_spec.rb b/spec/features/projects/pages/user_edits_lets_encrypt_settings_spec.rb
index 302e9f5e533..cf8438d5e6f 100644
--- a/spec/features/projects/pages_lets_encrypt_spec.rb
+++ b/spec/features/projects/pages/user_edits_lets_encrypt_settings_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe "Pages with Let's Encrypt", :https_pages_enabled do
project.add_role(user, role)
sign_in(user)
- project.namespace.update(owner: user)
+ project.namespace.update!(owner: user)
allow_next_instance_of(Project) do |instance|
allow(instance).to receive(:pages_deployed?) { true }
end
diff --git a/spec/features/projects/pages/user_edits_settings_spec.rb b/spec/features/projects/pages/user_edits_settings_spec.rb
new file mode 100644
index 00000000000..3649fae17ce
--- /dev/null
+++ b/spec/features/projects/pages/user_edits_settings_spec.rb
@@ -0,0 +1,201 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'User edits Pages settings', :js do
+ let(:project) { create(:project, pages_https_only: false) }
+ let(:user) { create(:user) }
+
+ before do
+ allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
+
+ project.add_maintainer(user)
+
+ sign_in(user)
+ end
+
+ context 'when user is the owner' do
+ before do
+ project.namespace.update!(owner: user)
+ end
+
+ context 'when pages deployed' do
+ before do
+ project.mark_pages_as_deployed
+ end
+
+ it 'renders Access pages' do
+ visit project_pages_path(project)
+
+ expect(page).to have_content('Access pages')
+ end
+
+ context 'when pages are disabled in the project settings' do
+ it 'renders disabled warning' do
+ project.project_feature.update!(pages_access_level: ProjectFeature::DISABLED)
+
+ visit project_pages_path(project)
+
+ expect(page).to have_content('GitLab Pages are disabled for this project')
+ end
+ end
+
+ it 'renders first deployment warning' do
+ visit project_pages_path(project)
+
+ expect(page).to have_content('It may take up to 30 minutes before the site is available after the first deployment.')
+ end
+
+ shared_examples 'does not render access control warning' do
+ it 'does not render access control warning' do
+ visit project_pages_path(project)
+
+ expect(page).not_to have_content('Access Control is enabled for this Pages website')
+ end
+ end
+
+ include_examples 'does not render access control warning'
+
+ context 'when access control is enabled in gitlab settings' do
+ before do
+ stub_pages_setting(access_control: true)
+ end
+
+ it 'renders access control warning' do
+ visit project_pages_path(project)
+
+ expect(page).to have_content('Access Control is enabled for this Pages website')
+ end
+
+ context 'when pages are public' do
+ before do
+ project.project_feature.update!(pages_access_level: ProjectFeature::PUBLIC)
+ end
+
+ include_examples 'does not render access control warning'
+ end
+ end
+
+ context 'when support for external domains is disabled' do
+ it 'renders message that support is disabled' do
+ visit project_pages_path(project)
+
+ expect(page).to have_content('Support for domains and certificates is disabled')
+ end
+ end
+ end
+
+ it 'does not see anything to destroy' do
+ visit project_pages_path(project)
+
+ expect(page).to have_content('Configure pages')
+ expect(page).not_to have_link('Remove pages')
+ end
+
+ describe 'project settings page' do
+ it 'renders "Pages" tab' do
+ visit edit_project_path(project)
+
+ page.within '.nav-sidebar' do
+ expect(page).to have_link('Pages')
+ end
+ end
+
+ context 'when pages are disabled' do
+ before do
+ allow(Gitlab.config.pages).to receive(:enabled).and_return(false)
+ end
+
+ it 'does not render "Pages" tab' do
+ visit edit_project_path(project)
+
+ page.within '.nav-sidebar' do
+ expect(page).not_to have_link('Pages')
+ end
+ end
+ end
+ end
+ end
+
+ describe 'HTTPS settings', :https_pages_enabled do
+ before do
+ project.namespace.update!(owner: user)
+
+ project.mark_pages_as_deployed
+ end
+
+ it 'tries to change the setting' do
+ visit project_pages_path(project)
+ expect(page).to have_content("Force HTTPS (requires valid certificates)")
+
+ uncheck :project_pages_https_only
+
+ click_button 'Save'
+
+ expect(page).to have_text('Your changes have been saved')
+ expect(page).not_to have_checked_field('project_pages_https_only')
+ end
+
+ context 'setting could not be updated' do
+ let(:service) { instance_double('Projects::UpdateService') }
+
+ before do
+ allow(Projects::UpdateService).to receive(:new).and_return(service)
+ allow(service).to receive(:execute).and_return(status: :error, message: 'Some error has occurred')
+ end
+
+ it 'tries to change the setting' do
+ visit project_pages_path(project)
+
+ uncheck :project_pages_https_only
+
+ click_button 'Save'
+
+ expect(page).to have_text('Some error has occurred')
+ end
+ end
+
+ context 'non-HTTPS domain exists' do
+ let(:project) { create(:project, pages_https_only: false) }
+
+ before do
+ create(:pages_domain, :without_key, :without_certificate, project: project)
+ end
+
+ it 'the setting is disabled' do
+ visit project_pages_path(project)
+
+ expect(page).to have_field(:project_pages_https_only, disabled: true)
+ expect(page).to have_button('Save')
+ end
+ end
+
+ context 'HTTPS pages are disabled', :https_pages_disabled do
+ it 'the setting is unavailable' do
+ visit project_pages_path(project)
+
+ expect(page).not_to have_field(:project_pages_https_only)
+ expect(page).not_to have_content('Force HTTPS (requires valid certificates)')
+ expect(page).to have_button('Save')
+ end
+ end
+ end
+
+ describe 'Remove page' do
+ context 'when pages are deployed' do
+ before do
+ project.mark_pages_as_deployed
+ end
+
+ it 'removes the pages', :sidekiq_inline do
+ visit project_pages_path(project)
+
+ expect(page).to have_link('Remove pages')
+
+ accept_confirm { click_link 'Remove pages' }
+
+ expect(page).to have_content('Pages were scheduled for removal')
+ expect(project.reload.pages_deployed?).to be_falsey
+ end
+ end
+ end
+end
diff --git a/spec/features/projects/pages_spec.rb b/spec/features/projects/pages_spec.rb
deleted file mode 100644
index 11f712fde81..00000000000
--- a/spec/features/projects/pages_spec.rb
+++ /dev/null
@@ -1,411 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.shared_examples 'pages settings editing' do
- let_it_be(:project) { create(:project, pages_https_only: false) }
- let(:user) { create(:user) }
- let(:role) { :maintainer }
-
- before do
- allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
-
- project.add_role(user, role)
-
- sign_in(user)
- end
-
- context 'when user is the owner' do
- before do
- project.namespace.update(owner: user)
- end
-
- context 'when pages deployed' do
- before do
- allow_any_instance_of(Project).to receive(:pages_deployed?) { true }
- end
-
- it 'renders Access pages' do
- visit project_pages_path(project)
-
- expect(page).to have_content('Access pages')
- end
-
- context 'when pages are disabled in the project settings' do
- it 'renders disabled warning' do
- project.project_feature.update!(pages_access_level: ProjectFeature::DISABLED)
-
- visit project_pages_path(project)
-
- expect(page).to have_content('GitLab Pages are disabled for this project')
- end
- end
-
- it 'renders first deployment warning' do
- visit project_pages_path(project)
-
- expect(page).to have_content('It may take up to 30 minutes before the site is available after the first deployment.')
- end
-
- shared_examples 'does not render access control warning' do
- it 'does not render access control warning' do
- visit project_pages_path(project)
-
- expect(page).not_to have_content('Access Control is enabled for this Pages website')
- end
- end
-
- include_examples 'does not render access control warning'
-
- context 'when access control is enabled in gitlab settings' do
- before do
- stub_pages_setting(access_control: true)
- end
-
- it 'renders access control warning' do
- visit project_pages_path(project)
-
- expect(page).to have_content('Access Control is enabled for this Pages website')
- end
-
- context 'when pages are public' do
- before do
- project.project_feature.update!(pages_access_level: ProjectFeature::PUBLIC)
- end
-
- include_examples 'does not render access control warning'
- end
- end
-
- context 'when support for external domains is disabled' do
- it 'renders message that support is disabled' do
- visit project_pages_path(project)
-
- expect(page).to have_content('Support for domains and certificates is disabled')
- end
- end
-
- context 'when pages are exposed on external HTTP address', :http_pages_enabled do
- let(:project) { create(:project, pages_https_only: false) }
-
- shared_examples 'adds new domain' do
- it 'adds new domain' do
- visit new_project_pages_domain_path(project)
-
- fill_in 'Domain', with: 'my.test.domain.com'
- click_button 'Create New Domain'
-
- expect(page).to have_content('my.test.domain.com')
- end
- end
-
- it 'allows to add new domain' do
- visit project_pages_path(project)
-
- expect(page).to have_content('New Domain')
- end
-
- it_behaves_like 'adds new domain'
-
- context 'when project in group namespace' do
- it_behaves_like 'adds new domain' do
- let(:group) { create :group }
- let(:project) { create(:project, namespace: group, pages_https_only: false) }
- end
- end
-
- context 'when pages domain is added' do
- before do
- create(:pages_domain, project: project, domain: 'my.test.domain.com')
-
- visit new_project_pages_domain_path(project)
- end
-
- it 'renders certificates is disabled' do
- expect(page).to have_content('Support for custom certificates is disabled')
- end
-
- it 'does not adds new domain and renders error message' do
- fill_in 'Domain', with: 'my.test.domain.com'
- click_button 'Create New Domain'
-
- expect(page).to have_content('Domain has already been taken')
- end
- end
- end
-
- context 'when pages are exposed on external HTTPS address', :https_pages_enabled, :js do
- let(:certificate_pem) do
- attributes_for(:pages_domain)[:certificate]
- end
-
- let(:certificate_key) do
- attributes_for(:pages_domain)[:key]
- end
-
- it 'adds new domain with certificate' do
- visit new_project_pages_domain_path(project)
-
- fill_in 'Domain', with: 'my.test.domain.com'
-
- if ::Gitlab::LetsEncrypt.enabled?
- find('.js-auto-ssl-toggle-container .project-feature-toggle').click
- end
-
- fill_in 'Certificate (PEM)', with: certificate_pem
- fill_in 'Key (PEM)', with: certificate_key
- click_button 'Create New Domain'
-
- expect(page).to have_content('my.test.domain.com')
- end
-
- it 'shows validation error if domain is duplicated' do
- project.pages_domains.create!(domain: 'my.test.domain.com')
-
- visit new_project_pages_domain_path(project)
-
- fill_in 'Domain', with: 'my.test.domain.com'
- click_button 'Create New Domain'
-
- expect(page).to have_content('Domain has already been taken')
- end
-
- describe 'with dns verification enabled' do
- before do
- stub_application_setting(pages_domain_verification_enabled: true)
- end
-
- it 'shows the DNS verification record' do
- domain = create(:pages_domain, project: project)
-
- visit project_pages_path(project)
-
- within('#content-body') { click_link 'Edit' }
- expect(page).to have_field :domain_verification, with: "#{domain.verification_domain} TXT #{domain.keyed_verification_code}"
- end
- end
-
- describe 'updating the certificate for an existing domain' do
- let!(:domain) do
- create(:pages_domain, project: project, auto_ssl_enabled: false)
- end
-
- it 'allows the certificate to be updated' do
- visit project_pages_path(project)
-
- within('#content-body') { click_link 'Edit' }
- click_button 'Save Changes'
-
- expect(page).to have_content('Domain was updated')
- end
-
- context 'when the certificate is invalid' do
- let!(:domain) do
- create(:pages_domain, :without_certificate, :without_key, project: project)
- end
-
- it 'tells the user what the problem is' do
- visit project_pages_path(project)
-
- within('#content-body') { click_link 'Edit' }
-
- if ::Gitlab::LetsEncrypt.enabled?
- find('.js-auto-ssl-toggle-container .project-feature-toggle').click
- end
-
- fill_in 'Certificate (PEM)', with: 'invalid data'
- click_button 'Save Changes'
-
- expect(page).to have_content('Certificate must be a valid PEM certificate')
- expect(page).to have_content('Certificate misses intermediates')
- expect(page).to have_content("Key doesn't match the certificate")
- end
- end
-
- it 'allows the certificate to be removed', :js do
- visit project_pages_path(project)
-
- within('#content-body') { click_link 'Edit' }
-
- accept_confirm { click_link 'Remove' }
-
- expect(page).to have_field('Certificate (PEM)', with: '')
- expect(page).to have_field('Key (PEM)', with: '')
- domain.reload
- expect(domain.certificate).to be_nil
- expect(domain.key).to be_nil
- end
-
- it 'shows the DNS CNAME record' do
- visit project_pages_path(project)
-
- within('#content-body') { click_link 'Edit' }
- expect(page).to have_field :domain_dns, with: "#{domain.domain} CNAME #{domain.project.pages_subdomain}.#{Settings.pages.host}."
- end
- end
- end
- end
-
- it 'does not see anything to destroy' do
- visit project_pages_path(project)
-
- expect(page).to have_content('Configure pages')
- expect(page).not_to have_link('Remove pages')
- end
-
- describe 'project settings page' do
- it 'renders "Pages" tab' do
- visit edit_project_path(project)
-
- page.within '.nav-sidebar' do
- expect(page).to have_link('Pages')
- end
- end
-
- context 'when pages are disabled' do
- before do
- allow(Gitlab.config.pages).to receive(:enabled).and_return(false)
- end
-
- it 'does not render "Pages" tab' do
- visit edit_project_path(project)
-
- page.within '.nav-sidebar' do
- expect(page).not_to have_link('Pages')
- end
- end
- end
- end
- end
-
- describe 'HTTPS settings', :https_pages_enabled do
- before do
- project.namespace.update(owner: user)
-
- allow_any_instance_of(Project).to receive(:pages_deployed?) { true }
- end
-
- it 'tries to change the setting' do
- visit project_pages_path(project)
- expect(page).to have_content("Force HTTPS (requires valid certificates)")
-
- uncheck :project_pages_https_only
-
- click_button 'Save'
-
- expect(page).to have_text('Your changes have been saved')
- expect(page).not_to have_checked_field('project_pages_https_only')
- end
-
- context 'setting could not be updated' do
- let(:service) { instance_double('Projects::UpdateService') }
-
- before do
- allow(Projects::UpdateService).to receive(:new).and_return(service)
- allow(service).to receive(:execute).and_return(status: :error, message: 'Some error has occured')
- end
-
- it 'tries to change the setting' do
- visit project_pages_path(project)
-
- uncheck :project_pages_https_only
-
- click_button 'Save'
-
- expect(page).to have_text('Some error has occured')
- end
- end
-
- context 'non-HTTPS domain exists' do
- let(:project) { create(:project, pages_https_only: false) }
-
- before do
- create(:pages_domain, :without_key, :without_certificate, project: project)
- end
-
- it 'the setting is disabled' do
- visit project_pages_path(project)
-
- expect(page).to have_field(:project_pages_https_only, disabled: true)
- expect(page).to have_button('Save')
- end
- end
-
- context 'HTTPS pages are disabled', :https_pages_disabled do
- it 'the setting is unavailable' do
- visit project_pages_path(project)
-
- expect(page).not_to have_field(:project_pages_https_only)
- expect(page).not_to have_content('Force HTTPS (requires valid certificates)')
- expect(page).to have_button('Save')
- end
- end
- end
-
- describe 'Remove page' do
- let(:project) { create :project, :repository }
-
- context 'when pages are deployed' do
- let(:pipeline) do
- commit_sha = project.commit('HEAD').sha
-
- project.ci_pipelines.create(
- ref: 'HEAD',
- sha: commit_sha,
- source: :push,
- protected: false
- )
- end
-
- let(:ci_build) do
- create(
- :ci_build,
- project: project,
- pipeline: pipeline,
- ref: 'HEAD')
- end
-
- let!(:artifact) do
- create(:ci_job_artifact, :archive, :correct_checksum,
- file: fixture_file_upload(File.join('spec/fixtures/pages.zip')), job: ci_build)
- end
-
- let!(:metadata) do
- create(:ci_job_artifact, :metadata,
- file: fixture_file_upload(File.join('spec/fixtures/pages.zip.meta')), job: ci_build)
- end
-
- before do
- result = Projects::UpdatePagesService.new(project, ci_build).execute
- expect(result[:status]).to eq(:success)
- expect(project).to be_pages_deployed
- end
-
- it 'removes the pages', :sidekiq_inline do
- visit project_pages_path(project)
-
- expect(page).to have_link('Remove pages')
-
- accept_confirm { click_link 'Remove pages' }
-
- expect(page).to have_content('Pages were scheduled for removal')
- expect(project.reload.pages_deployed?).to be_falsey
- end
- end
- end
-end
-
-RSpec.describe 'Pages', :js do
- include LetsEncryptHelpers
-
- context 'when editing normally' do
- include_examples 'pages settings editing'
- end
-
- context 'when letsencrypt support is enabled' do
- before do
- stub_lets_encrypt_settings
- end
-
- include_examples 'pages settings editing'
- end
-end
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index ac3566fbbdd..e1d68a3f12e 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe 'Pipeline', :js do
sign_in(user)
project.add_role(user, role)
stub_feature_flags(graphql_pipeline_details: false)
+ stub_feature_flags(graphql_pipeline_details_users: false)
end
shared_context 'pipeline builds' do
@@ -625,20 +626,6 @@ RSpec.describe 'Pipeline', :js do
end
end
end
-
- context 'when FF dag_pipeline_tab is disabled' do
- before do
- stub_feature_flags(dag_pipeline_tab: false)
- visit_pipeline
- end
-
- it 'does not show DAG link' do
- expect(page).to have_link('Pipeline')
- expect(page).to have_link('Jobs')
- expect(page).not_to have_link('DAG')
- expect(page).to have_link('Failed Jobs')
- end
- end
end
context 'when user does not have access to read jobs' do
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index 450524b8d70..e0a0591fe6b 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe 'Pipelines', :js do
before do
sign_in(user)
stub_feature_flags(graphql_pipeline_details: false)
+ stub_feature_flags(graphql_pipeline_details_users: false)
project.add_developer(user)
project.update!(auto_devops_attributes: { enabled: false })
end
@@ -287,23 +288,23 @@ RSpec.describe 'Pipelines', :js do
end
it 'has a dropdown with play button' do
- expect(page).to have_selector('.dropdown-new.btn.btn-default .icon-play')
+ expect(page).to have_selector('[data-testid="pipelines-manual-actions-dropdown"] [data-testid="play-icon"]')
end
it 'has link to the manual action' do
- find('.js-pipeline-dropdown-manual-actions').click
+ find('[data-testid="pipelines-manual-actions-dropdown"]').click
expect(page).to have_button('manual build')
end
context 'when manual action was played' do
before do
- find('.js-pipeline-dropdown-manual-actions').click
+ find('[data-testid="pipelines-manual-actions-dropdown"]').click
click_button('manual build')
end
it 'enqueues manual action job' do
- expect(page).to have_selector('.js-pipeline-dropdown-manual-actions:disabled')
+ expect(page).to have_selector('[data-testid="pipelines-manual-actions-dropdown"] .gl-dropdown-toggle:disabled')
end
end
end
@@ -321,11 +322,11 @@ RSpec.describe 'Pipelines', :js do
end
it 'has a dropdown for actionable jobs' do
- expect(page).to have_selector('.dropdown-new.btn.btn-default .icon-play')
+ expect(page).to have_selector('[data-testid="pipelines-manual-actions-dropdown"] [data-testid="play-icon"]')
end
it "has link to the delayed job's action" do
- find('.js-pipeline-dropdown-manual-actions').click
+ find('[data-testid="pipelines-manual-actions-dropdown"]').click
time_diff = [0, delayed_job.scheduled_at - Time.now].max
expect(page).to have_button('delayed job 1')
@@ -341,7 +342,7 @@ RSpec.describe 'Pipelines', :js do
end
it "shows 00:00:00 as the remaining time" do
- find('.js-pipeline-dropdown-manual-actions').click
+ find('[data-testid="pipelines-manual-actions-dropdown"]').click
expect(page).to have_content("00:00:00")
end
@@ -349,7 +350,7 @@ RSpec.describe 'Pipelines', :js do
context 'when user played a delayed job immediately' do
before do
- find('.js-pipeline-dropdown-manual-actions').click
+ find('[data-testid="pipelines-manual-actions-dropdown"]').click
page.accept_confirm { click_button('delayed job 1') }
wait_for_requests
end
@@ -517,56 +518,75 @@ RSpec.describe 'Pipelines', :js do
end
end
- context 'mini pipeline graph' do
- let!(:build) do
- create(:ci_build, :pending, pipeline: pipeline,
- stage: 'build',
- name: 'build')
- end
-
- before do
- visit_project_pipelines
- end
+ shared_examples 'mini pipeline renders' do |ci_mini_pipeline_gl_dropdown_enabled|
+ context 'mini pipeline graph' do
+ let!(:build) do
+ create(:ci_build, :pending, pipeline: pipeline,
+ stage: 'build',
+ name: 'build')
+ end
- it 'renders a mini pipeline graph' do
- expect(page).to have_selector('[data-testid="widget-mini-pipeline-graph"]')
- expect(page).to have_selector('.js-builds-dropdown-button')
- end
+ before do
+ stub_feature_flags(ci_mini_pipeline_gl_dropdown: ci_mini_pipeline_gl_dropdown_enabled)
+ visit_project_pipelines
+ end
- context 'when clicking a stage badge' do
- it 'opens a dropdown' do
- find('.js-builds-dropdown-button').click
+ let_it_be(:dropdown_toggle_selector) do
+ if ci_mini_pipeline_gl_dropdown_enabled
+ '[data-testid="mini-pipeline-graph-dropdown"] .dropdown-toggle'
+ else
+ '[data-testid="mini-pipeline-graph-dropdown-toggle"]'
+ end
+ end
- expect(page).to have_link build.name
+ it 'renders a mini pipeline graph' do
+ expect(page).to have_selector('[data-testid="widget-mini-pipeline-graph"]')
+ expect(page).to have_selector(dropdown_toggle_selector)
end
- it 'is possible to cancel pending build' do
- find('.js-builds-dropdown-button').click
- find('.js-ci-action').click
- wait_for_requests
+ context 'when clicking a stage badge' do
+ it 'opens a dropdown' do
+ find(dropdown_toggle_selector).click
- expect(build.reload).to be_canceled
- end
- end
+ expect(page).to have_link build.name
+ end
- context 'for a failed pipeline' do
- let!(:build) do
- create(:ci_build, :failed, pipeline: pipeline,
- stage: 'build',
- name: 'build')
+ it 'is possible to cancel pending build' do
+ find(dropdown_toggle_selector).click
+ find('.js-ci-action').click
+ wait_for_requests
+
+ expect(build.reload).to be_canceled
+ end
end
- it 'displays the failure reason' do
- find('.js-builds-dropdown-button').click
+ context 'for a failed pipeline' do
+ let!(:build) do
+ create(:ci_build, :failed, pipeline: pipeline,
+ stage: 'build',
+ name: 'build')
+ end
+
+ it 'displays the failure reason' do
+ find(dropdown_toggle_selector).click
- within('.js-builds-dropdown-list') do
- build_element = page.find('.mini-pipeline-graph-dropdown-item')
- expect(build_element['title']).to eq('build - failed - (unknown failure)')
+ within('.js-builds-dropdown-list') do
+ build_element = page.find('.mini-pipeline-graph-dropdown-item')
+ expect(build_element['title']).to eq('build - failed - (unknown failure)')
+ end
end
end
end
end
+ context 'with ci_mini_pipeline_gl_dropdown disabled' do
+ it_behaves_like "mini pipeline renders", false
+ end
+
+ context 'with ci_mini_pipeline_gl_dropdown enabled' do
+ it_behaves_like "mini pipeline renders", true
+ end
+
context 'with pagination' do
before do
allow(Ci::Pipeline).to receive(:default_per_page).and_return(1)
diff --git a/spec/features/projects/services/user_activates_slack_slash_command_spec.rb b/spec/features/projects/services/user_activates_slack_slash_command_spec.rb
index 3994f55caee..4dfd4416eeb 100644
--- a/spec/features/projects/services/user_activates_slack_slash_command_spec.rb
+++ b/spec/features/projects/services/user_activates_slack_slash_command_spec.rb
@@ -40,4 +40,8 @@ RSpec.describe 'Slack slash commands', :js do
value = find_field('url').value
expect(value).to match("api/v4/projects/#{project.id}/services/slack_slash_commands/trigger")
end
+
+ it 'shows help content' do
+ expect(page).to have_content('This service allows users to perform common operations on this project by entering slash commands in Slack.')
+ end
end
diff --git a/spec/features/projects/settings/pipelines_settings_spec.rb b/spec/features/projects/settings/pipelines_settings_spec.rb
index c087237fd7c..39c4315bf0f 100644
--- a/spec/features/projects/settings/pipelines_settings_spec.rb
+++ b/spec/features/projects/settings/pipelines_settings_spec.rb
@@ -46,7 +46,7 @@ RSpec.describe "Projects > Settings > Pipelines settings" do
it 'updates auto_cancel_pending_pipelines' do
visit project_settings_ci_cd_path(project)
- page.check('Auto-cancel redundant, pending pipelines')
+ page.check('Auto-cancel redundant pipelines')
page.within '#js-general-pipeline-settings' do
click_on 'Save changes'
end
diff --git a/spec/features/projects/settings/repository_settings_spec.rb b/spec/features/projects/settings/repository_settings_spec.rb
index 3e520142117..2f257d299d8 100644
--- a/spec/features/projects/settings/repository_settings_spec.rb
+++ b/spec/features/projects/settings/repository_settings_spec.rb
@@ -63,7 +63,7 @@ RSpec.describe 'Projects > Settings > Repository settings' do
click_button 'Add key'
expect(page).to have_content('new_deploy_key')
- expect(page).to have_content('Write access allowed')
+ expect(page).to have_content('Grant write permissions to this key')
end
it 'edit an existing deploy key' do
@@ -77,7 +77,7 @@ RSpec.describe 'Projects > Settings > Repository settings' do
click_button 'Save changes'
expect(page).to have_content('updated_deploy_key')
- expect(page).to have_content('Write access allowed')
+ expect(page).to have_content('Grant write permissions to this key')
end
it 'edit an existing public deploy key to be writable' do
@@ -90,7 +90,7 @@ RSpec.describe 'Projects > Settings > Repository settings' do
click_button 'Save changes'
expect(page).to have_content('public_deploy_key')
- expect(page).to have_content('Write access allowed')
+ expect(page).to have_content('Grant write permissions to this key')
end
it 'edit a deploy key from projects user has access to' do
diff --git a/spec/features/projects/settings/user_manages_project_members_spec.rb b/spec/features/projects/settings/user_manages_project_members_spec.rb
index 726b8fb6840..a4abdf9f571 100644
--- a/spec/features/projects/settings/user_manages_project_members_spec.rb
+++ b/spec/features/projects/settings/user_manages_project_members_spec.rb
@@ -11,6 +11,8 @@ RSpec.describe 'Projects > Settings > User manages project members' do
let(:user_mike) { create(:user, name: 'Mike') }
before do
+ stub_feature_flags(vue_project_members_list: false)
+
project.add_maintainer(user)
project.add_developer(user_dmitriy)
sign_in(user)
@@ -37,6 +39,8 @@ RSpec.describe 'Projects > Settings > User manages project members' do
end
it 'imports a team from another project' do
+ stub_feature_flags(invite_members_group_modal: false)
+
project2.add_maintainer(user)
project2.add_reporter(user_mike)
diff --git a/spec/features/projects/show/user_manages_notifications_spec.rb b/spec/features/projects/show/user_manages_notifications_spec.rb
index d444ea27d35..5f7d9b0963b 100644
--- a/spec/features/projects/show/user_manages_notifications_spec.rb
+++ b/spec/features/projects/show/user_manages_notifications_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'Projects > Show > User manages notifications', :js do
let(:project) { create(:project, :public, :repository) }
before do
+ stub_feature_flags(vue_notification_dropdown: false)
sign_in(project.owner)
end
diff --git a/spec/features/projects/show/user_sees_git_instructions_spec.rb b/spec/features/projects/show/user_sees_git_instructions_spec.rb
index febdb70de86..e6157887c12 100644
--- a/spec/features/projects/show/user_sees_git_instructions_spec.rb
+++ b/spec/features/projects/show/user_sees_git_instructions_spec.rb
@@ -5,6 +5,13 @@ require 'spec_helper'
RSpec.describe 'Projects > Show > User sees Git instructions' do
let_it_be(:user) { create(:user) }
+ before do
+ # Reset user notification settings between examples to prevent
+ # validation failure on NotificationSetting.
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/299822#note_492817174
+ user.notification_settings.reset
+ end
+
shared_examples_for 'redirects to the sign in page' do
it 'redirects to the sign in page' do
expect(current_path).to eq(new_user_session_path)
diff --git a/spec/features/search/user_searches_for_commits_spec.rb b/spec/features/search/user_searches_for_commits_spec.rb
index b860cd08e64..1a882050126 100644
--- a/spec/features/search/user_searches_for_commits_spec.rb
+++ b/spec/features/search/user_searches_for_commits_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User searches for commits' do
+RSpec.describe 'User searches for commits', :js do
let(:project) { create(:project, :repository) }
let(:sha) { '6d394385cf567f80a8fd85055db1ab4c5295806f' }
let(:user) { create(:user) }
@@ -41,7 +41,7 @@ RSpec.describe 'User searches for commits' do
submit_search('See merge request')
select_search_scope('Commits')
- expect(page).to have_selector('.commit-row-description', count: 9)
+ expect(page).to have_selector('.commit-row-description', visible: false, count: 9)
end
end
end
diff --git a/spec/features/search/user_searches_for_issues_spec.rb b/spec/features/search/user_searches_for_issues_spec.rb
index e253b9f2f7a..828e478d701 100644
--- a/spec/features/search/user_searches_for_issues_spec.rb
+++ b/spec/features/search/user_searches_for_issues_spec.rb
@@ -75,7 +75,7 @@ RSpec.describe 'User searches for issues', :js do
expect(page.all('.search-result-row').last).to have_link(issue1.title)
end
- find('.reverse-sort-btn').click
+ find('[data-testid="sort-highest-icon"]').click
page.within('.results') do
expect(page.all('.search-result-row').first).to have_link(issue1.title)
diff --git a/spec/features/search/user_searches_for_merge_requests_spec.rb b/spec/features/search/user_searches_for_merge_requests_spec.rb
index 21e8075739f..7271716644b 100644
--- a/spec/features/search/user_searches_for_merge_requests_spec.rb
+++ b/spec/features/search/user_searches_for_merge_requests_spec.rb
@@ -5,8 +5,14 @@ require 'spec_helper'
RSpec.describe 'User searches for merge requests', :js do
let(:user) { create(:user) }
let(:project) { create(:project, namespace: user.namespace) }
- let!(:merge_request1) { create(:merge_request, title: 'Foo', source_project: project, target_project: project) }
- let!(:merge_request2) { create(:merge_request, :simple, title: 'Bar', source_project: project, target_project: project) }
+ let!(:merge_request1) { create(:merge_request, title: 'Merge Request Foo', source_project: project, target_project: project, created_at: 1.hour.ago) }
+ let!(:merge_request2) { create(:merge_request, :simple, title: 'Merge Request Bar', source_project: project, target_project: project) }
+
+ def search_for_mr(search)
+ fill_in('dashboard_search', with: search)
+ find('.btn-search').click
+ select_search_scope('Merge requests')
+ end
before do
project.add_maintainer(user)
@@ -18,9 +24,7 @@ RSpec.describe 'User searches for merge requests', :js do
include_examples 'top right search form'
it 'finds a merge request' do
- fill_in('dashboard_search', with: merge_request1.title)
- find('.btn-search').click
- select_search_scope('Merge requests')
+ search_for_mr(merge_request1.title)
page.within('.results') do
expect(page).to have_link(merge_request1.title)
@@ -28,6 +32,22 @@ RSpec.describe 'User searches for merge requests', :js do
end
end
+ it 'sorts by created date' do
+ search_for_mr('Merge Request')
+
+ page.within('.results') do
+ expect(page.all('.search-result-row').first).to have_link(merge_request2.title)
+ expect(page.all('.search-result-row').last).to have_link(merge_request1.title)
+ end
+
+ find('[data-testid="sort-highest-icon"]').click
+
+ page.within('.results') do
+ expect(page.all('.search-result-row').first).to have_link(merge_request1.title)
+ expect(page.all('.search-result-row').last).to have_link(merge_request2.title)
+ end
+ end
+
context 'when on a project page' do
it 'finds a merge request' do
find('[data-testid="project-filter"]').click
@@ -38,9 +58,7 @@ RSpec.describe 'User searches for merge requests', :js do
click_on(project.full_name)
end
- fill_in('dashboard_search', with: merge_request1.title)
- find('.btn-search').click
- select_search_scope('Merge requests')
+ search_for_mr(merge_request1.title)
page.within('.results') do
expect(page).to have_link(merge_request1.title)
diff --git a/spec/features/search/user_searches_for_projects_spec.rb b/spec/features/search/user_searches_for_projects_spec.rb
index b64909dd42f..e34ae031679 100644
--- a/spec/features/search/user_searches_for_projects_spec.rb
+++ b/spec/features/search/user_searches_for_projects_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User searches for projects' do
+RSpec.describe 'User searches for projects', :js do
let!(:project) { create(:project, :public, name: 'Shop') }
context 'when signed out' do
diff --git a/spec/features/task_lists_spec.rb b/spec/features/task_lists_spec.rb
index 0f8daaf8e15..e17521e1d02 100644
--- a/spec/features/task_lists_spec.rb
+++ b/spec/features/task_lists_spec.rb
@@ -69,7 +69,13 @@ RSpec.describe 'Task Lists', :js do
wait_for_requests
expect(page).to have_selector(".md .task-list .task-list-item .task-list-item-checkbox")
- expect(page).to have_selector('.btn-close')
+ end
+
+ it_behaves_like 'page with comment and close button', 'Close issue' do
+ def setup
+ visit_issue(project, issue)
+ wait_for_requests
+ end
end
it 'is only editable by author' do
diff --git a/spec/features/u2f_spec.rb b/spec/features/u2f_spec.rb
index 5762a54a717..eed67e3ac78 100644
--- a/spec/features/u2f_spec.rb
+++ b/spec/features/u2f_spec.rb
@@ -39,6 +39,11 @@ RSpec.describe 'Using U2F (Universal 2nd Factor) Devices for Authentication', :j
end
it 'allows the same device to be registered for multiple users' do
+ # U2F specs will be removed after the WebAuthn migration is completed
+ pending('FakeU2fDevice has static key handle, '\
+ 'leading to duplicate credential_xid for WebAuthn during migration, '\
+ 'resulting in unique constraint violation')
+
# First user
visit profile_account_path
manage_two_factor_authentication
@@ -148,6 +153,11 @@ RSpec.describe 'Using U2F (Universal 2nd Factor) Devices for Authentication', :j
describe "and also the current user" do
it "allows logging in with that particular device" do
+ # U2F specs will be removed after the WebAuthn migration is completed
+ pending('FakeU2fDevice has static key handle, '\
+ 'leading to duplicate credential_xid for WebAuthn during migration, '\
+ 'resulting in unique constraint violation')
+
# Register current user with the same U2F device
current_user = gitlab_sign_in(:user)
current_user.update_attribute(:otp_required_for_login, true)
diff --git a/spec/features/user_sees_revert_modal_spec.rb b/spec/features/user_sees_revert_modal_spec.rb
index 331f51dad95..5edf8358244 100644
--- a/spec/features/user_sees_revert_modal_spec.rb
+++ b/spec/features/user_sees_revert_modal_spec.rb
@@ -7,26 +7,33 @@ RSpec.describe 'Merge request > User sees revert modal', :js, :sidekiq_might_not
let(:user) { project.creator }
let(:merge_request) { create(:merge_request, source_project: project) }
+ shared_examples 'showing the revert modal' do
+ it 'shows the revert modal' do
+ click_button('Revert')
+
+ page.within('[data-testid="modal-commit"]') do
+ expect(page).to have_content 'Revert this merge request'
+ end
+ end
+ end
+
before do
sign_in(user)
visit(project_merge_request_path(project, merge_request))
click_button('Merge')
wait_for_requests
-
- visit(merge_request_path(merge_request))
- click_link('Revert')
end
- it 'shows the revert modal' do
- page.within('.modal-header') do
- expect(page).to have_content 'Revert this merge request'
- end
+ context 'without a page reload after merge, validating the JS loaded correctly' do
+ it_behaves_like 'showing the revert modal'
end
- it 'closes the revert modal with escape keypress' do
- find('#modal-revert-commit').send_keys(:escape)
+ context 'with a page reload, validating the JS loaded correctly' do
+ before do
+ visit(merge_request_path(merge_request))
+ end
- expect(page).not_to have_selector('#modal-revert-commit', visible: true)
+ it_behaves_like 'showing the revert modal'
end
end
diff --git a/spec/features/webauthn_spec.rb b/spec/features/webauthn_spec.rb
index 2ffb6bb3477..4eebc9d2c1e 100644
--- a/spec/features/webauthn_spec.rb
+++ b/spec/features/webauthn_spec.rb
@@ -129,6 +129,10 @@ RSpec.describe 'Using WebAuthn Devices for Authentication', :js do
end
it 'falls back to U2F' do
+ # WebAuthn registration is automatically created with the U2fRegistration because of the after_create callback
+ # so we need to delete it
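+ # (For context, the callback is assumed to look roughly like
+ # `after_create :create_webauthn_registration` on U2fRegistration;
+ # the actual implementation lives in the model, not in this spec.)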
+ WebauthnRegistration.delete_all
+
gitlab_sign_in(user)
u2f_device.respond_to_u2f_authentication
diff --git a/spec/features/whats_new_spec.rb b/spec/features/whats_new_spec.rb
new file mode 100644
index 00000000000..7c5625486f5
--- /dev/null
+++ b/spec/features/whats_new_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe "renders a `whats new` dropdown item", :js do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ it 'shows notification dot and count and removes it once viewed' do
+ visit root_dashboard_path
+
+ page.within '.header-help' do
+ expect(page).to have_selector('.notification-dot', visible: true)
+
+ find('.header-help-dropdown-toggle').click
+
+ expect(page).to have_button(text: "What's new")
+ expect(page).to have_selector('.js-whats-new-notification-count')
+
+ find('button', text: "What's new").click
+ end
+
+ find('.whats-new-drawer .gl-drawer-close-button').click
+ find('.header-help-dropdown-toggle').click
+
+ page.within '.header-help' do
+ expect(page).not_to have_selector('.notification-dot', visible: true)
+ expect(page).to have_button(text: "What's new")
+ expect(page).not_to have_selector('.js-whats-new-notification-count')
+ end
+ end
+end
diff --git a/spec/finders/autocomplete/users_finder_spec.rb b/spec/finders/autocomplete/users_finder_spec.rb
index 357b6dfcea2..28bd7e12916 100644
--- a/spec/finders/autocomplete/users_finder_spec.rb
+++ b/spec/finders/autocomplete/users_finder_spec.rb
@@ -118,5 +118,10 @@ RSpec.describe Autocomplete::UsersFinder do
it { is_expected.to match_array([user1, external_user, omniauth_user, current_user]) }
end
+
+ it 'preloads the status association' do
+ associations = subject.map { |user| user.association(:status) }
+ expect(associations).to all(be_loaded)
+ end
end
end
diff --git a/spec/finders/merge_request/metrics_finder_spec.rb b/spec/finders/merge_request/metrics_finder_spec.rb
new file mode 100644
index 00000000000..ea039462e66
--- /dev/null
+++ b/spec/finders/merge_request/metrics_finder_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequest::MetricsFinder do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:merge_request_not_merged) { create(:merge_request, :unique_branches, source_project: project) }
+ let_it_be(:merged_at) { Time.new(2020, 5, 1) }
+ let_it_be(:merge_request_merged) do
+ create(:merge_request, :unique_branches, :merged, source_project: project).tap do |mr|
+ mr.metrics.update!(merged_at: merged_at)
+ end
+ end
+
+ let(:params) do
+ {
+ target_project: project,
+ merged_after: merged_at - 10.days,
+ merged_before: merged_at + 10.days
+ }
+ end
+
+ subject { described_class.new(current_user, params).execute.to_a }
+
+ context 'when target project is missing' do
+ before do
+ params.delete(:target_project)
+ end
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'when the user is not part of the project' do
+ it { is_expected.to be_empty }
+ end
+
+ context 'when user is part of the project' do
+ before do
+ project.add_developer(current_user)
+ end
+
+ it 'returns merge request records' do
+ is_expected.to eq([merge_request_merged.metrics])
+ end
+
+ it 'excludes records that are not merged' do
+ is_expected.not_to eq([merge_request_not_merged.metrics])
+ end
+
+ context 'when only merged_before is given' do
+ before do
+ params.delete(:merged_after)
+ end
+
+ it { is_expected.to eq([merge_request_merged.metrics]) }
+ end
+
+ context 'when only merged_after is given' do
+ before do
+ params.delete(:merged_before)
+ end
+
+ it { is_expected.to eq([merge_request_merged.metrics]) }
+ end
+
+ context 'when no records match the date range' do
+ before do
+ params[:merged_before] = merged_at - 1.year
+ params[:merged_after] = merged_at - 2.years
+ end
+
+ it { is_expected.to be_empty }
+ end
+ end
+end
diff --git a/spec/finders/merge_requests/oldest_per_commit_finder_spec.rb b/spec/finders/merge_requests/oldest_per_commit_finder_spec.rb
new file mode 100644
index 00000000000..4e9d021fa5d
--- /dev/null
+++ b/spec/finders/merge_requests/oldest_per_commit_finder_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::OldestPerCommitFinder do
+ describe '#execute' do
+ it 'returns a Hash mapping commit SHAs to their oldest merge requests' do
+ project = create(:project)
+ mr1 = create(:merge_request, :merged, target_project: project)
+ mr2 = create(:merge_request, :merged, target_project: project)
+ mr1_diff = create(:merge_request_diff, merge_request: mr1)
+ mr2_diff = create(:merge_request_diff, merge_request: mr2)
+ sha1 = Digest::SHA1.hexdigest('foo')
+ sha2 = Digest::SHA1.hexdigest('bar')
+
+ create(:merge_request_diff_commit, merge_request_diff: mr1_diff, sha: sha1)
+ create(:merge_request_diff_commit, merge_request_diff: mr2_diff, sha: sha1)
+ create(
+ :merge_request_diff_commit,
+ merge_request_diff: mr2_diff,
+ sha: sha2,
+ relative_order: 1
+ )
+
+ commits = [double(:commit, id: sha1), double(:commit, id: sha2)]
+
+ expect(described_class.new(project).execute(commits)).to eq(
+ sha1 => mr1,
+ sha2 => mr2
+ )
+ end
+
+ it 'skips merge requests that are not merged' do
+ mr = create(:merge_request)
+ mr_diff = create(:merge_request_diff, merge_request: mr)
+ sha = Digest::SHA1.hexdigest('foo')
+
+ create(:merge_request_diff_commit, merge_request_diff: mr_diff, sha: sha)
+
+ commits = [double(:commit, id: sha)]
+
+ expect(described_class.new(mr.target_project).execute(commits))
+ .to be_empty
+ end
+ end
+end
diff --git a/spec/finders/repositories/commits_with_trailer_finder_spec.rb b/spec/finders/repositories/commits_with_trailer_finder_spec.rb
new file mode 100644
index 00000000000..0c457aae340
--- /dev/null
+++ b/spec/finders/repositories/commits_with_trailer_finder_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Repositories::CommitsWithTrailerFinder do
+ let(:project) { create(:project, :repository) }
+
+ describe '#each_page' do
+ it 'only yields commits with the given trailer' do
+ finder = described_class.new(
+ project: project,
+ from: '570e7b2abdd848b95f2f578043fc23bd6f6fd24d',
+ to: 'c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd'
+ )
+
+ commits = finder.each_page('Signed-off-by').to_a.flatten
+
+ expect(commits.length).to eq(1)
+ expect(commits.first.id).to eq('5937ac0a7beb003549fc5fd26fc247adbce4a52e')
+ expect(commits.first.trailers).to eq(
+ 'Signed-off-by' => 'Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>'
+ )
+ end
+
+ it 'supports pagination of commits' do
+ finder = described_class.new(
+ project: project,
+ from: 'c1acaa58bbcbc3eafe538cb8274ba387047b69f8',
+ to: '5937ac0a7beb003549fc5fd26fc247adbce4a52e',
+ per_page: 1
+ )
+
+ commits = finder.each_page('Signed-off-by')
+
+ expect(commits.count).to eq(4)
+ end
+ end
+end
diff --git a/spec/finders/terraform/states_finder_spec.rb b/spec/finders/terraform/states_finder_spec.rb
new file mode 100644
index 00000000000..260e5f4818f
--- /dev/null
+++ b/spec/finders/terraform/states_finder_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Terraform::StatesFinder do
+ describe '#execute' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:state_1) { create(:terraform_state, project: project) }
+ let_it_be(:state_2) { create(:terraform_state, project: project) }
+
+ let(:user) { project.creator }
+
+ subject { described_class.new(project, user).execute }
+
+ it { is_expected.to contain_exactly(state_1, state_2) }
+
+ context 'user does not have permission' do
+ let(:user) { create(:user) }
+
+ before do
+ project.add_guest(user)
+ end
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'filtering by name' do
+ let(:params) { { name: name_param } }
+
+ subject { described_class.new(project, user, params: params).execute }
+
+ context 'name does not match' do
+ let(:name_param) { 'other-name' }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'name does match' do
+ let(:name_param) { state_1.name }
+
+ it { is_expected.to contain_exactly(state_1) }
+ end
+ end
+ end
+end
diff --git a/spec/finders/user_recent_events_finder_spec.rb b/spec/finders/user_recent_events_finder_spec.rb
index ddba9b595a4..afebff5b5c9 100644
--- a/spec/finders/user_recent_events_finder_spec.rb
+++ b/spec/finders/user_recent_events_finder_spec.rb
@@ -51,6 +51,20 @@ RSpec.describe UserRecentEventsFinder do
end
end
+ describe 'issue activity events' do
+ let(:issue) { create(:issue, project: public_project) }
+ let(:note) { create(:note_on_issue, noteable: issue, project: public_project) }
+ let!(:event_a) { create(:event, :commented, target: note, author: project_owner) }
+ let!(:event_b) { create(:event, :closed, target: issue, author: project_owner) }
+
+ it 'includes all issue related events', :aggregate_failures do
+ events = finder.execute
+
+ expect(events).to include(event_a)
+ expect(events).to include(event_b)
+ end
+ end
+
context 'limits' do
before do
stub_const("#{described_class}::DEFAULT_LIMIT", 1)
diff --git a/spec/fixtures/api/schemas/entities/codequality_mr_diff_report.json b/spec/fixtures/api/schemas/entities/codequality_mr_diff_report.json
new file mode 100644
index 00000000000..63e0c68e9cd
--- /dev/null
+++ b/spec/fixtures/api/schemas/entities/codequality_mr_diff_report.json
@@ -0,0 +1,21 @@
+{
+ "type": "object",
+ "description": "The schema used to display codequality report in mr diff",
+ "required": ["files"],
+ "properties": {
+ "patternProperties": {
+ ".*.": {
+ "type": "array",
+ "items": {
+ "required": ["line", "description", "severity"],
+ "properties": {
+ "line": { "type": "integer" },
+ "description": { "type": "string" },
+ "severity": { "type": "string" }
+ },
+ "additionalProperties": false
+ }
+ }
+ }
+ }
+}
diff --git a/spec/fixtures/api/schemas/entities/member.json b/spec/fixtures/api/schemas/entities/member.json
index e8b40745803..03b1872632e 100644
--- a/spec/fixtures/api/schemas/entities/member.json
+++ b/spec/fixtures/api/schemas/entities/member.json
@@ -9,7 +9,8 @@
"source",
"valid_roles",
"can_update",
- "can_remove"
+ "can_remove",
+ "is_direct_member"
],
"properties": {
"id": { "type": "integer" },
@@ -18,6 +19,7 @@
"requested_at": { "type": ["date-time", "null"] },
"can_update": { "type": "boolean" },
"can_remove": { "type": "boolean" },
+ "is_direct_member": { "type": "boolean" },
"access_level": {
"type": "object",
"required": ["integer_value", "string_value"],
diff --git a/spec/fixtures/api/schemas/entities/member_user.json b/spec/fixtures/api/schemas/entities/member_user.json
index 983cdb7b9d9..ebd26bfaaaa 100644
--- a/spec/fixtures/api/schemas/entities/member_user.json
+++ b/spec/fixtures/api/schemas/entities/member_user.json
@@ -9,6 +9,7 @@
"web_url": { "type": "string" },
"blocked": { "type": "boolean" },
"two_factor_enabled": { "type": "boolean" },
+ "availability": { "type": ["string", "null"] },
"status": {
"type": "object",
"required": ["emoji"],
diff --git a/spec/fixtures/api/schemas/graphql/packages/package_composer_details.json b/spec/fixtures/api/schemas/graphql/packages/package_composer_details.json
deleted file mode 100644
index bcf64a6e567..00000000000
--- a/spec/fixtures/api/schemas/graphql/packages/package_composer_details.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
- "type": "object",
- "allOf": [{ "$ref": "./package_details.json" }],
- "properties": {
- "target_sha": {
- "type": "string"
- },
- "composer_json": {
- "type": "object"
- }
- }
-}
diff --git a/spec/fixtures/api/schemas/graphql/packages/package_composer_metadata.json b/spec/fixtures/api/schemas/graphql/packages/package_composer_metadata.json
new file mode 100644
index 00000000000..db9b25889be
--- /dev/null
+++ b/spec/fixtures/api/schemas/graphql/packages/package_composer_metadata.json
@@ -0,0 +1,21 @@
+{
+ "type": "object",
+ "additionalProperties": false,
+ "required": ["targetSha", "composerJson"],
+ "properties": {
+ "targetSha": {
+ "type": "string"
+ },
+ "composerJson": {
+ "type": "object",
+ "additionalProperties": false,
+ "required": ["name", "type", "license", "version"],
+ "properties": {
+ "name": { "type": "string" },
+ "type": { "type": "string" },
+ "license": { "type": "string" },
+ "version": { "type": "string" }
+ }
+ }
+ }
+}
diff --git a/spec/fixtures/api/schemas/graphql/packages/package_details.json b/spec/fixtures/api/schemas/graphql/packages/package_details.json
index 4f90285183c..d2e2e65db54 100644
--- a/spec/fixtures/api/schemas/graphql/packages/package_details.json
+++ b/spec/fixtures/api/schemas/graphql/packages/package_details.json
@@ -1,5 +1,10 @@
{
"type": "object",
+ "additionalProperties": false,
+ "required": [
+ "id", "name", "createdAt", "updatedAt", "version", "packageType",
+ "project", "tags", "pipelines", "versions", "metadata"
+ ],
"properties": {
"id": {
"type": "string"
@@ -16,21 +21,46 @@
"version": {
"type": ["string", "null"]
},
- "package_type": {
+ "packageType": {
"type": ["string"],
"enum": ["MAVEN", "NPM", "CONAN", "NUGET", "PYPI", "COMPOSER", "GENERIC", "GOLANG", "DEBIAN"]
},
"tags": {
- "type": "object"
+ "type": "object",
+ "additionalProperties": false,
+ "properties": {
+ "pageInfo": { "type": "object" },
+ "edges": { "type": "array" },
+ "nodes": { "type": "array" }
+ }
},
"project": {
"type": "object"
},
"pipelines": {
- "type": "object"
+ "type": "object",
+ "additionalProperties": false,
+ "properties": {
+ "pageInfo": { "type": "object" },
+ "count": { "type": "integer" },
+ "edges": { "type": "array" },
+ "nodes": { "type": "array" }
+ }
},
"versions": {
- "type": "object"
+ "type": "object",
+ "additionalProperties": false,
+ "properties": {
+ "pageInfo": { "type": "object" },
+ "edges": { "type": "array" },
+ "nodes": { "type": "array" }
+ }
+ },
+ "metadata": {
+ "anyOf": [
+ { "$ref": "./package_composer_metadata.json" },
+ { "type": "null" }
+ ]
}
}
}
diff --git a/spec/fixtures/api/schemas/group_group_links.json b/spec/fixtures/api/schemas/group_group_links.json
deleted file mode 100644
index f8b4e7f035b..00000000000
--- a/spec/fixtures/api/schemas/group_group_links.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "type": "array",
- "items": {
- "$ref": "entities/group_group_link.json"
- }
-}
diff --git a/spec/fixtures/api/schemas/group_link/group_group_link.json b/spec/fixtures/api/schemas/group_link/group_group_link.json
new file mode 100644
index 00000000000..bfca5c885e3
--- /dev/null
+++ b/spec/fixtures/api/schemas/group_link/group_group_link.json
@@ -0,0 +1,16 @@
+{
+ "type": "object",
+ "allOf": [
+ { "$ref": "group_link.json" },
+ {
+ "required": [
+ "can_update",
+ "can_remove"
+ ],
+ "properties": {
+ "can_update": { "type": "boolean" },
+ "can_remove": { "type": "boolean" }
+ }
+ }
+ ]
+}
diff --git a/spec/fixtures/api/schemas/group_link/group_group_links.json b/spec/fixtures/api/schemas/group_link/group_group_links.json
new file mode 100644
index 00000000000..2c0bf20f524
--- /dev/null
+++ b/spec/fixtures/api/schemas/group_link/group_group_links.json
@@ -0,0 +1,6 @@
+{
+ "type": "array",
+ "items": {
+ "$ref": "group_group_link.json"
+ }
+}
diff --git a/spec/fixtures/api/schemas/entities/group_group_link.json b/spec/fixtures/api/schemas/group_link/group_link.json
index bf94bbb3ce4..300790728a8 100644
--- a/spec/fixtures/api/schemas/entities/group_group_link.json
+++ b/spec/fixtures/api/schemas/group_link/group_link.json
@@ -4,8 +4,6 @@
"id",
"created_at",
"expires_at",
- "can_update",
- "can_remove",
"access_level",
"valid_roles"
],
@@ -13,15 +11,14 @@
"id": { "type": "integer" },
"created_at": { "type": "date-time" },
"expires_at": { "type": ["date-time", "null"] },
- "can_update": { "type": "boolean" },
- "can_remove": { "type": "boolean" },
"access_level": {
"type": "object",
"required": ["integer_value", "string_value"],
"properties": {
"integer_value": { "type": "integer" },
"string_value": { "type": "string" }
- }
+ },
+ "additionalProperties": false
},
"valid_roles": { "type": "object" },
"shared_with_group": {
@@ -34,7 +31,8 @@
"full_path": { "type": "string" },
"avatar_url": { "type": ["string", "null"] },
"web_url": { "type": "string" }
- }
+ },
+ "additionalProperties": false
}
}
}
diff --git a/spec/fixtures/api/schemas/group_link/project_group_link.json b/spec/fixtures/api/schemas/group_link/project_group_link.json
new file mode 100644
index 00000000000..bfca5c885e3
--- /dev/null
+++ b/spec/fixtures/api/schemas/group_link/project_group_link.json
@@ -0,0 +1,16 @@
+{
+ "type": "object",
+ "allOf": [
+ { "$ref": "group_link.json" },
+ {
+ "required": [
+ "can_update",
+ "can_remove"
+ ],
+ "properties": {
+ "can_update": { "type": "boolean" },
+ "can_remove": { "type": "boolean" }
+ }
+ }
+ ]
+}
diff --git a/spec/fixtures/api/schemas/group_link/project_group_links.json b/spec/fixtures/api/schemas/group_link/project_group_links.json
new file mode 100644
index 00000000000..fc024d67f36
--- /dev/null
+++ b/spec/fixtures/api/schemas/group_link/project_group_links.json
@@ -0,0 +1,6 @@
+{
+ "type": "array",
+ "items": {
+ "$ref": "project_group_link.json"
+ }
+}
diff --git a/spec/fixtures/dependency_proxy/manifest b/spec/fixtures/dependency_proxy/manifest
index a899d05d697..ed543883d60 100644
--- a/spec/fixtures/dependency_proxy/manifest
+++ b/spec/fixtures/dependency_proxy/manifest
@@ -1,38 +1,16 @@
{
- "schemaVersion": 1,
- "name": "library/alpine",
- "tag": "latest",
- "architecture": "amd64",
- "fsLayers": [
+ "schemaVersion": 2,
+ "mediaType": "application/vnd.docker.distribution.manifest.v2+json",
+ "config": {
+ "mediaType": "application/vnd.docker.container.image.v1+json",
+ "size": 1472,
+ "digest": "sha256:7731472c3f2a25edbb9c085c78f42ec71259f2b83485aa60648276d408865839"
+ },
+ "layers": [
{
- "blobSum": "sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4"
- },
- {
- "blobSum": "sha256:188c0c94c7c576fff0792aca7ec73d67a2f7f4cb3a6e53a84559337260b36964"
- }
- ],
- "history": [
- {
- "v1Compatibility": "{\"architecture\":\"amd64\",\"config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":[\"/bin/sh\"],\"ArgsEscaped\":true,\"Image\":\"sha256:3543079adc6fb5170279692361be8b24e89ef1809a374c1b4429e1d560d1459c\",\"Volumes\":null,\"WorkingDir\":\"\",\"Entrypoint\":null,\"OnBuild\":null,\"Labels\":null},\"container\":\"8c59eb170e19b8c3768b8d06c91053b0debf4a6fa6a452df394145fe9b885ea5\",\"container_config\":{\"Hostname\":\"8c59eb170e19\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":[\"/bin/sh\",\"-c\",\"#(nop) \",\"CMD [\\\"/bin/sh\\\"]\"],\"ArgsEscaped\":true,\"Image\":\"sha256:3543079adc6fb5170279692361be8b24e89ef1809a374c1b4429e1d560d1459c\",\"Volumes\":null,\"WorkingDir\":\"\",\"Entrypoint\":null,\"OnBuild\":null,\"Labels\":{}},\"created\":\"2020-10-22T02:19:24.499382102Z\",\"docker_version\":\"18.09.7\",\"id\":\"c5f1aab5bb88eaf1aa62bea08ea6654547d43fd4d15b1a476c77e705dd5385ba\",\"os\":\"linux\",\"parent\":\"dc0b50cc52bc340d7848a62cfe8a756f4420592f4984f7a680ef8f9d258176ed\",\"throwaway\":true}"
- },
- {
- "v1Compatibility": "{\"id\":\"dc0b50cc52bc340d7848a62cfe8a756f4420592f4984f7a680ef8f9d258176ed\",\"created\":\"2020-10-22T02:19:24.33416307Z\",\"container_config\":{\"Cmd\":[\"/bin/sh -c #(nop) ADD file:f17f65714f703db9012f00e5ec98d0b2541ff6147c2633f7ab9ba659d0c507f4 in / \"]}}"
- }
- ],
- "signatures": [
- {
- "header": {
- "jwk": {
- "crv": "P-256",
- "kid": "XOTE:DZ4C:YBPJ:3O3L:YI4B:NYXU:T4VR:USH6:CXXN:SELU:CSCC:FVPE",
- "kty": "EC",
- "x": "cR1zye_3354mdbD7Dn-mtXNXvtPtmLlUVDa5vH6Lp74",
- "y": "rldUXSllLit6_2BW6AV8aqkwWJXHoYPG9OwkIBouwxQ"
- },
- "alg": "ES256"
- },
- "signature": "DYB2iB-XKIisqp5Q0OXFOBIOlBOuRV7pnZuKy0cxVB2Qj1VFRhWX4Tq336y0VMWbF6ma1he5A1E_Vk4jazrJ9g",
- "protected": "eyJmb3JtYXRMZW5ndGgiOjIxMzcsImZvcm1hdFRhaWwiOiJDbjAiLCJ0aW1lIjoiMjAyMC0xMS0yNFQyMjowMTo1MVoifQ"
+ "mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip",
+ "size": 2810825,
+ "digest": "sha256:596ba82af5aaa3e2fd9d6f955b8b94f0744a2b60710e3c243ba3e4a467f051d1"
}
]
} \ No newline at end of file
diff --git a/spec/fixtures/markdown.md.erb b/spec/fixtures/markdown.md.erb
index aff4b1aae23..30ae58dbf9e 100644
--- a/spec/fixtures/markdown.md.erb
+++ b/spec/fixtures/markdown.md.erb
@@ -170,6 +170,8 @@ References should be parseable even inside _<%= merge_request.to_reference %>_ e
- Ignores invalid: <%= User.reference_prefix %>fake_user
- Ignored in code: `<%= user.to_reference %>`
- Ignored in links: [Link to <%= user.to_reference %>](#user-link)
+- Ignored when backslash escaped: \<%= user.to_reference %>
+- Ignored when backslash escaped: \<%= group.to_reference %>
- Link to user by reference: [User](<%= user.to_reference %>)
#### IssueReferenceFilter
@@ -178,6 +180,7 @@ References should be parseable even inside _<%= merge_request.to_reference %>_ e
- Issue in another project: <%= xissue.to_reference(project) %>
- Ignored in code: `<%= issue.to_reference %>`
- Ignored in links: [Link to <%= issue.to_reference %>](#issue-link)
+- Ignored when backslash escaped: \<%= issue.to_reference %>
- Issue by URL: <%= urls.project_issue_url(issue.project, issue) %>
- Link to issue by reference: [Issue](<%= issue.to_reference %>)
- Link to issue by URL: [Issue](<%= urls.project_issue_url(issue.project, issue) %>)
@@ -188,6 +191,7 @@ References should be parseable even inside _<%= merge_request.to_reference %>_ e
- Merge request in another project: <%= xmerge_request.to_reference(project) %>
- Ignored in code: `<%= merge_request.to_reference %>`
- Ignored in links: [Link to <%= merge_request.to_reference %>](#merge-request-link)
+- Ignored when backslash escaped: \<%= merge_request.to_reference %>
- Merge request by URL: <%= urls.project_merge_request_url(merge_request.project, merge_request) %>
- Link to merge request by reference: [Merge request](<%= merge_request.to_reference %>)
- Link to merge request by URL: [Merge request](<%= urls.project_merge_request_url(merge_request.project, merge_request) %>)
@@ -198,6 +202,7 @@ References should be parseable even inside _<%= merge_request.to_reference %>_ e
- Snippet in another project: <%= xsnippet.to_reference(project) %>
- Ignored in code: `<%= snippet.to_reference %>`
- Ignored in links: [Link to <%= snippet.to_reference %>](#snippet-link)
+- Ignored when backslash escaped: \<%= snippet.to_reference %>
- Snippet by URL: <%= urls.project_snippet_url(snippet.project, snippet) %>
- Link to snippet by reference: [Snippet](<%= snippet.to_reference %>)
- Link to snippet by URL: [Snippet](<%= urls.project_snippet_url(snippet.project, snippet) %>)
@@ -229,6 +234,7 @@ References should be parseable even inside _<%= merge_request.to_reference %>_ e
- Label by name in quotes: <%= label.to_reference(format: :name) %>
- Ignored in code: `<%= simple_label.to_reference %>`
- Ignored in links: [Link to <%= simple_label.to_reference %>](#label-link)
+- Ignored when backslash escaped: \<%= simple_label.to_reference %>
- Link to label by reference: [Label](<%= label.to_reference %>)
#### MilestoneReferenceFilter
@@ -239,6 +245,7 @@ References should be parseable even inside _<%= merge_request.to_reference %>_ e
- Milestone in another project: <%= xmilestone.to_reference(project) %>
- Ignored in code: `<%= simple_milestone.to_reference %>`
- Ignored in links: [Link to <%= simple_milestone.to_reference %>](#milestone-link)
+- Ignored when backslash escaped: \<%= simple_milestone.to_reference %>
- Milestone by URL: <%= urls.milestone_url(milestone) %>
- Link to milestone by URL: [Milestone](<%= milestone.to_reference %>)
- Group milestone by name: <%= Milestone.reference_prefix %><%= group_milestone.name %>
@@ -250,6 +257,7 @@ References should be parseable even inside _<%= merge_request.to_reference %>_ e
- Alert in another project: <%= xalert.to_reference(project) %>
- Ignored in code: `<%= alert.to_reference %>`
- Ignored in links: [Link to <%= alert.to_reference %>](#alert-link)
+- Ignored when backslash escaped: \<%= alert.to_reference %>
- Alert by URL: <%= alert.details_url %>
- Link to alert by reference: [Alert](<%= alert.to_reference %>)
- Link to alert by URL: [Alert](<%= alert.details_url %>)
diff --git a/spec/fixtures/packages/composer/package.json b/spec/fixtures/packages/composer/package.json
new file mode 100644
index 00000000000..0967ef424bc
--- /dev/null
+++ b/spec/fixtures/packages/composer/package.json
@@ -0,0 +1 @@
+{}
diff --git a/spec/fixtures/pipeline_artifacts/code_quality_mr_diff.json b/spec/fixtures/pipeline_artifacts/code_quality_mr_diff.json
new file mode 100644
index 00000000000..c3ee2bc4cac
--- /dev/null
+++ b/spec/fixtures/pipeline_artifacts/code_quality_mr_diff.json
@@ -0,0 +1,23 @@
+{
+ "files": {
+ "file_a.rb": [
+ {
+ "line": 10,
+ "description": "Avoid parameter lists longer than 5 parameters. [12/5]",
+ "severity": "major"
+ },
+ {
+ "line": 10,
+ "description": "Method `new_array` has 12 arguments (exceeds 4 allowed). Consider refactoring.",
+ "severity": "minor"
+ }
+ ],
+ "file_b.rb": [
+ {
+ "line": 10,
+ "description": "This cop checks for methods with too many parameters.\nThe maximum number of parameters is configurable.\nKeyword arguments can optionally be excluded from the total count.",
+ "severity": "minor"
+ }
+ ]
+ }
+}
diff --git a/spec/fixtures/whats_new/invalid.yml b/spec/fixtures/whats_new/invalid.yml
index 0e588efaf8f..a3342be0f24 100644
--- a/spec/fixtures/whats_new/invalid.yml
+++ b/spec/fixtures/whats_new/invalid.yml
@@ -13,7 +13,7 @@
stage: Release
self-managed: true
gitlab-com: true
- packages: [Starter]
+ packages: [Free]
url: https://docs.gitlab.com/ee/ci/examples/authenticating-with-hashicorp-vault/index.html
image_url: https://about.gitlab.com/images/12_10/jwt-vault-1.png
published_at: 2020-04-22
diff --git a/spec/fixtures/whats_new/valid.yml b/spec/fixtures/whats_new/valid.yml
index cbe9d666357..ec465f47989 100644
--- a/spec/fixtures/whats_new/valid.yml
+++ b/spec/fixtures/whats_new/valid.yml
@@ -13,7 +13,7 @@
stage: Release
self-managed: true
gitlab-com: true
- packages: [Starter]
+ packages: [Free]
url: https://docs.gitlab.com/ee/ci/examples/authenticating-with-hashicorp-vault/index.html
image_url: https://about.gitlab.com/images/12_10/jwt-vault-1.png
published_at: 2020-04-22
diff --git a/spec/frontend/__helpers__/graphql_helpers.js b/spec/frontend/__helpers__/graphql_helpers.js
new file mode 100644
index 00000000000..63123aa046f
--- /dev/null
+++ b/spec/frontend/__helpers__/graphql_helpers.js
@@ -0,0 +1,14 @@
+/**
+ * Returns a clone of the given object with all __typename keys omitted,
+ * including deeply nested ones.
+ *
+ * Only works with JSON-serializable objects.
+ *
+ * @param {object} object - An object with __typename keys (e.g., a GraphQL response)
+ * @returns {object} A new object with no __typename keys
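+ *
+ * @example <caption>Illustrative sketch (hypothetical values, not part of this change)</caption>
+ * stripTypenames({ id: 1, __typename: 'Issue', author: { __typename: 'User' } });
+ * // => { id: 1, author: {} }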
+ */
+export const stripTypenames = (object) => {
+ return JSON.parse(
+ JSON.stringify(object, (key, value) => (key === '__typename' ? undefined : value)),
+ );
+};
diff --git a/spec/frontend/__helpers__/graphql_helpers_spec.js b/spec/frontend/__helpers__/graphql_helpers_spec.js
new file mode 100644
index 00000000000..dd23fbbf4e9
--- /dev/null
+++ b/spec/frontend/__helpers__/graphql_helpers_spec.js
@@ -0,0 +1,23 @@
+import { stripTypenames } from './graphql_helpers';
+
+describe('stripTypenames', () => {
+ it.each`
+ input | expected
+ ${{}} | ${{}}
+ ${{ __typename: 'Foo' }} | ${{}}
+ ${{ bar: 'bar', __typename: 'Foo' }} | ${{ bar: 'bar' }}
+ ${{ bar: { __typename: 'Bar' }, __typename: 'Foo' }} | ${{ bar: {} }}
+ ${{ bar: [{ __typename: 'Bar' }], __typename: 'Foo' }} | ${{ bar: [{}] }}
+ ${[]} | ${[]}
+ ${[{ __typename: 'Foo' }]} | ${[{}]}
+ ${[{ bar: [{ a: 1, __typename: 'Bar' }] }]} | ${[{ bar: [{ a: 1 }] }]}
+ `('given $input returns $expected, with all __typename keys removed', ({ input, expected }) => {
+ const actual = stripTypenames(input);
+ expect(actual).toEqual(expected);
+ expect(input).not.toBe(actual);
+ });
+
+ it('given null returns null', () => {
+ expect(stripTypenames(null)).toEqual(null);
+ });
+});
diff --git a/spec/frontend/__helpers__/stub_component.js b/spec/frontend/__helpers__/stub_component.js
index 45550450517..96fe3a8bc45 100644
--- a/spec/frontend/__helpers__/stub_component.js
+++ b/spec/frontend/__helpers__/stub_component.js
@@ -1,7 +1,32 @@
+/**
+ * Returns a new object with keys pointing to stubbed methods
+ *
+ * This is helpful for stubbing components like GlModal, whose API supports
+ * calling `.show()` and `.hide()` ([Bootstrap Vue docs][1]).
+ *
+ * [1]: https://bootstrap-vue.org/docs/components/modal#using-show-hide-and-toggle-component-methods
+ *
+ * @param {Object} methods - Object whose keys will be in the returned object.
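+ *
+ * @example <caption>Illustrative sketch (hypothetical methods, not part of this change)</caption>
+ * createStubbedMethods({ show() {}, hide() {} });
+ * // => { show: () => {}, hide: () => {} } (each key maps to a no-op)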
+ */
+const createStubbedMethods = (methods = {}) => {
+ if (!methods) {
+ return {};
+ }
+
+ return Object.keys(methods).reduce(
+ (acc, key) =>
+ Object.assign(acc, {
+ [key]: () => {},
+ }),
+ {},
+ );
+};
+
export function stubComponent(Component, options = {}) {
return {
props: Component.props,
model: Component.model,
+ methods: createStubbedMethods(Component.methods),
// Do not render any slots/scoped slots except default
// This differs from VTU behavior which renders all slots
template: '<div><slot></slot></div>',
diff --git a/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js b/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js
index 1a3b151afa0..0faf0db4135 100644
--- a/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js
+++ b/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js
@@ -2,11 +2,11 @@ import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import { GlModal, GlSearchBoxByType } from '@gitlab/ui';
import AddReviewItemsModal from '~/add_context_commits_modal/components/add_context_commits_modal_wrapper.vue';
-import getDiffWithCommit from '../../diffs/mock_data/diff_with_commit';
import defaultState from '~/add_context_commits_modal/store/state';
import mutations from '~/add_context_commits_modal/store/mutations';
import * as actions from '~/add_context_commits_modal/store/actions';
+import getDiffWithCommit from '../../diffs/mock_data/diff_with_commit';
const localVue = createLocalVue();
localVue.use(Vuex);
diff --git a/spec/frontend/admin/users/components/user_actions_spec.js b/spec/frontend/admin/users/components/user_actions_spec.js
new file mode 100644
index 00000000000..78bc37233c2
--- /dev/null
+++ b/spec/frontend/admin/users/components/user_actions_spec.js
@@ -0,0 +1,138 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlDropdownDivider } from '@gitlab/ui';
+import AdminUserActions from '~/admin/users/components/user_actions.vue';
+import { generateUserPaths } from '~/admin/users/utils';
+
+import { users, paths } from '../mock_data';
+
+const BLOCK = 'block';
+const EDIT = 'edit';
+const LDAP = 'ldapBlocked';
+const DELETE = 'delete';
+const DELETE_WITH_CONTRIBUTIONS = 'deleteWithContributions';
+
+describe('AdminUserActions component', () => {
+ let wrapper;
+ const user = users[0];
+ const userPaths = generateUserPaths(paths, user.username);
+
+ const findEditButton = () => wrapper.find('[data-testid="edit"]');
+ const findActionsDropdown = () => wrapper.find('[data-testid="actions"]');
+ const findDropdownDivider = () => wrapper.find(GlDropdownDivider);
+
+ const initComponent = ({ actions = [] } = {}) => {
+ wrapper = shallowMount(AdminUserActions, {
+ propsData: {
+ user: {
+ ...user,
+ actions,
+ },
+ paths,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('edit button', () => {
+ describe('when the user has an edit action attached', () => {
+ beforeEach(() => {
+ initComponent({ actions: [EDIT] });
+ });
+
+ it('renders the edit button linking to the user edit path', () => {
+ expect(findEditButton().exists()).toBe(true);
+ expect(findEditButton().attributes('href')).toBe(userPaths.edit);
+ });
+ });
+
+ describe('when there is no edit action attached to the user', () => {
+ beforeEach(() => {
+ initComponent({ actions: [] });
+ });
+
+ it('does not render the edit button linking to the user edit path', () => {
+ expect(findEditButton().exists()).toBe(false);
+ });
+ });
+ });
+
+ describe('actions dropdown', () => {
+ describe('when there are actions', () => {
+ const actions = [EDIT, BLOCK];
+
+ beforeEach(() => {
+ initComponent({ actions });
+ });
+
+ it('renders the actions dropdown', () => {
+ expect(findActionsDropdown().exists()).toBe(true);
+ });
+
+ it.each(actions)('renders a dropdown item for %s', (action) => {
+ const dropdownAction = wrapper.find(`[data-testid="${action}"]`);
+ expect(dropdownAction.exists()).toBe(true);
+ expect(dropdownAction.attributes('href')).toBe(userPaths[action]);
+ });
+
+ describe('when there is an LDAP action', () => {
+ beforeEach(() => {
+ initComponent({ actions: [LDAP] });
+ });
+
+ it('renders the LDAP dropdown item without a link', () => {
+ const dropdownAction = wrapper.find(`[data-testid="${LDAP}"]`);
+ expect(dropdownAction.exists()).toBe(true);
+ expect(dropdownAction.attributes('href')).toBe(undefined);
+ });
+ });
+
+ describe('when there is a delete action', () => {
+ const deleteActions = [DELETE, DELETE_WITH_CONTRIBUTIONS];
+
+ beforeEach(() => {
+ initComponent({ actions: [BLOCK, ...deleteActions] });
+ });
+
+ it('renders a dropdown divider', () => {
+ expect(findDropdownDivider().exists()).toBe(true);
+ });
+
+ it('only renders delete dropdown items for actions containing the word "delete"', () => {
+ const { length } = wrapper.findAll(`[data-testid*="delete-"]`);
+ expect(length).toBe(deleteActions.length);
+ });
+
+ it.each(deleteActions)('renders a delete dropdown item for %s', (action) => {
+ const deleteAction = wrapper.find(`[data-testid="delete-${action}"]`);
+ expect(deleteAction.exists()).toBe(true);
+ expect(deleteAction.attributes('href')).toBe(userPaths[action]);
+ });
+ });
+
+ describe('when there are no delete actions', () => {
+ it('does not render a dropdown divider', () => {
+ expect(findDropdownDivider().exists()).toBe(false);
+ });
+
+ it('does not render a delete dropdown item', () => {
+ const anyDeleteAction = wrapper.find(`[data-testid*="delete-"]`);
+ expect(anyDeleteAction.exists()).toBe(false);
+ });
+ });
+ });
+
+ describe('when there are no actions', () => {
+ beforeEach(() => {
+ initComponent({ actions: [] });
+ });
+
+ it('does not render the actions dropdown', () => {
+ expect(findActionsDropdown().exists()).toBe(false);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/admin/users/components/user_avatar_spec.js b/spec/frontend/admin/users/components/user_avatar_spec.js
index ba4e83690d0..0c048a06a34 100644
--- a/spec/frontend/admin/users/components/user_avatar_spec.js
+++ b/spec/frontend/admin/users/components/user_avatar_spec.js
@@ -1,7 +1,10 @@
-import { GlAvatarLink, GlAvatarLabeled, GlBadge } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
+import { GlAvatarLink, GlAvatarLabeled, GlBadge, GlIcon } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import AdminUserAvatar from '~/admin/users/components/user_avatar.vue';
+import { LENGTH_OF_USER_NOTE_TOOLTIP } from '~/admin/users/constants';
+import { truncate } from '~/lib/utils/text_utility';
import { users, paths } from '../mock_data';
describe('AdminUserAvatar component', () => {
@@ -9,17 +12,25 @@ describe('AdminUserAvatar component', () => {
const user = users[0];
const adminUserPath = paths.adminUser;
+ const findNote = () => wrapper.find(GlIcon);
const findAvatar = () => wrapper.find(GlAvatarLabeled);
const findAvatarLink = () => wrapper.find(GlAvatarLink);
const findAllBadges = () => wrapper.findAll(GlBadge);
+ const findTooltip = () => getBinding(findNote().element, 'gl-tooltip');
const initComponent = (props = {}) => {
- wrapper = mount(AdminUserAvatar, {
+ wrapper = shallowMount(AdminUserAvatar, {
propsData: {
user,
adminUserPath,
...props,
},
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ stubs: {
+ GlAvatarLabeled,
+ },
});
};
@@ -53,11 +64,58 @@ describe('AdminUserAvatar component', () => {
expect(findAvatar().attributes('src')).toBe(user.avatarUrl);
});
+ it('renders a user note icon', () => {
+ expect(findNote().exists()).toBe(true);
+ expect(findNote().props('name')).toBe('document');
+ });
+
+ it("renders the user's note tooltip", () => {
+ const tooltip = findTooltip();
+
+ expect(tooltip).toBeDefined();
+ expect(tooltip.value).toBe(user.note);
+ });
+
it("renders the user's badges", () => {
findAllBadges().wrappers.forEach((badge, idx) => {
expect(badge.text()).toBe(user.badges[idx].text);
expect(badge.props('variant')).toBe(user.badges[idx].variant);
});
});
+
+ describe('and the user note is very long', () => {
+ const noteText = new Array(LENGTH_OF_USER_NOTE_TOOLTIP + 1).join('a');
+
+ beforeEach(() => {
+ initComponent({
+ user: {
+ ...user,
+ note: noteText,
+ },
+ });
+ });
+
+ it("renders a truncated user's note tooltip", () => {
+ const tooltip = findTooltip();
+
+ expect(tooltip).toBeDefined();
+ expect(tooltip.value).toBe(truncate(noteText, LENGTH_OF_USER_NOTE_TOOLTIP));
+ });
+ });
+
+ describe('and the user does not have a note', () => {
+ beforeEach(() => {
+ initComponent({
+ user: {
+ ...user,
+ note: null,
+ },
+ });
+ });
+
+ it('does not render a user note', () => {
+ expect(findNote().exists()).toBe(false);
+ });
+ });
});
});
diff --git a/spec/frontend/admin/users/components/user_date_spec.js b/spec/frontend/admin/users/components/user_date_spec.js
new file mode 100644
index 00000000000..6428b10059b
--- /dev/null
+++ b/spec/frontend/admin/users/components/user_date_spec.js
@@ -0,0 +1,34 @@
+import { shallowMount } from '@vue/test-utils';
+
+import UserDate from '~/admin/users/components/user_date.vue';
+import { users } from '../mock_data';
+
+const mockDate = users[0].createdAt;
+
+describe('UserDate component', () => {
+ let wrapper;
+
+ const initComponent = (props = {}) => {
+ wrapper = shallowMount(UserDate, {
+ propsData: {
+ ...props,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it.each`
+ date | output
+ ${mockDate} | ${'13 Nov, 2020'}
+ ${null} | ${'Never'}
+ ${undefined} | ${'Never'}
+ `('renders $date as $output', ({ date, output }) => {
+ initComponent({ date });
+
+ expect(wrapper.text()).toBe(output);
+ });
+});
diff --git a/spec/frontend/admin/users/components/users_table_spec.js b/spec/frontend/admin/users/components/users_table_spec.js
index b79d2d4d39d..ac48542cec5 100644
--- a/spec/frontend/admin/users/components/users_table_spec.js
+++ b/spec/frontend/admin/users/components/users_table_spec.js
@@ -3,6 +3,9 @@ import { mount } from '@vue/test-utils';
import AdminUsersTable from '~/admin/users/components/users_table.vue';
import AdminUserAvatar from '~/admin/users/components/user_avatar.vue';
+import AdminUserDate from '~/admin/users/components/user_date.vue';
+import AdminUserActions from '~/admin/users/components/user_actions.vue';
+
import { users, paths } from '../mock_data';
describe('AdminUsersTable component', () => {
@@ -39,18 +42,21 @@ describe('AdminUsersTable component', () => {
initComponent();
});
- it.each`
- key | label
- ${'name'} | ${'Name'}
- ${'projectsCount'} | ${'Projects'}
- ${'createdAt'} | ${'Created on'}
- ${'lastActivityOn'} | ${'Last activity'}
- `('renders users.$key in column $label', ({ key, label }) => {
- expect(getCellByLabel(0, label).text()).toContain(`${user[key]}`);
+ it('renders the projects count', () => {
+ expect(getCellByLabel(0, 'Projects').text()).toContain(`${user.projectsCount}`);
});
- it('renders an AdminUserAvatar component', () => {
- expect(getCellByLabel(0, 'Name').find(AdminUserAvatar).exists()).toBe(true);
+ it('renders the user actions', () => {
+ expect(wrapper.find(AdminUserActions).exists()).toBe(true);
+ });
+
+ it.each`
+ component | label
+ ${AdminUserAvatar} | ${'Name'}
+ ${AdminUserDate} | ${'Created on'}
+ ${AdminUserDate} | ${'Last activity'}
+ `('renders the component for column $label', ({ component, label }) => {
+ expect(getCellByLabel(0, label).find(component).exists()).toBe(true);
});
});
diff --git a/spec/frontend/admin/users/index_spec.js b/spec/frontend/admin/users/index_spec.js
index 171d54c8f4f..20b60bd8640 100644
--- a/spec/frontend/admin/users/index_spec.js
+++ b/spec/frontend/admin/users/index_spec.js
@@ -1,5 +1,5 @@
import { createWrapper } from '@vue/test-utils';
-import initAdminUsers from '~/admin/users';
+import { initAdminUsersApp } from '~/admin/users';
import AdminUsersApp from '~/admin/users/components/app.vue';
import { users, paths } from './mock_data';
@@ -16,7 +16,7 @@ describe('initAdminUsersApp', () => {
document.body.appendChild(el);
- wrapper = createWrapper(initAdminUsers(el));
+ wrapper = createWrapper(initAdminUsersApp(el));
});
afterEach(() => {
diff --git a/spec/frontend/admin/users/mock_data.js b/spec/frontend/admin/users/mock_data.js
index 860994a9152..c3918ef5173 100644
--- a/spec/frontend/admin/users/mock_data.js
+++ b/spec/frontend/admin/users/mock_data.js
@@ -14,6 +14,7 @@ export const users = [
],
projectsCount: 0,
actions: [],
+ note: 'Created per issue #999',
},
];
diff --git a/spec/frontend/alert_management/components/alert_management_table_spec.js b/spec/frontend/alert_management/components/alert_management_table_spec.js
index 0cc3d565e10..21217e6c608 100644
--- a/spec/frontend/alert_management/components/alert_management_table_spec.js
+++ b/spec/frontend/alert_management/components/alert_management_table_spec.js
@@ -2,11 +2,11 @@ import { mount } from '@vue/test-utils';
import { GlTable, GlAlert, GlLoadingIcon, GlDropdown, GlIcon, GlAvatar } from '@gitlab/ui';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
+import mockAlerts from 'jest/vue_shared/alert_details/mocks/alerts.json';
import { visitUrl } from '~/lib/utils/url_utility';
import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
import AlertManagementTable from '~/alert_management/components/alert_management_table.vue';
import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
-import mockAlerts from '../mocks/alerts.json';
import defaultProvideValues from '../mocks/alerts_provide_config.json';
jest.mock('~/lib/utils/url_utility', () => ({
diff --git a/spec/frontend/alerts_settings/__snapshots__/alerts_settings_form_spec.js.snap b/spec/frontend/alerts_settings/__snapshots__/alerts_settings_form_spec.js.snap
deleted file mode 100644
index ef68a6a2c32..00000000000
--- a/spec/frontend/alerts_settings/__snapshots__/alerts_settings_form_spec.js.snap
+++ /dev/null
@@ -1,98 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`AlertsSettingsFormNew with default values renders the initial template 1`] = `
-"<form class=\\"gl-mt-6\\">
- <h5 class=\\"gl-font-lg gl-my-5\\">Add new integrations</h5>
- <div id=\\"integration-type\\" role=\\"group\\" class=\\"form-group gl-form-group\\"><label id=\\"integration-type__BV_label_\\" for=\\"integration-type\\" class=\\"d-block col-form-label\\">1. Select integration type</label>
- <div class=\\"bv-no-focus-ring\\"><select class=\\"gl-form-select mw-100 custom-select\\" id=\\"__BVID__8\\">
- <option value=\\"\\">Select integration type</option>
- <option value=\\"HTTP\\">HTTP Endpoint</option>
- <option value=\\"PROMETHEUS\\">External Prometheus</option>
- </select>
- <!---->
- <!---->
- <!---->
- <!---->
- </div>
- </div>
- <transition-stub css=\\"true\\" enterclass=\\"\\" leaveclass=\\"collapse show\\" entertoclass=\\"collapse show\\" leavetoclass=\\"collapse\\" enteractiveclass=\\"collapsing\\" leaveactiveclass=\\"collapsing\\" class=\\"gl-mt-3\\">
- <div class=\\"collapse\\" style=\\"display: none;\\" id=\\"__BVID__10\\">
- <div>
- <div id=\\"name-integration\\" role=\\"group\\" class=\\"form-group gl-form-group\\"><label id=\\"name-integration__BV_label_\\" for=\\"name-integration\\" class=\\"d-block col-form-label\\">2. Name integration</label>
- <div class=\\"bv-no-focus-ring\\"><input type=\\"text\\" placeholder=\\"Enter integration name\\" class=\\"gl-form-input form-control\\" id=\\"__BVID__15\\">
- <!---->
- <!---->
- <!---->
- </div>
- </div>
- <div id=\\"integration-webhook\\" role=\\"group\\" class=\\"form-group gl-form-group\\"><label id=\\"integration-webhook__BV_label_\\" for=\\"integration-webhook\\" class=\\"d-block col-form-label\\">3. Set up webhook</label>
- <div class=\\"bv-no-focus-ring\\"><span>Utilize the URL and authorization key below to authorize an external service to send alerts to GitLab. Review your external service's documentation to learn where to add these details, and the <a rel=\\"noopener noreferrer\\" target=\\"_blank\\" href=\\"https://docs.gitlab.com/ee/operations/incident_management/alert_integrations.html\\" class=\\"gl-link gl-display-inline-block\\">GitLab documentation</a> to learn more about configuring your endpoint.</span> <label class=\\"gl-display-flex gl-flex-direction-column gl-mb-0 gl-w-max-content gl-my-4 gl-font-weight-normal\\">
- <div class=\\"gl-toggle-wrapper\\"><span class=\\"gl-toggle-label\\">Active</span>
- <!----> <button aria-label=\\"Active\\" type=\\"button\\" class=\\"gl-toggle\\"><span class=\\"toggle-icon\\"><svg data-testid=\\"close-icon\\" aria-hidden=\\"true\\" class=\\"gl-icon s16\\"><use href=\\"#close\\"></use></svg></span></button></div>
- <!---->
- </label>
- <!---->
- <div class=\\"gl-my-4\\"><span class=\\"gl-font-weight-bold\\">
- Webhook URL
- </span>
- <div id=\\"url\\" readonly=\\"readonly\\">
- <div role=\\"group\\" class=\\"input-group\\">
- <!---->
- <!----> <input id=\\"url\\" type=\\"text\\" readonly=\\"readonly\\" class=\\"gl-form-input form-control\\">
- <div class=\\"input-group-append\\"><button title=\\"Copy\\" data-clipboard-text=\\"\\" aria-label=\\"Copy this value\\" type=\\"button\\" class=\\"btn gl-m-0! btn-default btn-md gl-button btn-default-secondary btn-icon\\">
- <!----> <svg data-testid=\\"copy-to-clipboard-icon\\" aria-hidden=\\"true\\" class=\\"gl-button-icon gl-icon s16\\">
- <use href=\\"#copy-to-clipboard\\"></use>
- </svg>
- <!----></button></div>
- <!---->
- </div>
- </div>
- </div>
- <div class=\\"gl-my-4\\"><span class=\\"gl-font-weight-bold\\">
- Authorization key
- </span>
- <div id=\\"authorization-key\\" readonly=\\"readonly\\" class=\\"gl-mb-3\\">
- <div role=\\"group\\" class=\\"input-group\\">
- <!---->
- <!----> <input id=\\"authorization-key\\" type=\\"text\\" readonly=\\"readonly\\" class=\\"gl-form-input form-control\\">
- <div class=\\"input-group-append\\"><button title=\\"Copy\\" data-clipboard-text=\\"\\" aria-label=\\"Copy this value\\" type=\\"button\\" class=\\"btn gl-m-0! btn-default btn-md gl-button btn-default-secondary btn-icon\\">
- <!----> <svg data-testid=\\"copy-to-clipboard-icon\\" aria-hidden=\\"true\\" class=\\"gl-button-icon gl-icon s16\\">
- <use href=\\"#copy-to-clipboard\\"></use>
- </svg>
- <!----></button></div>
- <!---->
- </div>
- </div> <button type=\\"button\\" disabled=\\"disabled\\" class=\\"btn btn-default btn-md disabled gl-button\\">
- <!---->
- <!----> <span class=\\"gl-button-text\\">
- Reset Key
- </span></button>
- <!---->
- </div>
- <!---->
- <!---->
- <!---->
- </div>
- </div>
- <div id=\\"test-integration\\" role=\\"group\\" class=\\"form-group gl-form-group\\"><label id=\\"test-integration__BV_label_\\" for=\\"test-integration\\" class=\\"d-block col-form-label\\">4. Sample alert payload (optional)</label>
- <div class=\\"bv-no-focus-ring\\"><span>Provide an example payload from the monitoring tool you intend to integrate with. This payload can be used to test the integration (optional).</span> <textarea id=\\"test-payload\\" disabled=\\"disabled\\" placeholder=\\"{ &quot;events&quot;: [{ &quot;application&quot;: &quot;Name of application&quot; }] }\\" wrap=\\"soft\\" class=\\"gl-form-input gl-form-textarea gl-my-3 form-control is-valid\\" style=\\"resize: none; overflow-y: scroll;\\"></textarea>
- <!---->
- <!---->
- <!---->
- </div>
- </div>
- <!---->
- <!---->
- </div>
- <div class=\\"gl-display-flex gl-justify-content-start gl-py-3\\"><button data-testid=\\"integration-form-submit\\" type=\\"submit\\" class=\\"btn js-no-auto-disable btn-success btn-md gl-button\\">
- <!---->
- <!----> <span class=\\"gl-button-text\\">Save integration
- </span></button> <button data-testid=\\"integration-test-and-submit\\" type=\\"button\\" disabled=\\"disabled\\" class=\\"btn gl-mx-3 js-no-auto-disable btn-success btn-md disabled gl-button btn-success-secondary\\">
- <!---->
- <!----> <span class=\\"gl-button-text\\">Save and test payload</span></button> <button type=\\"reset\\" class=\\"btn js-no-auto-disable btn-default btn-md gl-button\\">
- <!---->
- <!----> <span class=\\"gl-button-text\\">Cancel</span></button></div>
- </div>
- </transition-stub>
-</form>"
-`;
diff --git a/spec/frontend/alerts_settings/components/__snapshots__/alerts_settings_form_spec.js.snap b/spec/frontend/alerts_settings/components/__snapshots__/alerts_settings_form_spec.js.snap
new file mode 100644
index 00000000000..eb2b82a0211
--- /dev/null
+++ b/spec/frontend/alerts_settings/components/__snapshots__/alerts_settings_form_spec.js.snap
@@ -0,0 +1,406 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`AlertsSettingsForm with default values renders the initial template 1`] = `
+<form
+ class="gl-mt-6"
+>
+ <h5
+ class="gl-font-lg gl-my-5"
+ >
+ Add new integrations
+ </h5>
+
+ <div
+ class="form-group gl-form-group"
+ id="integration-type"
+ role="group"
+ >
+ <label
+ class="d-block col-form-label"
+ for="integration-type"
+ id="integration-type__BV_label_"
+ >
+ 1. Select integration type
+ </label>
+ <div
+ class="bv-no-focus-ring"
+ >
+ <select
+ class="gl-form-select mw-100 custom-select"
+ id="__BVID__8"
+ >
+ <option
+ value=""
+ >
+ Select integration type
+ </option>
+ <option
+ value="HTTP"
+ >
+ HTTP Endpoint
+ </option>
+ <option
+ value="PROMETHEUS"
+ >
+ External Prometheus
+ </option>
+ </select>
+
+ <!---->
+ <!---->
+ <!---->
+ <!---->
+ </div>
+ </div>
+
+ <transition-stub
+ class="gl-mt-3"
+ css="true"
+ enteractiveclass="collapsing"
+ enterclass=""
+ entertoclass="collapse show"
+ leaveactiveclass="collapsing"
+ leaveclass="collapse show"
+ leavetoclass="collapse"
+ >
+ <div
+ class="collapse"
+ id="__BVID__10"
+ style="display: none;"
+ >
+ <div>
+ <div
+ class="form-group gl-form-group"
+ id="name-integration"
+ role="group"
+ >
+ <label
+ class="d-block col-form-label"
+ for="name-integration"
+ id="name-integration__BV_label_"
+ >
+ 2. Name integration
+ </label>
+ <div
+ class="bv-no-focus-ring"
+ >
+ <input
+ class="gl-form-input form-control"
+ id="__BVID__15"
+ placeholder="Enter integration name"
+ type="text"
+ />
+ <!---->
+ <!---->
+ <!---->
+ </div>
+ </div>
+
+ <div
+ class="form-group gl-form-group"
+ id="integration-webhook"
+ role="group"
+ >
+ <label
+ class="d-block col-form-label"
+ for="integration-webhook"
+ id="integration-webhook__BV_label_"
+ >
+ 3. Set up webhook
+ </label>
+ <div
+ class="bv-no-focus-ring"
+ >
+ <span>
+ Utilize the URL and authorization key below to authorize an external service to send alerts to GitLab. Review your external service's documentation to learn where to add these details, and the
+ <a
+ class="gl-link gl-display-inline-block"
+ href="https://docs.gitlab.com/ee/operations/incident_management/alert_integrations.html"
+ rel="noopener noreferrer"
+ target="_blank"
+ >
+ GitLab documentation
+ </a>
+ to learn more about configuring your endpoint.
+ </span>
+
+ <label
+ class="gl-display-flex gl-flex-direction-column gl-mb-0 gl-w-max-content gl-my-4 gl-font-weight-normal"
+ >
+ <span
+ class="gl-toggle-wrapper"
+ >
+ <span
+ class="gl-toggle-label"
+ data-testid="toggle-label"
+ >
+ Active
+ </span>
+
+ <!---->
+
+ <button
+ aria-label="Active"
+ class="gl-toggle"
+ role="switch"
+ type="button"
+ >
+ <span
+ class="toggle-icon"
+ >
+ <svg
+ aria-hidden="true"
+ class="gl-icon s16"
+ data-testid="close-icon"
+ >
+ <use
+ href="#close"
+ />
+ </svg>
+ </span>
+ </button>
+ </span>
+
+ <!---->
+ </label>
+
+ <!---->
+
+ <div
+ class="gl-my-4"
+ >
+ <span
+ class="gl-font-weight-bold"
+ >
+
+ Webhook URL
+
+ </span>
+
+ <div
+ id="url"
+ readonly="readonly"
+ >
+ <div
+ class="input-group"
+ role="group"
+ >
+ <!---->
+ <!---->
+
+ <input
+ class="gl-form-input form-control"
+ id="url"
+ readonly="readonly"
+ type="text"
+ />
+
+ <div
+ class="input-group-append"
+ >
+ <button
+ aria-label="Copy this value"
+ class="btn gl-m-0! btn-default btn-md gl-button btn-default-secondary btn-icon"
+ data-clipboard-text=""
+ title="Copy"
+ type="button"
+ >
+ <!---->
+
+ <svg
+ aria-hidden="true"
+ class="gl-button-icon gl-icon s16"
+ data-testid="copy-to-clipboard-icon"
+ >
+ <use
+ href="#copy-to-clipboard"
+ />
+ </svg>
+
+ <!---->
+ </button>
+ </div>
+ <!---->
+ </div>
+ </div>
+ </div>
+
+ <div
+ class="gl-my-4"
+ >
+ <span
+ class="gl-font-weight-bold"
+ >
+
+ Authorization key
+
+ </span>
+
+ <div
+ class="gl-mb-3"
+ id="authorization-key"
+ readonly="readonly"
+ >
+ <div
+ class="input-group"
+ role="group"
+ >
+ <!---->
+ <!---->
+
+ <input
+ class="gl-form-input form-control"
+ id="authorization-key"
+ readonly="readonly"
+ type="text"
+ />
+
+ <div
+ class="input-group-append"
+ >
+ <button
+ aria-label="Copy this value"
+ class="btn gl-m-0! btn-default btn-md gl-button btn-default-secondary btn-icon"
+ data-clipboard-text=""
+ title="Copy"
+ type="button"
+ >
+ <!---->
+
+ <svg
+ aria-hidden="true"
+ class="gl-button-icon gl-icon s16"
+ data-testid="copy-to-clipboard-icon"
+ >
+ <use
+ href="#copy-to-clipboard"
+ />
+ </svg>
+
+ <!---->
+ </button>
+ </div>
+ <!---->
+ </div>
+ </div>
+
+ <button
+ class="btn btn-default btn-md disabled gl-button"
+ disabled="disabled"
+ type="button"
+ >
+ <!---->
+
+ <!---->
+
+ <span
+ class="gl-button-text"
+ >
+
+ Reset Key
+
+ </span>
+ </button>
+
+ <!---->
+ </div>
+ <!---->
+ <!---->
+ <!---->
+ </div>
+ </div>
+
+ <div
+ class="form-group gl-form-group"
+ id="test-integration"
+ role="group"
+ >
+ <label
+ class="d-block col-form-label"
+ for="test-integration"
+ id="test-integration__BV_label_"
+ >
+ 4. Sample alert payload (optional)
+ </label>
+ <div
+ class="bv-no-focus-ring"
+ >
+ <span>
+ Provide an example payload from the monitoring tool you intend to integrate with. This payload can be used to test the integration (optional).
+ </span>
+
+ <textarea
+ class="gl-form-input gl-form-textarea gl-my-3 form-control is-valid"
+ disabled="disabled"
+ id="test-payload"
+ placeholder="{ \\"events\\": [{ \\"application\\": \\"Name of application\\" }] }"
+ style="resize: none; overflow-y: scroll;"
+ wrap="soft"
+ />
+ <!---->
+ <!---->
+ <!---->
+ </div>
+ </div>
+
+ <!---->
+
+ <!---->
+ </div>
+
+ <div
+ class="gl-display-flex gl-justify-content-start gl-py-3"
+ >
+ <button
+ class="btn js-no-auto-disable btn-success btn-md gl-button"
+ data-testid="integration-form-submit"
+ type="submit"
+ >
+ <!---->
+
+ <!---->
+
+ <span
+ class="gl-button-text"
+ >
+ Save integration
+
+ </span>
+ </button>
+
+ <button
+ class="btn gl-mx-3 js-no-auto-disable btn-success btn-md disabled gl-button btn-success-secondary"
+ data-testid="integration-test-and-submit"
+ disabled="disabled"
+ type="button"
+ >
+ <!---->
+
+ <!---->
+
+ <span
+ class="gl-button-text"
+ >
+ Save and test payload
+ </span>
+ </button>
+
+ <button
+ class="btn js-no-auto-disable btn-default btn-md gl-button"
+ type="reset"
+ >
+ <!---->
+
+ <!---->
+
+ <span
+ class="gl-button-text"
+ >
+ Cancel
+ </span>
+ </button>
+ </div>
+ </div>
+ </transition-stub>
+</form>
+`;
diff --git a/spec/frontend/alerts_settings/alert_mapping_builder_spec.js b/spec/frontend/alerts_settings/components/alert_mapping_builder_spec.js
index 5d48ff02e35..1ec7b5c18f6 100644
--- a/spec/frontend/alerts_settings/alert_mapping_builder_spec.js
+++ b/spec/frontend/alerts_settings/components/alert_mapping_builder_spec.js
@@ -1,8 +1,10 @@
import { GlIcon, GlFormInput, GlDropdown, GlSearchBoxByType, GlDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import AlertMappingBuilder, { i18n } from '~/alerts_settings/components/alert_mapping_builder.vue';
-import gitlabFields from '~/alerts_settings/components/mocks/gitlabFields.json';
import parsedMapping from '~/alerts_settings/components/mocks/parsedMapping.json';
+import { capitalizeFirstCharacter } from '~/lib/utils/text_utility';
+import * as transformationUtils from '~/alerts_settings/utils/mapping_transformations';
+import alertFields from '../mocks/alertFields.json';
describe('AlertMappingBuilder', () => {
let wrapper;
@@ -10,8 +12,9 @@ describe('AlertMappingBuilder', () => {
function mountComponent() {
wrapper = shallowMount(AlertMappingBuilder, {
propsData: {
- payloadFields: parsedMapping.samplePayload.payloadAlerFields.nodes,
- mapping: parsedMapping.storedMapping.nodes,
+ parsedPayload: parsedMapping.samplePayload.payloadAlerFields.nodes,
+ savedMapping: parsedMapping.storedMapping.nodes,
+ alertFields,
},
});
}
@@ -42,52 +45,58 @@ describe('AlertMappingBuilder', () => {
});
it('renders disabled form input for each mapped field', () => {
- gitlabFields.forEach((field, index) => {
+ alertFields.forEach((field, index) => {
const input = findColumnInRow(index + 1, 0).find(GlFormInput);
- expect(input.attributes('value')).toBe(`${field.label} (${field.type.join(' or ')})`);
+ const types = field.types.map((t) => capitalizeFirstCharacter(t.toLowerCase())).join(' or ');
+ expect(input.attributes('value')).toBe(`${field.label} (${types})`);
expect(input.attributes('disabled')).toBe('');
});
});
it('renders right arrow next to each input', () => {
- gitlabFields.forEach((field, index) => {
+ alertFields.forEach((field, index) => {
const arrow = findColumnInRow(index + 1, 1).find('.right-arrow');
expect(arrow.exists()).toBe(true);
});
});
it('renders mapping dropdown for each field', () => {
- gitlabFields.forEach(({ compatibleTypes }, index) => {
+ alertFields.forEach(({ types }, index) => {
const dropdown = findColumnInRow(index + 1, 2).find(GlDropdown);
- const searchBox = dropdown.find(GlSearchBoxByType);
- const dropdownItems = dropdown.findAll(GlDropdownItem);
+ const searchBox = dropdown.findComponent(GlSearchBoxByType);
+ const dropdownItems = dropdown.findAllComponents(GlDropdownItem);
const { nodes } = parsedMapping.samplePayload.payloadAlerFields;
- const numberOfMappingOptions = nodes.filter(({ type }) =>
- type.some((t) => compatibleTypes.includes(t)),
- );
+ const mappingOptions = nodes.filter(({ type }) => types.includes(type));
expect(dropdown.exists()).toBe(true);
expect(searchBox.exists()).toBe(true);
- expect(dropdownItems).toHaveLength(numberOfMappingOptions.length);
+ expect(dropdownItems).toHaveLength(mappingOptions.length);
});
});
it('renders fallback dropdown only for the fields that have fallback', () => {
- gitlabFields.forEach(({ compatibleTypes, numberOfFallbacks }, index) => {
+ alertFields.forEach(({ types, numberOfFallbacks }, index) => {
const dropdown = findColumnInRow(index + 1, 3).find(GlDropdown);
expect(dropdown.exists()).toBe(Boolean(numberOfFallbacks));
if (numberOfFallbacks) {
- const searchBox = dropdown.find(GlSearchBoxByType);
- const dropdownItems = dropdown.findAll(GlDropdownItem);
+ const searchBox = dropdown.findComponent(GlSearchBoxByType);
+ const dropdownItems = dropdown.findAllComponents(GlDropdownItem);
const { nodes } = parsedMapping.samplePayload.payloadAlerFields;
- const numberOfMappingOptions = nodes.filter(({ type }) =>
- type.some((t) => compatibleTypes.includes(t)),
- );
+ const mappingOptions = nodes.filter(({ type }) => types.includes(type));
expect(searchBox.exists()).toBe(Boolean(numberOfFallbacks));
- expect(dropdownItems).toHaveLength(numberOfMappingOptions.length);
+ expect(dropdownItems).toHaveLength(mappingOptions.length);
}
});
});
+
+ it('emits event with selected mapping', () => {
+ const mappingToSave = { fieldName: 'TITLE', mapping: 'PARSED_TITLE' };
+ jest.spyOn(transformationUtils, 'transformForSave').mockReturnValue(mappingToSave);
+ const dropdown = findColumnInRow(1, 2).find(GlDropdown);
+ const option = dropdown.find(GlDropdownItem);
+ option.vm.$emit('click');
+ expect(wrapper.emitted('onMappingUpdate')[0]).toEqual([mappingToSave]);
+ });
});
diff --git a/spec/frontend/alerts_settings/alerts_integrations_list_spec.js b/spec/frontend/alerts_settings/components/alerts_integrations_list_spec.js
index 5a3874d055b..5a3874d055b 100644
--- a/spec/frontend/alerts_settings/alerts_integrations_list_spec.js
+++ b/spec/frontend/alerts_settings/components/alerts_integrations_list_spec.js
diff --git a/spec/frontend/alerts_settings/alerts_settings_form_spec.js b/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js
index 21cdec6f94c..b43c1318a86 100644
--- a/spec/frontend/alerts_settings/alerts_settings_form_spec.js
+++ b/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js
@@ -9,10 +9,12 @@ import {
} from '@gitlab/ui';
import waitForPromises from 'helpers/wait_for_promises';
import AlertsSettingsForm from '~/alerts_settings/components/alerts_settings_form.vue';
-import { defaultAlertSettingsConfig } from './util';
+import MappingBuilder from '~/alerts_settings/components/alert_mapping_builder.vue';
import { typeSet } from '~/alerts_settings/constants';
+import alertFields from '../mocks/alertFields.json';
+import { defaultAlertSettingsConfig } from './util';
-describe('AlertsSettingsFormNew', () => {
+describe('AlertsSettingsForm', () => {
let wrapper;
const mockToastShow = jest.fn();
@@ -20,6 +22,7 @@ describe('AlertsSettingsFormNew', () => {
data = {},
props = {},
multipleHttpIntegrationsCustomMapping = false,
+ multiIntegrations = true,
} = {}) => {
wrapper = mount(AlertsSettingsForm, {
data() {
@@ -31,8 +34,9 @@ describe('AlertsSettingsFormNew', () => {
...props,
},
provide: {
- glFeatures: { multipleHttpIntegrationsCustomMapping },
...defaultAlertSettingsConfig,
+ glFeatures: { multipleHttpIntegrationsCustomMapping },
+ multiIntegrations,
},
mocks: {
$toast: {
@@ -49,6 +53,7 @@ describe('AlertsSettingsFormNew', () => {
const findFormToggle = () => wrapper.find(GlToggle);
const findTestPayloadSection = () => wrapper.find(`[id = "test-integration"]`);
const findMappingBuilderSection = () => wrapper.find(`[id = "mapping-builder"]`);
+ const findMappingBuilder = () => wrapper.findComponent(MappingBuilder);
const findSubmitButton = () => wrapper.find(`[type = "submit"]`);
const findMultiSupportText = () =>
wrapper.find(`[data-testid="multi-integrations-not-supported"]`);
@@ -63,13 +68,23 @@ describe('AlertsSettingsFormNew', () => {
}
});
+ const selectOptionAtIndex = async (index) => {
+ const options = findSelect().findAll('option');
+ await options.at(index).setSelected();
+ };
+
+ const enableIntegration = (index, value) => {
+ findFormFields().at(index).setValue(value);
+ findFormToggle().trigger('click');
+ };
+
describe('with default values', () => {
beforeEach(() => {
createComponent();
});
it('renders the initial template', () => {
- expect(wrapper.html()).toMatchSnapshot();
+ expect(wrapper.element).toMatchSnapshot();
});
it('render the initial form with only an integration type dropdown', () => {
@@ -80,10 +95,7 @@ describe('AlertsSettingsFormNew', () => {
});
it('shows the rest of the form when the dropdown is used', async () => {
- const options = findSelect().findAll('option');
- await options.at(1).setSelected();
-
- await wrapper.vm.$nextTick();
+ await selectOptionAtIndex(1);
expect(findFormFields().at(0).isVisible()).toBe(true);
});
@@ -96,120 +108,132 @@ describe('AlertsSettingsFormNew', () => {
it('disabled the name input when the selected value is prometheus', async () => {
createComponent();
- const options = findSelect().findAll('option');
- await options.at(2).setSelected();
+ await selectOptionAtIndex(2);
expect(findFormFields().at(0).attributes('disabled')).toBe('disabled');
});
});
describe('submitting integration form', () => {
- it('allows for create-new-integration with the correct form values for HTTP', async () => {
- createComponent();
+ describe('HTTP', () => {
+ it('create', async () => {
+ createComponent();
- const options = findSelect().findAll('option');
- await options.at(1).setSelected();
+ const integrationName = 'Test integration';
+ await selectOptionAtIndex(1);
+ enableIntegration(0, integrationName);
- await findFormFields().at(0).setValue('Test integration');
- await findFormToggle().trigger('click');
+ const submitBtn = findSubmitButton();
+ expect(submitBtn.exists()).toBe(true);
+ expect(submitBtn.text()).toBe('Save integration');
- await wrapper.vm.$nextTick();
+ findForm().trigger('submit');
- expect(findSubmitButton().exists()).toBe(true);
- expect(findSubmitButton().text()).toBe('Save integration');
-
- findForm().trigger('submit');
-
- await wrapper.vm.$nextTick();
-
- expect(wrapper.emitted('create-new-integration')).toBeTruthy();
- expect(wrapper.emitted('create-new-integration')[0]).toEqual([
- { type: typeSet.http, variables: { name: 'Test integration', active: true } },
- ]);
- });
+ expect(wrapper.emitted('create-new-integration')[0]).toEqual([
+ { type: typeSet.http, variables: { name: integrationName, active: true } },
+ ]);
+ });
- it('allows for create-new-integration with the correct form values for PROMETHEUS', async () => {
- createComponent();
+ it('create with custom mapping', async () => {
+ createComponent({
+ multipleHttpIntegrationsCustomMapping: true,
+ multiIntegrations: true,
+ props: { alertFields },
+ });
- const options = findSelect().findAll('option');
- await options.at(2).setSelected();
+ const integrationName = 'Test integration';
+ await selectOptionAtIndex(1);
- await findFormFields().at(0).setValue('Test integration');
- await findFormFields().at(1).setValue('https://test.com');
- await findFormToggle().trigger('click');
+ enableIntegration(0, integrationName);
- await wrapper.vm.$nextTick();
+ const sampleMapping = { field: 'test' };
+ findMappingBuilder().vm.$emit('onMappingUpdate', sampleMapping);
+ findForm().trigger('submit');
- expect(findSubmitButton().exists()).toBe(true);
- expect(findSubmitButton().text()).toBe('Save integration');
+ expect(wrapper.emitted('create-new-integration')[0]).toEqual([
+ {
+ type: typeSet.http,
+ variables: {
+ name: integrationName,
+ active: true,
+ payloadAttributeMappings: sampleMapping,
+ payloadExample: null,
+ },
+ },
+ ]);
+ });
- findForm().trigger('submit');
+ it('update', () => {
+ createComponent({
+ data: {
+ selectedIntegration: typeSet.http,
+ currentIntegration: { id: '1', name: 'Test integration pre' },
+ },
+ props: {
+ loading: false,
+ },
+ });
+ const updatedIntegrationName = 'Test integration post';
+ enableIntegration(0, updatedIntegrationName);
- await wrapper.vm.$nextTick();
+ const submitBtn = findSubmitButton();
+ expect(submitBtn.exists()).toBe(true);
+ expect(submitBtn.text()).toBe('Save integration');
- expect(wrapper.emitted('create-new-integration')).toBeTruthy();
- expect(wrapper.emitted('create-new-integration')[0]).toEqual([
- { type: typeSet.prometheus, variables: { apiUrl: 'https://test.com', active: true } },
- ]);
- });
+ findForm().trigger('submit');
- it('allows for update-integration with the correct form values for HTTP', async () => {
- createComponent({
- data: {
- selectedIntegration: typeSet.http,
- currentIntegration: { id: '1', name: 'Test integration pre' },
- },
- props: {
- loading: false,
- },
+ expect(wrapper.emitted('update-integration')[0]).toEqual([
+ { type: typeSet.http, variables: { name: updatedIntegrationName, active: true } },
+ ]);
});
+ });
- await findFormFields().at(0).setValue('Test integration post');
- await findFormToggle().trigger('click');
+ describe('PROMETHEUS', () => {
+ it('create', async () => {
+ createComponent();
- await wrapper.vm.$nextTick();
+ await selectOptionAtIndex(2);
- expect(findSubmitButton().exists()).toBe(true);
- expect(findSubmitButton().text()).toBe('Save integration');
+ const apiUrl = 'https://test.com';
+ enableIntegration(1, apiUrl);
- findForm().trigger('submit');
+ findFormToggle().trigger('click');
- await wrapper.vm.$nextTick();
+ const submitBtn = findSubmitButton();
+ expect(submitBtn.exists()).toBe(true);
+ expect(submitBtn.text()).toBe('Save integration');
- expect(wrapper.emitted('update-integration')).toBeTruthy();
- expect(wrapper.emitted('update-integration')[0]).toEqual([
- { type: typeSet.http, variables: { name: 'Test integration post', active: true } },
- ]);
- });
+ findForm().trigger('submit');
- it('allows for update-integration with the correct form values for PROMETHEUS', async () => {
- createComponent({
- data: {
- selectedIntegration: typeSet.prometheus,
- currentIntegration: { id: '1', apiUrl: 'https://test-pre.com' },
- },
- props: {
- loading: false,
- },
+ expect(wrapper.emitted('create-new-integration')[0]).toEqual([
+ { type: typeSet.prometheus, variables: { apiUrl, active: true } },
+ ]);
});
- await findFormFields().at(0).setValue('Test integration');
- await findFormFields().at(1).setValue('https://test-post.com');
- await findFormToggle().trigger('click');
-
- await wrapper.vm.$nextTick();
+ it('update', () => {
+ createComponent({
+ data: {
+ selectedIntegration: typeSet.prometheus,
+ currentIntegration: { id: '1', apiUrl: 'https://test-pre.com' },
+ },
+ props: {
+ loading: false,
+ },
+ });
- expect(findSubmitButton().exists()).toBe(true);
- expect(findSubmitButton().text()).toBe('Save integration');
+ const apiUrl = 'https://test-post.com';
+ enableIntegration(1, apiUrl);
- findForm().trigger('submit');
+ const submitBtn = findSubmitButton();
+ expect(submitBtn.exists()).toBe(true);
+ expect(submitBtn.text()).toBe('Save integration');
- await wrapper.vm.$nextTick();
+ findForm().trigger('submit');
- expect(wrapper.emitted('update-integration')).toBeTruthy();
- expect(wrapper.emitted('update-integration')[0]).toEqual([
- { type: typeSet.prometheus, variables: { apiUrl: 'https://test-post.com', active: true } },
- ]);
+ expect(wrapper.emitted('update-integration')[0]).toEqual([
+ { type: typeSet.prometheus, variables: { apiUrl, active: true } },
+ ]);
+ });
});
});
@@ -234,9 +258,10 @@ describe('AlertsSettingsFormNew', () => {
jest.runAllTimers();
await wrapper.vm.$nextTick();
- expect(findJsonTestSubmit().exists()).toBe(true);
- expect(findJsonTestSubmit().text()).toBe('Save and test payload');
- expect(findJsonTestSubmit().props('disabled')).toBe(true);
+ const jsonTestSubmit = findJsonTestSubmit();
+ expect(jsonTestSubmit.exists()).toBe(true);
+ expect(jsonTestSubmit.text()).toBe('Save and test payload');
+ expect(jsonTestSubmit.props('disabled')).toBe(true);
});
it('should allow for the form to be automatically saved if the test payload is successfully submitted', async () => {
@@ -257,6 +282,7 @@ describe('AlertsSettingsFormNew', () => {
currentIntegration: {
type: typeSet.http,
},
+ alertFields,
},
});
});
@@ -329,21 +355,29 @@ describe('AlertsSettingsFormNew', () => {
describe('Mapping builder section', () => {
describe.each`
- featureFlag | integrationOption | visible
- ${true} | ${1} | ${true}
- ${true} | ${2} | ${false}
- ${false} | ${1} | ${false}
- ${false} | ${2} | ${false}
- `('', ({ featureFlag, integrationOption, visible }) => {
+ alertFieldsProvided | multiIntegrations | featureFlag | integrationOption | visible
+ ${true} | ${true} | ${true} | ${1} | ${true}
+ ${true} | ${true} | ${true} | ${2} | ${false}
+ ${true} | ${true} | ${false} | ${1} | ${false}
+ ${true} | ${true} | ${false} | ${2} | ${false}
+ ${true} | ${false} | ${true} | ${1} | ${false}
+ ${false} | ${true} | ${true} | ${1} | ${false}
+ `('', ({ alertFieldsProvided, multiIntegrations, featureFlag, integrationOption, visible }) => {
const visibleMsg = visible ? 'is rendered' : 'is not rendered';
const featureFlagMsg = featureFlag ? 'is enabled' : 'is disabled';
+ const alertFieldsMsg = alertFieldsProvided ? 'are provided' : 'are not provided';
const integrationType = integrationOption === 1 ? typeSet.http : typeSet.prometheus;
- it(`${visibleMsg} when multipleHttpIntegrationsCustomMapping feature flag ${featureFlagMsg} and integration type is ${integrationType}`, async () => {
- createComponent({ multipleHttpIntegrationsCustomMapping: featureFlag });
- const options = findSelect().findAll('option');
- options.at(integrationOption).setSelected();
- await wrapper.vm.$nextTick();
+ it(`${visibleMsg} when multipleHttpIntegrationsCustomMapping feature flag ${featureFlagMsg} and integration type is ${integrationType} and alert fields ${alertFieldsMsg}`, async () => {
+ createComponent({
+ multipleHttpIntegrationsCustomMapping: featureFlag,
+ multiIntegrations,
+ props: {
+ alertFields: alertFieldsProvided ? alertFields : [],
+ },
+ });
+ await selectOptionAtIndex(integrationOption);
+
expect(findMappingBuilderSection().exists()).toBe(visible);
});
});
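The mapping-builder matrix above uses Jest's tagged-template form of describe.each. For reference, a minimal standalone sketch of the pattern; createComponent and findMappingBuilderSection stand in for the helpers defined earlier in that spec:

    describe.each`
      featureFlag | visible
      ${true}     | ${true}
      ${false}    | ${false}
    `('mapping builder', ({ featureFlag, visible }) => {
      it(`renders: ${visible} when the feature flag is ${featureFlag}`, () => {
        // The table columns are destructured into the test callback once per row.
        createComponent({ multipleHttpIntegrationsCustomMapping: featureFlag });
        expect(findMappingBuilderSection().exists()).toBe(visible);
      });
    });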
diff --git a/spec/frontend/alerts_settings/alerts_settings_wrapper_spec.js b/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js
index 4d0732ca76c..79a7cd93f01 100644
--- a/spec/frontend/alerts_settings/alerts_settings_wrapper_spec.js
+++ b/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js
@@ -1,10 +1,10 @@
import VueApollo from 'vue-apollo';
import { mount, createLocalVue } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
+import { GlLoadingIcon } from '@gitlab/ui';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { useMockIntersectionObserver } from 'helpers/mock_dom_observer';
-import { GlLoadingIcon } from '@gitlab/ui';
import axios from '~/lib/utils/axios_utils';
import AlertsSettingsWrapper from '~/alerts_settings/components/alerts_settings_wrapper.vue';
import AlertsSettingsForm from '~/alerts_settings/components/alerts_settings_form.vue';
diff --git a/spec/frontend/alerts_settings/mocks/apollo_mock.js b/spec/frontend/alerts_settings/components/mocks/apollo_mock.js
index e0eba1e8421..e0eba1e8421 100644
--- a/spec/frontend/alerts_settings/mocks/apollo_mock.js
+++ b/spec/frontend/alerts_settings/components/mocks/apollo_mock.js
diff --git a/spec/frontend/alerts_settings/mocks/integrations.json b/spec/frontend/alerts_settings/components/mocks/integrations.json
index b1284fc55a2..b1284fc55a2 100644
--- a/spec/frontend/alerts_settings/mocks/integrations.json
+++ b/spec/frontend/alerts_settings/components/mocks/integrations.json
diff --git a/spec/frontend/alerts_settings/util.js b/spec/frontend/alerts_settings/components/util.js
index 5c07f22f1c9..5c07f22f1c9 100644
--- a/spec/frontend/alerts_settings/util.js
+++ b/spec/frontend/alerts_settings/components/util.js
diff --git a/spec/frontend/alerts_settings/mocks/alertFields.json b/spec/frontend/alerts_settings/mocks/alertFields.json
new file mode 100644
index 00000000000..ffe59dd0c05
--- /dev/null
+++ b/spec/frontend/alerts_settings/mocks/alertFields.json
@@ -0,0 +1,123 @@
+[
+ {
+ "name": "title",
+ "label": "Title",
+ "type": [
+ "string"
+ ],
+ "types": [
+ "string",
+ "number",
+ "datetime"
+ ],
+ "numberOfFallbacks": 1
+ },
+ {
+ "name": "description",
+ "label": "Description",
+ "type": [
+ "string"
+ ],
+ "types": [
+ "string",
+ "number",
+ "datetime"
+ ]
+ },
+ {
+ "name": "start_time",
+ "label": "Start time",
+ "type": [
+ "datetime"
+ ],
+ "types": [
+ "number",
+ "datetime"
+ ]
+ },
+ {
+ "name": "end_time",
+ "label": "End time",
+ "type": [
+ "datetime"
+ ],
+ "types": [
+ "number",
+ "datetime"
+ ]
+ },
+ {
+ "name": "service",
+ "label": "Service",
+ "type": [
+ "string"
+ ],
+ "types": [
+ "string",
+ "number",
+ "datetime"
+ ]
+ },
+ {
+ "name": "monitoring_tool",
+ "label": "Monitoring tool",
+ "type": [
+ "string"
+ ],
+ "types": [
+ "string",
+ "number",
+ "datetime"
+ ]
+ },
+ {
+ "name": "hosts",
+ "label": "Hosts",
+ "type": [
+ "string",
+ "ARRAY"
+ ],
+ "types": [
+ "string",
+ "ARRAY",
+ "number",
+ "datetime"
+ ]
+ },
+ {
+ "name": "severity",
+ "label": "Severity",
+ "type": [
+ "string"
+ ],
+ "types": [
+ "string",
+ "number",
+ "datetime"
+ ]
+ },
+ {
+ "name": "fingerprint",
+ "label": "Fingerprint",
+ "type": [
+ "string"
+ ],
+ "types": [
+ "string",
+ "number",
+ "datetime"
+ ]
+ },
+ {
+ "name": "gitlab_environment_name",
+ "label": "Environment",
+ "type": [
+ "string"
+ ],
+ "types": [
+ "string",
+ "number",
+ "datetime"
+ ]
+ }
+]
diff --git a/spec/frontend/alerts_settings/utils/mapping_transformations_spec.js b/spec/frontend/alerts_settings/utils/mapping_transformations_spec.js
new file mode 100644
index 00000000000..f8d2a7aa23b
--- /dev/null
+++ b/spec/frontend/alerts_settings/utils/mapping_transformations_spec.js
@@ -0,0 +1,81 @@
+import {
+ getMappingData,
+ getPayloadFields,
+ transformForSave,
+} from '~/alerts_settings/utils/mapping_transformations';
+import parsedMapping from '~/alerts_settings/components/mocks/parsedMapping.json';
+import alertFields from '../mocks/alertFields.json';
+
+describe('Mapping Transformation Utilities', () => {
+ const nameField = {
+ label: 'Name',
+ path: ['alert', 'name'],
+ type: 'string',
+ };
+ const dashboardField = {
+ label: 'Dashboard Id',
+ path: ['alert', 'dashboardId'],
+ type: 'string',
+ };
+
+ describe('getMappingData', () => {
+ it('should return mapping data', () => {
+ const result = getMappingData(
+ alertFields,
+ getPayloadFields(parsedMapping.samplePayload.payloadAlerFields.nodes.slice(0, 3)),
+ parsedMapping.storedMapping.nodes.slice(0, 3),
+ );
+
+ result.forEach((data, index) => {
+ expect(data).toEqual(
+ expect.objectContaining({
+ ...alertFields[index],
+ searchTerm: '',
+ fallbackSearchTerm: '',
+ }),
+ );
+ });
+ });
+ });
+
+ describe('transformForSave', () => {
+ it('should transform mapped data for save', () => {
+ const fieldName = 'title';
+ const mockMappingData = [
+ {
+ name: fieldName,
+ mapping: 'alert_name',
+ mappingFields: getPayloadFields([dashboardField, nameField]),
+ },
+ ];
+ const result = transformForSave(mockMappingData);
+ const { path, type, label } = nameField;
+ expect(result).toEqual([
+ { fieldName: fieldName.toUpperCase(), path, type: type.toUpperCase(), label },
+ ]);
+ });
+
+ it('should return empty array if no mapping provided', () => {
+ const fieldName = 'title';
+ const mockMappingData = [
+ {
+ name: fieldName,
+ mapping: null,
+ mappingFields: getPayloadFields([nameField, dashboardField]),
+ },
+ ];
+ const result = transformForSave(mockMappingData);
+ expect(result).toEqual([]);
+ });
+ });
+
+ describe('getPayloadFields', () => {
+ it('should add name field to each payload field', () => {
+ const result = getPayloadFields([nameField, dashboardField]);
+ expect(result).toEqual([
+ { ...nameField, name: 'alert_name' },
+ { ...dashboardField, name: 'alert_dashboardId' },
+ ]);
+ });
+ });
+});
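A sketch of a transformForSave shape that would satisfy the expectations above, inferred from this spec rather than taken from ~/alerts_settings/utils/mapping_transformations:

    // Inferred sketch; the shipped implementation may differ in details.
    const transformForSave = (mappingData) =>
      mappingData.reduce((acc, { name, mapping, mappingFields }) => {
        const mappedField = mapping && mappingFields.find((field) => field.name === mapping);
        if (mappedField) {
          const { path, type, label } = mappedField;
          acc.push({ fieldName: name.toUpperCase(), path, type: type.toUpperCase(), label });
        }
        return acc;
      }, []);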
diff --git a/spec/frontend/api/api_utils_spec.js b/spec/frontend/api/api_utils_spec.js
index 3fec26f0149..04be442ef71 100644
--- a/spec/frontend/api/api_utils_spec.js
+++ b/spec/frontend/api/api_utils_spec.js
@@ -20,6 +20,10 @@ describe('~/api/api_utils.js', () => {
);
});
+ it('ensures the URL is prefixed with a /', () => {
+ expect(apiUtils.buildApiUrl('api/:version/projects/:id')).toEqual('/api/v7/projects/:id');
+ });
+
describe('when gon includes a relative_url_root property', () => {
beforeEach(() => {
window.gon.relative_url_root = '/relative/root';
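The new assertion checks that buildApiUrl adds a leading slash when it is missing. A minimal sketch of behavior consistent with these tests, assuming gon.api_version and gon.relative_url_root are set up elsewhere in the spec (the real ~/api/api_utils.js is not shown here):

    // Sketch only, inferred from the assertions above.
    export function buildApiUrl(url) {
      const { relative_url_root: root = '', api_version: version } = window.gon || {};
      const prefixed = url.startsWith('/') ? url : `/${url}`;
      return `${root}${prefixed}`.replace(':version', version);
    }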
diff --git a/spec/frontend/api_spec.js b/spec/frontend/api_spec.js
index 76d67195499..6bfc4a7b7c8 100644
--- a/spec/frontend/api_spec.js
+++ b/spec/frontend/api_spec.js
@@ -260,6 +260,28 @@ describe('Api', () => {
});
});
+ describe('groupLabels', () => {
+ it('fetches group labels', (done) => {
+ const options = { params: { search: 'foo' } };
+ const expectedGroup = 'gitlab-org';
+ const expectedUrl = `${dummyUrlRoot}/groups/${expectedGroup}/-/labels`;
+ mock.onGet(expectedUrl).reply(httpStatus.OK, [
+ {
+ id: 1,
+ title: 'Foo Label',
+ },
+ ]);
+
+ Api.groupLabels(expectedGroup, options)
+ .then((res) => {
+ expect(res.length).toBe(1);
+ expect(res[0].title).toBe('Foo Label');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
describe('namespaces', () => {
it('fetches namespaces', (done) => {
const query = 'dummy query';
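The groupLabels test stubs the non-API group labels endpoint and expects the resolved value to be the unwrapped label array. A hypothetical shape of the method, inferred from the test; Api.buildUrl and the path template are assumptions, not the real ~/api.js code:

    // Hypothetical sketch only.
    groupLabels(namespace, options = {}) {
      const url = Api.buildUrl('/groups/:namespace_path/-/labels').replace(
        ':namespace_path',
        namespace,
      );
      return axios.get(url, options).then(({ data }) => data);
    },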
diff --git a/spec/frontend/behaviors/autosize_spec.js b/spec/frontend/behaviors/autosize_spec.js
index 352bd8a0ed0..a9dbee7fd08 100644
--- a/spec/frontend/behaviors/autosize_spec.js
+++ b/spec/frontend/behaviors/autosize_spec.js
@@ -1,12 +1,20 @@
import '~/behaviors/autosize';
-function load() {
- document.dispatchEvent(new Event('DOMContentLoaded'));
-}
-
jest.mock('~/helpers/startup_css_helper', () => {
return {
- waitForCSSLoaded: jest.fn().mockImplementation((cb) => cb.apply()),
+ waitForCSSLoaded: jest.fn().mockImplementation((cb) => {
+ // This is a hack:
+ // autosize.js will execute and modify the DOM
+ // whenever waitForCSSLoaded calls its callback function.
+      // This setTimeout is here because everything within setTimeout is queued
+      // as async work that only runs after the current call stack has executed.
+      // Without it, the mock for waitForCSSLoaded would call its callback
+      // before the fixture in the beforeEach is set and the test would fail.
+      // More on this here: https://johnresig.com/blog/how-javascript-timers-work/
+ setTimeout(() => {
+ cb.apply();
+ }, 0);
+ }),
};
});
@@ -16,9 +24,15 @@ describe('Autosize behavior', () => {
});
it('is applied to the textarea', () => {
- load();
-
- const textarea = document.querySelector('textarea');
- expect(textarea.classList).toContain('js-autosize-initialized');
+    // This is the second part of the hack:
+    // Because we force the mock for waitForCSSLoaded to call its callback at the
+    // very end of our call stack, this querySelector needs to run at the very end
+    // of the call stack as well. Without this setTimeout, the querySelector would
+    // run before the mock implementation called its callback, the DOM manipulation
+    // would not have happened yet, and the test would fail.
+ setTimeout(() => {
+ const textarea = document.querySelector('textarea');
+ expect(textarea.classList).toContain('js-autosize-initialized');
+ }, 0);
});
});
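Because the assertion above is scheduled inside a setTimeout, the test can finish before the expect ever runs. If real timers are in effect for this spec (an assumption), one way to keep the same mocked waitForCSSLoaded behaviour while making the assertion execute deterministically is to await a zero-delay timer instead:

    it('is applied to the textarea', async () => {
      // Awaiting a zero-delay timer flushes the queued macrotasks, so the mocked
      // waitForCSSLoaded callback (also queued via setTimeout) has already run.
      await new Promise((resolve) => setTimeout(resolve, 0));

      const textarea = document.querySelector('textarea');
      expect(textarea.classList).toContain('js-autosize-initialized');
    });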
diff --git a/spec/frontend/blob/components/__snapshots__/blob_header_spec.js.snap b/spec/frontend/blob/components/__snapshots__/blob_header_spec.js.snap
index b54efb93bc9..31fb6addcac 100644
--- a/spec/frontend/blob/components/__snapshots__/blob_header_spec.js.snap
+++ b/spec/frontend/blob/components/__snapshots__/blob_header_spec.js.snap
@@ -9,7 +9,7 @@ exports[`Blob Header Default Actions rendering matches the snapshot 1`] = `
/>
<div
- class="gl-display-none gl-display-sm-flex"
+ class="gl-display-none gl-sm-display-flex"
>
<viewer-switcher-stub
value="simple"
diff --git a/spec/frontend/blob/components/blob_content_spec.js b/spec/frontend/blob/components/blob_content_spec.js
index 3db95e5ad3f..44e2742745f 100644
--- a/spec/frontend/blob/components/blob_content_spec.js
+++ b/spec/frontend/blob/components/blob_content_spec.js
@@ -7,6 +7,7 @@ import {
BLOB_RENDER_EVENT_SHOW_SOURCE,
BLOB_RENDER_ERRORS,
} from '~/blob/components/constants';
+import { RichViewer, SimpleViewer } from '~/vue_shared/components/blob_viewers';
import {
Blob,
RichViewerMock,
@@ -14,7 +15,6 @@ import {
RichBlobContentMock,
SimpleBlobContentMock,
} from './mock_data';
-import { RichViewer, SimpleViewer } from '~/vue_shared/components/blob_viewers';
describe('Blob Content component', () => {
let wrapper;
diff --git a/spec/frontend/blob/components/blob_header_filepath_spec.js b/spec/frontend/blob/components/blob_header_filepath_spec.js
index 7b8b5050486..ed2d214c307 100644
--- a/spec/frontend/blob/components/blob_header_filepath_spec.js
+++ b/spec/frontend/blob/components/blob_header_filepath_spec.js
@@ -1,8 +1,8 @@
import { shallowMount } from '@vue/test-utils';
import BlobHeaderFilepath from '~/blob/components/blob_header_filepath.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
-import { Blob as MockBlob } from './mock_data';
import { numberToHumanSize } from '~/lib/utils/number_utils';
+import { Blob as MockBlob } from './mock_data';
jest.mock('~/lib/utils/number_utils', () => ({
numberToHumanSize: jest.fn(() => 'a lot'),
diff --git a/spec/frontend/blob/suggest_gitlab_ci_yml/components/popover_spec.js b/spec/frontend/blob/suggest_gitlab_ci_yml/components/popover_spec.js
index e55b8e4af24..c3e32f24d7f 100644
--- a/spec/frontend/blob/suggest_gitlab_ci_yml/components/popover_spec.js
+++ b/spec/frontend/blob/suggest_gitlab_ci_yml/components/popover_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
-import { mockTracking, unmockTracking, triggerEvent } from 'helpers/tracking_helper';
import { GlButton } from '@gitlab/ui';
+import { mockTracking, unmockTracking, triggerEvent } from 'helpers/tracking_helper';
import Popover from '~/blob/suggest_gitlab_ci_yml/components/popover.vue';
import * as utils from '~/lib/utils/common_utils';
diff --git a/spec/frontend/boards/board_list_deprecated_spec.js b/spec/frontend/boards/board_list_deprecated_spec.js
index 393d7f954b1..d1c9d648c96 100644
--- a/spec/frontend/boards/board_list_deprecated_spec.js
+++ b/spec/frontend/boards/board_list_deprecated_spec.js
@@ -9,9 +9,9 @@ import eventHub from '~/boards/eventhub';
import BoardList from '~/boards/components/board_list_deprecated.vue';
import '~/boards/models/issue';
import '~/boards/models/list';
-import { listObj, boardsMockInterceptor } from './mock_data';
import store from '~/boards/stores';
import boardsStore from '~/boards/stores/boards_store';
+import { listObj, boardsMockInterceptor } from './mock_data';
const createComponent = ({ done, listIssueProps = {}, componentProps = {}, listProps = {} }) => {
const el = document.createElement('div');
diff --git a/spec/frontend/boards/board_list_helper.js b/spec/frontend/boards/board_list_helper.js
index f82b1f7ed5c..7f0bad030fe 100644
--- a/spec/frontend/boards/board_list_helper.js
+++ b/spec/frontend/boards/board_list_helper.js
@@ -9,9 +9,9 @@ import BoardList from '~/boards/components/board_list_deprecated.vue';
import '~/boards/models/issue';
import '~/boards/models/list';
-import { listObj, boardsMockInterceptor } from './mock_data';
import store from '~/boards/stores';
import boardsStore from '~/boards/stores/boards_store';
+import { listObj, boardsMockInterceptor } from './mock_data';
window.Sortable = Sortable;
diff --git a/spec/frontend/boards/board_list_spec.js b/spec/frontend/boards/board_list_spec.js
index 1b62f25044e..05f4479959e 100644
--- a/spec/frontend/boards/board_list_spec.js
+++ b/spec/frontend/boards/board_list_spec.js
@@ -1,11 +1,11 @@
import Vuex from 'vuex';
-import { useFakeRequestAnimationFrame } from 'helpers/fake_request_animation_frame';
import { createLocalVue, mount } from '@vue/test-utils';
+import { useFakeRequestAnimationFrame } from 'helpers/fake_request_animation_frame';
import eventHub from '~/boards/eventhub';
import BoardList from '~/boards/components/board_list.vue';
import BoardCard from '~/boards/components/board_card.vue';
-import { mockList, mockIssuesByListId, issues, mockIssues } from './mock_data';
import defaultState from '~/boards/stores/state';
+import { mockList, mockIssuesByListId, issues, mockIssues } from './mock_data';
const localVue = createLocalVue();
localVue.use(Vuex);
diff --git a/spec/frontend/boards/boards_store_spec.js b/spec/frontend/boards/boards_store_spec.js
index f1d249ff069..b7a18c6c0ce 100644
--- a/spec/frontend/boards/boards_store_spec.js
+++ b/spec/frontend/boards/boards_store_spec.js
@@ -3,10 +3,10 @@ import { TEST_HOST } from 'helpers/test_constants';
import axios from '~/lib/utils/axios_utils';
import boardsStore from '~/boards/stores/boards_store';
import eventHub from '~/boards/eventhub';
-import { listObj, listObjDuplicate } from './mock_data';
import ListIssue from '~/boards/models/issue';
import List from '~/boards/models/list';
+import { listObj, listObjDuplicate } from './mock_data';
jest.mock('js-cookie');
diff --git a/spec/frontend/boards/boards_util_spec.js b/spec/frontend/boards/boards_util_spec.js
new file mode 100644
index 00000000000..0feb1411003
--- /dev/null
+++ b/spec/frontend/boards/boards_util_spec.js
@@ -0,0 +1,17 @@
+import { transformNotFilters } from '~/boards/boards_util';
+
+describe('transformNotFilters', () => {
+ const filters = {
+ 'not[labelName]': ['label'],
+ 'not[assigneeUsername]': 'assignee',
+ };
+
+  it('formats not filters by stripping the not[] prefix from keys', () => {
+ const result = transformNotFilters(filters);
+
+ expect(result).toEqual({
+ labelName: ['label'],
+ assigneeUsername: 'assignee',
+ });
+ });
+});
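A hypothetical transformNotFilters consistent with this expectation, inferred from the spec rather than copied from ~/boards/boards_util.js:

    // Inferred sketch; strips the not[...] wrapper from each key.
    const transformNotFilters = (filters) =>
      Object.keys(filters)
        .filter((key) => key.startsWith('not['))
        .reduce(
          (obj, key) => ({
            ...obj,
            [key.substring(4, key.length - 1)]: filters[key],
          }),
          {},
        );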
diff --git a/spec/frontend/boards/components/board_assignee_dropdown_spec.js b/spec/frontend/boards/components/board_assignee_dropdown_spec.js
index e52c14f9783..77d48c42d4b 100644
--- a/spec/frontend/boards/components/board_assignee_dropdown_spec.js
+++ b/spec/frontend/boards/components/board_assignee_dropdown_spec.js
@@ -6,8 +6,8 @@ import {
GlSearchBoxByType,
GlLoadingIcon,
} from '@gitlab/ui';
-import createMockApollo from 'helpers/mock_apollo_helper';
import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
import BoardAssigneeDropdown from '~/boards/components/board_assignee_dropdown.vue';
import IssuableAssignees from '~/sidebar/components/assignees/issuable_assignees.vue';
import MultiSelectDropdown from '~/vue_shared/components/sidebar/multiselect_dropdown.vue';
@@ -38,7 +38,7 @@ describe('BoardCardAssigneeDropdown', () => {
return {
search,
selected: [],
- participants,
+ issueParticipants: participants,
};
},
store,
@@ -49,7 +49,7 @@ describe('BoardCardAssigneeDropdown', () => {
mocks: {
$apollo: {
queries: {
- participants: {
+ searchUsers: {
loading,
},
},
@@ -70,7 +70,6 @@ describe('BoardCardAssigneeDropdown', () => {
return {
search,
selected: [],
- participants,
};
},
store,
@@ -256,17 +255,15 @@ describe('BoardCardAssigneeDropdown', () => {
},
);
- describe('when participants is loading', () => {
- beforeEach(() => {
- createComponent('', true);
- });
-
+  describe('when the user search is loading', () => {
it('finds a loading icon in the dropdown', () => {
+ createComponent('test', true);
+
expect(findLoadingIcon().exists()).toBe(true);
});
});
- describe('when participants is loading is false', () => {
+  describe('when participants are not loading', () => {
beforeEach(() => {
createComponent();
});
diff --git a/spec/frontend/boards/components/board_card_layout_deprecated_spec.js b/spec/frontend/boards/components/board_card_layout_deprecated_spec.js
new file mode 100644
index 00000000000..80f9c1a0545
--- /dev/null
+++ b/spec/frontend/boards/components/board_card_layout_deprecated_spec.js
@@ -0,0 +1,158 @@
+/* global List */
+/* global ListLabel */
+
+import Vuex from 'vuex';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+
+import MockAdapter from 'axios-mock-adapter';
+import waitForPromises from 'helpers/wait_for_promises';
+import axios from '~/lib/utils/axios_utils';
+
+import '~/boards/models/label';
+import '~/boards/models/assignee';
+import '~/boards/models/list';
+import boardsVuexStore from '~/boards/stores';
+import boardsStore from '~/boards/stores/boards_store';
+import BoardCardLayout from '~/boards/components/board_card_layout_deprecated.vue';
+import issueCardInner from '~/boards/components/issue_card_inner.vue';
+import { ISSUABLE } from '~/boards/constants';
+import { listObj, boardsMockInterceptor, setMockEndpoints } from '../mock_data';
+
+describe('Board card layout', () => {
+ let wrapper;
+ let mock;
+ let list;
+ let store;
+
+ const localVue = createLocalVue();
+ localVue.use(Vuex);
+
+ const createStore = ({ getters = {}, actions = {} } = {}) => {
+ store = new Vuex.Store({
+ ...boardsVuexStore,
+ actions,
+ getters,
+ });
+ };
+
+  // This mount helper must be called after the root beforeEach because it depends on list being initialized.
+ const mountComponent = ({ propsData = {}, provide = {} } = {}) => {
+ wrapper = shallowMount(BoardCardLayout, {
+ localVue,
+ stubs: {
+ issueCardInner,
+ },
+ store,
+ propsData: {
+ list,
+ issue: list.issues[0],
+ disabled: false,
+ index: 0,
+ ...propsData,
+ },
+ provide: {
+ groupId: null,
+ rootPath: '/',
+ scopedLabelsAvailable: false,
+ ...provide,
+ },
+ });
+ };
+
+ const setupData = () => {
+ list = new List(listObj);
+ boardsStore.create();
+ boardsStore.detail.issue = {};
+ const label1 = new ListLabel({
+ id: 3,
+ title: 'testing 123',
+ color: '#000cff',
+ text_color: 'white',
+ description: 'test',
+ });
+ return waitForPromises().then(() => {
+ list.issues[0].labels.push(label1);
+ });
+ };
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ mock.onAny().reply(boardsMockInterceptor);
+ setMockEndpoints();
+ return setupData();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ list = null;
+ mock.restore();
+ });
+
+ describe('mouse events', () => {
+ it('sets showDetail to true on mousedown', async () => {
+ createStore();
+ mountComponent();
+
+ wrapper.trigger('mousedown');
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.vm.showDetail).toBe(true);
+ });
+
+ it('sets showDetail to false on mousemove', async () => {
+ createStore();
+ mountComponent();
+ wrapper.trigger('mousedown');
+ await wrapper.vm.$nextTick();
+ expect(wrapper.vm.showDetail).toBe(true);
+ wrapper.trigger('mousemove');
+ await wrapper.vm.$nextTick();
+ expect(wrapper.vm.showDetail).toBe(false);
+ });
+
+ it("calls 'setActiveId' when 'graphqlBoardLists' feature flag is turned on", async () => {
+ const setActiveId = jest.fn();
+ createStore({
+ actions: {
+ setActiveId,
+ },
+ });
+ mountComponent({
+ provide: {
+ glFeatures: { graphqlBoardLists: true },
+ },
+ });
+
+ wrapper.trigger('mouseup');
+ await wrapper.vm.$nextTick();
+
+ expect(setActiveId).toHaveBeenCalledTimes(1);
+ expect(setActiveId).toHaveBeenCalledWith(expect.any(Object), {
+ id: list.issues[0].id,
+ sidebarType: ISSUABLE,
+ });
+ });
+
+ it("calls 'setActiveId' when epic swimlanes is active", async () => {
+ const setActiveId = jest.fn();
+ const isSwimlanesOn = () => true;
+ createStore({
+ getters: { isSwimlanesOn },
+ actions: {
+ setActiveId,
+ },
+ });
+ mountComponent();
+
+ wrapper.trigger('mouseup');
+ await wrapper.vm.$nextTick();
+
+ expect(setActiveId).toHaveBeenCalledTimes(1);
+ expect(setActiveId).toHaveBeenCalledWith(expect.any(Object), {
+ id: list.issues[0].id,
+ sidebarType: ISSUABLE,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/boards/components/board_card_layout_spec.js b/spec/frontend/boards/components/board_card_layout_spec.js
index d8633871e8d..3ce3d0100f8 100644
--- a/spec/frontend/boards/components/board_card_layout_spec.js
+++ b/spec/frontend/boards/components/board_card_layout_spec.js
@@ -1,28 +1,14 @@
-/* global List */
-/* global ListLabel */
-
import Vuex from 'vuex';
import { createLocalVue, shallowMount } from '@vue/test-utils';
-import MockAdapter from 'axios-mock-adapter';
-import waitForPromises from 'helpers/wait_for_promises';
-import axios from '~/lib/utils/axios_utils';
-
-import '~/boards/models/label';
-import '~/boards/models/assignee';
-import '~/boards/models/list';
-import boardsVuexStore from '~/boards/stores';
-import boardsStore from '~/boards/stores/boards_store';
+import defaultState from '~/boards/stores/state';
import BoardCardLayout from '~/boards/components/board_card_layout.vue';
-import issueCardInner from '~/boards/components/issue_card_inner.vue';
-import { listObj, boardsMockInterceptor, setMockEndpoints } from '../mock_data';
-
+import IssueCardInner from '~/boards/components/issue_card_inner.vue';
import { ISSUABLE } from '~/boards/constants';
+import { mockLabelList, mockIssue } from '../mock_data';
describe('Board card layout', () => {
let wrapper;
- let mock;
- let list;
let store;
const localVue = createLocalVue();
@@ -30,7 +16,7 @@ describe('Board card layout', () => {
const createStore = ({ getters = {}, actions = {} } = {}) => {
store = new Vuex.Store({
- ...boardsVuexStore,
+ state: defaultState,
actions,
getters,
});
@@ -41,12 +27,12 @@ describe('Board card layout', () => {
wrapper = shallowMount(BoardCardLayout, {
localVue,
stubs: {
- issueCardInner,
+ IssueCardInner,
},
store,
propsData: {
- list,
- issue: list.issues[0],
+ list: mockLabelList,
+ issue: mockIssue,
disabled: false,
index: 0,
...propsData,
@@ -60,34 +46,9 @@ describe('Board card layout', () => {
});
};
- const setupData = () => {
- list = new List(listObj);
- boardsStore.create();
- boardsStore.detail.issue = {};
- const label1 = new ListLabel({
- id: 3,
- title: 'testing 123',
- color: '#000cff',
- text_color: 'white',
- description: 'test',
- });
- return waitForPromises().then(() => {
- list.issues[0].labels.push(label1);
- });
- };
-
- beforeEach(() => {
- mock = new MockAdapter(axios);
- mock.onAny().reply(boardsMockInterceptor);
- setMockEndpoints();
- return setupData();
- });
-
afterEach(() => {
wrapper.destroy();
wrapper = null;
- list = null;
- mock.restore();
});
describe('mouse events', () => {
@@ -112,25 +73,21 @@ describe('Board card layout', () => {
expect(wrapper.vm.showDetail).toBe(false);
});
- it("calls 'setActiveId' when 'graphqlBoardLists' feature flag is turned on", async () => {
+ it("calls 'setActiveId'", async () => {
const setActiveId = jest.fn();
createStore({
actions: {
setActiveId,
},
});
- mountComponent({
- provide: {
- glFeatures: { graphqlBoardLists: true },
- },
- });
+ mountComponent();
wrapper.trigger('mouseup');
await wrapper.vm.$nextTick();
expect(setActiveId).toHaveBeenCalledTimes(1);
expect(setActiveId).toHaveBeenCalledWith(expect.any(Object), {
- id: list.issues[0].id,
+ id: mockIssue.id,
sidebarType: ISSUABLE,
});
});
@@ -151,7 +108,7 @@ describe('Board card layout', () => {
expect(setActiveId).toHaveBeenCalledTimes(1);
expect(setActiveId).toHaveBeenCalledWith(expect.any(Object), {
- id: list.issues[0].id,
+ id: mockIssue.id,
sidebarType: ISSUABLE,
});
});
diff --git a/spec/frontend/boards/components/board_configuration_options_spec.js b/spec/frontend/boards/components/board_configuration_options_spec.js
index d9614c254e2..6f0971a9458 100644
--- a/spec/frontend/boards/components/board_configuration_options_spec.js
+++ b/spec/frontend/boards/components/board_configuration_options_spec.js
@@ -7,6 +7,7 @@ describe('BoardConfigurationOptions', () => {
const defaultProps = {
hideBacklogList: false,
hideClosedList: false,
+ readonly: false,
};
const createComponent = (props = {}) => {
@@ -61,4 +62,18 @@ describe('BoardConfigurationOptions', () => {
expect(wrapper.emitted('update:hideClosedList')).toEqual([[true]]);
});
+
+ it('renders checkboxes disabled when user does not have edit rights', () => {
+ createComponent({ readonly: true });
+
+ expect(closedListCheckbox().attributes('disabled')).toBe('true');
+ expect(backlogListCheckbox().attributes('disabled')).toBe('true');
+ });
+
+ it('renders checkboxes enabled when user has edit rights', () => {
+ createComponent();
+
+ expect(closedListCheckbox().attributes('disabled')).toBeUndefined();
+ expect(backlogListCheckbox().attributes('disabled')).toBeUndefined();
+ });
});
diff --git a/spec/frontend/boards/components/board_content_spec.js b/spec/frontend/boards/components/board_content_spec.js
index 98be02d7dbf..ef2795030a0 100644
--- a/spec/frontend/boards/components/board_content_spec.js
+++ b/spec/frontend/boards/components/board_content_spec.js
@@ -5,8 +5,8 @@ import Draggable from 'vuedraggable';
import EpicsSwimlanes from 'ee_component/boards/components/epics_swimlanes.vue';
import getters from 'ee_else_ce/boards/stores/getters';
import BoardColumnDeprecated from '~/boards/components/board_column_deprecated.vue';
-import { mockLists, mockListsWithModel } from '../mock_data';
import BoardContent from '~/boards/components/board_content.vue';
+import { mockLists, mockListsWithModel } from '../mock_data';
const localVue = createLocalVue();
localVue.use(Vuex);
diff --git a/spec/frontend/boards/components/board_form_spec.js b/spec/frontend/boards/components/board_form_spec.js
index c34987a55de..acff37296ce 100644
--- a/spec/frontend/boards/components/board_form_spec.js
+++ b/spec/frontend/boards/components/board_form_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
-import { TEST_HOST } from 'helpers/test_constants';
import { GlModal } from '@gitlab/ui';
+import { TEST_HOST } from 'helpers/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
import { deprecatedCreateFlash as createFlash } from '~/flash';
diff --git a/spec/frontend/boards/components/board_list_header_deprecated_spec.js b/spec/frontend/boards/components/board_list_header_deprecated_spec.js
index 6207724e6a9..15e767e9d91 100644
--- a/spec/frontend/boards/components/board_list_header_deprecated_spec.js
+++ b/spec/frontend/boards/components/board_list_header_deprecated_spec.js
@@ -74,7 +74,13 @@ describe('Board List Header Component', () => {
describe('Add issue button', () => {
const hasNoAddButton = [ListType.closed];
- const hasAddButton = [ListType.backlog, ListType.label, ListType.milestone, ListType.assignee];
+ const hasAddButton = [
+ ListType.backlog,
+ ListType.label,
+ ListType.milestone,
+ ListType.iteration,
+ ListType.assignee,
+ ];
it.each(hasNoAddButton)('does not render when List Type is `%s`', (listType) => {
createComponent({ listType });
diff --git a/spec/frontend/boards/components/board_list_header_spec.js b/spec/frontend/boards/components/board_list_header_spec.js
index 357d05ced02..6d8c9e51d3f 100644
--- a/spec/frontend/boards/components/board_list_header_spec.js
+++ b/spec/frontend/boards/components/board_list_header_spec.js
@@ -78,7 +78,13 @@ describe('Board List Header Component', () => {
describe('Add issue button', () => {
const hasNoAddButton = [ListType.closed];
- const hasAddButton = [ListType.backlog, ListType.label, ListType.milestone, ListType.assignee];
+ const hasAddButton = [
+ ListType.backlog,
+ ListType.label,
+ ListType.milestone,
+ ListType.iteration,
+ ListType.assignee,
+ ];
it.each(hasNoAddButton)('does not render when List Type is `%s`', (listType) => {
createComponent({ listType });
@@ -167,7 +173,7 @@ describe('Board List Header Component', () => {
describe('user can drag', () => {
const cannotDragList = [ListType.backlog, ListType.closed];
- const canDragList = [ListType.label, ListType.milestone, ListType.assignee];
+ const canDragList = [ListType.label, ListType.milestone, ListType.iteration, ListType.assignee];
it.each(cannotDragList)(
'does not have user-can-drag-class so user cannot drag list',
diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_milestone_select_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_milestone_select_spec.js
index 74d88d9f34c..e6e2befedd0 100644
--- a/spec/frontend/boards/components/sidebar/board_sidebar_milestone_select_spec.js
+++ b/spec/frontend/boards/components/sidebar/board_sidebar_milestone_select_spec.js
@@ -20,7 +20,7 @@ describe('~/boards/components/sidebar/board_sidebar_milestone_select.vue', () =>
wrapper = null;
});
- const createWrapper = ({ milestone = null } = {}) => {
+ const createWrapper = ({ milestone = null, loading = false } = {}) => {
store = createStore();
store.state.issues = { [TEST_ISSUE.id]: { ...TEST_ISSUE, milestone } };
store.state.activeId = TEST_ISSUE.id;
@@ -38,7 +38,7 @@ describe('~/boards/components/sidebar/board_sidebar_milestone_select.vue', () =>
},
mocks: {
$apollo: {
- loading: false,
+ loading,
},
},
});
@@ -63,12 +63,7 @@ describe('~/boards/components/sidebar/board_sidebar_milestone_select.vue', () =>
});
it('shows loader while Apollo is loading', async () => {
- createWrapper({ milestone: TEST_MILESTONE });
-
- expect(findLoader().exists()).toBe(false);
-
- wrapper.vm.$apollo.loading = true;
- await wrapper.vm.$nextTick();
+ createWrapper({ milestone: TEST_MILESTONE, loading: true });
expect(findLoader().exists()).toBe(true);
});
@@ -76,8 +71,7 @@ describe('~/boards/components/sidebar/board_sidebar_milestone_select.vue', () =>
it('shows message when error or no milestones found', async () => {
createWrapper();
- wrapper.setData({ milestones: [] });
- await wrapper.vm.$nextTick();
+ await wrapper.setData({ milestones: [] });
expect(findNoMilestonesFoundItem().text()).toBe('No milestones found');
});
diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_subscription_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_subscription_spec.js
index b1df0f2d771..8e1285dcd07 100644
--- a/spec/frontend/boards/components/sidebar/board_sidebar_subscription_spec.js
+++ b/spec/frontend/boards/components/sidebar/board_sidebar_subscription_spec.js
@@ -4,8 +4,8 @@ import { GlToggle, GlLoadingIcon } from '@gitlab/ui';
import BoardSidebarSubscription from '~/boards/components/sidebar/board_sidebar_subscription.vue';
import * as types from '~/boards/stores/mutation_types';
import { createStore } from '~/boards/stores';
-import { mockActiveIssue } from '../../mock_data';
import createFlash from '~/flash';
+import { mockActiveIssue } from '../../mock_data';
jest.mock('~/flash.js');
diff --git a/spec/frontend/boards/issue_card_deprecated_spec.js b/spec/frontend/boards/issue_card_deprecated_spec.js
index fd7b0edb97e..2fefda5158b 100644
--- a/spec/frontend/boards/issue_card_deprecated_spec.js
+++ b/spec/frontend/boards/issue_card_deprecated_spec.js
@@ -7,8 +7,8 @@ import '~/boards/models/issue';
import '~/boards/models/list';
import { GlLabel } from '@gitlab/ui';
import IssueCardInner from '~/boards/components/issue_card_inner_deprecated.vue';
-import { listObj } from './mock_data';
import store from '~/boards/stores';
+import { listObj } from './mock_data';
describe('Issue card component', () => {
const user = new ListAssignee({
diff --git a/spec/frontend/boards/issue_card_inner_spec.js b/spec/frontend/boards/issue_card_inner_spec.js
index f9ad78494af..aa8f6b826b6 100644
--- a/spec/frontend/boards/issue_card_inner_spec.js
+++ b/spec/frontend/boards/issue_card_inner_spec.js
@@ -2,10 +2,10 @@ import { mount } from '@vue/test-utils';
import { range } from 'lodash';
import { GlLabel } from '@gitlab/ui';
import IssueCardInner from '~/boards/components/issue_card_inner.vue';
-import { mockLabelList } from './mock_data';
import defaultStore from '~/boards/stores';
import eventHub from '~/boards/eventhub';
import { updateHistory } from '~/lib/utils/url_utility';
+import { mockLabelList } from './mock_data';
jest.mock('~/lib/utils/url_utility');
jest.mock('~/boards/eventhub');
diff --git a/spec/frontend/boards/issue_spec.js b/spec/frontend/boards/issue_spec.js
index d68e17c06a7..1f354fb04db 100644
--- a/spec/frontend/boards/issue_spec.js
+++ b/spec/frontend/boards/issue_spec.js
@@ -41,7 +41,7 @@ describe('Issue model', () => {
});
expect(issue.labels.length).toBe(1);
- expect(issue.labels[0].color).toBe('red');
+ expect(issue.labels[0].color).toBe('#F0AD4E');
});
it('adds other label with same title', () => {
diff --git a/spec/frontend/boards/mock_data.js b/spec/frontend/boards/mock_data.js
index d5cfb9b7d07..6d60937f850 100644
--- a/spec/frontend/boards/mock_data.js
+++ b/spec/frontend/boards/mock_data.js
@@ -137,7 +137,7 @@ export const rawIssue = {
{
id: 1,
title: 'test',
- color: 'red',
+ color: '#F0AD4E',
description: 'testing',
},
],
@@ -165,7 +165,7 @@ export const mockIssue = {
{
id: 1,
title: 'test',
- color: 'red',
+ color: '#F0AD4E',
description: 'testing',
},
],
diff --git a/spec/frontend/boards/stores/actions_spec.js b/spec/frontend/boards/stores/actions_spec.js
index e4209cd5e55..60bcbd1ec92 100644
--- a/spec/frontend/boards/stores/actions_spec.js
+++ b/spec/frontend/boards/stores/actions_spec.js
@@ -1,16 +1,4 @@
import testAction from 'helpers/vuex_action_helper';
-import {
- mockLists,
- mockListsById,
- mockIssue,
- mockIssue2,
- rawIssue,
- mockIssues,
- mockMilestone,
- labels,
- mockActiveIssue,
- mockGroupProjects,
-} from '../mock_data';
import actions, { gqlClient } from '~/boards/stores/actions';
import * as types from '~/boards/stores/mutation_types';
import { inactiveId } from '~/boards/constants';
@@ -25,6 +13,18 @@ import {
formatIssueInput,
} from '~/boards/boards_util';
import createFlash from '~/flash';
+import {
+ mockLists,
+ mockListsById,
+ mockIssue,
+ mockIssue2,
+ rawIssue,
+ mockIssues,
+ mockMilestone,
+ labels,
+ mockActiveIssue,
+ mockGroupProjects,
+} from '../mock_data';
jest.mock('~/flash');
@@ -71,7 +71,7 @@ describe('setFilters', () => {
actions.setFilters,
filters,
state,
- [{ type: types.SET_FILTERS, payload: filters }],
+ [{ type: types.SET_FILTERS, payload: { ...filters, not: {} } }],
[],
done,
);
@@ -254,6 +254,27 @@ describe('createList', () => {
});
});
+describe('fetchLabels', () => {
+ it('should commit mutation RECEIVE_LABELS_SUCCESS on success', async () => {
+ const queryResponse = {
+ data: {
+ group: {
+ labels: {
+ nodes: labels,
+ },
+ },
+ },
+ };
+ jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
+
+ await testAction({
+ action: actions.fetchLabels,
+ state: { boardType: 'group' },
+ expectedMutations: [{ type: types.RECEIVE_LABELS_SUCCESS, payload: labels }],
+ });
+ });
+});
+
describe('moveList', () => {
it('should commit MOVE_LIST mutation and dispatch updateList action', (done) => {
const initialBoardListsState = {
@@ -1201,6 +1222,40 @@ describe('setSelectedProject', () => {
});
});
+describe('toggleBoardItemMultiSelection', () => {
+ const boardItem = mockIssue;
+
+  it('should commit mutation ADD_BOARD_ITEM_TO_SELECTION if item is not in the selection state', () => {
+ testAction(
+ actions.toggleBoardItemMultiSelection,
+ boardItem,
+ { selectedBoardItems: [] },
+ [
+ {
+ type: types.ADD_BOARD_ITEM_TO_SELECTION,
+ payload: boardItem,
+ },
+ ],
+ [],
+ );
+ });
+
+  it('should commit mutation REMOVE_BOARD_ITEM_FROM_SELECTION if item is in the selection state', () => {
+ testAction(
+ actions.toggleBoardItemMultiSelection,
+ boardItem,
+ { selectedBoardItems: [mockIssue] },
+ [
+ {
+ type: types.REMOVE_BOARD_ITEM_FROM_SELECTION,
+ payload: boardItem,
+ },
+ ],
+ [],
+ );
+ });
+});
+
describe('fetchBacklog', () => {
expectNotImplemented(actions.fetchBacklog);
});
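The fetchLabels spec above uses the object form of the testAction helper and asserts only the success mutation. A hypothetical action body consistent with it; boardLabelsQuery and the variables are assumptions, not the real ~/boards/stores/actions.js:

    // Hypothetical sketch only.
    fetchLabels: ({ state, commit }) => {
      return gqlClient
        .query({
          query: boardLabelsQuery,
          variables: { fullPath: state.fullPath, searchTerm: '' },
        })
        .then(({ data }) => {
          const labels = data[state.boardType].labels.nodes;
          commit(types.RECEIVE_LABELS_SUCCESS, labels);
        });
    },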
diff --git a/spec/frontend/boards/stores/getters_spec.js b/spec/frontend/boards/stores/getters_spec.js
index 44b41b5667d..74d597b9046 100644
--- a/spec/frontend/boards/stores/getters_spec.js
+++ b/spec/frontend/boards/stores/getters_spec.js
@@ -62,6 +62,22 @@ describe('Boards - Getters', () => {
});
});
+ describe('groupPathByIssueId', () => {
+ it('returns group path for the active issue', () => {
+ const mockActiveIssue = {
+ referencePath: 'gitlab-org/gitlab-test#1',
+ };
+ expect(getters.groupPathForActiveIssue({}, { activeIssue: mockActiveIssue })).toEqual(
+ 'gitlab-org',
+ );
+ });
+
+ it('returns empty string as group path when active issue is an empty object', () => {
+ const mockActiveIssue = {};
+ expect(getters.groupPathForActiveIssue({}, { activeIssue: mockActiveIssue })).toEqual('');
+ });
+ });
+
describe('projectPathByIssueId', () => {
it('returns project path for the active issue', () => {
const mockActiveIssue = {
@@ -72,7 +88,7 @@ describe('Boards - Getters', () => {
);
});
- it('returns empty string as project when active issue is an empty object', () => {
+ it('returns empty string as project path when active issue is an empty object', () => {
const mockActiveIssue = {};
expect(getters.projectPathForActiveIssue({}, { activeIssue: mockActiveIssue })).toEqual('');
});
diff --git a/spec/frontend/boards/stores/mutations_spec.js b/spec/frontend/boards/stores/mutations_spec.js
index c5fe0e22c3c..2b6548534c3 100644
--- a/spec/frontend/boards/stores/mutations_spec.js
+++ b/spec/frontend/boards/stores/mutations_spec.js
@@ -1,7 +1,14 @@
import mutations from '~/boards/stores/mutations';
import * as types from '~/boards/stores/mutation_types';
import defaultState from '~/boards/stores/state';
-import { mockLists, rawIssue, mockIssue, mockIssue2, mockGroupProjects } from '../mock_data';
+import {
+ mockLists,
+ rawIssue,
+ mockIssue,
+ mockIssue2,
+ mockGroupProjects,
+ labels,
+} from '../mock_data';
const expectNotImplemented = (action) => {
it('is not implemented', () => {
@@ -99,13 +106,11 @@ describe('Board Store Mutations', () => {
});
});
- describe('RECEIVE_LABELS_FAILURE', () => {
- it('sets error message', () => {
- mutations.RECEIVE_LABELS_FAILURE(state);
+ describe('RECEIVE_LABELS_SUCCESS', () => {
+ it('sets labels on state', () => {
+ mutations.RECEIVE_LABELS_SUCCESS(state, labels);
- expect(state.error).toEqual(
- 'An error occurred while fetching labels. Please reload the page.',
- );
+ expect(state.labels).toEqual(labels);
});
});
@@ -589,4 +594,27 @@ describe('Board Store Mutations', () => {
expect(state.selectedProject).toEqual(mockGroupProjects[0]);
});
});
+
+ describe('ADD_BOARD_ITEM_TO_SELECTION', () => {
+    it('should add boardItem to selectedBoardItems state', () => {
+ expect(state.selectedBoardItems).toEqual([]);
+
+ mutations[types.ADD_BOARD_ITEM_TO_SELECTION](state, mockIssue);
+
+ expect(state.selectedBoardItems).toEqual([mockIssue]);
+ });
+ });
+
+ describe('REMOVE_BOARD_ITEM_FROM_SELECTION', () => {
+    it('should remove boardItem from selectedBoardItems state', () => {
+ state = {
+ ...state,
+ selectedBoardItems: [mockIssue],
+ };
+
+ mutations[types.REMOVE_BOARD_ITEM_FROM_SELECTION](state, mockIssue);
+
+ expect(state.selectedBoardItems).toEqual([]);
+ });
+ });
});
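Hypothetical mutation bodies consistent with the two new selection specs above; mutationTypes is a stand-in here and this is not the actual ~/boards/stores/mutations.js code:

    // Sketch only, inferred from the expectations above.
    [mutationTypes.ADD_BOARD_ITEM_TO_SELECTION]: (state, boardItem) => {
      state.selectedBoardItems = [...state.selectedBoardItems, boardItem];
    },

    [mutationTypes.REMOVE_BOARD_ITEM_FROM_SELECTION]: (state, boardItem) => {
      state.selectedBoardItems = state.selectedBoardItems.filter((item) => item !== boardItem);
    },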
diff --git a/spec/frontend/captcha/init_recaptcha_script_spec.js b/spec/frontend/captcha/init_recaptcha_script_spec.js
new file mode 100644
index 00000000000..df114821651
--- /dev/null
+++ b/spec/frontend/captcha/init_recaptcha_script_spec.js
@@ -0,0 +1,49 @@
+import {
+ RECAPTCHA_API_URL_PREFIX,
+ RECAPTCHA_ONLOAD_CALLBACK_NAME,
+ clearMemoizeCache,
+ initRecaptchaScript,
+} from '~/captcha/init_recaptcha_script';
+
+describe('initRecaptchaScript', () => {
+ afterEach(() => {
+ document.head.innerHTML = '';
+ clearMemoizeCache();
+ });
+
+ const getScriptOnload = () => window[RECAPTCHA_ONLOAD_CALLBACK_NAME];
+ const triggerScriptOnload = (...args) => window[RECAPTCHA_ONLOAD_CALLBACK_NAME](...args);
+
+ describe('when called', () => {
+ let result;
+
+ beforeEach(() => {
+ result = initRecaptchaScript();
+ });
+
+ it('adds script to head', () => {
+ expect(document.head).toMatchInlineSnapshot(`
+ <head>
+ <script
+ class="js-recaptcha-script"
+ src="${RECAPTCHA_API_URL_PREFIX}?onload=${RECAPTCHA_ONLOAD_CALLBACK_NAME}&render=explicit"
+ />
+ </head>
+ `);
+ });
+
+ it('is memoized', () => {
+ expect(initRecaptchaScript()).toBe(result);
+ expect(document.head.querySelectorAll('script').length).toBe(1);
+ });
+
+ it('when onload is triggered, resolves promise', async () => {
+ const instance = {};
+
+ triggerScriptOnload(instance);
+
+ await expect(result).resolves.toBe(instance);
+ expect(getScriptOnload()).toBeUndefined();
+ });
+ });
+});
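A sketch of a memoized loader that would satisfy the new spec; the constant values, the lodash memoize usage, and the helper names are assumptions rather than the actual ~/captcha/init_recaptcha_script.js:

    // Sketch only; constant values are placeholders.
    import { memoize } from 'lodash';

    export const RECAPTCHA_API_URL_PREFIX = 'https://www.google.com/recaptcha/api.js';
    export const RECAPTCHA_ONLOAD_CALLBACK_NAME = 'recaptchaOnloadCallback';

    export const initRecaptchaScript = memoize(
      () =>
        new Promise((resolve) => {
          window[RECAPTCHA_ONLOAD_CALLBACK_NAME] = (recaptchaInstance) => {
            delete window[RECAPTCHA_ONLOAD_CALLBACK_NAME];
            resolve(recaptchaInstance);
          };

          const script = document.createElement('script');
          script.className = 'js-recaptcha-script';
          script.src = `${RECAPTCHA_API_URL_PREFIX}?onload=${RECAPTCHA_ONLOAD_CALLBACK_NAME}&render=explicit`;
          document.head.appendChild(script);
        }),
    );

    export const clearMemoizeCache = () => initRecaptchaScript.cache.clear();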
diff --git a/spec/frontend/ci_variable_list/store/actions_spec.js b/spec/frontend/ci_variable_list/store/actions_spec.js
index 075e5829305..03a2e37ed55 100644
--- a/spec/frontend/ci_variable_list/store/actions_spec.js
+++ b/spec/frontend/ci_variable_list/store/actions_spec.js
@@ -6,8 +6,8 @@ import { deprecatedCreateFlash as createFlash } from '~/flash';
import getInitialState from '~/ci_variable_list/store/state';
import * as actions from '~/ci_variable_list/store/actions';
import * as types from '~/ci_variable_list/store/mutation_types';
-import mockData from '../services/mock_data';
import { prepareDataForDisplay, prepareEnvironments } from '~/ci_variable_list/store/utils';
+import mockData from '../services/mock_data';
jest.mock('~/api.js');
jest.mock('~/flash.js');
diff --git a/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap b/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
index ee4ec4636ea..6047b404197 100644
--- a/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
+++ b/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
@@ -60,8 +60,8 @@ exports[`Remove cluster confirmation modal renders splitbutton with modal includ
>
<svg
aria-hidden="true"
- class="gl-icon s16 gl-new-dropdown-item-check-icon"
- data-testid="mobile-issue-close-icon"
+ class="gl-icon s16 gl-new-dropdown-item-check-icon gl-mt-3 gl-align-self-start"
+ data-testid="dropdown-item-checkbox"
>
<use
href="#mobile-issue-close"
@@ -115,8 +115,8 @@ exports[`Remove cluster confirmation modal renders splitbutton with modal includ
>
<svg
aria-hidden="true"
- class="gl-icon s16 gl-new-dropdown-item-check-icon gl-visibility-hidden"
- data-testid="mobile-issue-close-icon"
+ class="gl-icon s16 gl-new-dropdown-item-check-icon gl-visibility-hidden gl-mt-3 gl-align-self-start"
+ data-testid="dropdown-item-checkbox"
>
<use
href="#mobile-issue-close"
diff --git a/spec/frontend/clusters/components/applications_spec.js b/spec/frontend/clusters/components/applications_spec.js
index cf89246c1a5..78f08430554 100644
--- a/spec/frontend/clusters/components/applications_spec.js
+++ b/spec/frontend/clusters/components/applications_spec.js
@@ -1,13 +1,13 @@
import { shallowMount, mount } from '@vue/test-utils';
import Applications from '~/clusters/components/applications.vue';
import { CLUSTER_TYPE, PROVIDER_TYPE } from '~/clusters/constants';
-import { APPLICATIONS_MOCK_STATE } from '../services/mock_data';
import eventHub from '~/clusters/event_hub';
import ApplicationRow from '~/clusters/components/application_row.vue';
import KnativeDomainEditor from '~/clusters/components/knative_domain_editor.vue';
import CrossplaneProviderStack from '~/clusters/components/crossplane_provider_stack.vue';
import IngressModsecuritySettings from '~/clusters/components/ingress_modsecurity_settings.vue';
import FluentdOutputSettings from '~/clusters/components/fluentd_output_settings.vue';
+import { APPLICATIONS_MOCK_STATE } from '../services/mock_data';
describe('Applications', () => {
let wrapper;
@@ -16,7 +16,7 @@ describe('Applications', () => {
gon.features = gon.features || {};
});
- const createApp = ({ applications, type, propsData } = {}, isShallow) => {
+ const createComponent = ({ applications, type, propsData } = {}, isShallow) => {
const mountMethod = isShallow ? shallowMount : mount;
wrapper = mountMethod(Applications, {
@@ -29,7 +29,7 @@ describe('Applications', () => {
});
};
- const createShallowApp = (options) => createApp(options, true);
+ const createShallowComponent = (options) => createComponent(options, true);
const findByTestId = (id) => wrapper.find(`[data-testid="${id}"]`);
afterEach(() => {
wrapper.destroy();
@@ -37,7 +37,7 @@ describe('Applications', () => {
describe('Project cluster applications', () => {
beforeEach(() => {
- createApp({ type: CLUSTER_TYPE.PROJECT });
+ createComponent({ type: CLUSTER_TYPE.PROJECT });
});
it('renders a row for Ingress', () => {
@@ -82,7 +82,7 @@ describe('Applications', () => {
describe('Group cluster applications', () => {
beforeEach(() => {
- createApp({ type: CLUSTER_TYPE.GROUP });
+ createComponent({ type: CLUSTER_TYPE.GROUP });
});
it('renders a row for Ingress', () => {
@@ -128,7 +128,7 @@ describe('Applications', () => {
describe('Instance cluster applications', () => {
beforeEach(() => {
- createApp({ type: CLUSTER_TYPE.INSTANCE });
+ createComponent({ type: CLUSTER_TYPE.INSTANCE });
});
it('renders a row for Ingress', () => {
@@ -174,14 +174,14 @@ describe('Applications', () => {
describe('Helm application', () => {
it('does not render a row for Helm Tiller', () => {
- createApp();
+ createComponent();
expect(wrapper.find('.js-cluster-application-row-helm').exists()).toBe(false);
});
});
describe('Ingress application', () => {
it('shows the correct warning message', () => {
- createApp();
+ createComponent();
expect(findByTestId('ingressCostWarning').element).toMatchSnapshot();
});
@@ -195,7 +195,7 @@ describe('Applications', () => {
},
};
- beforeEach(() => createShallowApp(propsData));
+ beforeEach(() => createShallowComponent(propsData));
it('renders IngressModsecuritySettings', () => {
const modsecuritySettings = wrapper.find(IngressModsecuritySettings);
@@ -206,7 +206,7 @@ describe('Applications', () => {
describe('when installed', () => {
describe('with ip address', () => {
it('renders ip address with a clipboard button', () => {
- createApp({
+ createComponent({
applications: {
ingress: {
title: 'Ingress',
@@ -225,7 +225,7 @@ describe('Applications', () => {
describe('with hostname', () => {
it('renders hostname with a clipboard button', () => {
- createApp({
+ createComponent({
applications: {
ingress: {
title: 'Ingress',
@@ -255,7 +255,7 @@ describe('Applications', () => {
describe('without ip address', () => {
it('renders an input text with a loading icon and an alert text', () => {
- createApp({
+ createComponent({
applications: {
ingress: {
title: 'Ingress',
@@ -272,7 +272,7 @@ describe('Applications', () => {
describe('before installing', () => {
it('does not render the IP address', () => {
- createApp();
+ createComponent();
expect(wrapper.text()).not.toContain('Ingress IP Address');
expect(wrapper.find('.js-endpoint').exists()).toBe(false);
@@ -282,13 +282,13 @@ describe('Applications', () => {
describe('Cert-Manager application', () => {
it('shows the correct description', () => {
- createApp();
+ createComponent();
expect(findByTestId('certManagerDescription').element).toMatchSnapshot();
});
describe('when not installed', () => {
it('renders email & allows editing', () => {
- createApp({
+ createComponent({
applications: {
cert_manager: {
title: 'Cert-Manager',
@@ -305,7 +305,7 @@ describe('Applications', () => {
describe('when installed', () => {
it('renders email in readonly', () => {
- createApp({
+ createComponent({
applications: {
cert_manager: {
title: 'Cert-Manager',
@@ -324,7 +324,7 @@ describe('Applications', () => {
describe('Jupyter application', () => {
describe('with ingress installed with ip & jupyter installable', () => {
it('renders hostname active input', () => {
- createApp({
+ createComponent({
applications: {
ingress: {
title: 'Ingress',
@@ -342,7 +342,7 @@ describe('Applications', () => {
describe('with ingress installed without external ip', () => {
it('does not render hostname input', () => {
- createApp({
+ createComponent({
applications: {
ingress: { title: 'Ingress', status: 'installed' },
},
@@ -356,7 +356,7 @@ describe('Applications', () => {
describe('with ingress & jupyter installed', () => {
it('renders readonly input', () => {
- createApp({
+ createComponent({
applications: {
ingress: {
title: 'Ingress',
@@ -375,7 +375,7 @@ describe('Applications', () => {
describe('without ingress installed', () => {
beforeEach(() => {
- createApp();
+ createComponent();
});
it('does not render input', () => {
@@ -388,7 +388,7 @@ describe('Applications', () => {
describe('Prometheus application', () => {
it('shows the correct description', () => {
- createApp();
+ createComponent();
expect(findByTestId('prometheusDescription').element).toMatchSnapshot();
});
});
@@ -414,14 +414,14 @@ describe('Applications', () => {
let knativeDomainEditor;
beforeEach(() => {
- createShallowApp(propsData);
+ createShallowComponent(propsData);
jest.spyOn(eventHub, '$emit');
knativeDomainEditor = wrapper.find(KnativeDomainEditor);
});
it('shows the correct description', async () => {
- createApp();
+ createComponent();
wrapper.setProps({
providerType: PROVIDER_TYPE.GCP,
preInstalledKnative: true,
@@ -487,7 +487,7 @@ describe('Applications', () => {
},
};
- beforeEach(() => createShallowApp(propsData));
+ beforeEach(() => createShallowComponent(propsData));
it('renders the correct Component', () => {
const crossplane = wrapper.find(CrossplaneProviderStack);
@@ -495,7 +495,7 @@ describe('Applications', () => {
});
it('shows the correct description', () => {
- createApp();
+ createComponent();
expect(findByTestId('crossplaneDescription').element).toMatchSnapshot();
});
});
@@ -503,7 +503,7 @@ describe('Applications', () => {
describe('Elastic Stack application', () => {
describe('with elastic stack installable', () => {
it('renders the install button enabled', () => {
- createApp();
+ createComponent();
expect(
wrapper
@@ -517,7 +517,7 @@ describe('Applications', () => {
describe('elastic stack installed', () => {
it('renders uninstall button', () => {
- createApp({
+ createComponent({
applications: {
elastic_stack: { title: 'Elastic Stack', status: 'installed' },
},
@@ -535,7 +535,7 @@ describe('Applications', () => {
});
describe('Fluentd application', () => {
- beforeEach(() => createShallowApp());
+ beforeEach(() => createShallowComponent());
it('renders the correct Component', () => {
expect(wrapper.find(FluentdOutputSettings).exists()).toBe(true);
@@ -544,7 +544,7 @@ describe('Applications', () => {
describe('Cilium application', () => {
it('shows the correct description', () => {
- createApp({ propsData: { ciliumHelpPath: 'cilium-help-path' } });
+ createComponent({ propsData: { ciliumHelpPath: 'cilium-help-path' } });
expect(findByTestId('ciliumDescription').element).toMatchSnapshot();
});
});
diff --git a/spec/frontend/clusters_list/store/actions_spec.js b/spec/frontend/clusters_list/store/actions_spec.js
index 6214cb50e13..60d6bb46e3c 100644
--- a/spec/frontend/clusters_list/store/actions_spec.js
+++ b/spec/frontend/clusters_list/store/actions_spec.js
@@ -5,10 +5,10 @@ import * as Sentry from '~/sentry/wrapper';
import Poll from '~/lib/utils/poll';
import { deprecatedCreateFlash as flashError } from '~/flash';
import axios from '~/lib/utils/axios_utils';
-import { apiData } from '../mock_data';
import { MAX_REQUESTS } from '~/clusters_list/constants';
import * as types from '~/clusters_list/store/mutation_types';
import * as actions from '~/clusters_list/store/actions';
+import { apiData } from '../mock_data';
jest.mock('~/flash.js');
diff --git a/spec/frontend/clusters_list/store/mutations_spec.js b/spec/frontend/clusters_list/store/mutations_spec.js
index df0dfe587b6..2ca1c20ad56 100644
--- a/spec/frontend/clusters_list/store/mutations_spec.js
+++ b/spec/frontend/clusters_list/store/mutations_spec.js
@@ -1,7 +1,7 @@
import * as types from '~/clusters_list/store/mutation_types';
-import { apiData } from '../mock_data';
import getInitialState from '~/clusters_list/store/state';
import mutations from '~/clusters_list/store/mutations';
+import { apiData } from '../mock_data';
describe('Admin statistics panel mutations', () => {
let state;
diff --git a/spec/frontend/create_cluster/eks_cluster/store/actions_spec.js b/spec/frontend/create_cluster/eks_cluster/store/actions_spec.js
index 35348d3a03b..fd5db1c6a29 100644
--- a/spec/frontend/create_cluster/eks_cluster/store/actions_spec.js
+++ b/spec/frontend/create_cluster/eks_cluster/store/actions_spec.js
@@ -1,6 +1,6 @@
+import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
-import MockAdapter from 'axios-mock-adapter';
import createState from '~/create_cluster/eks_cluster/store/state';
import * as actions from '~/create_cluster/eks_cluster/store/actions';
import {
diff --git a/spec/frontend/create_cluster/gke_cluster/components/gke_machine_type_dropdown_spec.js b/spec/frontend/create_cluster/gke_cluster/components/gke_machine_type_dropdown_spec.js
index c09eaa63d4d..e97e0a6686d 100644
--- a/spec/frontend/create_cluster/gke_cluster/components/gke_machine_type_dropdown_spec.js
+++ b/spec/frontend/create_cluster/gke_cluster/components/gke_machine_type_dropdown_spec.js
@@ -1,10 +1,10 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
-import { selectedMachineTypeMock, gapiMachineTypesResponseMock } from '../mock_data';
import createState from '~/create_cluster/gke_cluster/store/state';
import DropdownButton from '~/vue_shared/components/dropdown/dropdown_button.vue';
import DropdownHiddenInput from '~/vue_shared/components/dropdown/dropdown_hidden_input.vue';
import GkeMachineTypeDropdown from '~/create_cluster/gke_cluster/components/gke_machine_type_dropdown.vue';
+import { selectedMachineTypeMock, gapiMachineTypesResponseMock } from '../mock_data';
const componentConfig = {
fieldId: 'cluster_provider_gcp_attributes_gcp_machine_type',
diff --git a/spec/frontend/create_cluster/gke_cluster/components/gke_project_id_dropdown_spec.js b/spec/frontend/create_cluster/gke_cluster/components/gke_project_id_dropdown_spec.js
index eb58108bf3c..90f96c98427 100644
--- a/spec/frontend/create_cluster/gke_cluster/components/gke_project_id_dropdown_spec.js
+++ b/spec/frontend/create_cluster/gke_cluster/components/gke_project_id_dropdown_spec.js
@@ -1,10 +1,10 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import createState from '~/create_cluster/gke_cluster/store/state';
-import { selectedProjectMock, gapiProjectsResponseMock } from '../mock_data';
import GkeProjectIdDropdown from '~/create_cluster/gke_cluster/components/gke_project_id_dropdown.vue';
import DropdownButton from '~/vue_shared/components/dropdown/dropdown_button.vue';
import DropdownHiddenInput from '~/vue_shared/components/dropdown/dropdown_hidden_input.vue';
+import { selectedProjectMock, gapiProjectsResponseMock } from '../mock_data';
const componentConfig = {
docsUrl: 'https://console.cloud.google.com/home/dashboard',
diff --git a/spec/frontend/deploy_keys/components/key_spec.js b/spec/frontend/deploy_keys/components/key_spec.js
index fcb4e31dec8..f783fdd6476 100644
--- a/spec/frontend/deploy_keys/components/key_spec.js
+++ b/spec/frontend/deploy_keys/components/key_spec.js
@@ -76,7 +76,7 @@ describe('Deploy keys key', () => {
createComponent({ deployKey: { ...deployKey, deploy_keys_projects: deployKeysProjects } });
expect(wrapper.find('.deploy-project-label').attributes('title')).toBe(
- 'Write access allowed',
+ 'Grant write permissions to this key',
);
});
diff --git a/spec/frontend/design_management/components/design_notes/design_discussion_spec.js b/spec/frontend/design_management/components/design_notes/design_discussion_spec.js
index 77fc70e08d1..a414eb04ab4 100644
--- a/spec/frontend/design_management/components/design_notes/design_discussion_spec.js
+++ b/spec/frontend/design_management/components/design_notes/design_discussion_spec.js
@@ -1,6 +1,5 @@
import { mount } from '@vue/test-utils';
import { GlLoadingIcon } from '@gitlab/ui';
-import notes from '../../mock_data/notes';
import DesignDiscussion from '~/design_management/components/design_notes/design_discussion.vue';
import DesignNote from '~/design_management/components/design_notes/design_note.vue';
import DesignReplyForm from '~/design_management/components/design_notes/design_reply_form.vue';
@@ -8,6 +7,7 @@ import createNoteMutation from '~/design_management/graphql/mutations/create_not
import toggleResolveDiscussionMutation from '~/design_management/graphql/mutations/toggle_resolve_discussion.mutation.graphql';
import ReplyPlaceholder from '~/notes/components/discussion_reply_placeholder.vue';
import ToggleRepliesWidget from '~/design_management/components/design_notes/toggle_replies_widget.vue';
+import notes from '../../mock_data/notes';
import mockDiscussion from '../../mock_data/discussion';
const defaultMockDiscussion = {
diff --git a/spec/frontend/design_management/components/design_overlay_spec.js b/spec/frontend/design_management/components/design_overlay_spec.js
index a026cc39c84..ecb1fdbf534 100644
--- a/spec/frontend/design_management/components/design_overlay_spec.js
+++ b/spec/frontend/design_management/components/design_overlay_spec.js
@@ -1,8 +1,8 @@
import { mount } from '@vue/test-utils';
import DesignOverlay from '~/design_management/components/design_overlay.vue';
import updateActiveDiscussion from '~/design_management/graphql/mutations/update_active_discussion.mutation.graphql';
-import notes from '../mock_data/notes';
import { ACTIVE_DISCUSSION_SOURCE_TYPES } from '~/design_management/constants';
+import notes from '../mock_data/notes';
const mutate = jest.fn(() => Promise.resolve());
diff --git a/spec/frontend/design_management/components/design_sidebar_spec.js b/spec/frontend/design_management/components/design_sidebar_spec.js
index 60266883fcd..3d520ea721a 100644
--- a/spec/frontend/design_management/components/design_sidebar_spec.js
+++ b/spec/frontend/design_management/components/design_sidebar_spec.js
@@ -4,9 +4,9 @@ import Cookies from 'js-cookie';
import DesignSidebar from '~/design_management/components/design_sidebar.vue';
import Participants from '~/sidebar/components/participants/participants.vue';
import DesignDiscussion from '~/design_management/components/design_notes/design_discussion.vue';
-import design from '../mock_data/design';
import updateActiveDiscussionMutation from '~/design_management/graphql/mutations/update_active_discussion.mutation.graphql';
import DesignTodoButton from '~/design_management/components/design_todo_button.vue';
+import design from '../mock_data/design';
const scrollIntoViewMock = jest.fn();
HTMLElement.prototype.scrollIntoView = scrollIntoViewMock;
diff --git a/spec/frontend/design_management/components/design_todo_button_spec.js b/spec/frontend/design_management/components/design_todo_button_spec.js
index 9ebc6ca26a2..0c7516c55df 100644
--- a/spec/frontend/design_management/components/design_todo_button_spec.js
+++ b/spec/frontend/design_management/components/design_todo_button_spec.js
@@ -111,7 +111,7 @@ describe('Design management design todo button', () => {
});
it('renders correct button text', () => {
- expect(wrapper.text()).toBe('Add a To Do');
+ expect(wrapper.text()).toBe('Add a to do');
});
describe('when clicked', () => {
diff --git a/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap b/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
index 36a2ffd19c3..8fe3e92360a 100644
--- a/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
+++ b/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
@@ -26,9 +26,10 @@ exports[`Design management list item component with notes renders item with mult
<img
alt="test"
- class="gl-display-block gl-mx-auto gl-max-w-full mh-100 design-img"
+ class="gl-display-block gl-mx-auto gl-max-w-full gl-max-h-full design-img"
data-qa-filename="test"
data-qa-selector="design_image"
+ data-testid="design-img-1"
src=""
/>
</gl-intersection-observer-stub>
@@ -43,6 +44,8 @@ exports[`Design management list item component with notes renders item with mult
<span
class="gl-font-weight-bold str-truncated-100"
data-qa-selector="design_file_name"
+ data-testid="design-img-filename-1"
+ title="test"
>
test
</span>
@@ -100,9 +103,10 @@ exports[`Design management list item component with notes renders item with sing
<img
alt="test"
- class="gl-display-block gl-mx-auto gl-max-w-full mh-100 design-img"
+ class="gl-display-block gl-mx-auto gl-max-w-full gl-max-h-full design-img"
data-qa-filename="test"
data-qa-selector="design_image"
+ data-testid="design-img-1"
src=""
/>
</gl-intersection-observer-stub>
@@ -117,6 +121,8 @@ exports[`Design management list item component with notes renders item with sing
<span
class="gl-font-weight-bold str-truncated-100"
data-qa-selector="design_file_name"
+ data-testid="design-img-filename-1"
+ title="test"
>
test
</span>
diff --git a/spec/frontend/design_management/components/list/item_spec.js b/spec/frontend/design_management/components/list/item_spec.js
index 55c6ecbc26b..a89d05127c7 100644
--- a/spec/frontend/design_management/components/list/item_spec.js
+++ b/spec/frontend/design_management/components/list/item_spec.js
@@ -1,6 +1,7 @@
import { createLocalVue, shallowMount } from '@vue/test-utils';
import { GlIcon, GlLoadingIcon, GlIntersectionObserver } from '@gitlab/ui';
import VueRouter from 'vue-router';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import Item from '~/design_management/components/list/item.vue';
const localVue = createLocalVue();
@@ -17,8 +18,11 @@ const DESIGN_VERSION_EVENT = {
describe('Design management list item component', () => {
let wrapper;
+ const imgId = 1;
+ const imgFilename = 'test';
- const findDesignEvent = () => wrapper.find('[data-testid="designEvent"]');
+ const findDesignEvent = () => wrapper.findByTestId('design-event');
+ const findImgFilename = (id = imgId) => wrapper.findByTestId(`design-img-filename-${id}`);
const findEventIcon = () => findDesignEvent().find(GlIcon);
const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
@@ -28,25 +32,27 @@ describe('Design management list item component', () => {
isUploading = false,
imageLoading = false,
} = {}) {
- wrapper = shallowMount(Item, {
- localVue,
- router,
- propsData: {
- id: 1,
- filename: 'test',
- image: 'http://via.placeholder.com/300',
- isUploading,
- event,
- notesCount,
- updatedAt: '01-01-2019',
- },
- data() {
- return {
- imageLoading,
- };
- },
- stubs: ['router-link'],
- });
+ wrapper = extendedWrapper(
+ shallowMount(Item, {
+ localVue,
+ router,
+ propsData: {
+ id: imgId,
+ filename: imgFilename,
+ image: 'http://via.placeholder.com/300',
+ isUploading,
+ event,
+ notesCount,
+ updatedAt: '01-01-2019',
+ },
+ data() {
+ return {
+ imageLoading,
+ };
+ },
+ stubs: ['router-link'],
+ }),
+ );
}
afterEach(() => {
@@ -75,6 +81,10 @@ describe('Design management list item component', () => {
return wrapper.vm.$nextTick();
});
+ it('renders a tooltip', () => {
+ expect(findImgFilename().attributes('title')).toEqual(imgFilename);
+ });
+
describe('before image is loaded', () => {
it('renders loading spinner', () => {
expect(wrapper.find(GlLoadingIcon)).toExist();
diff --git a/spec/frontend/design_management/pages/design/index_spec.js b/spec/frontend/design_management/pages/design/index_spec.js
index 9c11af28cf0..63e1c712d6b 100644
--- a/spec/frontend/design_management/pages/design/index_spec.js
+++ b/spec/frontend/design_management/pages/design/index_spec.js
@@ -18,15 +18,15 @@ import { DESIGNS_ROUTE_NAME, DESIGN_ROUTE_NAME } from '~/design_management/route
import createRouter from '~/design_management/router';
import * as utils from '~/design_management/utils/design_management_utils';
import { DESIGN_DETAIL_LAYOUT_CLASSLIST } from '~/design_management/constants';
-import design from '../../mock_data/design';
-import mockResponseWithDesigns from '../../mock_data/designs';
-import mockResponseNoDesigns from '../../mock_data/no_designs';
-import mockAllVersions from '../../mock_data/all_versions';
import {
DESIGN_TRACKING_PAGE_NAME,
DESIGN_SNOWPLOW_EVENT_TYPES,
DESIGN_USAGE_PING_EVENT_TYPES,
} from '~/design_management/utils/tracking';
+import design from '../../mock_data/design';
+import mockResponseWithDesigns from '../../mock_data/designs';
+import mockResponseNoDesigns from '../../mock_data/no_designs';
+import mockAllVersions from '../../mock_data/all_versions';
jest.mock('~/flash');
jest.mock('~/api.js');
diff --git a/spec/frontend/design_management/pages/index_spec.js b/spec/frontend/design_management/pages/index_spec.js
index 7d28d6f6d11..94d75f018f9 100644
--- a/spec/frontend/design_management/pages/index_spec.js
+++ b/spec/frontend/design_management/pages/index_spec.js
@@ -22,6 +22,11 @@ import {
import createFlash from '~/flash';
import createRouter from '~/design_management/router';
import * as utils from '~/design_management/utils/design_management_utils';
+import moveDesignMutation from '~/design_management/graphql/mutations/move_design.mutation.graphql';
+import {
+ DESIGN_TRACKING_PAGE_NAME,
+ DESIGN_SNOWPLOW_EVENT_TYPES,
+} from '~/design_management/utils/tracking';
import {
designListQueryResponse,
designUploadMutationCreatedResponse,
@@ -31,11 +36,6 @@ import {
reorderedDesigns,
moveDesignMutationResponseWithErrors,
} from '../mock_data/apollo_mock';
-import moveDesignMutation from '~/design_management/graphql/mutations/move_design.mutation.graphql';
-import {
- DESIGN_TRACKING_PAGE_NAME,
- DESIGN_SNOWPLOW_EVENT_TYPES,
-} from '~/design_management/utils/tracking';
jest.mock('~/flash.js');
const mockPageEl = {
diff --git a/spec/frontend/design_management/utils/cache_update_spec.js b/spec/frontend/design_management/utils/cache_update_spec.js
index 2fb08c3ef05..7327cf00abd 100644
--- a/spec/frontend/design_management/utils/cache_update_spec.js
+++ b/spec/frontend/design_management/utils/cache_update_spec.js
@@ -10,8 +10,8 @@ import {
ADD_IMAGE_DIFF_NOTE_ERROR,
UPDATE_IMAGE_DIFF_NOTE_ERROR,
} from '~/design_management/utils/error_messages';
-import design from '../mock_data/design';
import createFlash from '~/flash';
+import design from '../mock_data/design';
jest.mock('~/flash.js');
diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js
index 7fbeb33dd93..e84fa6fb0ae 100644
--- a/spec/frontend/diffs/components/app_spec.js
+++ b/spec/frontend/diffs/components/app_spec.js
@@ -3,8 +3,8 @@ import Vuex from 'vuex';
import { shallowMount } from '@vue/test-utils';
import { GlLoadingIcon, GlPagination } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
-import { TEST_HOST } from 'spec/test_constants';
import Mousetrap from 'mousetrap';
+import { TEST_HOST } from 'spec/test_constants';
import App from '~/diffs/components/app.vue';
import NoChanges from '~/diffs/components/no_changes.vue';
import DiffFile from '~/diffs/components/diff_file.vue';
@@ -13,14 +13,14 @@ import HiddenFilesWarning from '~/diffs/components/hidden_files_warning.vue';
import CollapsedFilesWarning from '~/diffs/components/collapsed_files_warning.vue';
import CommitWidget from '~/diffs/components/commit_widget.vue';
import TreeList from '~/diffs/components/tree_list.vue';
-import createDiffsStore from '../create_diffs_store';
import axios from '~/lib/utils/axios_utils';
import * as urlUtils from '~/lib/utils/url_utility';
-import diffsMockData from '../mock_data/merge_request_diffs';
import { EVT_VIEW_FILE_BY_FILE } from '~/diffs/constants';
import eventHub from '~/diffs/event_hub';
+import diffsMockData from '../mock_data/merge_request_diffs';
+import createDiffsStore from '../create_diffs_store';
const mergeRequestDiff = { version_index: 1 };
const TEST_ENDPOINT = `${TEST_HOST}/diff/endpoint`;
diff --git a/spec/frontend/diffs/components/compare_versions_spec.js b/spec/frontend/diffs/components/compare_versions_spec.js
index 949cc855200..adb695b8a64 100644
--- a/spec/frontend/diffs/components/compare_versions_spec.js
+++ b/spec/frontend/diffs/components/compare_versions_spec.js
@@ -1,6 +1,6 @@
-import { trimText } from 'helpers/text_helper';
import { mount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
+import { trimText } from 'helpers/text_helper';
import CompareVersionsComponent from '~/diffs/components/compare_versions.vue';
import { createStore } from '~/mr_notes/stores';
import diffsMockData from '../mock_data/merge_request_diffs';
diff --git a/spec/frontend/diffs/components/diff_content_spec.js b/spec/frontend/diffs/components/diff_content_spec.js
index c1cf4793c88..7b2b8996808 100644
--- a/spec/frontend/diffs/components/diff_content_spec.js
+++ b/spec/frontend/diffs/components/diff_content_spec.js
@@ -9,9 +9,9 @@ import ParallelDiffView from '~/diffs/components/parallel_diff_view.vue';
import NoteForm from '~/notes/components/note_form.vue';
import DiffDiscussions from '~/diffs/components/diff_discussions.vue';
import { IMAGE_DIFF_POSITION_TYPE } from '~/diffs/constants';
-import diffFileMockData from '../mock_data/diff_file';
import { diffViewerModes } from '~/ide/constants';
import DiffView from '~/diffs/components/diff_view.vue';
+import diffFileMockData from '../mock_data/diff_file';
const localVue = createLocalVue();
localVue.use(Vuex);
diff --git a/spec/frontend/diffs/components/diff_file_header_spec.js b/spec/frontend/diffs/components/diff_file_header_spec.js
index e9a63e861ed..5bd59055afc 100644
--- a/spec/frontend/diffs/components/diff_file_header_spec.js
+++ b/spec/frontend/diffs/components/diff_file_header_spec.js
@@ -7,16 +7,23 @@ import { mockTracking, triggerEvent } from 'helpers/tracking_helper';
import DiffFileHeader from '~/diffs/components/diff_file_header.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import FileIcon from '~/vue_shared/components/file_icon.vue';
-import diffDiscussionsMockData from '../mock_data/diff_discussions';
import { truncateSha } from '~/lib/utils/text_utility';
import { diffViewerModes } from '~/ide/constants';
import { __, sprintf } from '~/locale';
import { scrollToElement } from '~/lib/utils/common_utils';
+import { SET_MR_FILE_REVIEWS } from '~/diffs/store/mutation_types';
+import { reviewFile } from '~/diffs/store/actions';
+import { DIFF_FILE_AUTOMATIC_COLLAPSE, DIFF_FILE_MANUAL_COLLAPSE } from '~/diffs/constants';
+import testAction from '../../__helpers__/vuex_action_helper';
+import diffDiscussionsMockData from '../mock_data/diff_discussions';
+
jest.mock('~/lib/utils/common_utils');
const diffFile = Object.freeze(
Object.assign(diffDiscussionsMockData.diff_file, {
+ id: '123',
+ file_identifier_hash: 'abc',
edit_path: 'link:/to/edit/path',
blob: {
id: '848ed9407c6730ff16edb3dd24485a0eea24292a',
@@ -52,6 +59,8 @@ describe('DiffFileHeader component', () => {
toggleFileDiscussionWrappers: jest.fn(),
toggleFullDiff: jest.fn(),
toggleActiveFileByHash: jest.fn(),
+ setFileCollapsedByUser: jest.fn(),
+ reviewFile: jest.fn(),
},
},
},
@@ -79,10 +88,11 @@ describe('DiffFileHeader component', () => {
const findViewFileButton = () => wrapper.find({ ref: 'viewButton' });
const findCollapseIcon = () => wrapper.find({ ref: 'collapseIcon' });
const findEditButton = () => wrapper.find({ ref: 'editButton' });
+ const findReviewFileCheckbox = () => wrapper.find("[data-testid='fileReviewCheckbox']");
- const createComponent = (props) => {
+ const createComponent = ({ props, options = {} } = {}) => {
mockStoreConfig = cloneDeep(defaultMockStoreConfig);
- const store = new Vuex.Store(mockStoreConfig);
+ const store = new Vuex.Store({ ...mockStoreConfig, ...(options.store || {}) });
wrapper = shallowMount(DiffFileHeader, {
propsData: {
@@ -91,6 +101,7 @@ describe('DiffFileHeader component', () => {
viewDiffsFileByFile: false,
...props,
},
+ ...options,
localVue,
store,
});
@@ -101,7 +112,7 @@ describe('DiffFileHeader component', () => {
${'visible'} | ${true}
${'hidden'} | ${false}
`('collapse toggle is $visibility if collapsible is $collapsible', ({ collapsible }) => {
- createComponent({ collapsible });
+ createComponent({ props: { collapsible } });
expect(findCollapseIcon().exists()).toBe(collapsible);
});
@@ -110,7 +121,7 @@ describe('DiffFileHeader component', () => {
${true} | ${'chevron-down'}
${false} | ${'chevron-right'}
`('collapse icon is $icon if expanded is $expanded', ({ icon, expanded }) => {
- createComponent({ expanded, collapsible: true });
+ createComponent({ props: { expanded, collapsible: true } });
expect(findCollapseIcon().props('name')).toBe(icon);
});
@@ -124,7 +135,7 @@ describe('DiffFileHeader component', () => {
});
it('when collapseIcon is clicked emits toggleFile', () => {
- createComponent({ collapsible: true });
+ createComponent({ props: { collapsible: true } });
findCollapseIcon().vm.$emit('click', new Event('click'));
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.emitted().toggleFile).toBeDefined();
@@ -132,7 +143,7 @@ describe('DiffFileHeader component', () => {
});
it('when other element in header is clicked does not emits toggleFile', () => {
- createComponent({ collapsible: true });
+ createComponent({ props: { collapsible: true } });
findTitleLink().trigger('click');
return wrapper.vm.$nextTick().then(() => {
@@ -171,10 +182,12 @@ describe('DiffFileHeader component', () => {
it('prefers submodule_tree_url over submodule_link for href', () => {
const submoduleTreeUrl = 'some://tree/url';
createComponent({
- discussionLink: 'discussionLink',
- diffFile: {
- ...submoduleDiffFile,
- submodule_tree_url: 'some://tree/url',
+ props: {
+ discussionLink: 'discussionLink',
+ diffFile: {
+ ...submoduleDiffFile,
+ submodule_tree_url: 'some://tree/url',
+ },
},
});
@@ -184,8 +197,10 @@ describe('DiffFileHeader component', () => {
it('uses submodule_link for href if submodule_tree_url does not exists', () => {
const submoduleLink = 'link://to/submodule';
createComponent({
- discussionLink: 'discussionLink',
- diffFile: submoduleDiffFile,
+ props: {
+ discussionLink: 'discussionLink',
+ diffFile: submoduleDiffFile,
+ },
});
expect(findTitleLink().attributes('href')).toBe(submoduleLink);
@@ -193,7 +208,9 @@ describe('DiffFileHeader component', () => {
it('uses file_path + SHA as link text', () => {
createComponent({
- diffFile: submoduleDiffFile,
+ props: {
+ diffFile: submoduleDiffFile,
+ },
});
expect(findTitleLink().text()).toContain(
@@ -203,15 +220,19 @@ describe('DiffFileHeader component', () => {
it('does not render file actions', () => {
createComponent({
- diffFile: submoduleDiffFile,
- addMergeRequestButtons: true,
+ props: {
+ diffFile: submoduleDiffFile,
+ addMergeRequestButtons: true,
+ },
});
expect(findFileActions().exists()).toBe(false);
});
it('renders submodule icon', () => {
createComponent({
- diffFile: submoduleDiffFile,
+ props: {
+ diffFile: submoduleDiffFile,
+ },
});
expect(wrapper.find(FileIcon).props('submodule')).toBe(true);
@@ -223,13 +244,15 @@ describe('DiffFileHeader component', () => {
it('for mode_changed file mode displays mode changes', () => {
createComponent({
- diffFile: {
- ...diffFile,
- a_mode: 'old-mode',
- b_mode: 'new-mode',
- viewer: {
- ...diffFile.viewer,
- name: diffViewerModes.mode_changed,
+ props: {
+ diffFile: {
+ ...diffFile,
+ a_mode: 'old-mode',
+ b_mode: 'new-mode',
+ viewer: {
+ ...diffFile.viewer,
+ name: diffViewerModes.mode_changed,
+ },
},
},
});
@@ -240,13 +263,15 @@ describe('DiffFileHeader component', () => {
'for %s file mode does not display mode changes',
(mode) => {
createComponent({
- diffFile: {
- ...diffFile,
- a_mode: 'old-mode',
- b_mode: 'new-mode',
- viewer: {
- ...diffFile.viewer,
- name: diffViewerModes[mode],
+ props: {
+ diffFile: {
+ ...diffFile,
+ a_mode: 'old-mode',
+ b_mode: 'new-mode',
+ viewer: {
+ ...diffFile.viewer,
+ name: diffViewerModes[mode],
+ },
},
},
});
@@ -256,32 +281,38 @@ describe('DiffFileHeader component', () => {
it('displays the LFS label for files stored in LFS', () => {
createComponent({
- diffFile: { ...diffFile, stored_externally: true, external_storage: 'lfs' },
+ props: {
+ diffFile: { ...diffFile, stored_externally: true, external_storage: 'lfs' },
+ },
});
expect(findLfsLabel().exists()).toBe(true);
});
it('does not display the LFS label for files stored in repository', () => {
createComponent({
- diffFile: { ...diffFile, stored_externally: false },
+ props: {
+ diffFile: { ...diffFile, stored_externally: false },
+ },
});
expect(findLfsLabel().exists()).toBe(false);
});
it('does not render view replaced file button if no replaced view path is present', () => {
createComponent({
- diffFile: { ...diffFile, replaced_view_path: null },
+ props: {
+ diffFile: { ...diffFile, replaced_view_path: null },
+ },
});
expect(findReplacedFileButton().exists()).toBe(false);
});
describe('when addMergeRequestButtons is false', () => {
it('does not render file actions', () => {
- createComponent({ addMergeRequestButtons: false });
+ createComponent({ props: { addMergeRequestButtons: false } });
expect(findFileActions().exists()).toBe(false);
});
it('should not render edit button', () => {
- createComponent({ addMergeRequestButtons: false });
+ createComponent({ props: { addMergeRequestButtons: false } });
expect(findEditButton().exists()).toBe(false);
});
});
@@ -290,7 +321,7 @@ describe('DiffFileHeader component', () => {
describe('without discussions', () => {
it('does not render a toggle discussions button', () => {
diffHasDiscussionsResultMock.mockReturnValue(false);
- createComponent({ addMergeRequestButtons: true });
+ createComponent({ props: { addMergeRequestButtons: true } });
expect(findToggleDiscussionsButton().exists()).toBe(false);
});
});
@@ -298,7 +329,7 @@ describe('DiffFileHeader component', () => {
describe('with discussions', () => {
it('dispatches toggleFileDiscussionWrappers when user clicks on toggle discussions button', () => {
diffHasDiscussionsResultMock.mockReturnValue(true);
- createComponent({ addMergeRequestButtons: true });
+ createComponent({ props: { addMergeRequestButtons: true } });
expect(findToggleDiscussionsButton().exists()).toBe(true);
findToggleDiscussionsButton().vm.$emit('click');
expect(
@@ -309,7 +340,9 @@ describe('DiffFileHeader component', () => {
it('should show edit button', () => {
createComponent({
- addMergeRequestButtons: true,
+ props: {
+ addMergeRequestButtons: true,
+ },
});
expect(findEditButton().exists()).toBe(true);
});
@@ -319,25 +352,27 @@ describe('DiffFileHeader component', () => {
const externalUrl = 'link://to/external';
const formattedExternalUrl = 'link://formatted';
createComponent({
- diffFile: {
- ...diffFile,
- external_url: externalUrl,
- formatted_external_url: formattedExternalUrl,
+ props: {
+ diffFile: {
+ ...diffFile,
+ external_url: externalUrl,
+ formatted_external_url: formattedExternalUrl,
+ },
+ addMergeRequestButtons: true,
},
- addMergeRequestButtons: true,
});
expect(findExternalLink().exists()).toBe(true);
});
it('is hidden by default', () => {
- createComponent({ addMergeRequestButtons: true });
+ createComponent({ props: { addMergeRequestButtons: true } });
expect(findExternalLink().exists()).toBe(false);
});
});
describe('without file blob', () => {
beforeEach(() => {
- createComponent({ diffFile: { ...diffFile, blob: false } });
+ createComponent({ props: { diffFile: { ...diffFile, blob: false } } });
});
it('should not render toggle discussions button', () => {
@@ -352,8 +387,10 @@ describe('DiffFileHeader component', () => {
it('should render correct file view button', () => {
const viewPath = 'link://view-path';
createComponent({
- diffFile: { ...diffFile, view_path: viewPath },
- addMergeRequestButtons: true,
+ props: {
+ diffFile: { ...diffFile, view_path: viewPath },
+ addMergeRequestButtons: true,
+ },
});
expect(findViewFileButton().attributes('href')).toBe(viewPath);
expect(findViewFileButton().text()).toEqual(
@@ -367,9 +404,11 @@ describe('DiffFileHeader component', () => {
describe('when diff is fully expanded', () => {
it('is not rendered', () => {
createComponent({
- diffFile: {
- ...diffFile,
- is_fully_expanded: true,
+ props: {
+ diffFile: {
+ ...diffFile,
+ is_fully_expanded: true,
+ },
},
});
expect(findExpandButton().exists()).toBe(false);
@@ -387,17 +426,17 @@ describe('DiffFileHeader component', () => {
};
it('renders expand to full file button if not showing full file already', () => {
- createComponent(fullyNotExpandedFileProps);
+ createComponent({ props: fullyNotExpandedFileProps });
expect(findExpandButton().exists()).toBe(true);
});
it('renders loading icon when loading full file', () => {
- createComponent(fullyNotExpandedFileProps);
+ createComponent({ props: fullyNotExpandedFileProps });
expect(findExpandButton().exists()).toBe(true);
});
it('toggles full diff on click', () => {
- createComponent(fullyNotExpandedFileProps);
+ createComponent({ props: fullyNotExpandedFileProps });
findExpandButton().vm.$emit('click');
expect(mockStoreConfig.modules.diffs.actions.toggleFullDiff).toHaveBeenCalled();
});
@@ -407,7 +446,9 @@ describe('DiffFileHeader component', () => {
it('uses discussionPath for link if it is defined', () => {
const discussionPath = 'link://to/discussion';
createComponent({
- discussionPath,
+ props: {
+ discussionPath,
+ },
});
expect(findTitleLink().attributes('href')).toBe(discussionPath);
});
@@ -436,21 +477,21 @@ describe('DiffFileHeader component', () => {
describe('for new file', () => {
it('displays the path', () => {
- createComponent({ diffFile: { ...diffFile, new_file: true } });
+ createComponent({ props: { diffFile: { ...diffFile, new_file: true } } });
expect(findTitleLink().text()).toBe(diffFile.file_path);
});
});
describe('for deleted file', () => {
it('displays the path', () => {
- createComponent({ diffFile: { ...diffFile, deleted_file: true } });
+ createComponent({ props: { diffFile: { ...diffFile, deleted_file: true } } });
expect(findTitleLink().text()).toBe(
sprintf(__('%{filePath} deleted'), { filePath: diffFile.file_path }, false),
);
});
it('does not show edit button', () => {
- createComponent({ diffFile: { ...diffFile, deleted_file: true } });
+ createComponent({ props: { diffFile: { ...diffFile, deleted_file: true } } });
expect(findEditButton().exists()).toBe(false);
});
});
@@ -458,11 +499,13 @@ describe('DiffFileHeader component', () => {
describe('for renamed file', () => {
it('displays old and new path if the file was renamed', () => {
createComponent({
- diffFile: {
- ...diffFile,
- renamed_file: true,
- old_path_html: 'old',
- new_path_html: 'new',
+ props: {
+ diffFile: {
+ ...diffFile,
+ renamed_file: true,
+ old_path_html: 'old',
+ new_path_html: 'new',
+ },
},
});
expect(findTitleLink().text()).toMatch(/^old.+new/s);
@@ -473,13 +516,132 @@ describe('DiffFileHeader component', () => {
it('renders view replaced file button', () => {
const replacedViewPath = 'some/path';
createComponent({
- diffFile: {
- ...diffFile,
- replaced_view_path: replacedViewPath,
+ props: {
+ diffFile: {
+ ...diffFile,
+ replaced_view_path: replacedViewPath,
+ },
+ addMergeRequestButtons: true,
},
- addMergeRequestButtons: true,
});
expect(findReplacedFileButton().exists()).toBe(true);
});
});
+
+ describe('file reviews', () => {
+ it('calls the action to set the new review', () => {
+ createComponent({
+ props: {
+ diffFile: {
+ ...diffFile,
+ viewer: {
+ ...diffFile.viewer,
+ automaticallyCollapsed: false,
+ manuallyCollapsed: null,
+ },
+ },
+ showLocalFileReviews: true,
+ addMergeRequestButtons: true,
+ },
+ });
+
+ const file = wrapper.vm.diffFile;
+
+ findReviewFileCheckbox().vm.$emit('change', true);
+
+ return testAction(
+ reviewFile,
+ { file, reviewed: true },
+ {},
+ [{ type: SET_MR_FILE_REVIEWS, payload: { [file.file_identifier_hash]: [file.id] } }],
+ [],
+ );
+ });
+
+ it.each`
+ description | newReviewedStatus | collapseType | aCollapse | mCollapse | callAction
+ ${'does nothing'} | ${true} | ${DIFF_FILE_MANUAL_COLLAPSE} | ${false} | ${true} | ${false}
+ ${'does nothing'} | ${false} | ${DIFF_FILE_AUTOMATIC_COLLAPSE} | ${true} | ${null} | ${false}
+ ${'does nothing'} | ${true} | ${'not collapsed'} | ${false} | ${null} | ${false}
+ ${'does nothing'} | ${false} | ${'not collapsed'} | ${false} | ${null} | ${false}
+ ${'collapses the file'} | ${true} | ${DIFF_FILE_AUTOMATIC_COLLAPSE} | ${true} | ${null} | ${true}
+ `(
+ "$description if the new review status is reviewed = $newReviewedStatus and the file's collapse type is collapse = $collapseType",
+ ({ newReviewedStatus, aCollapse, mCollapse, callAction }) => {
+ createComponent({
+ props: {
+ diffFile: {
+ ...diffFile,
+ viewer: {
+ ...diffFile.viewer,
+ automaticallyCollapsed: aCollapse,
+ manuallyCollapsed: mCollapse,
+ },
+ },
+ showLocalFileReviews: true,
+ addMergeRequestButtons: true,
+ },
+ });
+
+ findReviewFileCheckbox().vm.$emit('change', newReviewedStatus);
+
+ if (callAction) {
+ expect(mockStoreConfig.modules.diffs.actions.setFileCollapsedByUser).toHaveBeenCalled();
+ } else {
+ expect(
+ mockStoreConfig.modules.diffs.actions.setFileCollapsedByUser,
+ ).not.toHaveBeenCalled();
+ }
+ },
+ );
+
+ it.each`
+ description | show | visible
+ ${'shows'} | ${true} | ${true}
+ ${'hides'} | ${false} | ${false}
+ `(
+ '$description the file review feature given { showLocalFileReviewsProp: $show }',
+ ({ show, visible }) => {
+ createComponent({
+ props: {
+ showLocalFileReviews: show,
+ addMergeRequestButtons: true,
+ },
+ });
+
+ expect(findReviewFileCheckbox().exists()).toEqual(visible);
+ },
+ );
+
+ it.each`
+ open | status | fires
+ ${true} | ${true} | ${true}
+ ${false} | ${false} | ${true}
+ ${true} | ${false} | ${false}
+ ${false} | ${true} | ${false}
+ `(
+ 'toggles appropriately when { fileExpanded: $open, newReviewStatus: $status }',
+ ({ open, status, fires }) => {
+ createComponent({
+ props: {
+ diffFile: {
+ ...diffFile,
+ viewer: {
+ ...diffFile.viewer,
+ automaticallyCollapsed: false,
+ manuallyCollapsed: null,
+ },
+ },
+ showLocalFileReviews: true,
+ addMergeRequestButtons: true,
+ expanded: open,
+ },
+ });
+
+ findReviewFileCheckbox().vm.$emit('change', status);
+
+ expect(Boolean(wrapper.emitted().toggleFile)).toBe(fires);
+ },
+ );
+ });
});
diff --git a/spec/frontend/diffs/components/diff_file_spec.js b/spec/frontend/diffs/components/diff_file_spec.js
index c715d779986..a8a1c88b0cc 100644
--- a/spec/frontend/diffs/components/diff_file_spec.js
+++ b/spec/frontend/diffs/components/diff_file_spec.js
@@ -6,8 +6,6 @@ import axios from '~/lib/utils/axios_utils';
import httpStatus from '~/lib/utils/http_status';
import createDiffsStore from '~/diffs/store/modules';
import createNotesStore from '~/notes/stores/modules';
-import diffFileMockDataReadable from '../mock_data/diff_file';
-import diffFileMockDataUnreadable from '../mock_data/diff_file_unreadable';
import DiffFileComponent from '~/diffs/components/diff_file.vue';
import DiffFileHeaderComponent from '~/diffs/components/diff_file_header.vue';
@@ -21,6 +19,8 @@ import {
} from '~/diffs/constants';
import { diffViewerModes, diffViewerErrors } from '~/ide/constants';
+import diffFileMockDataUnreadable from '../mock_data/diff_file_unreadable';
+import diffFileMockDataReadable from '../mock_data/diff_file';
function changeViewer(store, index, { automaticallyCollapsed, manuallyCollapsed, name }) {
const file = store.state.diffs.diffFiles[index];
@@ -66,7 +66,7 @@ function markFileToBeRendered(store, index = 0) {
});
}
-function createComponent({ file, first = false, last = false }) {
+function createComponent({ file, first = false, last = false, options = {}, props = {} }) {
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -89,7 +89,9 @@ function createComponent({ file, first = false, last = false }) {
viewDiffsFileByFile: false,
isFirstFile: first,
isLastFile: last,
+ ...props,
},
+ ...options,
});
return {
@@ -220,6 +222,53 @@ describe('DiffFile', () => {
});
});
+ describe('computed', () => {
+ describe('showLocalFileReviews', () => {
+ let gon;
+
+ function setLoggedIn(bool) {
+ window.gon.current_user_id = bool;
+ }
+
+ beforeAll(() => {
+ gon = window.gon;
+ window.gon = {};
+ });
+
+ afterEach(() => {
+ window.gon = gon;
+ });
+
+ it.each`
+ loggedIn | featureOn | bool
+ ${true} | ${true} | ${true}
+ ${false} | ${true} | ${false}
+ ${true} | ${false} | ${false}
+ ${false} | ${false} | ${false}
+ `(
+ 'should be $bool when { userIsLoggedIn: $loggedIn, featureEnabled: $featureOn }',
+ ({ loggedIn, featureOn, bool }) => {
+ setLoggedIn(loggedIn);
+
+ ({ wrapper } = createComponent({
+ options: {
+ provide: {
+ glFeatures: {
+ localFileReviews: featureOn,
+ },
+ },
+ },
+ props: {
+ file: store.state.diffs.diffFiles[0],
+ },
+ }));
+
+ expect(wrapper.vm.showLocalFileReviews).toBe(bool);
+ },
+ );
+ });
+ });
+
describe('collapsing', () => {
describe(`\`${EVT_EXPAND_ALL_FILES}\` event`, () => {
beforeEach(() => {
@@ -422,9 +471,11 @@ describe('DiffFile', () => {
await wrapper.vm.$nextTick();
- expect(wrapper.vm.$el.innerText).toContain(
- 'This source diff could not be displayed because it is too large',
- );
+ const button = wrapper.find('[data-testid="blob-button"]');
+
+ expect(wrapper.text()).toContain('Changes are too large to be shown.');
+ expect(button.html()).toContain('View file @');
+ expect(button.attributes('href')).toBe('/file/view/path');
});
});
});
diff --git a/spec/frontend/diffs/components/diff_row_spec.js b/spec/frontend/diffs/components/diff_row_spec.js
index c06d8e78316..0bfbbc16f07 100644
--- a/spec/frontend/diffs/components/diff_row_spec.js
+++ b/spec/frontend/diffs/components/diff_row_spec.js
@@ -3,8 +3,8 @@ import { getByTestId, fireEvent } from '@testing-library/dom';
import Vuex from 'vuex';
import diffsModule from '~/diffs/store/modules';
import DiffRow from '~/diffs/components/diff_row.vue';
-import diffFileMockData from '../mock_data/diff_file';
import { mapParallel } from '~/diffs/components/diff_row_utils';
+import diffFileMockData from '../mock_data/diff_file';
describe('DiffRow', () => {
const testLines = [
diff --git a/spec/frontend/diffs/components/diff_row_utils_spec.js b/spec/frontend/diffs/components/diff_row_utils_spec.js
index d70d6b609ac..47ae3cd5867 100644
--- a/spec/frontend/diffs/components/diff_row_utils_spec.js
+++ b/spec/frontend/diffs/components/diff_row_utils_spec.js
@@ -143,10 +143,21 @@ describe('addCommentTooltip', () => {
'Commenting on symbolic links that replace or are replaced by files is currently not supported.';
const brokenRealTooltip =
'Commenting on files that replace or are replaced by symbolic links is currently not supported.';
+ const commentTooltip = 'Add a comment to this line';
+ const dragTooltip = 'Add a comment to this line or drag for multiple lines';
+
it('should return default tooltip', () => {
expect(utils.addCommentTooltip()).toBeUndefined();
});
+ it('should return comment tooltip', () => {
+ expect(utils.addCommentTooltip({})).toEqual(commentTooltip);
+ });
+
+ it('should return drag comment tooltip when dragging is enabled', () => {
+ expect(utils.addCommentTooltip({}, true)).toEqual(dragTooltip);
+ });
+
it('should return broken symlink tooltip', () => {
expect(utils.addCommentTooltip({ commentsDisabled: { wasSymbolic: true } })).toEqual(
brokenSymLinkTooltip,
diff --git a/spec/frontend/diffs/components/diff_view_spec.js b/spec/frontend/diffs/components/diff_view_spec.js
index 3d36ebf14a3..3f8fc8e6afe 100644
--- a/spec/frontend/diffs/components/diff_view_spec.js
+++ b/spec/frontend/diffs/components/diff_view_spec.js
@@ -55,12 +55,12 @@ describe('DiffView', () => {
});
it.each`
- type | side | container | sides | total
- ${'parallel'} | ${'left'} | ${'.old'} | ${{ left: { lineDraft: {} }, right: { lineDraft: {} } }} | ${2}
- ${'parallel'} | ${'right'} | ${'.new'} | ${{ left: { lineDraft: {} }, right: { lineDraft: {} } }} | ${2}
- ${'inline'} | ${'left'} | ${'.old'} | ${{ left: { lineDraft: {} } }} | ${1}
- ${'inline'} | ${'right'} | ${'.new'} | ${{ right: { lineDraft: {} } }} | ${1}
- ${'inline'} | ${'left'} | ${'.old'} | ${{ left: { lineDraft: {} }, right: { lineDraft: {} } }} | ${1}
+ type | side | container | sides | total
+ ${'parallel'} | ${'left'} | ${'.old'} | ${{ left: { lineDraft: {}, renderDiscussion: true }, right: { lineDraft: {}, renderDiscussion: true } }} | ${2}
+ ${'parallel'} | ${'right'} | ${'.new'} | ${{ left: { lineDraft: {}, renderDiscussion: true }, right: { lineDraft: {}, renderDiscussion: true } }} | ${2}
+ ${'inline'} | ${'left'} | ${'.old'} | ${{ left: { lineDraft: {}, renderDiscussion: true } }} | ${1}
+ ${'inline'} | ${'left'} | ${'.old'} | ${{ left: { lineDraft: {}, renderDiscussion: true } }} | ${1}
+ ${'inline'} | ${'left'} | ${'.old'} | ${{ left: { lineDraft: {}, renderDiscussion: true } }} | ${1}
`(
'renders a $type comment row with comment cell on $side',
({ type, container, sides, total }) => {
diff --git a/spec/frontend/diffs/components/inline_diff_table_row_spec.js b/spec/frontend/diffs/components/inline_diff_table_row_spec.js
index 21e7d7397a0..56a5306752f 100644
--- a/spec/frontend/diffs/components/inline_diff_table_row_spec.js
+++ b/spec/frontend/diffs/components/inline_diff_table_row_spec.js
@@ -2,9 +2,9 @@ import { shallowMount } from '@vue/test-utils';
import { createStore } from '~/mr_notes/stores';
import InlineDiffTableRow from '~/diffs/components/inline_diff_table_row.vue';
import DiffGutterAvatars from '~/diffs/components/diff_gutter_avatars.vue';
+import { mapInline } from '~/diffs/components/diff_row_utils';
import diffFileMockData from '../mock_data/diff_file';
import discussionsMockData from '../mock_data/diff_discussions';
-import { mapInline } from '~/diffs/components/diff_row_utils';
const TEST_USER_ID = 'abc123';
const TEST_USER = { id: TEST_USER_ID };
diff --git a/spec/frontend/diffs/components/parallel_diff_table_row_spec.js b/spec/frontend/diffs/components/parallel_diff_table_row_spec.js
index 445553706b7..51c4c4ae9d6 100644
--- a/spec/frontend/diffs/components/parallel_diff_table_row_spec.js
+++ b/spec/frontend/diffs/components/parallel_diff_table_row_spec.js
@@ -4,8 +4,8 @@ import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
import { createStore } from '~/mr_notes/stores';
import ParallelDiffTableRow from '~/diffs/components/parallel_diff_table_row.vue';
import { mapParallel } from '~/diffs/components/diff_row_utils';
-import diffFileMockData from '../mock_data/diff_file';
import DiffGutterAvatars from '~/diffs/components/diff_gutter_avatars.vue';
+import diffFileMockData from '../mock_data/diff_file';
import discussionsMockData from '../mock_data/diff_discussions';
describe('ParallelDiffTableRow', () => {
diff --git a/spec/frontend/diffs/store/actions_spec.js b/spec/frontend/diffs/store/actions_spec.js
index 056ac23fcf7..c204648337a 100644
--- a/spec/frontend/diffs/store/actions_spec.js
+++ b/spec/frontend/diffs/store/actions_spec.js
@@ -58,8 +58,8 @@ import axios from '~/lib/utils/axios_utils';
import * as utils from '~/diffs/store/utils';
import * as commonUtils from '~/lib/utils/common_utils';
import { mergeUrlParams } from '~/lib/utils/url_utility';
-import { diffMetadata } from '../mock_data/diff_metadata';
import { deprecatedCreateFlash as createFlash } from '~/flash';
+import { diffMetadata } from '../mock_data/diff_metadata';
jest.mock('~/flash');
diff --git a/spec/frontend/diffs/store/getters_spec.js b/spec/frontend/diffs/store/getters_spec.js
index 4d7f861ac22..85dc52f8bf7 100644
--- a/spec/frontend/diffs/store/getters_spec.js
+++ b/spec/frontend/diffs/store/getters_spec.js
@@ -375,26 +375,4 @@ describe('Diffs Module Getters', () => {
});
});
});
-
- describe('fileReviews', () => {
- const file1 = { id: '123', file_identifier_hash: 'abc' };
- const file2 = { id: '098', file_identifier_hash: 'abc' };
-
- it.each`
- reviews | files | fileReviews
- ${{}} | ${[file1, file2]} | ${[false, false]}
- ${{ abc: ['123'] }} | ${[file1, file2]} | ${[true, false]}
- ${{ abc: ['098'] }} | ${[file1, file2]} | ${[false, true]}
- ${{ def: ['123'] }} | ${[file1, file2]} | ${[false, false]}
- ${{ abc: ['123'], def: ['098'] }} | ${[]} | ${[]}
- `(
- 'returns $fileReviews based on the diff files in state and the existing reviews $reviews',
- ({ reviews, files, fileReviews }) => {
- localState.diffFiles = files;
- localState.mrReviews = reviews;
-
- expect(getters.fileReviews(localState)).toStrictEqual(fileReviews);
- },
- );
- });
});
diff --git a/spec/frontend/diffs/store/mutations_spec.js b/spec/frontend/diffs/store/mutations_spec.js
index 2c342d8e2a5..e0fefc4e053 100644
--- a/spec/frontend/diffs/store/mutations_spec.js
+++ b/spec/frontend/diffs/store/mutations_spec.js
@@ -2,8 +2,8 @@ import createState from '~/diffs/store/modules/diff_state';
import mutations from '~/diffs/store/mutations';
import * as types from '~/diffs/store/mutation_types';
import { INLINE_DIFF_VIEW_TYPE, INLINE_DIFF_LINES_KEY } from '~/diffs/constants';
-import diffFileMockData from '../mock_data/diff_file';
import * as utils from '~/diffs/store/utils';
+import diffFileMockData from '../mock_data/diff_file';
describe('DiffsStoreMutations', () => {
describe('SET_BASE_CONFIG', () => {
diff --git a/spec/frontend/diffs/utils/diff_file_spec.js b/spec/frontend/diffs/utils/diff_file_spec.js
index 2de8db28e71..c6cfdfced65 100644
--- a/spec/frontend/diffs/utils/diff_file_spec.js
+++ b/spec/frontend/diffs/utils/diff_file_spec.js
@@ -1,4 +1,4 @@
-import { prepareRawDiffFile } from '~/diffs/utils/diff_file';
+import { prepareRawDiffFile, getShortShaFromFile } from '~/diffs/utils/diff_file';
function getDiffFiles() {
const loadFull = 'namespace/project/-/merge_requests/12345/diff_for_path?file_identifier=abc';
@@ -143,4 +143,15 @@ describe('diff_file utilities', () => {
expect(preppedFile).not.toHaveProp('id');
});
});
+
+ describe('getShortShaFromFile', () => {
+ it.each`
+ response | cs
+ ${'12345678'} | ${'12345678abcdogcat'}
+ ${null} | ${undefined}
+ ${'hidogcat'} | ${'hidogcatmorethings'}
+ `('returns $response for a file with { content_sha: $cs }', ({ response, cs }) => {
+ expect(getShortShaFromFile({ content_sha: cs })).toBe(response);
+ });
+ });
});
diff --git a/spec/frontend/diffs/utils/file_reviews_spec.js b/spec/frontend/diffs/utils/file_reviews_spec.js
index 819426ee75f..a58c19a7245 100644
--- a/spec/frontend/diffs/utils/file_reviews_spec.js
+++ b/spec/frontend/diffs/utils/file_reviews_spec.js
@@ -5,6 +5,7 @@ import {
setReviewsForMergeRequest,
isFileReviewed,
markFileReview,
+ reviewStatuses,
reviewable,
} from '~/diffs/utils/file_reviews';
@@ -28,6 +29,39 @@ describe('File Review(s) utilities', () => {
localStorage.clear();
});
+ describe('isFileReviewed', () => {
+ it.each`
+ description | diffFile | fileReviews
+ ${'the file does not have an `id`'} | ${{ ...file, id: undefined }} | ${getDefaultReviews()}
+ ${'there are no reviews for the file'} | ${file} | ${{ ...getDefaultReviews(), abc: undefined }}
+ `('returns `false` if $description', ({ diffFile, fileReviews }) => {
+ expect(isFileReviewed(fileReviews, diffFile)).toBe(false);
+ });
+
+ it("returns `true` for a file if it's available in the provided reviews", () => {
+ expect(isFileReviewed(reviews, file)).toBe(true);
+ });
+ });
+
+ describe('reviewStatuses', () => {
+ const file1 = { id: '123', file_identifier_hash: 'abc' };
+ const file2 = { id: '098', file_identifier_hash: 'abc' };
+
+ it.each`
+ mrReviews | files | fileReviews
+ ${{}} | ${[file1, file2]} | ${[false, false]}
+ ${{ abc: ['123'] }} | ${[file1, file2]} | ${[true, false]}
+ ${{ abc: ['098'] }} | ${[file1, file2]} | ${[false, true]}
+ ${{ def: ['123'] }} | ${[file1, file2]} | ${[false, false]}
+ ${{ abc: ['123'], def: ['098'] }} | ${[]} | ${[]}
+ `(
+ 'returns $fileReviews based on the diff files in state and the existing reviews $reviews',
+ ({ mrReviews, files, fileReviews }) => {
+ expect(reviewStatuses(files, mrReviews)).toStrictEqual(fileReviews);
+ },
+ );
+ });
+
describe('getReviewsForMergeRequest', () => {
it('fetches the appropriate stored reviews from localStorage', () => {
getReviewsForMergeRequest(mrPath);
@@ -73,20 +107,6 @@ describe('File Review(s) utilities', () => {
});
});
- describe('isFileReviewed', () => {
- it.each`
- description | diffFile | fileReviews
- ${'the file does not have an `id`'} | ${{ ...file, id: undefined }} | ${getDefaultReviews()}
- ${'there are no reviews for the file'} | ${file} | ${{ ...getDefaultReviews(), abc: undefined }}
- `('returns `false` if $description', ({ diffFile, fileReviews }) => {
- expect(isFileReviewed(fileReviews, diffFile)).toBe(false);
- });
-
- it("returns `true` for a file if it's available in the provided reviews", () => {
- expect(isFileReviewed(reviews, file)).toBe(true);
- });
- });
-
describe('reviewable', () => {
it.each`
response | diffFile | description
diff --git a/spec/frontend/editor/editor_lite_spec.js b/spec/frontend/editor/editor_lite_spec.js
index c3099997287..20937bc7063 100644
--- a/spec/frontend/editor/editor_lite_spec.js
+++ b/spec/frontend/editor/editor_lite_spec.js
@@ -1,14 +1,21 @@
/* eslint-disable max-classes-per-file */
-import { editor as monacoEditor, languages as monacoLanguages, Uri } from 'monaco-editor';
+import { editor as monacoEditor, languages as monacoLanguages } from 'monaco-editor';
import waitForPromises from 'helpers/wait_for_promises';
-import Editor from '~/editor/editor_lite';
+import { joinPaths } from '~/lib/utils/url_utility';
+import EditorLite from '~/editor/editor_lite';
import { EditorLiteExtension } from '~/editor/extensions/editor_lite_extension_base';
import { DEFAULT_THEME, themes } from '~/ide/lib/themes';
-import { EDITOR_LITE_INSTANCE_ERROR_NO_EL, URI_PREFIX } from '~/editor/constants';
+import {
+ EDITOR_LITE_INSTANCE_ERROR_NO_EL,
+ URI_PREFIX,
+ EDITOR_READY_EVENT,
+} from '~/editor/constants';
describe('Base editor', () => {
let editorEl;
let editor;
+ let defaultArguments;
+ const blobOriginalContent = 'Foo Foo';
const blobContent = 'Foo Bar';
const blobPath = 'test.md';
const blobGlobalId = 'snippet_777';
@@ -17,15 +24,19 @@ describe('Base editor', () => {
beforeEach(() => {
setFixtures('<div id="editor" data-editor-loading></div>');
editorEl = document.getElementById('editor');
- editor = new Editor();
+ defaultArguments = { el: editorEl, blobPath, blobContent, blobGlobalId };
+ editor = new EditorLite();
});
afterEach(() => {
editor.dispose();
editorEl.remove();
+ monacoEditor.getModels().forEach((model) => {
+ model.dispose();
+ });
});
- const createUri = (...paths) => Uri.file([URI_PREFIX, ...paths].join('/'));
+ const uriFilePath = joinPaths('/', URI_PREFIX, blobGlobalId, blobPath);
it('initializes Editor with basic properties', () => {
expect(editor).toBeDefined();
@@ -38,76 +49,192 @@ describe('Base editor', () => {
expect(editorEl.dataset.editorLoading).toBeUndefined();
});
- describe('instance of the Editor', () => {
+ describe('instance of the Editor Lite', () => {
let modelSpy;
let instanceSpy;
- let setModel;
- let dispose;
+ const setModel = jest.fn();
+ const dispose = jest.fn();
+ const mockModelReturn = (res = fakeModel) => {
+ modelSpy = jest.spyOn(monacoEditor, 'createModel').mockImplementation(() => res);
+ };
+ const mockDecorateInstance = (decorations = {}) => {
+ jest.spyOn(EditorLite, 'convertMonacoToELInstance').mockImplementation((inst) => {
+ return Object.assign(inst, decorations);
+ });
+ };
beforeEach(() => {
- setModel = jest.fn();
- dispose = jest.fn();
- modelSpy = jest.spyOn(monacoEditor, 'createModel').mockImplementation(() => fakeModel);
- instanceSpy = jest.spyOn(monacoEditor, 'create').mockImplementation(() => ({
- setModel,
- dispose,
- onDidDispose: jest.fn(),
- }));
+ modelSpy = jest.spyOn(monacoEditor, 'createModel');
});
- it('throws an error if no dom element is supplied', () => {
- expect(() => {
- editor.createInstance();
- }).toThrow(EDITOR_LITE_INSTANCE_ERROR_NO_EL);
+ describe('instance of the Code Editor', () => {
+ beforeEach(() => {
+ instanceSpy = jest.spyOn(monacoEditor, 'create');
+ });
- expect(modelSpy).not.toHaveBeenCalled();
- expect(instanceSpy).not.toHaveBeenCalled();
- expect(setModel).not.toHaveBeenCalled();
- });
+ it('throws an error if no dom element is supplied', () => {
+ mockDecorateInstance();
+ expect(() => {
+ editor.createInstance();
+ }).toThrow(EDITOR_LITE_INSTANCE_ERROR_NO_EL);
- it('creates model to be supplied to Monaco editor', () => {
- editor.createInstance({ el: editorEl, blobPath, blobContent, blobGlobalId: '' });
+ expect(modelSpy).not.toHaveBeenCalled();
+ expect(instanceSpy).not.toHaveBeenCalled();
+ expect(EditorLite.convertMonacoToELInstance).not.toHaveBeenCalled();
+ });
- expect(modelSpy).toHaveBeenCalledWith(blobContent, undefined, createUri(blobPath));
- expect(setModel).toHaveBeenCalledWith(fakeModel);
- });
+ it('creates model to be supplied to Monaco editor', () => {
+ mockModelReturn();
+ mockDecorateInstance({
+ setModel,
+ });
+ editor.createInstance(defaultArguments);
- it('initializes the instance on a supplied DOM node', () => {
- editor.createInstance({ el: editorEl });
+ expect(modelSpy).toHaveBeenCalledWith(
+ blobContent,
+ undefined,
+ expect.objectContaining({
+ path: uriFilePath,
+ }),
+ );
+ expect(setModel).toHaveBeenCalledWith(fakeModel);
+ });
- expect(editor.editorEl).not.toBe(null);
- expect(instanceSpy).toHaveBeenCalledWith(editorEl, expect.anything());
- });
+ it('does not create a model automatically if model is passed as `null`', () => {
+ mockDecorateInstance({
+ setModel,
+ });
+ editor.createInstance({ ...defaultArguments, model: null });
+ expect(modelSpy).not.toHaveBeenCalled();
+ expect(setModel).not.toHaveBeenCalled();
+ });
- it('with blobGlobalId, creates model with id in uri', () => {
- editor.createInstance({ el: editorEl, blobPath, blobContent, blobGlobalId });
+ it('initializes the instance on a supplied DOM node', () => {
+ editor.createInstance({ el: editorEl });
- expect(modelSpy).toHaveBeenCalledWith(
- blobContent,
- undefined,
- createUri(blobGlobalId, blobPath),
- );
- });
+ expect(editor.editorEl).not.toBe(null);
+ expect(instanceSpy).toHaveBeenCalledWith(editorEl, expect.anything());
+ });
- it('initializes instance with passed properties', () => {
- const instanceOptions = {
- foo: 'bar',
- };
- editor.createInstance({
- el: editorEl,
- ...instanceOptions,
+ it('with blobGlobalId, creates model with the id in uri', () => {
+ editor.createInstance(defaultArguments);
+
+ expect(modelSpy).toHaveBeenCalledWith(
+ blobContent,
+ undefined,
+ expect.objectContaining({
+ path: uriFilePath,
+ }),
+ );
+ });
+
+ it('initializes instance with passed properties', () => {
+ const instanceOptions = {
+ foo: 'bar',
+ };
+ editor.createInstance({
+ el: editorEl,
+ ...instanceOptions,
+ });
+ expect(instanceSpy).toHaveBeenCalledWith(
+ editorEl,
+ expect.objectContaining(instanceOptions),
+ );
+ });
+
+ it('disposes instance when the global editor is disposed', () => {
+ mockDecorateInstance({
+ dispose,
+ });
+ editor.createInstance(defaultArguments);
+
+ expect(dispose).not.toHaveBeenCalled();
+
+ editor.dispose();
+
+ expect(dispose).toHaveBeenCalled();
+ });
+
+ it("removes the disposed instance from the global editor's storage and disposes the associated model", () => {
+ mockModelReturn();
+ mockDecorateInstance({
+ setModel,
+ });
+ const instance = editor.createInstance(defaultArguments);
+
+ expect(editor.instances).toHaveLength(1);
+ expect(fakeModel.dispose).not.toHaveBeenCalled();
+
+ instance.dispose();
+
+ expect(editor.instances).toHaveLength(0);
+ expect(fakeModel.dispose).toHaveBeenCalled();
});
- expect(instanceSpy).toHaveBeenCalledWith(editorEl, expect.objectContaining(instanceOptions));
});
- it('disposes instance when the editor is disposed', () => {
- editor.createInstance({ el: editorEl, blobPath, blobContent, blobGlobalId });
+ describe('instance of the Diff Editor', () => {
+ beforeEach(() => {
+ instanceSpy = jest.spyOn(monacoEditor, 'createDiffEditor');
+ });
- expect(dispose).not.toHaveBeenCalled();
+ it('creates the Diff Editor through the regular Code Editor path, just with the isDiff flag on', () => {
+ const spy = jest.spyOn(editor, 'createInstance').mockImplementation(() => {});
+ editor.createDiffInstance();
+ expect(spy).toHaveBeenCalledWith(
+ expect.objectContaining({
+ isDiff: true,
+ }),
+ );
+ });
- editor.dispose();
+ it('initializes the instance on a supplied DOM node', () => {
+ const wrongInstanceSpy = jest.spyOn(monacoEditor, 'create').mockImplementation(() => ({}));
+ editor.createDiffInstance({ ...defaultArguments, blobOriginalContent });
+
+ expect(editor.editorEl).not.toBe(null);
+ expect(wrongInstanceSpy).not.toHaveBeenCalled();
+ expect(instanceSpy).toHaveBeenCalledWith(editorEl, expect.anything());
+ });
+
+ it('creates correct model for the Diff Editor', () => {
+ const instance = editor.createDiffInstance({ ...defaultArguments, blobOriginalContent });
+ const getDiffModelValue = (model) => instance.getModel()[model].getValue();
+
+ expect(modelSpy).toHaveBeenCalledTimes(2);
+ expect(modelSpy.mock.calls[0]).toEqual([
+ blobContent,
+ undefined,
+ expect.objectContaining({
+ path: uriFilePath,
+ }),
+ ]);
+ expect(modelSpy.mock.calls[1]).toEqual([blobOriginalContent, 'markdown']);
+ expect(getDiffModelValue('original')).toBe(blobOriginalContent);
+ expect(getDiffModelValue('modified')).toBe(blobContent);
+ });
- expect(dispose).toHaveBeenCalled();
+ it('correctly disposes the diff editor model', () => {
+ const modifiedModel = fakeModel;
+ const originalModel = { ...fakeModel };
+ mockDecorateInstance({
+ getModel: jest.fn().mockReturnValue({
+ original: originalModel,
+ modified: modifiedModel,
+ }),
+ });
+
+ const instance = editor.createDiffInstance({ ...defaultArguments, blobOriginalContent });
+
+ expect(editor.instances).toHaveLength(1);
+ expect(originalModel.dispose).not.toHaveBeenCalled();
+ expect(modifiedModel.dispose).not.toHaveBeenCalled();
+
+ instance.dispose();
+
+ expect(editor.instances).toHaveLength(0);
+ expect(originalModel.dispose).toHaveBeenCalled();
+ expect(modifiedModel.dispose).toHaveBeenCalled();
+ });
});
});
@@ -127,16 +254,14 @@ describe('Base editor', () => {
editorEl2 = document.getElementById('editor2');
inst1Args = {
el: editorEl1,
- blobGlobalId,
};
inst2Args = {
el: editorEl2,
blobContent,
blobPath,
- blobGlobalId,
};
- editor = new Editor();
+ editor = new EditorLite();
instanceSpy = jest.spyOn(monacoEditor, 'create');
});
@@ -166,8 +291,20 @@ describe('Base editor', () => {
expect(model1).not.toEqual(model2);
});
+ it('does not create a new model if a model for the path & globalId combo already exists', () => {
+ const modelSpy = jest.spyOn(monacoEditor, 'createModel');
+ inst1 = editor.createInstance({ ...inst2Args, blobGlobalId });
+ inst2 = editor.createInstance({ ...inst2Args, el: editorEl1, blobGlobalId });
+
+ const model1 = inst1.getModel();
+ const model2 = inst2.getModel();
+
+ expect(modelSpy).toHaveBeenCalledTimes(1);
+ expect(model1).toBe(model2);
+ });
+
it('shares global editor options among all instances', () => {
- editor = new Editor({
+ editor = new EditorLite({
readOnly: true,
});
@@ -179,7 +316,7 @@ describe('Base editor', () => {
});
it('allows overriding editor options on the instance level', () => {
- editor = new Editor({
+ editor = new EditorLite({
readOnly: true,
});
inst1 = editor.createInstance({
@@ -200,6 +337,7 @@ describe('Base editor', () => {
expect(monacoEditor.getModels()).toHaveLength(2);
inst1.dispose();
+
expect(inst1.getModel()).toBe(null);
expect(inst2.getModel()).not.toBe(null);
expect(editor.instances).toHaveLength(1);
@@ -402,19 +540,20 @@ describe('Base editor', () => {
el: editorEl,
blobPath,
blobContent,
- blobGlobalId,
extensions,
});
};
beforeEach(() => {
- editorExtensionSpy = jest.spyOn(Editor, 'pushToImportsArray').mockImplementation((arr) => {
- arr.push(
- Promise.resolve({
- default: {},
- }),
- );
- });
+ editorExtensionSpy = jest
+ .spyOn(EditorLite, 'pushToImportsArray')
+ .mockImplementation((arr) => {
+ arr.push(
+ Promise.resolve({
+ default: {},
+ }),
+ );
+ });
});
it.each([undefined, [], [''], ''])(
@@ -446,15 +585,20 @@ describe('Base editor', () => {
expect(editorExtensionSpy).toHaveBeenCalledWith(expect.any(Array), expectation);
});
- it('emits editor-ready event after all extensions were applied', async () => {
+ it('emits EDITOR_READY_EVENT after all extensions were applied', async () => {
const calls = [];
const eventSpy = jest.fn().mockImplementation(() => {
calls.push('event');
});
- const useSpy = jest.spyOn(editor, 'use').mockImplementation(() => {
+ const useSpy = jest.fn().mockImplementation(() => {
calls.push('use');
});
- editorEl.addEventListener('editor-ready', eventSpy);
+ jest.spyOn(EditorLite, 'convertMonacoToELInstance').mockImplementation((inst) => {
+ const decoratedInstance = inst;
+ decoratedInstance.use = useSpy;
+ return decoratedInstance;
+ });
+ editorEl.addEventListener(EDITOR_READY_EVENT, eventSpy);
instance = instanceConstructor('foo, bar');
await waitForPromises();
expect(useSpy.mock.calls).toHaveLength(2);
@@ -487,12 +631,6 @@ describe('Base editor', () => {
expect(inst1.alpha()).toEqual(alphaRes);
expect(inst2.alpha()).toEqual(alphaRes);
});
-
- it('extends specific instance if it has been passed', () => {
- editor.use(AlphaExt, inst2);
- expect(inst1.alpha).toBeUndefined();
- expect(inst2.alpha()).toEqual(alphaRes);
- });
});
});
@@ -526,7 +664,7 @@ describe('Base editor', () => {
it('sets default syntax highlighting theme', () => {
const expectedTheme = themes.find((t) => t.name === DEFAULT_THEME);
- editor = new Editor();
+ editor = new EditorLite();
expect(themeDefineSpy).toHaveBeenCalledWith(DEFAULT_THEME, expectedTheme.data);
expect(themeSetSpy).toHaveBeenCalledWith(DEFAULT_THEME);
@@ -538,7 +676,7 @@ describe('Base editor', () => {
expect(expectedTheme.name).not.toBe(DEFAULT_THEME);
window.gon.user_color_scheme = expectedTheme.name;
- editor = new Editor();
+ editor = new EditorLite();
expect(themeDefineSpy).toHaveBeenCalledWith(expectedTheme.name, expectedTheme.data);
expect(themeSetSpy).toHaveBeenCalledWith(expectedTheme.name);
@@ -549,7 +687,7 @@ describe('Base editor', () => {
const nonExistentTheme = { name };
window.gon.user_color_scheme = nonExistentTheme.name;
- editor = new Editor();
+ editor = new EditorLite();
expect(themeDefineSpy).not.toHaveBeenCalled();
expect(themeSetSpy).toHaveBeenCalledWith(DEFAULT_THEME);
diff --git a/spec/frontend/environments/environment_actions_spec.js b/spec/frontend/environments/environment_actions_spec.js
index 875a01c07ea..279b275eb58 100644
--- a/spec/frontend/environments/environment_actions_spec.js
+++ b/spec/frontend/environments/environment_actions_spec.js
@@ -1,6 +1,6 @@
import { shallowMount, mount } from '@vue/test-utils';
-import { TEST_HOST } from 'helpers/test_constants';
import { GlDropdown, GlDropdownItem, GlLoadingIcon, GlIcon } from '@gitlab/ui';
+import { TEST_HOST } from 'helpers/test_constants';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import eventHub from '~/environments/event_hub';
import EnvironmentActions from '~/environments/components/environment_actions.vue';
diff --git a/spec/frontend/error_tracking/components/error_tracking_list_spec.js b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
index da12237b1d9..d41233a6b72 100644
--- a/spec/frontend/error_tracking/components/error_tracking_list_spec.js
+++ b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
@@ -5,8 +5,8 @@ import stubChildren from 'helpers/stub_children';
import ErrorTrackingList from '~/error_tracking/components/error_tracking_list.vue';
import ErrorTrackingActions from '~/error_tracking/components/error_tracking_actions.vue';
import { trackErrorListViewsOptions, trackErrorStatusUpdateOptions } from '~/error_tracking/utils';
-import errorsList from './list_mock.json';
import Tracking from '~/tracking';
+import errorsList from './list_mock.json';
const localVue = createLocalVue();
localVue.use(Vuex);
diff --git a/spec/frontend/feature_flags/components/edit_feature_flag_spec.js b/spec/frontend/feature_flags/components/edit_feature_flag_spec.js
index a754c682356..e66e37a4ae6 100644
--- a/spec/frontend/feature_flags/components/edit_feature_flag_spec.js
+++ b/spec/frontend/feature_flags/components/edit_feature_flag_spec.js
@@ -75,6 +75,8 @@ describe('Edit feature flag form', () => {
});
const findAlert = () => wrapper.find(GlAlert);
+ const findWarningGlAlert = () =>
+ wrapper.findAll(GlAlert).filter((c) => c.props('variant') === 'warning');
it('should display the iid', () => {
expect(wrapper.find('h3').text()).toContain('^5');
@@ -88,7 +90,7 @@ describe('Edit feature flag form', () => {
expect(wrapper.find(GlToggle).props('value')).toBe(true);
});
- it('should not alert users that feature flags are changing soon', () => {
+ it('should alert users that the flag is read-only', () => {
expect(findAlert().text()).toContain('GitLab is moving to a new way of managing feature flags');
});
@@ -96,8 +98,9 @@ describe('Edit feature flag form', () => {
it('should render the error', () => {
store.dispatch('receiveUpdateFeatureFlagError', { message: ['The name is required'] });
return wrapper.vm.$nextTick(() => {
- expect(wrapper.find('.alert-danger').exists()).toEqual(true);
- expect(wrapper.find('.alert-danger').text()).toContain('The name is required');
+ const warningGlAlert = findWarningGlAlert();
+ expect(warningGlAlert.at(1).exists()).toEqual(true);
+ expect(warningGlAlert.at(1).text()).toContain('The name is required');
});
});
});
diff --git a/spec/frontend/feature_flags/components/form_spec.js b/spec/frontend/feature_flags/components/form_spec.js
index 3a057aedde9..6c9ccba5181 100644
--- a/spec/frontend/feature_flags/components/form_spec.js
+++ b/spec/frontend/feature_flags/components/form_spec.js
@@ -1,6 +1,7 @@
import { uniqueId } from 'lodash';
import { shallowMount } from '@vue/test-utils';
-import { GlFormTextarea, GlFormCheckbox, GlButton } from '@gitlab/ui';
+import { GlFormTextarea, GlFormCheckbox, GlButton, GlToggle } from '@gitlab/ui';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import Api from '~/api';
import Form from '~/feature_flags/components/form.vue';
import EnvironmentsDropdown from '~/feature_flags/components/environments_dropdown.vue';
@@ -14,7 +15,6 @@ import {
NEW_VERSION_FLAG,
} from '~/feature_flags/constants';
import RelatedIssuesRoot from '~/related_issues/components/related_issues_root.vue';
-import ToggleButton from '~/vue_shared/components/toggle_button.vue';
import { featureFlag, userList, allUsersStrategy } from '../mock_data';
jest.mock('~/api.js');
@@ -35,14 +35,19 @@ describe('feature flag form', () => {
},
};
+ const findAddNewScopeRow = () => wrapper.findByTestId('add-new-scope');
+ const findGlToggle = () => wrapper.find(GlToggle);
+
const factory = (props = {}, provide = {}) => {
- wrapper = shallowMount(Form, {
- propsData: { ...requiredProps, ...props },
- provide: {
- ...requiredInjections,
- ...provide,
- },
- });
+ wrapper = extendedWrapper(
+ shallowMount(Form, {
+ propsData: { ...requiredProps, ...props },
+ provide: {
+ ...requiredInjections,
+ ...provide,
+ },
+ }),
+ );
};
beforeEach(() => {
@@ -102,13 +107,13 @@ describe('feature flag form', () => {
});
it('should render scopes table with a new row ', () => {
- expect(wrapper.find('.js-add-new-scope').exists()).toBe(true);
+ expect(findAddNewScopeRow().exists()).toBe(true);
});
describe('status toggle', () => {
describe('without filled text input', () => {
it('should add a new scope with the text value empty and the status', () => {
- wrapper.find(ToggleButton).vm.$emit('change', true);
+ findGlToggle().vm.$emit('change', true);
expect(wrapper.vm.formScopes).toHaveLength(1);
expect(wrapper.vm.formScopes[0].active).toEqual(true);
@@ -121,7 +126,7 @@ describe('feature flag form', () => {
it('should be disabled if the feature flag is not active', (done) => {
wrapper.setProps({ active: false });
wrapper.vm.$nextTick(() => {
- expect(wrapper.find(ToggleButton).props('disabledInput')).toBe(true);
+ expect(findGlToggle().props('disabled')).toBe(true);
done();
});
});
@@ -166,11 +171,11 @@ describe('feature flag form', () => {
describe('scopes', () => {
it('should be possible to remove a scope', () => {
- expect(wrapper.find('.js-feature-flag-delete').exists()).toEqual(true);
+ expect(wrapper.findByTestId('feature-flag-delete').exists()).toEqual(true);
});
it('renders empty row to add a new scope', () => {
- expect(wrapper.find('.js-add-new-scope').exists()).toEqual(true);
+ expect(findAddNewScopeRow().exists()).toEqual(true);
});
it('renders the user id checkbox', () => {
@@ -186,7 +191,7 @@ describe('feature flag form', () => {
describe('update scope', () => {
describe('on click on toggle', () => {
it('should update the scope', () => {
- wrapper.find(ToggleButton).vm.$emit('change', false);
+ findGlToggle().vm.$emit('change', false);
expect(wrapper.vm.formScopes[0].active).toBe(false);
});
@@ -195,7 +200,7 @@ describe('feature flag form', () => {
wrapper.setProps({ active: false });
wrapper.vm.$nextTick(() => {
- expect(wrapper.find(ToggleButton).props('disabledInput')).toBe(true);
+ expect(findGlToggle().props('disabled')).toBe(true);
done();
});
});
@@ -294,7 +299,7 @@ describe('feature flag form', () => {
const row = wrapper.findAll('.gl-responsive-table-row').at(2);
expect(row.find(EnvironmentsDropdown).vm.disabled).toBe(true);
- expect(row.find(ToggleButton).vm.disabledInput).toBe(true);
+ expect(row.find(GlToggle).props('disabled')).toBe(true);
expect(row.find('.js-delete-scope').exists()).toBe(false);
});
});
@@ -347,10 +352,10 @@ describe('feature flag form', () => {
return wrapper.vm.$nextTick();
})
.then(() => {
- wrapper.find('.js-add-new-scope').find(ToggleButton).vm.$emit('change', true);
+ findAddNewScopeRow().find(GlToggle).vm.$emit('change', true);
})
.then(() => {
- wrapper.find(ToggleButton).vm.$emit('change', true);
+ findGlToggle().vm.$emit('change', true);
return wrapper.vm.$nextTick();
})
diff --git a/spec/frontend/feature_flags/components/new_feature_flag_spec.js b/spec/frontend/feature_flags/components/new_feature_flag_spec.js
index e317ac4b092..2dfcdf201fb 100644
--- a/spec/frontend/feature_flags/components/new_feature_flag_spec.js
+++ b/spec/frontend/feature_flags/components/new_feature_flag_spec.js
@@ -41,6 +41,9 @@ describe('New feature flag form', () => {
});
};
+ const findWarningGlAlert = () =>
+ wrapper.findAll(GlAlert).filter((c) => c.props('variant') === 'warning');
+
beforeEach(() => {
factory();
});
@@ -53,8 +56,9 @@ describe('New feature flag form', () => {
it('should render the error', () => {
store.dispatch('receiveCreateFeatureFlagError', { message: ['The name is required'] });
return wrapper.vm.$nextTick(() => {
- expect(wrapper.find('.alert').exists()).toEqual(true);
- expect(wrapper.find('.alert').text()).toContain('The name is required');
+ const warningGlAlert = findWarningGlAlert();
+ expect(warningGlAlert.at(0).exists()).toBe(true);
+ expect(warningGlAlert.at(0).text()).toContain('The name is required');
});
});
});
@@ -81,10 +85,6 @@ describe('New feature flag form', () => {
expect(wrapper.find(Form).props('scopes')).toContainEqual(defaultScope);
});
- it('should not alert users that feature flags are changing soon', () => {
- expect(wrapper.find(GlAlert).exists()).toBe(false);
- });
-
it('has an all users strategy by default', () => {
const strategies = wrapper.find(Form).props('strategies');
diff --git a/spec/frontend/filtered_search/recent_searches_root_spec.js b/spec/frontend/filtered_search/recent_searches_root_spec.js
index 6bb9e68d591..fa3267c98a1 100644
--- a/spec/frontend/filtered_search/recent_searches_root_spec.js
+++ b/spec/frontend/filtered_search/recent_searches_root_spec.js
@@ -1,32 +1,51 @@
-import Vue from 'vue';
+import { setHTMLFixture } from 'helpers/fixtures';
import RecentSearchesRoot from '~/filtered_search/recent_searches_root';
-jest.mock('vue');
+const containerId = 'test-container';
+const dropdownElementId = 'test-dropdown-element';
describe('RecentSearchesRoot', () => {
describe('render', () => {
- let recentSearchesRoot;
- let data;
- let template;
+ let recentSearchesRootMockInstance;
+ let vm;
+ let containerEl;
beforeEach(() => {
- recentSearchesRoot = {
+ setHTMLFixture(`
+ <div id="${containerId}">
+ <div id="${dropdownElementId}"></div>
+ </div>
+ `);
+
+ containerEl = document.getElementById(containerId);
+
+ recentSearchesRootMockInstance = {
store: {
- state: 'state',
+ state: {
+ recentSearches: ['foo', 'bar', 'qux'],
+ isLocalStorageAvailable: true,
+ allowedKeys: ['test'],
+ },
},
+ wrapperElement: document.getElementById(dropdownElementId),
};
- Vue.mockImplementation((options) => {
- ({ data, template } = options);
- });
+ RecentSearchesRoot.prototype.render.call(recentSearchesRootMockInstance);
+ vm = recentSearchesRootMockInstance.vm;
- RecentSearchesRoot.prototype.render.call(recentSearchesRoot);
+ return vm.$nextTick();
});
- it('should instantiate Vue', () => {
- expect(Vue).toHaveBeenCalled();
- expect(data()).toBe(recentSearchesRoot.store.state);
- expect(template).toContain(':is-local-storage-available="isLocalStorageAvailable"');
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ it('should render the recent searches', () => {
+ const { recentSearches } = recentSearchesRootMockInstance.store.state;
+
+ recentSearches.forEach((recentSearch) => {
+ expect(containerEl.textContent).toContain(recentSearch);
+ });
});
});
});
diff --git a/spec/frontend/frequent_items/components/app_spec.js b/spec/frontend/frequent_items/components/app_spec.js
index b74e4ac45cf..5b1d397ec0b 100644
--- a/spec/frontend/frequent_items/components/app_spec.js
+++ b/spec/frontend/frequent_items/components/app_spec.js
@@ -8,8 +8,8 @@ import appComponent from '~/frequent_items/components/app.vue';
import eventHub from '~/frequent_items/event_hub';
import { FREQUENT_ITEMS, HOUR_IN_MS } from '~/frequent_items/constants';
import { getTopFrequentItems } from '~/frequent_items/utils';
-import { currentSession, mockFrequentProjects, mockSearchedProjects } from '../mock_data';
import { createStore } from '~/frequent_items/store';
+import { currentSession, mockFrequentProjects, mockSearchedProjects } from '../mock_data';
useLocalStorageSpy();
diff --git a/spec/frontend/frequent_items/components/frequent_items_search_input_spec.js b/spec/frontend/frequent_items/components/frequent_items_search_input_spec.js
index cdd8b127676..eb6ed44e840 100644
--- a/spec/frontend/frequent_items/components/frequent_items_search_input_spec.js
+++ b/spec/frontend/frequent_items/components/frequent_items_search_input_spec.js
@@ -2,7 +2,6 @@ import { shallowMount } from '@vue/test-utils';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import searchComponent from '~/frequent_items/components/frequent_items_search_input.vue';
import { createStore } from '~/frequent_items/store';
-import eventHub from '~/frequent_items/event_hub';
describe('FrequentItemsSearchInputComponent', () => {
let wrapper;
@@ -45,39 +44,6 @@ describe('FrequentItemsSearchInputComponent', () => {
});
});
- describe('mounted', () => {
- it('should listen `dropdownOpen` event', (done) => {
- jest.spyOn(eventHub, '$on').mockImplementation(() => {});
- const vmX = createComponent().vm;
-
- vmX.$nextTick(() => {
- expect(eventHub.$on).toHaveBeenCalledWith(
- `${vmX.namespace}-dropdownOpen`,
- expect.any(Function),
- );
- done();
- });
- });
- });
-
- describe('beforeDestroy', () => {
- it('should unbind event listeners on eventHub', (done) => {
- const vmX = createComponent().vm;
- jest.spyOn(eventHub, '$off').mockImplementation(() => {});
-
- vmX.$mount();
- vmX.$destroy();
-
- vmX.$nextTick(() => {
- expect(eventHub.$off).toHaveBeenCalledWith(
- `${vmX.namespace}-dropdownOpen`,
- expect.any(Function),
- );
- done();
- });
- });
- });
-
describe('template', () => {
it('should render component element', () => {
expect(wrapper.classes()).toContain('search-input-container');
diff --git a/spec/frontend/frequent_items/store/actions_spec.js b/spec/frontend/frequent_items/store/actions_spec.js
index 351fde25f49..acea46eae60 100644
--- a/spec/frontend/frequent_items/store/actions_spec.js
+++ b/spec/frontend/frequent_items/store/actions_spec.js
@@ -1,5 +1,5 @@
-import testAction from 'helpers/vuex_action_helper';
import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
import axios from '~/lib/utils/axios_utils';
import AccessorUtilities from '~/lib/utils/accessor';
import * as actions from '~/frequent_items/store/actions';
diff --git a/spec/frontend/gfm_auto_complete_spec.js b/spec/frontend/gfm_auto_complete_spec.js
index c2ff66f6afc..aefd465532a 100644
--- a/spec/frontend/gfm_auto_complete_spec.js
+++ b/spec/frontend/gfm_auto_complete_spec.js
@@ -1,16 +1,13 @@
/* eslint no-param-reassign: "off" */
import $ from 'jquery';
+import MockAdapter from 'axios-mock-adapter';
import { emojiFixtureMap, initEmojiMock, describeEmojiFields } from 'helpers/emoji';
import '~/lib/utils/jquery_at_who';
import GfmAutoComplete, { membersBeforeSave } from 'ee_else_ce/gfm_auto_complete';
-
import { TEST_HOST } from 'helpers/test_constants';
import { getJSONFixture } from 'helpers/fixtures';
-
import waitForPromises from 'helpers/wait_for_promises';
-
-import MockAdapter from 'axios-mock-adapter';
import AjaxCache from '~/lib/utils/ajax_cache';
import axios from '~/lib/utils/axios_utils';
@@ -493,7 +490,7 @@ describe('GfmAutoComplete', () => {
username: 'my-group',
avatarTag: '<div class="avatar rect-avatar center avatar-inline s26">M</div>',
title: 'My Group (2)',
- search: 'my-group My Group',
+ search: 'MyGroup my-group',
icon: '',
},
]);
@@ -506,7 +503,7 @@ describe('GfmAutoComplete', () => {
avatarTag:
'<img src="./group.jpg" alt="my-group" class="avatar rect-avatar avatar-inline center s26"/>',
title: 'My Group (2)',
- search: 'my-group My Group',
+ search: 'MyGroup my-group',
icon: '',
},
]);
@@ -519,7 +516,7 @@ describe('GfmAutoComplete', () => {
avatarTag:
'<img src="./group.jpg" alt="my-group" class="avatar rect-avatar avatar-inline center s26"/>',
title: 'My Group',
- search: 'my-group My Group',
+ search: 'MyGroup my-group',
icon:
'<svg class="s16 vertical-align-middle gl-ml-2"><use xlink:href="undefined#notifications-off" /></svg>',
},
@@ -537,7 +534,7 @@ describe('GfmAutoComplete', () => {
avatarTag:
'<img src="./users.jpg" alt="my-user" class="avatar avatar-inline center s26"/>',
title: 'My User',
- search: 'my-user My User',
+ search: 'MyUser my-user',
icon: '',
},
]);
diff --git a/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap b/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
index e880f585daa..0fc4343ec3c 100644
--- a/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
+++ b/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
@@ -8,13 +8,13 @@ exports[`grafana integration component default state to match the default snapsh
<div
class="settings-header"
>
- <h3
- class="js-section-header h4"
+ <h4
+ class="js-section-header"
>
Grafana authentication
- </h3>
+ </h4>
<gl-button-stub
buttontextclasses=""
diff --git a/spec/frontend/groups/components/app_spec.js b/spec/frontend/groups/components/app_spec.js
index 9244e4f331e..9680fa8bfd2 100644
--- a/spec/frontend/groups/components/app_spec.js
+++ b/spec/frontend/groups/components/app_spec.js
@@ -60,8 +60,8 @@ describe('AppComponent', () => {
beforeEach(() => {
mock = new AxiosMockAdapter(axios);
mock.onGet('/dashboard/groups.json').reply(200, mockGroups);
- Vue.component('group-folder', groupFolderComponent);
- Vue.component('group-item', groupItemComponent);
+ Vue.component('GroupFolder', groupFolderComponent);
+ Vue.component('GroupItem', groupItemComponent);
createShallowComponent();
getGroupsSpy = jest.spyOn(vm.service, 'getGroups');
diff --git a/spec/frontend/groups/components/group_folder_spec.js b/spec/frontend/groups/components/group_folder_spec.js
index a40fa9bece8..1d8e10479b6 100644
--- a/spec/frontend/groups/components/group_folder_spec.js
+++ b/spec/frontend/groups/components/group_folder_spec.js
@@ -19,7 +19,7 @@ describe('GroupFolderComponent', () => {
let vm;
beforeEach(() => {
- Vue.component('group-item', groupItemComponent);
+ Vue.component('GroupItem', groupItemComponent);
vm = createComponent();
vm.$mount();
diff --git a/spec/frontend/groups/components/group_item_spec.js b/spec/frontend/groups/components/group_item_spec.js
index d70ea709dee..c80f9a83fc4 100644
--- a/spec/frontend/groups/components/group_item_spec.js
+++ b/spec/frontend/groups/components/group_item_spec.js
@@ -20,7 +20,7 @@ describe('GroupItemComponent', () => {
let vm;
beforeEach(() => {
- Vue.component('group-folder', groupFolderComponent);
+ Vue.component('GroupFolder', groupFolderComponent);
vm = createComponent();
diff --git a/spec/frontend/groups/components/groups_spec.js b/spec/frontend/groups/components/groups_spec.js
index 6205400eb03..1fde8fa3d6e 100644
--- a/spec/frontend/groups/components/groups_spec.js
+++ b/spec/frontend/groups/components/groups_spec.js
@@ -22,8 +22,8 @@ describe('GroupsComponent', () => {
let vm;
beforeEach(() => {
- Vue.component('group-folder', groupFolderComponent);
- Vue.component('group-item', groupItemComponent);
+ Vue.component('GroupFolder', groupFolderComponent);
+ Vue.component('GroupItem', groupItemComponent);
vm = createComponent();
diff --git a/spec/frontend/groups/components/item_actions_spec.js b/spec/frontend/groups/components/item_actions_spec.js
index 9adbc9abe13..ffbdf9b1aa6 100644
--- a/spec/frontend/groups/components/item_actions_spec.js
+++ b/spec/frontend/groups/components/item_actions_spec.js
@@ -66,6 +66,22 @@ describe('ItemActions', () => {
});
});
+ it('emits `showLeaveGroupModal` event with the correct prefix if `action` prop is passed', () => {
+ const group = {
+ ...mockParentGroupItem,
+ canEdit: true,
+ canLeave: true,
+ };
+ createComponent({
+ group,
+ action: 'test',
+ });
+ jest.spyOn(eventHub, '$emit');
+ findLeaveGroupBtn().vm.$emit('click', { stopPropagation: () => {} });
+
+ expect(eventHub.$emit).toHaveBeenCalledWith('testshowLeaveGroupModal', group, parentGroup);
+ });
+
it('does not render leave button if group can not be left', () => {
createComponent({
group: {
diff --git a/spec/frontend/groups/members/mock_data.js b/spec/frontend/groups/members/mock_data.js
deleted file mode 100644
index b84c9c6d446..00000000000
--- a/spec/frontend/groups/members/mock_data.js
+++ /dev/null
@@ -1,33 +0,0 @@
-export const membersJsonString =
- '[{"requested_at":null,"can_update":true,"can_remove":true,"can_override":false,"access_level":{"integer_value":50,"string_value":"Owner"},"source":{"id":323,"name":"My group / my subgroup","web_url":"http://127.0.0.1:3000/groups/my-group/my-subgroup"},"user":{"id":1,"name":"Administrator","username":"root","web_url":"http://127.0.0.1:3000/root","avatar_url":"https://www.gravatar.com/avatar/4816142ef496f956a277bedf1a40607b?s=80\u0026d=identicon","blocked":false,"two_factor_enabled":false},"id":524,"created_at":"2020-08-21T21:33:27.631Z","expires_at":null,"using_license":false,"group_sso":false,"group_managed_account":false}]';
-
-export const membersParsed = [
- {
- requestedAt: null,
- canUpdate: true,
- canRemove: true,
- canOverride: false,
- accessLevel: { integerValue: 50, stringValue: 'Owner' },
- source: {
- id: 323,
- name: 'My group / my subgroup',
- webUrl: 'http://127.0.0.1:3000/groups/my-group/my-subgroup',
- },
- user: {
- id: 1,
- name: 'Administrator',
- username: 'root',
- webUrl: 'http://127.0.0.1:3000/root',
- avatarUrl:
- 'https://www.gravatar.com/avatar/4816142ef496f956a277bedf1a40607b?s=80&d=identicon',
- blocked: false,
- twoFactorEnabled: false,
- },
- id: 524,
- createdAt: '2020-08-21T21:33:27.631Z',
- expiresAt: null,
- usingLicense: false,
- groupSso: false,
- groupManagedAccount: false,
- },
-];
diff --git a/spec/frontend/groups/members/utils_spec.js b/spec/frontend/groups/members/utils_spec.js
index 68945174e9d..0912e66e3e8 100644
--- a/spec/frontend/groups/members/utils_spec.js
+++ b/spec/frontend/groups/members/utils_spec.js
@@ -1,53 +1,14 @@
-import { membersJsonString, membersParsed } from './mock_data';
-import {
- parseDataAttributes,
- memberRequestFormatter,
- groupLinkRequestFormatter,
-} from '~/groups/members/utils';
+import { groupMemberRequestFormatter } from '~/groups/members/utils';
describe('group member utils', () => {
- describe('parseDataAttributes', () => {
- let el;
-
- beforeEach(() => {
- el = document.createElement('div');
- el.setAttribute('data-members', membersJsonString);
- el.setAttribute('data-group-id', '234');
- el.setAttribute('data-can-manage-members', 'true');
- });
-
- afterEach(() => {
- el = null;
- });
-
- it('correctly parses the data attributes', () => {
- expect(parseDataAttributes(el)).toEqual({
- members: membersParsed,
- sourceId: 234,
- canManageMembers: true,
- });
- });
- });
-
- describe('memberRequestFormatter', () => {
+ describe('groupMemberRequestFormatter', () => {
it('returns expected format', () => {
expect(
- memberRequestFormatter({
+ groupMemberRequestFormatter({
accessLevel: 50,
expires_at: '2020-10-16',
}),
).toEqual({ group_member: { access_level: 50, expires_at: '2020-10-16' } });
});
});
-
- describe('groupLinkRequestFormatter', () => {
- it('returns expected format', () => {
- expect(
- groupLinkRequestFormatter({
- accessLevel: 50,
- expires_at: '2020-10-16',
- }),
- ).toEqual({ group_link: { group_access: 50, expires_at: '2020-10-16' } });
- });
- });
});
diff --git a/spec/frontend/ide/components/activity_bar_spec.js b/spec/frontend/ide/components/activity_bar_spec.js
index 1a4b6ca0b71..6ea823ce4b5 100644
--- a/spec/frontend/ide/components/activity_bar_spec.js
+++ b/spec/frontend/ide/components/activity_bar_spec.js
@@ -9,6 +9,8 @@ describe('IDE activity bar', () => {
let vm;
let store;
+ const findChangesBadge = () => vm.$el.querySelector('.badge');
+
beforeEach(() => {
store = createStore();
@@ -69,4 +71,19 @@ describe('IDE activity bar', () => {
});
});
});
+
+ describe('changes badge', () => {
+ it('is rendered when files are staged', () => {
+ store.state.stagedFiles = [{ path: '/path/to/file' }];
+ vm.$mount();
+
+ expect(findChangesBadge()).toBeTruthy();
+ expect(findChangesBadge().textContent.trim()).toBe('1');
+ });
+
+ it('is not rendered when no changes are present', () => {
+ vm.$mount();
+ expect(findChangesBadge()).toBeFalsy();
+ });
+ });
});
diff --git a/spec/frontend/ide/components/commit_sidebar/actions_spec.js b/spec/frontend/ide/components/commit_sidebar/actions_spec.js
index 91751bd34ea..596b5e81b2a 100644
--- a/spec/frontend/ide/components/commit_sidebar/actions_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/actions_spec.js
@@ -3,7 +3,10 @@ import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
import { projectData, branches } from 'jest/ide/mock_data';
import { createStore } from '~/ide/stores';
import commitActions from '~/ide/components/commit_sidebar/actions.vue';
-import consts from '~/ide/stores/modules/commit/constants';
+import {
+ COMMIT_TO_NEW_BRANCH,
+ COMMIT_TO_CURRENT_BRANCH,
+} from '~/ide/stores/modules/commit/constants';
const ACTION_UPDATE_COMMIT_ACTION = 'commit/updateCommitAction';
@@ -126,16 +129,16 @@ describe('IDE commit sidebar actions', () => {
it.each`
input | expectedOption
- ${{ currentBranchId: BRANCH_DEFAULT }} | ${consts.COMMIT_TO_NEW_BRANCH}
- ${{ currentBranchId: BRANCH_DEFAULT, emptyRepo: true }} | ${consts.COMMIT_TO_CURRENT_BRANCH}
- ${{ currentBranchId: BRANCH_PROTECTED, hasMR: true }} | ${consts.COMMIT_TO_CURRENT_BRANCH}
- ${{ currentBranchId: BRANCH_PROTECTED, hasMR: false }} | ${consts.COMMIT_TO_CURRENT_BRANCH}
- ${{ currentBranchId: BRANCH_PROTECTED_NO_ACCESS, hasMR: true }} | ${consts.COMMIT_TO_NEW_BRANCH}
- ${{ currentBranchId: BRANCH_PROTECTED_NO_ACCESS, hasMR: false }} | ${consts.COMMIT_TO_NEW_BRANCH}
- ${{ currentBranchId: BRANCH_REGULAR, hasMR: true }} | ${consts.COMMIT_TO_CURRENT_BRANCH}
- ${{ currentBranchId: BRANCH_REGULAR, hasMR: false }} | ${consts.COMMIT_TO_CURRENT_BRANCH}
- ${{ currentBranchId: BRANCH_REGULAR_NO_ACCESS, hasMR: true }} | ${consts.COMMIT_TO_NEW_BRANCH}
- ${{ currentBranchId: BRANCH_REGULAR_NO_ACCESS, hasMR: false }} | ${consts.COMMIT_TO_NEW_BRANCH}
+ ${{ currentBranchId: BRANCH_DEFAULT }} | ${COMMIT_TO_NEW_BRANCH}
+ ${{ currentBranchId: BRANCH_DEFAULT, emptyRepo: true }} | ${COMMIT_TO_CURRENT_BRANCH}
+ ${{ currentBranchId: BRANCH_PROTECTED, hasMR: true }} | ${COMMIT_TO_CURRENT_BRANCH}
+ ${{ currentBranchId: BRANCH_PROTECTED, hasMR: false }} | ${COMMIT_TO_CURRENT_BRANCH}
+ ${{ currentBranchId: BRANCH_PROTECTED_NO_ACCESS, hasMR: true }} | ${COMMIT_TO_NEW_BRANCH}
+ ${{ currentBranchId: BRANCH_PROTECTED_NO_ACCESS, hasMR: false }} | ${COMMIT_TO_NEW_BRANCH}
+ ${{ currentBranchId: BRANCH_REGULAR, hasMR: true }} | ${COMMIT_TO_CURRENT_BRANCH}
+ ${{ currentBranchId: BRANCH_REGULAR, hasMR: false }} | ${COMMIT_TO_CURRENT_BRANCH}
+ ${{ currentBranchId: BRANCH_REGULAR_NO_ACCESS, hasMR: true }} | ${COMMIT_TO_NEW_BRANCH}
+ ${{ currentBranchId: BRANCH_REGULAR_NO_ACCESS, hasMR: false }} | ${COMMIT_TO_NEW_BRANCH}
`(
'with $input, it dispatches update commit action with $expectedOption',
({ input, expectedOption }) => {
diff --git a/spec/frontend/ide/components/commit_sidebar/form_spec.js b/spec/frontend/ide/components/commit_sidebar/form_spec.js
index abd7e3bb8fc..857d8f400a7 100644
--- a/spec/frontend/ide/components/commit_sidebar/form_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/form_spec.js
@@ -1,11 +1,14 @@
import Vue from 'vue';
-import { getByText } from '@testing-library/dom';
-import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
+import { shallowMount } from '@vue/test-utils';
+import { GlModal } from '@gitlab/ui';
import { projectData } from 'jest/ide/mock_data';
+import { stubComponent } from 'helpers/stub_component';
import waitForPromises from 'helpers/wait_for_promises';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { createStore } from '~/ide/stores';
-import consts from '~/ide/stores/modules/commit/constants';
+import { COMMIT_TO_NEW_BRANCH } from '~/ide/stores/modules/commit/constants';
import CommitForm from '~/ide/components/commit_sidebar/form.vue';
+import CommitMessageField from '~/ide/components/commit_sidebar/message_field.vue';
import { leftSidebarViews } from '~/ide/constants';
import {
createCodeownersCommitError,
@@ -15,256 +18,283 @@ import {
} from '~/ide/lib/errors';
describe('IDE commit form', () => {
- const Component = Vue.extend(CommitForm);
- let vm;
+ let wrapper;
let store;
- const beginCommitButton = () => vm.$el.querySelector('[data-testid="begin-commit-button"]');
+ const createComponent = () => {
+ wrapper = shallowMount(CommitForm, {
+ store,
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ stubs: {
+ GlModal: stubComponent(GlModal),
+ },
+ });
+ };
+
+ const setLastCommitMessage = (msg) => {
+ store.state.lastCommitMsg = msg;
+ };
+ const goToCommitView = () => {
+ store.state.currentActivityView = leftSidebarViews.commit.name;
+ };
+ const goToEditView = () => {
+ store.state.currentActivityView = leftSidebarViews.edit.name;
+ };
+ const findBeginCommitButton = () => wrapper.find('[data-testid="begin-commit-button"]');
+ const findBeginCommitButtonTooltip = () =>
+ wrapper.find('[data-testid="begin-commit-button-tooltip"]');
+ const findBeginCommitButtonData = () => ({
+ disabled: findBeginCommitButton().props('disabled'),
+ tooltip: getBinding(findBeginCommitButtonTooltip().element, 'gl-tooltip').value.title,
+ });
+ const findCommitButton = () => wrapper.find('[data-testid="commit-button"]');
+ const findCommitButtonTooltip = () => wrapper.find('[data-testid="commit-button-tooltip"]');
+ const findCommitButtonData = () => ({
+ disabled: findCommitButton().props('disabled'),
+ tooltip: getBinding(findCommitButtonTooltip().element, 'gl-tooltip').value.title,
+ });
+ const clickCommitButton = () => findCommitButton().vm.$emit('click');
+ const findForm = () => wrapper.find('form');
+ const submitForm = () => findForm().trigger('submit');
+ const findCommitMessageInput = () => wrapper.find(CommitMessageField);
+ const setCommitMessageInput = (val) => findCommitMessageInput().vm.$emit('input', val);
+ const findDiscardDraftButton = () => wrapper.find('[data-testid="discard-draft"]');
beforeEach(() => {
store = createStore();
- store.state.changedFiles.push('test');
+ store.state.stagedFiles.push('test');
store.state.currentProjectId = 'abcproject';
store.state.currentBranchId = 'master';
- Vue.set(store.state.projects, 'abcproject', { ...projectData });
-
- vm = createComponentWithStore(Component, store).$mount();
+ Vue.set(store.state.projects, 'abcproject', {
+ ...projectData,
+ userPermissions: { pushCode: true },
+ });
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
- it('enables begin commit button when there are changes', () => {
- expect(beginCommitButton()).not.toHaveAttr('disabled');
- });
+ // Notes:
+ // - When there are no changes, there is no commit button so there's nothing to test :)
+ describe.each`
+ desc | stagedFiles | userPermissions | viewFn | buttonFn | disabled | tooltip
+ ${'when there are no changes'} | ${[]} | ${{ pushCode: true }} | ${goToEditView} | ${findBeginCommitButtonData} | ${true} | ${''}
+ ${'when there are changes'} | ${['test']} | ${{ pushCode: true }} | ${goToEditView} | ${findBeginCommitButtonData} | ${false} | ${''}
+ ${'when there are changes'} | ${['test']} | ${{ pushCode: true }} | ${goToCommitView} | ${findCommitButtonData} | ${false} | ${''}
+ ${'when user cannot push'} | ${['test']} | ${{ pushCode: false }} | ${goToEditView} | ${findBeginCommitButtonData} | ${true} | ${CommitForm.MSG_CANNOT_PUSH_CODE}
+ ${'when user cannot push'} | ${['test']} | ${{ pushCode: false }} | ${goToCommitView} | ${findCommitButtonData} | ${true} | ${CommitForm.MSG_CANNOT_PUSH_CODE}
+ `('$desc', ({ stagedFiles, userPermissions, viewFn, buttonFn, disabled, tooltip }) => {
+ beforeEach(async () => {
+ store.state.stagedFiles = stagedFiles;
+ store.state.projects.abcproject.userPermissions = userPermissions;
+
+ createComponent();
+ });
+
+ it(`at view=${viewFn.name}, ${buttonFn.name} has disabled=${disabled} tooltip=${tooltip}`, async () => {
+ viewFn();
- it('disables begin commit button when there are no changes', async () => {
- store.state.changedFiles = [];
- await vm.$nextTick();
+ await wrapper.vm.$nextTick();
- expect(beginCommitButton()).toHaveAttr('disabled');
+ expect(buttonFn()).toEqual({
+ disabled,
+ tooltip,
+ });
+ });
});
- describe('compact', () => {
- beforeEach(() => {
- vm.isCompact = true;
+ describe('on edit tab', () => {
+ beforeEach(async () => {
+ // Test that we react to switching to compact view.
+ goToCommitView();
+
+ createComponent();
+
+ goToEditView();
- return vm.$nextTick();
+ await wrapper.vm.$nextTick();
});
it('renders commit button in compact mode', () => {
- expect(beginCommitButton()).not.toBeNull();
- expect(beginCommitButton().textContent).toContain('Commit');
+ expect(findBeginCommitButton().exists()).toBe(true);
+ expect(findBeginCommitButton().text()).toBe('Commit…');
});
it('does not render form', () => {
- expect(vm.$el.querySelector('form')).toBeNull();
+ expect(findForm().exists()).toBe(false);
});
it('renders overview text', () => {
- vm.$store.state.stagedFiles.push('test');
-
- return vm.$nextTick(() => {
- expect(vm.$el.querySelector('p').textContent).toContain('1 changed file');
- });
+ expect(wrapper.find('p').text()).toBe('1 changed file');
});
- it('shows form when clicking commit button', () => {
- beginCommitButton().click();
+ it('when begin commit button is clicked, shows form', async () => {
+ findBeginCommitButton().vm.$emit('click');
- return vm.$nextTick(() => {
- expect(vm.$el.querySelector('form')).not.toBeNull();
- });
- });
+ await wrapper.vm.$nextTick();
- it('toggles activity bar view when clicking commit button', () => {
- beginCommitButton().click();
-
- return vm.$nextTick(() => {
- expect(store.state.currentActivityView).toBe(leftSidebarViews.commit.name);
- });
+ expect(findForm().exists()).toBe(true);
});
- it('collapses if lastCommitMsg is set to empty and current view is not commit view', async () => {
- store.state.lastCommitMsg = 'abc';
- store.state.currentActivityView = leftSidebarViews.edit.name;
- await vm.$nextTick();
-
- // if commit message is set, form is uncollapsed
- expect(vm.isCompact).toBe(false);
+ it('when begin commit button is clicked, sets activity view', async () => {
+ findBeginCommitButton().vm.$emit('click');
- store.state.lastCommitMsg = '';
- await vm.$nextTick();
+ await wrapper.vm.$nextTick();
- // collapsed when set to empty
- expect(vm.isCompact).toBe(true);
+ expect(store.state.currentActivityView).toBe(leftSidebarViews.commit.name);
});
- it('collapses if in commit view but there are no changes and vice versa', async () => {
- store.state.currentActivityView = leftSidebarViews.commit.name;
- await vm.$nextTick();
+ it('collapses if lastCommitMsg is set to empty and current view is not commit view', async () => {
+ // Test that it expands when lastCommitMsg is set
+ setLastCommitMessage('test');
+ goToEditView();
- // expanded by default if there are changes
- expect(vm.isCompact).toBe(false);
+ await wrapper.vm.$nextTick();
- store.state.changedFiles = [];
- await vm.$nextTick();
+ expect(findForm().exists()).toBe(true);
- expect(vm.isCompact).toBe(true);
+ // Now test that it collapses when lastCommitMsg is cleared
+ setLastCommitMessage('');
- store.state.changedFiles.push('test');
- await vm.$nextTick();
+ await wrapper.vm.$nextTick();
- // uncollapsed once again
- expect(vm.isCompact).toBe(false);
+ expect(findForm().exists()).toBe(false);
});
+ });
- it('collapses if switched from commit view to edit view and vice versa', async () => {
- store.state.currentActivityView = leftSidebarViews.edit.name;
- await vm.$nextTick();
-
- expect(vm.isCompact).toBe(true);
+ describe('on commit tab when window height is less than MAX_WINDOW_HEIGHT', () => {
+ let oldHeight;
- store.state.currentActivityView = leftSidebarViews.commit.name;
- await vm.$nextTick();
+ beforeEach(async () => {
+ oldHeight = window.innerHeight;
+ window.innerHeight = 700;
- expect(vm.isCompact).toBe(false);
+ createComponent();
- store.state.currentActivityView = leftSidebarViews.edit.name;
- await vm.$nextTick();
+ goToCommitView();
- expect(vm.isCompact).toBe(true);
+ await wrapper.vm.$nextTick();
});
- describe('when window height is less than MAX_WINDOW_HEIGHT', () => {
- let oldHeight;
-
- beforeEach(() => {
- oldHeight = window.innerHeight;
- window.innerHeight = 700;
- });
+ afterEach(() => {
+ window.innerHeight = oldHeight;
+ });
- afterEach(() => {
- window.innerHeight = oldHeight;
- });
+ it('stays collapsed if changes are added or removed', async () => {
+ expect(findForm().exists()).toBe(false);
- it('stays collapsed when switching from edit view to commit view and back', async () => {
- store.state.currentActivityView = leftSidebarViews.edit.name;
- await vm.$nextTick();
+ store.state.stagedFiles = [];
+ await wrapper.vm.$nextTick();
- expect(vm.isCompact).toBe(true);
+ expect(findForm().exists()).toBe(false);
- store.state.currentActivityView = leftSidebarViews.commit.name;
- await vm.$nextTick();
+ store.state.stagedFiles.push('test');
+ await wrapper.vm.$nextTick();
- expect(vm.isCompact).toBe(true);
+ expect(findForm().exists()).toBe(false);
+ });
+ });
- store.state.currentActivityView = leftSidebarViews.edit.name;
- await vm.$nextTick();
+ describe('on commit tab', () => {
+ beforeEach(async () => {
+ // Test that the component reacts to switching to full view
+ goToEditView();
- expect(vm.isCompact).toBe(true);
- });
+ createComponent();
- it('stays uncollapsed if changes are added or removed', async () => {
- store.state.currentActivityView = leftSidebarViews.commit.name;
- await vm.$nextTick();
+ goToCommitView();
- expect(vm.isCompact).toBe(true);
+ await wrapper.vm.$nextTick();
+ });
- store.state.changedFiles = [];
- await vm.$nextTick();
+ it('shows form', () => {
+ expect(findForm().exists()).toBe(true);
+ });
- expect(vm.isCompact).toBe(true);
+ it('hides begin commit button', () => {
+ expect(findBeginCommitButton().exists()).toBe(false);
+ });
- store.state.changedFiles.push('test');
- await vm.$nextTick();
+ describe('when no changed files', () => {
+ beforeEach(async () => {
+ store.state.stagedFiles = [];
+ await wrapper.vm.$nextTick();
+ });
- expect(vm.isCompact).toBe(true);
+ it('hides form', () => {
+ expect(findForm().exists()).toBe(false);
});
- it('uncollapses when clicked on Commit button in the edit view', async () => {
- store.state.currentActivityView = leftSidebarViews.edit.name;
- beginCommitButton().click();
- await waitForPromises();
+ it('expands again when staged files are added', async () => {
+ store.state.stagedFiles.push('test');
+ await wrapper.vm.$nextTick();
- expect(vm.isCompact).toBe(false);
+ expect(findForm().exists()).toBe(true);
});
});
- });
- describe('full', () => {
- beforeEach(() => {
- vm.isCompact = false;
+ it('updates commitMessage in store on input', async () => {
+ setCommitMessageInput('testing commit message');
+
+ await wrapper.vm.$nextTick();
- return vm.$nextTick();
+ expect(store.state.commit.commitMessage).toBe('testing commit message');
});
- it('updates commitMessage in store on input', () => {
- const textarea = vm.$el.querySelector('textarea');
+ describe('discard draft button', () => {
+ it('hidden when commitMessage is empty', () => {
+ expect(findDiscardDraftButton().exists()).toBe(false);
+ });
+
+ it('resets commitMessage when clicking discard button', async () => {
+ setCommitMessageInput('testing commit message');
- textarea.value = 'testing commit message';
+ await wrapper.vm.$nextTick();
- textarea.dispatchEvent(new Event('input'));
+ expect(findCommitMessageInput().props('text')).toBe('testing commit message');
- return vm.$nextTick().then(() => {
- expect(vm.$store.state.commit.commitMessage).toBe('testing commit message');
- });
- });
+ // Test that commitMessage is cleared on click
+ findDiscardDraftButton().vm.$emit('click');
- it('updating currentActivityView not to commit view sets compact mode', () => {
- store.state.currentActivityView = 'a';
+ await wrapper.vm.$nextTick();
- return vm.$nextTick(() => {
- expect(vm.isCompact).toBe(true);
+ expect(findCommitMessageInput().props('text')).toBe('');
});
});
- it('always opens itself in full view current activity view is not commit view when clicking commit button', () => {
- beginCommitButton().click();
+ describe('when submitting', () => {
+ beforeEach(async () => {
+ goToEditView();
- return vm.$nextTick(() => {
- expect(store.state.currentActivityView).toBe(leftSidebarViews.commit.name);
- expect(vm.isCompact).toBe(false);
- });
- });
+ createComponent();
- describe('discard draft button', () => {
- it('hidden when commitMessage is empty', () => {
- expect(vm.$el.querySelector('.btn-default').textContent).toContain('Collapse');
- });
+ goToCommitView();
+
+ await wrapper.vm.$nextTick();
+
+ setCommitMessageInput('testing commit message');
- it('resets commitMessage when clicking discard button', () => {
- vm.$store.state.commit.commitMessage = 'testing commit message';
-
- return vm
- .$nextTick()
- .then(() => {
- vm.$el.querySelector('.btn-default').click();
- })
- .then(() => vm.$nextTick())
- .then(() => {
- expect(vm.$store.state.commit.commitMessage).not.toBe('testing commit message');
- });
+ await wrapper.vm.$nextTick();
+
+ jest.spyOn(store, 'dispatch').mockResolvedValue();
});
- });
- describe('when submitting', () => {
- beforeEach(() => {
- jest.spyOn(vm, 'commitChanges');
+ it.each([clickCommitButton, submitForm])('when %p, commits changes', (fn) => {
+ fn();
- vm.$store.state.stagedFiles.push('test');
- vm.$store.state.commit.commitMessage = 'testing commit message';
+ expect(store.dispatch).toHaveBeenCalledWith('commit/commitChanges', undefined);
});
- it('calls commitChanges', () => {
- vm.commitChanges.mockResolvedValue({ success: true });
+ it('when cannot push code, submitting does nothing', async () => {
+ store.state.projects.abcproject.userPermissions.pushCode = false;
+ await wrapper.vm.$nextTick();
- return vm.$nextTick().then(() => {
- vm.$el.querySelector('.btn-success').click();
+ submitForm();
- expect(vm.commitChanges).toHaveBeenCalled();
- });
+ expect(store.dispatch).not.toHaveBeenCalled();
});
it.each`
@@ -272,31 +302,32 @@ describe('IDE commit form', () => {
${() => createCodeownersCommitError('test message')} | ${{ actionPrimary: { text: 'Create new branch' } }}
${createUnexpectedCommitError} | ${{ actionPrimary: null }}
`('opens error modal if commitError with $error', async ({ createError, props }) => {
- jest.spyOn(vm.$refs.commitErrorModal, 'show');
+ const modal = wrapper.find(GlModal);
+ modal.vm.show = jest.fn();
const error = createError();
store.state.commit.commitError = error;
- await vm.$nextTick();
+ await wrapper.vm.$nextTick();
- expect(vm.$refs.commitErrorModal.show).toHaveBeenCalled();
- expect(vm.$refs.commitErrorModal).toMatchObject({
+ expect(modal.vm.show).toHaveBeenCalled();
+ expect(modal.props()).toMatchObject({
actionCancel: { text: 'Cancel' },
...props,
});
// Because of the legacy 'mountComponent' approach here, the only way to
// test the text of the modal is by viewing the content of the modal added to the document.
- expect(document.body).toHaveText(error.messageHTML);
+ expect(modal.html()).toContain(error.messageHTML);
});
});
describe('with error modal with primary', () => {
beforeEach(() => {
- jest.spyOn(vm.$store, 'dispatch').mockReturnValue(Promise.resolve());
+ jest.spyOn(store, 'dispatch').mockResolvedValue();
});
const commitActions = [
- ['commit/updateCommitAction', consts.COMMIT_TO_NEW_BRANCH],
+ ['commit/updateCommitAction', COMMIT_TO_NEW_BRANCH],
['commit/commitChanges'],
];
@@ -310,27 +341,15 @@ describe('IDE commit form', () => {
async ({ commitError, expectedActions }) => {
store.state.commit.commitError = commitError('test message');
- await vm.$nextTick();
+ await wrapper.vm.$nextTick();
- getByText(document.body, 'Create new branch').click();
+ wrapper.find(GlModal).vm.$emit('ok');
await waitForPromises();
- expect(vm.$store.dispatch.mock.calls).toEqual(expectedActions);
+ expect(store.dispatch.mock.calls).toEqual(expectedActions);
},
);
});
});
-
- describe('commitButtonText', () => {
- it('returns commit text when staged files exist', () => {
- vm.$store.state.stagedFiles.push('testing');
-
- expect(vm.commitButtonText).toBe('Commit');
- });
-
- it('returns stage & commit text when staged files do not exist', () => {
- expect(vm.commitButtonText).toBe('Stage & Commit');
- });
- });
});
diff --git a/spec/frontend/ide/components/commit_sidebar/new_merge_request_option_spec.js b/spec/frontend/ide/components/commit_sidebar/new_merge_request_option_spec.js
index 50da64abbbe..f8ab7aa4d0f 100644
--- a/spec/frontend/ide/components/commit_sidebar/new_merge_request_option_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/new_merge_request_option_spec.js
@@ -4,7 +4,10 @@ import { projectData, branches } from 'jest/ide/mock_data';
import NewMergeRequestOption from '~/ide/components/commit_sidebar/new_merge_request_option.vue';
import { createStore } from '~/ide/stores';
import { PERMISSION_CREATE_MR } from '~/ide/constants';
-import consts from '~/ide/stores/modules/commit/constants';
+import {
+ COMMIT_TO_CURRENT_BRANCH,
+ COMMIT_TO_NEW_BRANCH,
+} from '~/ide/stores/modules/commit/constants';
describe('create new MR checkbox', () => {
let store;
@@ -27,8 +30,8 @@ describe('create new MR checkbox', () => {
vm = createComponentWithStore(Component, store);
vm.$store.state.commit.commitAction = createNewBranch
- ? consts.COMMIT_TO_NEW_BRANCH
- : consts.COMMIT_TO_CURRENT_BRANCH;
+ ? COMMIT_TO_NEW_BRANCH
+ : COMMIT_TO_CURRENT_BRANCH;
vm.$store.state.currentBranchId = currentBranchId;
diff --git a/spec/frontend/ide/components/ide_sidebar_nav_spec.js b/spec/frontend/ide/components/ide_sidebar_nav_spec.js
index 6b4cb9bd03d..841e19532a1 100644
--- a/spec/frontend/ide/components/ide_sidebar_nav_spec.js
+++ b/spec/frontend/ide/components/ide_sidebar_nav_spec.js
@@ -3,6 +3,7 @@ import { GlIcon } from '@gitlab/ui';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import IdeSidebarNav from '~/ide/components/ide_sidebar_nav.vue';
import { SIDE_RIGHT, SIDE_LEFT } from '~/ide/constants';
+import { BV_HIDE_TOOLTIP } from '~/lib/utils/constants';
const TEST_TABS = [
{
@@ -74,7 +75,7 @@ describe('ide/components/ide_sidebar_nav', () => {
createComponent({ isOpen, side });
bsTooltipHide = jest.fn();
- wrapper.vm.$root.$on('bv::hide::tooltip', bsTooltipHide);
+ wrapper.vm.$root.$on(BV_HIDE_TOOLTIP, bsTooltipHide);
});
it('renders buttons', () => {
diff --git a/spec/frontend/ide/components/ide_spec.js b/spec/frontend/ide/components/ide_spec.js
index 805fa898611..ef8c55c8b1c 100644
--- a/spec/frontend/ide/components/ide_spec.js
+++ b/spec/frontend/ide/components/ide_spec.js
@@ -1,9 +1,10 @@
import Vuex from 'vuex';
import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { GlAlert } from '@gitlab/ui';
import waitForPromises from 'helpers/wait_for_promises';
import { createStore } from '~/ide/stores';
import ErrorMessage from '~/ide/components/error_message.vue';
-import ide from '~/ide/components/ide.vue';
+import Ide from '~/ide/components/ide.vue';
import { file } from '../helpers';
import { projectData } from '../mock_data';
@@ -15,12 +16,12 @@ describe('WebIDE', () => {
let wrapper;
- function createComponent({ projData = emptyProjData, state = {} } = {}) {
+ const createComponent = ({ projData = emptyProjData, state = {} } = {}) => {
const store = createStore();
store.state.currentProjectId = 'abcproject';
store.state.currentBranchId = 'master';
- store.state.projects.abcproject = { ...projData };
+ store.state.projects.abcproject = projData && { ...projData };
store.state.trees['abcproject/master'] = {
tree: [],
loading: false,
@@ -29,11 +30,13 @@ describe('WebIDE', () => {
store.state[key] = state[key];
});
- return shallowMount(ide, {
+ wrapper = shallowMount(Ide, {
store,
localVue,
});
- }
+ };
+
+ const findAlert = () => wrapper.find(GlAlert);
afterEach(() => {
wrapper.destroy();
@@ -42,7 +45,7 @@ describe('WebIDE', () => {
describe('ide component, empty repo', () => {
beforeEach(() => {
- wrapper = createComponent({
+ createComponent({
projData: {
empty_repo: true,
},
@@ -63,7 +66,7 @@ describe('WebIDE', () => {
`(
'error message exists=$exists when errorMessage=$errorMessage',
async ({ errorMessage, exists }) => {
- wrapper = createComponent({
+ createComponent({
state: {
errorMessage,
},
@@ -78,12 +81,12 @@ describe('WebIDE', () => {
describe('onBeforeUnload', () => {
it('returns undefined when no staged files or changed files', () => {
- wrapper = createComponent();
+ createComponent();
expect(wrapper.vm.onBeforeUnload()).toBe(undefined);
});
it('returns warning text when there are changed files', () => {
- wrapper = createComponent({
+ createComponent({
state: {
changedFiles: [file()],
},
@@ -93,7 +96,7 @@ describe('WebIDE', () => {
});
it('returns warning text when there are staged files', () => {
- wrapper = createComponent({
+ createComponent({
state: {
stagedFiles: [file()],
},
@@ -104,7 +107,7 @@ describe('WebIDE', () => {
it('updates event object', () => {
const event = {};
- wrapper = createComponent({
+ createComponent({
state: {
stagedFiles: [file()],
},
@@ -118,7 +121,7 @@ describe('WebIDE', () => {
describe('non-existent branch', () => {
it('does not render "New file" button for non-existent branch when repo is not empty', () => {
- wrapper = createComponent({
+ createComponent({
state: {
projects: {},
},
@@ -130,7 +133,7 @@ describe('WebIDE', () => {
describe('branch with files', () => {
beforeEach(() => {
- wrapper = createComponent({
+ createComponent({
projData: {
empty_repo: false,
},
@@ -142,4 +145,31 @@ describe('WebIDE', () => {
});
});
});
+
+ it('when user cannot push code, shows alert', () => {
+ createComponent({
+ projData: {
+ userPermissions: {
+ pushCode: false,
+ },
+ },
+ });
+
+ expect(findAlert().props()).toMatchObject({
+ dismissible: false,
+ });
+ expect(findAlert().text()).toBe(Ide.MSG_CANNOT_PUSH_CODE);
+ });
+
+ it.each`
+ desc | projData
+ ${'when user can push code'} | ${{ userPermissions: { pushCode: true } }}
+ ${'when project is not ready'} | ${null}
+ `('$desc, no alert is shown', ({ projData }) => {
+ createComponent({
+ projData,
+ });
+
+ expect(findAlert().exists()).toBe(false);
+ });
});
diff --git a/spec/frontend/ide/components/preview/navigator_spec.js b/spec/frontend/ide/components/preview/navigator_spec.js
index ba5ac3bbbea..e3272734755 100644
--- a/spec/frontend/ide/components/preview/navigator_spec.js
+++ b/spec/frontend/ide/components/preview/navigator_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
-import { TEST_HOST } from 'helpers/test_constants';
import { GlLoadingIcon } from '@gitlab/ui';
import { listen } from 'codesandbox-api';
+import { TEST_HOST } from 'helpers/test_constants';
import ClientsideNavigator from '~/ide/components/preview/navigator.vue';
jest.mock('codesandbox-api', () => ({
diff --git a/spec/frontend/ide/lib/decorations/controller_spec.js b/spec/frontend/ide/lib/decorations/controller_spec.js
index e9b7faaadfe..2790563fd6a 100644
--- a/spec/frontend/ide/lib/decorations/controller_spec.js
+++ b/spec/frontend/ide/lib/decorations/controller_spec.js
@@ -1,8 +1,8 @@
import Editor from '~/ide/lib/editor';
import DecorationsController from '~/ide/lib/decorations/controller';
import Model from '~/ide/lib/common/model';
-import { file } from '../../helpers';
import { createStore } from '~/ide/stores';
+import { file } from '../../helpers';
describe('Multi-file editor library decorations controller', () => {
let editorInstance;
diff --git a/spec/frontend/ide/lib/editor_spec.js b/spec/frontend/ide/lib/editor_spec.js
index 12779c61dc3..b379d778966 100644
--- a/spec/frontend/ide/lib/editor_spec.js
+++ b/spec/frontend/ide/lib/editor_spec.js
@@ -7,6 +7,7 @@ import {
import Editor from '~/ide/lib/editor';
import { createStore } from '~/ide/stores';
import { defaultEditorOptions } from '~/ide/lib/editor_options';
+import { EDITOR_TYPE_DIFF } from '~/editor/constants';
import { file } from '../helpers';
describe('Multi-file editor library', () => {
@@ -125,7 +126,7 @@ describe('Multi-file editor library', () => {
});
it('sets original & modified when diff editor', () => {
- jest.spyOn(instance.instance, 'getEditorType').mockReturnValue('vs.editor.IDiffEditor');
+ jest.spyOn(instance.instance, 'getEditorType').mockReturnValue(EDITOR_TYPE_DIFF);
jest.spyOn(instance.instance, 'setModel').mockImplementation(() => {});
instance.attachModel(model);
diff --git a/spec/frontend/ide/stores/actions_spec.js b/spec/frontend/ide/stores/actions_spec.js
index 036bc91cd11..d925e9a0844 100644
--- a/spec/frontend/ide/stores/actions_spec.js
+++ b/spec/frontend/ide/stores/actions_spec.js
@@ -20,8 +20,8 @@ import {
} from '~/ide/stores/actions';
import axios from '~/lib/utils/axios_utils';
import * as types from '~/ide/stores/mutation_types';
-import { file, createTriggerRenameAction, createTriggerChangeAction } from '../helpers';
import eventHub from '~/ide/eventhub';
+import { file, createTriggerRenameAction, createTriggerChangeAction } from '../helpers';
jest.mock('~/lib/utils/url_utility', () => ({
visitUrl: jest.fn(),
diff --git a/spec/frontend/ide/stores/getters_spec.js b/spec/frontend/ide/stores/getters_spec.js
index 1787f9e9361..1289c1aec75 100644
--- a/spec/frontend/ide/stores/getters_spec.js
+++ b/spec/frontend/ide/stores/getters_spec.js
@@ -2,6 +2,7 @@ import { TEST_HOST } from 'helpers/test_constants';
import * as getters from '~/ide/stores/getters';
import { createStore } from '~/ide/stores';
import { file } from '../helpers';
+import { DEFAULT_PERMISSIONS } from '../../../../app/assets/javascripts/ide/constants';
const TEST_PROJECT_ID = 'test_project';
@@ -386,7 +387,9 @@ describe('IDE store getters', () => {
describe('findProjectPermissions', () => {
it('returns false if project not found', () => {
- expect(localStore.getters.findProjectPermissions(TEST_PROJECT_ID)).toEqual({});
+ expect(localStore.getters.findProjectPermissions(TEST_PROJECT_ID)).toEqual(
+ DEFAULT_PERMISSIONS,
+ );
});
it('finds permission in given project', () => {
diff --git a/spec/frontend/ide/stores/modules/commit/actions_spec.js b/spec/frontend/ide/stores/modules/commit/actions_spec.js
index 5be0e22a9fc..ac495a657a2 100644
--- a/spec/frontend/ide/stores/modules/commit/actions_spec.js
+++ b/spec/frontend/ide/stores/modules/commit/actions_spec.js
@@ -1,13 +1,16 @@
-import { file } from 'jest/ide/helpers';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
+import { file } from 'jest/ide/helpers';
import testAction from 'helpers/vuex_action_helper';
import { visitUrl } from '~/lib/utils/url_utility';
import { createStore } from '~/ide/stores';
import service from '~/ide/services';
import { createRouter } from '~/ide/ide_router';
import eventHub from '~/ide/eventhub';
-import consts from '~/ide/stores/modules/commit/constants';
+import {
+ COMMIT_TO_CURRENT_BRANCH,
+ COMMIT_TO_NEW_BRANCH,
+} from '~/ide/stores/modules/commit/constants';
import * as mutationTypes from '~/ide/stores/modules/commit/mutation_types';
import * as actions from '~/ide/stores/modules/commit/actions';
import { createUnexpectedCommitError } from '~/ide/lib/errors';
@@ -425,12 +428,12 @@ describe('IDE commit module actions', () => {
});
it('resets stores commit actions', (done) => {
- store.state.commit.commitAction = consts.COMMIT_TO_NEW_BRANCH;
+ store.state.commit.commitAction = COMMIT_TO_NEW_BRANCH;
store
.dispatch('commit/commitChanges')
.then(() => {
- expect(store.state.commit.commitAction).not.toBe(consts.COMMIT_TO_NEW_BRANCH);
+ expect(store.state.commit.commitAction).not.toBe(COMMIT_TO_NEW_BRANCH);
})
.then(done)
.catch(done.fail);
@@ -450,7 +453,7 @@ describe('IDE commit module actions', () => {
it('redirects to new merge request page', (done) => {
jest.spyOn(eventHub, '$on').mockImplementation();
- store.state.commit.commitAction = consts.COMMIT_TO_NEW_BRANCH;
+ store.state.commit.commitAction = COMMIT_TO_NEW_BRANCH;
store.state.commit.shouldCreateMR = true;
store
@@ -468,7 +471,7 @@ describe('IDE commit module actions', () => {
it('does not redirect to new merge request page when shouldCreateMR is not checked', (done) => {
jest.spyOn(eventHub, '$on').mockImplementation();
- store.state.commit.commitAction = consts.COMMIT_TO_NEW_BRANCH;
+ store.state.commit.commitAction = COMMIT_TO_NEW_BRANCH;
store.state.commit.shouldCreateMR = false;
store
@@ -483,7 +486,7 @@ describe('IDE commit module actions', () => {
it('does not redirect to merge request page if shouldCreateMR is checked, but branch is the default branch', async () => {
jest.spyOn(eventHub, '$on').mockImplementation();
- store.state.commit.commitAction = consts.COMMIT_TO_CURRENT_BRANCH;
+ store.state.commit.commitAction = COMMIT_TO_CURRENT_BRANCH;
store.state.commit.shouldCreateMR = true;
await store.dispatch('commit/commitChanges');
diff --git a/spec/frontend/ide/stores/modules/commit/getters_spec.js b/spec/frontend/ide/stores/modules/commit/getters_spec.js
index 66ed51dbd13..a97b7e49dfc 100644
--- a/spec/frontend/ide/stores/modules/commit/getters_spec.js
+++ b/spec/frontend/ide/stores/modules/commit/getters_spec.js
@@ -1,6 +1,9 @@
import commitState from '~/ide/stores/modules/commit/state';
import * as getters from '~/ide/stores/modules/commit/getters';
-import consts from '~/ide/stores/modules/commit/constants';
+import {
+ COMMIT_TO_CURRENT_BRANCH,
+ COMMIT_TO_NEW_BRANCH,
+} from '~/ide/stores/modules/commit/constants';
describe('IDE commit module getters', () => {
let state;
@@ -147,13 +150,13 @@ describe('IDE commit module getters', () => {
describe('isCreatingNewBranch', () => {
it('returns false if NOT creating a new branch', () => {
- state.commitAction = consts.COMMIT_TO_CURRENT_BRANCH;
+ state.commitAction = COMMIT_TO_CURRENT_BRANCH;
expect(getters.isCreatingNewBranch(state)).toBeFalsy();
});
it('returns true if creating a new branch', () => {
- state.commitAction = consts.COMMIT_TO_NEW_BRANCH;
+ state.commitAction = COMMIT_TO_NEW_BRANCH;
expect(getters.isCreatingNewBranch(state)).toBeTruthy();
});
diff --git a/spec/frontend/import_entities/import_groups/components/import_table_spec.js b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
index cd184bb65cc..34e2a945333 100644
--- a/spec/frontend/import_entities/import_groups/components/import_table_spec.js
+++ b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
@@ -1,6 +1,6 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
-import { GlLoadingIcon } from '@gitlab/ui';
+import { GlEmptyState, GlLoadingIcon, GlSearchBoxByClick, GlSprintf } from '@gitlab/ui';
import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
import ImportTableRow from '~/import_entities/import_groups/components/import_table_row.vue';
@@ -8,6 +8,7 @@ import ImportTable from '~/import_entities/import_groups/components/import_table
import setTargetNamespaceMutation from '~/import_entities/import_groups/graphql/mutations/set_target_namespace.mutation.graphql';
import setNewNameMutation from '~/import_entities/import_groups/graphql/mutations/set_new_name.mutation.graphql';
import importGroupMutation from '~/import_entities/import_groups/graphql/mutations/import_group.mutation.graphql';
+import PaginationLinks from '~/vue_shared/components/pagination_links.vue';
import { STATUSES } from '~/import_entities/constants';
@@ -20,6 +21,9 @@ describe('import table', () => {
let wrapper;
let apolloProvider;
+ const FAKE_GROUP = generateFakeEntry({ id: 1, status: STATUSES.NONE });
+ const FAKE_PAGE_INFO = { page: 1, perPage: 20, total: 40, totalPages: 2 };
+
const createComponent = ({ bulkImportSourceGroups }) => {
apolloProvider = createMockApollo([], {
Query: {
@@ -34,6 +38,12 @@ describe('import table', () => {
});
wrapper = shallowMount(ImportTable, {
+ propsData: {
+ sourceUrl: 'https://demo.host',
+ },
+ stubs: {
+ GlSprintf,
+ },
localVue,
apolloProvider,
});
@@ -62,25 +72,50 @@ describe('import table', () => {
expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
});
+ it('renders message about empty state when no groups are available for import', async () => {
+ createComponent({
+ bulkImportSourceGroups: () => ({
+ nodes: [],
+ pageInfo: FAKE_PAGE_INFO,
+ }),
+ });
+ await waitForPromises();
+
+ expect(wrapper.find(GlEmptyState).props().title).toBe('No groups available for import');
+ });
+
it('renders import row for each group in response', async () => {
const FAKE_GROUPS = [
generateFakeEntry({ id: 1, status: STATUSES.NONE }),
generateFakeEntry({ id: 2, status: STATUSES.FINISHED }),
];
createComponent({
- bulkImportSourceGroups: () => FAKE_GROUPS,
+ bulkImportSourceGroups: () => ({
+ nodes: FAKE_GROUPS,
+ pageInfo: FAKE_PAGE_INFO,
+ }),
});
await waitForPromises();
expect(wrapper.findAll(ImportTableRow)).toHaveLength(FAKE_GROUPS.length);
});
- describe('converts row events to mutation invocations', () => {
- const FAKE_GROUP = generateFakeEntry({ id: 1, status: STATUSES.NONE });
+ it('does not render status string when result list is empty', async () => {
+ createComponent({
+ bulkImportSourceGroups: jest.fn().mockResolvedValue({
+ nodes: [],
+ pageInfo: FAKE_PAGE_INFO,
+ }),
+ });
+ await waitForPromises();
+ expect(wrapper.text()).not.toContain('Showing 1-0');
+ });
+
+ describe('converts row events to mutation invocations', () => {
beforeEach(() => {
createComponent({
- bulkImportSourceGroups: () => [FAKE_GROUP],
+ bulkImportSourceGroups: () => ({ nodes: [FAKE_GROUP], pageInfo: FAKE_PAGE_INFO }),
});
return waitForPromises();
});
@@ -100,4 +135,115 @@ describe('import table', () => {
});
});
});
+
+ describe('pagination', () => {
+ const bulkImportSourceGroupsQueryMock = jest
+ .fn()
+ .mockResolvedValue({ nodes: [FAKE_GROUP], pageInfo: FAKE_PAGE_INFO });
+
+ beforeEach(() => {
+ createComponent({
+ bulkImportSourceGroups: bulkImportSourceGroupsQueryMock,
+ });
+ return waitForPromises();
+ });
+
+ it('correctly passes pagination info from query', () => {
+ expect(wrapper.find(PaginationLinks).props().pageInfo).toStrictEqual(FAKE_PAGE_INFO);
+ });
+
+ it('updates page when page change is requested', async () => {
+ const REQUESTED_PAGE = 2;
+ wrapper.find(PaginationLinks).props().change(REQUESTED_PAGE);
+
+ await waitForPromises();
+ expect(bulkImportSourceGroupsQueryMock).toHaveBeenCalledWith(
+ expect.anything(),
+ expect.objectContaining({ page: REQUESTED_PAGE }),
+ expect.anything(),
+ expect.anything(),
+ );
+ });
+
+ it('updates status text when page is changed', async () => {
+ const REQUESTED_PAGE = 2;
+ bulkImportSourceGroupsQueryMock.mockResolvedValue({
+ nodes: [FAKE_GROUP],
+ pageInfo: {
+ page: 2,
+ total: 38,
+ perPage: 20,
+ totalPages: 2,
+ },
+ });
+ wrapper.find(PaginationLinks).props().change(REQUESTED_PAGE);
+ await waitForPromises();
+
+ expect(wrapper.text()).toContain('Showing 21-21 of 38');
+ });
+ });
+
+ describe('filters', () => {
+ const bulkImportSourceGroupsQueryMock = jest
+ .fn()
+ .mockResolvedValue({ nodes: [FAKE_GROUP], pageInfo: FAKE_PAGE_INFO });
+
+ beforeEach(() => {
+ createComponent({
+ bulkImportSourceGroups: bulkImportSourceGroupsQueryMock,
+ });
+ return waitForPromises();
+ });
+
+ const findFilterInput = () => wrapper.find(GlSearchBoxByClick);
+
+ it('properly passes filter to graphql query when search box is submitted', async () => {
+ createComponent({
+ bulkImportSourceGroups: bulkImportSourceGroupsQueryMock,
+ });
+ await waitForPromises();
+
+ const FILTER_VALUE = 'foo';
+ findFilterInput().vm.$emit('submit', FILTER_VALUE);
+ await waitForPromises();
+
+ expect(bulkImportSourceGroupsQueryMock).toHaveBeenCalledWith(
+ expect.anything(),
+ expect.objectContaining({ filter: FILTER_VALUE }),
+ expect.anything(),
+ expect.anything(),
+ );
+ });
+
+ it('updates status string when search box is submitted', async () => {
+ createComponent({
+ bulkImportSourceGroups: bulkImportSourceGroupsQueryMock,
+ });
+ await waitForPromises();
+
+ const FILTER_VALUE = 'foo';
+ findFilterInput().vm.$emit('submit', FILTER_VALUE);
+ await waitForPromises();
+
+ expect(wrapper.text()).toContain('Showing 1-1 of 40 groups matching filter "foo"');
+ });
+
+ it('properly resets filter in graphql query when search box is cleared', async () => {
+ const FILTER_VALUE = 'foo';
+ findFilterInput().vm.$emit('submit', FILTER_VALUE);
+ await waitForPromises();
+
+ bulkImportSourceGroupsQueryMock.mockClear();
+ await apolloProvider.defaultClient.resetStore();
+ findFilterInput().vm.$emit('clear');
+ await waitForPromises();
+
+ expect(bulkImportSourceGroupsQueryMock).toHaveBeenCalledWith(
+ expect.anything(),
+ expect.objectContaining({ filter: '' }),
+ expect.anything(),
+ expect.anything(),
+ );
+ });
+ });
});
diff --git a/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js b/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js
index 514ed411138..94a2e4f75b4 100644
--- a/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js
+++ b/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js
@@ -79,33 +79,56 @@ describe('Bulk import resolvers', () => {
axiosMockAdapter
.onGet(FAKE_ENDPOINTS.availableNamespaces)
.reply(httpStatus.OK, availableNamespacesFixture);
-
- const response = await client.query({ query: bulkImportSourceGroupsQuery });
- results = response.data.bulkImportSourceGroups;
});
- it('mirrors REST endpoint response fields', () => {
- const MIRRORED_FIELDS = ['id', 'full_name', 'full_path', 'web_url'];
- expect(
- results.every((r, idx) =>
- MIRRORED_FIELDS.every(
- (field) => r[field] === statusEndpointFixture.importable_data[idx][field],
+ describe('when called', () => {
+ beforeEach(async () => {
+ const response = await client.query({ query: bulkImportSourceGroupsQuery });
+ results = response.data.bulkImportSourceGroups.nodes;
+ });
+
+ it('mirrors REST endpoint response fields', () => {
+ const MIRRORED_FIELDS = ['id', 'full_name', 'full_path', 'web_url'];
+ expect(
+ results.every((r, idx) =>
+ MIRRORED_FIELDS.every(
+ (field) => r[field] === statusEndpointFixture.importable_data[idx][field],
+ ),
),
- ),
- ).toBe(true);
- });
+ ).toBe(true);
+ });
- it('populates each result instance with status field default to none', () => {
- expect(results.every((r) => r.status === STATUSES.NONE)).toBe(true);
- });
+ it('populates each result instance with status field default to none', () => {
+ expect(results.every((r) => r.status === STATUSES.NONE)).toBe(true);
+ });
- it('populates each result instance with import_target defaulted to first available namespace', () => {
- expect(
- results.every(
- (r) => r.import_target.target_namespace === availableNamespacesFixture[0].full_path,
- ),
- ).toBe(true);
+ it('populates each result instance with import_target defaulted to first available namespace', () => {
+ expect(
+ results.every(
+ (r) => r.import_target.target_namespace === availableNamespacesFixture[0].full_path,
+ ),
+ ).toBe(true);
+ });
});
+
+ it.each`
+ variable | queryParam | value
+ ${'filter'} | ${'filter'} | ${'demo'}
+ ${'perPage'} | ${'per_page'} | ${30}
+ ${'page'} | ${'page'} | ${3}
+ `(
+ 'properly passes GraphQL variable $variable as REST $queryParam query parameter',
+ async ({ variable, queryParam, value }) => {
+ await client.query({
+ query: bulkImportSourceGroupsQuery,
+ variables: { [variable]: value },
+ });
+ const restCall = axiosMockAdapter.history.get.find(
+ (q) => q.url === FAKE_ENDPOINTS.status,
+ );
+ expect(restCall.params[queryParam]).toBe(value);
+ },
+ );
});
});
@@ -117,20 +140,28 @@ describe('Bulk import resolvers', () => {
client.writeQuery({
query: bulkImportSourceGroupsQuery,
data: {
- bulkImportSourceGroups: [
- {
- __typename: clientTypenames.BulkImportSourceGroup,
- id: GROUP_ID,
- status: STATUSES.NONE,
- web_url: 'https://fake.host/1',
- full_path: 'fake_group_1',
- full_name: 'fake_name_1',
- import_target: {
- target_namespace: 'root',
- new_name: 'group1',
+ bulkImportSourceGroups: {
+ nodes: [
+ {
+ __typename: clientTypenames.BulkImportSourceGroup,
+ id: GROUP_ID,
+ status: STATUSES.NONE,
+ web_url: 'https://fake.host/1',
+ full_path: 'fake_group_1',
+ full_name: 'fake_name_1',
+ import_target: {
+ target_namespace: 'root',
+ new_name: 'group1',
+ },
},
+ ],
+ pageInfo: {
+ page: 1,
+ perPage: 20,
+ total: 37,
+ totalPages: 2,
},
- ],
+ },
},
});
@@ -140,7 +171,7 @@ describe('Bulk import resolvers', () => {
fetchPolicy: 'cache-only',
})
.subscribe(({ data }) => {
- results = data.bulkImportSourceGroups;
+ results = data.bulkImportSourceGroups.nodes;
});
});
@@ -174,7 +205,9 @@ describe('Bulk import resolvers', () => {
});
await waitForPromises();
- const { bulkImportSourceGroups: intermediateResults } = client.readQuery({
+ const {
+ bulkImportSourceGroups: { nodes: intermediateResults },
+ } = client.readQuery({
query: bulkImportSourceGroupsQuery,
});
diff --git a/spec/frontend/import_entities/import_groups/graphql/services/status_poller_spec.js b/spec/frontend/import_entities/import_groups/graphql/services/status_poller_spec.js
index e7f1626f81d..8d0e1dbd65f 100644
--- a/spec/frontend/import_entities/import_groups/graphql/services/status_poller_spec.js
+++ b/spec/frontend/import_entities/import_groups/graphql/services/status_poller_spec.js
@@ -4,6 +4,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import createFlash from '~/flash';
import { StatusPoller } from '~/import_entities/import_groups/graphql/services/status_poller';
+import { clientTypenames } from '~/import_entities/import_groups/graphql/client_factory';
import bulkImportSourceGroupsQuery from '~/import_entities/import_groups/graphql/queries/bulk_import_source_groups.query.graphql';
import { STATUSES } from '~/import_entities/constants';
import { SourceGroupsManager } from '~/import_entities/import_groups/graphql/services/source_groups_manager';
@@ -17,6 +18,7 @@ jest.mock('~/import_entities/import_groups/graphql/services/source_groups_manage
}));
const TEST_POLL_INTERVAL = 1000;
+const FAKE_PAGE_INFO = { page: 1, perPage: 20, total: 40, totalPages: 2 };
describe('Bulk import status poller', () => {
let poller;
@@ -25,6 +27,25 @@ describe('Bulk import status poller', () => {
const listQueryCacheCalls = () =>
clientMock.readQuery.mock.calls.filter((call) => call[0].query === bulkImportSourceGroupsQuery);
+ const generateFakeGroups = (statuses) =>
+ statuses.map((status, idx) => generateFakeEntry({ status, id: idx }));
+
+ const writeFakeGroupsQuery = (nodes) => {
+ clientMock.cache.writeQuery({
+ query: bulkImportSourceGroupsQuery,
+ data: {
+ bulkImportSourceGroups: {
+ __typename: clientTypenames.BulkImportSourceGroupConnection,
+ nodes,
+ pageInfo: {
+ __typename: clientTypenames.BulkImportPageInfo,
+ ...FAKE_PAGE_INFO,
+ },
+ },
+ },
+ });
+ };
+
beforeEach(() => {
clientMock = createMockClient({
cache: new InMemoryCache({
@@ -42,10 +63,7 @@ describe('Bulk import status poller', () => {
describe('general behavior', () => {
beforeEach(() => {
- clientMock.cache.writeQuery({
- query: bulkImportSourceGroupsQuery,
- data: { bulkImportSourceGroups: [] },
- });
+ writeFakeGroupsQuery([]);
});
it('does not perform polling when constructed', () => {
@@ -94,14 +112,7 @@ describe('Bulk import status poller', () => {
});
it('does not query server when no groups have STARTED status', async () => {
- clientMock.cache.writeQuery({
- query: bulkImportSourceGroupsQuery,
- data: {
- bulkImportSourceGroups: [STATUSES.NONE, STATUSES.FINISHED].map((status, idx) =>
- generateFakeEntry({ status, id: idx }),
- ),
- },
- });
+ writeFakeGroupsQuery(generateFakeGroups([STATUSES.NONE, STATUSES.FINISHED]));
jest.spyOn(clientMock, 'query');
poller.startPolling();
@@ -111,44 +122,23 @@ describe('Bulk import status poller', () => {
describe('when there are groups which have STARTED status', () => {
const TARGET_NAMESPACE = 'root';
- const STARTED_GROUP_1 = {
+ const STARTED_GROUP_1 = generateFakeEntry({
status: STATUSES.STARTED,
id: 'started1',
- import_target: {
- target_namespace: TARGET_NAMESPACE,
- new_name: 'group1',
- },
- };
+ });
- const STARTED_GROUP_2 = {
+ const STARTED_GROUP_2 = generateFakeEntry({
status: STATUSES.STARTED,
id: 'started2',
- import_target: {
- target_namespace: TARGET_NAMESPACE,
- new_name: 'group2',
- },
- };
+ });
- const NOT_STARTED_GROUP = {
+ const NOT_STARTED_GROUP = generateFakeEntry({
status: STATUSES.NONE,
id: 'not_started',
- import_target: {
- target_namespace: TARGET_NAMESPACE,
- new_name: 'group3',
- },
- };
+ });
it('query server only for groups with STATUSES.STARTED', async () => {
- clientMock.cache.writeQuery({
- query: bulkImportSourceGroupsQuery,
- data: {
- bulkImportSourceGroups: [
- STARTED_GROUP_1,
- NOT_STARTED_GROUP,
- STARTED_GROUP_2,
- ].map((group) => generateFakeEntry(group)),
- },
- });
+ writeFakeGroupsQuery([STARTED_GROUP_1, NOT_STARTED_GROUP, STARTED_GROUP_2]);
clientMock.query = jest.fn().mockResolvedValue({ data: {} });
poller.startPolling();
@@ -166,14 +156,7 @@ describe('Bulk import status poller', () => {
});
it('updates statuses only for groups in response', async () => {
- clientMock.cache.writeQuery({
- query: bulkImportSourceGroupsQuery,
- data: {
- bulkImportSourceGroups: [STARTED_GROUP_1, STARTED_GROUP_2].map((group) =>
- generateFakeEntry(group),
- ),
- },
- });
+ writeFakeGroupsQuery([STARTED_GROUP_1, STARTED_GROUP_2]);
clientMock.query = jest.fn().mockResolvedValue({ data: { group0: {} } });
poller.startPolling();
@@ -188,14 +171,7 @@ describe('Bulk import status poller', () => {
describe('when error occurs', () => {
beforeEach(() => {
- clientMock.cache.writeQuery({
- query: bulkImportSourceGroupsQuery,
- data: {
- bulkImportSourceGroups: [STARTED_GROUP_1, STARTED_GROUP_2].map((group) =>
- generateFakeEntry(group),
- ),
- },
- });
+ writeFakeGroupsQuery([STARTED_GROUP_1, STARTED_GROUP_2]);
clientMock.query = jest.fn().mockRejectedValue(new Error('dummy error'));
poller.startPolling();
diff --git a/spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap b/spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap
index 2b3c803be08..4398d568501 100644
--- a/spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap
+++ b/spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap
@@ -9,9 +9,7 @@ exports[`IncidentsSettingTabs should render the component 1`] = `
<div
class="settings-header"
>
- <h4
- class="gl-my-3! gl-py-1"
- >
+ <h4>
Incidents
diff --git a/spec/frontend/incidents_settings/components/pagerduty_form_spec.js b/spec/frontend/incidents_settings/components/pagerduty_form_spec.js
index 50d0de8a753..0d98c09e16f 100644
--- a/spec/frontend/incidents_settings/components/pagerduty_form_spec.js
+++ b/spec/frontend/incidents_settings/components/pagerduty_form_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
-import waitForPromises from 'helpers/wait_for_promises';
import { GlAlert, GlModal } from '@gitlab/ui';
+import waitForPromises from 'helpers/wait_for_promises';
import PagerDutySettingsForm from '~/incidents_settings/components/pagerduty_form.vue';
describe('Alert integration settings form', () => {
diff --git a/spec/frontend/integrations/edit/components/integration_form_spec.js b/spec/frontend/integrations/edit/components/integration_form_spec.js
index 97e77ac87ab..4888a8c1a5a 100644
--- a/spec/frontend/integrations/edit/components/integration_form_spec.js
+++ b/spec/frontend/integrations/edit/components/integration_form_spec.js
@@ -1,5 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import { mockIntegrationProps } from 'jest/integrations/edit/mock_data';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { setHTMLFixture } from 'helpers/fixtures';
import { createStore } from '~/integrations/edit/store';
import IntegrationForm from '~/integrations/edit/components/integration_form.vue';
import OverrideDropdown from '~/integrations/edit/components/override_dropdown.vue';
@@ -15,24 +17,31 @@ import { integrationLevels } from '~/integrations/edit/constants';
describe('IntegrationForm', () => {
let wrapper;
- const createComponent = (customStateProps = {}, featureFlags = {}, initialState = {}) => {
- wrapper = shallowMount(IntegrationForm, {
- propsData: {},
- store: createStore({
- customState: { ...mockIntegrationProps, ...customStateProps },
- ...initialState,
+ const createComponent = ({
+ customStateProps = {},
+ featureFlags = {},
+ initialState = {},
+ props = {},
+ } = {}) => {
+ wrapper = extendedWrapper(
+ shallowMount(IntegrationForm, {
+ propsData: { ...props },
+ store: createStore({
+ customState: { ...mockIntegrationProps, ...customStateProps },
+ ...initialState,
+ }),
+ stubs: {
+ OverrideDropdown,
+ ActiveCheckbox,
+ ConfirmationModal,
+ JiraTriggerFields,
+ TriggerFields,
+ },
+ provide: {
+ glFeatures: featureFlags,
+ },
}),
- stubs: {
- OverrideDropdown,
- ActiveCheckbox,
- ConfirmationModal,
- JiraTriggerFields,
- TriggerFields,
- },
- provide: {
- glFeatures: featureFlags,
- },
- });
+ );
};
afterEach(() => {
@@ -63,7 +72,9 @@ describe('IntegrationForm', () => {
describe('showActive is false', () => {
it('does not render ActiveCheckbox', () => {
createComponent({
- showActive: false,
+ customStateProps: {
+ showActive: false,
+ },
});
expect(findActiveCheckbox().exists()).toBe(false);
@@ -73,7 +84,9 @@ describe('IntegrationForm', () => {
describe('integrationLevel is instance', () => {
it('renders ConfirmationModal', () => {
createComponent({
- integrationLevel: integrationLevels.INSTANCE,
+ customStateProps: {
+ integrationLevel: integrationLevels.INSTANCE,
+ },
});
expect(findConfirmationModal().exists()).toBe(true);
@@ -82,7 +95,9 @@ describe('IntegrationForm', () => {
describe('resetPath is empty', () => {
it('does not render ResetConfirmationModal and button', () => {
createComponent({
- integrationLevel: integrationLevels.INSTANCE,
+ customStateProps: {
+ integrationLevel: integrationLevels.INSTANCE,
+ },
});
expect(findResetButton().exists()).toBe(false);
@@ -93,8 +108,10 @@ describe('IntegrationForm', () => {
describe('resetPath is present', () => {
it('renders ResetConfirmationModal and button', () => {
createComponent({
- integrationLevel: integrationLevels.INSTANCE,
- resetPath: 'resetPath',
+ customStateProps: {
+ integrationLevel: integrationLevels.INSTANCE,
+ resetPath: 'resetPath',
+ },
});
expect(findResetButton().exists()).toBe(true);
@@ -106,7 +123,9 @@ describe('IntegrationForm', () => {
describe('integrationLevel is group', () => {
it('renders ConfirmationModal', () => {
createComponent({
- integrationLevel: integrationLevels.GROUP,
+ customStateProps: {
+ integrationLevel: integrationLevels.GROUP,
+ },
});
expect(findConfirmationModal().exists()).toBe(true);
@@ -115,7 +134,9 @@ describe('IntegrationForm', () => {
describe('resetPath is empty', () => {
it('does not render ResetConfirmationModal and button', () => {
createComponent({
- integrationLevel: integrationLevels.GROUP,
+ customStateProps: {
+ integrationLevel: integrationLevels.GROUP,
+ },
});
expect(findResetButton().exists()).toBe(false);
@@ -126,8 +147,10 @@ describe('IntegrationForm', () => {
describe('resetPath is present', () => {
it('renders ResetConfirmationModal and button', () => {
createComponent({
- integrationLevel: integrationLevels.GROUP,
- resetPath: 'resetPath',
+ customStateProps: {
+ integrationLevel: integrationLevels.GROUP,
+ resetPath: 'resetPath',
+ },
});
expect(findResetButton().exists()).toBe(true);
@@ -139,7 +162,9 @@ describe('IntegrationForm', () => {
describe('integrationLevel is project', () => {
it('does not render ConfirmationModal', () => {
createComponent({
- integrationLevel: 'project',
+ customStateProps: {
+ integrationLevel: 'project',
+ },
});
expect(findConfirmationModal().exists()).toBe(false);
@@ -147,8 +172,10 @@ describe('IntegrationForm', () => {
it('does not render ResetConfirmationModal and button', () => {
createComponent({
- integrationLevel: 'project',
- resetPath: 'resetPath',
+ customStateProps: {
+ integrationLevel: 'project',
+ resetPath: 'resetPath',
+ },
});
expect(findResetButton().exists()).toBe(false);
@@ -158,7 +185,9 @@ describe('IntegrationForm', () => {
describe('type is "slack"', () => {
beforeEach(() => {
- createComponent({ type: 'slack' });
+ createComponent({
+ customStateProps: { type: 'slack' },
+ });
});
it('does not render JiraTriggerFields', () => {
@@ -172,14 +201,19 @@ describe('IntegrationForm', () => {
describe('type is "jira"', () => {
it('renders JiraTriggerFields', () => {
- createComponent({ type: 'jira' });
+ createComponent({
+ customStateProps: { type: 'jira' },
+ });
expect(findJiraTriggerFields().exists()).toBe(true);
});
describe('featureFlag jiraIssuesIntegration is false', () => {
it('does not render JiraIssuesFields', () => {
- createComponent({ type: 'jira' }, { jiraIssuesIntegration: false });
+ createComponent({
+ customStateProps: { type: 'jira' },
+ featureFlags: { jiraIssuesIntegration: false },
+ });
expect(findJiraIssuesFields().exists()).toBe(false);
});
@@ -187,8 +221,10 @@ describe('IntegrationForm', () => {
describe('featureFlag jiraIssuesIntegration is true', () => {
it('renders JiraIssuesFields', () => {
- createComponent({ type: 'jira' }, { jiraIssuesIntegration: true });
-
+ createComponent({
+ customStateProps: { type: 'jira' },
+ featureFlags: { jiraIssuesIntegration: true },
+ });
expect(findJiraIssuesFields().exists()).toBe(true);
});
});
@@ -200,8 +236,10 @@ describe('IntegrationForm', () => {
const type = 'slack';
createComponent({
- triggerEvents: events,
- type,
+ customStateProps: {
+ triggerEvents: events,
+ type,
+ },
});
expect(findTriggerFields().exists()).toBe(true);
@@ -218,7 +256,9 @@ describe('IntegrationForm', () => {
];
createComponent({
- fields,
+ customStateProps: {
+ fields,
+ },
});
const dynamicFields = wrapper.findAll(DynamicField);
@@ -232,13 +272,11 @@ describe('IntegrationForm', () => {
describe('defaultState state is null', () => {
it('does not render OverrideDropdown', () => {
- createComponent(
- {},
- {},
- {
+ createComponent({
+ initialState: {
defaultState: null,
},
- );
+ });
expect(findOverrideDropdown().exists()).toBe(false);
});
@@ -246,18 +284,43 @@ describe('IntegrationForm', () => {
describe('defaultState state is an object', () => {
it('renders OverrideDropdown', () => {
- createComponent(
- {},
- {},
- {
+ createComponent({
+ initialState: {
defaultState: {
...mockIntegrationProps,
},
},
- );
+ });
expect(findOverrideDropdown().exists()).toBe(true);
});
});
+
+ describe('with `helpHtml` prop', () => {
+ const mockTestId = 'jest-help-html-test';
+
+ setHTMLFixture(`
+ <div data-testid="${mockTestId}">
+ <svg class="gl-icon">
+ <use></use>
+ </svg>
+ </div>
+ `);
+
+ it('renders `helpHtml`', async () => {
+ const mockHelpHtml = document.querySelector(`[data-testid="${mockTestId}"]`);
+
+ createComponent({
+ props: {
+ helpHtml: mockHelpHtml.outerHTML,
+ },
+ });
+
+ const helpHtml = wrapper.findByTestId(mockTestId);
+
+ expect(helpHtml.isVisible()).toBe(true);
+ expect(helpHtml.find('svg').isVisible()).toBe(true);
+ });
+ });
});
});
diff --git a/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js b/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js
index eaeed2703d1..b866cf8bc03 100644
--- a/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js
+++ b/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js
@@ -3,18 +3,22 @@ import { mount } from '@vue/test-utils';
import { GlFormCheckbox, GlFormInput } from '@gitlab/ui';
import JiraIssuesFields from '~/integrations/edit/components/jira_issues_fields.vue';
+import eventHub from '~/integrations/edit/event_hub';
describe('JiraIssuesFields', () => {
let wrapper;
const defaultProps = {
- showJiraIssuesIntegration: true,
editProjectPath: '/edit',
+ showJiraIssuesIntegration: true,
+ showJiraVulnerabilitiesIntegration: true,
};
- const createComponent = (props) => {
+ const createComponent = ({ props, ...options } = {}) => {
wrapper = mount(JiraIssuesFields, {
propsData: { ...defaultProps, ...props },
+ stubs: ['jira-issue-creation-vulnerabilities'],
+ ...options,
});
};
@@ -28,11 +32,14 @@ describe('JiraIssuesFields', () => {
const findEnableCheckbox = () => wrapper.find(GlFormCheckbox);
const findProjectKey = () => wrapper.find(GlFormInput);
const expectedBannerText = 'This is a Premium feature';
+ const findJiraForVulnerabilities = () => wrapper.find('[data-testid="jira-for-vulnerabilities"]');
+ const setEnableCheckbox = async (isEnabled = true) =>
+ findEnableCheckbox().vm.$emit('input', isEnabled);
describe('template', () => {
describe('upgrade banner for non-Premium user', () => {
beforeEach(() => {
- createComponent({ initialProjectKey: '', showJiraIssuesIntegration: false });
+ createComponent({ props: { initialProjectKey: '', showJiraIssuesIntegration: false } });
});
it('shows upgrade banner', () => {
@@ -47,7 +54,7 @@ describe('JiraIssuesFields', () => {
describe('Enable Jira issues checkbox', () => {
beforeEach(() => {
- createComponent({ initialProjectKey: '' });
+ createComponent({ props: { initialProjectKey: '' } });
});
it('does not show upgrade banner', () => {
@@ -69,20 +76,16 @@ describe('JiraIssuesFields', () => {
});
describe('on enable issues', () => {
- it('enables project_key input', () => {
- findEnableCheckbox().vm.$emit('input', true);
+ it('enables project_key input', async () => {
+ await setEnableCheckbox(true);
- return wrapper.vm.$nextTick().then(() => {
- expect(findProjectKey().attributes('disabled')).toBeUndefined();
- });
+ expect(findProjectKey().attributes('disabled')).toBeUndefined();
});
- it('requires project_key input', () => {
- findEnableCheckbox().vm.$emit('input', true);
+ it('requires project_key input', async () => {
+ await setEnableCheckbox(true);
- return wrapper.vm.$nextTick().then(() => {
- expect(findProjectKey().attributes('required')).toBe('required');
- });
+ expect(findProjectKey().attributes('required')).toBe('required');
});
});
});
@@ -103,10 +106,46 @@ describe('JiraIssuesFields', () => {
});
it('does not contain warning when GitLab issues is disabled', () => {
- createComponent({ gitlabIssuesEnabled: false });
+ createComponent({ props: { gitlabIssuesEnabled: false } });
expect(wrapper.text()).not.toContain(expectedText);
});
});
+
+ describe('Vulnerabilities creation', () => {
+ beforeEach(() => {
+ createComponent({ provide: { glFeatures: { jiraForVulnerabilities: true } } });
+ });
+
+ it.each([true, false])(
+ 'shows the jira-vulnerabilities component correctly when jira issues enabled is set to "%s"',
+ async (hasJiraIssuesEnabled) => {
+ await setEnableCheckbox(hasJiraIssuesEnabled);
+
+ expect(findJiraForVulnerabilities().exists()).toBe(hasJiraIssuesEnabled);
+ },
+ );
+
+ it('emits "getJiraIssueTypes" to the eventHub when the jira-vulnerabilities component requests to fetch issue types', async () => {
+ const eventHubEmitSpy = jest.spyOn(eventHub, '$emit');
+
+ await setEnableCheckbox(true);
+ await findJiraForVulnerabilities().vm.$emit('request-get-issue-types');
+
+ expect(eventHubEmitSpy).toHaveBeenCalledWith('getJiraIssueTypes');
+ });
+
+ describe('with "jiraForVulnerabilities" feature flag disabled', () => {
+ beforeEach(async () => {
+ createComponent({
+ provide: { glFeatures: { jiraForVulnerabilities: false } },
+ });
+ });
+
+ it('does not show section', () => {
+ expect(findJiraForVulnerabilities().exists()).toBe(false);
+ });
+ });
+ });
});
});
diff --git a/spec/frontend/integrations/edit/store/actions_spec.js b/spec/frontend/integrations/edit/store/actions_spec.js
index 1ff881c265d..4b7060fae55 100644
--- a/spec/frontend/integrations/edit/store/actions_spec.js
+++ b/spec/frontend/integrations/edit/store/actions_spec.js
@@ -9,6 +9,9 @@ import {
requestResetIntegration,
receiveResetIntegrationSuccess,
receiveResetIntegrationError,
+ requestJiraIssueTypes,
+ receiveJiraIssueTypesSuccess,
+ receiveJiraIssueTypesError,
} from '~/integrations/edit/store/actions';
import * as types from '~/integrations/edit/store/mutation_types';
@@ -70,4 +73,34 @@ describe('Integration form store actions', () => {
]);
});
});
+
+ describe('requestJiraIssueTypes', () => {
+ it('should commit SET_JIRA_ISSUE_TYPES_ERROR_MESSAGE and SET_IS_LOADING_JIRA_ISSUE_TYPES mutations', () => {
+ return testAction(requestJiraIssueTypes, null, state, [
+ { type: types.SET_JIRA_ISSUE_TYPES_ERROR_MESSAGE, payload: '' },
+ { type: types.SET_IS_LOADING_JIRA_ISSUE_TYPES, payload: true },
+ ]);
+ });
+ });
+
+ describe('receiveJiraIssueTypesSuccess', () => {
+ it('should commit SET_IS_LOADING_JIRA_ISSUE_TYPES and SET_JIRA_ISSUE_TYPES mutations', () => {
+ const issueTypes = ['issue', 'epic'];
+ return testAction(receiveJiraIssueTypesSuccess, issueTypes, state, [
+ { type: types.SET_IS_LOADING_JIRA_ISSUE_TYPES, payload: false },
+ { type: types.SET_JIRA_ISSUE_TYPES, payload: issueTypes },
+ ]);
+ });
+ });
+
+ describe('receiveJiraIssueTypesError', () => {
+ it('should commit SET_IS_LOADING_JIRA_ISSUE_TYPES, SET_JIRA_ISSUE_TYPES and SET_JIRA_ISSUE_TYPES_ERROR_MESSAGE mutations', () => {
+ const errorMessage = 'something went wrong';
+ return testAction(receiveJiraIssueTypesError, errorMessage, state, [
+ { type: types.SET_IS_LOADING_JIRA_ISSUE_TYPES, payload: false },
+ { type: types.SET_JIRA_ISSUE_TYPES, payload: [] },
+ { type: types.SET_JIRA_ISSUE_TYPES_ERROR_MESSAGE, payload: errorMessage },
+ ]);
+ });
+ });
});
diff --git a/spec/frontend/integrations/edit/store/mutations_spec.js b/spec/frontend/integrations/edit/store/mutations_spec.js
index 81f39adb87f..bf2a51d4dad 100644
--- a/spec/frontend/integrations/edit/store/mutations_spec.js
+++ b/spec/frontend/integrations/edit/store/mutations_spec.js
@@ -56,4 +56,30 @@ describe('Integration form store mutations', () => {
expect(state.isResetting).toBe(false);
});
});
+
+ describe(`${types.SET_JIRA_ISSUE_TYPES}`, () => {
+ it('sets jiraIssueTypes', () => {
+ const jiraIssueTypes = ['issue', 'epic'];
+ mutations[types.SET_JIRA_ISSUE_TYPES](state, jiraIssueTypes);
+
+ expect(state.jiraIssueTypes).toBe(jiraIssueTypes);
+ });
+ });
+
+ describe(`${types.SET_IS_LOADING_JIRA_ISSUE_TYPES}`, () => {
+ it.each([true, false])('sets isLoadingJiraIssueTypes to "%s"', (isLoading) => {
+ mutations[types.SET_IS_LOADING_JIRA_ISSUE_TYPES](state, isLoading);
+
+ expect(state.isLoadingJiraIssueTypes).toBe(isLoading);
+ });
+ });
+
+ describe(`${types.SET_JIRA_ISSUE_TYPES_ERROR_MESSAGE}`, () => {
+ it('sets loadingJiraIssueTypesErrorMessage', () => {
+ const errorMessage = 'something went wrong';
+ mutations[types.SET_JIRA_ISSUE_TYPES_ERROR_MESSAGE](state, errorMessage);
+
+ expect(state.loadingJiraIssueTypesErrorMessage).toBe(errorMessage);
+ });
+ });
});
diff --git a/spec/frontend/integrations/edit/store/state_spec.js b/spec/frontend/integrations/edit/store/state_spec.js
index 4d0f4a1da71..6cd84836395 100644
--- a/spec/frontend/integrations/edit/store/state_spec.js
+++ b/spec/frontend/integrations/edit/store/state_spec.js
@@ -9,6 +9,9 @@ describe('Integration form state factory', () => {
isTesting: false,
isResetting: false,
override: false,
+ isLoadingJiraIssueTypes: false,
+ jiraIssueTypes: [],
+ loadingJiraIssueTypesErrorMessage: '',
});
});
diff --git a/spec/frontend/integrations/integration_settings_form_spec.js b/spec/frontend/integrations/integration_settings_form_spec.js
index bba851ad796..373f8f196f9 100644
--- a/spec/frontend/integrations/integration_settings_form_spec.js
+++ b/spec/frontend/integrations/integration_settings_form_spec.js
@@ -132,4 +132,83 @@ describe('IntegrationSettingsForm', () => {
expect(dispatchSpy).toHaveBeenCalledWith('setIsTesting', false);
});
});
+
+ describe('getJiraIssueTypes', () => {
+ let integrationSettingsForm;
+ let formData;
+ let mock;
+
+ beforeEach(() => {
+ mock = new MockAdaptor(axios);
+
+ jest.spyOn(axios, 'put');
+
+ integrationSettingsForm = new IntegrationSettingsForm('.js-integration-settings-form');
+ integrationSettingsForm.init();
+
+ // eslint-disable-next-line no-jquery/no-serialize
+ formData = integrationSettingsForm.$form.serialize();
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ it('should always dispatch `requestJiraIssueTypes`', async () => {
+ const dispatchSpy = jest.fn();
+
+ mock.onPut(integrationSettingsForm.testEndPoint).networkError();
+
+ integrationSettingsForm.vue.$store = { dispatch: dispatchSpy };
+
+ await integrationSettingsForm.getJiraIssueTypes();
+
+ expect(dispatchSpy).toHaveBeenCalledWith('requestJiraIssueTypes');
+ });
+
+ it('should make an ajax request with provided `formData`', async () => {
+ await integrationSettingsForm.getJiraIssueTypes(formData);
+
+ expect(axios.put).toHaveBeenCalledWith(integrationSettingsForm.testEndPoint, formData);
+ });
+
+ it('should dispatch `receiveJiraIssueTypesSuccess` with the correct payload if ajax request is successful', async () => {
+ const mockData = ['ISSUE', 'EPIC'];
+ const dispatchSpy = jest.fn();
+
+ mock.onPut(integrationSettingsForm.testEndPoint).reply(200, {
+ error: false,
+ issuetypes: mockData,
+ });
+
+ integrationSettingsForm.vue.$store = { dispatch: dispatchSpy };
+
+ await integrationSettingsForm.getJiraIssueTypes(formData);
+
+ expect(dispatchSpy).toHaveBeenCalledWith('receiveJiraIssueTypesSuccess', mockData);
+ });
+
+ it.each(['something went wrong', undefined])(
+ 'should dispatch "receiveJiraIssueTypesError" with a message if the backend responds with error',
+ async (responseErrorMessage) => {
+ const defaultErrorMessage = 'Connection failed. Please check your settings.';
+ const expectedErrorMessage = responseErrorMessage || defaultErrorMessage;
+ const dispatchSpy = jest.fn();
+
+ mock.onPut(integrationSettingsForm.testEndPoint).reply(200, {
+ error: true,
+ message: responseErrorMessage,
+ });
+
+ integrationSettingsForm.vue.$store = { dispatch: dispatchSpy };
+
+ await integrationSettingsForm.getJiraIssueTypes(formData);
+
+ expect(dispatchSpy).toHaveBeenCalledWith(
+ 'receiveJiraIssueTypesError',
+ expectedErrorMessage,
+ );
+ },
+ );
+ });
});
diff --git a/spec/frontend/issuable_list/components/issuable_item_spec.js b/spec/frontend/issuable_list/components/issuable_item_spec.js
index 3c01bf2d319..65e52d05084 100644
--- a/spec/frontend/issuable_list/components/issuable_item_spec.js
+++ b/spec/frontend/issuable_list/components/issuable_item_spec.js
@@ -257,6 +257,23 @@ describe('IssuableItem', () => {
);
});
+ it('renders issuable confidential icon when issuable is confidential', async () => {
+ wrapper.setProps({
+ issuable: {
+ ...mockIssuable,
+ confidential: true,
+ },
+ });
+
+ await wrapper.vm.$nextTick();
+
+ const confidentialEl = wrapper.find('[data-testid="issuable-title"]').find(GlIcon);
+
+ expect(confidentialEl.exists()).toBe(true);
+ expect(confidentialEl.props('name')).toBe('eye-slash');
+ expect(confidentialEl.attributes('title')).toBe('Confidential');
+ });
+
it('renders issuable reference', () => {
const referenceEl = wrapper.find('[data-testid="issuable-reference"]');
diff --git a/spec/frontend/issue_show/components/app_spec.js b/spec/frontend/issue_show/components/app_spec.js
index ec2055ca7d1..9be45686d91 100644
--- a/spec/frontend/issue_show/components/app_spec.js
+++ b/spec/frontend/issue_show/components/app_spec.js
@@ -7,6 +7,10 @@ import { visitUrl } from '~/lib/utils/url_utility';
import '~/behaviors/markdown/render_gfm';
import IssuableApp from '~/issue_show/components/app.vue';
import eventHub from '~/issue_show/event_hub';
+import IncidentTabs from '~/issue_show/components/incidents/incident_tabs.vue';
+import DescriptionComponent from '~/issue_show/components/description.vue';
+import PinnedLinks from '~/issue_show/components/pinned_links.vue';
+import { IssuableStatus, IssuableStatusText } from '~/issue_show/constants';
import {
appProps,
initialRequest,
@@ -14,10 +18,6 @@ import {
secondRequest,
zoomMeetingUrl,
} from '../mock_data';
-import IncidentTabs from '~/issue_show/components/incidents/incident_tabs.vue';
-import DescriptionComponent from '~/issue_show/components/description.vue';
-import PinnedLinks from '~/issue_show/components/pinned_links.vue';
-import { IssuableStatus, IssuableStatusText } from '~/issue_show/constants';
function formatText(text) {
return text.trim().replace(/\s\s+/g, ' ');
diff --git a/spec/frontend/issue_show/components/incidents/incident_tabs_spec.js b/spec/frontend/issue_show/components/incidents/incident_tabs_spec.js
index 416870d1408..ef825688b69 100644
--- a/spec/frontend/issue_show/components/incidents/incident_tabs_spec.js
+++ b/spec/frontend/issue_show/components/incidents/incident_tabs_spec.js
@@ -4,12 +4,12 @@ import { GlTab } from '@gitlab/ui';
import waitForPromises from 'helpers/wait_for_promises';
import INVALID_URL from '~/lib/utils/invalid_url';
import IncidentTabs from '~/issue_show/components/incidents/incident_tabs.vue';
-import { descriptionProps } from '../../mock_data';
import DescriptionComponent from '~/issue_show/components/description.vue';
import HighlightBar from '~/issue_show/components/incidents/highlight_bar.vue';
import AlertDetailsTable from '~/vue_shared/components/alert_details_table.vue';
import Tracking from '~/tracking';
import { trackIncidentDetailsViewsOptions } from '~/incidents/constants';
+import { descriptionProps } from '../../mock_data';
const mockAlert = {
__typename: 'AlertManagementAlert',
diff --git a/spec/frontend/issue_show/issue_spec.js b/spec/frontend/issue_show/issue_spec.js
index 818f501882b..51f3a580e6e 100644
--- a/spec/frontend/issue_show/issue_spec.js
+++ b/spec/frontend/issue_show/issue_spec.js
@@ -4,8 +4,8 @@ import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import { initIssuableApp } from '~/issue_show/issue';
import * as parseData from '~/issue_show/utils/parse_data';
-import { appProps } from './mock_data';
import createStore from '~/notes/stores';
+import { appProps } from './mock_data';
const mock = new MockAdapter(axios);
mock.onGet().reply(200);
diff --git a/spec/frontend/issues_list/components/issuable_spec.js b/spec/frontend/issues_list/components/issuable_spec.js
index b47a84ad7f6..dcc3542e905 100644
--- a/spec/frontend/issues_list/components/issuable_spec.js
+++ b/spec/frontend/issues_list/components/issuable_spec.js
@@ -7,8 +7,8 @@ import { formatDate } from '~/lib/utils/datetime_utility';
import { mergeUrlParams } from '~/lib/utils/url_utility';
import Issuable from '~/issues_list/components/issuable.vue';
import IssueAssignees from '~/vue_shared/components/issue/issue_assignees.vue';
-import { simpleIssue, testAssignees, testLabels } from '../issuable_list_test_data';
import { isScopedLabel } from '~/lib/utils/common_utils';
+import { simpleIssue, testAssignees, testLabels } from '../issuable_list_test_data';
jest.mock('~/user_popovers');
diff --git a/spec/frontend/jira_connect/api_spec.js b/spec/frontend/jira_connect/api_spec.js
index 8fecbee9ca7..f98d56a6621 100644
--- a/spec/frontend/jira_connect/api_spec.js
+++ b/spec/frontend/jira_connect/api_spec.js
@@ -14,7 +14,7 @@ describe('JiraConnect API', () => {
const mockJwt = 'jwt';
const mockResponse = { success: true };
- const tokenSpy = jest.fn().mockReturnValue(mockJwt);
+ const tokenSpy = jest.fn((callback) => callback(mockJwt));
window.AP = {
context: {
diff --git a/spec/frontend/jira_connect/components/app_spec.js b/spec/frontend/jira_connect/components/app_spec.js
index be990d5061c..a9194cfc883 100644
--- a/spec/frontend/jira_connect/components/app_spec.js
+++ b/spec/frontend/jira_connect/components/app_spec.js
@@ -1,19 +1,20 @@
-import Vue from 'vue';
-import Vuex from 'vuex';
import { shallowMount } from '@vue/test-utils';
+import { GlAlert, GlButton, GlModal } from '@gitlab/ui';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import { GlAlert } from '@gitlab/ui';
+
import JiraConnectApp from '~/jira_connect/components/app.vue';
import createStore from '~/jira_connect/store';
import { SET_ERROR_MESSAGE } from '~/jira_connect/store/mutation_types';
-Vue.use(Vuex);
+jest.mock('~/jira_connect/api');
describe('JiraConnectApp', () => {
let wrapper;
let store;
const findAlert = () => wrapper.findComponent(GlAlert);
+ const findGlButton = () => wrapper.findComponent(GlButton);
+ const findGlModal = () => wrapper.findComponent(GlModal);
const findHeader = () => wrapper.findByTestId('new-jira-connect-ui-heading');
const findHeaderText = () => findHeader().text();
@@ -44,6 +45,33 @@ describe('JiraConnectApp', () => {
expect(findHeaderText()).toBe('Linked namespaces');
});
+ describe('when user is not logged in', () => {
+ beforeEach(() => {
+ createComponent({
+ provide: {
+ glFeatures: { newJiraConnectUi: true },
+ usersPath: '/users',
+ },
+ });
+ });
+
+ it('renders "Sign in" button', () => {
+ expect(findGlButton().text()).toBe('Sign in to add namespaces');
+ expect(findGlModal().exists()).toBe(false);
+ });
+ });
+
+ describe('when user is logged in', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders "Add" button and modal', () => {
+ expect(findGlButton().text()).toBe('Add namespace');
+ expect(findGlModal().exists()).toBe(true);
+ });
+ });
+
describe('newJiraConnectUi is false', () => {
it('does not render new UI', () => {
createComponent({
diff --git a/spec/frontend/jira_connect/components/groups_list_item_spec.js b/spec/frontend/jira_connect/components/groups_list_item_spec.js
index 77577c53cf4..2499be1ea5f 100644
--- a/spec/frontend/jira_connect/components/groups_list_item_spec.js
+++ b/spec/frontend/jira_connect/components/groups_list_item_spec.js
@@ -1,27 +1,37 @@
-import { shallowMount } from '@vue/test-utils';
-import { GlAvatar } from '@gitlab/ui';
+import { mount, shallowMount } from '@vue/test-utils';
+import { GlAvatar, GlButton } from '@gitlab/ui';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import { mockGroup1 } from '../mock_data';
+import waitForPromises from 'helpers/wait_for_promises';
import GroupsListItem from '~/jira_connect/components/groups_list_item.vue';
+import * as JiraConnectApi from '~/jira_connect/api';
+import { mockGroup1 } from '../mock_data';
describe('GroupsListItem', () => {
let wrapper;
+ const mockSubscriptionPath = 'subscriptionPath';
- const createComponent = () => {
+ const reloadSpy = jest.fn();
+
+ global.AP = {
+ navigator: {
+ reload: reloadSpy,
+ },
+ };
+
+ const createComponent = ({ mountFn = shallowMount } = {}) => {
wrapper = extendedWrapper(
- shallowMount(GroupsListItem, {
+ mountFn(GroupsListItem, {
propsData: {
group: mockGroup1,
},
+ provide: {
+ subscriptionsPath: mockSubscriptionPath,
+ },
}),
);
};
- beforeEach(() => {
- createComponent();
- });
-
afterEach(() => {
wrapper.destroy();
wrapper = null;
@@ -30,17 +40,82 @@ describe('GroupsListItem', () => {
const findGlAvatar = () => wrapper.find(GlAvatar);
const findGroupName = () => wrapper.findByTestId('group-list-item-name');
const findGroupDescription = () => wrapper.findByTestId('group-list-item-description');
+ const findLinkButton = () => wrapper.find(GlButton);
+ const clickLinkButton = () => findLinkButton().trigger('click');
- it('renders group avatar', () => {
- expect(findGlAvatar().exists()).toBe(true);
- expect(findGlAvatar().props('src')).toBe(mockGroup1.avatar_url);
- });
+ describe('template', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders group avatar', () => {
+ expect(findGlAvatar().exists()).toBe(true);
+ expect(findGlAvatar().props('src')).toBe(mockGroup1.avatar_url);
+ });
+
+ it('renders group name', () => {
+ expect(findGroupName().text()).toBe(mockGroup1.full_name);
+ });
- it('renders group name', () => {
- expect(findGroupName().text()).toBe(mockGroup1.full_name);
+ it('renders group description', () => {
+ expect(findGroupDescription().text()).toBe(mockGroup1.description);
+ });
+
+ it('renders Link button', () => {
+ expect(findLinkButton().exists()).toBe(true);
+ expect(findLinkButton().text()).toBe('Link');
+ });
});
- it('renders group description', () => {
- expect(findGroupDescription().text()).toBe(mockGroup1.description);
+ describe('on Link button click', () => {
+ let addSubscriptionSpy;
+
+ beforeEach(() => {
+ createComponent({ mountFn: mount });
+
+ addSubscriptionSpy = jest.spyOn(JiraConnectApi, 'addSubscription').mockResolvedValue();
+ });
+
+ it('sets button to loading and sends request', async () => {
+ expect(findLinkButton().props('loading')).toBe(false);
+
+ clickLinkButton();
+
+ await wrapper.vm.$nextTick();
+
+ expect(findLinkButton().props('loading')).toBe(true);
+
+ expect(addSubscriptionSpy).toHaveBeenCalledWith(mockSubscriptionPath, mockGroup1.full_path);
+ });
+
+ describe('when request is successful', () => {
+ it('reloads the page', async () => {
+ clickLinkButton();
+
+ await waitForPromises();
+
+ expect(reloadSpy).toHaveBeenCalled();
+ });
+ });
+
+ describe('when request has errors', () => {
+ const mockErrorMessage = 'error message';
+ const mockError = { response: { data: { error: mockErrorMessage } } };
+
+ beforeEach(() => {
+ addSubscriptionSpy = jest
+ .spyOn(JiraConnectApi, 'addSubscription')
+ .mockRejectedValue(mockError);
+ });
+
+ it('emits `error` event', async () => {
+ clickLinkButton();
+
+ await waitForPromises();
+
+ expect(reloadSpy).not.toHaveBeenCalled();
+ expect(wrapper.emitted('error')[0][0]).toBe(mockErrorMessage);
+ });
+ });
});
});
diff --git a/spec/frontend/jira_connect/components/groups_list_spec.js b/spec/frontend/jira_connect/components/groups_list_spec.js
index 94f158e6344..b18a913ca6a 100644
--- a/spec/frontend/jira_connect/components/groups_list_spec.js
+++ b/spec/frontend/jira_connect/components/groups_list_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlLoadingIcon } from '@gitlab/ui';
+import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
import waitForPromises from 'helpers/wait_for_promises';
import { fetchGroups } from '~/jira_connect/api';
@@ -28,6 +28,7 @@ describe('GroupsList', () => {
wrapper = null;
});
+ const findGlAlert = () => wrapper.find(GlAlert);
const findGlLoadingIcon = () => wrapper.find(GlLoadingIcon);
const findAllItems = () => wrapper.findAll(GroupsListItem);
const findFirstItem = () => findAllItems().at(0);
@@ -45,6 +46,18 @@ describe('GroupsList', () => {
});
});
+ describe('error fetching groups', () => {
+ it('renders error message', async () => {
+ fetchGroups.mockRejectedValue();
+ createComponent();
+
+ await waitForPromises();
+
+ expect(findGlAlert().exists()).toBe(true);
+ expect(findGlAlert().text()).toBe('Failed to load namespaces. Please try again.');
+ });
+ });
+
describe('no groups returned', () => {
it('renders empty state', async () => {
fetchGroups.mockResolvedValue(mockEmptyResponse);
@@ -57,15 +70,28 @@ describe('GroupsList', () => {
});
describe('with groups returned', () => {
- it('renders groups list', async () => {
+ beforeEach(async () => {
fetchGroups.mockResolvedValue({ data: [mockGroup1, mockGroup2] });
createComponent();
await waitForPromises();
+ });
+ it('renders groups list', () => {
expect(findAllItems().length).toBe(2);
expect(findFirstItem().props('group')).toBe(mockGroup1);
expect(findSecondItem().props('group')).toBe(mockGroup2);
});
+
+ it('shows error message on $emit from item', async () => {
+ const errorMessage = 'error message';
+
+ findFirstItem().vm.$emit('error', errorMessage);
+
+ await wrapper.vm.$nextTick();
+
+ expect(findGlAlert().exists()).toBe(true);
+ expect(findGlAlert().text()).toContain(errorMessage);
+ });
});
});
diff --git a/spec/frontend/jira_connect/index_spec.js b/spec/frontend/jira_connect/index_spec.js
new file mode 100644
index 00000000000..eb54fe6476f
--- /dev/null
+++ b/spec/frontend/jira_connect/index_spec.js
@@ -0,0 +1,56 @@
+import waitForPromises from 'helpers/wait_for_promises';
+import { initJiraConnect } from '~/jira_connect';
+import { removeSubscription } from '~/jira_connect/api';
+
+jest.mock('~/jira_connect/api', () => ({
+ removeSubscription: jest.fn().mockResolvedValue(),
+ getLocation: jest.fn().mockResolvedValue('test/location'),
+}));
+
+describe('initJiraConnect', () => {
+ window.AP = {
+ navigator: {
+ reload: jest.fn(),
+ },
+ };
+
+ beforeEach(async () => {
+ setFixtures(`
+ <a class="js-jira-connect-sign-in" href="https://gitlab.com">Sign In</a>
+ <a class="js-jira-connect-sign-in" href="https://gitlab.com">Another Sign In</a>
+
+ <a href="https://gitlab.com/sub1" class="js-jira-connect-remove-subscription">Remove</a>
+ <a href="https://gitlab.com/sub2" class="js-jira-connect-remove-subscription">Remove</a>
+ <a href="https://gitlab.com/sub3" class="js-jira-connect-remove-subscription">Remove</a>
+ `);
+
+ await initJiraConnect();
+ });
+
+ describe('Sign in links', () => {
+ it('have `return_to` query parameter', () => {
+ Array.from(document.querySelectorAll('.js-jira-connect-sign-in')).forEach((el) => {
+ expect(el.href).toContain('return_to=test/location');
+ });
+ });
+ });
+
+ describe('`remove subscription` buttons', () => {
+ describe('on click', () => {
+ it('calls `removeSubscription`', () => {
+ Array.from(document.querySelectorAll('.js-jira-connect-remove-subscription')).forEach(
+ (removeSubscriptionButton) => {
+ removeSubscriptionButton.dispatchEvent(new Event('click'));
+
+ waitForPromises();
+
+ expect(removeSubscription).toHaveBeenCalledWith(removeSubscriptionButton.href);
+ expect(removeSubscription).toHaveBeenCalledTimes(1);
+
+ removeSubscription.mockClear();
+ },
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/jira_connect/mock_data.js b/spec/frontend/jira_connect/mock_data.js
index 31565912489..22255fabc3d 100644
--- a/spec/frontend/jira_connect/mock_data.js
+++ b/spec/frontend/jira_connect/mock_data.js
@@ -3,6 +3,7 @@ export const mockGroup1 = {
avatar_url: 'avatar.png',
name: 'Gitlab Org',
full_name: 'Gitlab Org',
+ full_path: 'gitlab-org',
description: 'Open source software to collaborate on code',
};
@@ -11,5 +12,6 @@ export const mockGroup2 = {
avatar_url: 'avatar.png',
name: 'Gitlab Com',
full_name: 'Gitlab Com',
+ full_path: 'gitlab-com',
description: 'For GitLab company related projects',
};
diff --git a/spec/frontend/jira_import/components/jira_import_form_spec.js b/spec/frontend/jira_import/components/jira_import_form_spec.js
index 00fb8f5435e..1cbd0f5ad30 100644
--- a/spec/frontend/jira_import/components/jira_import_form_spec.js
+++ b/spec/frontend/jira_import/components/jira_import_form_spec.js
@@ -1,4 +1,13 @@
-import { GlAlert, GlButton, GlDropdown, GlFormSelect, GlLabel, GlTable } from '@gitlab/ui';
+import {
+ GlAlert,
+ GlButton,
+ GlDropdown,
+ GlDropdownItem,
+ GlFormSelect,
+ GlLabel,
+ GlSearchBoxByType,
+ GlTable,
+} from '@gitlab/ui';
import { getByRole } from '@testing-library/dom';
import { mount, shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
@@ -6,6 +15,7 @@ import axios from '~/lib/utils/axios_utils';
import JiraImportForm from '~/jira_import/components/jira_import_form.vue';
import getJiraUserMappingMutation from '~/jira_import/queries/get_jira_user_mapping.mutation.graphql';
import initiateJiraImportMutation from '~/jira_import/queries/initiate_jira_import.mutation.graphql';
+import searchProjectMembersQuery from '~/jira_import/queries/search_project_members.query.graphql';
import {
imports,
issuesPath,
@@ -19,6 +29,7 @@ import {
describe('JiraImportForm', () => {
let axiosMock;
let mutateSpy;
+ let querySpy;
let wrapper;
const currentUsername = 'mrgitlab';
@@ -72,6 +83,7 @@ describe('JiraImportForm', () => {
$apollo: {
loading,
mutate,
+ query: querySpy,
},
},
currentUsername,
@@ -79,19 +91,21 @@ describe('JiraImportForm', () => {
beforeEach(() => {
axiosMock = new AxiosMockAdapter(axios);
- mutateSpy = jest.fn(() =>
- Promise.resolve({
- data: {
- jiraImportStart: { errors: [] },
- jiraImportUsers: { jiraUsers: [], errors: [] },
- },
- }),
- );
+ mutateSpy = jest.fn().mockResolvedValue({
+ data: {
+ jiraImportStart: { errors: [] },
+ jiraImportUsers: { jiraUsers: [], errors: [] },
+ },
+ });
+ querySpy = jest.fn().mockResolvedValue({
+ data: { project: { projectMembers: { nodes: [] } } },
+ });
});
afterEach(() => {
axiosMock.restore();
mutateSpy.mockRestore();
+ querySpy.mockRestore();
wrapper.destroy();
wrapper = null;
});
@@ -236,6 +250,53 @@ describe('JiraImportForm', () => {
});
});
+ describe('member search', () => {
+ describe('when searching for a member', () => {
+ beforeEach(() => {
+ querySpy = jest.fn().mockResolvedValue({
+ data: {
+ project: {
+ projectMembers: {
+ nodes: [
+ {
+ user: {
+ id: 7,
+ name: 'Frederic Chopin',
+ username: 'fchopin',
+ },
+ },
+ ],
+ },
+ },
+ },
+ });
+
+ wrapper = mountComponent({ mountFunction: mount });
+
+ wrapper.find(GlSearchBoxByType).vm.$emit('input', 'fred');
+ });
+
+ it('makes a GraphQL call', () => {
+ const queryArgument = {
+ query: searchProjectMembersQuery,
+ variables: {
+ fullPath: projectPath,
+ search: 'fred',
+ },
+ };
+
+ expect(querySpy).toHaveBeenCalledWith(expect.objectContaining(queryArgument));
+ });
+
+ it('updates the user list', () => {
+ expect(getUserDropdown().findAll(GlDropdownItem)).toHaveLength(1);
+ expect(getUserDropdown().find(GlDropdownItem).text()).toContain(
+ 'fchopin (Frederic Chopin)',
+ );
+ });
+ });
+ });
+
describe('buttons', () => {
describe('"Continue" button', () => {
it('is shown', () => {
diff --git a/spec/frontend/jobs/components/erased_block_spec.js b/spec/frontend/jobs/components/erased_block_spec.js
index b3e1d28eb16..9557ecdbb91 100644
--- a/spec/frontend/jobs/components/erased_block_spec.js
+++ b/spec/frontend/jobs/components/erased_block_spec.js
@@ -10,6 +10,8 @@ describe('Erased block', () => {
const timeago = getTimeago();
const formattedDate = timeago.format(erasedAt);
+ const findLink = () => wrapper.find(GlLink);
+
const createComponent = (props) => {
wrapper = mount(ErasedBlock, {
propsData: props,
@@ -32,7 +34,7 @@ describe('Erased block', () => {
});
it('renders username and link', () => {
- expect(wrapper.find(GlLink).attributes('href')).toEqual('gitlab.com/root');
+ expect(findLink().attributes('href')).toEqual('gitlab.com/root');
expect(wrapper.text().trim()).toContain('Job has been erased by');
expect(wrapper.text().trim()).toContain('root');
diff --git a/spec/frontend/jobs/components/job_app_spec.js b/spec/frontend/jobs/components/job_app_spec.js
index 657687b5e2a..b9d27932dff 100644
--- a/spec/frontend/jobs/components/job_app_spec.js
+++ b/spec/frontend/jobs/components/job_app_spec.js
@@ -34,7 +34,6 @@ describe('Job App', () => {
const props = {
artifactHelpUrl: 'help/artifact',
- runnerHelpUrl: 'help/runner',
deploymentHelpUrl: 'help/deployment',
runnerSettingsUrl: 'settings/ci-cd/runners',
variablesSettingsUrl: 'settings/ci-cd/variables',
diff --git a/spec/frontend/jobs/components/job_sidebar_details_container_spec.js b/spec/frontend/jobs/components/job_sidebar_details_container_spec.js
index bc0d455c309..a29d88e5c1e 100644
--- a/spec/frontend/jobs/components/job_sidebar_details_container_spec.js
+++ b/spec/frontend/jobs/components/job_sidebar_details_container_spec.js
@@ -116,14 +116,5 @@ describe('Job Sidebar Details Container', () => {
expect(findJobTimeout().exists()).toBe(false);
});
-
- it('should pass the help URL', async () => {
- const helpUrl = 'fakeUrl';
- const props = { runnerHelpUrl: helpUrl };
- createWrapper({ props });
- await store.dispatch('receiveJobSuccess', { metadata: { timeout_human_readable } });
-
- expect(findJobTimeout().props('helpUrl')).toBe(helpUrl);
- });
});
});
diff --git a/spec/frontend/jobs/components/job_sidebar_retry_button_spec.js b/spec/frontend/jobs/components/job_sidebar_retry_button_spec.js
index 4bf697ab7cc..8fc5b071e54 100644
--- a/spec/frontend/jobs/components/job_sidebar_retry_button_spec.js
+++ b/spec/frontend/jobs/components/job_sidebar_retry_button_spec.js
@@ -1,8 +1,8 @@
import { GlButton, GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import job from '../mock_data';
import JobsSidebarRetryButton from '~/jobs/components/job_sidebar_retry_button.vue';
import createStore from '~/jobs/store';
+import job from '../mock_data';
describe('Job Sidebar Retry Button', () => {
let store;
diff --git a/spec/frontend/jobs/components/trigger_block_spec.js b/spec/frontend/jobs/components/trigger_block_spec.js
index 16ea276ee4a..0af768c6c52 100644
--- a/spec/frontend/jobs/components/trigger_block_spec.js
+++ b/spec/frontend/jobs/components/trigger_block_spec.js
@@ -1,100 +1,86 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import component from '~/jobs/components/trigger_block.vue';
+import { mount } from '@vue/test-utils';
+import { GlButton, GlTable } from '@gitlab/ui';
+import TriggerBlock from '~/jobs/components/trigger_block.vue';
describe('Trigger block', () => {
- const Component = Vue.extend(component);
- let vm;
+ let wrapper;
+
+ const findRevealButton = () => wrapper.find(GlButton);
+ const findVariableTable = () => wrapper.find(GlTable);
+ const findShortToken = () => wrapper.find('[data-testid="trigger-short-token"]');
+ const findVariableValue = (index) =>
+ wrapper.findAll('[data-testid="trigger-build-value"]').at(index);
+ const findVariableKey = (index) => wrapper.findAll('[data-testid="trigger-build-key"]').at(index);
+
+ const createComponent = (props) => {
+ wrapper = mount(TriggerBlock, {
+ propsData: {
+ ...props,
+ },
+ });
+ };
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
+ wrapper = null;
});
- describe('with short token', () => {
+ describe('with short token and no variables', () => {
it('renders short token', () => {
- vm = mountComponent(Component, {
+ createComponent({
trigger: {
short_token: '0a666b2',
+ variables: [],
},
});
- expect(vm.$el.querySelector('.js-short-token').textContent).toContain('0a666b2');
+ expect(findShortToken().text()).toContain('0a666b2');
});
});
- describe('without short token', () => {
+ describe('without variables or short token', () => {
+ beforeEach(() => {
+ createComponent({ trigger: { variables: [] } });
+ });
+
it('does not render short token', () => {
- vm = mountComponent(Component, { trigger: {} });
+ expect(findShortToken().exists()).toBe(false);
+ });
- expect(vm.$el.querySelector('.js-short-token')).toBeNull();
+ it('does not render variables', () => {
+ expect(findRevealButton().exists()).toBe(false);
+ expect(findVariableTable().exists()).toBe(false);
});
});
describe('with variables', () => {
describe('hide/reveal variables', () => {
- it('should toggle variables on click', (done) => {
- vm = mountComponent(Component, {
+ it('should toggle variables on click', async () => {
+ const hiddenValue = '••••••';
+ const gcsVar = { key: 'UPLOAD_TO_GCS', value: 'false', public: false };
+ const s3Var = { key: 'UPLOAD_TO_S3', value: 'true', public: false };
+
+ createComponent({
trigger: {
- short_token: 'bd7e',
- variables: [
- { key: 'UPLOAD_TO_GCS', value: 'false', public: false },
- { key: 'UPLOAD_TO_S3', value: 'true', public: false },
- ],
+ variables: [gcsVar, s3Var],
},
});
- vm.$el.querySelector('.js-reveal-variables').click();
-
- vm.$nextTick()
- .then(() => {
- expect(vm.$el.querySelector('.js-build-variables')).not.toBeNull();
- expect(vm.$el.querySelector('.js-reveal-variables').textContent.trim()).toEqual(
- 'Hide values',
- );
-
- expect(vm.$el.querySelector('.js-build-variables').textContent).toContain(
- 'UPLOAD_TO_GCS',
- );
+ expect(findRevealButton().text()).toBe('Reveal values');
- expect(vm.$el.querySelector('.js-build-variables').textContent).toContain('false');
- expect(vm.$el.querySelector('.js-build-variables').textContent).toContain(
- 'UPLOAD_TO_S3',
- );
+ expect(findVariableValue(0).text()).toBe(hiddenValue);
+ expect(findVariableValue(1).text()).toBe(hiddenValue);
- expect(vm.$el.querySelector('.js-build-variables').textContent).toContain('true');
+ expect(findVariableKey(0).text()).toBe(gcsVar.key);
+ expect(findVariableKey(1).text()).toBe(s3Var.key);
- vm.$el.querySelector('.js-reveal-variables').click();
- })
- .then(vm.$nextTick)
- .then(() => {
- expect(vm.$el.querySelector('.js-reveal-variables').textContent.trim()).toEqual(
- 'Reveal values',
- );
+ await findRevealButton().trigger('click');
- expect(vm.$el.querySelector('.js-build-variables').textContent).toContain(
- 'UPLOAD_TO_GCS',
- );
+ expect(findRevealButton().text()).toBe('Hide values');
- expect(vm.$el.querySelector('.js-build-value').textContent).toContain('••••••');
-
- expect(vm.$el.querySelector('.js-build-variables').textContent).toContain(
- 'UPLOAD_TO_S3',
- );
-
- expect(vm.$el.querySelector('.js-build-value').textContent).toContain('••••••');
- })
- .then(done)
- .catch(done.fail);
+ expect(findVariableValue(0).text()).toBe(gcsVar.value);
+ expect(findVariableValue(1).text()).toBe(s3Var.value);
});
});
});
-
- describe('without variables', () => {
- it('does not render variables', () => {
- vm = mountComponent(Component, { trigger: {} });
-
- expect(vm.$el.querySelector('.js-reveal-variables')).toBeNull();
- expect(vm.$el.querySelector('.js-build-variables')).toBeNull();
- });
- });
});
diff --git a/spec/frontend/lib/utils/array_utility_spec.js b/spec/frontend/lib/utils/array_utility_spec.js
new file mode 100644
index 00000000000..b95286ff254
--- /dev/null
+++ b/spec/frontend/lib/utils/array_utility_spec.js
@@ -0,0 +1,32 @@
+import * as arrayUtils from '~/lib/utils/array_utility';
+
+describe('array_utility', () => {
+ describe('swapArrayItems', () => {
+ it.each`
+ array | leftIndex | rightIndex | result
+ ${[]} | ${0} | ${0} | ${[]}
+ ${[1]} | ${0} | ${1} | ${[1]}
+ ${[1, 2]} | ${0} | ${0} | ${[1, 2]}
+ ${[1, 2]} | ${0} | ${1} | ${[2, 1]}
+ ${[1, 2]} | ${1} | ${2} | ${[1, 2]}
+ ${[1, 2]} | ${2} | ${1} | ${[1, 2]}
+ ${[1, 2]} | ${1} | ${10} | ${[1, 2]}
+ ${[1, 2]} | ${10} | ${1} | ${[1, 2]}
+ ${[1, 2]} | ${1} | ${-1} | ${[1, 2]}
+ ${[1, 2]} | ${-1} | ${1} | ${[1, 2]}
+ ${[1, 2, 3]} | ${1} | ${1} | ${[1, 2, 3]}
+ ${[1, 2, 3]} | ${0} | ${2} | ${[3, 2, 1]}
+ ${[1, 2, 3, 4]} | ${0} | ${2} | ${[3, 2, 1, 4]}
+ ${[1, 2, 3, 4, 5]} | ${0} | ${4} | ${[5, 2, 3, 4, 1]}
+ ${[1, 2, 3, 4, 5]} | ${1} | ${2} | ${[1, 3, 2, 4, 5]}
+ ${[1, 2, 3, 4, 5]} | ${2} | ${1} | ${[1, 3, 2, 4, 5]}
+ `(
+ 'given $array with indices $leftIndex and $rightIndex will return $result',
+ ({ array, leftIndex, rightIndex, result }) => {
+ const actual = arrayUtils.swapArrayItems(array, leftIndex, rightIndex);
+ expect(actual).toEqual(result);
+ expect(actual).not.toBe(array);
+ },
+ );
+ });
+});
diff --git a/spec/frontend/lib/utils/color_utils_spec.js b/spec/frontend/lib/utils/color_utils_spec.js
index 433e9d5a85e..8c846abd77f 100644
--- a/spec/frontend/lib/utils/color_utils_spec.js
+++ b/spec/frontend/lib/utils/color_utils_spec.js
@@ -1,4 +1,4 @@
-import { textColorForBackground, hexToRgb } from '~/lib/utils/color_utils';
+import { textColorForBackground, hexToRgb, validateHexColor } from '~/lib/utils/color_utils';
describe('Color utils', () => {
describe('Converting hex code to rgb', () => {
@@ -32,4 +32,19 @@ describe('Color utils', () => {
expect(textColorForBackground('#000')).toEqual('#FFFFFF');
});
});
+
+ describe('Validate hex color', () => {
+ it.each`
+ color | output
+ ${undefined} | ${null}
+ ${null} | ${null}
+ ${''} | ${null}
+ ${'ABC123'} | ${false}
+ ${'#ZZZ'} | ${false}
+ ${'#FF0'} | ${true}
+ ${'#FF0000'} | ${true}
+ `('returns $output when $color is given', ({ color, output }) => {
+ expect(validateHexColor(color)).toEqual(output);
+ });
+ });
});
diff --git a/spec/frontend/lib/utils/common_utils_spec.js b/spec/frontend/lib/utils/common_utils_spec.js
index 90222f0f718..18be88a0b8b 100644
--- a/spec/frontend/lib/utils/common_utils_spec.js
+++ b/spec/frontend/lib/utils/common_utils_spec.js
@@ -1045,4 +1045,12 @@ describe('common_utils', () => {
expect(commonUtils.getDashPath('/some/url')).toEqual(null);
});
});
+
+ describe('convertArrayToCamelCase', () => {
+ it('returns a new array with snake_case string elements converted to camelCase', () => {
+ const result = commonUtils.convertArrayToCamelCase(['hello', 'hello_world']);
+
+ expect(result).toEqual(['hello', 'helloWorld']);
+ });
+ });
});
diff --git a/spec/frontend/lib/utils/datetime_utility_spec.js b/spec/frontend/lib/utils/datetime_utility_spec.js
index 66efd43262b..f2f9362ae99 100644
--- a/spec/frontend/lib/utils/datetime_utility_spec.js
+++ b/spec/frontend/lib/utils/datetime_utility_spec.js
@@ -584,22 +584,6 @@ describe('secondsToMilliseconds', () => {
});
});
-describe('dayAfter', () => {
- const date = new Date('2019-07-16T00:00:00.000Z');
-
- it('returns the following date', () => {
- const nextDay = datetimeUtility.dayAfter(date);
- const expectedNextDate = new Date('2019-07-17T00:00:00.000Z');
-
- expect(nextDay).toStrictEqual(expectedNextDate);
- });
-
- it('does not modifiy the original date', () => {
- datetimeUtility.dayAfter(date);
- expect(date).toStrictEqual(new Date('2019-07-16T00:00:00.000Z'));
- });
-});
-
describe('secondsToDays', () => {
it('converts seconds to days correctly', () => {
expect(datetimeUtility.secondsToDays(0)).toBe(0);
@@ -608,90 +592,214 @@ describe('secondsToDays', () => {
});
});
-describe('nDaysAfter', () => {
- const date = new Date('2019-07-16T00:00:00.000Z');
+describe('date addition/subtraction methods', () => {
+ beforeEach(() => {
+ timezoneMock.register('US/Eastern');
+ });
- it.each`
- numberOfDays | expectedResult
- ${1} | ${new Date('2019-07-17T00:00:00.000Z').valueOf()}
- ${90} | ${new Date('2019-10-14T00:00:00.000Z').valueOf()}
- ${-1} | ${new Date('2019-07-15T00:00:00.000Z').valueOf()}
- ${0} | ${date.valueOf()}
- ${0.9} | ${date.valueOf()}
- `('returns $numberOfDays day(s) after the provided date', ({ numberOfDays, expectedResult }) => {
- expect(datetimeUtility.nDaysAfter(date, numberOfDays)).toBe(expectedResult);
+ afterEach(() => {
+ timezoneMock.unregister();
});
-});
-describe('nDaysBefore', () => {
- const date = new Date('2019-07-16T00:00:00.000Z');
+ describe('dayAfter', () => {
+ const input = '2019-03-10T00:00:00.000Z';
+ const expectedLocalResult = '2019-03-10T23:00:00.000Z';
+ const expectedUTCResult = '2019-03-11T00:00:00.000Z';
+
+ it.each`
+ inputAsString | options | expectedAsString
+ ${input} | ${undefined} | ${expectedLocalResult}
+ ${input} | ${{}} | ${expectedLocalResult}
+ ${input} | ${{ utc: false }} | ${expectedLocalResult}
+ ${input} | ${{ utc: true }} | ${expectedUTCResult}
+ `(
+ 'when the provided date is $inputAsString and the options parameter is $options, returns $expectedAsString',
+ ({ inputAsString, options, expectedAsString }) => {
+ const inputDate = new Date(inputAsString);
+ const actual = datetimeUtility.dayAfter(inputDate, options);
+
+ expect(actual.toISOString()).toBe(expectedAsString);
+ },
+ );
+
+ it('does not modify the original date', () => {
+ const inputDate = new Date(input);
+ datetimeUtility.dayAfter(inputDate);
+ expect(inputDate.toISOString()).toBe(input);
+ });
+ });
- it.each`
- numberOfDays | expectedResult
- ${1} | ${new Date('2019-07-15T00:00:00.000Z').valueOf()}
- ${90} | ${new Date('2019-04-17T00:00:00.000Z').valueOf()}
- ${-1} | ${new Date('2019-07-17T00:00:00.000Z').valueOf()}
- ${0} | ${date.valueOf()}
- ${0.9} | ${new Date('2019-07-15T00:00:00.000Z').valueOf()}
- `('returns $numberOfDays day(s) before the provided date', ({ numberOfDays, expectedResult }) => {
- expect(datetimeUtility.nDaysBefore(date, numberOfDays)).toBe(expectedResult);
+ describe('nDaysAfter', () => {
+ const input = '2019-07-16T00:00:00.000Z';
+
+ it.each`
+ inputAsString | numberOfDays | options | expectedAsString
+ ${input} | ${1} | ${undefined} | ${'2019-07-17T00:00:00.000Z'}
+ ${input} | ${-1} | ${undefined} | ${'2019-07-15T00:00:00.000Z'}
+ ${input} | ${0} | ${undefined} | ${'2019-07-16T00:00:00.000Z'}
+ ${input} | ${0.9} | ${undefined} | ${'2019-07-16T00:00:00.000Z'}
+ ${input} | ${120} | ${undefined} | ${'2019-11-13T01:00:00.000Z'}
+ ${input} | ${120} | ${{}} | ${'2019-11-13T01:00:00.000Z'}
+ ${input} | ${120} | ${{ utc: false }} | ${'2019-11-13T01:00:00.000Z'}
+ ${input} | ${120} | ${{ utc: true }} | ${'2019-11-13T00:00:00.000Z'}
+ `(
+ 'when the provided date is $inputAsString, numberOfDays is $numberOfDays, and the options parameter is $options, returns $expectedAsString',
+ ({ inputAsString, numberOfDays, options, expectedAsString }) => {
+ const inputDate = new Date(inputAsString);
+ const actual = datetimeUtility.nDaysAfter(inputDate, numberOfDays, options);
+
+ expect(actual.toISOString()).toBe(expectedAsString);
+ },
+ );
});
-});
-describe('nMonthsAfter', () => {
- // February has 28 days
- const feb2019 = new Date('2019-02-15T00:00:00.000Z');
- // Except in 2020, it had 29 days
- const feb2020 = new Date('2020-02-15T00:00:00.000Z');
- // April has 30 days
- const apr2020 = new Date('2020-04-15T00:00:00.000Z');
- // May has 31 days
- const may2020 = new Date('2020-05-15T00:00:00.000Z');
+ describe('nDaysBefore', () => {
+ const input = '2019-07-16T00:00:00.000Z';
+
+ it.each`
+ inputAsString | numberOfDays | options | expectedAsString
+ ${input} | ${1} | ${undefined} | ${'2019-07-15T00:00:00.000Z'}
+ ${input} | ${-1} | ${undefined} | ${'2019-07-17T00:00:00.000Z'}
+ ${input} | ${0} | ${undefined} | ${'2019-07-16T00:00:00.000Z'}
+ ${input} | ${0.9} | ${undefined} | ${'2019-07-15T00:00:00.000Z'}
+ ${input} | ${180} | ${undefined} | ${'2019-01-17T01:00:00.000Z'}
+ ${input} | ${180} | ${{}} | ${'2019-01-17T01:00:00.000Z'}
+ ${input} | ${180} | ${{ utc: false }} | ${'2019-01-17T01:00:00.000Z'}
+ ${input} | ${180} | ${{ utc: true }} | ${'2019-01-17T00:00:00.000Z'}
+ `(
+ 'when the provided date is $inputAsString, numberOfDays is $numberOfDays, and the options parameter is $options, returns $expectedAsString',
+ ({ inputAsString, numberOfDays, options, expectedAsString }) => {
+ const inputDate = new Date(inputAsString);
+ const actual = datetimeUtility.nDaysBefore(inputDate, numberOfDays, options);
+
+ expect(actual.toISOString()).toBe(expectedAsString);
+ },
+ );
+ });
- it.each`
- date | numberOfMonths | expectedResult
- ${feb2019} | ${1} | ${new Date('2019-03-15T00:00:00.000Z').valueOf()}
- ${feb2020} | ${1} | ${new Date('2020-03-15T00:00:00.000Z').valueOf()}
- ${apr2020} | ${1} | ${new Date('2020-05-15T00:00:00.000Z').valueOf()}
- ${may2020} | ${1} | ${new Date('2020-06-15T00:00:00.000Z').valueOf()}
- ${may2020} | ${12} | ${new Date('2021-05-15T00:00:00.000Z').valueOf()}
- ${may2020} | ${-1} | ${new Date('2020-04-15T00:00:00.000Z').valueOf()}
- ${may2020} | ${0} | ${may2020.valueOf()}
- ${may2020} | ${0.9} | ${may2020.valueOf()}
- `(
- 'returns $numberOfMonths month(s) after the provided date',
- ({ date, numberOfMonths, expectedResult }) => {
- expect(datetimeUtility.nMonthsAfter(date, numberOfMonths)).toBe(expectedResult);
- },
- );
-});
+ describe('nWeeksAfter', () => {
+ const input = '2021-07-16T00:00:00.000Z';
+
+ it.each`
+ inputAsString | numberOfWeeks | options | expectedAsString
+ ${input} | ${1} | ${undefined} | ${'2021-07-23T00:00:00.000Z'}
+ ${input} | ${3} | ${undefined} | ${'2021-08-06T00:00:00.000Z'}
+ ${input} | ${-1} | ${undefined} | ${'2021-07-09T00:00:00.000Z'}
+ ${input} | ${0} | ${undefined} | ${'2021-07-16T00:00:00.000Z'}
+ ${input} | ${0.6} | ${undefined} | ${'2021-07-20T00:00:00.000Z'}
+ ${input} | ${18} | ${undefined} | ${'2021-11-19T01:00:00.000Z'}
+ ${input} | ${18} | ${{}} | ${'2021-11-19T01:00:00.000Z'}
+ ${input} | ${18} | ${{ utc: false }} | ${'2021-11-19T01:00:00.000Z'}
+ ${input} | ${18} | ${{ utc: true }} | ${'2021-11-19T00:00:00.000Z'}
+ `(
+ 'when the provided date is $inputAsString, numberOfWeeks is $numberOfWeeks, and the options parameter is $options, returns $expectedAsString',
+ ({ inputAsString, numberOfWeeks, options, expectedAsString }) => {
+ const inputDate = new Date(inputAsString);
+ const actual = datetimeUtility.nWeeksAfter(inputDate, numberOfWeeks, options);
+
+ expect(actual.toISOString()).toBe(expectedAsString);
+ },
+ );
+ });
-describe('nMonthsBefore', () => {
- // The previous month (February) has 28 days
- const march2019 = new Date('2019-03-15T00:00:00.000Z');
- // Except in 2020, it had 29 days
- const march2020 = new Date('2020-03-15T00:00:00.000Z');
- // The previous month (April) has 30 days
- const may2020 = new Date('2020-05-15T00:00:00.000Z');
- // The previous month (May) has 31 days
- const june2020 = new Date('2020-06-15T00:00:00.000Z');
+ describe('nWeeksBefore', () => {
+ const input = '2021-07-16T00:00:00.000Z';
+
+ it.each`
+ inputAsString | numberOfWeeks | options | expectedAsString
+ ${input} | ${1} | ${undefined} | ${'2021-07-09T00:00:00.000Z'}
+ ${input} | ${3} | ${undefined} | ${'2021-06-25T00:00:00.000Z'}
+ ${input} | ${-1} | ${undefined} | ${'2021-07-23T00:00:00.000Z'}
+ ${input} | ${0} | ${undefined} | ${'2021-07-16T00:00:00.000Z'}
+ ${input} | ${0.6} | ${undefined} | ${'2021-07-11T00:00:00.000Z'}
+ ${input} | ${20} | ${undefined} | ${'2021-02-26T01:00:00.000Z'}
+ ${input} | ${20} | ${{}} | ${'2021-02-26T01:00:00.000Z'}
+ ${input} | ${20} | ${{ utc: false }} | ${'2021-02-26T01:00:00.000Z'}
+ ${input} | ${20} | ${{ utc: true }} | ${'2021-02-26T00:00:00.000Z'}
+ `(
+ 'when the provided date is $inputAsString, numberOfWeeks is $numberOfWeeks, and the options parameter is $options, returns $expectedAsString',
+ ({ inputAsString, numberOfWeeks, options, expectedAsString }) => {
+ const inputDate = new Date(inputAsString);
+ const actual = datetimeUtility.nWeeksBefore(inputDate, numberOfWeeks, options);
+
+ expect(actual.toISOString()).toBe(expectedAsString);
+ },
+ );
+ });
- it.each`
- date | numberOfMonths | expectedResult
- ${march2019} | ${1} | ${new Date('2019-02-15T00:00:00.000Z').valueOf()}
- ${march2020} | ${1} | ${new Date('2020-02-15T00:00:00.000Z').valueOf()}
- ${may2020} | ${1} | ${new Date('2020-04-15T00:00:00.000Z').valueOf()}
- ${june2020} | ${1} | ${new Date('2020-05-15T00:00:00.000Z').valueOf()}
- ${june2020} | ${12} | ${new Date('2019-06-15T00:00:00.000Z').valueOf()}
- ${june2020} | ${-1} | ${new Date('2020-07-15T00:00:00.000Z').valueOf()}
- ${june2020} | ${0} | ${june2020.valueOf()}
- ${june2020} | ${0.9} | ${new Date('2020-05-15T00:00:00.000Z').valueOf()}
- `(
- 'returns $numberOfMonths month(s) before the provided date',
- ({ date, numberOfMonths, expectedResult }) => {
- expect(datetimeUtility.nMonthsBefore(date, numberOfMonths)).toBe(expectedResult);
- },
- );
+ describe('nMonthsAfter', () => {
+ // February has 28 days
+ const feb2019 = '2019-02-15T00:00:00.000Z';
+ // Except in 2020, it had 29 days
+ const feb2020 = '2020-02-15T00:00:00.000Z';
+ // April has 30 days
+ const apr2020 = '2020-04-15T00:00:00.000Z';
+ // May has 31 days
+ const may2020 = '2020-05-15T00:00:00.000Z';
+ // November 1, 2020 was the day Daylight Saving Time ended in 2020 (in the US)
+ const oct2020 = '2020-10-15T00:00:00.000Z';
+
+ it.each`
+ inputAsString | numberOfMonths | options | expectedAsString
+ ${feb2019} | ${1} | ${undefined} | ${'2019-03-14T23:00:00.000Z'}
+ ${feb2020} | ${1} | ${undefined} | ${'2020-03-14T23:00:00.000Z'}
+ ${apr2020} | ${1} | ${undefined} | ${'2020-05-15T00:00:00.000Z'}
+ ${may2020} | ${1} | ${undefined} | ${'2020-06-15T00:00:00.000Z'}
+ ${may2020} | ${12} | ${undefined} | ${'2021-05-15T00:00:00.000Z'}
+ ${may2020} | ${-1} | ${undefined} | ${'2020-04-15T00:00:00.000Z'}
+ ${may2020} | ${0} | ${undefined} | ${may2020}
+ ${may2020} | ${0.9} | ${undefined} | ${may2020}
+ ${oct2020} | ${1} | ${undefined} | ${'2020-11-15T01:00:00.000Z'}
+ ${oct2020} | ${1} | ${{}} | ${'2020-11-15T01:00:00.000Z'}
+ ${oct2020} | ${1} | ${{ utc: false }} | ${'2020-11-15T01:00:00.000Z'}
+ ${oct2020} | ${1} | ${{ utc: true }} | ${'2020-11-15T00:00:00.000Z'}
+ `(
+ 'when the provided date is $inputAsString, numberOfMonths is $numberOfMonths, and the options parameter is $options, returns $expectedAsString',
+ ({ inputAsString, numberOfMonths, options, expectedAsString }) => {
+ const inputDate = new Date(inputAsString);
+ const actual = datetimeUtility.nMonthsAfter(inputDate, numberOfMonths, options);
+
+ expect(actual.toISOString()).toBe(expectedAsString);
+ },
+ );
+ });
+
+ describe('nMonthsBefore', () => {
+ // The previous month (February) has 28 days
+ const march2019 = '2019-03-15T00:00:00.000Z';
+ // Except in 2020, it had 29 days
+ const march2020 = '2020-03-15T00:00:00.000Z';
+ // The previous month (April) has 30 days
+ const may2020 = '2020-05-15T00:00:00.000Z';
+ // The previous month (May) has 31 days
+ const june2020 = '2020-06-15T00:00:00.000Z';
+ // November 1, 2020 was the day Daylight Saving Time ended in 2020 (in the US)
+ const nov2020 = '2020-11-15T00:00:00.000Z';
+
+ it.each`
+ inputAsString | numberOfMonths | options | expectedAsString
+ ${march2019} | ${1} | ${undefined} | ${'2019-02-15T01:00:00.000Z'}
+ ${march2020} | ${1} | ${undefined} | ${'2020-02-15T01:00:00.000Z'}
+ ${may2020} | ${1} | ${undefined} | ${'2020-04-15T00:00:00.000Z'}
+ ${june2020} | ${1} | ${undefined} | ${'2020-05-15T00:00:00.000Z'}
+ ${june2020} | ${12} | ${undefined} | ${'2019-06-15T00:00:00.000Z'}
+ ${june2020} | ${-1} | ${undefined} | ${'2020-07-15T00:00:00.000Z'}
+ ${june2020} | ${0} | ${undefined} | ${june2020}
+ ${june2020} | ${0.9} | ${undefined} | ${'2020-05-15T00:00:00.000Z'}
+ ${nov2020} | ${1} | ${undefined} | ${'2020-10-14T23:00:00.000Z'}
+ ${nov2020} | ${1} | ${{}} | ${'2020-10-14T23:00:00.000Z'}
+ ${nov2020} | ${1} | ${{ utc: false }} | ${'2020-10-14T23:00:00.000Z'}
+ ${nov2020} | ${1} | ${{ utc: true }} | ${'2020-10-15T00:00:00.000Z'}
+ `(
+ 'when the provided date is $inputAsString, numberOfMonths is $numberOfMonths, and the options parameter is $options, returns $expectedAsString',
+ ({ inputAsString, numberOfMonths, options, expectedAsString }) => {
+ const inputDate = new Date(inputAsString);
+ const actual = datetimeUtility.nMonthsBefore(inputDate, numberOfMonths, options);
+
+ expect(actual.toISOString()).toBe(expectedAsString);
+ },
+ );
+ });
});
describe('approximateDuration', () => {
@@ -843,7 +951,7 @@ describe('format24HourTimeStringFromInt', () => {
});
});
-describe('getOverlappingDaysInPeriods', () => {
+describe('getOverlapDateInPeriods', () => {
const start = new Date(2021, 0, 11);
const end = new Date(2021, 0, 13);
@@ -851,14 +959,15 @@ describe('getOverlappingDaysInPeriods', () => {
const givenPeriodLeft = new Date(2021, 0, 11);
const givenPeriodRight = new Date(2021, 0, 14);
- it('returns an overlap object that contains the amount of days overlapping, start date of overlap and end date of overlap', () => {
+ it('returns an overlap object that contains the number of days overlapping, the number of hours overlapping, start date of overlap and end date of overlap', () => {
expect(
- datetimeUtility.getOverlappingDaysInPeriods(
+ datetimeUtility.getOverlapDateInPeriods(
{ start, end },
{ start: givenPeriodLeft, end: givenPeriodRight },
),
).toEqual({
daysOverlap: 2,
+ hoursOverlap: 48,
overlapStartDate: givenPeriodLeft.getTime(),
overlapEndDate: end.getTime(),
});
@@ -871,7 +980,7 @@ describe('getOverlappingDaysInPeriods', () => {
it('returns an overlap object that contains a 0 value for days overlapping', () => {
expect(
- datetimeUtility.getOverlappingDaysInPeriods(
+ datetimeUtility.getOverlapDateInPeriods(
{ start, end },
{ start: givenPeriodLeft, end: givenPeriodRight },
),
@@ -886,14 +995,54 @@ describe('getOverlappingDaysInPeriods', () => {
it('throws an exception when the left period contains an invalid date', () => {
expect(() =>
- datetimeUtility.getOverlappingDaysInPeriods({ start, end }, { start: startInvalid, end }),
+ datetimeUtility.getOverlapDateInPeriods({ start, end }, { start: startInvalid, end }),
).toThrow(error);
});
it('throws an exception when the right period contains an invalid date', () => {
expect(() =>
- datetimeUtility.getOverlappingDaysInPeriods({ start, end }, { start, end: endInvalid }),
+ datetimeUtility.getOverlapDateInPeriods({ start, end }, { start, end: endInvalid }),
).toThrow(error);
});
});
});
+
+describe('isToday', () => {
+ const today = new Date();
+ it.each`
+ date | expected | negation
+ ${today} | ${true} | ${'is'}
+ ${new Date('2021-01-21T12:00:00.000Z')} | ${false} | ${'is NOT'}
+ `('returns $expected as $date $negation today', ({ date, expected }) => {
+ expect(datetimeUtility.isToday(date)).toBe(expected);
+ });
+});
+
+describe('getStartOfDay', () => {
+ beforeEach(() => {
+ timezoneMock.register('US/Eastern');
+ });
+
+ afterEach(() => {
+ timezoneMock.unregister();
+ });
+
+ it.each`
+ inputAsString | options | expectedAsString
+ ${'2021-01-29T18:08:23.014Z'} | ${undefined} | ${'2021-01-29T05:00:00.000Z'}
+ ${'2021-01-29T13:08:23.014-05:00'} | ${undefined} | ${'2021-01-29T05:00:00.000Z'}
+ ${'2021-01-30T03:08:23.014+09:00'} | ${undefined} | ${'2021-01-29T05:00:00.000Z'}
+ ${'2021-01-28T18:08:23.014-10:00'} | ${undefined} | ${'2021-01-28T05:00:00.000Z'}
+ ${'2021-01-28T18:08:23.014-10:00'} | ${{}} | ${'2021-01-28T05:00:00.000Z'}
+ ${'2021-01-28T18:08:23.014-10:00'} | ${{ utc: false }} | ${'2021-01-28T05:00:00.000Z'}
+ ${'2021-01-28T18:08:23.014-10:00'} | ${{ utc: true }} | ${'2021-01-29T00:00:00.000Z'}
+ `(
+ 'when the provided date is $inputAsString and the options parameter is $options, returns $expectedAsString',
+ ({ inputAsString, options, expectedAsString }) => {
+ const inputDate = new Date(inputAsString);
+ const actual = datetimeUtility.getStartOfDay(inputDate, options);
+
+ expect(actual.toISOString()).toEqual(expectedAsString);
+ },
+ );
+});
diff --git a/spec/frontend/lib/utils/unit_format/formatter_factory_spec.js b/spec/frontend/lib/utils/unit_format/formatter_factory_spec.js
index 26b942c3567..0ca70e0a77e 100644
--- a/spec/frontend/lib/utils/unit_format/formatter_factory_spec.js
+++ b/spec/frontend/lib/utils/unit_format/formatter_factory_spec.js
@@ -36,6 +36,27 @@ describe('unit_format/formatter_factory', () => {
expect(formatNumber(10 ** 7, undefined, 9)).toBe('1.00e+7');
expect(formatNumber(10 ** 7, undefined, 10)).toBe('10,000,000');
});
+
+ describe('formats with a different locale', () => {
+ let originalLang;
+
+ beforeAll(() => {
+ originalLang = document.documentElement.lang;
+ document.documentElement.lang = 'es';
+ });
+
+ afterAll(() => {
+ document.documentElement.lang = originalLang;
+ });
+
+ it('formats a number using the correct thousands separator', () => {
+ expect(formatNumber(1000000)).toBe('1.000.000');
+ });
+
+ it('formats a number using the correct decimal separator', () => {
+ expect(formatNumber(12.345)).toBe('12,345');
+ });
+ });
});
describe('suffixFormatter', () => {
diff --git a/spec/frontend/logs/components/log_advanced_filters_spec.js b/spec/frontend/logs/components/log_advanced_filters_spec.js
index dfa8913a301..1fc8d943215 100644
--- a/spec/frontend/logs/components/log_advanced_filters_spec.js
+++ b/spec/frontend/logs/components/log_advanced_filters_spec.js
@@ -4,9 +4,8 @@ import { defaultTimeRange } from '~/vue_shared/constants';
import { convertToFixedRange } from '~/lib/utils/datetime_range';
import { createStore } from '~/logs/stores';
import { TOKEN_TYPE_POD_NAME } from '~/logs/constants';
-import { mockPods, mockSearch } from '../mock_data';
-
import LogAdvancedFilters from '~/logs/components/log_advanced_filters.vue';
+import { mockPods, mockSearch } from '../mock_data';
const module = 'environmentLogs';
diff --git a/spec/frontend/logs/components/log_simple_filters_spec.js b/spec/frontend/logs/components/log_simple_filters_spec.js
index 5bd42fd7dbc..5d2b22da3b5 100644
--- a/spec/frontend/logs/components/log_simple_filters_spec.js
+++ b/spec/frontend/logs/components/log_simple_filters_spec.js
@@ -1,9 +1,8 @@
import { GlDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { createStore } from '~/logs/stores';
-import { mockPods, mockPodName } from '../mock_data';
-
import LogSimpleFilters from '~/logs/components/log_simple_filters.vue';
+import { mockPods, mockPodName } from '../mock_data';
const module = 'environmentLogs';
diff --git a/spec/frontend/logs/stores/actions_spec.js b/spec/frontend/logs/stores/actions_spec.js
index bc58f1e677f..471b8603a94 100644
--- a/spec/frontend/logs/stores/actions_spec.js
+++ b/spec/frontend/logs/stores/actions_spec.js
@@ -19,6 +19,7 @@ import { defaultTimeRange } from '~/vue_shared/constants';
import axios from '~/lib/utils/axios_utils';
import { deprecatedCreateFlash as flash } from '~/flash';
+import { TOKEN_TYPE_POD_NAME } from '~/logs/constants';
import {
mockPodName,
mockEnvironmentsEndpoint,
@@ -34,7 +35,6 @@ import {
mockManagedApps,
mockManagedAppsEndpoint,
} from '../mock_data';
-import { TOKEN_TYPE_POD_NAME } from '~/logs/constants';
jest.mock('~/flash');
jest.mock('~/lib/utils/datetime_range');
diff --git a/spec/frontend/groups/members/components/app_spec.js b/spec/frontend/members/components/app_spec.js
index 9847dacbec8..8fac0f5c5e6 100644
--- a/spec/frontend/groups/members/components/app_spec.js
+++ b/spec/frontend/members/components/app_spec.js
@@ -2,13 +2,13 @@ import { shallowMount, createLocalVue } from '@vue/test-utils';
import { nextTick } from 'vue';
import Vuex from 'vuex';
import { GlAlert } from '@gitlab/ui';
-import App from '~/groups/members/components/app.vue';
+import MembersApp from '~/members/components/app.vue';
import FilterSortContainer from '~/members/components/filter_sort/filter_sort_container.vue';
import * as commonUtils from '~/lib/utils/common_utils';
import { RECEIVE_MEMBER_ROLE_ERROR, HIDE_ERROR } from '~/members/store/mutation_types';
import mutations from '~/members/store/mutations';
-describe('GroupMembersApp', () => {
+describe('MembersApp', () => {
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -25,7 +25,7 @@ describe('GroupMembersApp', () => {
mutations,
});
- wrapper = shallowMount(App, {
+ wrapper = shallowMount(MembersApp, {
localVue,
store,
...options,
@@ -48,7 +48,7 @@ describe('GroupMembersApp', () => {
it('renders and scrolls to error alert', async () => {
createComponent({ showError: false, errorMessage: '' });
- store.commit(RECEIVE_MEMBER_ROLE_ERROR);
+ store.commit(RECEIVE_MEMBER_ROLE_ERROR, { error: new Error('Network Error') });
await nextTick();
diff --git a/spec/frontend/members/components/avatars/group_avatar_spec.js b/spec/frontend/members/components/avatars/group_avatar_spec.js
index 658bb9462b0..2720181aeb8 100644
--- a/spec/frontend/members/components/avatars/group_avatar_spec.js
+++ b/spec/frontend/members/components/avatars/group_avatar_spec.js
@@ -1,8 +1,8 @@
import { mount, createWrapper } from '@vue/test-utils';
import { getByText as getByTextHelper } from '@testing-library/dom';
import { GlAvatarLink } from '@gitlab/ui';
-import { group as member } from '../../mock_data';
import GroupAvatar from '~/members/components/avatars/group_avatar.vue';
+import { group as member } from '../../mock_data';
describe('MemberList', () => {
let wrapper;
diff --git a/spec/frontend/members/components/avatars/invite_avatar_spec.js b/spec/frontend/members/components/avatars/invite_avatar_spec.js
index 13ee727528b..8ebfdf31127 100644
--- a/spec/frontend/members/components/avatars/invite_avatar_spec.js
+++ b/spec/frontend/members/components/avatars/invite_avatar_spec.js
@@ -1,7 +1,7 @@
import { mount, createWrapper } from '@vue/test-utils';
import { getByText as getByTextHelper } from '@testing-library/dom';
-import { invite as member } from '../../mock_data';
import InviteAvatar from '~/members/components/avatars/invite_avatar.vue';
+import { invite as member } from '../../mock_data';
describe('MemberList', () => {
let wrapper;
diff --git a/spec/frontend/members/components/avatars/user_avatar_spec.js b/spec/frontend/members/components/avatars/user_avatar_spec.js
index 411ec1a54de..d6305652616 100644
--- a/spec/frontend/members/components/avatars/user_avatar_spec.js
+++ b/spec/frontend/members/components/avatars/user_avatar_spec.js
@@ -1,8 +1,8 @@
import { mount, createWrapper } from '@vue/test-utils';
import { within } from '@testing-library/dom';
import { GlAvatarLink, GlBadge } from '@gitlab/ui';
-import { member as memberMock, orphanedMember } from '../../mock_data';
import UserAvatar from '~/members/components/avatars/user_avatar.vue';
+import { member as memberMock, orphanedMember } from '../../mock_data';
describe('UserAvatar', () => {
let wrapper;
diff --git a/spec/frontend/members/components/table/member_action_buttons_spec.js b/spec/frontend/members/components/table/member_action_buttons_spec.js
index b7a6df3d054..064c8403423 100644
--- a/spec/frontend/members/components/table/member_action_buttons_spec.js
+++ b/spec/frontend/members/components/table/member_action_buttons_spec.js
@@ -1,11 +1,11 @@
import { shallowMount } from '@vue/test-utils';
import { MEMBER_TYPES } from '~/members/constants';
-import { member as memberMock, group, invite, accessRequest } from '../../mock_data';
import MemberActionButtons from '~/members/components/table/member_action_buttons.vue';
import UserActionButtons from '~/members/components/action_buttons/user_action_buttons.vue';
import GroupActionButtons from '~/members/components/action_buttons/group_action_buttons.vue';
import InviteActionButtons from '~/members/components/action_buttons/invite_action_buttons.vue';
import AccessRequestActionButtons from '~/members/components/action_buttons/access_request_action_buttons.vue';
+import { member as memberMock, group, invite, accessRequest } from '../../mock_data';
describe('MemberActionButtons', () => {
let wrapper;
diff --git a/spec/frontend/members/components/table/member_avatar_spec.js b/spec/frontend/members/components/table/member_avatar_spec.js
index 4341dfbbaf9..a6f9f157532 100644
--- a/spec/frontend/members/components/table/member_avatar_spec.js
+++ b/spec/frontend/members/components/table/member_avatar_spec.js
@@ -1,10 +1,10 @@
import { shallowMount } from '@vue/test-utils';
import { MEMBER_TYPES } from '~/members/constants';
-import { member as memberMock, group, invite, accessRequest } from '../../mock_data';
import MemberAvatar from '~/members/components/table/member_avatar.vue';
import UserAvatar from '~/members/components/avatars/user_avatar.vue';
import GroupAvatar from '~/members/components/avatars/group_avatar.vue';
import InviteAvatar from '~/members/components/avatars/invite_avatar.vue';
+import { member as memberMock, group, invite, accessRequest } from '../../mock_data';
describe('MemberList', () => {
let wrapper;
diff --git a/spec/frontend/members/components/table/members_table_cell_spec.js b/spec/frontend/members/components/table/members_table_cell_spec.js
index 117c9255c00..69795a4d670 100644
--- a/spec/frontend/members/components/table/members_table_cell_spec.js
+++ b/spec/frontend/members/components/table/members_table_cell_spec.js
@@ -1,8 +1,15 @@
import { mount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import { MEMBER_TYPES } from '~/members/constants';
-import { member as memberMock, group, invite, accessRequest } from '../../mock_data';
import MembersTableCell from '~/members/components/table/members_table_cell.vue';
+import {
+ member as memberMock,
+ directMember,
+ inheritedMember,
+ group,
+ invite,
+ accessRequest,
+} from '../../mock_data';
describe('MembersTableCell', () => {
const WrappedComponent = {
@@ -31,7 +38,7 @@ describe('MembersTableCell', () => {
const localVue = createLocalVue();
localVue.use(Vuex);
- localVue.component('wrapped-component', WrappedComponent);
+ localVue.component('WrappedComponent', WrappedComponent);
const createStore = (state = {}) => {
return new Vuex.Store({
@@ -75,19 +82,12 @@ describe('MembersTableCell', () => {
const createComponentWithDirectMember = (member = {}) => {
createComponent({
- member: {
- ...memberMock,
- source: {
- ...memberMock.source,
- id: 1,
- },
- ...member,
- },
+ member: { ...directMember, ...member },
});
};
const createComponentWithInheritedMember = (member = {}) => {
createComponent({
- member: { ...memberMock, ...member },
+ member: { ...inheritedMember, ...member },
});
};
diff --git a/spec/frontend/members/components/table/members_table_spec.js b/spec/frontend/members/components/table/members_table_spec.js
index dbaccde069c..729b65f37aa 100644
--- a/spec/frontend/members/components/table/members_table_spec.js
+++ b/spec/frontend/members/components/table/members_table_spec.js
@@ -15,7 +15,7 @@ import RoleDropdown from '~/members/components/table/role_dropdown.vue';
import ExpirationDatepicker from '~/members/components/table/expiration_datepicker.vue';
import MemberActionButtons from '~/members/components/table/member_action_buttons.vue';
import * as initUserPopovers from '~/user_popovers';
-import { member as memberMock, invite, accessRequest } from '../../mock_data';
+import { member as memberMock, directMember, invite, accessRequest } from '../../mock_data';
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -74,11 +74,6 @@ describe('MembersTable', () => {
});
describe('fields', () => {
- const directMember = {
- ...memberMock,
- source: { ...memberMock.source, id: 1 },
- };
-
const memberCanUpdate = {
...directMember,
canUpdate: true,
@@ -154,7 +149,7 @@ describe('MembersTable', () => {
expect(findTableCellByMemberId('Actions', members[0].id).classes()).toStrictEqual([
'col-actions',
'gl-display-none!',
- 'gl-display-lg-table-cell!',
+ 'gl-lg-display-table-cell!',
]);
expect(findTableCellByMemberId('Actions', members[1].id).classes()).toStrictEqual([
'col-actions',
diff --git a/spec/frontend/members/components/table/role_dropdown_spec.js b/spec/frontend/members/components/table/role_dropdown_spec.js
index 96a388614f3..3cc52379026 100644
--- a/spec/frontend/members/components/table/role_dropdown_spec.js
+++ b/spec/frontend/members/components/table/role_dropdown_spec.js
@@ -6,6 +6,7 @@ import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
import { GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils';
import waitForPromises from 'helpers/wait_for_promises';
import RoleDropdown from '~/members/components/table/role_dropdown.vue';
+import { BV_DROPDOWN_SHOW } from '~/lib/utils/constants';
import { member } from '../../mock_data';
const localVue = createLocalVue();
@@ -67,7 +68,7 @@ describe('RoleDropdown', () => {
createComponent();
findDropdownToggle().trigger('click');
- wrapper.vm.$root.$on('bv::dropdown::shown', () => {
+ wrapper.vm.$root.$on(BV_DROPDOWN_SHOW, () => {
done();
});
});
diff --git a/spec/frontend/groups/members/index_spec.js b/spec/frontend/members/index_spec.js
index 5c717e53229..d73e3b03f07 100644
--- a/spec/frontend/groups/members/index_spec.js
+++ b/spec/frontend/members/index_spec.js
@@ -1,15 +1,15 @@
import { createWrapper } from '@vue/test-utils';
-import { initGroupMembersApp } from '~/groups/members';
-import GroupMembersApp from '~/groups/members/components/app.vue';
-import { membersJsonString, membersParsed } from './mock_data';
+import { initMembersApp } from '~/members/index';
+import MembersApp from '~/members/components/app.vue';
+import { membersJsonString, members } from './mock_data';
-describe('initGroupMembersApp', () => {
+describe('initMembersApp', () => {
let el;
let vm;
let wrapper;
const setup = () => {
- vm = initGroupMembersApp(el, {
+ vm = initMembersApp(el, {
tableFields: ['account'],
tableAttrs: { table: { 'data-qa-selector': 'members_list' } },
tableSortableFields: ['account'],
@@ -22,7 +22,7 @@ describe('initGroupMembersApp', () => {
beforeEach(() => {
el = document.createElement('div');
el.setAttribute('data-members', membersJsonString);
- el.setAttribute('data-group-id', '234');
+ el.setAttribute('data-source-id', '234');
el.setAttribute('data-can-manage-members', 'true');
el.setAttribute('data-member-path', '/groups/foo-bar/-/group_members/:id');
@@ -36,10 +36,10 @@ describe('initGroupMembersApp', () => {
wrapper = null;
});
- it('renders `GroupMembersApp`', () => {
+ it('renders `MembersApp`', () => {
setup();
- expect(wrapper.find(GroupMembersApp).exists()).toBe(true);
+ expect(wrapper.find(MembersApp).exists()).toBe(true);
});
it('sets `currentUserId` in Vuex store', () => {
@@ -57,7 +57,7 @@ describe('initGroupMembersApp', () => {
});
});
- it('parses and sets `data-group-id` as `sourceId` in Vuex store', () => {
+ it('parses and sets `data-source-id` as `sourceId` in Vuex store', () => {
setup();
expect(vm.$store.state.sourceId).toBe(234);
@@ -72,7 +72,7 @@ describe('initGroupMembersApp', () => {
it('parses and sets `members` in Vuex store', () => {
setup();
- expect(vm.$store.state.members).toEqual(membersParsed);
+ expect(vm.$store.state.members).toEqual(members);
});
it('sets `tableFields` in Vuex store', () => {
diff --git a/spec/frontend/members/mock_data.js b/spec/frontend/members/mock_data.js
index e668f2a1998..fa324ce1cf9 100644
--- a/spec/frontend/members/mock_data.js
+++ b/spec/frontend/members/mock_data.js
@@ -4,6 +4,7 @@ export const member = {
canRemove: false,
canOverride: false,
isOverridden: false,
+ isDirectMember: false,
accessLevel: { integerValue: 50, stringValue: 'Owner' },
source: {
id: 178,
@@ -69,3 +70,8 @@ export const accessRequest = {
};
export const members = [member];
+
+export const membersJsonString = JSON.stringify(members);
+
+export const directMember = { ...member, isDirectMember: true };
+export const inheritedMember = { ...member, isDirectMember: false };
diff --git a/spec/frontend/members/store/actions_spec.js b/spec/frontend/members/store/actions_spec.js
index 5424fee0750..b0f863e6163 100644
--- a/spec/frontend/members/store/actions_spec.js
+++ b/spec/frontend/members/store/actions_spec.js
@@ -57,15 +57,17 @@ describe('Vuex members actions', () => {
describe('unsuccessful request', () => {
it(`commits ${types.RECEIVE_MEMBER_ROLE_ERROR} mutation and throws error`, async () => {
- mock.onPut().networkError();
+ const error = new Error('Network Error');
+ mock.onPut().reply(() => Promise.reject(error));
await expect(
testAction(updateMemberRole, payload, state, [
{
type: types.RECEIVE_MEMBER_ROLE_ERROR,
+ payload: { error },
},
]),
- ).rejects.toThrowError(new Error('Network Error'));
+ ).rejects.toThrowError(error);
});
});
});
@@ -108,15 +110,17 @@ describe('Vuex members actions', () => {
describe('unsuccessful request', () => {
it(`commits ${types.RECEIVE_MEMBER_EXPIRATION_ERROR} mutation and throws error`, async () => {
- mock.onPut().networkError();
+ const error = new Error('Network Error');
+ mock.onPut().reply(() => Promise.reject(error));
await expect(
testAction(updateMemberExpiration, { memberId, expiresAt }, state, [
{
type: types.RECEIVE_MEMBER_EXPIRATION_ERROR,
+ payload: { error },
},
]),
- ).rejects.toThrowError(new Error('Network Error'));
+ ).rejects.toThrowError(error);
});
});
});
diff --git a/spec/frontend/members/store/mutations_spec.js b/spec/frontend/members/store/mutations_spec.js
index 488bfdf15fd..a56e6a72d9e 100644
--- a/spec/frontend/members/store/mutations_spec.js
+++ b/spec/frontend/members/store/mutations_spec.js
@@ -28,13 +28,33 @@ describe('Vuex members mutations', () => {
});
describe(types.RECEIVE_MEMBER_ROLE_ERROR, () => {
- it('shows error message', () => {
- mutations[types.RECEIVE_MEMBER_ROLE_ERROR](state);
+ describe('when error does not have a message', () => {
+ it('shows default error message', () => {
+ mutations[types.RECEIVE_MEMBER_ROLE_ERROR](state, {
+ error: new Error('Network Error'),
+ });
+
+ expect(state.showError).toBe(true);
+ expect(state.errorMessage).toBe(
+ "An error occurred while updating the member's role, please try again.",
+ );
+ });
+ });
+
+ describe('when error has a message', () => {
+ it('shows error message', () => {
+ const error = new Error('Request failed with status code 422');
+ const message =
+ 'User email "john.smith@gmail.com" does not match the allowed domain of example.com';
- expect(state.showError).toBe(true);
- expect(state.errorMessage).toBe(
- "An error occurred while updating the member's role, please try again.",
- );
+ error.response = {
+ data: { message },
+ };
+ mutations[types.RECEIVE_MEMBER_ROLE_ERROR](state, { error });
+
+ expect(state.showError).toBe(true);
+ expect(state.errorMessage).toBe(message);
+ });
});
});
@@ -52,13 +72,33 @@ describe('Vuex members mutations', () => {
});
describe(types.RECEIVE_MEMBER_EXPIRATION_ERROR, () => {
- it('shows error message', () => {
- mutations[types.RECEIVE_MEMBER_EXPIRATION_ERROR](state);
+ describe('when error does not have a message', () => {
+ it('shows default error message', () => {
+ mutations[types.RECEIVE_MEMBER_EXPIRATION_ERROR](state, {
+ error: new Error('Network Error'),
+ });
+
+ expect(state.showError).toBe(true);
+ expect(state.errorMessage).toBe(
+ "An error occurred while updating the member's expiration date, please try again.",
+ );
+ });
+ });
+
+ describe('when error has a message', () => {
+ it('shows error message', () => {
+ const error = new Error('Request failed with status code 422');
+ const message =
+ 'User email "john.smith@gmail.com" does not match the allowed domain of example.com';
- expect(state.showError).toBe(true);
- expect(state.errorMessage).toBe(
- "An error occurred while updating the member's expiration date, please try again.",
- );
+ error.response = {
+ data: { message },
+ };
+ mutations[types.RECEIVE_MEMBER_EXPIRATION_ERROR](state, { error });
+
+ expect(state.showError).toBe(true);
+ expect(state.errorMessage).toBe(message);
+ });
});
});
});
diff --git a/spec/frontend/members/utils_spec.js b/spec/frontend/members/utils_spec.js
index 7cd4e735b55..5c9484fe128 100644
--- a/spec/frontend/members/utils_spec.js
+++ b/spec/frontend/members/utils_spec.js
@@ -9,12 +9,20 @@ import {
canOverride,
parseSortParam,
buildSortHref,
+ parseDataAttributes,
+ groupLinkRequestFormatter,
} from '~/members/utils';
import { DEFAULT_SORT } from '~/members/constants';
-import { member as memberMock, group, invite } from './mock_data';
+import {
+ member as memberMock,
+ directMember,
+ inheritedMember,
+ group,
+ invite,
+ membersJsonString,
+ members,
+} from './mock_data';
-const DIRECT_MEMBER_ID = 178;
-const INHERITED_MEMBER_ID = 179;
const IS_CURRENT_USER_ID = 123;
const IS_NOT_CURRENT_USER_ID = 124;
const URL_HOST = 'https://localhost/';
@@ -57,11 +65,11 @@ describe('Members Utils', () => {
describe('isDirectMember', () => {
test.each`
- sourceId | expected
- ${DIRECT_MEMBER_ID} | ${true}
- ${INHERITED_MEMBER_ID} | ${false}
- `('returns $expected', ({ sourceId, expected }) => {
- expect(isDirectMember(memberMock, sourceId)).toBe(expected);
+ member | expected
+ ${directMember} | ${true}
+ ${inheritedMember} | ${false}
+ `('returns $expected', ({ member, expected }) => {
+ expect(isDirectMember(member)).toBe(expected);
});
});
@@ -76,18 +84,13 @@ describe('Members Utils', () => {
});
describe('canRemove', () => {
- const memberCanRemove = {
- ...memberMock,
- canRemove: true,
- };
-
test.each`
- member | sourceId | expected
- ${memberCanRemove} | ${DIRECT_MEMBER_ID} | ${true}
- ${memberCanRemove} | ${INHERITED_MEMBER_ID} | ${false}
- ${memberMock} | ${INHERITED_MEMBER_ID} | ${false}
- `('returns $expected', ({ member, sourceId, expected }) => {
- expect(canRemove(member, sourceId)).toBe(expected);
+ member | expected
+ ${{ ...directMember, canRemove: true }} | ${true}
+ ${{ ...inheritedMember, canRemove: true }} | ${false}
+ ${{ ...memberMock, canRemove: false }} | ${false}
+ `('returns $expected', ({ member, expected }) => {
+ expect(canRemove(member)).toBe(expected);
});
});
@@ -96,25 +99,20 @@ describe('Members Utils', () => {
member | expected
${invite} | ${true}
${{ ...invite, invite: { ...invite.invite, canResend: false } }} | ${false}
- `('returns $expected', ({ member, sourceId, expected }) => {
- expect(canResend(member, sourceId)).toBe(expected);
+ `('returns $expected', ({ member, expected }) => {
+ expect(canResend(member)).toBe(expected);
});
});
describe('canUpdate', () => {
- const memberCanUpdate = {
- ...memberMock,
- canUpdate: true,
- };
-
test.each`
- member | currentUserId | sourceId | expected
- ${memberCanUpdate} | ${IS_NOT_CURRENT_USER_ID} | ${DIRECT_MEMBER_ID} | ${true}
- ${memberCanUpdate} | ${IS_CURRENT_USER_ID} | ${DIRECT_MEMBER_ID} | ${false}
- ${memberCanUpdate} | ${IS_CURRENT_USER_ID} | ${INHERITED_MEMBER_ID} | ${false}
- ${memberMock} | ${IS_NOT_CURRENT_USER_ID} | ${DIRECT_MEMBER_ID} | ${false}
- `('returns $expected', ({ member, currentUserId, sourceId, expected }) => {
- expect(canUpdate(member, currentUserId, sourceId)).toBe(expected);
+ member | currentUserId | expected
+ ${{ ...directMember, canUpdate: true }} | ${IS_NOT_CURRENT_USER_ID} | ${true}
+ ${{ ...directMember, canUpdate: true }} | ${IS_CURRENT_USER_ID} | ${false}
+ ${{ ...inheritedMember, canUpdate: true }} | ${IS_CURRENT_USER_ID} | ${false}
+ ${{ ...directMember, canUpdate: false }} | ${IS_NOT_CURRENT_USER_ID} | ${false}
+ `('returns $expected', ({ member, currentUserId, expected }) => {
+ expect(canUpdate(member, currentUserId)).toBe(expected);
});
});
@@ -229,4 +227,38 @@ describe('Members Utils', () => {
});
});
});
+
+ describe('parseDataAttributes', () => {
+ let el;
+
+ beforeEach(() => {
+ el = document.createElement('div');
+ el.setAttribute('data-members', membersJsonString);
+ el.setAttribute('data-source-id', '234');
+ el.setAttribute('data-can-manage-members', 'true');
+ });
+
+ afterEach(() => {
+ el = null;
+ });
+
+ it('correctly parses the data attributes', () => {
+ expect(parseDataAttributes(el)).toEqual({
+ members,
+ sourceId: 234,
+ canManageMembers: true,
+ });
+ });
+ });
+
+ describe('groupLinkRequestFormatter', () => {
+ it('returns expected format', () => {
+ expect(
+ groupLinkRequestFormatter({
+ accessLevel: 50,
+ expires_at: '2020-10-16',
+ }),
+ ).toEqual({ group_link: { group_access: 50, expires_at: '2020-10-16' } });
+ });
+ });
});
diff --git a/spec/frontend/merge_request/components/status_box_spec.js b/spec/frontend/merge_request/components/status_box_spec.js
index e6b6512476b..e7623aed4b3 100644
--- a/spec/frontend/merge_request/components/status_box_spec.js
+++ b/spec/frontend/merge_request/components/status_box_spec.js
@@ -18,6 +18,12 @@ const testCases = [
icon: 'issue-open-m',
},
{
+ name: 'Open',
+ state: 'locked',
+ class: 'status-box-open',
+ icon: 'issue-open-m',
+ },
+ {
name: 'Closed',
state: 'closed',
class: 'status-box-mr-closed',
diff --git a/spec/frontend/milestones/milestone_combobox_spec.js b/spec/frontend/milestones/milestone_combobox_spec.js
index 8c519abe382..b0061189a48 100644
--- a/spec/frontend/milestones/milestone_combobox_spec.js
+++ b/spec/frontend/milestones/milestone_combobox_spec.js
@@ -6,8 +6,8 @@ import MockAdapter from 'axios-mock-adapter';
import { GlLoadingIcon, GlSearchBoxByType, GlDropdownItem } from '@gitlab/ui';
import { ENTER_KEY } from '~/lib/utils/keys';
import MilestoneCombobox from '~/milestones/components/milestone_combobox.vue';
-import { projectMilestones, groupMilestones } from './mock_data';
import createStore from '~/milestones/stores/';
+import { projectMilestones, groupMilestones } from './mock_data';
const extraLinks = [
{ text: 'Create new', url: 'http://127.0.0.1:3000/h5bp/html5-boilerplate/-/milestones/new' },
diff --git a/spec/frontend/monitoring/components/charts/anomaly_spec.js b/spec/frontend/monitoring/components/charts/anomaly_spec.js
index dad3003d536..00e843722d6 100644
--- a/spec/frontend/monitoring/components/charts/anomaly_spec.js
+++ b/spec/frontend/monitoring/components/charts/anomaly_spec.js
@@ -3,9 +3,9 @@ import { TEST_HOST } from 'helpers/test_constants';
import Anomaly from '~/monitoring/components/charts/anomaly.vue';
import { colorValues } from '~/monitoring/constants';
+import MonitorTimeSeriesChart from '~/monitoring/components/charts/time_series.vue';
import { anomalyDeploymentData, mockProjectDir } from '../../mock_data';
import { anomalyGraphData } from '../../graph_data';
-import MonitorTimeSeriesChart from '~/monitoring/components/charts/time_series.vue';
const mockProjectPath = `${TEST_HOST}${mockProjectDir}`;
diff --git a/spec/frontend/monitoring/components/charts/single_stat_spec.js b/spec/frontend/monitoring/components/charts/single_stat_spec.js
index 37712eb3012..144a4e0c968 100644
--- a/spec/frontend/monitoring/components/charts/single_stat_spec.js
+++ b/spec/frontend/monitoring/components/charts/single_stat_spec.js
@@ -27,8 +27,12 @@ describe('Single Stat Chart component', () => {
describe('computed', () => {
describe('statValue', () => {
- it('should interpolate the value and unit props', () => {
- expect(findChart().props('value')).toBe('1.00MB');
+ it('should display the correct value', () => {
+ expect(findChart().props('value')).toBe('1.00');
+ });
+
+ it('should display the correct value unit', () => {
+ expect(findChart().props('unit')).toBe('MB');
});
it('should change the value representation to a percentile one', () => {
@@ -36,7 +40,8 @@ describe('Single Stat Chart component', () => {
graphData: singleStatGraphData({ max_value: 120 }, { value: 91 }),
});
- expect(findChart().props('value')).toContain('75.83%');
+ expect(findChart().props('value')).toBe('75.83');
+ expect(findChart().props('unit')).toBe('%');
});
it('should display NaN for non numeric maxValue values', () => {
diff --git a/spec/frontend/monitoring/components/charts/time_series_spec.js b/spec/frontend/monitoring/components/charts/time_series_spec.js
index b7e1cb91987..89b7e82e84a 100644
--- a/spec/frontend/monitoring/components/charts/time_series_spec.js
+++ b/spec/frontend/monitoring/components/charts/time_series_spec.js
@@ -1,14 +1,14 @@
import { mount, shallowMount } from '@vue/test-utils';
-import { setTestTimeout } from 'helpers/timeout';
import timezoneMock from 'timezone-mock';
import { GlLink } from '@gitlab/ui';
-import { TEST_HOST } from 'helpers/test_constants';
import {
GlAreaChart,
GlLineChart,
GlChartSeriesLabel,
GlChartLegend,
} from '@gitlab/ui/dist/charts';
+import { TEST_HOST } from 'helpers/test_constants';
+import { setTestTimeout } from 'helpers/timeout';
import { shallowWrapperContainsSlotText } from 'helpers/vue_test_utils_helper';
import { panelTypes, chartHeight } from '~/monitoring/constants';
import TimeSeries from '~/monitoring/components/charts/time_series.vue';
diff --git a/spec/frontend/monitoring/components/dashboard_actions_menu_spec.js b/spec/frontend/monitoring/components/dashboard_actions_menu_spec.js
index 43d5937a3a1..7a2f4a3de9a 100644
--- a/spec/frontend/monitoring/components/dashboard_actions_menu_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_actions_menu_spec.js
@@ -2,13 +2,13 @@ import { shallowMount } from '@vue/test-utils';
import { GlDropdownItem } from '@gitlab/ui';
import { createStore } from '~/monitoring/stores';
import { DASHBOARD_PAGE, PANEL_NEW_PAGE } from '~/monitoring/router/constants';
-import { setupAllDashboards, setupStoreWithData } from '../store_utils';
import { redirectTo } from '~/lib/utils/url_utility';
import Tracking from '~/tracking';
import ActionsMenu from '~/monitoring/components/dashboard_actions_menu.vue';
import CustomMetricsFormFields from '~/custom_metrics/components/custom_metrics_form_fields.vue';
-import { dashboardActionsMenuProps, dashboardGitResponse } from '../mock_data';
import * as types from '~/monitoring/stores/mutation_types';
+import { dashboardActionsMenuProps, dashboardGitResponse } from '../mock_data';
+import { setupAllDashboards, setupStoreWithData } from '../store_utils';
jest.mock('~/lib/utils/url_utility', () => ({
redirectTo: jest.fn(),
diff --git a/spec/frontend/monitoring/components/dashboard_header_spec.js b/spec/frontend/monitoring/components/dashboard_header_spec.js
index 32fd9c45e8d..e431b17a965 100644
--- a/spec/frontend/monitoring/components/dashboard_header_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_header_spec.js
@@ -7,6 +7,7 @@ import RefreshButton from '~/monitoring/components/refresh_button.vue';
import DashboardHeader from '~/monitoring/components/dashboard_header.vue';
import DashboardsDropdown from '~/monitoring/components/dashboards_dropdown.vue';
import ActionsMenu from '~/monitoring/components/dashboard_actions_menu.vue';
+import { redirectTo } from '~/lib/utils/url_utility';
import { setupAllDashboards, setupStoreWithDashboard, setupStoreWithData } from '../store_utils';
import {
environmentData,
@@ -14,7 +15,6 @@ import {
selfMonitoringDashboardGitResponse,
dashboardHeaderProps,
} from '../mock_data';
-import { redirectTo } from '~/lib/utils/url_utility';
const mockProjectPath = 'https://path/to/project';
diff --git a/spec/frontend/monitoring/components/dashboard_panel_builder_spec.js b/spec/frontend/monitoring/components/dashboard_panel_builder_spec.js
index 08c69701bd2..37939a11c4d 100644
--- a/spec/frontend/monitoring/components/dashboard_panel_builder_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_panel_builder_spec.js
@@ -3,11 +3,10 @@ import { GlCard, GlForm, GlFormTextarea, GlAlert } from '@gitlab/ui';
import { createStore } from '~/monitoring/stores';
import DashboardPanel from '~/monitoring/components/dashboard_panel.vue';
import * as types from '~/monitoring/stores/mutation_types';
-import { metricsDashboardResponse } from '../fixture_data';
-import { mockTimeRange } from '../mock_data';
-
import DashboardPanelBuilder from '~/monitoring/components/dashboard_panel_builder.vue';
import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
+import { metricsDashboardResponse } from '../fixture_data';
+import { mockTimeRange } from '../mock_data';
const mockPanel = metricsDashboardResponse.dashboard.panel_groups[0].panels[0];
diff --git a/spec/frontend/monitoring/components/dashboard_panel_spec.js b/spec/frontend/monitoring/components/dashboard_panel_spec.js
index f64e05d3a2c..b3257f66cc5 100644
--- a/spec/frontend/monitoring/components/dashboard_panel_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_panel_spec.js
@@ -1,28 +1,13 @@
import Vuex from 'vuex';
import { shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
-import { setTestTimeout } from 'helpers/timeout';
import { GlDropdownItem } from '@gitlab/ui';
+import { setTestTimeout } from 'helpers/timeout';
import invalidUrl from '~/lib/utils/invalid_url';
import axios from '~/lib/utils/axios_utils';
import AlertWidget from '~/monitoring/components/alert_widget.vue';
import DashboardPanel from '~/monitoring/components/dashboard_panel.vue';
-import {
- mockAlert,
- mockLogsHref,
- mockLogsPath,
- mockNamespace,
- mockNamespacedData,
- mockTimeRange,
-} from '../mock_data';
-import { dashboardProps, graphData, graphDataEmpty } from '../fixture_data';
-import {
- anomalyGraphData,
- singleStatGraphData,
- heatmapGraphData,
- barGraphData,
-} from '../graph_data';
import { panelTypes } from '~/monitoring/constants';
@@ -37,6 +22,21 @@ import MonitorStackedColumnChart from '~/monitoring/components/charts/stacked_co
import { createStore, monitoringDashboard } from '~/monitoring/stores';
import { createStore as createEmbedGroupStore } from '~/monitoring/stores/embed_group';
+import {
+ anomalyGraphData,
+ singleStatGraphData,
+ heatmapGraphData,
+ barGraphData,
+} from '../graph_data';
+import { dashboardProps, graphData, graphDataEmpty } from '../fixture_data';
+import {
+ mockAlert,
+ mockLogsHref,
+ mockLogsPath,
+ mockNamespace,
+ mockNamespacedData,
+ mockTimeRange,
+} from '../mock_data';
const mocks = {
$toast: {
diff --git a/spec/frontend/monitoring/components/dashboard_spec.js b/spec/frontend/monitoring/components/dashboard_spec.js
index db35f1cdde3..062a87c4091 100644
--- a/spec/frontend/monitoring/components/dashboard_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_spec.js
@@ -16,6 +16,7 @@ import GraphGroup from '~/monitoring/components/graph_group.vue';
import LinksSection from '~/monitoring/components/links_section.vue';
import { createStore } from '~/monitoring/stores';
import * as types from '~/monitoring/stores/mutation_types';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
import {
setupAllDashboards,
setupStoreWithDashboard,
@@ -30,7 +31,6 @@ import {
metricsDashboardPanelCount,
dashboardProps,
} from '../fixture_data';
-import { deprecatedCreateFlash as createFlash } from '~/flash';
jest.mock('~/flash');
diff --git a/spec/frontend/monitoring/components/dashboard_url_time_spec.js b/spec/frontend/monitoring/components/dashboard_url_time_spec.js
index c4630bde32f..40610e5f542 100644
--- a/spec/frontend/monitoring/components/dashboard_url_time_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_url_time_spec.js
@@ -9,13 +9,13 @@ import {
updateHistory,
} from '~/lib/utils/url_utility';
import axios from '~/lib/utils/axios_utils';
-import { mockProjectDir } from '../mock_data';
-import { dashboardProps } from '../fixture_data';
import Dashboard from '~/monitoring/components/dashboard.vue';
import DashboardHeader from '~/monitoring/components/dashboard_header.vue';
import { createStore } from '~/monitoring/stores';
import { defaultTimeRange } from '~/vue_shared/constants';
+import { dashboardProps } from '../fixture_data';
+import { mockProjectDir } from '../mock_data';
jest.mock('~/flash');
jest.mock('~/lib/utils/url_utility');
diff --git a/spec/frontend/monitoring/csv_export_spec.js b/spec/frontend/monitoring/csv_export_spec.js
index eb2a6e40243..42d19c21a7b 100644
--- a/spec/frontend/monitoring/csv_export_spec.js
+++ b/spec/frontend/monitoring/csv_export_spec.js
@@ -1,5 +1,5 @@
-import { timeSeriesGraphData } from './graph_data';
import { graphDataToCsv } from '~/monitoring/csv_export';
+import { timeSeriesGraphData } from './graph_data';
describe('monitoring export_csv', () => {
describe('graphDataToCsv', () => {
diff --git a/spec/frontend/monitoring/mock_data.js b/spec/frontend/monitoring/mock_data.js
index ca06c96c7d6..29a7c86491d 100644
--- a/spec/frontend/monitoring/mock_data.js
+++ b/spec/frontend/monitoring/mock_data.js
@@ -1,6 +1,6 @@
// The path below needs to be relative because we import the mock-data to karma
-import { TEST_HOST } from '../__helpers__/test_constants';
import invalidUrl from '~/lib/utils/invalid_url';
+import { TEST_HOST } from '../__helpers__/test_constants';
// This import path needs to be relative for now because this mock data is used in
// Karma specs too, where the helpers/test_constants alias can not be resolved
diff --git a/spec/frontend/monitoring/requests/index_spec.js b/spec/frontend/monitoring/requests/index_spec.js
index 078de5f15d1..b590fe749e0 100644
--- a/spec/frontend/monitoring/requests/index_spec.js
+++ b/spec/frontend/monitoring/requests/index_spec.js
@@ -3,8 +3,8 @@ import { backoffMockImplementation } from 'helpers/backoff_helper';
import axios from '~/lib/utils/axios_utils';
import statusCodes from '~/lib/utils/http_status';
import * as commonUtils from '~/lib/utils/common_utils';
-import { metricsDashboardResponse } from '../fixture_data';
import { getDashboard, getPrometheusQueryData } from '~/monitoring/requests';
+import { metricsDashboardResponse } from '../fixture_data';
describe('monitoring metrics_requests', () => {
let mock;
diff --git a/spec/frontend/monitoring/store/utils_spec.js b/spec/frontend/monitoring/store/utils_spec.js
index 58bb87cb332..2221909583c 100644
--- a/spec/frontend/monitoring/store/utils_spec.js
+++ b/spec/frontend/monitoring/store/utils_spec.js
@@ -11,8 +11,8 @@ import {
normalizeCustomDashboardPath,
} from '~/monitoring/stores/utils';
import * as urlUtils from '~/lib/utils/url_utility';
-import { annotationsData } from '../mock_data';
import { NOT_IN_DB_PREFIX } from '~/monitoring/constants';
+import { annotationsData } from '../mock_data';
const projectPath = 'gitlab-org/gitlab-test';
diff --git a/spec/frontend/monitoring/store/variable_mapping_spec.js b/spec/frontend/monitoring/store/variable_mapping_spec.js
index de124b0313c..85e87f361b5 100644
--- a/spec/frontend/monitoring/store/variable_mapping_spec.js
+++ b/spec/frontend/monitoring/store/variable_mapping_spec.js
@@ -3,13 +3,13 @@ import {
mergeURLVariables,
optionsFromSeriesData,
} from '~/monitoring/stores/variable_mapping';
+import * as urlUtils from '~/lib/utils/url_utility';
import {
templatingVariablesExamples,
storeTextVariables,
storeCustomVariables,
storeMetricLabelValuesVariables,
} from '../mock_data';
-import * as urlUtils from '~/lib/utils/url_utility';
describe('Monitoring variable mapping', () => {
describe('parseTemplatingVariables', () => {
diff --git a/spec/frontend/notes/components/comment_form_spec.js b/spec/frontend/notes/components/comment_form_spec.js
index 002c4f206cb..8394d1d0ab2 100644
--- a/spec/frontend/notes/components/comment_form_spec.js
+++ b/spec/frontend/notes/components/comment_form_spec.js
@@ -9,7 +9,6 @@ import CommentForm from '~/notes/components/comment_form.vue';
import * as constants from '~/notes/constants';
import eventHub from '~/notes/event_hub';
import { refreshUserMergeRequestCounts } from '~/commons/nav/user_merge_requests';
-import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue';
import { loggedOutnoteableData, notesDataMock, userDataMock, noteableDataMock } from '../mock_data';
jest.mock('autosize');
@@ -64,14 +63,6 @@ describe('issue_comment_form component', () => {
});
describe('user is logged in', () => {
- describe('avatar', () => {
- it('should render user avatar with link', () => {
- mountComponent({ mountFunction: mount });
-
- expect(wrapper.find(UserAvatarLink).attributes('href')).toBe(userDataMock.path);
- });
- });
-
describe('handleSave', () => {
it('should request to save note when note is entered', () => {
mountComponent({ mountFunction: mount, initialData: { note: 'hello world' } });
diff --git a/spec/frontend/notes/components/discussion_actions_spec.js b/spec/frontend/notes/components/discussion_actions_spec.js
index 48e569720e9..03e5842bb0f 100644
--- a/spec/frontend/notes/components/discussion_actions_spec.js
+++ b/spec/frontend/notes/components/discussion_actions_spec.js
@@ -1,10 +1,10 @@
import { shallowMount, mount } from '@vue/test-utils';
-import { discussionMock } from '../mock_data';
import DiscussionActions from '~/notes/components/discussion_actions.vue';
import ReplyPlaceholder from '~/notes/components/discussion_reply_placeholder.vue';
import ResolveDiscussionButton from '~/notes/components/discussion_resolve_button.vue';
import ResolveWithIssueButton from '~/notes/components/discussion_resolve_with_issue_button.vue';
import createStore from '~/notes/stores';
+import { discussionMock } from '../mock_data';
// NOTE: clone mock_data so that it is not accidentally mutated
const createDiscussionMock = (props = {}) =>
diff --git a/spec/frontend/notes/components/discussion_counter_spec.js b/spec/frontend/notes/components/discussion_counter_spec.js
index ebf7d52f38b..ca1972c9f61 100644
--- a/spec/frontend/notes/components/discussion_counter_spec.js
+++ b/spec/frontend/notes/components/discussion_counter_spec.js
@@ -3,8 +3,8 @@ import { shallowMount, createLocalVue } from '@vue/test-utils';
import { GlButton } from '@gitlab/ui';
import notesModule from '~/notes/stores/modules';
import DiscussionCounter from '~/notes/components/discussion_counter.vue';
-import { noteableDataMock, discussionMock, notesDataMock, userDataMock } from '../mock_data';
import * as types from '~/notes/stores/mutation_types';
+import { noteableDataMock, discussionMock, notesDataMock, userDataMock } from '../mock_data';
describe('DiscussionCounter component', () => {
let store;
diff --git a/spec/frontend/notes/components/note_actions_spec.js b/spec/frontend/notes/components/note_actions_spec.js
index 3cfc1445cb8..7c3d747711c 100644
--- a/spec/frontend/notes/components/note_actions_spec.js
+++ b/spec/frontend/notes/components/note_actions_spec.js
@@ -1,11 +1,12 @@
import Vue from 'vue';
import { mount, createLocalVue, createWrapper } from '@vue/test-utils';
-import { TEST_HOST } from 'spec/test_constants';
import AxiosMockAdapter from 'axios-mock-adapter';
+import { TEST_HOST } from 'spec/test_constants';
import createStore from '~/notes/stores';
import noteActions from '~/notes/components/note_actions.vue';
-import { userDataMock } from '../mock_data';
import axios from '~/lib/utils/axios_utils';
+import { BV_HIDE_TOOLTIP } from '~/lib/utils/constants';
+import { userDataMock } from '../mock_data';
describe('noteActions', () => {
let wrapper;
@@ -135,7 +136,7 @@ describe('noteActions', () => {
.then(() => {
const emitted = Object.keys(rootWrapper.emitted());
- expect(emitted).toEqual(['bv::hide::tooltip']);
+ expect(emitted).toEqual([BV_HIDE_TOOLTIP]);
done();
})
.catch(done.fail);
diff --git a/spec/frontend/notes/components/note_form_spec.js b/spec/frontend/notes/components/note_form_spec.js
index e64a75bede9..2ef99297540 100644
--- a/spec/frontend/notes/components/note_form_spec.js
+++ b/spec/frontend/notes/components/note_form_spec.js
@@ -4,9 +4,8 @@ import createStore from '~/notes/stores';
import NoteForm from '~/notes/components/note_form.vue';
import batchComments from '~/batch_comments/stores/modules/batch_comments';
import MarkdownField from '~/vue_shared/components/markdown/field.vue';
-import { noteableDataMock, notesDataMock, discussionMock } from '../mock_data';
-
import { getDraft, updateDraft } from '~/lib/utils/autosave';
+import { noteableDataMock, notesDataMock, discussionMock } from '../mock_data';
jest.mock('~/lib/utils/autosave');
@@ -25,6 +24,8 @@ describe('issue_note_form component', () => {
});
};
+ const findCancelButton = () => wrapper.find('[data-testid="cancel"]');
+
beforeEach(() => {
getDraft.mockImplementation((key) => {
if (key === dummyAutosaveKey) {
@@ -160,8 +161,8 @@ describe('issue_note_form component', () => {
});
await nextTick();
- const cancelButton = wrapper.find('[data-testid="cancel"]');
- cancelButton.trigger('click');
+ const cancelButton = findCancelButton();
+ cancelButton.vm.$emit('click');
await nextTick();
expect(wrapper.emitted().cancelForm).toHaveLength(1);
@@ -177,7 +178,7 @@ describe('issue_note_form component', () => {
const textarea = wrapper.find('textarea');
textarea.setValue('Foo');
const saveButton = wrapper.find('.js-vue-issue-save');
- saveButton.trigger('click');
+ saveButton.vm.$emit('click');
expect(wrapper.vm.isSubmitting).toBe(true);
});
@@ -272,7 +273,7 @@ describe('issue_note_form component', () => {
await nextTick();
const cancelButton = wrapper.find('[data-testid="cancelBatchCommentsEnabled"]');
- cancelButton.trigger('click');
+ cancelButton.vm.$emit('click');
expect(wrapper.vm.cancelHandler).toHaveBeenCalledWith(true);
});
@@ -302,16 +303,16 @@ describe('issue_note_form component', () => {
expect(wrapper.find('.js-resolve-checkbox').exists()).toBe(false);
});
- it('hides actions for commits', () => {
+ it('hides actions for commits', async () => {
wrapper.setProps({ discussion: { for_commit: true } });
- return nextTick(() => {
- expect(wrapper.find('.note-form-actions').text()).not.toContain('Start a review');
- });
+ await nextTick();
+
+ expect(wrapper.find('.note-form-actions').text()).not.toContain('Start a review');
});
describe('on enter', () => {
- it('should start review or add to review when cmd+enter is pressed', () => {
+ it('should start review or add to review when cmd+enter is pressed', async () => {
const textarea = wrapper.find('textarea');
jest.spyOn(wrapper.vm, 'handleAddToReview');
@@ -319,9 +320,8 @@ describe('issue_note_form component', () => {
textarea.setValue('Foo');
textarea.trigger('keydown.enter', { metaKey: true });
- return nextTick(() => {
- expect(wrapper.vm.handleAddToReview).toHaveBeenCalled();
- });
+ await nextTick();
+ expect(wrapper.vm.handleAddToReview).toHaveBeenCalled();
});
});
});
diff --git a/spec/frontend/notes/components/notes_app_spec.js b/spec/frontend/notes/components/notes_app_spec.js
index e495a4738e0..45fbba2524a 100644
--- a/spec/frontend/notes/components/notes_app_spec.js
+++ b/spec/frontend/notes/components/notes_app_spec.js
@@ -10,9 +10,9 @@ import createStore from '~/notes/stores';
import * as constants from '~/notes/constants';
import '~/behaviors/markdown/render_gfm';
// TODO: use generated fixture (https://gitlab.com/gitlab-org/gitlab-foss/issues/62491)
-import * as mockData from '../mock_data';
import * as urlUtility from '~/lib/utils/url_utility';
import OrderedLayout from '~/vue_shared/components/ordered_layout.vue';
+import * as mockData from '../mock_data';
jest.mock('~/user_popovers', () => jest.fn());
diff --git a/spec/frontend/notes/stores/actions_spec.js b/spec/frontend/notes/stores/actions_spec.js
index f0e6a0a68dd..468e5af6f1a 100644
--- a/spec/frontend/notes/stores/actions_spec.js
+++ b/spec/frontend/notes/stores/actions_spec.js
@@ -1,5 +1,5 @@
-import { TEST_HOST } from 'spec/test_constants';
import AxiosMockAdapter from 'axios-mock-adapter';
+import { TEST_HOST } from 'spec/test_constants';
import testAction from 'helpers/vuex_action_helper';
import Api from '~/api';
import { deprecatedCreateFlash as Flash } from '~/flash';
@@ -9,7 +9,11 @@ import * as mutationTypes from '~/notes/stores/mutation_types';
import * as notesConstants from '~/notes/constants';
import createStore from '~/notes/stores';
import mrWidgetEventHub from '~/vue_merge_request_widget/event_hub';
-import { resetStore } from '../helpers';
+import axios from '~/lib/utils/axios_utils';
+import * as utils from '~/notes/stores/utils';
+import updateIssueConfidentialMutation from '~/sidebar/components/confidential/mutations/update_issue_confidential.mutation.graphql';
+import updateMergeRequestLockMutation from '~/sidebar/components/lock/mutations/update_merge_request_lock.mutation.graphql';
+import updateIssueLockMutation from '~/sidebar/components/lock/mutations/update_issue_lock.mutation.graphql';
import {
discussionMock,
notesDataMock,
@@ -18,11 +22,7 @@ import {
individualNote,
batchSuggestionsInfoMock,
} from '../mock_data';
-import axios from '~/lib/utils/axios_utils';
-import * as utils from '~/notes/stores/utils';
-import updateIssueConfidentialMutation from '~/sidebar/components/confidential/mutations/update_issue_confidential.mutation.graphql';
-import updateMergeRequestLockMutation from '~/sidebar/components/lock/mutations/update_merge_request_lock.mutation.graphql';
-import updateIssueLockMutation from '~/sidebar/components/lock/mutations/update_issue_lock.mutation.graphql';
+import { resetStore } from '../helpers';
const TEST_ERROR_MESSAGE = 'Test error message';
jest.mock('~/flash');
@@ -291,9 +291,45 @@ describe('Actions Notes Store', () => {
[
{ type: 'updateOrCreateNotes', payload: discussionMock.notes },
{ type: 'startTaskList' },
+ { type: 'updateResolvableDiscussionsCounts' },
],
));
});
+
+ describe('paginated notes feature flag enabled', () => {
+ const lastFetchedAt = '12358';
+
+ beforeEach(() => {
+ window.gon = { features: { paginatedNotes: true } };
+
+ axiosMock.onGet(notesDataMock.notesPath).replyOnce(200, {
+ notes: discussionMock.notes,
+ more: false,
+ last_fetched_at: lastFetchedAt,
+ });
+ });
+
+ afterEach(() => {
+ window.gon = null;
+ });
+
+    it('should dispatch setFetchingState, setNotesFetchedState, setLoadingState, updateOrCreateNotes, startTaskList, updateResolvableDiscussionsCounts and commit SET_LAST_FETCHED_AT', () => {
+ return testAction(
+ actions.fetchData,
+ null,
+ { notesData: notesDataMock, isFetching: true },
+ [{ type: 'SET_LAST_FETCHED_AT', payload: lastFetchedAt }],
+ [
+ { type: 'setFetchingState', payload: false },
+ { type: 'setNotesFetchedState', payload: true },
+ { type: 'setLoadingState', payload: false },
+ { type: 'updateOrCreateNotes', payload: discussionMock.notes },
+ { type: 'startTaskList' },
+ { type: 'updateResolvableDiscussionsCounts' },
+ ],
+ );
+ });
+ });
});
describe('poll', () => {
@@ -1355,4 +1391,17 @@ describe('Actions Notes Store', () => {
);
});
});
+
+ describe('setFetchingState', () => {
+ it('commits SET_NOTES_FETCHING_STATE', (done) => {
+ testAction(
+ actions.setFetchingState,
+ true,
+ null,
+ [{ type: mutationTypes.SET_NOTES_FETCHING_STATE, payload: true }],
+ [],
+ done,
+ );
+ });
+ });
});
diff --git a/spec/frontend/notes/stores/mutation_spec.js b/spec/frontend/notes/stores/mutation_spec.js
index 66fc74525ad..c4301f63470 100644
--- a/spec/frontend/notes/stores/mutation_spec.js
+++ b/spec/frontend/notes/stores/mutation_spec.js
@@ -400,6 +400,19 @@ describe('Notes Store mutations', () => {
expect(state.discussions[0].notes[0].note).toEqual('Foo');
});
+ it('does not update existing note if it matches', () => {
+ const state = {
+ discussions: [{ ...individualNote, individual_note: false }],
+ };
+ jest.spyOn(state.discussions[0].notes, 'splice');
+
+ const updated = individualNote.notes[0];
+
+ mutations.UPDATE_NOTE(state, updated);
+
+ expect(state.discussions[0].notes.splice).not.toHaveBeenCalled();
+ });
+
it('transforms an individual note to discussion', () => {
const state = {
discussions: [individualNote],
diff --git a/spec/frontend/notifications/components/notifications_dropdown_spec.js b/spec/frontend/notifications/components/notifications_dropdown_spec.js
new file mode 100644
index 00000000000..2fe8e9c677e
--- /dev/null
+++ b/spec/frontend/notifications/components/notifications_dropdown_spec.js
@@ -0,0 +1,240 @@
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
+import { shallowMount } from '@vue/test-utils';
+import { GlButtonGroup, GlButton, GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import waitForPromises from 'helpers/wait_for_promises';
+import httpStatus from '~/lib/utils/http_status';
+import NotificationsDropdown from '~/notifications/components/notifications_dropdown.vue';
+import NotificationsDropdownItem from '~/notifications/components/notifications_dropdown_item.vue';
+
+const mockDropdownItems = ['global', 'watch', 'participating', 'mention', 'disabled'];
+const mockToastShow = jest.fn();
+
+describe('NotificationsDropdown', () => {
+ let wrapper;
+ let mockAxios;
+
+ function createComponent(injectedProperties = {}) {
+ return shallowMount(NotificationsDropdown, {
+ stubs: {
+ GlButtonGroup,
+ GlDropdown,
+ GlDropdownItem,
+ NotificationsDropdownItem,
+ },
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ provide: {
+ dropdownItems: mockDropdownItems,
+ initialNotificationLevel: 'global',
+ ...injectedProperties,
+ },
+ mocks: {
+ $toast: {
+ show: mockToastShow,
+ },
+ },
+ });
+ }
+
+ const findButtonGroup = () => wrapper.find(GlButtonGroup);
+ const findButton = () => wrapper.find(GlButton);
+ const findDropdown = () => wrapper.find(GlDropdown);
+ const findByTestId = (testId) => wrapper.find(`[data-testid="${testId}"]`);
+ const findAllNotificationsDropdownItems = () => wrapper.findAll(NotificationsDropdownItem);
+ const findDropdownItemAt = (index) =>
+ findAllNotificationsDropdownItems().at(index).find(GlDropdownItem);
+
+ const clickDropdownItemAt = async (index) => {
+ const dropdownItem = findDropdownItemAt(index);
+ dropdownItem.vm.$emit('click');
+
+ await waitForPromises();
+ };
+
+ beforeEach(() => {
+ gon.api_version = 'v4';
+ mockAxios = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ mockAxios.restore();
+ });
+
+ describe('template', () => {
+ describe('when notification level is "custom"', () => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ initialNotificationLevel: 'custom',
+ });
+ });
+
+ it('renders a button group', () => {
+ expect(findButtonGroup().exists()).toBe(true);
+ });
+
+ it('shows the button text when showLabel is true', () => {
+ wrapper = createComponent({
+ initialNotificationLevel: 'custom',
+ showLabel: true,
+ });
+
+ expect(findButton().text()).toBe('Custom');
+ });
+
+ it("doesn't show the button text when showLabel is false", () => {
+ wrapper = createComponent({
+ initialNotificationLevel: 'custom',
+ showLabel: false,
+ });
+
+ expect(findButton().text()).toBe('');
+ });
+ });
+
+ describe('when notification level is not "custom"', () => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ initialNotificationLevel: 'global',
+ });
+ });
+
+ it('does not render a button group', () => {
+ expect(findButtonGroup().exists()).toBe(false);
+ });
+
+ it('shows the button text when showLabel is true', () => {
+ wrapper = createComponent({
+ showLabel: true,
+ });
+
+ expect(findDropdown().props('text')).toBe('Global');
+ });
+
+ it("doesn't show the button text when showLabel is false", () => {
+ wrapper = createComponent({
+ showLabel: false,
+ });
+
+ expect(findDropdown().props('text')).toBe(null);
+ });
+ });
+
+ describe('button tooltip', () => {
+ const tooltipTitlePrefix = 'Notification setting';
+ it.each`
+ level | title
+ ${'global'} | ${'Global'}
+ ${'watch'} | ${'Watch'}
+ ${'participating'} | ${'Participate'}
+ ${'mention'} | ${'On mention'}
+ ${'disabled'} | ${'Disabled'}
+ ${'custom'} | ${'Custom'}
+ `(`renders "${tooltipTitlePrefix} - $title" for "$level" level`, ({ level, title }) => {
+ wrapper = createComponent({
+ initialNotificationLevel: level,
+ });
+
+ const tooltipElement = findByTestId('notificationButton');
+ const tooltip = getBinding(tooltipElement.element, 'gl-tooltip');
+
+ expect(tooltip.value.title).toBe(`${tooltipTitlePrefix} - ${title}`);
+ });
+ });
+
+ describe('button icon', () => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ initialNotificationLevel: 'disabled',
+ });
+ });
+
+ it('renders the "notifications-off" icon when notification level is "disabled"', () => {
+ expect(findDropdown().props('icon')).toBe('notifications-off');
+ });
+
+ it('renders the "notifications" icon when notification level is not "disabled"', () => {
+ wrapper = createComponent();
+
+ expect(findDropdown().props('icon')).toBe('notifications');
+ });
+ });
+
+ describe('dropdown items', () => {
+ it.each`
+ dropdownIndex | level | title | description
+ ${0} | ${'global'} | ${'Global'} | ${'Use your global notification setting'}
+ ${1} | ${'watch'} | ${'Watch'} | ${'You will receive notifications for any activity'}
+ ${2} | ${'participating'} | ${'Participate'} | ${'You will only receive notifications for threads you have participated in'}
+ ${3} | ${'mention'} | ${'On mention'} | ${'You will receive notifications only for comments in which you were @mentioned'}
+ ${4} | ${'disabled'} | ${'Disabled'} | ${'You will not get any notifications via email'}
+ ${5} | ${'custom'} | ${'Custom'} | ${'You will only receive notifications for the events you choose'}
+ `('displays "$title" and "$description"', ({ dropdownIndex, title, description }) => {
+ wrapper = createComponent();
+
+ expect(findAllNotificationsDropdownItems().at(dropdownIndex).props('title')).toBe(title);
+ expect(findAllNotificationsDropdownItems().at(dropdownIndex).props('description')).toBe(
+ description,
+ );
+ });
+ });
+ });
+
+ describe('when selecting an item', () => {
+ beforeEach(() => {
+ jest.spyOn(axios, 'put');
+ });
+
+ it.each`
+ projectId | groupId | endpointUrl | endpointType | condition
+ ${1} | ${null} | ${'/api/v4/projects/1/notification_settings'} | ${'project notifications'} | ${'a projectId is given'}
+ ${null} | ${1} | ${'/api/v4/groups/1/notification_settings'} | ${'group notifications'} | ${'a groupId is given'}
+      ${null}     | ${null}  | ${'/api/v4/notification_settings'}            | ${'global notifications'}  | ${'neither projectId nor groupId are given'}
+ `(
+ 'calls the $endpointType endpoint when $condition',
+ async ({ projectId, groupId, endpointUrl }) => {
+ wrapper = createComponent({
+ projectId,
+ groupId,
+ });
+
+ await clickDropdownItemAt(1);
+
+ expect(axios.put).toHaveBeenCalledWith(endpointUrl, {
+ level: 'watch',
+ });
+ },
+ );
+
+ it('updates the selectedNotificationLevel and marks the item with a checkmark', async () => {
+ mockAxios.onPut('/api/v4/notification_settings').reply(httpStatus.OK, {});
+ wrapper = createComponent();
+
+ const dropdownItem = findDropdownItemAt(1);
+
+ await clickDropdownItemAt(1);
+
+ expect(wrapper.vm.selectedNotificationLevel).toBe('watch');
+ expect(dropdownItem.props('isChecked')).toBe(true);
+ });
+
+  it("won't update the selectedNotificationLevel and shows a toast message when the request fails", async () => {
+ mockAxios.onPut('/api/v4/notification_settings').reply(httpStatus.NOT_FOUND, {});
+ wrapper = createComponent();
+
+ await clickDropdownItemAt(1);
+
+ expect(wrapper.vm.selectedNotificationLevel).toBe('global');
+    expect(mockToastShow).toHaveBeenCalledWith(
+      'An error occurred while updating the notification settings. Please try again.',
+      { type: 'error' },
+    );
+ });
+ });
+});
diff --git a/spec/frontend/onboarding_issues/index_spec.js b/spec/frontend/onboarding_issues/index_spec.js
deleted file mode 100644
index d476ba1cf5a..00000000000
--- a/spec/frontend/onboarding_issues/index_spec.js
+++ /dev/null
@@ -1,137 +0,0 @@
-import $ from 'jquery';
-import setWindowLocation from 'helpers/set_window_location_helper';
-import { showLearnGitLabIssuesPopover } from '~/onboarding_issues';
-import { getCookie, setCookie, removeCookie } from '~/lib/utils/common_utils';
-import Tracking from '~/tracking';
-
-describe('Onboarding Issues Popovers', () => {
- const COOKIE_NAME = 'onboarding_issues_settings';
- const getCookieValue = () => JSON.parse(getCookie(COOKIE_NAME));
-
- beforeEach(() => {
- jest.spyOn($.fn, 'popover');
- });
-
- afterEach(() => {
- $.fn.popover.mockRestore();
- document.getElementsByTagName('html')[0].innerHTML = '';
- removeCookie(COOKIE_NAME);
- });
-
- const setupShowLearnGitLabIssuesPopoverTest = ({
- currentPath = 'group/learn-gitlab',
- isIssuesBoardsLinkShown = true,
- isCookieSet = true,
- cookieValue = true,
- } = {}) => {
- setWindowLocation(`http://example.com/${currentPath}`);
-
- if (isIssuesBoardsLinkShown) {
- const elem = document.createElement('a');
- elem.setAttribute('data-qa-selector', 'issue_boards_link');
- document.body.appendChild(elem);
- }
-
- if (isCookieSet) {
- setCookie(COOKIE_NAME, { previous: true, 'issues#index': cookieValue });
- }
-
- showLearnGitLabIssuesPopover();
- };
-
- describe('showLearnGitLabIssuesPopover', () => {
- describe('when on another project', () => {
- beforeEach(() => {
- setupShowLearnGitLabIssuesPopoverTest({
- currentPath: 'group/another-project',
- });
- });
-
- it('does not show a popover', () => {
- expect($.fn.popover).not.toHaveBeenCalled();
- });
- });
-
- describe('when the issues boards link is not shown', () => {
- beforeEach(() => {
- setupShowLearnGitLabIssuesPopoverTest({
- isIssuesBoardsLinkShown: false,
- });
- });
-
- it('does not show a popover', () => {
- expect($.fn.popover).not.toHaveBeenCalled();
- });
- });
-
- describe('when the cookie is not set', () => {
- beforeEach(() => {
- setupShowLearnGitLabIssuesPopoverTest({
- isCookieSet: false,
- });
- });
-
- it('does not show a popover', () => {
- expect($.fn.popover).not.toHaveBeenCalled();
- });
- });
-
- describe('when the cookie value is false', () => {
- beforeEach(() => {
- setupShowLearnGitLabIssuesPopoverTest({
- cookieValue: false,
- });
- });
-
- it('does not show a popover', () => {
- expect($.fn.popover).not.toHaveBeenCalled();
- });
- });
-
- describe('with all the right conditions', () => {
- beforeEach(() => {
- setupShowLearnGitLabIssuesPopoverTest();
- });
-
- it('shows a popover', () => {
- expect($.fn.popover).toHaveBeenCalled();
- });
-
- it('does not change the cookie value', () => {
- expect(getCookieValue()['issues#index']).toBe(true);
- });
-
- it('disables the previous popover', () => {
- expect(getCookieValue().previous).toBe(false);
- });
-
- describe('when clicking the issues boards link', () => {
- beforeEach(() => {
- document.querySelector('a[data-qa-selector="issue_boards_link"]').click();
- });
-
- it('deletes the cookie', () => {
- expect(getCookie(COOKIE_NAME)).toBe(undefined);
- });
- });
-
- describe('when dismissing the popover', () => {
- beforeEach(() => {
- jest.spyOn(Tracking, 'event');
- document.querySelector('.learn-gitlab.popover .close').click();
- });
-
- it('deletes the cookie', () => {
- expect(getCookie(COOKIE_NAME)).toBe(undefined);
- });
-
- it('sends a tracking event', () => {
- expect(Tracking.event).toHaveBeenCalledWith(
- 'Growth::Conversion::Experiment::OnboardingIssues',
- 'dismiss_popover',
- );
- });
- });
- });
- });
-});
diff --git a/spec/frontend/packages/details/store/getters_spec.js b/spec/frontend/packages/details/store/getters_spec.js
index b8c2138e7f5..e5b130cd958 100644
--- a/spec/frontend/packages/details/store/getters_spec.js
+++ b/spec/frontend/packages/details/store/getters_spec.js
@@ -17,6 +17,7 @@ import {
composerPackageInclude,
groupExists,
} from '~/packages/details/store/getters';
+import { NpmManager } from '~/packages/details/constants';
import {
conanPackage,
npmPackage,
@@ -32,7 +33,6 @@ import {
registryUrl,
pypiSetupCommandStr,
} from '../mock_data';
-import { NpmManager } from '~/packages/details/constants';
describe('Getters PackageDetails Store', () => {
let state;
diff --git a/spec/frontend/packages/list/components/__snapshots__/packages_filter_spec.js.snap b/spec/frontend/packages/list/components/__snapshots__/packages_filter_spec.js.snap
deleted file mode 100644
index ed77f25916f..00000000000
--- a/spec/frontend/packages/list/components/__snapshots__/packages_filter_spec.js.snap
+++ /dev/null
@@ -1,14 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`packages_filter renders 1`] = `
-<gl-search-box-by-click-stub
- clearable="true"
- clearbuttontitle="Clear"
- clearrecentsearchestext="Clear recent searches"
- closebuttontitle="Close"
- norecentsearchestext="You don't have any recent searches"
- placeholder="Filter by name"
- recentsearchesheader="Recent searches"
- value=""
-/>
-`;
diff --git a/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap b/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap
index b2df1ac5ab6..3f17731584c 100644
--- a/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap
+++ b/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap
@@ -6,517 +6,60 @@ exports[`packages_list_app renders 1`] = `
packagehelpurl="foo"
/>
- <b-tabs-stub
- activenavitemclass="gl-tab-nav-item-active gl-tab-nav-item-active-indigo"
- class="gl-tabs"
- contentclass=",gl-tab-content"
- navclass=",gl-tabs-nav"
- nofade="true"
- nonavstyle="true"
- tag="div"
- >
- <template>
-
- <b-tab-stub
- tag="div"
- title="All"
- titlelinkclass="gl-tab-nav-item"
- >
- <template>
- <div>
- <section
- class="row empty-state text-center"
- >
- <div
- class="col-12"
- >
- <div
- class="svg-250 svg-content"
- >
- <img
- alt=""
- class="gl-max-w-full"
- src="helpSvg"
- />
- </div>
- </div>
-
- <div
- class="col-12"
- >
- <div
- class="text-content gl-mx-auto gl-my-0 gl-p-5"
- >
- <h1
- class="h4"
- >
- There are no packages yet
- </h1>
-
- <p>
- Learn how to
- <b-link-stub
- class="gl-link"
- event="click"
- href="helpUrl"
- routertag="a"
- target="_blank"
- >
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
-
- <div>
- <!---->
-
- <!---->
- </div>
- </div>
- </div>
- </section>
- </div>
- </template>
- </b-tab-stub>
- <b-tab-stub
- tag="div"
- title="Composer"
- titlelinkclass="gl-tab-nav-item"
- >
- <template>
- <div>
- <section
- class="row empty-state text-center"
- >
- <div
- class="col-12"
- >
- <div
- class="svg-250 svg-content"
- >
- <img
- alt=""
- class="gl-max-w-full"
- src="helpSvg"
- />
- </div>
- </div>
-
- <div
- class="col-12"
- >
- <div
- class="text-content gl-mx-auto gl-my-0 gl-p-5"
- >
- <h1
- class="h4"
- >
- There are no Composer packages yet
- </h1>
-
- <p>
- Learn how to
- <b-link-stub
- class="gl-link"
- event="click"
- href="helpUrl"
- routertag="a"
- target="_blank"
- >
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
-
- <div>
- <!---->
-
- <!---->
- </div>
- </div>
- </div>
- </section>
- </div>
- </template>
- </b-tab-stub>
- <b-tab-stub
- tag="div"
- title="Conan"
- titlelinkclass="gl-tab-nav-item"
- >
- <template>
- <div>
- <section
- class="row empty-state text-center"
- >
- <div
- class="col-12"
- >
- <div
- class="svg-250 svg-content"
- >
- <img
- alt=""
- class="gl-max-w-full"
- src="helpSvg"
- />
- </div>
- </div>
-
- <div
- class="col-12"
- >
- <div
- class="text-content gl-mx-auto gl-my-0 gl-p-5"
- >
- <h1
- class="h4"
- >
- There are no Conan packages yet
- </h1>
-
- <p>
- Learn how to
- <b-link-stub
- class="gl-link"
- event="click"
- href="helpUrl"
- routertag="a"
- target="_blank"
- >
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
-
- <div>
- <!---->
-
- <!---->
- </div>
- </div>
- </div>
- </section>
- </div>
- </template>
- </b-tab-stub>
- <b-tab-stub
- tag="div"
- title="Generic"
- titlelinkclass="gl-tab-nav-item"
- >
- <template>
- <div>
- <section
- class="row empty-state text-center"
- >
- <div
- class="col-12"
- >
- <div
- class="svg-250 svg-content"
- >
- <img
- alt=""
- class="gl-max-w-full"
- src="helpSvg"
- />
- </div>
- </div>
-
- <div
- class="col-12"
- >
- <div
- class="text-content gl-mx-auto gl-my-0 gl-p-5"
- >
- <h1
- class="h4"
- >
- There are no Generic packages yet
- </h1>
-
- <p>
- Learn how to
- <b-link-stub
- class="gl-link"
- event="click"
- href="helpUrl"
- routertag="a"
- target="_blank"
- >
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
-
- <div>
- <!---->
-
- <!---->
- </div>
- </div>
- </div>
- </section>
- </div>
- </template>
- </b-tab-stub>
- <b-tab-stub
- tag="div"
- title="Maven"
- titlelinkclass="gl-tab-nav-item"
- >
- <template>
- <div>
- <section
- class="row empty-state text-center"
- >
- <div
- class="col-12"
- >
- <div
- class="svg-250 svg-content"
- >
- <img
- alt=""
- class="gl-max-w-full"
- src="helpSvg"
- />
- </div>
- </div>
-
- <div
- class="col-12"
- >
- <div
- class="text-content gl-mx-auto gl-my-0 gl-p-5"
- >
- <h1
- class="h4"
- >
- There are no Maven packages yet
- </h1>
-
- <p>
- Learn how to
- <b-link-stub
- class="gl-link"
- event="click"
- href="helpUrl"
- routertag="a"
- target="_blank"
- >
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
-
- <div>
- <!---->
-
- <!---->
- </div>
- </div>
- </div>
- </section>
- </div>
- </template>
- </b-tab-stub>
- <b-tab-stub
- tag="div"
- title="NPM"
- titlelinkclass="gl-tab-nav-item"
+ <package-search-stub />
+
+ <div>
+ <section
+ class="row empty-state text-center"
+ >
+ <div
+ class="col-12"
>
- <template>
- <div>
- <section
- class="row empty-state text-center"
- >
- <div
- class="col-12"
- >
- <div
- class="svg-250 svg-content"
- >
- <img
- alt=""
- class="gl-max-w-full"
- src="helpSvg"
- />
- </div>
- </div>
-
- <div
- class="col-12"
- >
- <div
- class="text-content gl-mx-auto gl-my-0 gl-p-5"
- >
- <h1
- class="h4"
- >
- There are no NPM packages yet
- </h1>
-
- <p>
- Learn how to
- <b-link-stub
- class="gl-link"
- event="click"
- href="helpUrl"
- routertag="a"
- target="_blank"
- >
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
-
- <div>
- <!---->
-
- <!---->
- </div>
- </div>
- </div>
- </section>
- </div>
- </template>
- </b-tab-stub>
- <b-tab-stub
- tag="div"
- title="NuGet"
- titlelinkclass="gl-tab-nav-item"
+ <div
+ class="svg-250 svg-content"
+ >
+ <img
+ alt=""
+ class="gl-max-w-full"
+ src="helpSvg"
+ />
+ </div>
+ </div>
+
+ <div
+ class="col-12"
>
- <template>
- <div>
- <section
- class="row empty-state text-center"
+ <div
+ class="text-content gl-mx-auto gl-my-0 gl-p-5"
+ >
+ <h1
+ class="h4"
+ >
+ There are no packages yet
+ </h1>
+
+ <p>
+ Learn how to
+ <b-link-stub
+ class="gl-link"
+ event="click"
+ href="helpUrl"
+ routertag="a"
+ target="_blank"
>
- <div
- class="col-12"
- >
- <div
- class="svg-250 svg-content"
- >
- <img
- alt=""
- class="gl-max-w-full"
- src="helpSvg"
- />
- </div>
- </div>
-
- <div
- class="col-12"
- >
- <div
- class="text-content gl-mx-auto gl-my-0 gl-p-5"
- >
- <h1
- class="h4"
- >
- There are no NuGet packages yet
- </h1>
-
- <p>
- Learn how to
- <b-link-stub
- class="gl-link"
- event="click"
- href="helpUrl"
- routertag="a"
- target="_blank"
- >
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
-
- <div>
- <!---->
-
- <!---->
- </div>
- </div>
- </div>
- </section>
- </div>
- </template>
- </b-tab-stub>
- <b-tab-stub
- tag="div"
- title="PyPI"
- titlelinkclass="gl-tab-nav-item"
- >
- <template>
+ publish and share your packages
+ </b-link-stub>
+ with GitLab.
+ </p>
+
<div>
- <section
- class="row empty-state text-center"
- >
- <div
- class="col-12"
- >
- <div
- class="svg-250 svg-content"
- >
- <img
- alt=""
- class="gl-max-w-full"
- src="helpSvg"
- />
- </div>
- </div>
-
- <div
- class="col-12"
- >
- <div
- class="text-content gl-mx-auto gl-my-0 gl-p-5"
- >
- <h1
- class="h4"
- >
- There are no PyPI packages yet
- </h1>
-
- <p>
- Learn how to
- <b-link-stub
- class="gl-link"
- event="click"
- href="helpUrl"
- routertag="a"
- target="_blank"
- >
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
-
- <div>
- <!---->
-
- <!---->
- </div>
- </div>
- </div>
- </section>
+ <!---->
+
+ <!---->
</div>
- </template>
- </b-tab-stub>
- </template>
- <template>
- <div
- class="gl-display-flex gl-align-self-center gl-py-2 gl-flex-grow-1 gl-justify-content-end"
- >
- <package-filter-stub
- class="gl-mr-2"
- />
-
- <package-sort-stub />
+ </div>
</div>
- </template>
- </b-tabs-stub>
+ </section>
+ </div>
</div>
`;
diff --git a/spec/frontend/packages/list/components/packages_filter_spec.js b/spec/frontend/packages/list/components/packages_filter_spec.js
deleted file mode 100644
index b186b5f5e48..00000000000
--- a/spec/frontend/packages/list/components/packages_filter_spec.js
+++ /dev/null
@@ -1,50 +0,0 @@
-import Vuex from 'vuex';
-import { GlSearchBoxByClick } from '@gitlab/ui';
-import { createLocalVue, shallowMount } from '@vue/test-utils';
-import PackagesFilter from '~/packages/list/components/packages_filter.vue';
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
-
-describe('packages_filter', () => {
- let wrapper;
- let store;
-
- const findGlSearchBox = () => wrapper.find(GlSearchBoxByClick);
-
- const mountComponent = () => {
- store = new Vuex.Store();
- store.dispatch = jest.fn();
-
- wrapper = shallowMount(PackagesFilter, {
- localVue,
- store,
- });
- };
-
- beforeEach(mountComponent);
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- it('renders', () => {
- expect(wrapper.element).toMatchSnapshot();
- });
-
- describe('emits events', () => {
- it('sets the filter value in the store on input', () => {
- const searchString = 'foo';
- findGlSearchBox().vm.$emit('input', searchString);
-
- expect(store.dispatch).toHaveBeenCalledWith('setFilter', searchString);
- });
-
- it('emits the filter event when search box is submitted', () => {
- findGlSearchBox().vm.$emit('submit');
-
- expect(wrapper.emitted('filter')).toBeTruthy();
- });
- });
-});
diff --git a/spec/frontend/packages/list/components/packages_list_app_spec.js b/spec/frontend/packages/list/components/packages_list_app_spec.js
index 217096f822a..36dd73666ea 100644
--- a/spec/frontend/packages/list/components/packages_list_app_spec.js
+++ b/spec/frontend/packages/list/components/packages_list_app_spec.js
@@ -1,9 +1,10 @@
import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
-import { GlEmptyState, GlTab, GlTabs, GlSprintf, GlLink } from '@gitlab/ui';
+import { GlEmptyState, GlSprintf, GlLink } from '@gitlab/ui';
import * as commonUtils from '~/lib/utils/common_utils';
import createFlash from '~/flash';
import PackageListApp from '~/packages/list/components/packages_list_app.vue';
+import PackageSearch from '~/packages/list/components/package_search.vue';
import { SHOW_DELETE_SUCCESS_ALERT } from '~/packages/shared/constants';
import { DELETE_PACKAGE_SUCCESS_MESSAGE } from '~/packages/list/constants';
@@ -26,9 +27,9 @@ describe('packages_list_app', () => {
const emptyListHelpUrl = 'helpUrl';
const findEmptyState = () => wrapper.find(GlEmptyState);
const findListComponent = () => wrapper.find(PackageList);
- const findTabComponent = (index = 0) => wrapper.findAll(GlTab).at(index);
+ const findPackageSearch = () => wrapper.find(PackageSearch);
- const createStore = (filterQuery = '') => {
+ const createStore = (filter = []) => {
store = new Vuex.Store({
state: {
isLoading: false,
@@ -38,7 +39,7 @@ describe('packages_list_app', () => {
emptyListHelpUrl,
packageHelpUrl: 'foo',
},
- filterQuery,
+ filter,
},
});
store.dispatch = jest.fn();
@@ -52,8 +53,6 @@ describe('packages_list_app', () => {
GlEmptyState,
GlLoadingIcon,
PackageList,
- GlTab,
- GlTabs,
GlSprintf,
GlLink,
},
@@ -122,27 +121,9 @@ describe('packages_list_app', () => {
expect(store.dispatch).toHaveBeenCalledTimes(1);
});
- describe('tab change', () => {
- it('calls requestPackagesList when all tab is clicked', () => {
- mountComponent();
-
- findTabComponent().trigger('click');
-
- expect(store.dispatch).toHaveBeenCalledWith('requestPackagesList');
- });
-
- it('calls requestPackagesList when a package type tab is clicked', () => {
- mountComponent();
-
- findTabComponent(1).trigger('click');
-
- expect(store.dispatch).toHaveBeenCalledWith('requestPackagesList');
- });
- });
-
describe('filter without results', () => {
beforeEach(() => {
- createStore('foo');
+ createStore([{ type: 'something' }]);
mountComponent();
});
@@ -154,12 +135,28 @@ describe('packages_list_app', () => {
});
});
+ describe('Package Search', () => {
+ it('exists', () => {
+ mountComponent();
+
+ expect(findPackageSearch().exists()).toBe(true);
+ });
+
+ it.each(['sort:changed', 'filter:changed'])('on %p fetches data from the store', (event) => {
+ mountComponent();
+
+ findPackageSearch().vm.$emit(event);
+
+ expect(store.dispatch).toHaveBeenCalledWith('requestPackagesList');
+ });
+ });
+
describe('delete alert handling', () => {
const { location } = window.location;
const search = `?${SHOW_DELETE_SUCCESS_ALERT}=true`;
beforeEach(() => {
- createStore('foo');
+ createStore();
jest.spyOn(commonUtils, 'historyReplaceState').mockImplementation(() => {});
delete window.location;
window.location = {
diff --git a/spec/frontend/packages/list/components/packages_search_spec.js b/spec/frontend/packages/list/components/packages_search_spec.js
new file mode 100644
index 00000000000..23a8d3fb871
--- /dev/null
+++ b/spec/frontend/packages/list/components/packages_search_spec.js
@@ -0,0 +1,145 @@
+import Vuex from 'vuex';
+import { GlSorting, GlSortingItem, GlFilteredSearch } from '@gitlab/ui';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import component from '~/packages/list/components/package_search.vue';
+import PackageTypeToken from '~/packages/list/components/tokens/package_type_token.vue';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('Package Search', () => {
+ let wrapper;
+ let store;
+ let sorting;
+ let sortingItems;
+
+ const findPackageListSorting = () => wrapper.find(GlSorting);
+ const findSortingItems = () => wrapper.findAll(GlSortingItem);
+ const findFilteredSearch = () => wrapper.find(GlFilteredSearch);
+
+ const createStore = (isGroupPage) => {
+ const state = {
+ config: {
+ isGroupPage,
+ },
+ sorting: {
+ orderBy: 'version',
+ sort: 'desc',
+ },
+ filter: [],
+ };
+ store = new Vuex.Store({
+ state,
+ });
+ store.dispatch = jest.fn();
+ };
+
+ const mountComponent = (isGroupPage = false) => {
+ createStore(isGroupPage);
+
+ wrapper = shallowMount(component, {
+ localVue,
+ store,
+ stubs: {
+ GlSortingItem,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('searching', () => {
+ it('has a filtered-search component', () => {
+ mountComponent();
+
+ expect(findFilteredSearch().exists()).toBe(true);
+ });
+
+ it('binds the correct props to filtered-search', () => {
+ mountComponent();
+
+ expect(findFilteredSearch().props()).toMatchObject({
+ value: [],
+ placeholder: 'Filter results',
+ availableTokens: wrapper.vm.tokens,
+ });
+ });
+
+ it('updates vuex when value changes', () => {
+ mountComponent();
+
+ findFilteredSearch().vm.$emit('input', ['foo']);
+
+ expect(store.dispatch).toHaveBeenCalledWith('setFilter', ['foo']);
+ });
+
+ it('emits filter:changed on submit event', () => {
+ mountComponent();
+
+ findFilteredSearch().vm.$emit('submit');
+ expect(wrapper.emitted('filter:changed')).toEqual([[]]);
+ });
+
+ it('emits filter:changed on clear event and resets vuex', () => {
+ mountComponent();
+
+ findFilteredSearch().vm.$emit('clear');
+
+ expect(store.dispatch).toHaveBeenCalledWith('setFilter', []);
+ expect(wrapper.emitted('filter:changed')).toEqual([[]]);
+ });
+
+ it('has a PackageTypeToken token', () => {
+ mountComponent();
+
+ expect(findFilteredSearch().props('availableTokens')).toEqual(
+ expect.arrayContaining([
+ expect.objectContaining({ token: PackageTypeToken, type: 'type', icon: 'package' }),
+ ]),
+ );
+ });
+ });
+
+ describe('sorting', () => {
+ describe('when on a project page', () => {
+ beforeEach(() => {
+ mountComponent();
+ sorting = findPackageListSorting();
+ sortingItems = findSortingItems();
+ });
+
+ it('has all the sortable items', () => {
+ expect(sortingItems).toHaveLength(wrapper.vm.sortableFields.length);
+ });
+
+ it('on sort change sets sorting in vuex and emits an event', () => {
+ sorting.vm.$emit('sortDirectionChange');
+ expect(store.dispatch).toHaveBeenCalledWith('setSorting', { sort: 'asc' });
+ expect(wrapper.emitted('sort:changed')).toBeTruthy();
+ });
+
+ it('on sort item click sets sorting and emits an event', () => {
+ const item = sortingItems.at(0);
+ const { orderBy } = wrapper.vm.sortableFields[0];
+ item.vm.$emit('click');
+ expect(store.dispatch).toHaveBeenCalledWith('setSorting', { orderBy });
+ expect(wrapper.emitted('sort:changed')).toBeTruthy();
+ });
+ });
+
+ describe('when on a group page', () => {
+ beforeEach(() => {
+ mountComponent(true);
+ sorting = findPackageListSorting();
+ sortingItems = findSortingItems();
+ });
+
+ it('has all the sortable items', () => {
+ expect(sortingItems).toHaveLength(wrapper.vm.sortableFields.length);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/packages/list/components/packages_sort_spec.js b/spec/frontend/packages/list/components/packages_sort_spec.js
deleted file mode 100644
index d15ad9bd542..00000000000
--- a/spec/frontend/packages/list/components/packages_sort_spec.js
+++ /dev/null
@@ -1,90 +0,0 @@
-import Vuex from 'vuex';
-import { GlSorting, GlSortingItem } from '@gitlab/ui';
-import { mount, createLocalVue } from '@vue/test-utils';
-import stubChildren from 'helpers/stub_children';
-import PackagesSort from '~/packages/list/components/packages_sort.vue';
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
-
-describe('packages_sort', () => {
- let wrapper;
- let store;
- let sorting;
- let sortingItems;
-
- const findPackageListSorting = () => wrapper.find(GlSorting);
- const findSortingItems = () => wrapper.findAll(GlSortingItem);
-
- const createStore = (isGroupPage) => {
- const state = {
- config: {
- isGroupPage,
- },
- sorting: {
- orderBy: 'version',
- sort: 'desc',
- },
- };
- store = new Vuex.Store({
- state,
- });
- store.dispatch = jest.fn();
- };
-
- const mountComponent = (isGroupPage = false) => {
- createStore(isGroupPage);
-
- wrapper = mount(PackagesSort, {
- localVue,
- store,
- stubs: {
- ...stubChildren(PackagesSort),
- GlSortingItem,
- },
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- describe('when is in projects', () => {
- beforeEach(() => {
- mountComponent();
- sorting = findPackageListSorting();
- sortingItems = findSortingItems();
- });
-
- it('has all the sortable items', () => {
- expect(sortingItems).toHaveLength(wrapper.vm.sortableFields.length);
- });
-
- it('on sort change set sorting in vuex and emit event', () => {
- sorting.vm.$emit('sortDirectionChange');
- expect(store.dispatch).toHaveBeenCalledWith('setSorting', { sort: 'asc' });
- expect(wrapper.emitted('sort:changed')).toBeTruthy();
- });
-
- it('on sort item click set sorting and emit event', () => {
- const item = sortingItems.at(0);
- const { orderBy } = wrapper.vm.sortableFields[0];
- item.vm.$emit('click');
- expect(store.dispatch).toHaveBeenCalledWith('setSorting', { orderBy });
- expect(wrapper.emitted('sort:changed')).toBeTruthy();
- });
- });
-
- describe('when is in group', () => {
- beforeEach(() => {
- mountComponent(true);
- sorting = findPackageListSorting();
- sortingItems = findSortingItems();
- });
-
- it('has all the sortable items', () => {
- expect(sortingItems).toHaveLength(wrapper.vm.sortableFields.length);
- });
- });
-});
diff --git a/spec/frontend/packages/list/components/tokens/package_type_token_spec.js b/spec/frontend/packages/list/components/tokens/package_type_token_spec.js
new file mode 100644
index 00000000000..a654f431266
--- /dev/null
+++ b/spec/frontend/packages/list/components/tokens/package_type_token_spec.js
@@ -0,0 +1,48 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlFilteredSearchToken, GlFilteredSearchSuggestion } from '@gitlab/ui';
+import component from '~/packages/list/components/tokens/package_type_token.vue';
+import { PACKAGE_TYPES } from '~/packages/list/constants';
+
+describe('package_type_token', () => {
+ let wrapper;
+
+ const findFilteredSearchToken = () => wrapper.find(GlFilteredSearchToken);
+ const findFilteredSearchSuggestions = () => wrapper.findAll(GlFilteredSearchSuggestion);
+
+ const mountComponent = ({ attrs, listeners } = {}) => {
+ wrapper = shallowMount(component, {
+ attrs,
+ listeners,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('binds all of its attrs to the filtered search token', () => {
+ mountComponent({ attrs: { foo: 'bar' } });
+
+ expect(findFilteredSearchToken().attributes('foo')).toBe('bar');
+ });
+
+ it('binds all of its events to the filtered search token', () => {
+ const clickListener = jest.fn();
+ mountComponent({ listeners: { click: clickListener } });
+
+ findFilteredSearchToken().vm.$emit('click');
+
+ expect(clickListener).toHaveBeenCalled();
+ });
+
+ it.each(PACKAGE_TYPES.map((p, index) => [p, index]))(
+ 'displays a suggestion for %p',
+ (packageType, index) => {
+ mountComponent();
+ const item = findFilteredSearchSuggestions().at(index);
+ expect(item.text()).toBe(packageType.title);
+ expect(item.props('value')).toBe(packageType.type);
+ },
+ );
+});
diff --git a/spec/frontend/packages/list/stores/actions_spec.js b/spec/frontend/packages/list/stores/actions_spec.js
index 05e1fe57cae..d33851e6ab3 100644
--- a/spec/frontend/packages/list/stores/actions_spec.js
+++ b/spec/frontend/packages/list/stores/actions_spec.js
@@ -30,11 +30,13 @@ describe('Actions Package list store', () => {
sort: 'asc',
orderBy: 'version',
};
+
+ const filter = [];
it('should fetch the project packages list when isGroupPage is false', (done) => {
testAction(
actions.requestPackagesList,
undefined,
- { config: { isGroupPage: false, resourceId: 1 }, sorting },
+ { config: { isGroupPage: false, resourceId: 1 }, sorting, filter },
[],
[
{ type: 'setLoading', payload: true },
@@ -54,7 +56,7 @@ describe('Actions Package list store', () => {
testAction(
actions.requestPackagesList,
undefined,
- { config: { isGroupPage: true, resourceId: 2 }, sorting },
+ { config: { isGroupPage: true, resourceId: 2 }, sorting, filter },
[],
[
{ type: 'setLoading', payload: true },
@@ -70,7 +72,7 @@ describe('Actions Package list store', () => {
);
});
- it('should fetch packages of a certain type when selectedType is present', (done) => {
+ it('should fetch packages of a certain type when a filter with a type is present', (done) => {
const packageType = 'maven';
testAction(
@@ -79,7 +81,7 @@ describe('Actions Package list store', () => {
{
config: { isGroupPage: false, resourceId: 1 },
sorting,
- selectedType: { type: packageType },
+ filter: [{ type: 'type', value: { data: 'maven' } }],
},
[],
[
@@ -107,7 +109,7 @@ describe('Actions Package list store', () => {
testAction(
actions.requestPackagesList,
undefined,
- { config: { isGroupPage: false, resourceId: 2 }, sorting },
+ { config: { isGroupPage: false, resourceId: 2 }, sorting, filter },
[],
[
{ type: 'setLoading', payload: true },
diff --git a/spec/frontend/packages/list/stores/mutations_spec.js b/spec/frontend/packages/list/stores/mutations_spec.js
index 0d424a0c011..743de595eb5 100644
--- a/spec/frontend/packages/list/stores/mutations_spec.js
+++ b/spec/frontend/packages/list/stores/mutations_spec.js
@@ -78,17 +78,10 @@ describe('Mutations Registry Store', () => {
});
});
- describe('SET_SELECTED_TYPE', () => {
- it('should set the selected type', () => {
- mutations[types.SET_SELECTED_TYPE](mockState, { type: 'maven' });
- expect(mockState.selectedType).toEqual({ type: 'maven' });
- });
- });
-
describe('SET_FILTER', () => {
it('should set the filter query', () => {
mutations[types.SET_FILTER](mockState, 'foo');
- expect(mockState.filterQuery).toEqual('foo');
+ expect(mockState.filter).toEqual('foo');
});
});
});
diff --git a/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap b/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap
index 5faae5690db..4a75deebcf9 100644
--- a/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap
+++ b/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap
@@ -6,7 +6,7 @@ exports[`packages_list_row renders 1`] = `
data-qa-selector="package_row"
>
<div
- class="gl-display-flex gl-align-items-center gl-py-5"
+ class="gl-display-flex gl-align-items-center gl-py-3"
>
<!---->
@@ -14,7 +14,7 @@ exports[`packages_list_row renders 1`] = `
class="gl-display-flex gl-xs-flex-direction-column gl-justify-content-space-between gl-align-items-stretch gl-flex-fill-1"
>
<div
- class="gl-display-flex gl-flex-direction-column gl-justify-content-space-between gl-xs-mb-3 gl-min-w-0 gl-flex-grow-1"
+ class="gl-display-flex gl-flex-direction-column gl-xs-mb-3 gl-min-w-0 gl-flex-grow-1"
>
<div
class="gl-display-flex gl-align-items-center gl-text-body gl-font-weight-bold gl-min-h-6 gl-min-w-0"
@@ -40,7 +40,7 @@ exports[`packages_list_row renders 1`] = `
</div>
<div
- class="gl-display-flex gl-align-items-center gl-text-gray-500 gl-mt-1 gl-min-h-6 gl-min-w-0 gl-flex-fill-1"
+ class="gl-display-flex gl-align-items-center gl-text-gray-500 gl-min-h-6 gl-min-w-0 gl-flex-fill-1"
>
<div
class="gl-display-flex"
@@ -85,7 +85,7 @@ exports[`packages_list_row renders 1`] = `
</div>
<div
- class="gl-display-flex gl-align-items-center gl-mt-1 gl-min-h-6"
+ class="gl-display-flex gl-align-items-center gl-min-h-6"
>
<span>
<gl-sprintf-stub
@@ -97,7 +97,7 @@ exports[`packages_list_row renders 1`] = `
</div>
<div
- class="gl-w-9 gl-display-none gl-display-sm-flex gl-justify-content-end gl-pr-1"
+ class="gl-w-9 gl-display-none gl-sm-display-flex gl-justify-content-end gl-pr-1"
>
<gl-button-stub
aria-label="Remove package"
diff --git a/spec/frontend/packages/shared/components/packages_list_loader_spec.js b/spec/frontend/packages/shared/components/packages_list_loader_spec.js
index 115a3a7095d..4ff01068f92 100644
--- a/spec/frontend/packages/shared/components/packages_list_loader_spec.js
+++ b/spec/frontend/packages/shared/components/packages_list_loader_spec.js
@@ -30,7 +30,7 @@ describe('PackagesListLoader', () => {
it('has the correct classes', () => {
expect(findDesktopShapes().classes()).toEqual([
'gl-display-none',
- 'gl-display-sm-flex',
+ 'gl-sm-display-flex',
'gl-flex-direction-column',
]);
});
@@ -44,7 +44,7 @@ describe('PackagesListLoader', () => {
it('has the correct classes', () => {
expect(findMobileShapes().classes()).toEqual([
'gl-flex-direction-column',
- 'gl-display-sm-none',
+ 'gl-sm-display-none',
]);
});
});
diff --git a/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js b/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js
new file mode 100644
index 00000000000..8a1d5044106
--- /dev/null
+++ b/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js
@@ -0,0 +1,99 @@
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { GlSprintf, GlLink } from '@gitlab/ui';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import component from '~/packages_and_registries/settings/group/components/group_settings_app.vue';
+import SettingsBlock from '~/vue_shared/components/settings/settings_block.vue';
+import {
+ PACKAGE_SETTINGS_HEADER,
+ PACKAGE_SETTINGS_DESCRIPTION,
+ PACKAGES_DOCS_PATH,
+} from '~/packages_and_registries/settings/group/constants';
+
+import getGroupPackagesSettingsQuery from '~/packages_and_registries/settings/group/graphql/queries/get_group_packages_settings.query.graphql';
+import { groupPackageSettingsMock } from '../mock_data';
+
+const localVue = createLocalVue();
+
+describe('Group Settings App', () => {
+ let wrapper;
+ let apolloProvider;
+
+ const defaultProvide = {
+ defaultExpanded: false,
+ groupPath: 'foo_group_path',
+ };
+
+ const mountComponent = ({
+ provide = defaultProvide,
+ resolver = jest.fn().mockResolvedValue(groupPackageSettingsMock),
+ } = {}) => {
+ localVue.use(VueApollo);
+
+ const requestHandlers = [[getGroupPackagesSettingsQuery, resolver]];
+
+ apolloProvider = createMockApollo(requestHandlers);
+
+ wrapper = shallowMount(component, {
+ localVue,
+ apolloProvider,
+ provide,
+ stubs: {
+ GlSprintf,
+ SettingsBlock,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findSettingsBlock = () => wrapper.find(SettingsBlock);
+ const findDescription = () => wrapper.find('[data-testid="description"]');
+ const findLink = () => wrapper.find(GlLink);
+
+ it('renders a settings block', () => {
+ mountComponent();
+
+ expect(findSettingsBlock().exists()).toBe(true);
+ });
+
+ it('passes the correct props to settings block', () => {
+ mountComponent();
+
+ expect(findSettingsBlock().props('defaultExpanded')).toBe(false);
+ });
+
+ it('has the correct header text', () => {
+ mountComponent();
+
+ expect(wrapper.text()).toContain(PACKAGE_SETTINGS_HEADER);
+ });
+
+ it('has the correct description text', () => {
+ mountComponent();
+
+ expect(findDescription().text()).toMatchInterpolatedText(PACKAGE_SETTINGS_DESCRIPTION);
+ });
+
+ it('has the correct link', () => {
+ mountComponent();
+
+ expect(findLink().attributes()).toMatchObject({
+ href: PACKAGES_DOCS_PATH,
+ target: '_blank',
+ });
+ expect(findLink().text()).toBe('More Information');
+ });
+
+ it('calls the graphql API with the proper variables', () => {
+ const resolver = jest.fn().mockResolvedValue(groupPackageSettingsMock);
+ mountComponent({ resolver });
+
+ expect(resolver).toHaveBeenCalledWith({
+ fullPath: defaultProvide.groupPath,
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/settings/group/mock_data.js b/spec/frontend/packages_and_registries/settings/group/mock_data.js
new file mode 100644
index 00000000000..6be43cb4aea
--- /dev/null
+++ b/spec/frontend/packages_and_registries/settings/group/mock_data.js
@@ -0,0 +1,12 @@
+export const groupPackageSettingsMock = {
+ data: {
+ group: {
+ packageSettings: {
+ mavenDuplicatesAllowed: true,
+ mavenDuplicateExceptionRegex: '',
+ __typename: 'PackageSettings',
+ },
+ __typename: 'Group',
+ },
+ },
+};
diff --git a/spec/frontend/pages/projects/edit/mount_search_settings_spec.js b/spec/frontend/pages/projects/edit/mount_search_settings_spec.js
deleted file mode 100644
index b48809b3d00..00000000000
--- a/spec/frontend/pages/projects/edit/mount_search_settings_spec.js
+++ /dev/null
@@ -1,25 +0,0 @@
-import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import initSearch from '~/search_settings';
-import mountSearchSettings from '~/pages/projects/edit/mount_search_settings';
-
-jest.mock('~/search_settings');
-
-describe('pages/projects/edit/mount_search_settings', () => {
- afterEach(() => {
- resetHTMLFixture();
- });
-
- it('initializes search settings when js-search-settings-app is available', async () => {
- setHTMLFixture('<div class="js-search-settings-app"></div>');
-
- await mountSearchSettings();
-
- expect(initSearch).toHaveBeenCalled();
- });
-
- it('does not initialize search settings when js-search-settings-app is unavailable', async () => {
- await mountSearchSettings();
-
- expect(initSearch).not.toHaveBeenCalled();
- });
-});
diff --git a/spec/frontend/pages/projects/graphs/code_coverage_spec.js b/spec/frontend/pages/projects/graphs/code_coverage_spec.js
index 4a60c7fd509..c9420285bad 100644
--- a/spec/frontend/pages/projects/graphs/code_coverage_spec.js
+++ b/spec/frontend/pages/projects/graphs/code_coverage_spec.js
@@ -6,8 +6,8 @@ import { GlAreaChart } from '@gitlab/ui/dist/charts';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import CodeCoverage from '~/pages/projects/graphs/components/code_coverage.vue';
-import { codeCoverageMockData, sortedDataByDates } from './mock_data';
import httpStatusCodes from '~/lib/utils/http_status';
+import { codeCoverageMockData, sortedDataByDates } from './mock_data';
describe('Code Coverage', () => {
let wrapper;
diff --git a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
index 9aee6ec7ace..689649063c0 100644
--- a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
+++ b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
@@ -175,7 +175,7 @@ describe('Settings Panel', () => {
wrapper = overrideCurrentSettings({ visibilityLevel: visibilityOptions.PRIVATE });
expect(findRepositoryFeatureProjectRow().props().helpText).toBe(
- 'View and edit files in this project',
+ 'View and edit files in this project.',
);
});
@@ -183,7 +183,7 @@ describe('Settings Panel', () => {
wrapper = overrideCurrentSettings({ visibilityLevel: visibilityOptions.PUBLIC });
expect(findRepositoryFeatureProjectRow().props().helpText).toBe(
- 'View and edit files in this project. Non-project members will only have read access',
+ 'View and edit files in this project. Non-project members will only have read access.',
);
});
});
@@ -400,7 +400,7 @@ describe('Settings Panel', () => {
const link = message.find('a');
expect(message.text()).toContain(
- 'LFS objects from this repository are still available to forks',
+ 'LFS objects from this repository are available to forks.',
);
expect(link.text()).toBe('How do I remove them?');
expect(link.attributes('href')).toBe(
@@ -530,7 +530,7 @@ describe('Settings Panel', () => {
it('should contain help text', () => {
expect(wrapper.find({ ref: 'metrics-visibility-settings' }).props().helpText).toBe(
- 'With Metrics Dashboard you can visualize this project performance metrics',
+ "Visualize the project's performance metrics.",
);
});
diff --git a/spec/frontend/pipeline_editor/components/commit/commit_form_spec.js b/spec/frontend/pipeline_editor/components/commit/commit_form_spec.js
index aae25a3aa6d..76c56bd2815 100644
--- a/spec/frontend/pipeline_editor/components/commit/commit_form_spec.js
+++ b/spec/frontend/pipeline_editor/components/commit/commit_form_spec.js
@@ -5,7 +5,7 @@ import CommitForm from '~/pipeline_editor/components/commit/commit_form.vue';
import { mockCommitMessage, mockDefaultBranch } from '../../mock_data';
-describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
+describe('Pipeline Editor | Commit Form', () => {
let wrapper;
const createComponent = ({ props = {} } = {}, mountFn = shallowMount) => {
@@ -21,8 +21,8 @@ describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
});
};
- const findCommitTextarea = () => wrapper.find(GlFormTextarea);
- const findBranchInput = () => wrapper.find(GlFormInput);
+ const findCommitTextarea = () => wrapper.findComponent(GlFormTextarea);
+ const findBranchInput = () => wrapper.findComponent(GlFormInput);
const findNewMrCheckbox = () => wrapper.find('[data-testid="new-mr-checkbox"]');
const findSubmitBtn = () => wrapper.find('[type="submit"]');
const findCancelBtn = () => wrapper.find('[type="reset"]');
diff --git a/spec/frontend/pipeline_editor/components/commit/commit_section_spec.js b/spec/frontend/pipeline_editor/components/commit/commit_section_spec.js
new file mode 100644
index 00000000000..269c0cb525e
--- /dev/null
+++ b/spec/frontend/pipeline_editor/components/commit/commit_section_spec.js
@@ -0,0 +1,223 @@
+import { mount } from '@vue/test-utils';
+import { GlFormTextarea, GlFormInput, GlLoadingIcon } from '@gitlab/ui';
+import CommitSection from '~/pipeline_editor/components/commit/commit_section.vue';
+import CommitForm from '~/pipeline_editor/components/commit/commit_form.vue';
+import { objectToQuery, redirectTo } from '~/lib/utils/url_utility';
+import commitCreate from '~/pipeline_editor/graphql/mutations/commit_ci_file.mutation.graphql';
+
+import { COMMIT_SUCCESS } from '~/pipeline_editor/constants';
+import {
+ mockCiConfigPath,
+ mockCiYml,
+ mockCommitSha,
+ mockCommitNextSha,
+ mockCommitMessage,
+ mockDefaultBranch,
+ mockProjectFullPath,
+ mockNewMergeRequestPath,
+} from '../../mock_data';
+
+jest.mock('~/lib/utils/url_utility', () => ({
+ redirectTo: jest.fn(),
+ refreshCurrentPage: jest.fn(),
+ objectToQuery: jest.requireActual('~/lib/utils/url_utility').objectToQuery,
+ mergeUrlParams: jest.requireActual('~/lib/utils/url_utility').mergeUrlParams,
+}));
+
+const mockVariables = {
+ projectPath: mockProjectFullPath,
+ startBranch: mockDefaultBranch,
+ message: mockCommitMessage,
+ filePath: mockCiConfigPath,
+ content: mockCiYml,
+ lastCommitId: mockCommitSha,
+};
+
+const mockProvide = {
+ ciConfigPath: mockCiConfigPath,
+ defaultBranch: mockDefaultBranch,
+ projectFullPath: mockProjectFullPath,
+ newMergeRequestPath: mockNewMergeRequestPath,
+};
+
+describe('Pipeline Editor | Commit section', () => {
+ let wrapper;
+ let mockMutate;
+
+ const defaultProps = { ciFileContent: mockCiYml };
+
+ const createComponent = ({ props = {}, options = {}, provide = {} } = {}) => {
+ mockMutate = jest.fn().mockResolvedValue({
+ data: {
+ commitCreate: {
+ errors: [],
+ commit: {
+ sha: mockCommitNextSha,
+ },
+ },
+ },
+ });
+
+ wrapper = mount(CommitSection, {
+ propsData: { ...defaultProps, ...props },
+ provide: { ...mockProvide, ...provide },
+ data() {
+ return {
+ commitSha: mockCommitSha,
+ };
+ },
+ mocks: {
+ $apollo: {
+ mutate: mockMutate,
+ },
+ },
+ attachTo: document.body,
+ ...options,
+ });
+ };
+
+ const findCommitForm = () => wrapper.findComponent(CommitForm);
+ const findCommitBtnLoadingIcon = () =>
+ wrapper.find('[type="submit"]').findComponent(GlLoadingIcon);
+
+ const submitCommit = async ({
+ message = mockCommitMessage,
+ branch = mockDefaultBranch,
+ openMergeRequest = false,
+ } = {}) => {
+ await findCommitForm().findComponent(GlFormTextarea).setValue(message);
+ await findCommitForm().findComponent(GlFormInput).setValue(branch);
+ if (openMergeRequest) {
+ await findCommitForm().find('[data-testid="new-mr-checkbox"]').setChecked(openMergeRequest);
+ }
+ await findCommitForm().find('[type="submit"]').trigger('click');
+ // Simulate the write to local cache that occurs after a commit
+ await wrapper.setData({ commitSha: mockCommitNextSha });
+ };
+
+ const cancelCommitForm = async () => {
+ const findCancelBtn = () => wrapper.find('[type="reset"]');
+ await findCancelBtn().trigger('click');
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ mockMutate.mockReset();
+
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('when the user commits changes to the current branch', () => {
+ beforeEach(async () => {
+ await submitCommit();
+ });
+
+ it('calls the mutation with the default branch', () => {
+ expect(mockMutate).toHaveBeenCalledTimes(1);
+ expect(mockMutate).toHaveBeenCalledWith({
+ mutation: commitCreate,
+ update: expect.any(Function),
+ variables: {
+ ...mockVariables,
+ branch: mockDefaultBranch,
+ },
+ });
+ });
+
+ it('emits an event to communicate the commit was successful', () => {
+ expect(wrapper.emitted('commit')).toHaveLength(1);
+ expect(wrapper.emitted('commit')[0]).toEqual([{ type: COMMIT_SUCCESS }]);
+ });
+
+ it('shows no saving state', () => {
+ expect(findCommitBtnLoadingIcon().exists()).toBe(false);
+ });
+
+ it('a second commit submits the latest sha, keeping the form updated', async () => {
+ await submitCommit();
+
+ expect(mockMutate).toHaveBeenCalledTimes(2);
+ expect(mockMutate).toHaveBeenCalledWith({
+ mutation: commitCreate,
+ update: expect.any(Function),
+ variables: {
+ ...mockVariables,
+ lastCommitId: mockCommitNextSha,
+ branch: mockDefaultBranch,
+ },
+ });
+ });
+ });
+
+ describe('when the user commits changes to a new branch', () => {
+ const newBranch = 'new-branch';
+
+ beforeEach(async () => {
+ await submitCommit({
+ branch: newBranch,
+ });
+ });
+
+ it('calls the mutation with the new branch', () => {
+ expect(mockMutate).toHaveBeenCalledWith({
+ mutation: commitCreate,
+ update: expect.any(Function),
+ variables: {
+ ...mockVariables,
+ branch: newBranch,
+ },
+ });
+ });
+ });
+
+ describe('when the user commits changes to open a new merge request', () => {
+ const newBranch = 'new-branch';
+
+ beforeEach(async () => {
+ await submitCommit({
+ branch: newBranch,
+ openMergeRequest: true,
+ });
+ });
+
+ it('redirects to the merge request page with source and target branches', () => {
+ const branchesQuery = objectToQuery({
+ 'merge_request[source_branch]': newBranch,
+ 'merge_request[target_branch]': mockDefaultBranch,
+ });
+
+ expect(redirectTo).toHaveBeenCalledWith(`${mockNewMergeRequestPath}?${branchesQuery}`);
+ });
+ });
+
+ describe('when the commit is occurring', () => {
+ it('shows a saving state', async () => {
+ mockMutate.mockImplementationOnce(() => {
+ expect(findCommitBtnLoadingIcon().exists()).toBe(true);
+ return Promise.resolve();
+ });
+
+ await submitCommit({
+ message: mockCommitMessage,
+ branch: mockDefaultBranch,
+ openMergeRequest: false,
+ });
+ });
+ });
+
+ describe('when the commit form is cancelled', () => {
+ beforeEach(async () => {
+ createComponent();
+ });
+
+ it('emits an event so that it can be reset', async () => {
+ await cancelCommitForm();
+
+ expect(wrapper.emitted('resetContent')).toHaveLength(1);
+ });
+ });
+});
diff --git a/spec/frontend/pipeline_editor/components/header/pipeline_editor_header_spec.js b/spec/frontend/pipeline_editor/components/header/pipeline_editor_header_spec.js
new file mode 100644
index 00000000000..df15a6c8e7f
--- /dev/null
+++ b/spec/frontend/pipeline_editor/components/header/pipeline_editor_header_spec.js
@@ -0,0 +1,34 @@
+import { shallowMount } from '@vue/test-utils';
+import PipelineEditorHeader from '~/pipeline_editor/components/header/pipeline_editor_header.vue';
+import ValidationSegment from '~/pipeline_editor/components/header/validation_segment.vue';
+
+import { mockLintResponse } from '../../mock_data';
+
+describe('Pipeline editor header', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMount(PipelineEditorHeader, {
+ props: {
+ ciConfigData: mockLintResponse,
+ isCiConfigDataLoading: false,
+ },
+ });
+ };
+
+ const findValidationSegment = () => wrapper.findComponent(ValidationSegment);
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('template', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+ it('renders the validation segment', () => {
+ expect(findValidationSegment().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/pipeline_editor/components/info/validation_segment_spec.js b/spec/frontend/pipeline_editor/components/header/validation_segment_spec.js
index 8a991d82018..c5b88e4b904 100644
--- a/spec/frontend/pipeline_editor/components/info/validation_segment_spec.js
+++ b/spec/frontend/pipeline_editor/components/header/validation_segment_spec.js
@@ -3,7 +3,9 @@ import { shallowMount } from '@vue/test-utils';
import { GlIcon } from '@gitlab/ui';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import { sprintf } from '~/locale';
-import ValidationSegment, { i18n } from '~/pipeline_editor/components/info/validation_segment.vue';
+import ValidationSegment, {
+ i18n,
+} from '~/pipeline_editor/components/header/validation_segment.vue';
import { CI_CONFIG_STATUS_INVALID } from '~/pipeline_editor/constants';
import { mockYmlHelpPagePath, mergeUnwrappedCiConfig } from '../../mock_data';
@@ -29,6 +31,11 @@ describe('~/pipeline_editor/components/info/validation_segment.vue', () => {
const findLearnMoreLink = () => wrapper.findByTestId('learnMoreLink');
const findValidationMsg = () => wrapper.findByTestId('validationMsg');
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
it('shows the loading state', () => {
createComponent({ loading: true });
diff --git a/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js b/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js
new file mode 100644
index 00000000000..275e2987f38
--- /dev/null
+++ b/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js
@@ -0,0 +1,129 @@
+import { nextTick } from 'vue';
+import { shallowMount, mount } from '@vue/test-utils';
+import { GlLoadingIcon } from '@gitlab/ui';
+import PipelineGraph from '~/pipelines/components/pipeline_graph/pipeline_graph.vue';
+import PipelineEditorTabs from '~/pipeline_editor/components/pipeline_editor_tabs.vue';
+import CiLint from '~/pipeline_editor/components/lint/ci_lint.vue';
+
+import { mockLintResponse, mockCiYml } from '../mock_data';
+
+describe('Pipeline editor tabs component', () => {
+ let wrapper;
+ const MockTextEditor = {
+ template: '<div />',
+ };
+ const mockProvide = {
+ glFeatures: {
+ ciConfigVisualizationTab: true,
+ },
+ };
+
+ const createComponent = ({ props = {}, provide = {}, mountFn = shallowMount } = {}) => {
+ wrapper = mountFn(PipelineEditorTabs, {
+ propsData: {
+ ciConfigData: mockLintResponse,
+ ciFileContent: mockCiYml,
+ isCiConfigDataLoading: false,
+ ...props,
+ },
+ provide: { ...mockProvide, ...provide },
+ stubs: {
+ TextEditor: MockTextEditor,
+ },
+ });
+ };
+
+ const findEditorTab = () => wrapper.find('[data-testid="editor-tab"]');
+ const findLintTab = () => wrapper.find('[data-testid="lint-tab"]');
+ const findVisualizationTab = () => wrapper.find('[data-testid="visualization-tab"]');
+ const findCiLint = () => wrapper.findComponent(CiLint);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findPipelineGraph = () => wrapper.findComponent(PipelineGraph);
+ const findTextEditor = () => wrapper.findComponent(MockTextEditor);
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('tabs', () => {
+ describe('editor tab', () => {
+ it('displays editor only after the tab is mounted', async () => {
+ createComponent({ mountFn: mount });
+
+ expect(findTextEditor().exists()).toBe(false);
+
+ await nextTick();
+
+ expect(findTextEditor().exists()).toBe(true);
+ expect(findEditorTab().exists()).toBe(true);
+ });
+ });
+
+ describe('visualization tab', () => {
+ describe('with feature flag on', () => {
+ describe('while loading', () => {
+ beforeEach(() => {
+ createComponent({ props: { isCiConfigDataLoading: true } });
+ });
+
+ it('displays a loading icon if the lint query is loading', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ expect(findPipelineGraph().exists()).toBe(false);
+ });
+ });
+ describe('after loading', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('displays the tab and visualization', () => {
+ expect(findVisualizationTab().exists()).toBe(true);
+ expect(findPipelineGraph().exists()).toBe(true);
+ });
+ });
+ });
+
+ describe('with feature flag off', () => {
+ beforeEach(() => {
+ createComponent({
+ provide: {
+ glFeatures: { ciConfigVisualizationTab: false },
+ },
+ });
+ });
+
+ it('does not display the tab or component', () => {
+ expect(findVisualizationTab().exists()).toBe(false);
+ expect(findPipelineGraph().exists()).toBe(false);
+ });
+ });
+ });
+
+ describe('lint tab', () => {
+ describe('while loading', () => {
+ beforeEach(() => {
+ createComponent({ props: { isCiConfigDataLoading: true } });
+ });
+
+ it('displays a loading icon if the lint query is loading', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+
+ it('does not display the lint component', () => {
+ expect(findCiLint().exists()).toBe(false);
+ });
+ });
+ describe('after loading', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('displays the tab and the lint component', () => {
+ expect(findLintTab().exists()).toBe(true);
+ expect(findCiLint().exists()).toBe(true);
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/pipeline_editor/components/text_editor_spec.js b/spec/frontend/pipeline_editor/components/text_editor_spec.js
index 9221d64c44b..9bb4bb6c210 100644
--- a/spec/frontend/pipeline_editor/components/text_editor_spec.js
+++ b/spec/frontend/pipeline_editor/components/text_editor_spec.js
@@ -1,4 +1,7 @@
import { shallowMount } from '@vue/test-utils';
+
+import { EDITOR_READY_EVENT } from '~/editor/constants';
+import TextEditor from '~/pipeline_editor/components/text_editor.vue';
import {
mockCiConfigPath,
mockCiYml,
@@ -7,9 +10,7 @@ import {
mockProjectNamespace,
} from '../mock_data';
-import TextEditor from '~/pipeline_editor/components/text_editor.vue';
-
-describe('~/pipeline_editor/components/text_editor.vue', () => {
+describe('Pipeline Editor | Text editor component', () => {
let wrapper;
let editorReadyListener;
@@ -20,7 +21,7 @@ describe('~/pipeline_editor/components/text_editor.vue', () => {
template: '<div/>',
props: ['value', 'fileName'],
mounted() {
- this.$emit('editor-ready');
+ this.$emit(EDITOR_READY_EVENT);
},
methods: {
getEditor: () => ({
@@ -35,16 +36,19 @@ describe('~/pipeline_editor/components/text_editor.vue', () => {
provide: {
projectPath: mockProjectPath,
projectNamespace: mockProjectNamespace,
- },
- propsData: {
ciConfigPath: mockCiConfigPath,
- commitSha: mockCommitSha,
},
attrs: {
value: mockCiYml,
},
+ // Simulate graphQL client query result
+ data() {
+ return {
+ commitSha: mockCommitSha,
+ };
+ },
listeners: {
- 'editor-ready': editorReadyListener,
+ [EDITOR_READY_EVENT]: editorReadyListener,
},
stubs: {
EditorLite: MockEditorLite,
@@ -53,41 +57,64 @@ describe('~/pipeline_editor/components/text_editor.vue', () => {
});
};
- const findEditor = () => wrapper.find(MockEditorLite);
+ const findEditor = () => wrapper.findComponent(MockEditorLite);
- beforeEach(() => {
- editorReadyListener = jest.fn();
- mockUse = jest.fn();
- mockRegisterCiSchema = jest.fn();
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
- createComponent();
+ mockUse.mockClear();
+ mockRegisterCiSchema.mockClear();
});
- it('contains an editor', () => {
- expect(findEditor().exists()).toBe(true);
- });
+ describe('template', () => {
+ beforeEach(() => {
+ editorReadyListener = jest.fn();
+ mockUse = jest.fn();
+ mockRegisterCiSchema = jest.fn();
- it('editor contains the value provided', () => {
- expect(findEditor().props('value')).toBe(mockCiYml);
- });
+ createComponent();
+ });
- it('editor is configured for the CI config path', () => {
- expect(findEditor().props('fileName')).toBe(mockCiConfigPath);
- });
+ it('contains an editor', () => {
+ expect(findEditor().exists()).toBe(true);
+ });
- it('editor is configured with syntax highligting', async () => {
- expect(mockUse).toHaveBeenCalledTimes(1);
- expect(mockRegisterCiSchema).toHaveBeenCalledTimes(1);
- expect(mockRegisterCiSchema).toHaveBeenCalledWith({
- projectNamespace: mockProjectNamespace,
- projectPath: mockProjectPath,
- ref: mockCommitSha,
+ it('editor contains the value provided', () => {
+ expect(findEditor().props('value')).toBe(mockCiYml);
+ });
+
+ it('editor is configured for the CI config path', () => {
+ expect(findEditor().props('fileName')).toBe(mockCiConfigPath);
+ });
+
+ it('bubbles up events', () => {
+ findEditor().vm.$emit(EDITOR_READY_EVENT);
+
+ expect(editorReadyListener).toHaveBeenCalled();
});
});
- it('bubbles up events', () => {
- findEditor().vm.$emit('editor-ready');
+ describe('register CI schema', () => {
+ beforeEach(async () => {
+ createComponent();
+
+ // Since the editor will have already mounted, the event will have fired.
+ // To ensure we properly test this, we clear the mock and re-emit the event.
+ mockRegisterCiSchema.mockClear();
+ mockUse.mockClear();
- expect(editorReadyListener).toHaveBeenCalled();
+ findEditor().vm.$emit(EDITOR_READY_EVENT);
+ });
+
+ it('configures the editor with syntax highlighting', async () => {
+ expect(mockUse).toHaveBeenCalledTimes(1);
+ expect(mockRegisterCiSchema).toHaveBeenCalledTimes(1);
+ expect(mockRegisterCiSchema).toHaveBeenCalledWith({
+ projectNamespace: mockProjectNamespace,
+ projectPath: mockProjectPath,
+ ref: mockCommitSha,
+ });
+ });
});
});
diff --git a/spec/frontend/pipeline_editor/components/ui/confirm_unsaved_changes_dialog_spec.js b/spec/frontend/pipeline_editor/components/ui/confirm_unsaved_changes_dialog_spec.js
new file mode 100644
index 00000000000..44fda2812d8
--- /dev/null
+++ b/spec/frontend/pipeline_editor/components/ui/confirm_unsaved_changes_dialog_spec.js
@@ -0,0 +1,42 @@
+import { shallowMount } from '@vue/test-utils';
+import ConfirmDialog from '~/pipeline_editor/components/ui/confirm_unsaved_changes_dialog.vue';
+
+describe('pipeline_editor/components/ui/confirm_unsaved_changes_dialog', () => {
+ let beforeUnloadEvent;
+ let setDialogContent;
+ let wrapper;
+
+ const createComponent = (propsData = {}) => {
+ wrapper = shallowMount(ConfirmDialog, {
+ propsData,
+ });
+ };
+
+ beforeEach(() => {
+ beforeUnloadEvent = new Event('beforeunload');
+ jest.spyOn(beforeUnloadEvent, 'preventDefault');
+ setDialogContent = jest.spyOn(beforeUnloadEvent, 'returnValue', 'set');
+ });
+
+ afterEach(() => {
+ beforeUnloadEvent.preventDefault.mockRestore();
+ setDialogContent.mockRestore();
+ wrapper.destroy();
+ });
+
+ it('shows confirmation dialog when there are unsaved changes', () => {
+ createComponent({ hasUnsavedChanges: true });
+ window.dispatchEvent(beforeUnloadEvent);
+
+ expect(beforeUnloadEvent.preventDefault).toHaveBeenCalled();
+ expect(setDialogContent).toHaveBeenCalledWith('');
+ });
+
+ it('does not show confirmation dialog when there are no unsaved changes', () => {
+ createComponent({ hasUnsavedChanges: false });
+ window.dispatchEvent(beforeUnloadEvent);
+
+ expect(beforeUnloadEvent.preventDefault).not.toHaveBeenCalled();
+ expect(setDialogContent).not.toHaveBeenCalled();
+ });
+});
diff --git a/spec/frontend/pipeline_editor/graphql/resolvers_spec.js b/spec/frontend/pipeline_editor/graphql/resolvers_spec.js
index 3e008527415..bee9f53814e 100644
--- a/spec/frontend/pipeline_editor/graphql/resolvers_spec.js
+++ b/spec/frontend/pipeline_editor/graphql/resolvers_spec.js
@@ -1,5 +1,8 @@
import MockAdapter from 'axios-mock-adapter';
import Api from '~/api';
+import httpStatus from '~/lib/utils/http_status';
+import axios from '~/lib/utils/axios_utils';
+import { resolvers } from '~/pipeline_editor/graphql/resolvers';
import {
mockCiConfigPath,
mockCiYml,
@@ -7,9 +10,6 @@ import {
mockLintResponse,
mockProjectFullPath,
} from '../mock_data';
-import httpStatus from '~/lib/utils/http_status';
-import axios from '~/lib/utils/axios_utils';
-import { resolvers } from '~/pipeline_editor/graphql/resolvers';
jest.mock('~/api', () => {
return {
diff --git a/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js b/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
index d6b90900600..7ffdf1815ba 100644
--- a/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
+++ b/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
@@ -1,119 +1,71 @@
-import { nextTick } from 'vue';
-import { mount, shallowMount, createLocalVue } from '@vue/test-utils';
-import { GlAlert, GlButton, GlFormInput, GlFormTextarea, GlLoadingIcon, GlTabs } from '@gitlab/ui';
-import waitForPromises from 'helpers/wait_for_promises';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { GlAlert, GlButton, GlLoadingIcon, GlTabs } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
+import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
+import TextEditor from '~/pipeline_editor/components/text_editor.vue';
import httpStatusCodes from '~/lib/utils/http_status';
-import { objectToQuery, redirectTo, refreshCurrentPage } from '~/lib/utils/url_utility';
+
+import { COMMIT_SUCCESS, COMMIT_FAILURE, LOAD_FAILURE_UNKNOWN } from '~/pipeline_editor/constants';
+import CommitForm from '~/pipeline_editor/components/commit/commit_form.vue';
+import getCiConfigData from '~/pipeline_editor/graphql/queries/ci_config.graphql';
+import PipelineEditorApp from '~/pipeline_editor/pipeline_editor_app.vue';
+import PipelineEditorHome from '~/pipeline_editor/pipeline_editor_home.vue';
import {
mockCiConfigPath,
mockCiConfigQueryResponse,
mockCiYml,
- mockCommitSha,
- mockCommitNextSha,
- mockCommitMessage,
mockDefaultBranch,
- mockProjectPath,
mockProjectFullPath,
- mockProjectNamespace,
- mockNewMergeRequestPath,
} from './mock_data';
-import CommitForm from '~/pipeline_editor/components/commit/commit_form.vue';
-import getCiConfigData from '~/pipeline_editor/graphql/queries/ci_config.graphql';
-import EditorTab from '~/pipeline_editor/components/ui/editor_tab.vue';
-import PipelineGraph from '~/pipelines/components/pipeline_graph/pipeline_graph.vue';
-import PipelineEditorApp from '~/pipeline_editor/pipeline_editor_app.vue';
-import TextEditor from '~/pipeline_editor/components/text_editor.vue';
-
const localVue = createLocalVue();
localVue.use(VueApollo);
-jest.mock('~/lib/utils/url_utility', () => ({
- redirectTo: jest.fn(),
- refreshCurrentPage: jest.fn(),
- objectToQuery: jest.requireActual('~/lib/utils/url_utility').objectToQuery,
- mergeUrlParams: jest.requireActual('~/lib/utils/url_utility').mergeUrlParams,
-}));
-
const MockEditorLite = {
template: '<div/>',
};
const mockProvide = {
+ ciConfigPath: mockCiConfigPath,
+ defaultBranch: mockDefaultBranch,
projectFullPath: mockProjectFullPath,
- projectPath: mockProjectPath,
- projectNamespace: mockProjectNamespace,
- glFeatures: {
- ciConfigVisualizationTab: true,
- },
};
-describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
+describe('Pipeline editor app component', () => {
let wrapper;
let mockApollo;
let mockBlobContentData;
let mockCiConfigData;
- let mockMutate;
-
- const createComponent = ({
- props = {},
- blobLoading = false,
- lintLoading = false,
- options = {},
- mountFn = shallowMount,
- provide = mockProvide,
- } = {}) => {
- mockMutate = jest.fn().mockResolvedValue({
- data: {
- commitCreate: {
- errors: [],
- commit: {
- sha: mockCommitNextSha,
- },
- },
- },
- });
- wrapper = mountFn(PipelineEditorApp, {
- propsData: {
- ciConfigPath: mockCiConfigPath,
- commitSha: mockCommitSha,
- defaultBranch: mockDefaultBranch,
- newMergeRequestPath: mockNewMergeRequestPath,
- ...props,
- },
- provide,
+ const createComponent = ({ blobLoading = false, options = {} } = {}) => {
+ wrapper = shallowMount(PipelineEditorApp, {
+ provide: mockProvide,
stubs: {
GlTabs,
GlButton,
CommitForm,
EditorLite: MockEditorLite,
- TextEditor,
},
mocks: {
$apollo: {
queries: {
- content: {
+ initialCiFileContent: {
loading: blobLoading,
},
ciConfigData: {
- loading: lintLoading,
+ loading: false,
},
},
- mutate: mockMutate,
},
},
- // attachTo is required for input/submit events
- attachTo: mountFn === mount ? document.body : null,
...options,
});
};
- const createComponentWithApollo = ({ props = {}, mountFn = shallowMount } = {}) => {
+ const createComponentWithApollo = ({ props = {} } = {}) => {
const handlers = [[getCiConfigData, mockCiConfigData]];
const resolvers = {
Query: {
@@ -134,18 +86,13 @@ describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
apolloProvider: mockApollo,
};
- createComponent({ props, options }, mountFn);
+ createComponent({ props, options });
};
- const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
- const findAlert = () => wrapper.find(GlAlert);
- const findTabAt = (i) => wrapper.findAll(EditorTab).at(i);
- const findVisualizationTab = () => wrapper.find('[data-testid="visualization-tab"]');
- const findTextEditor = () => wrapper.find(TextEditor);
- const findEditorLite = () => wrapper.find(MockEditorLite);
- const findCommitForm = () => wrapper.find(CommitForm);
- const findPipelineGraph = () => wrapper.find(PipelineGraph);
- const findCommitBtnLoadingIcon = () => wrapper.find('[type="submit"]').find(GlLoadingIcon);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findEditorHome = () => wrapper.findComponent(PipelineEditorHome);
+ const findTextEditor = () => wrapper.findComponent(TextEditor);
beforeEach(() => {
mockBlobContentData = jest.fn();
@@ -155,9 +102,6 @@ describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
afterEach(() => {
mockBlobContentData.mockReset();
mockCiConfigData.mockReset();
- refreshCurrentPage.mockReset();
- redirectTo.mockReset();
- mockMutate.mockReset();
wrapper.destroy();
wrapper = null;
@@ -170,245 +114,6 @@ describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
expect(findTextEditor().exists()).toBe(false);
});
- describe('tabs', () => {
- describe('editor tab', () => {
- it('displays editor only after the tab is mounted', async () => {
- createComponent({ mountFn: mount });
-
- expect(findTabAt(0).find(TextEditor).exists()).toBe(false);
-
- await nextTick();
-
- expect(findTabAt(0).find(TextEditor).exists()).toBe(true);
- });
- });
-
- describe('visualization tab', () => {
- describe('with feature flag on', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('display the tab', () => {
- expect(findVisualizationTab().exists()).toBe(true);
- });
-
- it('displays a loading icon if the lint query is loading', () => {
- createComponent({ lintLoading: true });
-
- expect(findLoadingIcon().exists()).toBe(true);
- expect(findPipelineGraph().exists()).toBe(false);
- });
- });
-
- describe('with feature flag off', () => {
- beforeEach(() => {
- createComponent({
- provide: {
- ...mockProvide,
- glFeatures: { ciConfigVisualizationTab: false },
- },
- });
- });
-
- it('does not display the tab', () => {
- expect(findVisualizationTab().exists()).toBe(false);
- });
- });
- });
- });
-
- describe('when data is set', () => {
- beforeEach(async () => {
- createComponent({ mountFn: mount });
-
- wrapper.setData({
- content: mockCiYml,
- contentModel: mockCiYml,
- });
-
- await waitForPromises();
- });
-
- it('displays content after the query loads', () => {
- expect(findLoadingIcon().exists()).toBe(false);
-
- expect(findEditorLite().attributes('value')).toBe(mockCiYml);
- expect(findEditorLite().attributes('file-name')).toBe(mockCiConfigPath);
- });
-
- it('configures text editor', () => {
- expect(findTextEditor().props('commitSha')).toBe(mockCommitSha);
- });
-
- describe('commit form', () => {
- const mockVariables = {
- content: mockCiYml,
- filePath: mockCiConfigPath,
- lastCommitId: mockCommitSha,
- message: mockCommitMessage,
- projectPath: mockProjectFullPath,
- startBranch: mockDefaultBranch,
- };
-
- const findInForm = (selector) => findCommitForm().find(selector);
-
- const submitCommit = async ({
- message = mockCommitMessage,
- branch = mockDefaultBranch,
- openMergeRequest = false,
- } = {}) => {
- await findInForm(GlFormTextarea).setValue(message);
- await findInForm(GlFormInput).setValue(branch);
- if (openMergeRequest) {
- await findInForm('[data-testid="new-mr-checkbox"]').setChecked(openMergeRequest);
- }
- await findInForm('[type="submit"]').trigger('click');
- };
-
- const cancelCommitForm = async () => {
- const findCancelBtn = () => wrapper.find('[type="reset"]');
- await findCancelBtn().trigger('click');
- };
-
- describe('when the user commits changes to the current branch', () => {
- beforeEach(async () => {
- await submitCommit();
- });
-
- it('calls the mutation with the default branch', () => {
- expect(mockMutate).toHaveBeenCalledWith({
- mutation: expect.any(Object),
- variables: {
- ...mockVariables,
- branch: mockDefaultBranch,
- },
- });
- });
-
- it('displays an alert to indicate success', () => {
- expect(findAlert().text()).toMatchInterpolatedText(
- 'Your changes have been successfully committed.',
- );
- });
-
- it('shows no saving state', () => {
- expect(findCommitBtnLoadingIcon().exists()).toBe(false);
- });
-
- it('a second commit submits the latest sha, keeping the form updated', async () => {
- await submitCommit();
-
- expect(mockMutate).toHaveBeenCalledTimes(2);
- expect(mockMutate).toHaveBeenLastCalledWith({
- mutation: expect.any(Object),
- variables: {
- ...mockVariables,
- lastCommitId: mockCommitNextSha,
- branch: mockDefaultBranch,
- },
- });
- });
- });
-
- describe('when the user commits changes to a new branch', () => {
- const newBranch = 'new-branch';
-
- beforeEach(async () => {
- await submitCommit({
- branch: newBranch,
- });
- });
-
- it('calls the mutation with the new branch', () => {
- expect(mockMutate).toHaveBeenCalledWith({
- mutation: expect.any(Object),
- variables: {
- ...mockVariables,
- branch: newBranch,
- },
- });
- });
- });
-
- describe('when the user commits changes to open a new merge request', () => {
- const newBranch = 'new-branch';
-
- beforeEach(async () => {
- await submitCommit({
- branch: newBranch,
- openMergeRequest: true,
- });
- });
-
- it('redirects to the merge request page with source and target branches', () => {
- const branchesQuery = objectToQuery({
- 'merge_request[source_branch]': newBranch,
- 'merge_request[target_branch]': mockDefaultBranch,
- });
-
- expect(redirectTo).toHaveBeenCalledWith(`${mockNewMergeRequestPath}?${branchesQuery}`);
- });
- });
-
- describe('when the commit is ocurring', () => {
- it('shows a saving state', async () => {
- await mockMutate.mockImplementationOnce(() => {
- expect(findCommitBtnLoadingIcon().exists()).toBe(true);
- return Promise.resolve();
- });
-
- await submitCommit({
- message: mockCommitMessage,
- branch: mockDefaultBranch,
- openMergeRequest: false,
- });
- });
- });
-
- describe('when the commit fails', () => {
- it('shows an error message', async () => {
- mockMutate.mockRejectedValueOnce(new Error('commit failed'));
-
- await submitCommit();
-
- await waitForPromises();
-
- expect(findAlert().text()).toMatchInterpolatedText(
- 'The GitLab CI configuration could not be updated. commit failed',
- );
- });
-
- it('shows an unkown error', async () => {
- mockMutate.mockRejectedValueOnce();
-
- await submitCommit();
-
- await waitForPromises();
-
- expect(findAlert().text()).toMatchInterpolatedText(
- 'The GitLab CI configuration could not be updated.',
- );
- });
- });
-
- describe('when the commit form is cancelled', () => {
- const otherContent = 'other content';
-
- beforeEach(async () => {
- findTextEditor().vm.$emit('input', otherContent);
- await nextTick();
- });
-
- it('content is restored after cancel is called', async () => {
- await cancelCommitForm();
-
- expect(findEditorLite().attributes('value')).toBe(mockCiYml);
- });
- });
- });
- });
-
describe('when queries are called', () => {
beforeEach(() => {
mockBlobContentData.mockResolvedValue(mockCiYml);
@@ -422,14 +127,12 @@ describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
await waitForPromises();
});
- it('shows editor and commit form', () => {
- expect(findEditorLite().exists()).toBe(true);
- expect(findTextEditor().exists()).toBe(true);
+ it('shows pipeline editor home component', () => {
+ expect(findEditorHome().exists()).toBe(true);
});
- it('no error is shown when data is set', async () => {
+ it('no error is shown when data is set', () => {
expect(findAlert().exists()).toBe(false);
- expect(findEditorLite().attributes('value')).toBe(mockCiYml);
});
it('ci config query is called with correct variables', async () => {
@@ -445,10 +148,10 @@ describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
});
describe('when no file exists', () => {
- const expectedAlertMsg =
+ const noFileAlertMsg =
'There is no .gitlab-ci.yml file in this repository, please add one and visit the Pipeline Editor again.';
- it('shows a 404 error message and does not show editor or commit form', async () => {
+ it('shows a 404 error message and does not show editor home component', async () => {
mockBlobContentData.mockRejectedValueOnce({
response: {
status: httpStatusCodes.NOT_FOUND,
@@ -458,12 +161,11 @@ describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
await waitForPromises();
- expect(findAlert().text()).toBe(expectedAlertMsg);
- expect(findEditorLite().exists()).toBe(false);
- expect(findTextEditor().exists()).toBe(false);
+ expect(findAlert().text()).toBe(noFileAlertMsg);
+ expect(findEditorHome().exists()).toBe(false);
});
- it('shows a 400 error message and does not show editor or commit form', async () => {
+ it('shows a 400 error message and does not show editor home component', async () => {
mockBlobContentData.mockRejectedValueOnce({
response: {
status: httpStatusCodes.BAD_REQUEST,
@@ -473,9 +175,8 @@ describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
await waitForPromises();
- expect(findAlert().text()).toBe(expectedAlertMsg);
- expect(findEditorLite().exists()).toBe(false);
- expect(findTextEditor().exists()).toBe(false);
+ expect(findAlert().text()).toBe(noFileAlertMsg);
+ expect(findEditorHome().exists()).toBe(false);
});
  it('shows an unknown error message', async () => {
@@ -483,9 +184,60 @@ describe('~/pipeline_editor/pipeline_editor_app.vue', () => {
createComponentWithApollo();
await waitForPromises();
- expect(findAlert().text()).toBe('The CI configuration was not loaded, please try again.');
- expect(findEditorLite().exists()).toBe(true);
- expect(findTextEditor().exists()).toBe(true);
+ expect(findAlert().text()).toBe(wrapper.vm.$options.errorTexts[LOAD_FAILURE_UNKNOWN]);
+ expect(findEditorHome().exists()).toBe(true);
+ });
+ });
+
+ describe('when the user commits', () => {
+ const updateFailureMessage = 'The GitLab CI configuration could not be updated.';
+
+ describe('and the commit mutation succeeds', () => {
+ beforeEach(() => {
+ createComponent();
+
+ findEditorHome().vm.$emit('commit', { type: COMMIT_SUCCESS });
+ });
+
+ it('shows a confirmation message', () => {
+ expect(findAlert().text()).toBe(wrapper.vm.$options.successTexts[COMMIT_SUCCESS]);
+ });
+ });
+ describe('and the commit mutation fails', () => {
+ const commitFailedReasons = ['Commit failed'];
+
+ beforeEach(() => {
+ createComponent();
+
+ findEditorHome().vm.$emit('showError', {
+ type: COMMIT_FAILURE,
+ reasons: commitFailedReasons,
+ });
+ });
+
+ it('shows an error message', () => {
+ expect(findAlert().text()).toMatchInterpolatedText(
+ `${updateFailureMessage} ${commitFailedReasons[0]}`,
+ );
+ });
+ });
+ describe('when an unknown error occurs', () => {
+ const unknownReasons = ['Commit failed'];
+
+ beforeEach(() => {
+ createComponent();
+
+ findEditorHome().vm.$emit('showError', {
+ type: COMMIT_FAILURE,
+ reasons: unknownReasons,
+ });
+ });
+
+ it('shows an error message', () => {
+ expect(findAlert().text()).toMatchInterpolatedText(
+ `${updateFailureMessage} ${unknownReasons[0]}`,
+ );
+ });
});
});
});
diff --git a/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js b/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js
new file mode 100644
index 00000000000..6ef1b3cdd1c
--- /dev/null
+++ b/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js
@@ -0,0 +1,50 @@
+import { shallowMount } from '@vue/test-utils';
+
+import PipelineEditorHome from '~/pipeline_editor/pipeline_editor_home.vue';
+import PipelineEditorTabs from '~/pipeline_editor/components/pipeline_editor_tabs.vue';
+import CommitSection from '~/pipeline_editor/components/commit/commit_section.vue';
+import PipelineEditorHeader from '~/pipeline_editor/components/header/pipeline_editor_header.vue';
+
+import { mockLintResponse, mockCiYml } from './mock_data';
+
+describe('Pipeline editor home wrapper', () => {
+ let wrapper;
+
+ const createComponent = ({ props = {} } = {}) => {
+ wrapper = shallowMount(PipelineEditorHome, {
+ propsData: {
+ ciConfigData: mockLintResponse,
+ ciFileContent: mockCiYml,
+ isCiConfigDataLoading: false,
+ ...props,
+ },
+ });
+ };
+
+ const findPipelineEditorHeader = () => wrapper.findComponent(PipelineEditorHeader);
+ const findPipelineEditorTabs = () => wrapper.findComponent(PipelineEditorTabs);
+ const findCommitSection = () => wrapper.findComponent(CommitSection);
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('renders', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('shows the pipeline editor header', () => {
+ expect(findPipelineEditorHeader().exists()).toBe(true);
+ });
+
+ it('shows the pipeline editor tabs', () => {
+ expect(findPipelineEditorTabs().exists()).toBe(true);
+ });
+
+ it('shows the commit section', () => {
+ expect(findCommitSection().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js b/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js
index 421ad9f4939..3f1cf67127e 100644
--- a/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js
+++ b/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js
@@ -5,6 +5,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import httpStatusCodes from '~/lib/utils/http_status';
import axios from '~/lib/utils/axios_utils';
import PipelineNewForm from '~/pipeline_new/components/pipeline_new_form.vue';
+import { redirectTo } from '~/lib/utils/url_utility';
import {
mockBranches,
mockTags,
@@ -13,7 +14,6 @@ import {
mockProjectId,
mockError,
} from '../mock_data';
-import { redirectTo } from '~/lib/utils/url_utility';
jest.mock('~/lib/utils/url_utility', () => ({
redirectTo: jest.fn(),
@@ -34,6 +34,7 @@ describe('Pipeline New Form', () => {
const findForm = () => wrapper.find(GlForm);
const findDropdown = () => wrapper.find(GlDropdown);
const findDropdownItems = () => wrapper.findAll(GlDropdownItem);
+ const findSubmitButton = () => wrapper.find('[data-testid="run_pipeline_button"]');
const findVariableRows = () => wrapper.findAll('[data-testid="ci-variable-row"]');
const findRemoveIcons = () => wrapper.findAll('[data-testid="remove-ci-variable-row"]');
const findKeyInputs = () => wrapper.findAll('[data-testid="pipeline-form-ci-variable-key"]');
@@ -155,6 +156,18 @@ describe('Pipeline New Form', () => {
await waitForPromises();
});
+
+ it('disables the submit button immediately after submitting', async () => {
+ createComponent();
+
+ expect(findSubmitButton().props('disabled')).toBe(false);
+
+ findForm().vm.$emit('submit', dummySubmitEvent);
+ await waitForPromises();
+
+ expect(findSubmitButton().props('disabled')).toBe(true);
+ });
+
it('creates pipeline with full ref and variables', async () => {
createComponent();
@@ -167,6 +180,7 @@ describe('Pipeline New Form', () => {
expect(getExpectedPostParams().ref).toEqual(wrapper.vm.$data.refValue.fullName);
expect(redirectTo).toHaveBeenCalledWith(`${pipelinesPath}/${postResponse.id}`);
});
+
it('creates a pipeline with short ref and variables', async () => {
// query params are used
createComponent('', mockParams);
@@ -225,42 +239,29 @@ describe('Pipeline New Form', () => {
});
});
- describe('when feature flag new_pipeline_form_prefilled_vars is enabled', () => {
- let origGon;
-
+ describe('when yml defines a variable', () => {
const mockYmlKey = 'yml_var';
const mockYmlValue = 'yml_var_val';
const mockYmlDesc = 'A var from yml.';
- beforeAll(() => {
- origGon = window.gon;
- window.gon = { features: { newPipelineFormPrefilledVars: true } };
- });
-
- afterAll(() => {
- window.gon = origGon;
- });
-
- describe('loading state', () => {
- it('loading icon is shown when content is requested and hidden when received', async () => {
- createComponent('', mockParams, mount);
+ it('loading icon is shown when content is requested and hidden when received', async () => {
+ createComponent('', mockParams, mount);
- mock.onGet(configVariablesPath).reply(httpStatusCodes.OK, {
- [mockYmlKey]: {
- value: mockYmlValue,
- description: mockYmlDesc,
- },
- });
+ mock.onGet(configVariablesPath).reply(httpStatusCodes.OK, {
+ [mockYmlKey]: {
+ value: mockYmlValue,
+ description: mockYmlDesc,
+ },
+ });
- expect(findLoadingIcon().exists()).toBe(true);
+ expect(findLoadingIcon().exists()).toBe(true);
- await waitForPromises();
+ await waitForPromises();
- expect(findLoadingIcon().exists()).toBe(false);
- });
+ expect(findLoadingIcon().exists()).toBe(false);
});
- describe('when yml defines a variable with description', () => {
+ describe('with description', () => {
beforeEach(async () => {
createComponent('', mockParams, mount);
@@ -302,7 +303,7 @@ describe('Pipeline New Form', () => {
});
});
- describe('when yml defines a variable without description', () => {
+ describe('without description', () => {
beforeEach(async () => {
createComponent('', mockParams, mount);
@@ -325,31 +326,55 @@ describe('Pipeline New Form', () => {
describe('Form errors and warnings', () => {
beforeEach(() => {
createComponent();
+ });
- mock.onPost(pipelinesPath).reply(httpStatusCodes.BAD_REQUEST, mockError);
+ describe('when the error response can be handled', () => {
+ beforeEach(async () => {
+ mock.onPost(pipelinesPath).reply(httpStatusCodes.BAD_REQUEST, mockError);
- findForm().vm.$emit('submit', dummySubmitEvent);
+ findForm().vm.$emit('submit', dummySubmitEvent);
- return waitForPromises();
- });
+ await waitForPromises();
+ });
- it('shows both error and warning', () => {
- expect(findErrorAlert().exists()).toBe(true);
- expect(findWarningAlert().exists()).toBe(true);
- });
+ it('shows both error and warning', () => {
+ expect(findErrorAlert().exists()).toBe(true);
+ expect(findWarningAlert().exists()).toBe(true);
+ });
- it('shows the correct error', () => {
- expect(findErrorAlert().text()).toBe(mockError.errors[0]);
- });
+ it('shows the correct error', () => {
+ expect(findErrorAlert().text()).toBe(mockError.errors[0]);
+ });
- it('shows the correct warning title', () => {
- const { length } = mockError.warnings;
+ it('shows the correct warning title', () => {
+ const { length } = mockError.warnings;
- expect(findWarningAlertSummary().attributes('message')).toBe(`${length} warnings found:`);
+ expect(findWarningAlertSummary().attributes('message')).toBe(`${length} warnings found:`);
+ });
+
+ it('shows the correct amount of warnings', () => {
+ expect(findWarnings()).toHaveLength(mockError.warnings.length);
+ });
+
+ it('re-enables the submit button', () => {
+ expect(findSubmitButton().props('disabled')).toBe(false);
+ });
});
- it('shows the correct amount of warnings', () => {
- expect(findWarnings()).toHaveLength(mockError.warnings.length);
+ describe('when the error response cannot be handled', () => {
+ beforeEach(async () => {
+ mock
+ .onPost(pipelinesPath)
+ .reply(httpStatusCodes.INTERNAL_SERVER_ERROR, 'something went wrong');
+
+ findForm().vm.$emit('submit', dummySubmitEvent);
+
+ await waitForPromises();
+ });
+
+ it('re-enables the submit button', () => {
+ expect(findSubmitButton().props('disabled')).toBe(false);
+ });
});
});
});
diff --git a/spec/frontend/pipelines/blank_state_spec.js b/spec/frontend/pipelines/blank_state_spec.js
index c09d9232569..dae24efd041 100644
--- a/spec/frontend/pipelines/blank_state_spec.js
+++ b/spec/frontend/pipelines/blank_state_spec.js
@@ -1,25 +1,20 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import component from '~/pipelines/components/pipelines_list/blank_state.vue';
+import { mount } from '@vue/test-utils';
+import { getByText } from '@testing-library/dom';
+import BlankState from '~/pipelines/components/pipelines_list/blank_state.vue';
describe('Pipelines Blank State', () => {
- let vm;
- let Component;
-
- beforeEach(() => {
- Component = Vue.extend(component);
-
- vm = mountComponent(Component, {
+ const wrapper = mount(BlankState, {
+ propsData: {
svgPath: 'foo',
message: 'Blank State',
- });
+ },
});
it('should render svg', () => {
- expect(vm.$el.querySelector('.svg-content img').getAttribute('src')).toEqual('foo');
+ expect(wrapper.find('.svg-content img').attributes('src')).toEqual('foo');
});
it('should render message', () => {
- expect(vm.$el.querySelector('h4').textContent.trim()).toEqual('Blank State');
+ expect(getByText(wrapper.element, /Blank State/i)).toBeTruthy();
});
});
diff --git a/spec/frontend/pipelines/graph/graph_component_legacy_spec.js b/spec/frontend/pipelines/graph/graph_component_legacy_spec.js
index 840b1f8baf5..1bde9cae7d2 100644
--- a/spec/frontend/pipelines/graph/graph_component_legacy_spec.js
+++ b/spec/frontend/pipelines/graph/graph_component_legacy_spec.js
@@ -6,9 +6,9 @@ import PipelineStore from '~/pipelines/stores/pipeline_store';
import GraphComponentLegacy from '~/pipelines/components/graph/graph_component_legacy.vue';
import StageColumnComponentLegacy from '~/pipelines/components/graph/stage_column_component_legacy.vue';
import LinkedPipelinesColumnLegacy from '~/pipelines/components/graph/linked_pipelines_column_legacy.vue';
+import PipelinesMediator from '~/pipelines/pipeline_details_mediator';
import graphJSON from './mock_data_legacy';
import linkedPipelineJSON from './linked_pipelines_mock_data';
-import PipelinesMediator from '~/pipelines/pipeline_details_mediator';
describe('graph component', () => {
let store;
diff --git a/spec/frontend/pipelines/graph/graph_component_spec.js b/spec/frontend/pipelines/graph/graph_component_spec.js
index cfc3b7af282..e629396e699 100644
--- a/spec/frontend/pipelines/graph/graph_component_spec.js
+++ b/spec/frontend/pipelines/graph/graph_component_spec.js
@@ -22,6 +22,13 @@ describe('graph component', () => {
pipeline: generateResponse(mockPipelineResponse, 'root/fungi-xoxo'),
};
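+ // Shared default data: gives the component non-zero container measurements in every test.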
+ const defaultData = {
+ measurements: {
+ width: 800,
+ height: 800,
+ },
+ };
+
const createComponent = ({
data = {},
mountFn = shallowMount,
@@ -34,7 +41,10 @@ describe('graph component', () => {
...props,
},
data() {
- return { ...data };
+ return {
+ ...defaultData,
+ ...data,
+ };
},
provide: {
dataMethod: GRAPHQL,
diff --git a/spec/frontend/pipelines/graph/linked_pipeline_spec.js b/spec/frontend/pipelines/graph/linked_pipeline_spec.js
index fb005d628a9..fcae4406950 100644
--- a/spec/frontend/pipelines/graph/linked_pipeline_spec.js
+++ b/spec/frontend/pipelines/graph/linked_pipeline_spec.js
@@ -2,8 +2,9 @@ import { mount } from '@vue/test-utils';
import { GlButton, GlLoadingIcon } from '@gitlab/ui';
import LinkedPipelineComponent from '~/pipelines/components/graph/linked_pipeline.vue';
import CiStatus from '~/vue_shared/components/ci_icon.vue';
-import mockData from './linked_pipelines_mock_data';
import { UPSTREAM, DOWNSTREAM } from '~/pipelines/components/graph/constants';
+import { BV_HIDE_TOOLTIP } from '~/lib/utils/constants';
+import mockData from './linked_pipelines_mock_data';
const mockPipeline = mockData.triggered[0];
const validTriggeredPipelineId = mockPipeline.project.id;
@@ -212,11 +213,11 @@ describe('Linked pipeline', () => {
expect(wrapper.emitted().pipelineClicked).toBeTruthy();
});
- it('should emit `bv::hide::tooltip` to close the tooltip', () => {
+ it(`should emit ${BV_HIDE_TOOLTIP} to close the tooltip`, () => {
jest.spyOn(wrapper.vm.$root, '$emit');
findButton().trigger('click');
- expect(wrapper.vm.$root.$emit.mock.calls[0]).toEqual(['bv::hide::tooltip']);
+ expect(wrapper.vm.$root.$emit.mock.calls[0]).toEqual([BV_HIDE_TOOLTIP]);
});
it('should emit downstreamHovered with job name on mouseover', () => {
diff --git a/spec/frontend/pipelines/graph/stage_column_component_spec.js b/spec/frontend/pipelines/graph/stage_column_component_spec.js
index 202e25ccda3..16dc70a63a5 100644
--- a/spec/frontend/pipelines/graph/stage_column_component_spec.js
+++ b/spec/frontend/pipelines/graph/stage_column_component_spec.js
@@ -68,6 +68,10 @@ describe('stage column component', () => {
it('should render the provided groups', () => {
expect(findAllStageColumnGroups().length).toBe(mockGroups.length);
});
+
+ it('should emit updateMeasurements event on mount', () => {
+ expect(wrapper.emitted().updateMeasurements).toHaveLength(1);
+ });
});
describe('when job notifies action is complete', () => {
diff --git a/spec/frontend/pipelines/graph_shared/__snapshots__/links_inner_spec.js.snap b/spec/frontend/pipelines/graph_shared/__snapshots__/links_inner_spec.js.snap
new file mode 100644
index 00000000000..cf2b66dea5f
--- /dev/null
+++ b/spec/frontend/pipelines/graph_shared/__snapshots__/links_inner_spec.js.snap
@@ -0,0 +1,23 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Links Inner component with a large number of needs matches snapshot and has expected path 1`] = `
+"<div class=\\"gl-display-flex gl-relative\\"><svg id=\\"link-svg\\" viewBox=\\"0,0,1019,445\\" width=\\"1019px\\" height=\\"445px\\" class=\\"gl-absolute gl-pointer-events-none\\">
+ <path d=\\"M202,118L42,118C72,118,72,138,102,138\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
+ <path d=\\"M202,118L52,118C82,118,82,148,112,148\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
+ <path d=\\"M222,138L62,138C92,138,92,158,122,158\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
+ <path d=\\"M212,128L72,128C102,128,102,168,132,168\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
+ <path d=\\"M232,148L82,148C112,148,112,178,142,178\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
+ </svg> </div>"
+`;
+
+exports[`Links Inner component with a parallel need matches snapshot and has expected path 1`] = `
+"<div class=\\"gl-display-flex gl-relative\\"><svg id=\\"link-svg\\" viewBox=\\"0,0,1019,445\\" width=\\"1019px\\" height=\\"445px\\" class=\\"gl-absolute gl-pointer-events-none\\">
+ <path d=\\"M192,108L22,108C52,108,52,118,82,118\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
+ </svg> </div>"
+`;
+
+exports[`Links Inner component with one need matches snapshot and has expected path 1`] = `
+"<div class=\\"gl-display-flex gl-relative\\"><svg id=\\"link-svg\\" viewBox=\\"0,0,1019,445\\" width=\\"1019px\\" height=\\"445px\\" class=\\"gl-absolute gl-pointer-events-none\\">
+ <path d=\\"M202,118L42,118C72,118,72,138,102,138\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
+ </svg> </div>"
+`;
diff --git a/spec/frontend/pipelines/graph_shared/links_inner_spec.js b/spec/frontend/pipelines/graph_shared/links_inner_spec.js
new file mode 100644
index 00000000000..6cabe2bc8a7
--- /dev/null
+++ b/spec/frontend/pipelines/graph_shared/links_inner_spec.js
@@ -0,0 +1,197 @@
+import { shallowMount } from '@vue/test-utils';
+import { setHTMLFixture } from 'helpers/fixtures';
+import LinksInner from '~/pipelines/components/graph_shared/links_inner.vue';
+import { createJobsHash } from '~/pipelines/utils';
+import {
+ jobRect,
+ largePipelineData,
+ parallelNeedData,
+ pipelineData,
+ pipelineDataWithNoNeeds,
+ rootRect,
+} from '../pipeline_graph/mock_data';
+
+describe('Links Inner component', () => {
+ const containerId = 'pipeline-graph-container';
+ const defaultProps = {
+ containerId,
+ containerMeasurements: { width: 1019, height: 445 },
+ pipelineId: 1,
+ pipelineData: [],
+ };
+ let wrapper;
+
+ const createComponent = (props) => {
+ wrapper = shallowMount(LinksInner, {
+ propsData: { ...defaultProps, ...props },
+ });
+ };
+
+ const findLinkSvg = () => wrapper.find('#link-svg');
+ const findAllLinksPath = () => findLinkSvg().findAll('path');
+
+ // We create a fixture so that each job has an empty div that represents
+ // the JobPill in the DOM. Each `JobPill` would have different coordinates,
+ // so we increment the coordinates on each iteration to simulate different positions.
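+ // For example, a job named `build_1` in pipeline 1 is rendered as `<div id=build_1-1 />`.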
+ const setFixtures = ({ stages }) => {
+ const jobs = createJobsHash(stages);
+ const arrayOfJobs = Object.keys(jobs);
+
+ const linksHtmlElements = arrayOfJobs.map((job) => {
+ return `<div id=${job}-${defaultProps.pipelineId} />`;
+ });
+
+ setHTMLFixture(`<div id="${containerId}">${linksHtmlElements.join(' ')}</div>`);
+
+ // We mock the getBoundingClientRect data of the container and of each job element.
+ jest
+ .spyOn(document.getElementById(containerId), 'getBoundingClientRect')
+ .mockImplementation(() => rootRect);
+
+ arrayOfJobs.forEach((job, index) => {
+ jest
+ .spyOn(
+ document.getElementById(`${job}-${defaultProps.pipelineId}`),
+ 'getBoundingClientRect',
+ )
+ .mockImplementation(() => {
+ const newValue = 10 * index;
+ const { left, right, top, bottom, x, y } = jobRect;
+ return {
+ ...jobRect,
+ left: left + newValue,
+ right: right + newValue,
+ top: top + newValue,
+ bottom: bottom + newValue,
+ x: x + newValue,
+ y: y + newValue,
+ };
+ });
+ });
+ };
+
+ afterEach(() => {
+ jest.restoreAllMocks();
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('basic SVG creation', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders an SVG of the right size', () => {
+ expect(findLinkSvg().exists()).toBe(true);
+ expect(findLinkSvg().attributes('width')).toBe(
+ `${defaultProps.containerMeasurements.width}px`,
+ );
+ expect(findLinkSvg().attributes('height')).toBe(
+ `${defaultProps.containerMeasurements.height}px`,
+ );
+ });
+ });
+
+ describe('no pipeline data', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders the component', () => {
+ expect(findLinkSvg().exists()).toBe(true);
+ expect(findAllLinksPath()).toHaveLength(0);
+ });
+ });
+
+ describe('pipeline data with no needs', () => {
+ beforeEach(() => {
+ createComponent({ pipelineData: pipelineDataWithNoNeeds.stages });
+ });
+
+ it('renders no links', () => {
+ expect(findLinkSvg().exists()).toBe(true);
+ expect(findAllLinksPath()).toHaveLength(0);
+ });
+ });
+
+ describe('with one need', () => {
+ beforeEach(() => {
+ setFixtures(pipelineData);
+ createComponent({ pipelineData: pipelineData.stages });
+ });
+
+ it('renders one link', () => {
+ expect(findAllLinksPath()).toHaveLength(1);
+ });
+
+ it('path does not contain NaN values', () => {
+ expect(wrapper.html()).not.toContain('NaN');
+ });
+
+ it('matches snapshot and has expected path', () => {
+ expect(wrapper.html()).toMatchSnapshot();
+ });
+ });
+
+ describe('with a parallel need', () => {
+ beforeEach(() => {
+ setFixtures(parallelNeedData);
+ createComponent({ pipelineData: parallelNeedData.stages });
+ });
+
+ it('renders only one link for all the same parallel jobs', () => {
+ expect(findAllLinksPath()).toHaveLength(1);
+ });
+
+ it('path does not contain NaN values', () => {
+ expect(wrapper.html()).not.toContain('NaN');
+ });
+
+ it('matches snapshot and has expected path', () => {
+ expect(wrapper.html()).toMatchSnapshot();
+ });
+ });
+
+ describe('with a large number of needs', () => {
+ beforeEach(() => {
+ setFixtures(largePipelineData);
+ createComponent({ pipelineData: largePipelineData.stages });
+ });
+
+ it('renders the correct number of links', () => {
+ expect(findAllLinksPath()).toHaveLength(5);
+ });
+
+ it('path does not contain NaN values', () => {
+ expect(wrapper.html()).not.toContain('NaN');
+ });
+
+ it('matches snapshot and has expected path', () => {
+ expect(wrapper.html()).toMatchSnapshot();
+ });
+ });
+
+ describe('interactions', () => {
+ beforeEach(() => {
+ setFixtures(largePipelineData);
+ createComponent({ pipelineData: largePipelineData.stages });
+ });
+
+ it('highlights needs on hover', async () => {
+ const firstLink = findAllLinksPath().at(0);
+
+ const defaultColorClass = 'gl-stroke-gray-200';
+ const hoverColorClass = 'gl-stroke-blue-400';
+
+ expect(firstLink.classes(defaultColorClass)).toBe(true);
+ expect(firstLink.classes(hoverColorClass)).toBe(false);
+
+ // Because there is a watcher, we need to set the props after the component
+ // has mounted.
+ await wrapper.setProps({ highlightedJob: 'test_1' });
+
+ expect(firstLink.classes(defaultColorClass)).toBe(false);
+ expect(firstLink.classes(hoverColorClass)).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/shared/links_layer_spec.js b/spec/frontend/pipelines/graph_shared/links_layer_spec.js
index 9ef5233dbce..9ef5233dbce 100644
--- a/spec/frontend/pipelines/shared/links_layer_spec.js
+++ b/spec/frontend/pipelines/graph_shared/links_layer_spec.js
diff --git a/spec/frontend/pipelines/header_component_spec.js b/spec/frontend/pipelines/header_component_spec.js
index 03e385e3cc8..5b4cd7b71fe 100644
--- a/spec/frontend/pipelines/header_component_spec.js
+++ b/spec/frontend/pipelines/header_component_spec.js
@@ -1,15 +1,15 @@
import { shallowMount } from '@vue/test-utils';
import { GlModal, GlLoadingIcon } from '@gitlab/ui';
+import HeaderComponent from '~/pipelines/components/header_component.vue';
+import deletePipelineMutation from '~/pipelines/graphql/mutations/delete_pipeline.mutation.graphql';
+import retryPipelineMutation from '~/pipelines/graphql/mutations/retry_pipeline.mutation.graphql';
+import cancelPipelineMutation from '~/pipelines/graphql/mutations/cancel_pipeline.mutation.graphql';
import {
mockCancelledPipelineHeader,
mockFailedPipelineHeader,
mockRunningPipelineHeader,
mockSuccessfulPipelineHeader,
} from './mock_data';
-import HeaderComponent from '~/pipelines/components/header_component.vue';
-import deletePipelineMutation from '~/pipelines/graphql/mutations/delete_pipeline.mutation.graphql';
-import retryPipelineMutation from '~/pipelines/graphql/mutations/retry_pipeline.mutation.graphql';
-import cancelPipelineMutation from '~/pipelines/graphql/mutations/cancel_pipeline.mutation.graphql';
describe('Pipeline details header', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/legacy_header_component_spec.js b/spec/frontend/pipelines/legacy_header_component_spec.js
deleted file mode 100644
index fb7feb8898a..00000000000
--- a/spec/frontend/pipelines/legacy_header_component_spec.js
+++ /dev/null
@@ -1,116 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { GlModal } from '@gitlab/ui';
-import LegacyHeaderComponent from '~/pipelines/components/legacy_header_component.vue';
-import CiHeader from '~/vue_shared/components/header_ci_component.vue';
-import eventHub from '~/pipelines/event_hub';
-
-describe('Pipeline details header', () => {
- let wrapper;
- let glModalDirective;
-
- const threeWeeksAgo = new Date();
- threeWeeksAgo.setDate(threeWeeksAgo.getDate() - 21);
-
- const findDeleteModal = () => wrapper.find(GlModal);
-
- const defaultProps = {
- pipeline: {
- details: {
- status: {
- group: 'failed',
- icon: 'status_failed',
- label: 'failed',
- text: 'failed',
- details_path: 'path',
- },
- },
- id: 123,
- created_at: threeWeeksAgo.toISOString(),
- user: {
- web_url: 'path',
- name: 'Foo',
- username: 'foobar',
- email: 'foo@bar.com',
- avatar_url: 'link',
- },
- retry_path: 'retry',
- cancel_path: 'cancel',
- delete_path: 'delete',
- },
- isLoading: false,
- };
-
- const createComponent = (props = {}) => {
- glModalDirective = jest.fn();
-
- wrapper = shallowMount(LegacyHeaderComponent, {
- propsData: {
- ...props,
- },
- directives: {
- glModal: {
- bind(el, { value }) {
- glModalDirective(value);
- },
- },
- },
- });
- };
-
- beforeEach(() => {
- jest.spyOn(eventHub, '$emit');
-
- createComponent(defaultProps);
- });
-
- afterEach(() => {
- eventHub.$off();
-
- wrapper.destroy();
- wrapper = null;
- });
-
- it('should render provided pipeline info', () => {
- expect(wrapper.find(CiHeader).props()).toMatchObject({
- status: defaultProps.pipeline.details.status,
- itemId: defaultProps.pipeline.id,
- time: defaultProps.pipeline.created_at,
- user: defaultProps.pipeline.user,
- });
- });
-
- describe('action buttons', () => {
- it('should not trigger eventHub when nothing happens', () => {
- expect(eventHub.$emit).not.toHaveBeenCalled();
- });
-
- it('should call postAction when retry button action is clicked', () => {
- wrapper.find('[data-testid="retryButton"]').vm.$emit('click');
-
- expect(eventHub.$emit).toHaveBeenCalledWith('headerPostAction', 'retry');
- });
-
- it('should call postAction when cancel button action is clicked', () => {
- wrapper.find('[data-testid="cancelPipeline"]').vm.$emit('click');
-
- expect(eventHub.$emit).toHaveBeenCalledWith('headerPostAction', 'cancel');
- });
-
- it('does not show delete modal', () => {
- expect(findDeleteModal()).not.toBeVisible();
- });
-
- describe('when delete button action is clicked', () => {
- it('displays delete modal', () => {
- expect(findDeleteModal().props('modalId')).toBe(wrapper.vm.$options.DELETE_MODAL_ID);
- expect(glModalDirective).toHaveBeenCalledWith(wrapper.vm.$options.DELETE_MODAL_ID);
- });
-
- it('should call delete when modal is submitted', () => {
- findDeleteModal().vm.$emit('ok');
-
- expect(eventHub.$emit).toHaveBeenCalledWith('headerDeleteAction', 'delete');
- });
- });
- });
-});
diff --git a/spec/frontend/pipelines/pipeline_graph/mock_data.js b/spec/frontend/pipelines/pipeline_graph/mock_data.js
index 7d1a7a79c7f..339aac9f349 100644
--- a/spec/frontend/pipelines/pipeline_graph/mock_data.js
+++ b/spec/frontend/pipelines/pipeline_graph/mock_data.js
@@ -1,5 +1,3 @@
-import { createUniqueLinkId } from '~/pipelines/components/graph_shared/drawing_utils';
-
export const yamlString = `stages:
- empty
- build
@@ -41,10 +39,28 @@ deploy_a:
script: echo hello
`;
-const jobId1 = createUniqueLinkId('build', 'build_1');
-const jobId2 = createUniqueLinkId('test', 'test_1');
-const jobId3 = createUniqueLinkId('test', 'test_2');
-const jobId4 = createUniqueLinkId('deploy', 'deploy_1');
+export const pipelineDataWithNoNeeds = {
+ stages: [
+ {
+ name: 'build',
+ groups: [
+ {
+ name: 'build_1',
+ jobs: [{ script: 'echo hello', stage: 'build' }],
+ },
+ ],
+ },
+ {
+ name: 'test',
+ groups: [
+ {
+ name: 'test_1',
+ jobs: [{ script: 'yarn test', stage: 'test' }],
+ },
+ ],
+ },
+ ],
+};
export const pipelineData = {
stages: [
@@ -54,7 +70,6 @@ export const pipelineData = {
{
name: 'build_1',
jobs: [{ script: 'echo hello', stage: 'build' }],
- id: jobId1,
},
],
},
@@ -64,12 +79,10 @@ export const pipelineData = {
{
name: 'test_1',
jobs: [{ script: 'yarn test', stage: 'test' }],
- id: jobId2,
},
{
name: 'test_2',
jobs: [{ script: 'yarn karma', stage: 'test' }],
- id: jobId3,
},
],
},
@@ -79,7 +92,86 @@ export const pipelineData = {
{
name: 'deploy_1',
jobs: [{ script: 'yarn magick', stage: 'deploy', needs: ['test_1'] }],
- id: jobId4,
+ },
+ ],
+ },
+ ],
+};
+
+export const parallelNeedData = {
+ stages: [
+ {
+ name: 'build',
+ groups: [
+ {
+ name: 'build_1',
+ parallel: 3,
+ jobs: [
+ { script: 'echo hello', stage: 'build', name: 'build_1 1/3' },
+ { script: 'echo hello', stage: 'build', name: 'build_1 2/3' },
+ { script: 'echo hello', stage: 'build', name: 'build_1 3/3' },
+ ],
+ },
+ ],
+ },
+ {
+ name: 'test',
+ groups: [
+ {
+ name: 'test_1',
+ jobs: [{ script: 'yarn test', stage: 'test', needs: ['build_1'] }],
+ },
+ ],
+ },
+ ],
+};
+
+export const largePipelineData = {
+ stages: [
+ {
+ name: 'build',
+ groups: [
+ {
+ name: 'build_1',
+ jobs: [{ script: 'echo hello', stage: 'build' }],
+ },
+ {
+ name: 'build_2',
+ jobs: [{ script: 'echo hello', stage: 'build' }],
+ },
+ {
+ name: 'build_3',
+ jobs: [{ script: 'echo hello', stage: 'build' }],
+ },
+ ],
+ },
+ {
+ name: 'test',
+ groups: [
+ {
+ name: 'test_1',
+ jobs: [{ script: 'yarn test', stage: 'test', needs: ['build_2'] }],
+ },
+ {
+ name: 'test_2',
+ jobs: [{ script: 'yarn karma', stage: 'test', needs: ['build_2'] }],
+ },
+ ],
+ },
+ {
+ name: 'deploy',
+ groups: [
+ {
+ name: 'deploy_1',
+ jobs: [{ script: 'yarn magick', stage: 'deploy', needs: ['test_1'] }],
+ },
+ {
+ name: 'deploy_2',
+ jobs: [{ script: 'yarn magick', stage: 'deploy', needs: ['build_3'] }],
+ },
+ {
+ name: 'deploy_3',
+ jobs: [{ script: 'yarn magick', stage: 'deploy', needs: ['test_2'] }],
},
],
},
@@ -94,9 +186,30 @@ export const singleStageData = {
{
name: 'build_1',
jobs: [{ script: 'echo hello', stage: 'build' }],
- id: jobId1,
},
],
},
],
};
+
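+// Mocked getBoundingClientRect values for the graph container (rootRect) and a single
+// job pill (jobRect), used when stubbing element positions in links_inner_spec.js.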
+export const rootRect = {
+ bottom: 463,
+ height: 271,
+ left: 236,
+ right: 1252,
+ top: 192,
+ width: 1016,
+ x: 236,
+ y: 192,
+};
+
+export const jobRect = {
+ bottom: 312,
+ height: 24,
+ left: 308,
+ right: 428,
+ top: 288,
+ width: 120,
+ x: 308,
+ y: 288,
+};
diff --git a/spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js b/spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js
index b6b0a964383..5462045c5c4 100644
--- a/spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js
+++ b/spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js
@@ -1,11 +1,11 @@
import { shallowMount } from '@vue/test-utils';
import { GlAlert } from '@gitlab/ui';
-import { pipelineData, singleStageData } from './mock_data';
import { CI_CONFIG_STATUS_INVALID, CI_CONFIG_STATUS_VALID } from '~/pipeline_editor/constants';
import { DRAW_FAILURE, EMPTY_PIPELINE_DATA, INVALID_CI_CONFIG } from '~/pipelines/constants';
import PipelineGraph from '~/pipelines/components/pipeline_graph/pipeline_graph.vue';
import StagePill from '~/pipelines/components/pipeline_graph/stage_pill.vue';
import JobPill from '~/pipelines/components/pipeline_graph/job_pill.vue';
+import { pipelineData, singleStageData } from './mock_data';
describe('pipeline graph component', () => {
const defaultProps = { pipelineData };
diff --git a/spec/frontend/pipelines/pipeline_url_spec.js b/spec/frontend/pipelines/pipeline_url_spec.js
index 47315bd42e6..5266127ca60 100644
--- a/spec/frontend/pipelines/pipeline_url_spec.js
+++ b/spec/frontend/pipelines/pipeline_url_spec.js
@@ -1,6 +1,6 @@
import $ from 'jquery';
-import { trimText } from 'helpers/text_helper';
import { shallowMount } from '@vue/test-utils';
+import { trimText } from 'helpers/text_helper';
import PipelineUrlComponent from '~/pipelines/components/pipelines_list/pipeline_url.vue';
$.fn.popover = () => {};
@@ -17,6 +17,7 @@ describe('Pipeline Url Component', () => {
const findStuckTag = () => wrapper.find('[data-testid="pipeline-url-stuck"]');
const findDetachedTag = () => wrapper.find('[data-testid="pipeline-url-detached"]');
const findForkTag = () => wrapper.find('[data-testid="pipeline-url-fork"]');
+ const findTrainTag = () => wrapper.find('[data-testid="pipeline-url-train"]');
const defaultProps = {
pipeline: {
@@ -141,6 +142,7 @@ describe('Pipeline Url Component', () => {
expect(findScheduledTag().exists()).toBe(true);
expect(findScheduledTag().text()).toContain('Scheduled');
});
+
it('should render the fork badge when the pipeline was run in a fork', () => {
createComponent({
pipeline: {
@@ -152,4 +154,28 @@ describe('Pipeline Url Component', () => {
expect(findForkTag().exists()).toBe(true);
expect(findForkTag().text()).toBe('fork');
});
+
+ it('should render the train badge when the pipeline is a merge train pipeline', () => {
+ createComponent({
+ pipeline: {
+ flags: {
+ merge_train_pipeline: true,
+ },
+ },
+ });
+
+ expect(findTrainTag().text()).toContain('train');
+ });
+
+ it('should not render the train badge when the pipeline is not a merge train pipeline', () => {
+ createComponent({
+ pipeline: {
+ flags: {
+ merge_train_pipeline: false,
+ },
+ },
+ });
+
+ expect(findTrainTag().exists()).toBe(false);
+ });
});
diff --git a/spec/frontend/pipelines/pipelines_actions_spec.js b/spec/frontend/pipelines/pipelines_actions_spec.js
index 69c1b7ce43d..ec01fe7c324 100644
--- a/spec/frontend/pipelines/pipelines_actions_spec.js
+++ b/spec/frontend/pipelines/pipelines_actions_spec.js
@@ -1,25 +1,29 @@
-import { shallowMount } from '@vue/test-utils';
+import { shallowMount, mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
import { TEST_HOST } from 'spec/test_constants';
-import { GlButton } from '@gitlab/ui';
import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
import PipelinesActions from '~/pipelines/components/pipelines_list/pipelines_actions.vue';
import GlCountdown from '~/vue_shared/components/gl_countdown.vue';
+jest.mock('~/flash');
+
describe('Pipelines Actions dropdown', () => {
let wrapper;
let mock;
- const createComponent = (actions = []) => {
- wrapper = shallowMount(PipelinesActions, {
+ const createComponent = (props, mountFn = shallowMount) => {
+ wrapper = mountFn(PipelinesActions, {
propsData: {
- actions,
+ ...props,
},
});
};
- const findAllDropdownItems = () => wrapper.findAll(GlButton);
+ const findDropdown = () => wrapper.find(GlDropdown);
+ const findAllDropdownItems = () => wrapper.findAll(GlDropdownItem);
const findAllCountdowns = () => wrapper.findAll(GlCountdown);
beforeEach(() => {
@@ -47,7 +51,7 @@ describe('Pipelines Actions dropdown', () => {
];
beforeEach(() => {
- createComponent(mockActions);
+ createComponent({ actions: mockActions });
});
it('renders a dropdown with the provided actions', () => {
@@ -59,16 +63,33 @@ describe('Pipelines Actions dropdown', () => {
});
describe('on click', () => {
- it('makes a request and toggles the loading state', () => {
+ beforeEach(() => {
+ createComponent({ actions: mockActions }, mount);
+ });
+
+ it('makes a request and toggles the loading state', async () => {
mock.onPost(mockActions.path).reply(200);
- wrapper.find(GlButton).vm.$emit('click');
+ findAllDropdownItems().at(0).vm.$emit('click');
+
+ await wrapper.vm.$nextTick();
+ expect(findDropdown().props('loading')).toBe(true);
+
+ await waitForPromises();
+ expect(findDropdown().props('loading')).toBe(false);
+ });
+
+ it('makes a failed request and toggles the loading state', async () => {
+ mock.onPost(mockActions.path).reply(500);
+
+ findAllDropdownItems().at(0).vm.$emit('click');
- expect(wrapper.vm.isLoading).toBe(true);
+ await wrapper.vm.$nextTick();
+ expect(findDropdown().props('loading')).toBe(true);
- return waitForPromises().then(() => {
- expect(wrapper.vm.isLoading).toBe(false);
- });
+ await waitForPromises();
+ expect(findDropdown().props('loading')).toBe(false);
+ expect(createFlash).toHaveBeenCalledTimes(1);
});
});
});
@@ -89,10 +110,10 @@ describe('Pipelines Actions dropdown', () => {
beforeEach(() => {
jest.spyOn(Date, 'now').mockImplementation(() => new Date('2063-04-04T00:42:00Z').getTime());
- createComponent([scheduledJobAction, expiredJobAction]);
+ createComponent({ actions: [scheduledJobAction, expiredJobAction] });
});
- it('makes post request after confirming', () => {
+ it('makes post request after confirming', async () => {
mock.onPost(scheduledJobAction.path).reply(200);
jest.spyOn(window, 'confirm').mockReturnValue(true);
@@ -100,19 +121,22 @@ describe('Pipelines Actions dropdown', () => {
expect(window.confirm).toHaveBeenCalled();
- return waitForPromises().then(() => {
- expect(mock.history.post.length).toBe(1);
- });
+ await waitForPromises();
+
+ expect(mock.history.post).toHaveLength(1);
});
- it('does not make post request if confirmation is cancelled', () => {
+ it('does not make post request if confirmation is cancelled', async () => {
mock.onPost(scheduledJobAction.path).reply(200);
jest.spyOn(window, 'confirm').mockReturnValue(false);
findAllDropdownItems().at(0).vm.$emit('click');
expect(window.confirm).toHaveBeenCalled();
- expect(mock.history.post.length).toBe(0);
+
+ await waitForPromises();
+
+ expect(mock.history.post).toHaveLength(0);
});
it('displays the remaining time in the dropdown', () => {
diff --git a/spec/frontend/pipelines/pipelines_spec.js b/spec/frontend/pipelines/pipelines_spec.js
index 5d82669b0b8..cbad5b4c1e6 100644
--- a/spec/frontend/pipelines/pipelines_spec.js
+++ b/spec/frontend/pipelines/pipelines_spec.js
@@ -1,8 +1,8 @@
import { nextTick } from 'vue';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
-import waitForPromises from 'helpers/wait_for_promises';
import { GlFilteredSearch, GlButton, GlLoadingIcon } from '@gitlab/ui';
+import waitForPromises from 'helpers/wait_for_promises';
import Api from '~/api';
import axios from '~/lib/utils/axios_utils';
import NavigationTabs from '~/vue_shared/components/navigation_tabs.vue';
@@ -15,9 +15,9 @@ import PipelinesTableComponent from '~/pipelines/components/pipelines_list/pipel
import PipelinesComponent from '~/pipelines/components/pipelines_list/pipelines.vue';
import Store from '~/pipelines/stores/pipelines_store';
-import { pipelineWithStages, stageReply, users, mockSearch, branches } from './mock_data';
import { RAW_TEXT_WARNING } from '~/pipelines/constants';
import { deprecatedCreateFlash as createFlash } from '~/flash';
+import { pipelineWithStages, stageReply, users, mockSearch, branches } from './mock_data';
jest.mock('~/flash');
@@ -66,10 +66,10 @@ describe('Pipelines', () => {
const findRunPipelineButton = () => findByTestId('run-pipeline-button');
const findCiLintButton = () => findByTestId('ci-lint-button');
const findCleanCacheButton = () => findByTestId('clear-cache-button');
+ const findStagesDropdown = () => findByTestId('mini-pipeline-graph-dropdown-toggle');
const findEmptyState = () => wrapper.find(EmptyState);
const findBlankState = () => wrapper.find(BlankState);
- const findStagesDropdown = () => wrapper.find('.js-builds-dropdown-button');
const findTablePagination = () => wrapper.find(TablePagination);
diff --git a/spec/frontend/pipelines/pipelines_table_row_spec.js b/spec/frontend/pipelines/pipelines_table_row_spec.js
index 9cdd24b2ab5..660651547fc 100644
--- a/spec/frontend/pipelines/pipelines_table_row_spec.js
+++ b/spec/frontend/pipelines/pipelines_table_row_spec.js
@@ -155,7 +155,9 @@ describe('Pipelines Table Row', () => {
it('should render an icon for each stage', () => {
expect(
- wrapper.findAll('.table-section:nth-child(4) .js-builds-dropdown-button').length,
+ wrapper.findAll(
+ '.table-section:nth-child(4) [data-testid="mini-pipeline-graph-dropdown-toggle"]',
+ ).length,
).toEqual(pipeline.details.stages.length);
});
});
diff --git a/spec/frontend/pipelines/stage_spec.js b/spec/frontend/pipelines/stage_spec.js
index e4782a1dab1..8c914705d97 100644
--- a/spec/frontend/pipelines/stage_spec.js
+++ b/spec/frontend/pipelines/stage_spec.js
@@ -1,4 +1,6 @@
import 'bootstrap/js/dist/dropdown';
+import $ from 'jquery';
+import { GlDropdown } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
@@ -9,6 +11,7 @@ import { stageReply } from './mock_data';
describe('Pipelines stage component', () => {
let wrapper;
let mock;
+ let glFeatures;
const defaultProps = {
stage: {
@@ -22,8 +25,6 @@ describe('Pipelines stage component', () => {
updateDropdown: false,
};
- const isDropdownOpen = () => wrapper.classes('show');
-
const createComponent = (props = {}) => {
wrapper = mount(StageComponent, {
attachTo: document.body,
@@ -31,110 +32,265 @@ describe('Pipelines stage component', () => {
...defaultProps,
...props,
},
+ provide: {
+ glFeatures,
+ },
});
};
beforeEach(() => {
mock = new MockAdapter(axios);
+ jest.spyOn(eventHub, '$emit');
+ glFeatures = {};
});
afterEach(() => {
wrapper.destroy();
wrapper = null;
+ eventHub.$emit.mockRestore();
mock.restore();
});
- describe('default', () => {
- beforeEach(() => {
- createComponent();
+ describe('when ci_mini_pipeline_gl_dropdown feature flag is disabled', () => {
+ const isDropdownOpen = () => wrapper.classes('show');
+
+ describe('default', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('should render a dropdown with the status icon', () => {
+ expect(wrapper.attributes('class')).toEqual('dropdown');
+ expect(wrapper.find('svg').exists()).toBe(true);
+ expect(wrapper.find('button').attributes('data-toggle')).toEqual('dropdown');
+ });
});
- it('should render a dropdown with the status icon', () => {
- expect(wrapper.attributes('class')).toEqual('dropdown');
- expect(wrapper.find('svg').exists()).toBe(true);
- expect(wrapper.find('button').attributes('data-toggle')).toEqual('dropdown');
+ describe('with successful request', () => {
+ beforeEach(() => {
+ mock.onGet('path.json').reply(200, stageReply);
+ createComponent();
+ });
+
+ it('should render the received data and emit `clickedDropdown` event', async () => {
+ wrapper.find('button').trigger('click');
+
+ await axios.waitForAll();
+ expect(wrapper.find('.js-builds-dropdown-container ul').text()).toContain(
+ stageReply.latest_statuses[0].name,
+ );
+
+ expect(eventHub.$emit).toHaveBeenCalledWith('clickedDropdown');
+ });
});
- });
- describe('with successful request', () => {
- beforeEach(() => {
- mock.onGet('path.json').reply(200, stageReply);
+ it('closes the dropdown when the request fails', async () => {
+ mock.onGet('path.json').reply(500);
createComponent();
- });
+ wrapper.find({ ref: 'dropdown' }).trigger('click');
- it('should render the received data and emit `clickedDropdown` event', async () => {
- jest.spyOn(eventHub, '$emit');
- wrapper.find('button').trigger('click');
+ expect(isDropdownOpen()).toBe(true);
+ wrapper.find('button').trigger('click');
await axios.waitForAll();
- expect(wrapper.find('.js-builds-dropdown-container ul').text()).toContain(
- stageReply.latest_statuses[0].name,
- );
- expect(eventHub.$emit).toHaveBeenCalledWith('clickedDropdown');
+ expect(isDropdownOpen()).toBe(false);
});
- });
- it('when request fails should close the dropdown', async () => {
- mock.onGet('path.json').reply(500);
- createComponent();
- wrapper.find({ ref: 'dropdown' }).trigger('click');
- expect(isDropdownOpen()).toBe(true);
+ describe('update endpoint correctly', () => {
+ beforeEach(() => {
+ const copyStage = { ...stageReply };
+ copyStage.latest_statuses[0].name = 'this is the updated content';
+ mock.onGet('bar.json').reply(200, copyStage);
+ createComponent({
+ stage: {
+ status: {
+ group: 'running',
+ icon: 'status_running',
+ title: 'running',
+ },
+ dropdown_path: 'bar.json',
+ },
+ });
+ return axios.waitForAll();
+ });
+
+ it('should update the stage to request the new endpoint provided', async () => {
+ wrapper.find('button').trigger('click');
+ await axios.waitForAll();
+
+ expect(wrapper.find('.js-builds-dropdown-container ul').text()).toContain(
+ 'this is the updated content',
+ );
+ });
+ });
+
+ describe('pipelineActionRequestComplete', () => {
+ beforeEach(() => {
+ mock.onGet('path.json').reply(200, stageReply);
+ mock.onPost(`${stageReply.latest_statuses[0].status.action.path}.json`).reply(200);
+ });
+
+ const clickCiAction = async () => {
+ wrapper.find('button').trigger('click');
+ await axios.waitForAll();
+
+ wrapper.find('.js-ci-action').trigger('click');
+ await axios.waitForAll();
+ };
+
+ describe('within pipeline table', () => {
+ it('emits `refreshPipelinesTable` event when `pipelineActionRequestComplete` is triggered', async () => {
+ createComponent({ type: 'PIPELINES_TABLE' });
+
+ await clickCiAction();
+
+ expect(eventHub.$emit).toHaveBeenCalledWith('refreshPipelinesTable');
+ });
+ });
+
+ describe('in MR widget', () => {
+ beforeEach(() => {
+ jest.spyOn($.fn, 'dropdown');
+ });
- wrapper.find('button').trigger('click');
- await axios.waitForAll();
+ it('closes the dropdown when `pipelineActionRequestComplete` is triggered', async () => {
+ createComponent();
- expect(isDropdownOpen()).toBe(false);
+ await clickCiAction();
+
+ expect($.fn.dropdown).toHaveBeenCalledWith('toggle');
+ });
+ });
+ });
});
- describe('update endpoint correctly', () => {
+ describe('when ci_mini_pipeline_gl_dropdown feature flag is enabled', () => {
+ const findDropdown = () => wrapper.find(GlDropdown);
+ const findDropdownToggle = () => wrapper.find('button.gl-dropdown-toggle');
+ const findDropdownMenu = () =>
+ wrapper.find('[data-testid="mini-pipeline-graph-dropdown-menu-list"]');
+ const findCiActionBtn = () => wrapper.find('.js-ci-action');
+
+ const openGlDropdown = () => {
+ findDropdownToggle().trigger('click');
+ return new Promise((resolve) => {
+ wrapper.vm.$root.$on('bv::dropdown::show', resolve);
+ });
+ };
+
beforeEach(() => {
- const copyStage = { ...stageReply };
- copyStage.latest_statuses[0].name = 'this is the updated content';
- mock.onGet('bar.json').reply(200, copyStage);
- createComponent({
- stage: {
- status: {
- group: 'running',
- icon: 'status_running',
- title: 'running',
- },
- dropdown_path: 'bar.json',
- },
+ glFeatures = { ciMiniPipelineGlDropdown: true };
+ });
+
+ describe('default', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('should render a dropdown with the status icon', () => {
+ expect(findDropdown().exists()).toBe(true);
+ expect(findDropdownToggle().classes('gl-dropdown-toggle')).toEqual(true);
+ expect(wrapper.find('[data-testid="status_success_borderless-icon"]').exists()).toBe(true);
});
- return axios.waitForAll();
});
- it('should update the stage to request the new endpoint provided', async () => {
- wrapper.find('button').trigger('click');
+ describe('with successful request', () => {
+ beforeEach(() => {
+ mock.onGet('path.json').reply(200, stageReply);
+ createComponent();
+ });
+
+ it('should render the received data and emit `clickedDropdown` event', async () => {
+ await openGlDropdown();
+ await axios.waitForAll();
+
+ expect(findDropdownMenu().text()).toContain(stageReply.latest_statuses[0].name);
+ expect(eventHub.$emit).toHaveBeenCalledWith('clickedDropdown');
+ });
+ });
+
+ it('closes the dropdown when the request fails', async () => {
+ mock.onGet('path.json').reply(500);
+
+ createComponent();
+
+ await openGlDropdown();
await axios.waitForAll();
- expect(wrapper.find('.js-builds-dropdown-container ul').text()).toContain(
- 'this is the updated content',
- );
+ expect(findDropdown().classes('show')).toBe(false);
});
- });
- describe('pipelineActionRequestComplete', () => {
- beforeEach(() => {
- mock.onGet('path.json').reply(200, stageReply);
- mock.onPost(`${stageReply.latest_statuses[0].status.action.path}.json`).reply(200);
+ describe('update endpoint correctly', () => {
+ beforeEach(async () => {
+ const copyStage = { ...stageReply };
+ copyStage.latest_statuses[0].name = 'this is the updated content';
+ mock.onGet('bar.json').reply(200, copyStage);
+ createComponent({
+ stage: {
+ status: {
+ group: 'running',
+ icon: 'status_running',
+ title: 'running',
+ },
+ dropdown_path: 'bar.json',
+ },
+ });
+ await axios.waitForAll();
+ });
- createComponent({ type: 'PIPELINES_TABLE' });
+ it('should update the stage to request the new endpoint provided', async () => {
+ await openGlDropdown();
+ await axios.waitForAll();
+
+ expect(findDropdownMenu().text()).toContain('this is the updated content');
+ });
});
- describe('within pipeline table', () => {
- it('emits `refreshPipelinesTable` event when `pipelineActionRequestComplete` is triggered', async () => {
- jest.spyOn(eventHub, '$emit');
+ describe('pipelineActionRequestComplete', () => {
+ beforeEach(() => {
+ mock.onGet('path.json').reply(200, stageReply);
+ mock.onPost(`${stageReply.latest_statuses[0].status.action.path}.json`).reply(200);
+ });
- wrapper.find('button').trigger('click');
+ const clickCiAction = async () => {
+ await openGlDropdown();
await axios.waitForAll();
- wrapper.find('.js-ci-action').trigger('click');
+ findCiActionBtn().trigger('click');
await axios.waitForAll();
+ };
+
+ describe('within pipeline table', () => {
+ beforeEach(() => {
+ createComponent({ type: 'PIPELINES_TABLE' });
+ });
+
+ it('emits `refreshPipelinesTable` event when `pipelineActionRequestComplete` is triggered', async () => {
+ await clickCiAction();
+
+ expect(eventHub.$emit).toHaveBeenCalledWith('refreshPipelinesTable');
+ });
+ });
+
+ describe('in MR widget', () => {
+ beforeEach(() => {
+ jest.spyOn($.fn, 'dropdown');
+ createComponent();
+ });
+
+ it('closes the dropdown when `pipelineActionRequestComplete` is triggered', async () => {
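+ // Listen for BootstrapVue's hide event to verify that the dropdown gets closed.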
+ const hidden = jest.fn();
+
+ wrapper.vm.$root.$on('bv::dropdown::hide', hidden);
+
+ expect(hidden).toHaveBeenCalledTimes(0);
+
+ await clickCiAction();
- expect(eventHub.$emit).toHaveBeenCalledWith('refreshPipelinesTable');
+ expect(hidden).toHaveBeenCalledTimes(1);
+ });
});
});
});
diff --git a/spec/frontend/pipelines/test_reports/stores/actions_spec.js b/spec/frontend/pipelines/test_reports/stores/actions_spec.js
index f7ff36c0a46..970430b3adf 100644
--- a/spec/frontend/pipelines/test_reports/stores/actions_spec.js
+++ b/spec/frontend/pipelines/test_reports/stores/actions_spec.js
@@ -16,7 +16,7 @@ describe('Actions TestReports Store', () => {
const testReports = getJSONFixture('pipelines/test_report.json');
const summary = { total_count: 1 };
- const suiteEndpoint = `${TEST_HOST}/tests/:suite_name.json`;
+ const suiteEndpoint = `${TEST_HOST}/tests/suite.json`;
const summaryEndpoint = `${TEST_HOST}/test_reports/summary.json`;
const defaultState = {
suiteEndpoint,
@@ -69,9 +69,8 @@ describe('Actions TestReports Store', () => {
beforeEach(() => {
const buildIds = [1];
testReports.test_suites[0].build_ids = buildIds;
- const endpoint = suiteEndpoint.replace(':suite_name', testReports.test_suites[0].name);
mock
- .onGet(endpoint, { params: { build_ids: buildIds } })
+ .onGet(suiteEndpoint, { params: { build_ids: buildIds } })
.replyOnce(200, testReports.test_suites[0], {});
});
diff --git a/spec/frontend/pipelines/test_reports/test_case_details_spec.js b/spec/frontend/pipelines/test_reports/test_case_details_spec.js
index bfb8b43778d..42765ca6a32 100644
--- a/spec/frontend/pipelines/test_reports/test_case_details_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_case_details_spec.js
@@ -11,12 +11,17 @@ describe('Test case details', () => {
classname: 'spec.test_spec',
name: 'Test#something cool',
formattedTime: '10.04ms',
+ recent_failures: {
+ count: 2,
+ base_branch: 'master',
+ },
system_output: 'Line 42 is broken',
};
const findModal = () => wrapper.find(GlModal);
const findName = () => wrapper.find('[data-testid="test-case-name"]');
const findDuration = () => wrapper.find('[data-testid="test-case-duration"]');
+ const findRecentFailures = () => wrapper.find('[data-testid="test-case-recent-failures"]');
const findSystemOutput = () => wrapper.find('[data-testid="test-case-trace"]');
const createComponent = (testCase = {}) => {
@@ -56,6 +61,36 @@ describe('Test case details', () => {
});
});
+ describe('when test case has recent failures', () => {
+ describe('has only 1 recent failure', () => {
+ it('renders the recent failure', () => {
+ createComponent({ recent_failures: { ...defaultTestCase.recent_failures, count: 1 } });
+
+ expect(findRecentFailures().text()).toContain(
+ `Failed 1 time in ${defaultTestCase.recent_failures.base_branch} in the last 14 days`,
+ );
+ });
+ });
+
+ describe('has more than 1 recent failure', () => {
+ it('renders the recent failures', () => {
+ createComponent();
+
+ expect(findRecentFailures().text()).toContain(
+ `Failed ${defaultTestCase.recent_failures.count} times in ${defaultTestCase.recent_failures.base_branch} in the last 14 days`,
+ );
+ });
+ });
+ });
+
+ describe('when test case does not have recent failures', () => {
+ it('does not render the recent failures', () => {
+ createComponent({ recent_failures: null });
+
+ expect(findRecentFailures().exists()).toBe(false);
+ });
+ });
+
describe('when test case has system output', () => {
it('renders the test case system output', () => {
createComponent();
diff --git a/spec/frontend/pipelines/test_reports/test_suite_table_spec.js b/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
index b8fd056610b..57545aec50f 100644
--- a/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
@@ -1,7 +1,7 @@
import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
-import { getJSONFixture } from 'helpers/fixtures';
import { GlButton, GlFriendlyWrap, GlPagination } from '@gitlab/ui';
+import { getJSONFixture } from 'helpers/fixtures';
import SuiteTable from '~/pipelines/components/test_reports/test_suite_table.vue';
import * as getters from '~/pipelines/stores/test_reports/getters';
import { TestStatus } from '~/pipelines/constants';
diff --git a/spec/frontend/pipelines/tokens/pipeline_trigger_author_token_spec.js b/spec/frontend/pipelines/tokens/pipeline_trigger_author_token_spec.js
index 371ba5a4f9b..7ddbbb3b005 100644
--- a/spec/frontend/pipelines/tokens/pipeline_trigger_author_token_spec.js
+++ b/spec/frontend/pipelines/tokens/pipeline_trigger_author_token_spec.js
@@ -1,6 +1,6 @@
import { GlFilteredSearchToken, GlFilteredSearchSuggestion, GlLoadingIcon } from '@gitlab/ui';
-import { stubComponent } from 'helpers/stub_component';
import { shallowMount } from '@vue/test-utils';
+import { stubComponent } from 'helpers/stub_component';
import Api from '~/api';
import PipelineTriggerAuthorToken from '~/pipelines/components/pipelines_list/tokens/pipeline_trigger_author_token.vue';
import { users } from '../mock_data';
diff --git a/spec/frontend/profile/account/components/delete_account_modal_spec.js b/spec/frontend/profile/account/components/delete_account_modal_spec.js
index 63e27473979..82d2ab2d18a 100644
--- a/spec/frontend/profile/account/components/delete_account_modal_spec.js
+++ b/spec/frontend/profile/account/components/delete_account_modal_spec.js
@@ -1,8 +1,8 @@
import Vue from 'vue';
-import { TEST_HOST } from 'helpers/test_constants';
import { merge } from 'lodash';
import { mount } from '@vue/test-utils';
+import { TEST_HOST } from 'helpers/test_constants';
import deleteAccountModal from '~/profile/account/components/delete_account_modal.vue';
const GlModalStub = {
diff --git a/spec/frontend/projects/commit/components/branches_dropdown_spec.js b/spec/frontend/projects/commit/components/branches_dropdown_spec.js
index 9fa7d658405..6f816006ae3 100644
--- a/spec/frontend/projects/commit/components/branches_dropdown_spec.js
+++ b/spec/frontend/projects/commit/components/branches_dropdown_spec.js
@@ -1,8 +1,8 @@
import Vue from 'vue';
import Vuex from 'vuex';
import { shallowMount } from '@vue/test-utils';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import { GlDropdownItem, GlSearchBoxByType } from '@gitlab/ui';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import BranchesDropdown from '~/projects/commit/components/branches_dropdown.vue';
Vue.use(Vuex);
diff --git a/spec/frontend/projects/commit/components/form_modal_spec.js b/spec/frontend/projects/commit/components/form_modal_spec.js
index 1c37b82fed3..e197ca8eace 100644
--- a/spec/frontend/projects/commit/components/form_modal_spec.js
+++ b/spec/frontend/projects/commit/components/form_modal_spec.js
@@ -1,13 +1,14 @@
import MockAdapter from 'axios-mock-adapter';
import { shallowMount, mount, createWrapper } from '@vue/test-utils';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import { GlModal, GlForm, GlFormCheckbox, GlSprintf } from '@gitlab/ui';
import { within } from '@testing-library/dom';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import axios from '~/lib/utils/axios_utils';
import eventHub from '~/projects/commit/event_hub';
import CommitFormModal from '~/projects/commit/components/form_modal.vue';
import BranchesDropdown from '~/projects/commit/components/branches_dropdown.vue';
import createStore from '~/projects/commit/store';
+import { BV_SHOW_MODAL } from '~/lib/utils/constants';
import mockData from '../mock_data';
describe('CommitFormModal', () => {
@@ -64,7 +65,7 @@ describe('CommitFormModal', () => {
wrapper.vm.show();
- expect(rootEmit).toHaveBeenCalledWith('bv::show::modal', mockData.modalPropsData.modalId);
+ expect(rootEmit).toHaveBeenCalledWith(BV_SHOW_MODAL, mockData.modalPropsData.modalId);
});
it('Clears the modal state once modal is hidden', () => {
diff --git a/spec/frontend/projects/commit/store/actions_spec.js b/spec/frontend/projects/commit/store/actions_spec.js
index ec528d4ee88..e10e8fab440 100644
--- a/spec/frontend/projects/commit/store/actions_spec.js
+++ b/spec/frontend/projects/commit/store/actions_spec.js
@@ -5,8 +5,8 @@ import createFlash from '~/flash';
import getInitialState from '~/projects/commit/store/state';
import * as actions from '~/projects/commit/store/actions';
import * as types from '~/projects/commit/store/mutation_types';
-import mockData from '../mock_data';
import { PROJECT_BRANCHES_ERROR } from '~/projects/commit/constants';
+import mockData from '../mock_data';
jest.mock('~/flash.js');
diff --git a/spec/frontend/projects/commit_box/info/load_branches_spec.js b/spec/frontend/projects/commit_box/info/load_branches_spec.js
index ebd4ee45dab..8100200cbdd 100644
--- a/spec/frontend/projects/commit_box/info/load_branches_spec.js
+++ b/spec/frontend/projects/commit_box/info/load_branches_spec.js
@@ -1,6 +1,6 @@
import axios from 'axios';
-import waitForPromises from 'helpers/wait_for_promises';
import MockAdapter from 'axios-mock-adapter';
+import waitForPromises from 'helpers/wait_for_promises';
import { loadBranches } from '~/projects/commit_box/info/load_branches';
const mockCommitPath = '/commit/abcd/branches';
diff --git a/spec/frontend/projects/compare/components/app_spec.js b/spec/frontend/projects/compare/components/app_spec.js
new file mode 100644
index 00000000000..e265055ef1b
--- /dev/null
+++ b/spec/frontend/projects/compare/components/app_spec.js
@@ -0,0 +1,116 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
+import CompareApp from '~/projects/compare/components/app.vue';
+import RevisionDropdown from '~/projects/compare/components/revision_dropdown.vue';
+
+jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' }));
+
+const projectCompareIndexPath = 'some/path';
+const refsProjectPath = 'some/refs/path';
+const paramsFrom = 'master';
+const paramsTo = 'master';
+
+describe('CompareApp component', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(CompareApp, {
+ propsData: {
+ projectCompareIndexPath,
+ refsProjectPath,
+ paramsFrom,
+ paramsTo,
+ projectMergeRequestPath: '',
+ createMrPath: '',
+ ...props,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders component with props', () => {
+ expect(wrapper.props()).toEqual(
+ expect.objectContaining({
+ projectCompareIndexPath,
+ refsProjectPath,
+ paramsFrom,
+ paramsTo,
+ }),
+ );
+ });
+
+ it('contains the correct form attributes', () => {
+ expect(wrapper.attributes('action')).toBe(projectCompareIndexPath);
+ expect(wrapper.attributes('method')).toBe('POST');
+ });
+
+ it('has input with csrf token', () => {
+ expect(wrapper.find('input[name="authenticity_token"]').attributes('value')).toBe(
+ 'mock-csrf-token',
+ );
+ });
+
+ it('has ellipsis', () => {
+ expect(wrapper.find('[data-testid="ellipsis"]').exists()).toBe(true);
+ });
+
+ it('renders Source and Target BranchDropdown components', () => {
+ const branchDropdowns = wrapper.findAll(RevisionDropdown);
+
+ expect(branchDropdowns.length).toBe(2);
+ expect(branchDropdowns.at(0).props('revisionText')).toBe('Source');
+ expect(branchDropdowns.at(1).props('revisionText')).toBe('Target');
+ });
+
+ describe('compare button', () => {
+ const findCompareButton = () => wrapper.find(GlButton);
+
+ it('renders button', () => {
+ expect(findCompareButton().exists()).toBe(true);
+ });
+
+ it('submits form', () => {
+ findCompareButton().vm.$emit('click');
+ expect(wrapper.find('form').element.submit).toHaveBeenCalled();
+ });
+
+ it('has compare text', () => {
+ expect(findCompareButton().text()).toBe('Compare');
+ });
+ });
+
+ describe('merge request buttons', () => {
+ const findProjectMrButton = () => wrapper.find('[data-testid="projectMrButton"]');
+ const findCreateMrButton = () => wrapper.find('[data-testid="createMrButton"]');
+
+ it('does not have merge request buttons', () => {
+ createComponent();
+ expect(findProjectMrButton().exists()).toBe(false);
+ expect(findCreateMrButton().exists()).toBe(false);
+ });
+
+ it('has "View open merge request" button', () => {
+ createComponent({
+ projectMergeRequestPath: 'some/project/merge/request/path',
+ });
+ expect(findProjectMrButton().exists()).toBe(true);
+ expect(findCreateMrButton().exists()).toBe(false);
+ });
+
+ it('has "Create merge request" button', () => {
+ createComponent({
+ createMrPath: 'some/create/create/mr/path',
+ });
+ expect(findProjectMrButton().exists()).toBe(false);
+ expect(findCreateMrButton().exists()).toBe(true);
+ });
+ });
+});
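
The 'submits form' case in the new spec above asserts that the form element's submit function was called. jsdom does not implement native form submission, so such an assertion presumes a spy installed somewhere in the suite setup; a sketch under that assumption (the actual setup is outside this hunk):

// Hypothetical setup: make HTMLFormElement#submit observable under jsdom.
beforeEach(() => {
  jest.spyOn(HTMLFormElement.prototype, 'submit').mockImplementation(() => {});
});

// With the spy in place, wrapper.find('form').element.submit resolves to the mock,
// so expect(...).toHaveBeenCalled() can pass after the Compare button emits 'click'.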
diff --git a/spec/frontend/projects/compare/components/revision_dropdown_spec.js b/spec/frontend/projects/compare/components/revision_dropdown_spec.js
new file mode 100644
index 00000000000..c9e87fb904d
--- /dev/null
+++ b/spec/frontend/projects/compare/components/revision_dropdown_spec.js
@@ -0,0 +1,92 @@
+import { shallowMount } from '@vue/test-utils';
+import AxiosMockAdapter from 'axios-mock-adapter';
+import { GlDropdown } from '@gitlab/ui';
+import axios from '~/lib/utils/axios_utils';
+import RevisionDropdown from '~/projects/compare/components/revision_dropdown.vue';
+import createFlash from '~/flash';
+
+const defaultProps = {
+ refsProjectPath: 'some/refs/path',
+ revisionText: 'Target',
+ paramsName: 'from',
+ paramsBranch: 'master',
+};
+
+jest.mock('~/flash');
+
+describe('RevisionDropdown component', () => {
+ let wrapper;
+ let axiosMock;
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(RevisionDropdown, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ axiosMock = new AxiosMockAdapter(axios);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ axiosMock.restore();
+ });
+
+ const findGlDropdown = () => wrapper.find(GlDropdown);
+
+ it('sets hidden input', () => {
+ createComponent();
+ expect(wrapper.find('input[type="hidden"]').attributes('value')).toBe(
+ defaultProps.paramsBranch,
+ );
+ });
+
+ it('updates the branches on success', async () => {
+ const Branches = ['branch-1', 'branch-2'];
+ const Tags = ['tag-1', 'tag-2', 'tag-3'];
+
+ axiosMock.onGet(defaultProps.refsProjectPath).replyOnce(200, {
+ Branches,
+ Tags,
+ });
+
+ createComponent();
+
+ await axios.waitForAll();
+
+ expect(wrapper.vm.branches).toEqual(Branches);
+ expect(wrapper.vm.tags).toEqual(Tags);
+ });
+
+ it('shows flash message on error', async () => {
+ axiosMock.onGet('some/invalid/path').replyOnce(404);
+
+ createComponent();
+
+ await wrapper.vm.fetchBranchesAndTags();
+ expect(createFlash).toHaveBeenCalled();
+ });
+
+ describe('GlDropdown component', () => {
+ it('renders props', () => {
+ createComponent();
+ expect(wrapper.props()).toEqual(expect.objectContaining(defaultProps));
+ });
+
+ it('displays default text', () => {
+ createComponent({
+ paramsBranch: null,
+ });
+ expect(findGlDropdown().props('text')).toBe('Select branch/tag');
+ });
+
+ it('displays params branch text', () => {
+ createComponent();
+ expect(findGlDropdown().props('text')).toBe(defaultProps.paramsBranch);
+ });
+ });
+});
diff --git a/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap b/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
index 0b9f095a700..f0d72124379 100644
--- a/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
+++ b/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
@@ -53,12 +53,12 @@ exports[`Project remove modal initialized matches the snapshot 1`] = `
variant="danger"
>
<gl-sprintf-stub
- message="Once a project is permanently deleted it %{strongStart}cannot be recovered%{strongEnd}. Permanently deleting this project will %{strongStart}immediately delete%{strongEnd} its repositories and %{strongStart}all related resources%{strongEnd} including issues, merge requests etc."
+ message="Once a project is permanently deleted, it %{strongStart}cannot be recovered%{strongEnd}. Permanently deleting this project will %{strongStart}immediately delete%{strongEnd} its repositories and %{strongStart}all related resources%{strongEnd}, including issues, merge requests etc."
/>
</gl-alert-stub>
<p>
- This action cannot be undone. You will lose this project's repository and all content: issues, merge requests, etc.
+ This action cannot be undone. You will lose this project's repository and all related resources, including issues, merge requests, etc.
</p>
<p
diff --git a/spec/frontend/projects/members/utils_spec.js b/spec/frontend/projects/members/utils_spec.js
new file mode 100644
index 00000000000..813e8455e85
--- /dev/null
+++ b/spec/frontend/projects/members/utils_spec.js
@@ -0,0 +1,14 @@
+import { projectMemberRequestFormatter } from '~/projects/members/utils';
+
+describe('project member utils', () => {
+ describe('projectMemberRequestFormatter', () => {
+ it('returns expected format', () => {
+ expect(
+ projectMemberRequestFormatter({
+ accessLevel: 50,
+ expires_at: '2020-10-16',
+ }),
+ ).toEqual({ project_member: { access_level: 50, expires_at: '2020-10-16' } });
+ });
+ });
+});
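
The expectation above implies a formatter that namespaces the payload under project_member and converts camelCase keys to snake_case. A plausible shape, inferred from the assertion (hypothetical; the real implementation in ~/projects/members/utils is not shown in this diff):

// Hypothetical sketch matching the expectation in the spec above.
export const projectMemberRequestFormatter = ({ accessLevel, expires_at: expiresAt }) => ({
  project_member: {
    access_level: accessLevel,
    expires_at: expiresAt,
  },
});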
diff --git a/spec/frontend/projects/pipelines/charts/components/app_spec.js b/spec/frontend/projects/pipelines/charts/components/app_spec.js
index 44329944097..6dcfacb46cb 100644
--- a/spec/frontend/projects/pipelines/charts/components/app_spec.js
+++ b/spec/frontend/projects/pipelines/charts/components/app_spec.js
@@ -1,32 +1,19 @@
import { merge } from 'lodash';
-import { createLocalVue, shallowMount } from '@vue/test-utils';
-import VueApollo from 'vue-apollo';
+import { shallowMount } from '@vue/test-utils';
import { GlTabs, GlTab } from '@gitlab/ui';
-import createMockApollo from 'helpers/mock_apollo_helper';
+import setWindowLocation from 'helpers/set_window_location_helper';
+import { TEST_HOST } from 'helpers/test_constants';
+import { mergeUrlParams, updateHistory, getParameterValues } from '~/lib/utils/url_utility';
import Component from '~/projects/pipelines/charts/components/app.vue';
import PipelineCharts from '~/projects/pipelines/charts/components/pipeline_charts.vue';
-import getPipelineCountByStatus from '~/projects/pipelines/charts/graphql/queries/get_pipeline_count_by_status.query.graphql';
-import getProjectPipelineStatistics from '~/projects/pipelines/charts/graphql/queries/get_project_pipeline_statistics.query.graphql';
-import { mockPipelineCount, mockPipelineStatistics } from '../mock_data';
-const projectPath = 'gitlab-org/gitlab';
-const localVue = createLocalVue();
-localVue.use(VueApollo);
+jest.mock('~/lib/utils/url_utility');
const DeploymentFrequencyChartsStub = { name: 'DeploymentFrequencyCharts', render: () => {} };
describe('ProjectsPipelinesChartsApp', () => {
let wrapper;
- function createMockApolloProvider() {
- const requestHandlers = [
- [getPipelineCountByStatus, jest.fn().mockResolvedValue(mockPipelineCount)],
- [getProjectPipelineStatistics, jest.fn().mockResolvedValue(mockPipelineStatistics)],
- ];
-
- return createMockApollo(requestHandlers);
- }
-
function createComponent(mountOptions = {}) {
wrapper = shallowMount(
Component,
@@ -34,11 +21,8 @@ describe('ProjectsPipelinesChartsApp', () => {
{},
{
provide: {
- projectPath,
shouldRenderDeploymentFrequencyCharts: false,
},
- localVue,
- apolloProvider: createMockApolloProvider(),
stubs: {
DeploymentFrequencyCharts: DeploymentFrequencyChartsStub,
},
@@ -57,52 +41,15 @@ describe('ProjectsPipelinesChartsApp', () => {
wrapper = null;
});
- describe('pipelines charts', () => {
- it('displays the pipeline charts', () => {
- const chart = wrapper.find(PipelineCharts);
- const analytics = mockPipelineStatistics.data.project.pipelineAnalytics;
-
- const {
- totalPipelines: total,
- successfulPipelines: success,
- failedPipelines: failed,
- } = mockPipelineCount.data.project;
-
- expect(chart.exists()).toBe(true);
- expect(chart.props()).toMatchObject({
- counts: {
- failed: failed.count,
- success: success.count,
- total: total.count,
- successRatio: (success.count / (success.count + failed.count)) * 100,
- },
- lastWeek: {
- labels: analytics.weekPipelinesLabels,
- totals: analytics.weekPipelinesTotals,
- success: analytics.weekPipelinesSuccessful,
- },
- lastMonth: {
- labels: analytics.monthPipelinesLabels,
- totals: analytics.monthPipelinesTotals,
- success: analytics.monthPipelinesSuccessful,
- },
- lastYear: {
- labels: analytics.yearPipelinesLabels,
- totals: analytics.yearPipelinesTotals,
- success: analytics.yearPipelinesSuccessful,
- },
- timesChart: {
- labels: analytics.pipelineTimesLabels,
- values: analytics.pipelineTimesValues,
- },
- });
- });
- });
-
- const findDeploymentFrequencyCharts = () => wrapper.find(DeploymentFrequencyChartsStub);
const findGlTabs = () => wrapper.find(GlTabs);
const findAllGlTab = () => wrapper.findAll(GlTab);
const findGlTabAt = (i) => findAllGlTab().at(i);
+ const findDeploymentFrequencyCharts = () => wrapper.find(DeploymentFrequencyChartsStub);
+ const findPipelineCharts = () => wrapper.find(PipelineCharts);
+
+ it('renders the pipeline charts', () => {
+ expect(findPipelineCharts().exists()).toBe(true);
+ });
describe('when shouldRenderDeploymentFrequencyCharts is true', () => {
beforeEach(() => {
@@ -115,6 +62,49 @@ describe('ProjectsPipelinesChartsApp', () => {
expect(findGlTabAt(1).attributes('title')).toBe('Deployments');
expect(findDeploymentFrequencyCharts().exists()).toBe(true);
});
+
+ it('sets the tab and url when a tab is clicked', async () => {
+ let chartsPath;
+ setWindowLocation(`${TEST_HOST}/gitlab-org/gitlab-test/-/pipelines/charts`);
+
+ mergeUrlParams.mockImplementation(({ chart }, path) => {
+ expect(chart).toBe('deployments');
+ expect(path).toBe(window.location.pathname);
+ chartsPath = `${path}?chart=${chart}`;
+ return chartsPath;
+ });
+
+ updateHistory.mockImplementation(({ url }) => {
+ expect(url).toBe(chartsPath);
+ });
+ const tabs = findGlTabs();
+
+ expect(tabs.attributes('value')).toBe('0');
+
+ tabs.vm.$emit('input', 1);
+
+ await wrapper.vm.$nextTick();
+
+ expect(tabs.attributes('value')).toBe('1');
+ });
+ });
+
+ describe('when provided with a query param', () => {
+ it.each`
+ chart | tab
+ ${'deployments'} | ${'1'}
+ ${'pipelines'} | ${'0'}
+ ${'fake'} | ${'0'}
+ ${''} | ${'0'}
+ `('shows the correct tab for URL parameter "$chart"', ({ chart, tab }) => {
+ setWindowLocation(`${TEST_HOST}/gitlab-org/gitlab-test/-/pipelines/charts?chart=${chart}`);
+ getParameterValues.mockImplementation((name) => {
+ expect(name).toBe('chart');
+ return chart ? [chart] : [];
+ });
+ createComponent({ provide: { shouldRenderDeploymentFrequencyCharts: true } });
+ expect(findGlTabs().attributes('value')).toBe(tab);
+ });
});
describe('when shouldRenderDeploymentFrequencyCharts is false', () => {
diff --git a/spec/frontend/projects/pipelines/charts/components/pipeline_charts_spec.js b/spec/frontend/projects/pipelines/charts/components/pipeline_charts_spec.js
index 598055d5828..3e588d46a4f 100644
--- a/spec/frontend/projects/pipelines/charts/components/pipeline_charts_spec.js
+++ b/spec/frontend/projects/pipelines/charts/components/pipeline_charts_spec.js
@@ -1,35 +1,37 @@
-import { shallowMount } from '@vue/test-utils';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
import { GlColumnChart } from '@gitlab/ui/dist/charts';
+import createMockApollo from 'helpers/mock_apollo_helper';
import StatisticsList from '~/projects/pipelines/charts/components/statistics_list.vue';
import CiCdAnalyticsAreaChart from '~/projects/pipelines/charts/components/ci_cd_analytics_area_chart.vue';
import PipelineCharts from '~/projects/pipelines/charts/components/pipeline_charts.vue';
-import {
- counts,
- timesChartData as timesChart,
- areaChartData as lastWeek,
- areaChartData as lastMonth,
- lastYearChartData as lastYear,
-} from '../mock_data';
+import getPipelineCountByStatus from '~/projects/pipelines/charts/graphql/queries/get_pipeline_count_by_status.query.graphql';
+import getProjectPipelineStatistics from '~/projects/pipelines/charts/graphql/queries/get_project_pipeline_statistics.query.graphql';
+import { mockPipelineCount, mockPipelineStatistics } from '../mock_data';
-describe('ProjectsPipelinesChartsApp', () => {
+const projectPath = 'gitlab-org/gitlab';
+const localVue = createLocalVue();
+localVue.use(VueApollo);
+
+describe('~/projects/pipelines/charts/components/pipeline_charts.vue', () => {
let wrapper;
+ function createMockApolloProvider() {
+ const requestHandlers = [
+ [getPipelineCountByStatus, jest.fn().mockResolvedValue(mockPipelineCount)],
+ [getProjectPipelineStatistics, jest.fn().mockResolvedValue(mockPipelineStatistics)],
+ ];
+
+ return createMockApollo(requestHandlers);
+ }
+
beforeEach(() => {
wrapper = shallowMount(PipelineCharts, {
- propsData: {
- counts,
- timesChart,
- lastWeek,
- lastMonth,
- lastYear,
- },
provide: {
- projectPath: 'test/project',
- shouldRenderDeploymentFrequencyCharts: true,
- },
- stubs: {
- DeploymentFrequencyCharts: true,
+ projectPath,
},
+ localVue,
+ apolloProvider: createMockApolloProvider(),
});
});
@@ -43,7 +45,12 @@ describe('ProjectsPipelinesChartsApp', () => {
const list = wrapper.find(StatisticsList);
expect(list.exists()).toBe(true);
- expect(list.props('counts')).toBe(counts);
+ expect(list.props('counts')).toEqual({
+ total: 34,
+ success: 23,
+ failed: 1,
+ successRatio: (23 / (23 + 1)) * 100,
+ });
});
it('displays the commit duration chart', () => {
diff --git a/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js b/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
index c83b1852147..f9fbb1b3016 100644
--- a/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
+++ b/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
@@ -1,20 +1,36 @@
-import { mount } from '@vue/test-utils';
+import { GlAlert } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
import waitForPromises from 'helpers/wait_for_promises';
-import ServiceDeskRoot from '~/projects/settings_service_desk/components/service_desk_root.vue';
-import ServiceDeskSetting from '~/projects/settings_service_desk/components/service_desk_setting.vue';
import axios from '~/lib/utils/axios_utils';
import httpStatusCodes from '~/lib/utils/http_status';
+import ServiceDeskRoot from '~/projects/settings_service_desk/components/service_desk_root.vue';
+import ServiceDeskSetting from '~/projects/settings_service_desk/components/service_desk_setting.vue';
describe('ServiceDeskRoot', () => {
- const endpoint = '/gitlab-org/gitlab-test/service_desk';
- const initialIncomingEmail = 'servicedeskaddress@example.com';
let axiosMock;
let wrapper;
let spy;
+ const provideData = {
+ customEmail: 'custom.email@example.com',
+ customEmailEnabled: true,
+ endpoint: '/gitlab-org/gitlab-test/service_desk',
+ initialIncomingEmail: 'servicedeskaddress@example.com',
+ initialIsEnabled: true,
+ outgoingName: 'GitLab Support Bot',
+ projectKey: 'key',
+ selectedTemplate: 'Bug',
+ templates: ['Bug', 'Documentation'],
+ };
+
+ const getAlertText = () => wrapper.find(GlAlert).text();
+
+ const createComponent = () => shallowMount(ServiceDeskRoot, { provide: provideData });
+
beforeEach(() => {
axiosMock = new AxiosMockAdapter(axios);
+ spy = jest.spyOn(axios, 'put');
});
afterEach(() => {
@@ -25,156 +41,122 @@ describe('ServiceDeskRoot', () => {
}
});
- it('sends a request to toggle service desk off when the toggle is clicked from the on state', () => {
- axiosMock.onPut(endpoint).replyOnce(httpStatusCodes.OK);
+ describe('ServiceDeskSetting component', () => {
+ it('is rendered', () => {
+ wrapper = createComponent();
+
+ expect(wrapper.find(ServiceDeskSetting).props()).toEqual({
+ customEmail: provideData.customEmail,
+ customEmailEnabled: provideData.customEmailEnabled,
+ incomingEmail: provideData.initialIncomingEmail,
+ initialOutgoingName: provideData.outgoingName,
+ initialProjectKey: provideData.projectKey,
+ initialSelectedTemplate: provideData.selectedTemplate,
+ isEnabled: provideData.initialIsEnabled,
+ isTemplateSaving: false,
+ templates: provideData.templates,
+ });
+ });
+
+ describe('toggle event', () => {
+ describe('when toggling service desk on', () => {
+ beforeEach(async () => {
+ wrapper = createComponent();
- spy = jest.spyOn(axios, 'put');
+ wrapper.find(ServiceDeskSetting).vm.$emit('toggle', true);
- wrapper = mount(ServiceDeskRoot, {
- propsData: {
- initialIsEnabled: true,
- initialIncomingEmail,
- endpoint,
- },
- });
+ await waitForPromises();
+ });
+
+ it('sends a request to turn service desk on', () => {
+ axiosMock.onPut(provideData.endpoint).replyOnce(httpStatusCodes.OK);
- wrapper.find('button.gl-toggle').trigger('click');
+ expect(spy).toHaveBeenCalledWith(provideData.endpoint, { service_desk_enabled: true });
+ });
- return wrapper.vm
- .$nextTick()
- .then(waitForPromises)
- .then(() => {
- expect(spy).toHaveBeenCalledWith(endpoint, { service_desk_enabled: false });
+ it('shows a message when there is an error', () => {
+ axiosMock.onPut(provideData.endpoint).networkError();
+
+ expect(getAlertText()).toContain('An error occurred while enabling Service Desk.');
+ });
});
- });
- it('sends a request to toggle service desk on when the toggle is clicked from the off state', () => {
- axiosMock.onPut(endpoint).replyOnce(httpStatusCodes.OK);
+ describe('when toggling service desk off', () => {
+ beforeEach(async () => {
+ wrapper = createComponent();
- spy = jest.spyOn(axios, 'put');
+ wrapper.find(ServiceDeskSetting).vm.$emit('toggle', false);
- wrapper = mount(ServiceDeskRoot, {
- propsData: {
- initialIsEnabled: false,
- initialIncomingEmail: '',
- endpoint,
- },
- });
+ await waitForPromises();
+ });
- wrapper.find('button.gl-toggle').trigger('click');
+ it('sends a request to turn service desk off', () => {
+ axiosMock.onPut(provideData.endpoint).replyOnce(httpStatusCodes.OK);
- return wrapper.vm.$nextTick(() => {
- expect(spy).toHaveBeenCalledWith(endpoint, { service_desk_enabled: true });
- });
- });
+ expect(spy).toHaveBeenCalledWith(provideData.endpoint, { service_desk_enabled: false });
+ });
- it('shows an error message when there is an issue toggling service desk on', () => {
- axiosMock.onPut(endpoint).networkError();
+ it('shows a message when there is an error', () => {
+ axiosMock.onPut(provideData.endpoint).networkError();
- wrapper = mount(ServiceDeskRoot, {
- propsData: {
- initialIsEnabled: false,
- initialIncomingEmail: '',
- endpoint,
- },
+ expect(getAlertText()).toContain('An error occurred while disabling Service Desk.');
+ });
+ });
});
- wrapper.find('button.gl-toggle').trigger('click');
+ describe('save event', () => {
+ describe('successful request', () => {
+ beforeEach(async () => {
+ axiosMock.onPut(provideData.endpoint).replyOnce(httpStatusCodes.OK);
- return wrapper.vm
- .$nextTick()
- .then(waitForPromises)
- .then(() => {
- expect(wrapper.html()).toContain('An error occurred while enabling Service Desk.');
- });
- });
+ wrapper = createComponent();
- it('sends a request to update template when the "Save template" button is clicked', () => {
- axiosMock.onPut(endpoint).replyOnce(httpStatusCodes.OK);
+ const payload = {
+ selectedTemplate: 'Bug',
+ outgoingName: 'GitLab Support Bot',
+ projectKey: 'key',
+ };
- spy = jest.spyOn(axios, 'put');
+ wrapper.find(ServiceDeskSetting).vm.$emit('save', payload);
- wrapper = mount(ServiceDeskRoot, {
- propsData: {
- initialIsEnabled: true,
- endpoint,
- initialIncomingEmail,
- selectedTemplate: 'Bug',
- outgoingName: 'GitLab Support Bot',
- templates: ['Bug', 'Documentation'],
- projectKey: 'key',
- },
- });
+ await waitForPromises();
+ });
- wrapper.find('button.btn-success').trigger('click');
+ it('sends a request to update template', async () => {
+ expect(spy).toHaveBeenCalledWith(provideData.endpoint, {
+ issue_template_key: 'Bug',
+ outgoing_name: 'GitLab Support Bot',
+ project_key: 'key',
+ service_desk_enabled: true,
+ });
+ });
- return wrapper.vm.$nextTick(() => {
- expect(spy).toHaveBeenCalledWith(endpoint, {
- issue_template_key: 'Bug',
- outgoing_name: 'GitLab Support Bot',
- project_key: 'key',
- service_desk_enabled: true,
+ it('shows success message', () => {
+ expect(getAlertText()).toContain('Changes saved.');
+ });
});
- });
- });
- it('saves the template when the "Save template" button is clicked', () => {
- axiosMock.onPut(endpoint).replyOnce(httpStatusCodes.OK);
-
- wrapper = mount(ServiceDeskRoot, {
- propsData: {
- initialIsEnabled: true,
- endpoint,
- initialIncomingEmail,
- selectedTemplate: 'Bug',
- templates: ['Bug', 'Documentation'],
- },
- });
+ describe('unsuccessful request', () => {
+ beforeEach(async () => {
+ axiosMock.onPut(provideData.endpoint).networkError();
- wrapper.find('button.btn-success').trigger('click');
+ wrapper = createComponent();
- return wrapper.vm
- .$nextTick()
- .then(waitForPromises)
- .then(() => {
- expect(wrapper.html()).toContain('Changes saved.');
- });
- });
+ const payload = {
+ selectedTemplate: 'Bug',
+ outgoingName: 'GitLab Support Bot',
+ projectKey: 'key',
+ };
- it('shows an error message when there is an issue saving the template', () => {
- axiosMock.onPut(endpoint).networkError();
-
- wrapper = mount(ServiceDeskRoot, {
- propsData: {
- initialIsEnabled: true,
- endpoint,
- initialIncomingEmail,
- selectedTemplate: 'Bug',
- templates: ['Bug', 'Documentation'],
- },
- });
+ wrapper.find(ServiceDeskSetting).vm.$emit('save', payload);
- wrapper.find('button.btn-success').trigger('click');
+ await waitForPromises();
+ });
- return wrapper.vm
- .$nextTick()
- .then(waitForPromises)
- .then(() => {
- expect(wrapper.html()).toContain('An error occured while saving changes:');
+ it('shows an error message', () => {
+ expect(getAlertText()).toContain('An error occured while saving changes:');
+ });
});
- });
-
- it('passes customEmail through updatedCustomEmail correctly', () => {
- const customEmail = 'foo';
-
- wrapper = mount(ServiceDeskRoot, {
- propsData: {
- initialIsEnabled: true,
- endpoint,
- customEmail,
- },
});
-
- expect(wrapper.find(ServiceDeskSetting).props('customEmail')).toEqual(customEmail);
});
});
diff --git a/spec/frontend/projects/settings_service_desk/components/service_desk_setting_spec.js b/spec/frontend/projects/settings_service_desk/components/service_desk_setting_spec.js
index ddd9a7b2fad..51d656e0ea4 100644
--- a/spec/frontend/projects/settings_service_desk/components/service_desk_setting_spec.js
+++ b/spec/frontend/projects/settings_service_desk/components/service_desk_setting_spec.js
@@ -1,63 +1,68 @@
import { shallowMount, mount } from '@vue/test-utils';
-import { GlLoadingIcon } from '@gitlab/ui';
-import eventHub from '~/projects/settings_service_desk/event_hub';
+import { GlButton, GlFormSelect, GlLoadingIcon, GlToggle } from '@gitlab/ui';
+import { nextTick } from 'vue';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import ServiceDeskSetting from '~/projects/settings_service_desk/components/service_desk_setting.vue';
+import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
describe('ServiceDeskSetting', () => {
let wrapper;
+ const findButton = () => wrapper.find(GlButton);
+ const findClipboardButton = () => wrapper.find(ClipboardButton);
+ const findIncomingEmail = () => wrapper.findByTestId('incoming-email');
+ const findIncomingEmailLabel = () => wrapper.findByTestId('incoming-email-describer');
+ const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
+ const findTemplateDropdown = () => wrapper.find(GlFormSelect);
+ const findToggle = () => wrapper.find(GlToggle);
+
+ const createComponent = ({ props = {}, mountFunction = shallowMount } = {}) =>
+ extendedWrapper(
+ mountFunction(ServiceDeskSetting, {
+ propsData: {
+ isEnabled: true,
+ ...props,
+ },
+ }),
+ );
+
afterEach(() => {
if (wrapper) {
wrapper.destroy();
}
});
- const findTemplateDropdown = () => wrapper.find('#service-desk-template-select');
- const findIncomingEmail = () => wrapper.find('[data-testid="incoming-email"]');
-
describe('when isEnabled=true', () => {
describe('only isEnabled', () => {
describe('as project admin', () => {
beforeEach(() => {
- wrapper = shallowMount(ServiceDeskSetting, {
- propsData: {
- isEnabled: true,
- },
- });
+ wrapper = createComponent();
});
it('should see activation checkbox', () => {
- expect(wrapper.find('#service-desk-checkbox').exists()).toBe(true);
+ expect(findToggle().exists()).toBe(true);
});
it('should see main panel with the email info', () => {
- expect(wrapper.find('#incoming-email-describer').exists()).toBe(true);
+ expect(findIncomingEmailLabel().exists()).toBe(true);
});
it('should see loading spinner and not the incoming email', () => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(findLoadingIcon().exists()).toBe(true);
expect(findIncomingEmail().exists()).toBe(false);
});
});
});
describe('service desk toggle', () => {
- it('emits an event to turn on Service Desk when clicked', () => {
- const eventSpy = jest.fn();
- eventHub.$on('serviceDeskEnabledCheckboxToggled', eventSpy);
-
- wrapper = mount(ServiceDeskSetting, {
- propsData: {
- isEnabled: false,
- },
- });
+ it('emits an event to turn on Service Desk when clicked', async () => {
+ wrapper = createComponent();
- wrapper.find('#service-desk-checkbox').trigger('click');
+ findToggle().vm.$emit('change', true);
- expect(eventSpy).toHaveBeenCalledWith(true);
+ await nextTick();
- eventHub.$off('serviceDeskEnabledCheckboxToggled', eventSpy);
- eventSpy.mockRestore();
+ expect(wrapper.emitted('toggle')[0]).toEqual([true]);
});
});
@@ -65,23 +70,23 @@ describe('ServiceDeskSetting', () => {
const incomingEmail = 'foo@bar.com';
beforeEach(() => {
- wrapper = mount(ServiceDeskSetting, {
- propsData: {
- isEnabled: true,
- incomingEmail,
- },
+ wrapper = createComponent({
+ props: { incomingEmail },
});
});
it('should see email and not the loading spinner', () => {
expect(findIncomingEmail().element.value).toEqual(incomingEmail);
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(findLoadingIcon().exists()).toBe(false);
});
it('renders a copy to clipboard button', () => {
- expect(wrapper.find('.qa-clipboard-button').exists()).toBe(true);
- expect(wrapper.find('.qa-clipboard-button').element.dataset.clipboardText).toBe(
- incomingEmail,
+ expect(findClipboardButton().exists()).toBe(true);
+ expect(findClipboardButton().props()).toEqual(
+ expect.objectContaining({
+ title: 'Copy',
+ text: incomingEmail,
+ }),
);
});
});
@@ -92,12 +97,8 @@ describe('ServiceDeskSetting', () => {
const customEmail = 'custom@bar.com';
beforeEach(() => {
- wrapper = mount(ServiceDeskSetting, {
- propsData: {
- isEnabled: true,
- incomingEmail,
- customEmail,
- },
+ wrapper = createComponent({
+ props: { incomingEmail, customEmail },
});
});
@@ -110,12 +111,8 @@ describe('ServiceDeskSetting', () => {
const email = 'foo@bar.com';
beforeEach(() => {
- wrapper = mount(ServiceDeskSetting, {
- propsData: {
- isEnabled: true,
- incomingEmail: email,
- customEmail: email,
- },
+ wrapper = createComponent({
+ props: { incomingEmail: email, customEmail: email },
});
});
@@ -127,21 +124,13 @@ describe('ServiceDeskSetting', () => {
describe('templates dropdown', () => {
it('renders a dropdown to choose a template', () => {
- wrapper = shallowMount(ServiceDeskSetting, {
- propsData: {
- isEnabled: true,
- },
- });
+ wrapper = createComponent();
- expect(wrapper.find('#service-desk-template-select').exists()).toBe(true);
+ expect(findTemplateDropdown().exists()).toBe(true);
});
it('renders a dropdown with a default value of ""', () => {
- wrapper = mount(ServiceDeskSetting, {
- propsData: {
- isEnabled: true,
- },
- });
+ wrapper = createComponent({ mountFunction: mount });
expect(findTemplateDropdown().element.value).toEqual('');
});
@@ -149,23 +138,18 @@ describe('ServiceDeskSetting', () => {
it('renders a dropdown with a value of "Bug" when it is the initial value', () => {
const templates = ['Bug', 'Documentation', 'Security release'];
- wrapper = mount(ServiceDeskSetting, {
- propsData: {
- isEnabled: true,
- initialSelectedTemplate: 'Bug',
- templates,
- },
+ wrapper = createComponent({
+ props: { initialSelectedTemplate: 'Bug', templates },
+ mountFunction: mount,
});
expect(findTemplateDropdown().element.value).toEqual('Bug');
});
it('renders a dropdown with no options when the project has no templates', () => {
- wrapper = mount(ServiceDeskSetting, {
- propsData: {
- isEnabled: true,
- templates: [],
- },
+ wrapper = createComponent({
+ props: { templates: [] },
+ mountFunction: mount,
});
// The dropdown by default has one empty option
@@ -174,11 +158,10 @@ describe('ServiceDeskSetting', () => {
it('renders a dropdown with options when the project has templates', () => {
const templates = ['Bug', 'Documentation', 'Security release'];
- wrapper = mount(ServiceDeskSetting, {
- propsData: {
- isEnabled: true,
- templates,
- },
+
+ wrapper = createComponent({
+ props: { templates },
+ mountFunction: mount,
});
// An empty-named template is prepended so the user can select no template
@@ -199,78 +182,59 @@ describe('ServiceDeskSetting', () => {
describe('save button', () => {
it('renders a save button to save a template', () => {
- wrapper = mount(ServiceDeskSetting, {
- propsData: {
- isEnabled: true,
- },
- });
+ wrapper = createComponent();
- expect(wrapper.find('button.btn-success').text()).toContain('Save changes');
+ expect(findButton().text()).toContain('Save changes');
});
- it('emits a save event with the chosen template when the save button is clicked', () => {
- const eventSpy = jest.fn();
- eventHub.$on('serviceDeskTemplateSave', eventSpy);
-
- wrapper = mount(ServiceDeskSetting, {
- propsData: {
- isEnabled: true,
+ it('emits a save event with the chosen template when the save button is clicked', async () => {
+ wrapper = createComponent({
+ props: {
initialSelectedTemplate: 'Bug',
initialOutgoingName: 'GitLab Support Bot',
initialProjectKey: 'key',
},
});
- wrapper.find('button.btn-success').trigger('click');
+ findButton().vm.$emit('click');
+
+ await nextTick();
- expect(eventSpy).toHaveBeenCalledWith({
+ const payload = {
selectedTemplate: 'Bug',
outgoingName: 'GitLab Support Bot',
projectKey: 'key',
- });
+ };
- eventHub.$off('serviceDeskTemplateSave', eventSpy);
- eventSpy.mockRestore();
+ expect(wrapper.emitted('save')[0]).toEqual([payload]);
});
});
describe('when isEnabled=false', () => {
beforeEach(() => {
- wrapper = shallowMount(ServiceDeskSetting, {
- propsData: {
- isEnabled: false,
- },
+ wrapper = createComponent({
+ props: { isEnabled: false },
});
});
it('does not render email panel', () => {
- expect(wrapper.find('#incoming-email-describer').exists()).toBe(false);
+ expect(findIncomingEmailLabel().exists()).toBe(false);
});
it('does not render template dropdown', () => {
- expect(wrapper.find('#service-desk-template-select').exists()).toBe(false);
+ expect(findTemplateDropdown().exists()).toBe(false);
});
it('does not render template save button', () => {
- expect(wrapper.find('button.btn-success').exists()).toBe(false);
+ expect(findButton().exists()).toBe(false);
});
- it('emits an event to turn on Service Desk when the toggle is clicked', () => {
- const eventSpy = jest.fn();
- eventHub.$on('serviceDeskEnabledCheckboxToggled', eventSpy);
-
- wrapper = mount(ServiceDeskSetting, {
- propsData: {
- isEnabled: true,
- },
- });
-
- wrapper.find('#service-desk-checkbox').trigger('click');
+ it('emits an event to turn on Service Desk when the toggle is clicked', async () => {
+ findToggle().vm.$emit('change', false);
- expect(eventSpy).toHaveBeenCalledWith(false);
+ await nextTick();
- eventHub.$off('serviceDeskEnabledCheckboxToggled', eventSpy);
- eventSpy.mockRestore();
+ expect(wrapper.emitted('toggle')[0]).toEqual([false]);
});
});
});
diff --git a/spec/frontend/projects/settings_service_desk/services/service_desk_service_spec.js b/spec/frontend/projects/settings_service_desk/services/service_desk_service_spec.js
deleted file mode 100644
index d5340df03fe..00000000000
--- a/spec/frontend/projects/settings_service_desk/services/service_desk_service_spec.js
+++ /dev/null
@@ -1,111 +0,0 @@
-import AxiosMockAdapter from 'axios-mock-adapter';
-import ServiceDeskService from '~/projects/settings_service_desk/services/service_desk_service';
-import axios from '~/lib/utils/axios_utils';
-import httpStatusCodes from '~/lib/utils/http_status';
-
-describe('ServiceDeskService', () => {
- const endpoint = `/gitlab-org/gitlab-test/service_desk`;
- const dummyResponse = { message: 'Dummy response' };
- const errorMessage = 'Network Error';
- let axiosMock;
- let service;
-
- beforeEach(() => {
- axiosMock = new AxiosMockAdapter(axios);
- service = new ServiceDeskService(endpoint);
- });
-
- afterEach(() => {
- axiosMock.restore();
- });
-
- describe('toggleServiceDesk', () => {
- it('makes a request to set service desk', () => {
- axiosMock.onPut(endpoint).replyOnce(httpStatusCodes.OK, dummyResponse);
-
- return service.toggleServiceDesk(true).then((response) => {
- expect(response.data).toEqual(dummyResponse);
- });
- });
-
- it('fails on error response', () => {
- axiosMock.onPut(endpoint).networkError();
-
- return service.toggleServiceDesk(true).catch((error) => {
- expect(error.message).toBe(errorMessage);
- });
- });
-
- it('makes a request with the expected body', () => {
- axiosMock.onPut(endpoint).replyOnce(httpStatusCodes.OK, dummyResponse);
-
- const spy = jest.spyOn(axios, 'put');
-
- service.toggleServiceDesk(true);
-
- expect(spy).toHaveBeenCalledWith(endpoint, {
- service_desk_enabled: true,
- });
-
- spy.mockRestore();
- });
- });
-
- describe('updateTemplate', () => {
- it('makes a request to update template', () => {
- axiosMock.onPut(endpoint).replyOnce(httpStatusCodes.OK, dummyResponse);
-
- return service
- .updateTemplate(
- {
- selectedTemplate: 'Bug',
- outgoingName: 'GitLab Support Bot',
- },
- true,
- )
- .then((response) => {
- expect(response.data).toEqual(dummyResponse);
- });
- });
-
- it('fails on error response', () => {
- axiosMock.onPut(endpoint).networkError();
-
- return service
- .updateTemplate(
- {
- selectedTemplate: 'Bug',
- outgoingName: 'GitLab Support Bot',
- },
- true,
- )
- .catch((error) => {
- expect(error.message).toBe(errorMessage);
- });
- });
-
- it('makes a request with the expected body', () => {
- axiosMock.onPut(endpoint).replyOnce(httpStatusCodes.OK, dummyResponse);
-
- const spy = jest.spyOn(axios, 'put');
-
- service.updateTemplate(
- {
- selectedTemplate: 'Bug',
- outgoingName: 'GitLab Support Bot',
- projectKey: 'key',
- },
- true,
- );
-
- expect(spy).toHaveBeenCalledWith(endpoint, {
- issue_template_key: 'Bug',
- outgoing_name: 'GitLab Support Bot',
- project_key: 'key',
- service_desk_enabled: true,
- });
-
- spy.mockRestore();
- });
- });
-});
diff --git a/spec/frontend/registry/explorer/components/delete_image_spec.js b/spec/frontend/registry/explorer/components/delete_image_spec.js
new file mode 100644
index 00000000000..5c9ecf57a9f
--- /dev/null
+++ b/spec/frontend/registry/explorer/components/delete_image_spec.js
@@ -0,0 +1,153 @@
+import { shallowMount } from '@vue/test-utils';
+import waitForPromises from 'helpers/wait_for_promises';
+import component from '~/registry/explorer/components/delete_image.vue';
+import deleteContainerRepositoryMutation from '~/registry/explorer/graphql/mutations/delete_container_repository.mutation.graphql';
+import getContainerRepositoryDetailsQuery from '~/registry/explorer/graphql/queries/get_container_repository_details.query.graphql';
+
+import { GRAPHQL_PAGE_SIZE } from '~/registry/explorer/constants/index';
+
+describe('Delete Image', () => {
+ let wrapper;
+ const id = '1';
+ const storeMock = {
+ readQuery: jest.fn().mockReturnValue({
+ containerRepository: {
+ status: 'foo',
+ },
+ }),
+ writeQuery: jest.fn(),
+ };
+
+ const updatePayload = {
+ data: {
+ destroyContainerRepository: {
+ containerRepository: {
+ status: 'baz',
+ },
+ },
+ },
+ };
+
+ const findButton = () => wrapper.find('button');
+
+ const mountComponent = ({
+ propsData = { id },
+ mutate = jest.fn().mockResolvedValue({}),
+ } = {}) => {
+ wrapper = shallowMount(component, {
+ propsData,
+ mocks: {
+ $apollo: {
+ mutate,
+ },
+ },
+ scopedSlots: {
+ default: '<button @click="props.doDelete">test</button>',
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('executes apollo mutate on doDelete', () => {
+ const mutate = jest.fn().mockResolvedValue({});
+ mountComponent({ mutate });
+
+ wrapper.vm.doDelete();
+
+ expect(mutate).toHaveBeenCalledWith({
+ mutation: deleteContainerRepositoryMutation,
+ variables: {
+ id,
+ },
+ update: undefined,
+ });
+ });
+
+ it('on success emits the correct events', async () => {
+ const mutate = jest.fn().mockResolvedValue({});
+ mountComponent({ mutate });
+
+ wrapper.vm.doDelete();
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('start')).toEqual([[]]);
+ expect(wrapper.emitted('success')).toEqual([[]]);
+ expect(wrapper.emitted('end')).toEqual([[]]);
+ });
+
+ it('when a payload contains an error emits an error event', async () => {
+ const mutate = jest
+ .fn()
+ .mockResolvedValue({ data: { destroyContainerRepository: { errors: ['foo'] } } });
+
+ mountComponent({ mutate });
+ wrapper.vm.doDelete();
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual([[['foo']]]);
+ });
+
+ it('when the api call errors emits an error event', async () => {
+ const mutate = jest.fn().mockRejectedValue('error');
+
+ mountComponent({ mutate });
+ wrapper.vm.doDelete();
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual([[['error']]]);
+ });
+
+ it('uses the update function, when the prop is set to true', () => {
+ const mutate = jest.fn().mockResolvedValue({});
+
+ mountComponent({ mutate, propsData: { id, useUpdateFn: true } });
+ wrapper.vm.doDelete();
+
+ expect(mutate).toHaveBeenCalledWith({
+ mutation: deleteContainerRepositoryMutation,
+ variables: {
+ id,
+ },
+ update: wrapper.vm.updateImageStatus,
+ });
+ });
+
+ it('updateImageStatus reads and writes to the cache', () => {
+ mountComponent();
+
+ const variables = {
+ id,
+ first: GRAPHQL_PAGE_SIZE,
+ };
+
+ wrapper.vm.updateImageStatus(storeMock, updatePayload);
+
+ expect(storeMock.readQuery).toHaveBeenCalledWith({
+ query: getContainerRepositoryDetailsQuery,
+ variables,
+ });
+ expect(storeMock.writeQuery).toHaveBeenCalledWith({
+ query: getContainerRepositoryDetailsQuery,
+ variables,
+ data: {
+ containerRepository: {
+ status: updatePayload.data.destroyContainerRepository.containerRepository.status,
+ },
+ },
+ });
+ });
+
+ it('binds the doDelete function to the default scoped slot', () => {
+ const mutate = jest.fn().mockResolvedValue({});
+ mountComponent({ mutate });
+ findButton().trigger('click');
+ expect(mutate).toHaveBeenCalled();
+ });
+});
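
The readQuery/writeQuery assertions above exercise an Apollo update callback. One plausible shape for it, inferred from those expectations (hypothetical; the component method itself is not part of this diff):

import { GRAPHQL_PAGE_SIZE } from '~/registry/explorer/constants/index';
import getContainerRepositoryDetailsQuery from '~/registry/explorer/graphql/queries/get_container_repository_details.query.graphql';

// Hypothetical sketch of the cache update the spec asserts against,
// written as a component method so this.id is the repository id prop.
function updateImageStatus(store, { data: { destroyContainerRepository } }) {
  const variables = { id: this.id, first: GRAPHQL_PAGE_SIZE };
  const sourceData = store.readQuery({ query: getContainerRepositoryDetailsQuery, variables });

  store.writeQuery({
    query: getContainerRepositoryDetailsQuery,
    variables,
    data: {
      containerRepository: {
        ...sourceData.containerRepository,
        status: destroyContainerRepository.containerRepository.status,
      },
    },
  });
}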
diff --git a/spec/frontend/registry/explorer/components/details_page/empty_state_spec.js b/spec/frontend/registry/explorer/components/details_page/empty_state_spec.js
new file mode 100644
index 00000000000..7739f111906
--- /dev/null
+++ b/spec/frontend/registry/explorer/components/details_page/empty_state_spec.js
@@ -0,0 +1,54 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlEmptyState } from '@gitlab/ui';
+import component from '~/registry/explorer/components/details_page/empty_state.vue';
+import {
+ NO_TAGS_TITLE,
+ NO_TAGS_MESSAGE,
+ MISSING_OR_DELETED_IMAGE_TITLE,
+ MISSING_OR_DELETED_IMAGE_MESSAGE,
+} from '~/registry/explorer/constants';
+
+describe('EmptyTagsState component', () => {
+ let wrapper;
+
+ const findEmptyState = () => wrapper.find(GlEmptyState);
+
+ const mountComponent = (propsData) => {
+ wrapper = shallowMount(component, {
+ stubs: {
+ GlEmptyState,
+ },
+ propsData,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('contains gl-empty-state', () => {
+ mountComponent();
+ expect(findEmptyState().exists()).toBe(true);
+ });
+
+ it.each`
+ isEmptyImage | title | description
+ ${false} | ${NO_TAGS_TITLE} | ${NO_TAGS_MESSAGE}
+ ${true} | ${MISSING_OR_DELETED_IMAGE_TITLE} | ${MISSING_OR_DELETED_IMAGE_MESSAGE}
+ `(
+ 'when isEmptyImage is $isEmptyImage has the correct props',
+ ({ isEmptyImage, title, description }) => {
+ mountComponent({
+ noContainersImage: 'foo',
+ isEmptyImage,
+ });
+
+ expect(findEmptyState().props()).toMatchObject({
+ title,
+ description,
+ svgPath: 'foo',
+ });
+ },
+ );
+});
diff --git a/spec/frontend/registry/explorer/components/details_page/empty_tags_state_spec.js b/spec/frontend/registry/explorer/components/details_page/empty_tags_state_spec.js
deleted file mode 100644
index 09afd9d2d84..00000000000
--- a/spec/frontend/registry/explorer/components/details_page/empty_tags_state_spec.js
+++ /dev/null
@@ -1,43 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { GlEmptyState } from '@gitlab/ui';
-import component from '~/registry/explorer/components/details_page/empty_tags_state.vue';
-import {
- EMPTY_IMAGE_REPOSITORY_TITLE,
- EMPTY_IMAGE_REPOSITORY_MESSAGE,
-} from '~/registry/explorer/constants';
-
-describe('EmptyTagsState component', () => {
- let wrapper;
-
- const findEmptyState = () => wrapper.find(GlEmptyState);
-
- const mountComponent = () => {
- wrapper = shallowMount(component, {
- stubs: {
- GlEmptyState,
- },
- propsData: {
- noContainersImage: 'foo',
- },
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- it('contains gl-empty-state', () => {
- mountComponent();
- expect(findEmptyState().exists()).toBe(true);
- });
-
- it('has the correct props', () => {
- mountComponent();
- expect(findEmptyState().props()).toMatchObject({
- title: EMPTY_IMAGE_REPOSITORY_TITLE,
- description: EMPTY_IMAGE_REPOSITORY_MESSAGE,
- svgPath: 'foo',
- });
- });
-});
diff --git a/spec/frontend/registry/explorer/components/list_page/group_empty_state_spec.js b/spec/frontend/registry/explorer/components/list_page/group_empty_state_spec.js
index 1ba2036dc34..ffdc6b787cb 100644
--- a/spec/frontend/registry/explorer/components/list_page/group_empty_state_spec.js
+++ b/spec/frontend/registry/explorer/components/list_page/group_empty_state_spec.js
@@ -1,8 +1,8 @@
import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import { GlSprintf } from '@gitlab/ui';
-import { GlEmptyState } from '../../stubs';
import groupEmptyState from '~/registry/explorer/components/list_page/group_empty_state.vue';
+import { GlEmptyState } from '../../stubs';
const localVue = createLocalVue();
localVue.use(Vuex);
diff --git a/spec/frontend/registry/explorer/components/list_page/project_empty_state_spec.js b/spec/frontend/registry/explorer/components/list_page/project_empty_state_spec.js
index 3a27cf1923c..cbd1f1a99db 100644
--- a/spec/frontend/registry/explorer/components/list_page/project_empty_state_spec.js
+++ b/spec/frontend/registry/explorer/components/list_page/project_empty_state_spec.js
@@ -1,8 +1,8 @@
import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import { GlSprintf } from '@gitlab/ui';
-import { GlEmptyState } from '../../stubs';
import projectEmptyState from '~/registry/explorer/components/list_page/project_empty_state.vue';
+import { GlEmptyState } from '../../stubs';
import { dockerCommands } from '../../mock_data';
const localVue = createLocalVue();
diff --git a/spec/frontend/registry/explorer/mock_data.js b/spec/frontend/registry/explorer/mock_data.js
index b0fc009872c..f4453912db4 100644
--- a/spec/frontend/registry/explorer/mock_data.js
+++ b/spec/frontend/registry/explorer/mock_data.js
@@ -235,3 +235,9 @@ export const graphQLProjectImageRepositoriesDetailsMock = {
},
},
};
+
+export const graphQLEmptyImageDetailsMock = {
+ data: {
+ containerRepository: null,
+ },
+};
diff --git a/spec/frontend/registry/explorer/pages/details_spec.js b/spec/frontend/registry/explorer/pages/details_spec.js
index 1746a6a63b6..7106cdae27f 100644
--- a/spec/frontend/registry/explorer/pages/details_spec.js
+++ b/spec/frontend/registry/explorer/pages/details_spec.js
@@ -4,13 +4,14 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import Tracking from '~/tracking';
+import axios from '~/lib/utils/axios_utils';
import component from '~/registry/explorer/pages/details.vue';
import DeleteAlert from '~/registry/explorer/components/details_page/delete_alert.vue';
import PartialCleanupAlert from '~/registry/explorer/components/details_page/partial_cleanup_alert.vue';
import DetailsHeader from '~/registry/explorer/components/details_page/details_header.vue';
import TagsLoader from '~/registry/explorer/components/details_page/tags_loader.vue';
import TagsList from '~/registry/explorer/components/details_page/tags_list.vue';
-import EmptyTagsState from '~/registry/explorer/components/details_page/empty_tags_state.vue';
+import EmptyTagsState from '~/registry/explorer/components/details_page/empty_state.vue';
import getContainerRepositoryDetailsQuery from '~/registry/explorer/graphql/queries/get_container_repository_details.query.graphql';
import deleteContainerRepositoryTagsMutation from '~/registry/explorer/graphql/mutations/delete_container_repository_tags.mutation.graphql';
@@ -22,6 +23,7 @@ import {
graphQLImageDetailsEmptyTagsMock,
graphQLDeleteImageRepositoryTagsMock,
containerRepositoryMock,
+ graphQLEmptyImageDetailsMock,
tagsMock,
tagsPageInfo,
} from '../mock_data';
@@ -39,7 +41,7 @@ describe('Details Page', () => {
const findTagsList = () => wrapper.find(TagsList);
const findDeleteAlert = () => wrapper.find(DeleteAlert);
const findDetailsHeader = () => wrapper.find(DetailsHeader);
- const findEmptyTagsState = () => wrapper.find(EmptyTagsState);
+ const findEmptyState = () => wrapper.find(EmptyTagsState);
const findPartialCleanupAlert = () => wrapper.find(PartialCleanupAlert);
const routeId = 1;
@@ -133,6 +135,27 @@ describe('Details Page', () => {
});
});
+ describe('when the image does not exist', () => {
+ it('does not show the default ui', async () => {
+ mountComponent({ resolver: jest.fn().mockResolvedValue(graphQLEmptyImageDetailsMock) });
+
+ await waitForApolloRequestRender();
+
+ expect(findTagsLoader().exists()).toBe(false);
+ expect(findDetailsHeader().exists()).toBe(false);
+ expect(findTagsList().exists()).toBe(false);
+ expect(findPagination().exists()).toBe(false);
+ });
+
+ it('shows an empty state message', async () => {
+ mountComponent({ resolver: jest.fn().mockResolvedValue(graphQLEmptyImageDetailsMock) });
+
+ await waitForApolloRequestRender();
+
+ expect(findEmptyState().exists()).toBe(true);
+ });
+ });
+
describe('when the list of tags is empty', () => {
const resolver = jest.fn().mockResolvedValue(graphQLImageDetailsEmptyTagsMock);
@@ -141,7 +164,7 @@ describe('Details Page', () => {
await waitForApolloRequestRender();
- expect(findEmptyTagsState().exists()).toBe(true);
+ expect(findEmptyState().exists()).toBe(true);
});
it('does not show the loader', async () => {
@@ -401,6 +424,9 @@ describe('Details Page', () => {
const config = {
runCleanupPoliciesHelpPagePath: 'foo',
cleanupPoliciesHelpPagePath: 'bar',
+ userCalloutsPath: 'call_out_path',
+ userCalloutId: 'call_out_id',
+ showUnfinishedTagCleanupCallout: true,
};
describe(`when expirationPolicyCleanupStatus is ${UNFINISHED_STATUS}`, () => {
@@ -413,8 +439,9 @@ describe('Details Page', () => {
}),
);
});
+
it('exists', async () => {
- mountComponent({ resolver });
+ mountComponent({ resolver, config });
await waitForApolloRequestRender();
@@ -426,11 +453,16 @@ describe('Details Page', () => {
await waitForApolloRequestRender();
- expect(findPartialCleanupAlert().props()).toEqual({ ...config });
+ expect(findPartialCleanupAlert().props()).toEqual({
+ runCleanupPoliciesHelpPagePath: config.runCleanupPoliciesHelpPagePath,
+ cleanupPoliciesHelpPagePath: config.cleanupPoliciesHelpPagePath,
+ });
});
it('dismiss hides the component', async () => {
- mountComponent({ resolver });
+ jest.spyOn(axios, 'post').mockReturnValue();
+
+ mountComponent({ resolver, config });
await waitForApolloRequestRender();
@@ -440,13 +472,25 @@ describe('Details Page', () => {
await wrapper.vm.$nextTick();
+ expect(axios.post).toHaveBeenCalledWith(config.userCalloutsPath, {
+ feature_name: config.userCalloutId,
+ });
+ expect(findPartialCleanupAlert().exists()).toBe(false);
+ });
+
+ it('is hidden if the callout is dismissed', async () => {
+ mountComponent({ resolver });
+
+ await waitForApolloRequestRender();
+
expect(findPartialCleanupAlert().exists()).toBe(false);
});
});
describe(`when expirationPolicyCleanupStatus is not ${UNFINISHED_STATUS}`, () => {
it('the component is hidden', async () => {
- mountComponent();
+ mountComponent({ config });
+
await waitForApolloRequestRender();
expect(findPartialCleanupAlert().exists()).toBe(false);
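A minimal sketch of the callout-dismissal pattern the details_spec.js hunks above exercise (not part of the upstream patch; the `dismiss` event name and the exact axios instance are assumptions, while the `config` keys and helper names come from the spec itself):

// Assumes axios is the instance the spec already imports and that the alert
// emits a `dismiss` event which the page forwards to the user callouts API.
it('persists the dismissal through the user callouts endpoint', async () => {
  jest.spyOn(axios, 'post').mockResolvedValue();

  mountComponent({ resolver, config });
  await waitForApolloRequestRender();

  findPartialCleanupAlert().vm.$emit('dismiss');
  await wrapper.vm.$nextTick();

  // The callout id travels as feature_name, mirroring the assertion above.
  expect(axios.post).toHaveBeenCalledWith(config.userCalloutsPath, {
    feature_name: config.userCalloutId,
  });
  expect(findPartialCleanupAlert().exists()).toBe(false);
});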
diff --git a/spec/frontend/registry/explorer/pages/list_spec.js b/spec/frontend/registry/explorer/pages/list_spec.js
index c4556934934..9c49ff0c9e5 100644
--- a/spec/frontend/registry/explorer/pages/list_spec.js
+++ b/spec/frontend/registry/explorer/pages/list_spec.js
@@ -11,6 +11,7 @@ import GroupEmptyState from '~/registry/explorer/components/list_page/group_empt
import ProjectEmptyState from '~/registry/explorer/components/list_page/project_empty_state.vue';
import RegistryHeader from '~/registry/explorer/components/list_page/registry_header.vue';
import ImageList from '~/registry/explorer/components/list_page/image_list.vue';
+import DeleteImage from '~/registry/explorer/components/delete_image.vue';
import TitleArea from '~/vue_shared/components/registry/title_area.vue';
import {
@@ -27,7 +28,6 @@ import {
graphQLImageListMock,
graphQLImageDeleteMock,
deletedContainerRepository,
- graphQLImageDeleteMockError,
graphQLEmptyImageListMock,
graphQLEmptyGroupImageListMock,
pageInfo,
@@ -58,6 +58,7 @@ describe('List Page', () => {
const findListHeader = () => wrapper.find('[data-testid="listHeader"]');
const findSearchBox = () => wrapper.find(GlSearchBoxByClick);
const findEmptySearchMessage = () => wrapper.find('[data-testid="emptySearch"]');
+ const findDeleteImage = () => wrapper.find(DeleteImage);
const waitForApolloRequestRender = async () => {
jest.runOnlyPendingTimers();
@@ -91,6 +92,7 @@ describe('List Page', () => {
GlSprintf,
RegistryHeader,
TitleArea,
+ DeleteImage,
},
mocks: {
$toast,
@@ -300,23 +302,22 @@ describe('List Page', () => {
});
describe('delete image', () => {
- const deleteImage = async () => {
- await wrapper.vm.$nextTick();
+ const selectImageForDeletion = async () => {
+ await waitForApolloRequestRender();
findImageList().vm.$emit('delete', deletedContainerRepository);
- findDeleteModal().vm.$emit('ok');
-
- await waitForApolloRequestRender();
};
it('should call deleteItem when confirming deletion', async () => {
const mutationResolver = jest.fn().mockResolvedValue(graphQLImageDeleteMock);
mountComponent({ mutationResolver });
- await deleteImage();
+ await selectImageForDeletion();
+
+ findDeleteModal().vm.$emit('primary');
+ await waitForApolloRequestRender();
expect(wrapper.vm.itemToDelete).toEqual(deletedContainerRepository);
- expect(mutationResolver).toHaveBeenCalledWith({ id: deletedContainerRepository.id });
const updatedImage = findImageList()
.props('images')
@@ -326,10 +327,12 @@ describe('List Page', () => {
});
it('should show a success alert when delete request is successful', async () => {
- const mutationResolver = jest.fn().mockResolvedValue(graphQLImageDeleteMock);
- mountComponent({ mutationResolver });
+ mountComponent();
- await deleteImage();
+ await selectImageForDeletion();
+
+ findDeleteImage().vm.$emit('success');
+ await wrapper.vm.$nextTick();
const alert = findDeleteAlert();
expect(alert.exists()).toBe(true);
@@ -340,23 +343,12 @@ describe('List Page', () => {
describe('when delete request fails it shows an alert', () => {
it('user recoverable error', async () => {
- const mutationResolver = jest.fn().mockResolvedValue(graphQLImageDeleteMockError);
- mountComponent({ mutationResolver });
+ mountComponent();
- await deleteImage();
+ await selectImageForDeletion();
- const alert = findDeleteAlert();
- expect(alert.exists()).toBe(true);
- expect(alert.text().replace(/\s\s+/gm, ' ')).toBe(
- DELETE_IMAGE_ERROR_MESSAGE.replace('%{title}', wrapper.vm.itemToDelete.path),
- );
- });
-
- it('network error', async () => {
- const mutationResolver = jest.fn().mockRejectedValue();
- mountComponent({ mutationResolver });
-
- await deleteImage();
+ findDeleteImage().vm.$emit('error');
+ await wrapper.vm.$nextTick();
const alert = findDeleteAlert();
expect(alert.exists()).toBe(true);
@@ -499,9 +491,8 @@ describe('List Page', () => {
testTrackingCall('cancel_delete');
});
- it('send an event when confirm is clicked on modal', () => {
- const deleteModal = findDeleteModal();
- deleteModal.vm.$emit('ok');
+ it('send an event when the deletion starts', () => {
+ findDeleteImage().vm.$emit('start');
testTrackingCall('confirm_delete');
});
});
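The list_spec.js changes above route deletion through a DeleteImage child component; a condensed sketch of driving that flow purely through its events (not part of the upstream patch; finder and mock names are the ones used in the spec, and the `start`/`success`/`error` event names are taken from the diff):

// DeleteImage is registered among the stubs, so the page can be exercised by
// emitting the events the real component would fire during a deletion.
it('shows an error alert when the delete request fails', async () => {
  mountComponent();
  await waitForApolloRequestRender();

  findImageList().vm.$emit('delete', deletedContainerRepository);
  findDeleteImage().vm.$emit('error');
  await wrapper.vm.$nextTick();

  expect(findDeleteAlert().exists()).toBe(true);
});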
diff --git a/spec/frontend/releases/components/app_index_spec.js b/spec/frontend/releases/components/app_index_spec.js
index 1481dd30fd4..3c5ee212225 100644
--- a/spec/frontend/releases/components/app_index_spec.js
+++ b/spec/frontend/releases/components/app_index_spec.js
@@ -7,9 +7,9 @@ import ReleasesApp from '~/releases/components/app_index.vue';
import createStore from '~/releases/stores';
import createListModule from '~/releases/stores/modules/list';
import api from '~/api';
-import { pageInfoHeadersWithoutPagination, pageInfoHeadersWithPagination } from '../mock_data';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import ReleasesPagination from '~/releases/components/releases_pagination.vue';
+import { pageInfoHeadersWithoutPagination, pageInfoHeadersWithPagination } from '../mock_data';
jest.mock('~/lib/utils/common_utils', () => ({
...jest.requireActual('~/lib/utils/common_utils'),
diff --git a/spec/frontend/releases/components/release_block_footer_spec.js b/spec/frontend/releases/components/release_block_footer_spec.js
index f1c0c24f8ca..b3161f9fc0d 100644
--- a/spec/frontend/releases/components/release_block_footer_spec.js
+++ b/spec/frontend/releases/components/release_block_footer_spec.js
@@ -1,8 +1,8 @@
import { mount } from '@vue/test-utils';
import { GlLink, GlIcon } from '@gitlab/ui';
+import { cloneDeep } from 'lodash';
import { trimText } from 'helpers/text_helper';
import { getJSONFixture } from 'helpers/fixtures';
-import { cloneDeep } from 'lodash';
import ReleaseBlockFooter from '~/releases/components/release_block_footer.vue';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
diff --git a/spec/frontend/releases/stores/modules/detail/actions_spec.js b/spec/frontend/releases/stores/modules/detail/actions_spec.js
index 396e7bd8745..39c00ae69fe 100644
--- a/spec/frontend/releases/stores/modules/detail/actions_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/actions_spec.js
@@ -1,8 +1,8 @@
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
+import { cloneDeep } from 'lodash';
import testAction from 'helpers/vuex_action_helper';
import { getJSONFixture } from 'helpers/fixtures';
-import { cloneDeep } from 'lodash';
import * as actions from '~/releases/stores/modules/detail/actions';
import * as types from '~/releases/stores/modules/detail/mutation_types';
import createState from '~/releases/stores/modules/detail/state';
diff --git a/spec/frontend/releases/stores/modules/list/actions_spec.js b/spec/frontend/releases/stores/modules/list/actions_spec.js
index 35551b77dc4..278c8b3b3d3 100644
--- a/spec/frontend/releases/stores/modules/list/actions_spec.js
+++ b/spec/frontend/releases/stores/modules/list/actions_spec.js
@@ -17,9 +17,9 @@ import {
parseIntPagination,
convertObjectPropsToCamelCase,
} from '~/lib/utils/common_utils';
-import { pageInfoHeadersWithoutPagination } from '../../../mock_data';
import allReleasesQuery from '~/releases/queries/all_releases.query.graphql';
import { PAGE_SIZE } from '~/releases/constants';
+import { pageInfoHeadersWithoutPagination } from '../../../mock_data';
const originalRelease = getJSONFixture('api/releases/release.json');
const originalReleases = [originalRelease];
diff --git a/spec/frontend/releases/stores/modules/list/mutations_spec.js b/spec/frontend/releases/stores/modules/list/mutations_spec.js
index 78071573072..7eb78b23b7b 100644
--- a/spec/frontend/releases/stores/modules/list/mutations_spec.js
+++ b/spec/frontend/releases/stores/modules/list/mutations_spec.js
@@ -3,8 +3,8 @@ import createState from '~/releases/stores/modules/list/state';
import mutations from '~/releases/stores/modules/list/mutations';
import * as types from '~/releases/stores/modules/list/mutation_types';
import { parseIntPagination, convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
-import { pageInfoHeadersWithoutPagination } from '../../../mock_data';
import { convertAllReleasesGraphQLResponse } from '~/releases/util';
+import { pageInfoHeadersWithoutPagination } from '../../../mock_data';
const originalRelease = getJSONFixture('api/releases/release.json');
const originalReleases = [originalRelease];
diff --git a/spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js b/spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js
index ecb657af6f1..fb743c8b90c 100644
--- a/spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js
+++ b/spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js
@@ -9,7 +9,6 @@ const localVue = createLocalVue();
localVue.use(Vuex);
describe('Grouped code quality reports app', () => {
- const Component = localVue.extend(GroupedCodequalityReportsApp);
let wrapper;
let mockStore;
@@ -22,7 +21,7 @@ describe('Grouped code quality reports app', () => {
};
const mountComponent = (props = {}) => {
- wrapper = mount(Component, {
+ wrapper = mount(GroupedCodequalityReportsApp, {
store: mockStore,
localVue,
propsData: {
@@ -135,7 +134,7 @@ describe('Grouped code quality reports app', () => {
});
it('renders error text', () => {
- expect(findWidget().text()).toEqual('Failed to load codeclimate report');
+ expect(findWidget().text()).toContain('Failed to load codeclimate report');
});
it('renders a help icon with more information', () => {
diff --git a/spec/frontend/reports/codequality_report/mock_data.js b/spec/frontend/reports/codequality_report/mock_data.js
index 9bd61527d3f..c5cecb34509 100644
--- a/spec/frontend/reports/codequality_report/mock_data.js
+++ b/spec/frontend/reports/codequality_report/mock_data.js
@@ -88,3 +88,53 @@ export const issueDiff = [
urlPath: 'headPath/lib/six.rb#L6',
},
];
+
+export const reportIssues = {
+ status: 'failed',
+ new_errors: [
+ {
+ description:
+ 'Method `long_if` has a Cognitive Complexity of 10 (exceeds 5 allowed). Consider refactoring.',
+ severity: 'minor',
+ file_path: 'codequality.rb',
+ line: 5,
+ },
+ ],
+ resolved_errors: [
+ {
+ description: 'Insecure Dependency',
+ severity: 'major',
+ file_path: 'lib/six.rb',
+ line: 22,
+ },
+ ],
+ existing_errors: [],
+ summary: { total: 3, resolved: 0, errored: 3 },
+};
+
+export const parsedReportIssues = {
+ newIssues: [
+ {
+ description:
+ 'Method `long_if` has a Cognitive Complexity of 10 (exceeds 5 allowed). Consider refactoring.',
+ file_path: 'codequality.rb',
+ line: 5,
+ name:
+ 'Method `long_if` has a Cognitive Complexity of 10 (exceeds 5 allowed). Consider refactoring.',
+ path: 'codequality.rb',
+ severity: 'minor',
+ urlPath: 'null/codequality.rb#L5',
+ },
+ ],
+ resolvedIssues: [
+ {
+ description: 'Insecure Dependency',
+ file_path: 'lib/six.rb',
+ line: 22,
+ name: 'Insecure Dependency',
+ path: 'lib/six.rb',
+ severity: 'major',
+ urlPath: 'null/lib/six.rb#L22',
+ },
+ ],
+};
diff --git a/spec/frontend/reports/codequality_report/store/actions_spec.js b/spec/frontend/reports/codequality_report/store/actions_spec.js
index 321785cb85a..6ff6bae1284 100644
--- a/spec/frontend/reports/codequality_report/store/actions_spec.js
+++ b/spec/frontend/reports/codequality_report/store/actions_spec.js
@@ -5,7 +5,14 @@ import axios from '~/lib/utils/axios_utils';
import * as actions from '~/reports/codequality_report/store/actions';
import * as types from '~/reports/codequality_report/store/mutation_types';
import createStore from '~/reports/codequality_report/store';
-import { headIssues, baseIssues, mockParsedHeadIssues, mockParsedBaseIssues } from '../mock_data';
+import {
+ headIssues,
+ baseIssues,
+ mockParsedHeadIssues,
+ mockParsedBaseIssues,
+ reportIssues,
+ parsedReportIssues,
+} from '../mock_data';
// mock codequality comparison worker
jest.mock('~/reports/codequality_report/workers/codequality_comparison_worker', () =>
@@ -39,6 +46,7 @@ describe('Codequality Reports actions', () => {
headPath: 'headPath',
baseBlobPath: 'baseBlobPath',
headBlobPath: 'headBlobPath',
+ reportsPath: 'reportsPath',
helpPath: 'codequalityHelpPath',
};
@@ -55,68 +63,119 @@ describe('Codequality Reports actions', () => {
describe('fetchReports', () => {
let mock;
+ let diffFeatureFlagEnabled;
- beforeEach(() => {
- localState.headPath = `${TEST_HOST}/head.json`;
- localState.basePath = `${TEST_HOST}/base.json`;
- mock = new MockAdapter(axios);
- });
+ describe('with codequalityBackendComparison feature flag enabled', () => {
+ beforeEach(() => {
+ diffFeatureFlagEnabled = true;
+ localState.reportsPath = `${TEST_HOST}/codequality_reports.json`;
+ mock = new MockAdapter(axios);
+ });
- afterEach(() => {
- mock.restore();
- });
+ afterEach(() => {
+ mock.restore();
+ });
- describe('on success', () => {
- it('commits REQUEST_REPORTS and dispatches receiveReportsSuccess', (done) => {
- mock.onGet(`${TEST_HOST}/head.json`).reply(200, headIssues);
- mock.onGet(`${TEST_HOST}/base.json`).reply(200, baseIssues);
-
- testAction(
- actions.fetchReports,
- null,
- localState,
- [{ type: types.REQUEST_REPORTS }],
- [
- {
- payload: {
- newIssues: [mockParsedHeadIssues[0]],
- resolvedIssues: [mockParsedBaseIssues[0]],
+ describe('on success', () => {
+ it('commits REQUEST_REPORTS and dispatches receiveReportsSuccess', (done) => {
+ mock.onGet(`${TEST_HOST}/codequality_reports.json`).reply(200, reportIssues);
+
+ testAction(
+ actions.fetchReports,
+ diffFeatureFlagEnabled,
+ localState,
+ [{ type: types.REQUEST_REPORTS }],
+ [
+ {
+ payload: parsedReportIssues,
+ type: 'receiveReportsSuccess',
},
- type: 'receiveReportsSuccess',
- },
- ],
- done,
- );
+ ],
+ done,
+ );
+ });
});
- });
- describe('on error', () => {
- it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => {
- mock.onGet(`${TEST_HOST}/head.json`).reply(500);
-
- testAction(
- actions.fetchReports,
- null,
- localState,
- [{ type: types.REQUEST_REPORTS }],
- [{ type: 'receiveReportsError' }],
- done,
- );
+ describe('on error', () => {
+ it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => {
+ mock.onGet(`${TEST_HOST}/codequality_reports.json`).reply(500);
+
+ testAction(
+ actions.fetchReports,
+ diffFeatureFlagEnabled,
+ localState,
+ [{ type: types.REQUEST_REPORTS }],
+ [{ type: 'receiveReportsError', payload: expect.any(Error) }],
+ done,
+ );
+ });
});
});
- describe('with no base path', () => {
- it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => {
- localState.basePath = null;
-
- testAction(
- actions.fetchReports,
- null,
- localState,
- [{ type: types.REQUEST_REPORTS }],
- [{ type: 'receiveReportsError' }],
- done,
- );
+ describe('with codequalityBackendComparison feature flag disabled', () => {
+ beforeEach(() => {
+ diffFeatureFlagEnabled = false;
+ localState.headPath = `${TEST_HOST}/head.json`;
+ localState.basePath = `${TEST_HOST}/base.json`;
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('on success', () => {
+ it('commits REQUEST_REPORTS and dispatches receiveReportsSuccess', (done) => {
+ mock.onGet(`${TEST_HOST}/head.json`).reply(200, headIssues);
+ mock.onGet(`${TEST_HOST}/base.json`).reply(200, baseIssues);
+
+ testAction(
+ actions.fetchReports,
+ diffFeatureFlagEnabled,
+ localState,
+ [{ type: types.REQUEST_REPORTS }],
+ [
+ {
+ payload: {
+ newIssues: [mockParsedHeadIssues[0]],
+ resolvedIssues: [mockParsedBaseIssues[0]],
+ },
+ type: 'receiveReportsSuccess',
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('on error', () => {
+ it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => {
+ mock.onGet(`${TEST_HOST}/head.json`).reply(500);
+
+ testAction(
+ actions.fetchReports,
+ diffFeatureFlagEnabled,
+ localState,
+ [{ type: types.REQUEST_REPORTS }],
+ [{ type: 'receiveReportsError', payload: expect.any(Error) }],
+ done,
+ );
+ });
+ });
+
+ describe('with no base path', () => {
+ it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => {
+ localState.basePath = null;
+
+ testAction(
+ actions.fetchReports,
+ diffFeatureFlagEnabled,
+ localState,
+ [{ type: types.REQUEST_REPORTS }],
+ [{ type: 'receiveReportsError' }],
+ done,
+ );
+ });
});
});
});
@@ -142,7 +201,7 @@ describe('Codequality Reports actions', () => {
actions.receiveReportsError,
null,
localState,
- [{ type: types.RECEIVE_REPORTS_ERROR }],
+ [{ type: types.RECEIVE_REPORTS_ERROR, payload: null }],
[],
done,
);
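A condensed sketch of the fetchReports pattern introduced above, where the feature-flag value is passed to the Vuex action as its payload and the new backend endpoint is stubbed with MockAdapter (not part of the upstream patch; state keys, fixtures, and helpers are the ones imported in the spec):

// The endpoint stub plus the testAction helper cover the whole flow:
// REQUEST_REPORTS is committed, then receiveReportsSuccess is dispatched
// with the parsed report payload.
it('dispatches receiveReportsSuccess with the parsed backend report', (done) => {
  const mock = new MockAdapter(axios);
  mock.onGet(`${TEST_HOST}/codequality_reports.json`).reply(200, reportIssues);

  testAction(
    actions.fetchReports,
    true, // codequalityBackendComparison flag, passed through as the payload
    { reportsPath: `${TEST_HOST}/codequality_reports.json` },
    [{ type: types.REQUEST_REPORTS }],
    [{ type: 'receiveReportsSuccess', payload: parsedReportIssues }],
    done,
  );
});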
diff --git a/spec/frontend/reports/codequality_report/store/mutations_spec.js b/spec/frontend/reports/codequality_report/store/mutations_spec.js
index 658abf3088c..f7f9e611ee8 100644
--- a/spec/frontend/reports/codequality_report/store/mutations_spec.js
+++ b/spec/frontend/reports/codequality_report/store/mutations_spec.js
@@ -55,6 +55,12 @@ describe('Codequality Reports mutations', () => {
expect(localState.hasError).toEqual(false);
});
+ it('clears statusReason', () => {
+ mutations.RECEIVE_REPORTS_SUCCESS(localState, {});
+
+ expect(localState.statusReason).toEqual('');
+ });
+
it('sets newIssues and resolvedIssues from response data', () => {
const data = { newIssues: [{ id: 1 }], resolvedIssues: [{ id: 2 }] };
mutations.RECEIVE_REPORTS_SUCCESS(localState, data);
@@ -76,5 +82,13 @@ describe('Codequality Reports mutations', () => {
expect(localState.hasError).toEqual(true);
});
+
+ it('sets statusReason to string from error response data', () => {
+ const data = { status_reason: 'This merge request does not have codequality reports' };
+ const error = { response: { data } };
+ mutations.RECEIVE_REPORTS_ERROR(localState, error);
+
+ expect(localState.statusReason).toEqual(data.status_reason);
+ });
});
});
diff --git a/spec/frontend/reports/codequality_report/store/utils/codequality_comparison_spec.js b/spec/frontend/reports/codequality_report/store/utils/codequality_comparison_spec.js
index 085d697672d..389e9b4a1f6 100644
--- a/spec/frontend/reports/codequality_report/store/utils/codequality_comparison_spec.js
+++ b/spec/frontend/reports/codequality_report/store/utils/codequality_comparison_spec.js
@@ -2,7 +2,13 @@ import {
parseCodeclimateMetrics,
doCodeClimateComparison,
} from '~/reports/codequality_report/store/utils/codequality_comparison';
-import { baseIssues, mockParsedHeadIssues, mockParsedBaseIssues } from '../../mock_data';
+import {
+ baseIssues,
+ mockParsedHeadIssues,
+ mockParsedBaseIssues,
+ reportIssues,
+ parsedReportIssues,
+} from '../../mock_data';
jest.mock('~/reports/codequality_report/workers/codequality_comparison_worker', () => {
let mockPostMessageCallback;
@@ -34,7 +40,7 @@ describe('Codequality report store utils', () => {
let result;
describe('parseCodeclimateMetrics', () => {
- it('should parse the received issues', () => {
+ it('should parse the issues from codeclimate artifacts', () => {
[result] = parseCodeclimateMetrics(baseIssues, 'path');
expect(result.name).toEqual(baseIssues[0].check_name);
@@ -42,6 +48,14 @@ describe('Codequality report store utils', () => {
expect(result.line).toEqual(baseIssues[0].location.lines.begin);
});
+ it('should parse the issues from backend codequality diff', () => {
+ [result] = parseCodeclimateMetrics(reportIssues.new_errors, 'path');
+
+ expect(result.name).toEqual(parsedReportIssues.newIssues[0].name);
+ expect(result.path).toEqual(parsedReportIssues.newIssues[0].path);
+ expect(result.line).toEqual(parsedReportIssues.newIssues[0].line);
+ });
+
describe('when an issue has no location or path', () => {
const issue = { description: 'Insecure Dependency' };
diff --git a/spec/frontend/reports/components/grouped_test_reports_app_spec.js b/spec/frontend/reports/components/grouped_test_reports_app_spec.js
index 492192988fb..88a82908e1e 100644
--- a/spec/frontend/reports/components/grouped_test_reports_app_spec.js
+++ b/spec/frontend/reports/components/grouped_test_reports_app_spec.js
@@ -18,12 +18,11 @@ localVue.use(Vuex);
describe('Grouped test reports app', () => {
const endpoint = 'endpoint.json';
const pipelinePath = '/path/to/pipeline';
- const Component = localVue.extend(GroupedTestReportsApp);
let wrapper;
let mockStore;
const mountComponent = ({ props = { pipelinePath } } = {}) => {
- wrapper = mount(Component, {
+ wrapper = mount(GroupedTestReportsApp, {
store: mockStore,
localVue,
propsData: {
diff --git a/spec/frontend/reports/components/summary_row_spec.js b/spec/frontend/reports/components/summary_row_spec.js
index 85c68ed069b..bdd6de1e0be 100644
--- a/spec/frontend/reports/components/summary_row_spec.js
+++ b/spec/frontend/reports/components/summary_row_spec.js
@@ -32,7 +32,7 @@ describe('Summary row', () => {
it('renders provided summary', () => {
createComponent();
- expect(findSummary().text()).toEqual(props.summary);
+ expect(findSummary().text()).toContain(props.summary);
});
it('renders provided icon', () => {
@@ -48,7 +48,7 @@ describe('Summary row', () => {
createComponent({ slots: { summary: summarySlotContent } });
expect(wrapper.text()).not.toContain(props.summary);
- expect(findSummary().text()).toEqual(summarySlotContent);
+ expect(findSummary().text()).toContain(summarySlotContent);
});
});
});
diff --git a/spec/frontend/repository/components/last_commit_spec.js b/spec/frontend/repository/components/last_commit_spec.js
index fe77057c3d4..f611d6a3535 100644
--- a/spec/frontend/repository/components/last_commit_spec.js
+++ b/spec/frontend/repository/components/last_commit_spec.js
@@ -58,77 +58,75 @@ describe('Repository last commit component', () => {
loading | label
${true} | ${'shows'}
${false} | ${'hides'}
- `('$label when loading icon $loading is true', ({ loading }) => {
+ `('$label when loading icon $loading is true', async ({ loading }) => {
factory(createCommitData(), loading);
- return vm.vm.$nextTick(() => {
- expect(vm.find(GlLoadingIcon).exists()).toBe(loading);
- });
+ await vm.vm.$nextTick();
+
+ expect(vm.find(GlLoadingIcon).exists()).toBe(loading);
});
- it('renders commit widget', () => {
+ it('renders commit widget', async () => {
factory();
- return vm.vm.$nextTick(() => {
- expect(vm.element).toMatchSnapshot();
- });
+ await vm.vm.$nextTick();
+
+ expect(vm.element).toMatchSnapshot();
});
- it('renders short commit ID', () => {
+ it('renders short commit ID', async () => {
factory();
- return vm.vm.$nextTick(() => {
- expect(vm.find('[data-testid="last-commit-id-label"]').text()).toEqual('12345678');
- });
+ await vm.vm.$nextTick();
+
+ expect(vm.find('[data-testid="last-commit-id-label"]').text()).toEqual('12345678');
});
- it('hides pipeline components when pipeline does not exist', () => {
+ it('hides pipeline components when pipeline does not exist', async () => {
factory(createCommitData({ pipeline: null }));
- return vm.vm.$nextTick(() => {
- expect(vm.find('.js-commit-pipeline').exists()).toBe(false);
- });
+ await vm.vm.$nextTick();
+
+ expect(vm.find('.js-commit-pipeline').exists()).toBe(false);
});
- it('renders pipeline components', () => {
+ it('renders pipeline components', async () => {
factory();
- return vm.vm.$nextTick(() => {
- expect(vm.find('.js-commit-pipeline').exists()).toBe(true);
- });
+ await vm.vm.$nextTick();
+
+ expect(vm.find('.js-commit-pipeline').exists()).toBe(true);
});
- it('hides author component when author does not exist', () => {
+ it('hides author component when author does not exist', async () => {
factory(createCommitData({ author: null }));
- return vm.vm.$nextTick(() => {
- expect(vm.find('.js-user-link').exists()).toBe(false);
- expect(vm.find(UserAvatarLink).exists()).toBe(false);
- });
+ await vm.vm.$nextTick();
+
+ expect(vm.find('.js-user-link').exists()).toBe(false);
+ expect(vm.find(UserAvatarLink).exists()).toBe(false);
});
- it('does not render description expander when description is null', () => {
+ it('does not render description expander when description is null', async () => {
factory(createCommitData({ descriptionHtml: null }));
- return vm.vm.$nextTick(() => {
- expect(vm.find('.text-expander').exists()).toBe(false);
- expect(vm.find('.commit-row-description').exists()).toBe(false);
- });
+ await vm.vm.$nextTick();
+
+ expect(vm.find('.text-expander').exists()).toBe(false);
+ expect(vm.find('.commit-row-description').exists()).toBe(false);
});
- it('expands commit description when clicking expander', () => {
+ it('expands commit description when clicking expander', async () => {
factory(createCommitData({ descriptionHtml: 'Test description' }));
- return vm.vm
- .$nextTick()
- .then(() => {
- vm.find('.text-expander').vm.$emit('click');
- return vm.vm.$nextTick();
- })
- .then(() => {
- expect(vm.find('.commit-row-description').isVisible()).toBe(true);
- expect(vm.find('.text-expander').classes('open')).toBe(true);
- });
+ await vm.vm.$nextTick();
+
+ vm.find('.text-expander').vm.$emit('click');
+
+ await vm.vm.$nextTick();
+
+ expect(vm.find('.commit-row-description').isVisible()).toBe(true);
+ expect(vm.find('.text-expander').classes('open')).toBe(true);
});
it('strips the first newline of the description', async () => {
@@ -141,19 +139,19 @@ describe('Repository last commit component', () => {
);
});
- it('renders the signature HTML as returned by the backend', () => {
+ it('renders the signature HTML as returned by the backend', async () => {
factory(createCommitData({ signatureHtml: '<button>Verified</button>' }));
- return vm.vm.$nextTick().then(() => {
- expect(vm.element).toMatchSnapshot();
- });
+ await vm.vm.$nextTick();
+
+ expect(vm.element).toMatchSnapshot();
});
- it('sets correct CSS class if the commit message is empty', () => {
+ it('sets correct CSS class if the commit message is empty', async () => {
factory(createCommitData({ message: '' }));
- return vm.vm.$nextTick().then(() => {
- expect(vm.find('.item-title').classes()).toContain(emptyMessageClass);
- });
+ await vm.vm.$nextTick();
+
+ expect(vm.find('.item-title').classes()).toContain(emptyMessageClass);
});
});
diff --git a/spec/frontend/search/highlight_blob_search_result_spec.js b/spec/frontend/search/highlight_blob_search_result_spec.js
index 112e6f5124f..c1b0c7d794b 100644
--- a/spec/frontend/search/highlight_blob_search_result_spec.js
+++ b/spec/frontend/search/highlight_blob_search_result_spec.js
@@ -1,6 +1,7 @@
import setHighlightClass from '~/search/highlight_blob_search_result';
const fixture = 'search/blob_search_result.html';
+const searchKeyword = 'Send'; // spec/frontend/fixtures/search.rb#79
describe('search/highlight_blob_search_result', () => {
preloadFixtures(fixture);
@@ -8,7 +9,7 @@ describe('search/highlight_blob_search_result', () => {
beforeEach(() => loadFixtures(fixture));
it('highlights lines with search term occurrence', () => {
- setHighlightClass();
+ setHighlightClass(searchKeyword);
expect(document.querySelectorAll('.blob-result .hll').length).toBe(4);
});
diff --git a/spec/frontend/search/index_spec.js b/spec/frontend/search/index_spec.js
index 023cd341345..1992a7f4437 100644
--- a/spec/frontend/search/index_spec.js
+++ b/spec/frontend/search/index_spec.js
@@ -1,9 +1,11 @@
+import setHighlightClass from 'ee_else_ce/search/highlight_blob_search_result';
import { initSearchApp } from '~/search';
import createStore from '~/search/store';
jest.mock('~/search/store');
jest.mock('~/search/topbar');
jest.mock('~/search/sidebar');
+jest.mock('ee_else_ce/search/highlight_blob_search_result');
describe('initSearchApp', () => {
let defaultLocation;
@@ -42,6 +44,7 @@ describe('initSearchApp', () => {
it(`decodes ${search} to ${decodedSearch}`, () => {
expect(createStore).toHaveBeenCalledWith({ query: { search: decodedSearch } });
+ expect(setHighlightClass).toHaveBeenCalledWith(decodedSearch);
});
});
});
diff --git a/spec/frontend/search/mock_data.js b/spec/frontend/search/mock_data.js
index ee509eaad8d..d076997b04a 100644
--- a/spec/frontend/search/mock_data.js
+++ b/spec/frontend/search/mock_data.js
@@ -26,7 +26,7 @@ export const MOCK_GROUPS = [
export const MOCK_PROJECT = {
name: 'test project',
- namespace_id: MOCK_GROUP.id,
+ namespace: MOCK_GROUP,
nameWithNamespace: 'test group test project',
id: 'test_1',
};
@@ -34,14 +34,30 @@ export const MOCK_PROJECT = {
export const MOCK_PROJECTS = [
{
name: 'test project',
- namespace_id: MOCK_GROUP.id,
+ namespace: MOCK_GROUP,
name_with_namespace: 'test group test project',
id: 'test_1',
},
{
name: 'test project 2',
- namespace_id: MOCK_GROUP.id,
+ namespace: MOCK_GROUP,
name_with_namespace: 'test group test project 2',
id: 'test_2',
},
];
+
+export const MOCK_SORT_OPTIONS = [
+ {
+ title: 'Most relevant',
+ sortable: false,
+ sortParam: 'relevant',
+ },
+ {
+ title: 'Created date',
+ sortable: true,
+ sortParam: {
+ asc: 'created_asc',
+ desc: 'created_desc',
+ },
+ },
+];
diff --git a/spec/frontend/search/sort/components/app_spec.js b/spec/frontend/search/sort/components/app_spec.js
new file mode 100644
index 00000000000..8e3f08f7e28
--- /dev/null
+++ b/spec/frontend/search/sort/components/app_spec.js
@@ -0,0 +1,168 @@
+import Vuex from 'vuex';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { GlButtonGroup, GlButton, GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { MOCK_QUERY, MOCK_SORT_OPTIONS } from 'jest/search/mock_data';
+import GlobalSearchSort from '~/search/sort/components/app.vue';
+import { SORT_DIRECTION_UI } from '~/search/sort/constants';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('GlobalSearchSort', () => {
+ let wrapper;
+
+ const actionSpies = {
+ setQuery: jest.fn(),
+ applyQuery: jest.fn(),
+ };
+
+ const defaultProps = {
+ searchSortOptions: MOCK_SORT_OPTIONS,
+ };
+
+ const createComponent = (initialState, props) => {
+ const store = new Vuex.Store({
+ state: {
+ query: MOCK_QUERY,
+ ...initialState,
+ },
+ actions: actionSpies,
+ });
+
+ wrapper = shallowMount(GlobalSearchSort, {
+ localVue,
+ store,
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findSortButtonGroup = () => wrapper.find(GlButtonGroup);
+ const findSortDropdown = () => wrapper.find(GlDropdown);
+ const findSortDirectionButton = () => wrapper.find(GlButton);
+ const findDropdownItems = () => findSortDropdown().findAll(GlDropdownItem);
+ const findDropdownItemsText = () => findDropdownItems().wrappers.map((w) => w.text());
+
+ describe('template', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders Sort Button Group', () => {
+ expect(findSortButtonGroup().exists()).toBe(true);
+ });
+
+ it('renders Sort Dropdown', () => {
+ expect(findSortDropdown().exists()).toBe(true);
+ });
+
+ it('renders Sort Direction Button', () => {
+ expect(findSortDirectionButton().exists()).toBe(true);
+ });
+ });
+
+ describe('Sort Dropdown Items', () => {
+ describe('renders', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('an instance for each namespace', () => {
+ expect(findDropdownItemsText()).toStrictEqual(
+ MOCK_SORT_OPTIONS.map((option) => option.title),
+ );
+ });
+ });
+
+ describe.each`
+ sortQuery | value
+ ${null} | ${MOCK_SORT_OPTIONS[0].title}
+ ${'asdf'} | ${MOCK_SORT_OPTIONS[0].title}
+ ${MOCK_SORT_OPTIONS[0].sortParam} | ${MOCK_SORT_OPTIONS[0].title}
+ ${MOCK_SORT_OPTIONS[1].sortParam.desc} | ${MOCK_SORT_OPTIONS[1].title}
+ ${MOCK_SORT_OPTIONS[1].sortParam.asc} | ${MOCK_SORT_OPTIONS[1].title}
+ `('selected', ({ sortQuery, value }) => {
+ describe(`when sort option is ${sortQuery}`, () => {
+ beforeEach(() => {
+ createComponent({ query: { sort: sortQuery } });
+ });
+
+ it('is set correctly', () => {
+ expect(findSortDropdown().attributes('text')).toBe(value);
+ });
+ });
+ });
+ });
+
+ describe.each`
+ description | sortQuery | sortUi | disabled
+ ${'non-sortable'} | ${MOCK_SORT_OPTIONS[0].sortParam} | ${SORT_DIRECTION_UI.disabled} | ${'true'}
+ ${'descending sortable'} | ${MOCK_SORT_OPTIONS[1].sortParam.desc} | ${SORT_DIRECTION_UI.desc} | ${undefined}
+ ${'ascending sortable'} | ${MOCK_SORT_OPTIONS[1].sortParam.asc} | ${SORT_DIRECTION_UI.asc} | ${undefined}
+ `('Sort Direction Button', ({ description, sortQuery, sortUi, disabled }) => {
+ describe(`when sort option is ${description}`, () => {
+ beforeEach(() => {
+ createComponent({ query: { sort: sortQuery } });
+ });
+
+ it('sets the UI correctly', () => {
+ expect(findSortDirectionButton().attributes('disabled')).toBe(disabled);
+ expect(findSortDirectionButton().attributes('title')).toBe(sortUi.tooltip);
+ expect(findSortDirectionButton().attributes('icon')).toBe(sortUi.icon);
+ });
+ });
+ });
+
+ describe('actions', () => {
+ describe.each`
+ description | index | value
+ ${'non-sortable'} | ${0} | ${MOCK_SORT_OPTIONS[0].sortParam}
+ ${'sortable'} | ${1} | ${MOCK_SORT_OPTIONS[1].sortParam.desc}
+ `('handleSortChange', ({ description, index, value }) => {
+ describe(`when clicking a ${description} option`, () => {
+ beforeEach(() => {
+ createComponent();
+ findDropdownItems().at(index).vm.$emit('click');
+ });
+
+ it('calls setQuery and applyQuery correctly', () => {
+ expect(actionSpies.setQuery).toHaveBeenCalledTimes(1);
+ expect(actionSpies.applyQuery).toHaveBeenCalledTimes(1);
+ expect(actionSpies.setQuery).toHaveBeenCalledWith(expect.any(Object), {
+ key: 'sort',
+ value,
+ });
+ });
+ });
+ });
+
+ describe.each`
+ description | sortQuery | value
+ ${'descending'} | ${MOCK_SORT_OPTIONS[1].sortParam.desc} | ${MOCK_SORT_OPTIONS[1].sortParam.asc}
+ ${'ascending'} | ${MOCK_SORT_OPTIONS[1].sortParam.asc} | ${MOCK_SORT_OPTIONS[1].sortParam.desc}
+ `('handleSortDirectionChange', ({ description, sortQuery, value }) => {
+ describe(`when toggling a ${description} option`, () => {
+ beforeEach(() => {
+ createComponent({ query: { sort: sortQuery } });
+ findSortDirectionButton().vm.$emit('click');
+ });
+
+ it('calls setQuery and applyQuery correctly', () => {
+ expect(actionSpies.setQuery).toHaveBeenCalledTimes(1);
+ expect(actionSpies.applyQuery).toHaveBeenCalledTimes(1);
+ expect(actionSpies.setQuery).toHaveBeenCalledWith(expect.any(Object), {
+ key: 'sort',
+ value,
+ });
+ });
+ });
+ });
+ });
+});
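The new sort spec relies on table-driven describe.each blocks plus a Vuex store whose actions are jest.fn() spies; a trimmed sketch of that combination (not part of the upstream patch; component, helper, and mock names are the ones the new file imports):

// One table row per sort option keeps the mounting boilerplate in a single
// beforeEach while still producing an individual test case per row.
describe.each`
  sortQuery                              | expectedText
  ${null}                                | ${MOCK_SORT_OPTIONS[0].title}
  ${MOCK_SORT_OPTIONS[1].sortParam.desc} | ${MOCK_SORT_OPTIONS[1].title}
`('dropdown text', ({ sortQuery, expectedText }) => {
  beforeEach(() => {
    createComponent({ query: { sort: sortQuery } });
  });

  it('reflects the current sort query', () => {
    expect(findSortDropdown().attributes('text')).toBe(expectedText);
  });
});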
diff --git a/spec/frontend/search/topbar/components/app_spec.js b/spec/frontend/search/topbar/components/app_spec.js
new file mode 100644
index 00000000000..faf3629b444
--- /dev/null
+++ b/spec/frontend/search/topbar/components/app_spec.js
@@ -0,0 +1,113 @@
+import Vuex from 'vuex';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { GlForm, GlSearchBoxByType, GlButton } from '@gitlab/ui';
+import { MOCK_QUERY } from 'jest/search/mock_data';
+import GlobalSearchTopbar from '~/search/topbar/components/app.vue';
+import GroupFilter from '~/search/topbar/components/group_filter.vue';
+import ProjectFilter from '~/search/topbar/components/project_filter.vue';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('GlobalSearchTopbar', () => {
+ let wrapper;
+
+ const actionSpies = {
+ applyQuery: jest.fn(),
+ setQuery: jest.fn(),
+ };
+
+ const createComponent = (initialState) => {
+ const store = new Vuex.Store({
+ state: {
+ query: MOCK_QUERY,
+ ...initialState,
+ },
+ actions: actionSpies,
+ });
+
+ wrapper = shallowMount(GlobalSearchTopbar, {
+ localVue,
+ store,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findTopbarForm = () => wrapper.find(GlForm);
+ const findGlSearchBox = () => wrapper.find(GlSearchBoxByType);
+ const findGroupFilter = () => wrapper.find(GroupFilter);
+ const findProjectFilter = () => wrapper.find(ProjectFilter);
+ const findSearchButton = () => wrapper.find(GlButton);
+
+ describe('template', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders Topbar Form always', () => {
+ expect(findTopbarForm().exists()).toBe(true);
+ });
+
+ describe('Search box', () => {
+ it('renders always', () => {
+ expect(findGlSearchBox().exists()).toBe(true);
+ });
+
+ describe('onSearch', () => {
+ const testSearch = 'test search';
+
+ beforeEach(() => {
+ findGlSearchBox().vm.$emit('input', testSearch);
+ });
+
+ it('calls setQuery when input event is fired from GlSearchBoxByType', () => {
+ expect(actionSpies.setQuery).toHaveBeenCalledWith(expect.any(Object), {
+ key: 'search',
+ value: testSearch,
+ });
+ });
+ });
+ });
+
+ describe.each`
+ snippets | showFilters
+ ${null} | ${true}
+ ${{ query: { snippets: '' } }} | ${true}
+ ${{ query: { snippets: false } }} | ${true}
+ ${{ query: { snippets: true } }} | ${false}
+ ${{ query: { snippets: 'false' } }} | ${true}
+ ${{ query: { snippets: 'true' } }} | ${false}
+ `('topbar filters', ({ snippets, showFilters }) => {
+ beforeEach(() => {
+ createComponent(snippets);
+ });
+
+ it(`does${showFilters ? '' : ' not'} render when snippets is ${JSON.stringify(
+ snippets,
+ )}`, () => {
+ expect(findGroupFilter().exists()).toBe(showFilters);
+ expect(findProjectFilter().exists()).toBe(showFilters);
+ });
+ });
+
+ it('renders SearchButton always', () => {
+ expect(findSearchButton().exists()).toBe(true);
+ });
+ });
+
+ describe('actions', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('clicking SearchButton calls applyQuery', () => {
+ findTopbarForm().vm.$emit('submit', { preventDefault: () => {} });
+
+ expect(actionSpies.applyQuery).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/search/topbar/components/project_filter_spec.js b/spec/frontend/search/topbar/components/project_filter_spec.js
index c1fc61d7e89..f2ac8f2689d 100644
--- a/spec/frontend/search/topbar/components/project_filter_spec.js
+++ b/spec/frontend/search/topbar/components/project_filter_spec.js
@@ -99,7 +99,7 @@ describe('ProjectFilter', () => {
it('calls setUrlParams with project id, group id, then calls visitUrl', () => {
expect(setUrlParams).toHaveBeenCalledWith({
- [GROUP_DATA.queryParam]: MOCK_PROJECT.namespace_id,
+ [GROUP_DATA.queryParam]: MOCK_PROJECT.namespace.id,
[PROJECT_DATA.queryParam]: MOCK_PROJECT.id,
});
expect(visitUrl).toHaveBeenCalled();
diff --git a/spec/frontend/search_settings/index_spec.js b/spec/frontend/search_settings/index_spec.js
index 122ee1251bb..1d56d054eea 100644
--- a/spec/frontend/search_settings/index_spec.js
+++ b/spec/frontend/search_settings/index_spec.js
@@ -1,36 +1,25 @@
-import $ from 'jquery';
-import { setHTMLFixture } from 'helpers/fixtures';
+import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import initSearch from '~/search_settings';
-import { expandSection, closeSection } from '~/settings_panels';
+import mount from '~/search_settings/mount';
-jest.mock('~/settings_panels');
-
-describe('search_settings/index', () => {
- let app;
-
- beforeEach(() => {
- const el = document.createElement('div');
-
- setHTMLFixture('<div id="content-body"></div>');
-
- app = initSearch({ el });
- });
+jest.mock('~/search_settings/mount');
+describe('~/search_settings', () => {
afterEach(() => {
- app.$destroy();
+ resetHTMLFixture();
});
- it('calls settings_panel.onExpand when expand event is emitted', () => {
- const section = { name: 'section' };
- app.$refs.searchSettings.$emit('expand', section);
+ it('initializes search settings when js-search-settings-app is available', async () => {
+ setHTMLFixture('<div class="js-search-settings-app"></div>');
+
+ await initSearch();
- expect(expandSection).toHaveBeenCalledWith($(section));
+ expect(mount).toHaveBeenCalled();
});
- it('calls settings_panel.closeSection when collapse event is emitted', () => {
- const section = { name: 'section' };
- app.$refs.searchSettings.$emit('collapse', section);
+ it('does not initialize search settings when js-search-settings-app is unavailable', async () => {
+ await initSearch();
- expect(closeSection).toHaveBeenCalledWith($(section));
+ expect(mount).not.toHaveBeenCalled();
});
});
diff --git a/spec/frontend/search_settings/mount_spec.js b/spec/frontend/search_settings/mount_spec.js
new file mode 100644
index 00000000000..b35266e56ae
--- /dev/null
+++ b/spec/frontend/search_settings/mount_spec.js
@@ -0,0 +1,36 @@
+import $ from 'jquery';
+import { setHTMLFixture } from 'helpers/fixtures';
+import mount from '~/search_settings/mount';
+import { expandSection, closeSection } from '~/settings_panels';
+
+jest.mock('~/settings_panels');
+
+describe('search_settings/mount', () => {
+ let app;
+
+ beforeEach(() => {
+ const el = document.createElement('div');
+
+ setHTMLFixture('<div id="content-body"></div>');
+
+ app = mount({ el });
+ });
+
+ afterEach(() => {
+ app.$destroy();
+ });
+
+ it('calls settings_panel.onExpand when expand event is emitted', () => {
+ const section = { name: 'section' };
+ app.$refs.searchSettings.$emit('expand', section);
+
+ expect(expandSection).toHaveBeenCalledWith($(section));
+ });
+
+ it('calls settings_panel.closeSection when collapse event is emitted', () => {
+ const section = { name: 'section' };
+ app.$refs.searchSettings.$emit('collapse', section);
+
+ expect(closeSection).toHaveBeenCalledWith($(section));
+ });
+});
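Taken together, the new index_spec.js and mount_spec.js imply that ~/search_settings now only looks up the container element and delegates to mount; a hedged sketch of that shape, inferred purely from the assertions above (the real module may differ):

// Inferred behaviour only: resolve without mounting when the container is
// absent, and hand the element to mount() when it is present.
import mount from '~/search_settings/mount';

const initSearch = async () => {
  const el = document.querySelector('.js-search-settings-app');

  return el ? mount({ el }) : null;
};

export default initSearch;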
diff --git a/spec/frontend/search_spec.js b/spec/frontend/search_spec.js
deleted file mode 100644
index d234a7fccb9..00000000000
--- a/spec/frontend/search_spec.js
+++ /dev/null
@@ -1,23 +0,0 @@
-import setHighlightClass from 'ee_else_ce/search/highlight_blob_search_result';
-import Search from '~/pages/search/show/search';
-
-jest.mock('~/api');
-jest.mock('ee_else_ce/search/highlight_blob_search_result');
-
-describe('Search', () => {
- const fixturePath = 'search/show.html';
-
- preloadFixtures(fixturePath);
-
- describe('constructor side effects', () => {
- afterEach(() => {
- jest.restoreAllMocks();
- });
-
- it('highlights lines with search terms in blob search results', () => {
- new Search(); // eslint-disable-line no-new
-
- expect(setHighlightClass).toHaveBeenCalled();
- });
- });
-});
diff --git a/spec/frontend/serverless/components/environment_row_spec.js b/spec/frontend/serverless/components/environment_row_spec.js
index a59b4fdbb7b..944283136d0 100644
--- a/spec/frontend/serverless/components/environment_row_spec.js
+++ b/spec/frontend/serverless/components/environment_row_spec.js
@@ -1,8 +1,8 @@
import { shallowMount } from '@vue/test-utils';
import environmentRowComponent from '~/serverless/components/environment_row.vue';
-import { mockServerlessFunctions, mockServerlessFunctionsDiffEnv } from '../mock_data';
import { translate } from '~/serverless/utils';
+import { mockServerlessFunctions, mockServerlessFunctionsDiffEnv } from '../mock_data';
const createComponent = (env, envName) =>
shallowMount(environmentRowComponent, {
diff --git a/spec/frontend/serverless/store/actions_spec.js b/spec/frontend/serverless/store/actions_spec.js
index 32e30a57d4b..41d99fcb26c 100644
--- a/spec/frontend/serverless/store/actions_spec.js
+++ b/spec/frontend/serverless/store/actions_spec.js
@@ -2,8 +2,8 @@ import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import statusCodes from '~/lib/utils/http_status';
import { fetchFunctions, fetchMetrics } from '~/serverless/store/actions';
-import { mockServerlessFunctions, mockMetrics } from '../mock_data';
import axios from '~/lib/utils/axios_utils';
+import { mockServerlessFunctions, mockMetrics } from '../mock_data';
import { adjustMetricQuery } from '../utils';
describe('ServerlessActions', () => {
diff --git a/spec/frontend/sidebar/__snapshots__/todo_spec.js.snap b/spec/frontend/sidebar/__snapshots__/todo_spec.js.snap
index e295c587d70..846f45345e7 100644
--- a/spec/frontend/sidebar/__snapshots__/todo_spec.js.snap
+++ b/spec/frontend/sidebar/__snapshots__/todo_spec.js.snap
@@ -3,7 +3,7 @@
exports[`SidebarTodo template renders component container element with proper data attributes 1`] = `
<button
aria-label="Mark as done"
- class="btn btn-default btn-todo issuable-header-btn float-right"
+ class="gl-button btn btn-default btn-todo issuable-header-btn float-right"
data-issuable-id="1"
data-issuable-type="epic"
type="button"
diff --git a/spec/frontend/sidebar/assignees_realtime_spec.js b/spec/frontend/sidebar/assignees_realtime_spec.js
index 1c62c52dc67..a7e280b5ae1 100644
--- a/spec/frontend/sidebar/assignees_realtime_spec.js
+++ b/spec/frontend/sidebar/assignees_realtime_spec.js
@@ -2,8 +2,8 @@ import { shallowMount } from '@vue/test-utils';
import ActionCable from '@rails/actioncable';
import AssigneesRealtime from '~/sidebar/components/assignees/assignees_realtime.vue';
import SidebarMediator from '~/sidebar/sidebar_mediator';
-import Mock from './mock_data';
import query from '~/issuable_sidebar/queries/issue_sidebar.query.graphql';
+import Mock from './mock_data';
jest.mock('@rails/actioncable', () => {
const mockConsumer = {
diff --git a/spec/frontend/sidebar/assignees_spec.js b/spec/frontend/sidebar/assignees_spec.js
index 23e82171fe9..fbf5fca2234 100644
--- a/spec/frontend/sidebar/assignees_spec.js
+++ b/spec/frontend/sidebar/assignees_spec.js
@@ -1,6 +1,6 @@
import { mount } from '@vue/test-utils';
-import { trimText } from 'helpers/text_helper';
import { GlIcon } from '@gitlab/ui';
+import { trimText } from 'helpers/text_helper';
import UsersMockHelper from 'helpers/user_mock_data_helper';
import Assignee from '~/sidebar/components/assignees/assignees.vue';
import UsersMock from './mock_data';
diff --git a/spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js b/spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js
index 0b6a2e6ceb9..d52a59bb67b 100644
--- a/spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js
+++ b/spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js
@@ -1,5 +1,5 @@
-import { createMockDirective } from 'helpers/vue_mock_directive';
import { mount } from '@vue/test-utils';
+import { createMockDirective } from 'helpers/vue_mock_directive';
import { stubTransition } from 'helpers/stub_transition';
import TimeTracker from '~/sidebar/components/time_tracking/time_tracker.vue';
diff --git a/spec/frontend/sidebar/reviewers_spec.js b/spec/frontend/sidebar/reviewers_spec.js
index 91f28e85f3b..9208fb7288b 100644
--- a/spec/frontend/sidebar/reviewers_spec.js
+++ b/spec/frontend/sidebar/reviewers_spec.js
@@ -1,6 +1,6 @@
import { mount } from '@vue/test-utils';
-import { trimText } from 'helpers/text_helper';
import { GlIcon } from '@gitlab/ui';
+import { trimText } from 'helpers/text_helper';
import UsersMockHelper from 'helpers/user_mock_data_helper';
import Reviewer from '~/sidebar/components/reviewers/reviewers.vue';
import UsersMock from './mock_data';
@@ -114,8 +114,7 @@ describe('Reviewer component', () => {
editable: true,
});
- expect(wrapper.findAll('.user-item').length).toBe(users.length);
- expect(wrapper.find('.user-list-more').exists()).toBe(false);
+ expect(wrapper.findAll('[data-testid="reviewer"]').length).toBe(users.length);
});
it('shows sorted reviewer where "can merge" users are sorted first', () => {
@@ -144,10 +143,10 @@ describe('Reviewer component', () => {
users,
});
- const userItems = wrapper.findAll('.user-list .user-item a');
+ const userItems = wrapper.findAll('[data-testid="reviewer"]');
expect(userItems.length).toBe(3);
- expect(userItems.at(0).attributes('title')).toBe(users[2].name);
+ expect(userItems.at(0).find('a').attributes('title')).toBe(users[2].name);
});
it('passes the sorted reviewers to the collapsed-reviewer-list', () => {
diff --git a/spec/frontend/sidebar/subscriptions_spec.js b/spec/frontend/sidebar/subscriptions_spec.js
index 043ffd972da..e7ae59e26cf 100644
--- a/spec/frontend/sidebar/subscriptions_spec.js
+++ b/spec/frontend/sidebar/subscriptions_spec.js
@@ -1,17 +1,20 @@
+import { GlToggle } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import Subscriptions from '~/sidebar/components/subscriptions/subscriptions.vue';
import eventHub from '~/sidebar/event_hub';
-import ToggleButton from '~/vue_shared/components/toggle_button.vue';
describe('Subscriptions', () => {
let wrapper;
- const findToggleButton = () => wrapper.find(ToggleButton);
+ const findToggleButton = () => wrapper.findComponent(GlToggle);
const mountComponent = (propsData) =>
- shallowMount(Subscriptions, {
- propsData,
- });
+ extendedWrapper(
+ shallowMount(Subscriptions, {
+ propsData,
+ }),
+ );
afterEach(() => {
wrapper.destroy();
@@ -24,7 +27,7 @@ describe('Subscriptions', () => {
subscribed: undefined,
});
- expect(findToggleButton().attributes('isloading')).toBe('true');
+ expect(findToggleButton().props('isLoading')).toBe(true);
});
it('is toggled "off" when currently not subscribed', () => {
@@ -32,7 +35,7 @@ describe('Subscriptions', () => {
subscribed: false,
});
- expect(findToggleButton().attributes('value')).toBeFalsy();
+ expect(findToggleButton().props('value')).toBe(false);
});
it('is toggled "on" when currently subscribed', () => {
@@ -40,7 +43,7 @@ describe('Subscriptions', () => {
subscribed: true,
});
- expect(findToggleButton().attributes('value')).toBe('true');
+ expect(findToggleButton().props('value')).toBe(true);
});
it('toggleSubscription method emits `toggleSubscription` event on eventHub and Component', () => {
@@ -93,14 +96,16 @@ describe('Subscriptions', () => {
});
it('sets the correct display text', () => {
- expect(wrapper.find('.issuable-header-text').text()).toContain(subscribeDisabledDescription);
+ expect(wrapper.findByTestId('subscription-title').text()).toContain(
+ subscribeDisabledDescription,
+ );
expect(wrapper.find({ ref: 'tooltip' }).attributes('title')).toBe(
subscribeDisabledDescription,
);
});
it('does not render the toggle button', () => {
- expect(wrapper.find('.js-issuable-subscribe-button').exists()).toBe(false);
+ expect(findToggleButton().exists()).toBe(false);
});
});
});
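The subscriptions_spec.js hunks replace ToggleButton with GlToggle and switch the assertions from attributes() to props(); a short sketch of why the expected values change type (not part of the upstream patch; findComponent and props come from @vue/test-utils):

// attributes() reads the rendered DOM, so values come back as strings such as
// 'true' (or undefined); props() returns the values actually passed to the
// child component, so booleans can be asserted directly.
const findToggleButton = () => wrapper.findComponent(GlToggle);

expect(findToggleButton().props('value')).toBe(true); // boolean, not 'true'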
diff --git a/spec/frontend/sidebar/todo_spec.js b/spec/frontend/sidebar/todo_spec.js
index 4adfaf7ad7b..7b7e24f2a8f 100644
--- a/spec/frontend/sidebar/todo_spec.js
+++ b/spec/frontend/sidebar/todo_spec.js
@@ -26,7 +26,7 @@ describe('SidebarTodo', () => {
it.each`
state | classes
- ${false} | ${['btn', 'btn-default', 'btn-todo', 'issuable-header-btn', 'float-right']}
+ ${false} | ${['gl-button', 'btn', 'btn-default', 'btn-todo', 'issuable-header-btn', 'float-right']}
${true} | ${['btn-blank', 'btn-todo', 'sidebar-collapsed-icon', 'dont-change-state']}
`('returns todo button classes for when `collapsed` prop is `$state`', ({ state, classes }) => {
createComponent({ collapsed: state });
@@ -35,7 +35,7 @@ describe('SidebarTodo', () => {
it.each`
isTodo | iconClass | label | icon
- ${false} | ${''} | ${'Add a To-Do'} | ${'todo-add'}
+ ${false} | ${''} | ${'Add a to do'} | ${'todo-add'}
${true} | ${'todo-undone'} | ${'Mark as done'} | ${'todo-done'}
`(
'renders proper button when `isTodo` prop is `$isTodo`',
diff --git a/spec/frontend/snippets/components/show_spec.js b/spec/frontend/snippets/components/show_spec.js
index b5ab7def753..6dbf7cfc4b4 100644
--- a/spec/frontend/snippets/components/show_spec.js
+++ b/spec/frontend/snippets/components/show_spec.js
@@ -1,6 +1,6 @@
import { GlLoadingIcon } from '@gitlab/ui';
-import { Blob, BinaryBlob } from 'jest/blob/components/mock_data';
import { shallowMount } from '@vue/test-utils';
+import { Blob, BinaryBlob } from 'jest/blob/components/mock_data';
import SnippetApp from '~/snippets/components/show.vue';
import EmbedDropdown from '~/snippets/components/embed_dropdown.vue';
import SnippetHeader from '~/snippets/components/snippet_header.vue';
diff --git a/spec/frontend/static_site_editor/components/edit_meta_modal_spec.js b/spec/frontend/static_site_editor/components/edit_meta_modal_spec.js
index c7d0abee05c..d80b28b91f8 100644
--- a/spec/frontend/static_site_editor/components/edit_meta_modal_spec.js
+++ b/spec/frontend/static_site_editor/components/edit_meta_modal_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import { GlModal } from '@gitlab/ui';
-import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import MockAdapter from 'axios-mock-adapter';
+import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import axios from '~/lib/utils/axios_utils';
import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
import EditMetaModal from '~/static_site_editor/components/edit_meta_modal.vue';
diff --git a/spec/frontend/static_site_editor/pages/success_spec.js b/spec/frontend/static_site_editor/pages/success_spec.js
index 3fc69dc4586..7d2d5edda84 100644
--- a/spec/frontend/static_site_editor/pages/success_spec.js
+++ b/spec/frontend/static_site_editor/pages/success_spec.js
@@ -1,8 +1,8 @@
import { shallowMount } from '@vue/test-utils';
import { GlButton, GlEmptyState, GlLoadingIcon } from '@gitlab/ui';
import Success from '~/static_site_editor/pages/success.vue';
-import { savedContentMeta, returnUrl, sourcePath } from '../mock_data';
import { HOME_ROUTE } from '~/static_site_editor/router/constants';
+import { savedContentMeta, returnUrl, sourcePath } from '../mock_data';
describe('~/static_site_editor/pages/success.vue', () => {
const mergeRequestsIllustrationPath = 'illustrations/merge_requests.svg';
diff --git a/spec/frontend/static_site_editor/services/front_matterify_spec.js b/spec/frontend/static_site_editor/services/front_matterify_spec.js
index 866897f21ef..ec3752b30c6 100644
--- a/spec/frontend/static_site_editor/services/front_matterify_spec.js
+++ b/spec/frontend/static_site_editor/services/front_matterify_spec.js
@@ -1,3 +1,4 @@
+import { frontMatterify, stringify } from '~/static_site_editor/services/front_matterify';
import {
sourceContentYAML as content,
sourceContentHeaderObjYAML as yamlFrontMatterObj,
@@ -5,8 +6,6 @@ import {
sourceContentBody as body,
} from '../mock_data';
-import { frontMatterify, stringify } from '~/static_site_editor/services/front_matterify';
-
describe('static_site_editor/services/front_matterify', () => {
const frontMatterifiedContent = {
source: content,
diff --git a/spec/frontend/static_site_editor/services/parse_source_file_spec.js b/spec/frontend/static_site_editor/services/parse_source_file_spec.js
index ab9e63f4cd2..fdd11297e09 100644
--- a/spec/frontend/static_site_editor/services/parse_source_file_spec.js
+++ b/spec/frontend/static_site_editor/services/parse_source_file_spec.js
@@ -1,3 +1,4 @@
+import parseSourceFile from '~/static_site_editor/services/parse_source_file';
import {
sourceContentYAML as content,
sourceContentHeaderYAML as yamlFrontMatter,
@@ -5,8 +6,6 @@ import {
sourceContentBody as body,
} from '../mock_data';
-import parseSourceFile from '~/static_site_editor/services/parse_source_file';
-
describe('static_site_editor/services/parse_source_file', () => {
const contentComplex = [content, content, content].join('');
const complexBody = [body, content, content].join('');
diff --git a/spec/frontend/terraform/components/states_table_actions_spec.js b/spec/frontend/terraform/components/states_table_actions_spec.js
index 3f5df8a96f8..6100b1f67a2 100644
--- a/spec/frontend/terraform/components/states_table_actions_spec.js
+++ b/spec/frontend/terraform/components/states_table_actions_spec.js
@@ -1,7 +1,8 @@
import { GlDropdown, GlModal, GlSprintf } from '@gitlab/ui';
import { createLocalVue, shallowMount } from '@vue/test-utils';
-import createMockApollo from 'helpers/mock_apollo_helper';
import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
import StateActions from '~/terraform/components/states_table_actions.vue';
import lockStateMutation from '~/terraform/graphql/mutations/lock_state.mutation.graphql';
import removeStateMutation from '~/terraform/graphql/mutations/remove_state.mutation.graphql';
@@ -14,6 +15,7 @@ describe('StatesTableActions', () => {
let lockResponse;
let removeResponse;
let unlockResponse;
+ let updateStateResponse;
let wrapper;
const defaultProps = {
@@ -26,7 +28,9 @@ describe('StatesTableActions', () => {
};
const createMockApolloProvider = () => {
- lockResponse = jest.fn().mockResolvedValue({ data: { terraformStateLock: { errors: [] } } });
+ lockResponse = jest
+ .fn()
+ .mockResolvedValue({ data: { terraformStateLock: { errors: ['There was an error'] } } });
removeResponse = jest
.fn()
@@ -36,11 +40,20 @@ describe('StatesTableActions', () => {
.fn()
.mockResolvedValue({ data: { terraformStateUnlock: { errors: [] } } });
- return createMockApollo([
- [lockStateMutation, lockResponse],
- [removeStateMutation, removeResponse],
- [unlockStateMutation, unlockResponse],
- ]);
+ updateStateResponse = jest.fn().mockResolvedValue({});
+
+ return createMockApollo(
+ [
+ [lockStateMutation, lockResponse],
+ [removeStateMutation, removeResponse],
+ [unlockStateMutation, unlockResponse],
+ ],
+ {
+ Mutation: {
+ addDataToTerraformState: updateStateResponse,
+ },
+ },
+ );
};
const createComponent = (propsData = defaultProps) => {
@@ -56,6 +69,7 @@ describe('StatesTableActions', () => {
return wrapper.vm.$nextTick();
};
+ const findActionsDropdown = () => wrapper.find(GlDropdown);
const findLockBtn = () => wrapper.find('[data-testid="terraform-state-lock"]');
const findUnlockBtn = () => wrapper.find('[data-testid="terraform-state-unlock"]');
const findDownloadBtn = () => wrapper.find('[data-testid="terraform-state-download"]');
@@ -70,9 +84,25 @@ describe('StatesTableActions', () => {
lockResponse = null;
removeResponse = null;
unlockResponse = null;
+ updateStateResponse = null;
wrapper.destroy();
});
+ describe('when the state is loading', () => {
+ beforeEach(() => {
+ return createComponent({
+ state: {
+ ...defaultProps.state,
+ loadingActions: true,
+ },
+ });
+ });
+
+ it('disables the actions dropdown', () => {
+ expect(findActionsDropdown().props('disabled')).toBe(true);
+ });
+ });
+
describe('download button', () => {
it('displays a download button', () => {
expect(findDownloadBtn().text()).toBe('Download JSON');
@@ -104,7 +134,8 @@ describe('StatesTableActions', () => {
describe('when clicking the unlock button', () => {
beforeEach(() => {
findUnlockBtn().vm.$emit('click');
- return wrapper.vm.$nextTick();
+
+ return waitForPromises();
});
it('calls the unlock mutation', () => {
@@ -137,7 +168,8 @@ describe('StatesTableActions', () => {
describe('when clicking the lock button', () => {
beforeEach(() => {
findLockBtn().vm.$emit('click');
- return wrapper.vm.$nextTick();
+
+ return waitForPromises();
});
it('calls the lock mutation', () => {
@@ -145,6 +177,42 @@ describe('StatesTableActions', () => {
stateID: unlockedProps.state.id,
});
});
+
+ it('calls mutations to set loading and errors', () => {
+ // loading update
+ expect(updateStateResponse).toHaveBeenNthCalledWith(
+ 1,
+ {},
+ {
+ terraformState: {
+ ...unlockedProps.state,
+ _showDetails: false,
+ errorMessages: [],
+ loadingActions: true,
+ },
+ },
+ // Apollo fields
+ expect.any(Object),
+ expect.any(Object),
+ );
+
+ // final update
+ expect(updateStateResponse).toHaveBeenNthCalledWith(
+ 2,
+ {},
+ {
+ terraformState: {
+ ...unlockedProps.state,
+ _showDetails: true,
+ errorMessages: ['There was an error'],
+ loadingActions: false,
+ },
+ },
+ // Apollo fields
+ expect.any(Object),
+ expect.any(Object),
+ );
+ });
});
});
@@ -156,7 +224,8 @@ describe('StatesTableActions', () => {
describe('when clicking the remove button', () => {
beforeEach(() => {
findRemoveBtn().vm.$emit('click');
- return wrapper.vm.$nextTick();
+
+ return waitForPromises();
});
it('displays a remove modal', () => {
diff --git a/spec/frontend/terraform/components/states_table_spec.js b/spec/frontend/terraform/components/states_table_spec.js
index f2b7bc00e5b..ba676c9741e 100644
--- a/spec/frontend/terraform/components/states_table_spec.js
+++ b/spec/frontend/terraform/components/states_table_spec.js
@@ -11,6 +11,8 @@ describe('StatesTable', () => {
const defaultProps = {
states: [
{
+ _showDetails: true,
+ errorMessages: ['State 1 has errored'],
name: 'state-1',
lockedAt: '2020-10-13T00:00:00Z',
lockedByUser: {
@@ -20,6 +22,8 @@ describe('StatesTable', () => {
latestVersion: null,
},
{
+ _showDetails: false,
+ errorMessages: [],
name: 'state-2',
lockedAt: null,
lockedByUser: null,
@@ -27,6 +31,8 @@ describe('StatesTable', () => {
latestVersion: null,
},
{
+ _showDetails: false,
+ errorMessages: [],
name: 'state-3',
lockedAt: '2020-10-10T00:00:00Z',
lockedByUser: {
@@ -54,6 +60,8 @@ describe('StatesTable', () => {
},
},
{
+ _showDetails: true,
+ errorMessages: ['State 4 has errored'],
name: 'state-4',
lockedAt: '2020-10-10T00:00:00Z',
lockedByUser: null,
@@ -154,6 +162,17 @@ describe('StatesTable', () => {
expect(findActions().length).toEqual(0);
});
+ it.each`
+ errorMessage | lineNumber
+ ${defaultProps.states[0].errorMessages[0]} | ${0}
+ ${defaultProps.states[3].errorMessages[0]} | ${1}
+ `('displays table error message "$errorMessage"', ({ errorMessage, lineNumber }) => {
+ const states = wrapper.findAll('[data-testid="terraform-states-table-error"]');
+ const state = states.at(lineNumber);
+
+ expect(state.text()).toBe(errorMessage);
+ });
+
describe('when user is a terraform administrator', () => {
beforeEach(() => {
return createComponent({
diff --git a/spec/frontend/terraform/components/terraform_list_spec.js b/spec/frontend/terraform/components/terraform_list_spec.js
index fb56a7135a3..20d42ebd4fd 100644
--- a/spec/frontend/terraform/components/terraform_list_spec.js
+++ b/spec/frontend/terraform/components/terraform_list_spec.js
@@ -1,7 +1,7 @@
import { GlAlert, GlBadge, GlKeysetPagination, GlLoadingIcon, GlTab } from '@gitlab/ui';
import { createLocalVue, shallowMount } from '@vue/test-utils';
-import createMockApollo from 'helpers/mock_apollo_helper';
import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
import EmptyState from '~/terraform/components/empty_state.vue';
import StatesTable from '~/terraform/components/states_table.vue';
import TerraformList from '~/terraform/components/terraform_list.vue';
@@ -27,6 +27,15 @@ describe('TerraformList', () => {
},
};
+ // Override @client _showDetails
+ getStatesQuery.getStates.definitions[1].selectionSet.selections[0].directives = [];
+
+ // Override @client errorMessages
+ getStatesQuery.getStates.definitions[1].selectionSet.selections[1].directives = [];
+
+ // Override @client loadingActions
+ getStatesQuery.getStates.definitions[1].selectionSet.selections[2].directives = [];
+
const statsQueryResponse = queryResponse || jest.fn().mockResolvedValue(apolloQueryResponse);
const apolloProvider = createMockApollo([[getStatesQuery, statsQueryResponse]]);
@@ -52,20 +61,26 @@ describe('TerraformList', () => {
describe('when there is a list of terraform states', () => {
const states = [
{
+ _showDetails: false,
+ errorMessages: [],
id: 'gid://gitlab/Terraform::State/1',
name: 'state-1',
+ latestVersion: null,
+ loadingActions: false,
lockedAt: null,
- updatedAt: null,
lockedByUser: null,
- latestVersion: null,
+ updatedAt: null,
},
{
+ _showDetails: false,
+ errorMessages: [],
id: 'gid://gitlab/Terraform::State/2',
name: 'state-2',
+ latestVersion: null,
+ loadingActions: false,
lockedAt: null,
- updatedAt: null,
lockedByUser: null,
- latestVersion: null,
+ updatedAt: null,
},
];
diff --git a/spec/frontend/user_lists/components/edit_user_list_spec.js b/spec/frontend/user_lists/components/edit_user_list_spec.js
index 958e86ac050..4149a43e06b 100644
--- a/spec/frontend/user_lists/components/edit_user_list_spec.js
+++ b/spec/frontend/user_lists/components/edit_user_list_spec.js
@@ -7,8 +7,8 @@ import Api from '~/api';
import createStore from '~/user_lists/store/edit';
import EditUserList from '~/user_lists/components/edit_user_list.vue';
import UserListForm from '~/user_lists/components/user_list_form.vue';
-import { userList } from '../../feature_flags/mock_data';
import { redirectTo } from '~/lib/utils/url_utility';
+import { userList } from '../../feature_flags/mock_data';
jest.mock('~/api');
jest.mock('~/lib/utils/url_utility');
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_container_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_container_spec.js
index 7ff8d9678fe..bc192a7d9d8 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_container_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_container_spec.js
@@ -3,8 +3,8 @@ import MockAdapter from 'axios-mock-adapter';
import MrWidgetPipelineContainer from '~/vue_merge_request_widget/components/mr_widget_pipeline_container.vue';
import MrWidgetPipeline from '~/vue_merge_request_widget/components/mr_widget_pipeline.vue';
import ArtifactsApp from '~/vue_merge_request_widget/components/artifacts_list_app.vue';
-import { mockStore } from '../mock_data';
import axios from '~/lib/utils/axios_utils';
+import { mockStore } from '../mock_data';
describe('MrWidgetPipelineContainer', () => {
let wrapper;
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_status_icon_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_status_icon_spec.js
index 6c3b4a01659..c25e10c5249 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_status_icon_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_status_icon_spec.js
@@ -1,48 +1,60 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
+import { GlLoadingIcon } from '@gitlab/ui';
+import { shallowMount, mount } from '@vue/test-utils';
import mrStatusIcon from '~/vue_merge_request_widget/components/mr_widget_status_icon.vue';
describe('MR widget status icon component', () => {
- let vm;
- let Component;
+ let wrapper;
- beforeEach(() => {
- Component = Vue.extend(mrStatusIcon);
- });
+ const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
+ const findDisabledMergeButton = () => wrapper.find('[data-testid="disabled-merge-button"]');
+
+ const createWrapper = (props, mountFn = shallowMount) => {
+ wrapper = mountFn(mrStatusIcon, {
+ propsData: {
+ ...props,
+ },
+ });
+ };
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
describe('while loading', () => {
it('renders loading icon', () => {
- vm = mountComponent(Component, { status: 'loading' });
+ createWrapper({ status: 'loading' });
- expect(vm.$el.querySelector('.mr-widget-icon span').classList).toContain('gl-spinner');
+ expect(findLoadingIcon().exists()).toBe(true);
});
});
describe('with status icon', () => {
- it('renders ci status icon', () => {
- vm = mountComponent(Component, { status: 'failed' });
+ it('renders success status icon', () => {
+ createWrapper({ status: 'success' }, mount);
+
+ expect(wrapper.find('[data-testid="status_success-icon"]').exists()).toBe(true);
+ });
+
+ it('renders failed status icon', () => {
+ createWrapper({ status: 'failed' }, mount);
- expect(vm.$el.querySelector('.js-ci-status-icon-failed')).not.toBeNull();
+ expect(wrapper.find('[data-testid="status_failed-icon"]').exists()).toBe(true);
});
});
describe('with disabled button', () => {
it('renders a disabled button', () => {
- vm = mountComponent(Component, { status: 'failed', showDisabledButton: true });
+ createWrapper({ status: 'failed', showDisabledButton: true });
- expect(vm.$el.querySelector('.js-disabled-merge-button').textContent.trim()).toEqual('Merge');
+ expect(findDisabledMergeButton().exists()).toBe(true);
});
});
describe('without disabled button', () => {
it('does not render a disabled button', () => {
- vm = mountComponent(Component, { status: 'failed' });
+ createWrapper({ status: 'failed' });
- expect(vm.$el.querySelector('.js-disabled-merge-button')).toBeNull();
+ expect(findDisabledMergeButton().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js
index 8fcc982ac99..6ce6253c74d 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js
@@ -1,11 +1,10 @@
import { mount, shallowMount } from '@vue/test-utils';
import { GlLink, GlSprintf } from '@gitlab/ui';
-import { mockTracking, triggerEvent, unmockTracking } from 'helpers/tracking_helper';
import MockAdapter from 'axios-mock-adapter';
+import { mockTracking, triggerEvent, unmockTracking } from 'helpers/tracking_helper';
import suggestPipelineComponent from '~/vue_merge_request_widget/components/mr_widget_suggest_pipeline.vue';
import MrWidgetIcon from '~/vue_merge_request_widget/components/mr_widget_icon.vue';
import dismissibleContainer from '~/vue_shared/components/dismissible_container.vue';
-import { suggestProps, iconName } from './pipeline_tour_mock_data';
import axios from '~/lib/utils/axios_utils';
import {
SP_TRACK_LABEL,
@@ -15,6 +14,7 @@ import {
SP_SHOW_TRACK_VALUE,
SP_HELP_URL,
} from '~/vue_merge_request_widget/constants';
+import { suggestProps, iconName } from './pipeline_tour_mock_data';
describe('MRWidgetSuggestPipeline', () => {
describe('template', () => {
diff --git a/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_pipeline_failed_spec.js.snap b/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_pipeline_failed_spec.js.snap
new file mode 100644
index 00000000000..4c3c7fbea36
--- /dev/null
+++ b/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_pipeline_failed_spec.js.snap
@@ -0,0 +1,24 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`PipelineFailed should render error message with a disabled merge button 1`] = `
+<div
+ class="mr-widget-body media"
+>
+ <status-icon-stub
+ showdisabledbutton="true"
+ status="warning"
+ />
+
+ <div
+ class="media-body space-children"
+ >
+ <span
+ class="bold"
+ >
+ <gl-sprintf-stub
+ message="The pipeline for this merge request did not complete. Push a new commit to fix the failure or check the %{linkStart}troubleshooting documentation%{linkEnd} to see other possible actions."
+ />
+ </span>
+ </div>
+</div>
+`;
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js
index 850bbd93df5..d309aa905e8 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js
@@ -202,7 +202,11 @@ describe('MRWidgetAutoMergeEnabled', () => {
wrapper.vm.cancelAutomaticMerge();
setImmediate(() => {
expect(wrapper.vm.isCancellingAutoMerge).toBeTruthy();
- expect(eventHub.$emit).toHaveBeenCalledWith('UpdateWidgetData', mrObj);
+ if (mergeRequestWidgetGraphql) {
+ expect(eventHub.$emit).toHaveBeenCalledWith('MRWidgetUpdateRequested');
+ } else {
+ expect(eventHub.$emit).toHaveBeenCalledWith('UpdateWidgetData', mrObj);
+ }
done();
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js
index 48c1a9eedf9..c1471314c4a 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js
@@ -54,7 +54,7 @@ describe('MRWidgetFailedToMerge', () => {
Vue.nextTick()
.then(() => {
- expect(vm.mergeError).toBe('contains line breaks');
+ expect(vm.mergeError).toBe('contains line breaks.');
})
.then(done)
.catch(done.fail);
@@ -113,14 +113,14 @@ describe('MRWidgetFailedToMerge', () => {
describe('while it is not regressing', () => {
it('renders warning icon and disabled merge button', () => {
expect(vm.$el.querySelector('.js-ci-status-icon-warning')).not.toBeNull();
- expect(vm.$el.querySelector('.js-disabled-merge-button').getAttribute('disabled')).toEqual(
- 'disabled',
- );
+ expect(
+ vm.$el.querySelector('[data-testid="disabled-merge-button"]').getAttribute('disabled'),
+ ).toEqual('disabled');
});
it('renders given error', () => {
expect(vm.$el.querySelector('.has-error-message').textContent.trim()).toEqual(
- 'Merge error happened',
+ 'Merge error happened.',
);
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js
index 36c4174c03d..cdb80b30d36 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js
@@ -1,7 +1,10 @@
import Vue from 'vue';
+import { getByRole } from '@testing-library/dom';
import mountComponent from 'helpers/vue_mount_component_helper';
import mergedComponent from '~/vue_merge_request_widget/components/states/mr_widget_merged.vue';
import eventHub from '~/vue_merge_request_widget/event_hub';
+import modalEventHub from '~/projects/commit/event_hub';
+import { OPEN_REVERT_MODAL } from '~/projects/commit/constants';
describe('MRWidgetMerged', () => {
let vm;
@@ -16,6 +19,7 @@ describe('MRWidgetMerged', () => {
};
beforeEach(() => {
+ jest.spyOn(document, 'dispatchEvent');
const Component = Vue.extend(mergedComponent);
const mr = {
isRemovingSourceBranch: false,
@@ -147,6 +151,18 @@ describe('MRWidgetMerged', () => {
});
});
+ it('calls dispatchDocumentEvent to load in the modal component', () => {
+ expect(document.dispatchEvent).toHaveBeenCalledWith(new CustomEvent('merged:UpdateActions'));
+ });
+
+ it('emits event to open the revert modal on revert button click', () => {
+ const eventHubSpy = jest.spyOn(modalEventHub, '$emit');
+
+ getByRole(vm.$el, 'button', { name: /Revert/i }).click();
+
+ expect(eventHubSpy).toHaveBeenCalledWith(OPEN_REVERT_MODAL);
+ });
+
it('has merged by information', () => {
expect(vm.$el.textContent).toContain('Merged by');
expect(vm.$el.textContent).toContain('Administrator');
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js
index 8847e4e6bdd..bd77a1d657e 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js
@@ -1,25 +1,27 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import { removeBreakLine } from 'helpers/text_helper';
-import pipelineBlockedComponent from '~/vue_merge_request_widget/components/states/mr_widget_pipeline_blocked.vue';
+import { shallowMount, mount } from '@vue/test-utils';
+import PipelineBlockedComponent from '~/vue_merge_request_widget/components/states/mr_widget_pipeline_blocked.vue';
describe('MRWidgetPipelineBlocked', () => {
- let vm;
- beforeEach(() => {
- const Component = Vue.extend(pipelineBlockedComponent);
- vm = mountComponent(Component);
- });
+ let wrapper;
+
+ const createWrapper = (mountFn = shallowMount) => {
+ wrapper = mountFn(PipelineBlockedComponent);
+ };
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
it('renders warning icon', () => {
- expect(vm.$el.querySelector('.ci-status-icon-warning')).not.toBe(null);
+ createWrapper(mount);
+
+ expect(wrapper.find('.ci-status-icon-warning').exists()).toBe(true);
});
it('renders information text', () => {
- expect(removeBreakLine(vm.$el.textContent).trim()).toContain(
+ createWrapper();
+
+ expect(wrapper.text()).toBe(
'Pipeline blocked. The pipeline for this merge request requires a manual action to proceed',
);
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_pipeline_failed_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_pipeline_failed_spec.js
index 179adef12d9..ed3e7d9c0eb 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_pipeline_failed_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_pipeline_failed_spec.js
@@ -1,19 +1,30 @@
-import Vue from 'vue';
-import { removeBreakLine } from 'helpers/text_helper';
+import { shallowMount } from '@vue/test-utils';
import PipelineFailed from '~/vue_merge_request_widget/components/states/pipeline_failed.vue';
+import statusIcon from '~/vue_merge_request_widget/components/mr_widget_status_icon.vue';
describe('PipelineFailed', () => {
- describe('template', () => {
- const Component = Vue.extend(PipelineFailed);
- const vm = new Component({
- el: document.createElement('div'),
- });
- it('should have correct elements', () => {
- expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy();
- expect(vm.$el.querySelector('button').getAttribute('disabled')).toBeTruthy();
- expect(removeBreakLine(vm.$el.innerText).trim()).toContain(
- 'The pipeline for this merge request failed. Please retry the job or push a new commit to fix the failure',
- );
- });
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMount(PipelineFailed);
+ };
+
+ const findStatusIcon = () => wrapper.find(statusIcon);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('should render error message with a disabled merge button', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('merge button should be disabled', () => {
+ expect(findStatusIcon().props('showDisabledButton')).toBe(true);
});
});
diff --git a/spec/frontend/vue_mr_widget/components/terraform/mr_widget_terraform_container_spec.js b/spec/frontend/vue_mr_widget/components/terraform/mr_widget_terraform_container_spec.js
index 8da0d0f16d6..a9b852b084d 100644
--- a/spec/frontend/vue_mr_widget/components/terraform/mr_widget_terraform_container_spec.js
+++ b/spec/frontend/vue_mr_widget/components/terraform/mr_widget_terraform_container_spec.js
@@ -1,12 +1,12 @@
import { GlDeprecatedSkeletonLoading as GlSkeletonLoading, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
-import { invalidPlanWithName, plans, validPlanWithName } from './mock_data';
import axios from '~/lib/utils/axios_utils';
import MrWidgetExpanableSection from '~/vue_merge_request_widget/components/mr_widget_expandable_section.vue';
import MrWidgetTerraformContainer from '~/vue_merge_request_widget/components/terraform/mr_widget_terraform_container.vue';
import Poll from '~/lib/utils/poll';
import TerraformPlan from '~/vue_merge_request_widget/components/terraform/terraform_plan.vue';
+import { invalidPlanWithName, plans, validPlanWithName } from './mock_data';
describe('MrWidgetTerraformContainer', () => {
let mock;
diff --git a/spec/frontend/vue_mr_widget/components/terraform/terraform_plan_spec.js b/spec/frontend/vue_mr_widget/components/terraform/terraform_plan_spec.js
index ea4eb44ebfe..f95a92c2cb1 100644
--- a/spec/frontend/vue_mr_widget/components/terraform/terraform_plan_spec.js
+++ b/spec/frontend/vue_mr_widget/components/terraform/terraform_plan_spec.js
@@ -33,7 +33,7 @@ describe('TerraformPlan', () => {
it('displays the header text with a name', () => {
expect(wrapper.text()).toContain(
- `The Terraform report ${validPlanWithName.job_name} was generated in your pipelines.`,
+ `The report ${validPlanWithName.job_name} was generated in your pipelines.`,
);
});
@@ -55,7 +55,7 @@ describe('TerraformPlan', () => {
});
it('displays the header text without a name', () => {
- expect(wrapper.text()).toContain('A Terraform report was generated in your pipelines.');
+ expect(wrapper.text()).toContain('A report was generated in your pipelines.');
});
});
@@ -70,7 +70,7 @@ describe('TerraformPlan', () => {
it('displays the header text with a name', () => {
expect(wrapper.text()).toContain(
- `The Terraform report ${invalidPlanWithName.job_name} failed to generate.`,
+ `The report ${invalidPlanWithName.job_name} failed to generate.`,
);
});
@@ -85,7 +85,7 @@ describe('TerraformPlan', () => {
});
it('displays the header text without a name', () => {
- expect(wrapper.text()).toContain('A Terraform report failed to generate.');
+ expect(wrapper.text()).toContain('A report failed to generate.');
});
it('does not render button because url is missing', () => {
diff --git a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
index 1ea7fe1fbfe..872dcd2af7f 100644
--- a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
+++ b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
@@ -1,7 +1,9 @@
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
import MockAdapter from 'axios-mock-adapter';
-import Api from '~/api';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { securityReportDownloadPathsQueryResponse } from 'jest/vue_shared/security_reports/mock_data';
import axios from '~/lib/utils/axios_utils';
import MrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options.vue';
import eventHub from '~/vue_merge_request_widget/event_hub';
@@ -9,20 +11,16 @@ import notify from '~/lib/utils/notify';
import SmartInterval from '~/smart_interval';
import { setFaviconOverlay } from '~/lib/utils/favicon';
import { stateKey } from '~/vue_merge_request_widget/stores/state_maps';
-import mockData from './mock_data';
-import { faviconDataUrl, overlayDataUrl } from '../lib/utils/mock_data';
import { SUCCESS } from '~/vue_merge_request_widget/components/deployment/constants';
+import securityReportDownloadPathsQuery from '~/vue_shared/security_reports/queries/security_report_download_paths.query.graphql';
+import { faviconDataUrl, overlayDataUrl } from '../lib/utils/mock_data';
+import mockData from './mock_data';
jest.mock('~/smart_interval');
jest.mock('~/lib/utils/favicon');
-const returnPromise = (data) =>
- new Promise((resolve) => {
- resolve({
- data,
- });
- });
+Vue.use(VueApollo);
describe('MrWidgetOptions', () => {
let wrapper;
@@ -48,7 +46,7 @@ describe('MrWidgetOptions', () => {
gon.features = {};
});
- const createComponent = (mrData = mockData) => {
+ const createComponent = (mrData = mockData, options = {}) => {
if (wrapper) {
wrapper.destroy();
}
@@ -57,6 +55,7 @@ describe('MrWidgetOptions', () => {
propsData: {
mrData: { ...mrData },
},
+ ...options,
});
return axios.waitForAll();
@@ -68,6 +67,7 @@ describe('MrWidgetOptions', () => {
describe('default', () => {
beforeEach(() => {
+ jest.spyOn(document, 'dispatchEvent');
return createComponent();
});
@@ -281,7 +281,7 @@ describe('MrWidgetOptions', () => {
let isCbExecuted;
beforeEach(() => {
- jest.spyOn(wrapper.vm.service, 'checkStatus').mockReturnValue(returnPromise(mockData));
+ jest.spyOn(wrapper.vm.service, 'checkStatus').mockResolvedValue({ data: mockData });
jest.spyOn(wrapper.vm.mr, 'setData').mockImplementation(() => {});
jest.spyOn(wrapper.vm, 'handleNotification').mockImplementation(() => {});
@@ -331,7 +331,7 @@ describe('MrWidgetOptions', () => {
it('should fetch deployments', () => {
jest
.spyOn(wrapper.vm.service, 'fetchDeployments')
- .mockReturnValue(returnPromise([{ id: 1, status: SUCCESS }]));
+ .mockResolvedValue({ data: [{ id: 1, status: SUCCESS }] });
wrapper.vm.fetchPreMergeDeployments();
@@ -347,13 +347,16 @@ describe('MrWidgetOptions', () => {
it('should fetch content of Cherry Pick and Revert modals', () => {
jest
.spyOn(wrapper.vm.service, 'fetchMergeActionsContent')
- .mockReturnValue(returnPromise('hello world'));
+ .mockResolvedValue({ data: 'hello world' });
wrapper.vm.fetchActionsContent();
return nextTick().then(() => {
expect(wrapper.vm.service.fetchMergeActionsContent).toHaveBeenCalled();
expect(document.body.textContent).toContain('hello world');
+ expect(document.dispatchEvent).toHaveBeenCalledWith(
+ new CustomEvent('merged:UpdateActions'),
+ );
});
});
});
@@ -822,36 +825,34 @@ describe('MrWidgetOptions', () => {
describe('security widget', () => {
describe.each`
- context | hasPipeline | reportType | isFlagEnabled | shouldRender
- ${'security report and flag enabled'} | ${true} | ${'sast'} | ${true} | ${true}
- ${'security report and flag disabled'} | ${true} | ${'sast'} | ${false} | ${false}
- ${'no security report and flag enabled'} | ${true} | ${'foo'} | ${true} | ${false}
- ${'no pipeline and flag enabled'} | ${false} | ${'sast'} | ${true} | ${false}
- `('given $context', ({ hasPipeline, reportType, isFlagEnabled, shouldRender }) => {
+ context | hasPipeline | shouldRender
+ ${'there is a pipeline'} | ${true} | ${true}
+ ${'no pipeline'} | ${false} | ${false}
+ `('given $context', ({ hasPipeline, shouldRender }) => {
beforeEach(() => {
- gon.features.coreSecurityMrWidget = isFlagEnabled;
+ const mrData = {
+ ...mockData,
+ ...(hasPipeline ? {} : { pipeline: null }),
+ };
- if (hasPipeline) {
- jest.spyOn(Api, 'pipelineJobs').mockResolvedValue({
- data: [{ artifacts: [{ file_type: reportType }] }],
- });
- }
+ // Override top-level mocked requests, which always use a fresh copy of
+ // mockData, which always includes the full pipeline object.
+ mock.onGet(mockData.merge_request_widget_path).reply(() => [200, mrData]);
+ mock.onGet(mockData.merge_request_cached_widget_path).reply(() => [200, mrData]);
- return createComponent({
- ...mockData,
- ...(hasPipeline ? {} : { pipeline: undefined }),
+ return createComponent(mrData, {
+ apolloProvider: createMockApollo([
+ [
+ securityReportDownloadPathsQuery,
+ async () => ({ data: securityReportDownloadPathsQueryResponse }),
+ ],
+ ]),
});
});
- if (shouldRender) {
- it('renders', () => {
- expect(findSecurityMrWidget().exists()).toBe(true);
- });
- } else {
- it('does not render', () => {
- expect(findSecurityMrWidget().exists()).toBe(false);
- });
- }
+ it(shouldRender ? 'renders' : 'does not render', () => {
+ expect(findSecurityMrWidget().exists()).toBe(shouldRender);
+ });
});
});
diff --git a/spec/frontend/alert_management/components/alert_details_spec.js b/spec/frontend/vue_shared/alert_details/alert_details_spec.js
index 976e50625a6..40af8e60008 100644
--- a/spec/frontend/alert_management/components/alert_details_spec.js
+++ b/spec/frontend/vue_shared/alert_details/alert_details_spec.js
@@ -4,17 +4,14 @@ import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import AlertDetails from '~/alert_management/components/alert_details.vue';
-import AlertSummaryRow from '~/alert_management/components/alert_summary_row.vue';
-import {
- ALERTS_SEVERITY_LABELS,
- trackAlertsDetailsViewsOptions,
-} from '~/alert_management/constants';
-import createIssueMutation from '~/alert_management/graphql/mutations/create_issue_from_alert.mutation.graphql';
+import AlertDetails from '~/vue_shared/alert_details/components/alert_details.vue';
+import AlertSummaryRow from '~/vue_shared/alert_details/components/alert_summary_row.vue';
+import { SEVERITY_LEVELS } from '~/vue_shared/alert_details/constants';
+import createIssueMutation from '~/vue_shared/alert_details/graphql/mutations/alert_issue_create.mutation.graphql';
import { joinPaths } from '~/lib/utils/url_utility';
import Tracking from '~/tracking';
import AlertDetailsTable from '~/vue_shared/components/alert_details_table.vue';
-import mockAlerts from '../mocks/alerts.json';
+import mockAlerts from './mocks/alerts.json';
const mockAlert = mockAlerts[0];
const environmentName = 'Production';
@@ -29,7 +26,13 @@ describe('AlertDetails', () => {
const projectId = '1';
const $router = { replace: jest.fn() };
- function mountComponent({ data, loading = false, mountMethod = shallowMount, stubs = {} } = {}) {
+ function mountComponent({
+ data,
+ loading = false,
+ mountMethod = shallowMount,
+ provide = {},
+ stubs = {},
+ } = {}) {
wrapper = extendedWrapper(
mountMethod(AlertDetails, {
provide: {
@@ -37,6 +40,7 @@ describe('AlertDetails', () => {
projectPath,
projectIssuesPath,
projectId,
+ ...provide,
},
data() {
return {
@@ -112,9 +116,7 @@ describe('AlertDetails', () => {
});
it('renders severity', () => {
- expect(wrapper.findByTestId('severity').text()).toBe(
- ALERTS_SEVERITY_LABELS[mockAlert.severity],
- );
+ expect(wrapper.findByTestId('severity').text()).toBe(SEVERITY_LEVELS[mockAlert.severity]);
});
it('renders a title', () => {
@@ -321,16 +323,27 @@ describe('AlertDetails', () => {
});
describe('Snowplow tracking', () => {
+ const mountOptions = {
+ props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
+ data: { alert: mockAlert },
+ loading: false,
+ };
+
beforeEach(() => {
jest.spyOn(Tracking, 'event');
- mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alert: mockAlert },
- loading: false,
- });
});
- it('should track alert details page views', () => {
+ it('should not track alert details page views when the tracking options do not exist', () => {
+ mountComponent(mountOptions);
+ expect(Tracking.event).not.toHaveBeenCalled();
+ });
+
+ it('should track alert details page views when the tracking options exist', () => {
+ const trackAlertsDetailsViewsOptions = {
+ category: 'Alert Management',
+ action: 'view_alert_details',
+ };
+ mountComponent({ ...mountOptions, provide: { trackAlertsDetailsViewsOptions } });
const { category, action } = trackAlertsDetailsViewsOptions;
expect(Tracking.event).toHaveBeenCalledWith(category, action);
});
diff --git a/spec/frontend/alert_management/components/alert_management_sidebar_todo_spec.js b/spec/frontend/vue_shared/alert_details/alert_management_sidebar_todo_spec.js
index ea7b4584a63..e08955d5f8e 100644
--- a/spec/frontend/alert_management/components/alert_management_sidebar_todo_spec.js
+++ b/spec/frontend/vue_shared/alert_details/alert_management_sidebar_todo_spec.js
@@ -1,8 +1,8 @@
import { mount } from '@vue/test-utils';
-import SidebarTodo from '~/alert_management/components/sidebar/sidebar_todo.vue';
-import createAlertTodoMutation from '~/alert_management/graphql/mutations/alert_todo_create.mutation.graphql';
+import SidebarTodo from '~/vue_shared/alert_details/components/sidebar/sidebar_todo.vue';
+import createAlertTodoMutation from '~/vue_shared/alert_details/graphql/mutations/alert_todo_create.mutation.graphql';
import todoMarkDoneMutation from '~/graphql_shared/mutations/todo_mark_done.mutation.graphql';
-import mockAlerts from '../mocks/alerts.json';
+import mockAlerts from './mocks/alerts.json';
const mockAlert = mockAlerts[0];
@@ -59,7 +59,7 @@ describe('Alert Details Sidebar To Do', () => {
it('renders a button for adding a To-Do', async () => {
await wrapper.vm.$nextTick();
- expect(findToDoButton().text()).toBe('Add a To-Do');
+ expect(findToDoButton().text()).toBe('Add a to do');
});
it('calls `$apollo.mutate` with `createAlertTodoMutation` mutation and variables containing `iid`, `todoEvent`, & `projectPath`', async () => {
diff --git a/spec/frontend/alert_management/components/alert_metrics_spec.js b/spec/frontend/vue_shared/alert_details/alert_metrics_spec.js
index 42da8c3768b..c75909651d7 100644
--- a/spec/frontend/alert_management/components/alert_metrics_spec.js
+++ b/spec/frontend/vue_shared/alert_details/alert_metrics_spec.js
@@ -1,8 +1,8 @@
import { shallowMount } from '@vue/test-utils';
-import waitForPromises from 'helpers/wait_for_promises';
import MockAdapter from 'axios-mock-adapter';
import axios from 'axios';
-import AlertMetrics from '~/alert_management/components/alert_metrics.vue';
+import waitForPromises from 'helpers/wait_for_promises';
+import AlertMetrics from '~/vue_shared/alert_details/components/alert_metrics.vue';
import MetricEmbed from '~/monitoring/components/embeds/metric_embed.vue';
jest.mock('~/monitoring/stores', () => ({
diff --git a/spec/frontend/alert_management/components/alert_status_spec.js b/spec/frontend/vue_shared/alert_details/alert_status_spec.js
index 6f2ddb86020..b37518cc424 100644
--- a/spec/frontend/alert_management/components/alert_status_spec.js
+++ b/spec/frontend/vue_shared/alert_details/alert_status_spec.js
@@ -1,11 +1,10 @@
import { shallowMount } from '@vue/test-utils';
import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
import waitForPromises from 'helpers/wait_for_promises';
-import { trackAlertStatusUpdateOptions } from '~/alert_management/constants';
-import AlertManagementStatus from '~/alert_management/components/alert_status.vue';
-import updateAlertStatusMutation from '~/graphql_shared/mutations/update_alert_status.mutation.graphql';
+import AlertManagementStatus from '~/vue_shared/alert_details/components/alert_status.vue';
+import updateAlertStatusMutation from '~/graphql_shared/mutations/alert_status_update.mutation.graphql';
import Tracking from '~/tracking';
-import mockAlerts from '../mocks/alerts.json';
+import mockAlerts from './mocks/alerts.json';
const mockAlert = mockAlerts[0];
@@ -20,7 +19,7 @@ describe('AlertManagementStatus', () => {
return waitForPromises();
};
- function mountComponent({ props = {}, loading = false, stubs = {} } = {}) {
+ function mountComponent({ props = {}, provide = {}, loading = false, stubs = {} } = {}) {
wrapper = shallowMount(AlertManagementStatus, {
propsData: {
alert: { ...mockAlert },
@@ -28,6 +27,7 @@ describe('AlertManagementStatus', () => {
isSidebar: false,
...props,
},
+ provide,
mocks: {
$apollo: {
mutate: jest.fn(),
@@ -134,10 +134,25 @@ describe('AlertManagementStatus', () => {
describe('Snowplow tracking', () => {
beforeEach(() => {
jest.spyOn(Tracking, 'event');
+ });
+
+ it('should not track alert status updates when the tracking options do not exist', () => {
mountComponent({});
+ Tracking.event.mockClear();
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue({});
+ findFirstStatusOption().vm.$emit('click');
+ setImmediate(() => {
+ expect(Tracking.event).not.toHaveBeenCalled();
+ });
});
- it('should track alert status updates', () => {
+ it('should track alert status updates when the tracking options exist', () => {
+ const trackAlertStatusUpdateOptions = {
+ category: 'Alert Management',
+ action: 'update_alert_status',
+ label: 'Status',
+ };
+ mountComponent({ provide: { trackAlertStatusUpdateOptions } });
Tracking.event.mockClear();
jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue({});
findFirstStatusOption().vm.$emit('click');
diff --git a/spec/frontend/alert_management/components/alert_summary_row_spec.js b/spec/frontend/vue_shared/alert_details/alert_summary_row_spec.js
index 47c715c089a..a2981478954 100644
--- a/spec/frontend/alert_management/components/alert_summary_row_spec.js
+++ b/spec/frontend/vue_shared/alert_details/alert_summary_row_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import AlertSummaryRow from '~/alert_management/components/alert_summary_row.vue';
+import AlertSummaryRow from '~/vue_shared/alert_details/components/alert_summary_row.vue';
const label = 'a label';
const value = 'a value';
diff --git a/spec/frontend/alert_management/mocks/alerts.json b/spec/frontend/vue_shared/alert_details/mocks/alerts.json
index 5267a4fe50d..5267a4fe50d 100644
--- a/spec/frontend/alert_management/mocks/alerts.json
+++ b/spec/frontend/vue_shared/alert_details/mocks/alerts.json
diff --git a/spec/frontend/alert_management/components/sidebar/alert_managment_sidebar_assignees_spec.js b/spec/frontend/vue_shared/alert_details/sidebar/alert_managment_sidebar_assignees_spec.js
index 00c479071fe..4c7f86a40ee 100644
--- a/spec/frontend/alert_management/components/sidebar/alert_managment_sidebar_assignees_spec.js
+++ b/spec/frontend/vue_shared/alert_details/sidebar/alert_managment_sidebar_assignees_spec.js
@@ -2,10 +2,10 @@ import { shallowMount } from '@vue/test-utils';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { GlDropdownItem } from '@gitlab/ui';
-import SidebarAssignee from '~/alert_management/components/sidebar/sidebar_assignee.vue';
-import SidebarAssignees from '~/alert_management/components/sidebar/sidebar_assignees.vue';
-import AlertSetAssignees from '~/alert_management/graphql/mutations/alert_set_assignees.mutation.graphql';
-import mockAlerts from '../../mocks/alerts.json';
+import SidebarAssignee from '~/vue_shared/alert_details/components/sidebar/sidebar_assignee.vue';
+import SidebarAssignees from '~/vue_shared/alert_details/components/sidebar/sidebar_assignees.vue';
+import AlertSetAssignees from '~/vue_shared/alert_details/graphql/mutations/alert_set_assignees.mutation.graphql';
+import mockAlerts from '../mocks/alerts.json';
const mockAlert = mockAlerts[0];
diff --git a/spec/frontend/alert_management/components/sidebar/alert_sidebar_spec.js b/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_spec.js
index 5235ae63fee..c2df37821d3 100644
--- a/spec/frontend/alert_management/components/sidebar/alert_sidebar_spec.js
+++ b/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_spec.js
@@ -1,9 +1,9 @@
import { shallowMount, mount } from '@vue/test-utils';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
-import AlertSidebar from '~/alert_management/components/alert_sidebar.vue';
-import SidebarAssignees from '~/alert_management/components/sidebar/sidebar_assignees.vue';
-import mockAlerts from '../../mocks/alerts.json';
+import AlertSidebar from '~/vue_shared/alert_details/components/alert_sidebar.vue';
+import SidebarAssignees from '~/vue_shared/alert_details/components/sidebar/sidebar_assignees.vue';
+import mockAlerts from '../mocks/alerts.json';
const mockAlert = mockAlerts[0];
diff --git a/spec/frontend/alert_management/components/sidebar/alert_sidebar_status_spec.js b/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_status_spec.js
index 0b60a36cf54..95c9dac5034 100644
--- a/spec/frontend/alert_management/components/sidebar/alert_sidebar_status_spec.js
+++ b/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_status_spec.js
@@ -1,10 +1,8 @@
import { mount } from '@vue/test-utils';
import { GlDropdown, GlDropdownItem, GlLoadingIcon } from '@gitlab/ui';
-import { trackAlertStatusUpdateOptions } from '~/alert_management/constants';
-import AlertSidebarStatus from '~/alert_management/components/sidebar/sidebar_status.vue';
-import updateAlertStatusMutation from '~/graphql_shared/mutations/update_alert_status.mutation.graphql';
-import Tracking from '~/tracking';
-import mockAlerts from '../../mocks/alerts.json';
+import AlertSidebarStatus from '~/vue_shared/alert_details/components/sidebar/sidebar_status.vue';
+import updateAlertStatusMutation from '~/graphql_shared/mutations/alert_status_update.mutation.graphql';
+import mockAlerts from '../mocks/alerts.json';
const mockAlert = mockAlerts[0];
@@ -101,30 +99,5 @@ describe('Alert Details Sidebar Status', () => {
expect(wrapper.find('[data-testid="status"]').text()).toBe('Triggered');
});
});
-
- describe('Snowplow tracking', () => {
- beforeEach(() => {
- jest.spyOn(Tracking, 'event');
- mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alert: mockAlert },
- loading: false,
- });
- });
-
- it('should track alert status updates', () => {
- Tracking.event.mockClear();
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue({});
- findStatusDropdownItem().vm.$emit('click');
- const status = findStatusDropdownItem().text();
- setImmediate(() => {
- const { category, action, label } = trackAlertStatusUpdateOptions;
- expect(Tracking.event).toHaveBeenCalledWith(category, action, {
- label,
- property: status,
- });
- });
- });
- });
});
});
diff --git a/spec/frontend/alert_management/components/system_notes/alert_management_system_note_spec.js b/spec/frontend/vue_shared/alert_details/system_notes/alert_management_system_note_spec.js
index 65cfc600d76..90966482bb4 100644
--- a/spec/frontend/alert_management/components/system_notes/alert_management_system_note_spec.js
+++ b/spec/frontend/vue_shared/alert_details/system_notes/alert_management_system_note_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import { GlIcon } from '@gitlab/ui';
-import SystemNote from '~/alert_management/components/system_notes/system_note.vue';
-import mockAlerts from '../../mocks/alerts.json';
+import SystemNote from '~/vue_shared/alert_details/components/system_notes/system_note.vue';
+import mockAlerts from '../mocks/alerts.json';
const mockAlert = mockAlerts[1];
diff --git a/spec/frontend/vue_shared/components/color_picker/color_picker_spec.js b/spec/frontend/vue_shared/components/color_picker/color_picker_spec.js
index c8fe6c3131c..d30f36ec63c 100644
--- a/spec/frontend/vue_shared/components/color_picker/color_picker_spec.js
+++ b/spec/frontend/vue_shared/components/color_picker/color_picker_spec.js
@@ -13,6 +13,7 @@ describe('ColorPicker', () => {
};
const setColor = '#000000';
+ const invalidText = 'Please enter a valid hex (#RRGGBB or #RGB) color value';
const label = () => wrapper.find(GlFormGroup).attributes('label');
const colorPreview = () => wrapper.find('[data-testid="color-preview"]');
const colorPicker = () => wrapper.find(GlFormInput);
@@ -28,8 +29,6 @@ describe('ColorPicker', () => {
'#428BCA': 'Moderate blue',
'#44AD8E': 'Lime green',
};
-
- createComponent(shallowMount);
});
afterEach(() => {
@@ -38,6 +37,8 @@ describe('ColorPicker', () => {
describe('label', () => {
it('hides the label if the label is not passed', () => {
+ createComponent(shallowMount);
+
expect(label()).toBe('');
});
@@ -55,43 +56,37 @@ describe('ColorPicker', () => {
expect(colorPreview().attributes('style')).toBe(undefined);
expect(colorPicker().attributes('value')).toBe(undefined);
expect(colorInput().props('value')).toBe('');
+ expect(colorPreview().attributes('class')).toContain('gl-inset-border-1-gray-400');
});
it('has a color set on initialization', () => {
- createComponent(shallowMount, { setColor });
+ createComponent(mount, { value: setColor });
- expect(wrapper.vm.$data.selectedColor).toBe(setColor);
+ expect(colorInput().props('value')).toBe(setColor);
});
it('emits input event from component when a color is selected', async () => {
createComponent();
await colorInput().setValue(setColor);
- expect(wrapper.emitted().input[0]).toEqual([setColor]);
+ expect(wrapper.emitted().input[0]).toStrictEqual([setColor]);
});
it('trims spaces from submitted colors', async () => {
createComponent();
await colorInput().setValue(` ${setColor} `);
- expect(wrapper.vm.$data.selectedColor).toBe(setColor);
+ expect(wrapper.emitted().input[0]).toStrictEqual([setColor]);
+ expect(colorPreview().attributes('class')).toContain('gl-inset-border-1-gray-400');
+ expect(colorInput().attributes('class')).not.toContain('is-invalid');
});
- it('shows invalid feedback when an invalid color is used', async () => {
- createComponent();
- await colorInput().setValue('abcd');
-
- expect(invalidFeedback().text()).toBe(
- 'Please enter a valid hex (#RRGGBB or #RGB) color value',
- );
- expect(wrapper.emitted().input).toBe(undefined);
- });
-
- it('shows an invalid feedback border on the preview when an invalid color is used', async () => {
- createComponent();
- await colorInput().setValue('abcd');
+ it('shows invalid feedback when the state is marked as invalid', async () => {
+ createComponent(mount, { invalidFeedback: invalidText, state: false });
+ expect(invalidFeedback().text()).toBe(invalidText);
expect(colorPreview().attributes('class')).toContain('gl-inset-border-1-red-500');
+ expect(colorInput().attributes('class')).toContain('is-invalid');
});
});
@@ -100,14 +95,14 @@ describe('ColorPicker', () => {
createComponent();
await colorInput().setValue(setColor);
- expect(wrapper.vm.$data.selectedColor).toBe(setColor);
+ expect(wrapper.emitted().input[0]).toStrictEqual([setColor]);
});
it('has color picker value entered', async () => {
createComponent();
await colorPicker().setValue(setColor);
- expect(wrapper.vm.$data.selectedColor).toBe(setColor);
+ expect(wrapper.emitted().input[0]).toStrictEqual([setColor]);
});
});
@@ -132,7 +127,7 @@ describe('ColorPicker', () => {
createComponent();
await presetColors().at(0).trigger('click');
- expect(wrapper.vm.$data.selectedColor).toBe(setColor);
+ expect(wrapper.emitted().input[0]).toStrictEqual([setColor]);
});
});
});
diff --git a/spec/frontend/vue_shared/components/editor_lite_spec.js b/spec/frontend/vue_shared/components/editor_lite_spec.js
index 70fdd8e24a5..9298851d143 100644
--- a/spec/frontend/vue_shared/components/editor_lite_spec.js
+++ b/spec/frontend/vue_shared/components/editor_lite_spec.js
@@ -2,6 +2,7 @@ import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import EditorLite from '~/vue_shared/components/editor_lite.vue';
import Editor from '~/editor/editor_lite';
+import { EDITOR_READY_EVENT } from '~/editor/constants';
jest.mock('~/editor/editor_lite');
@@ -110,13 +111,13 @@ describe('Editor Lite component', () => {
expect(wrapper.emitted().input).toEqual([[value]]);
});
- it('emits editor-ready event when the Editor Lite is ready', async () => {
+ it('emits EDITOR_READY_EVENT event when the Editor Lite is ready', async () => {
const el = wrapper.find({ ref: 'editor' }).element;
- expect(wrapper.emitted()['editor-ready']).toBeUndefined();
+ expect(wrapper.emitted()[EDITOR_READY_EVENT]).toBeUndefined();
- await el.dispatchEvent(new Event('editor-ready'));
+ await el.dispatchEvent(new Event(EDITOR_READY_EVENT));
- expect(wrapper.emitted()['editor-ready']).toBeDefined();
+ expect(wrapper.emitted()[EDITOR_READY_EVENT]).toBeDefined();
});
it('component API `getEditor()` returns the editor instance', () => {
diff --git a/spec/frontend/vue_shared/components/file_row_spec.js b/spec/frontend/vue_shared/components/file_row_spec.js
index bd6a18bf704..92690f6fc37 100644
--- a/spec/frontend/vue_shared/components/file_row_spec.js
+++ b/spec/frontend/vue_shared/components/file_row_spec.js
@@ -1,6 +1,6 @@
-import { file } from 'jest/ide/helpers';
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
+import { file } from 'jest/ide/helpers';
import FileRow from '~/vue_shared/components/file_row.vue';
import FileHeader from '~/vue_shared/components/file_row_header.vue';
import FileIcon from '~/vue_shared/components/file_icon.vue';
diff --git a/spec/frontend/vue_shared/components/gfm_autocomplete/__snapshots__/utils_spec.js.snap b/spec/frontend/vue_shared/components/gfm_autocomplete/__snapshots__/utils_spec.js.snap
index d8e6e37bb89..5f55502567b 100644
--- a/spec/frontend/vue_shared/components/gfm_autocomplete/__snapshots__/utils_spec.js.snap
+++ b/spec/frontend/vue_shared/components/gfm_autocomplete/__snapshots__/utils_spec.js.snap
@@ -21,10 +21,10 @@ exports[`gfm_autocomplete/utils labels config shows the title in the menu item 1
exports[`gfm_autocomplete/utils members config shows an avatar character, name, parent name, and count in the menu item for a group 1`] = `
"
<div class=\\"gl-display-flex gl-align-items-center\\">
- <div class=\\"gl-avatar gl-avatar-s24 gl-flex-shrink-0 gl-rounded-small
+ <div class=\\"gl-avatar gl-avatar-s32 gl-flex-shrink-0 gl-rounded-small
gl-display-flex gl-align-items-center gl-justify-content-center\\" aria-hidden=\\"true\\">
G</div>
- <div class=\\"gl-font-sm gl-line-height-normal gl-ml-3\\">
+ <div class=\\"gl-line-height-normal gl-ml-4\\">
<div>1-1s &lt;script&gt;alert(&#39;hi&#39;)&lt;/script&gt; (2)</div>
<div class=\\"gl-text-gray-700\\">GitLab Support Team</div>
</div>
@@ -36,8 +36,8 @@ exports[`gfm_autocomplete/utils members config shows an avatar character, name,
exports[`gfm_autocomplete/utils members config shows the avatar, name and username in the menu item for a user 1`] = `
"
<div class=\\"gl-display-flex gl-align-items-center\\">
- <img class=\\"gl-avatar gl-avatar-s24 gl-flex-shrink-0 gl-avatar-circle\\" src=\\"/uploads/-/system/user/avatar/123456/avatar.png\\" alt=\\"\\" />
- <div class=\\"gl-font-sm gl-line-height-normal gl-ml-3\\">
+ <img class=\\"gl-avatar gl-avatar-s32 gl-flex-shrink-0 gl-avatar-circle\\" src=\\"/uploads/-/system/user/avatar/123456/avatar.png\\" alt=\\"\\" />
+ <div class=\\"gl-line-height-normal gl-ml-4\\">
<div>My Name &lt;script&gt;alert(&#39;hi&#39;)&lt;/script&gt;</div>
<div class=\\"gl-text-gray-700\\">@myusername</div>
</div>
diff --git a/spec/frontend/vue_shared/components/gl_countdown_spec.js b/spec/frontend/vue_shared/components/gl_countdown_spec.js
index fcc5c0cd310..82d18c7fd3f 100644
--- a/spec/frontend/vue_shared/components/gl_countdown_spec.js
+++ b/spec/frontend/vue_shared/components/gl_countdown_spec.js
@@ -1,5 +1,5 @@
-import mountComponent from 'helpers/vue_mount_component_helper';
import Vue from 'vue';
+import mountComponent from 'helpers/vue_mount_component_helper';
import GlCountdown from '~/vue_shared/components/gl_countdown.vue';
describe('GlCountdown', () => {
diff --git a/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js b/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js
index 6802499ed52..81518e67377 100644
--- a/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js
+++ b/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js
@@ -3,6 +3,7 @@ import Vuex from 'vuex';
import { GlModal } from '@gitlab/ui';
import GlModalVuex from '~/vue_shared/components/gl_modal_vuex.vue';
import createState from '~/vuex_shared/modules/modal/state';
+import { BV_SHOW_MODAL, BV_HIDE_MODAL } from '~/lib/utils/constants';
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -129,7 +130,7 @@ describe('GlModalVuex', () => {
wrapper.vm
.$nextTick()
.then(() => {
- expect(rootEmit).toHaveBeenCalledWith('bv::show::modal', TEST_MODAL_ID);
+ expect(rootEmit).toHaveBeenCalledWith(BV_SHOW_MODAL, TEST_MODAL_ID);
})
.then(done)
.catch(done.fail);
@@ -146,7 +147,7 @@ describe('GlModalVuex', () => {
wrapper.vm
.$nextTick()
.then(() => {
- expect(rootEmit).toHaveBeenCalledWith('bv::hide::modal', TEST_MODAL_ID);
+ expect(rootEmit).toHaveBeenCalledWith(BV_HIDE_MODAL, TEST_MODAL_ID);
})
.then(done)
.catch(done.fail);
diff --git a/spec/frontend/vue_shared/components/help_popover_spec.js b/spec/frontend/vue_shared/components/help_popover_spec.js
new file mode 100644
index 00000000000..112085e91e1
--- /dev/null
+++ b/spec/frontend/vue_shared/components/help_popover_spec.js
@@ -0,0 +1,65 @@
+import { mount } from '@vue/test-utils';
+import { GlButton, GlPopover } from '@gitlab/ui';
+import HelpPopover from '~/vue_shared/components/help_popover.vue';
+
+describe('HelpPopover', () => {
+ let wrapper;
+ const title = 'popover <strong>title</strong>';
+ const content = 'popover <b>content</b>';
+
+ const findQuestionButton = () => wrapper.find(GlButton);
+ const findPopover = () => wrapper.find(GlPopover);
+ const buildWrapper = (options = {}) => {
+ wrapper = mount(HelpPopover, {
+ propsData: {
+ options: {
+ title,
+ content,
+ ...options,
+ },
+ },
+ });
+ };
+
+ beforeEach(() => {
+ buildWrapper();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('renders a link button with an icon question', () => {
+ expect(findQuestionButton().props()).toMatchObject({
+ icon: 'question',
+ variant: 'link',
+ });
+ expect(findQuestionButton().attributes().tabindex).toBe('0');
+ });
+
+ it('renders popover that uses the question button as target', () => {
+ expect(findPopover().props().target()).toBe(findQuestionButton().vm.$el);
+ });
+
+ it('triggers popover on hover and focus', () => {
+ expect(findPopover().props().triggers).toBe('hover focus');
+ });
+
+ it('allows rendering title with HTML tags', () => {
+ expect(findPopover().find('strong').exists()).toBe(true);
+ });
+
+ it('allows rendering content with HTML tags', () => {
+ expect(findPopover().find('b').exists()).toBe(true);
+ });
+
+ it('binds other popover options to the popover instance', () => {
+ const placement = 'bottom';
+
+ wrapper.destroy();
+ buildWrapper({ placement });
+
+ expect(findPopover().props().placement).toBe(placement);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/issue/issue_milestone_spec.js b/spec/frontend/vue_shared/components/issue/issue_milestone_spec.js
index ffcb891c4fc..8143196c6e8 100644
--- a/spec/frontend/vue_shared/components/issue/issue_milestone_spec.js
+++ b/spec/frontend/vue_shared/components/issue/issue_milestone_spec.js
@@ -1,8 +1,8 @@
import Vue from 'vue';
import { shallowMount } from '@vue/test-utils';
-import { mockMilestone } from 'jest/boards/mock_data';
import { GlIcon } from '@gitlab/ui';
+import { mockMilestone } from 'jest/boards/mock_data';
import IssueMilestone from '~/vue_shared/components/issue/issue_milestone.vue';
const createComponent = (milestone = mockMilestone) => {
diff --git a/spec/frontend/vue_shared/components/markdown/field_spec.js b/spec/frontend/vue_shared/components/markdown/field_spec.js
index a2ce6f40193..97cc22e1418 100644
--- a/spec/frontend/vue_shared/components/markdown/field_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/field_spec.js
@@ -1,7 +1,7 @@
import { mount } from '@vue/test-utils';
-import { TEST_HOST, FIXTURES_PATH } from 'spec/test_constants';
import AxiosMockAdapter from 'axios-mock-adapter';
import $ from 'jquery';
+import { TEST_HOST, FIXTURES_PATH } from 'spec/test_constants';
import MarkdownField from '~/vue_shared/components/markdown/field.vue';
import axios from '~/lib/utils/axios_utils';
diff --git a/spec/frontend/vue_shared/components/modal_copy_button_spec.js b/spec/frontend/vue_shared/components/modal_copy_button_spec.js
index ca9f8ff54d4..97d4313b89e 100644
--- a/spec/frontend/vue_shared/components/modal_copy_button_spec.js
+++ b/spec/frontend/vue_shared/components/modal_copy_button_spec.js
@@ -1,5 +1,6 @@
import { shallowMount, createWrapper } from '@vue/test-utils';
import ModalCopyButton from '~/vue_shared/components/modal_copy_button.vue';
+import { BV_HIDE_TOOLTIP } from '~/lib/utils/constants';
describe('modal copy button', () => {
let wrapper;
@@ -31,7 +32,7 @@ describe('modal copy button', () => {
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.emitted().success).not.toBeEmpty();
expect(document.execCommand).toHaveBeenCalledWith('copy');
- expect(root.emitted('bv::hide::tooltip')).toEqual([['test-id']]);
+ expect(root.emitted(BV_HIDE_TOOLTIP)).toEqual([['test-id']]);
});
});
it("should propagate the clipboard error event if execCommand doesn't work", () => {
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/toolbar_item_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/toolbar_item_spec.js
index 0e6f951bd53..594b1ab9d78 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/toolbar_item_spec.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/toolbar_item_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
-import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { GlIcon } from '@gitlab/ui';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import ToolbarItem from '~/vue_shared/components/rich_content_editor/toolbar_item.vue';
describe('Toolbar Item', () => {
diff --git a/spec/frontend/vue_shared/components/runner_instructions/mock_data.js b/spec/frontend/vue_shared/components/runner_instructions/mock_data.js
new file mode 100644
index 00000000000..01f7f3d49c7
--- /dev/null
+++ b/spec/frontend/vue_shared/components/runner_instructions/mock_data.js
@@ -0,0 +1,107 @@
+export const mockGraphqlRunnerPlatforms = {
+ data: {
+ runnerPlatforms: {
+ nodes: [
+ {
+ name: 'linux',
+ humanReadableName: 'Linux',
+ architectures: {
+ nodes: [
+ {
+ name: 'amd64',
+ downloadLocation:
+ 'https://gitlab-runner-downloads.s3.amazonaws.com/latest/binaries/gitlab-runner-linux-amd64',
+ __typename: 'RunnerArchitecture',
+ },
+ {
+ name: '386',
+ downloadLocation:
+ 'https://gitlab-runner-downloads.s3.amazonaws.com/latest/binaries/gitlab-runner-linux-386',
+ __typename: 'RunnerArchitecture',
+ },
+ {
+ name: 'arm',
+ downloadLocation:
+ 'https://gitlab-runner-downloads.s3.amazonaws.com/latest/binaries/gitlab-runner-linux-arm',
+ __typename: 'RunnerArchitecture',
+ },
+ {
+ name: 'arm64',
+ downloadLocation:
+ 'https://gitlab-runner-downloads.s3.amazonaws.com/latest/binaries/gitlab-runner-linux-arm64',
+ __typename: 'RunnerArchitecture',
+ },
+ ],
+ __typename: 'RunnerArchitectureConnection',
+ },
+ __typename: 'RunnerPlatform',
+ },
+ {
+ name: 'osx',
+ humanReadableName: 'macOS',
+ architectures: {
+ nodes: [
+ {
+ name: 'amd64',
+ downloadLocation:
+ 'https://gitlab-runner-downloads.s3.amazonaws.com/latest/binaries/gitlab-runner-darwin-amd64',
+ __typename: 'RunnerArchitecture',
+ },
+ ],
+ __typename: 'RunnerArchitectureConnection',
+ },
+ __typename: 'RunnerPlatform',
+ },
+ {
+ name: 'windows',
+ humanReadableName: 'Windows',
+ architectures: {
+ nodes: [
+ {
+ name: 'amd64',
+ downloadLocation:
+ 'https://gitlab-runner-downloads.s3.amazonaws.com/latest/binaries/gitlab-runner-windows-amd64.exe',
+ __typename: 'RunnerArchitecture',
+ },
+ {
+ name: '386',
+ downloadLocation:
+ 'https://gitlab-runner-downloads.s3.amazonaws.com/latest/binaries/gitlab-runner-windows-386.exe',
+ __typename: 'RunnerArchitecture',
+ },
+ ],
+ __typename: 'RunnerArchitectureConnection',
+ },
+ __typename: 'RunnerPlatform',
+ },
+ {
+ name: 'docker',
+ humanReadableName: 'Docker',
+ architectures: null,
+ __typename: 'RunnerPlatform',
+ },
+ {
+ name: 'kubernetes',
+ humanReadableName: 'Kubernetes',
+ architectures: null,
+ __typename: 'RunnerPlatform',
+ },
+ ],
+ __typename: 'RunnerPlatformConnection',
+ },
+ project: { id: 'gid://gitlab/Project/1', __typename: 'Project' },
+ group: null,
+ },
+};
+
+export const mockGraphqlInstructions = {
+ data: {
+ runnerSetup: {
+ installInstructions:
+ "# Download the binary for your system\nsudo curl -L --output /usr/local/bin/gitlab-runner https://gitlab-runner-downloads.s3.amazonaws.com/latest/binaries/gitlab-runner-linux-amd64\n\n# Give it permissions to execute\nsudo chmod +x /usr/local/bin/gitlab-runner\n\n# Create a GitLab CI user\nsudo useradd --comment 'GitLab Runner' --create-home gitlab-runner --shell /bin/bash\n\n# Install and run as service\nsudo gitlab-runner install --user=gitlab-runner --working-directory=/home/gitlab-runner\nsudo gitlab-runner start\n",
+ registerInstructions:
+ 'sudo gitlab-runner register --url http://192.168.1.81:3000/ --registration-token GE5gsjeep_HAtBf9s3Yz',
+ __typename: 'RunnerSetup',
+ },
+ },
+};
diff --git a/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_spec.js b/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_spec.js
new file mode 100644
index 00000000000..6e2ba603e04
--- /dev/null
+++ b/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_spec.js
@@ -0,0 +1,113 @@
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import RunnerInstructions from '~/vue_shared/components/runner_instructions/runner_instructions.vue';
+import getRunnerPlatforms from '~/vue_shared/components/runner_instructions/graphql/queries/get_runner_platforms.query.graphql';
+import getRunnerSetupInstructions from '~/vue_shared/components/runner_instructions/graphql/queries/get_runner_setup.query.graphql';
+
+import { mockGraphqlRunnerPlatforms, mockGraphqlInstructions } from './mock_data';
+
+const projectPath = 'gitlab-org/gitlab';
+const localVue = createLocalVue();
+localVue.use(VueApollo);
+
+describe('RunnerInstructions component', () => {
+ let wrapper;
+ let fakeApollo;
+
+ const findModalButton = () => wrapper.find('[data-testid="show-modal-button"]');
+ const findPlatformButtons = () => wrapper.findAll('[data-testid="platform-button"]');
+ const findArchitectureDropdownItems = () =>
+ wrapper.findAll('[data-testid="architecture-dropdown-item"]');
+ const findBinaryInstructionsSection = () => wrapper.find('[data-testid="binary-instructions"]');
+ const findRunnerInstructionsSection = () => wrapper.find('[data-testid="runner-instructions"]');
+
+ beforeEach(async () => {
+ const requestHandlers = [
+ [getRunnerPlatforms, jest.fn().mockResolvedValue(mockGraphqlRunnerPlatforms)],
+ [getRunnerSetupInstructions, jest.fn().mockResolvedValue(mockGraphqlInstructions)],
+ ];
+
+ fakeApollo = createMockApollo(requestHandlers);
+
+ wrapper = shallowMount(RunnerInstructions, {
+ provide: {
+ projectPath,
+ },
+ localVue,
+ apolloProvider: fakeApollo,
+ });
+
+ await wrapper.vm.$nextTick();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('should show the "Show Runner installation instructions" button', () => {
+ const button = findModalButton();
+
+ expect(button.exists()).toBe(true);
+ expect(button.text()).toBe('Show Runner installation instructions');
+ });
+
+ it('should contain a number of platform buttons', () => {
+ const buttons = findPlatformButtons();
+
+ expect(buttons).toHaveLength(mockGraphqlRunnerPlatforms.data.runnerPlatforms.nodes.length);
+ });
+
+ it('should contain a number of dropdown items for the architecture options', () => {
+ const platformButton = findPlatformButtons().at(0);
+ platformButton.vm.$emit('click');
+
+ return wrapper.vm.$nextTick(() => {
+ const dropdownItems = findArchitectureDropdownItems();
+
+ expect(dropdownItems).toHaveLength(
+ mockGraphqlRunnerPlatforms.data.runnerPlatforms.nodes[0].architectures.nodes.length,
+ );
+ });
+ });
+
+ it('should display the binary installation instructions for a selected architecture', async () => {
+ const platformButton = findPlatformButtons().at(0);
+ platformButton.vm.$emit('click');
+
+ await wrapper.vm.$nextTick();
+
+ const dropdownItem = findArchitectureDropdownItems().at(0);
+ dropdownItem.vm.$emit('click');
+
+ await wrapper.vm.$nextTick();
+
+ const runner = findBinaryInstructionsSection();
+
+ expect(runner.text()).toMatch('sudo chmod +x /usr/local/bin/gitlab-runner');
+ expect(runner.text()).toMatch(
+ `sudo useradd --comment 'GitLab Runner' --create-home gitlab-runner --shell /bin/bash`,
+ );
+ expect(runner.text()).toMatch(
+ 'sudo gitlab-runner install --user=gitlab-runner --working-directory=/home/gitlab-runner',
+ );
+ expect(runner.text()).toMatch('sudo gitlab-runner start');
+ });
+
+ it('should display the runner register instructions for a selected architecture', async () => {
+ const platformButton = findPlatformButtons().at(0);
+ platformButton.vm.$emit('click');
+
+ await wrapper.vm.$nextTick();
+
+ const dropdownItem = findArchitectureDropdownItems().at(0);
+ dropdownItem.vm.$emit('click');
+
+ await wrapper.vm.$nextTick();
+
+ const runner = findRunnerInstructionsSection();
+
+ expect(runner.text()).toMatch(mockGraphqlInstructions.data.runnerSetup.registerInstructions);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/settings/__snapshots__/settings_block_spec.js.snap b/spec/frontend/vue_shared/components/settings/__snapshots__/settings_block_spec.js.snap
new file mode 100644
index 00000000000..51b8aa162bc
--- /dev/null
+++ b/spec/frontend/vue_shared/components/settings/__snapshots__/settings_block_spec.js.snap
@@ -0,0 +1,43 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Settings Block renders the correct markup 1`] = `
+<section
+ class="settings no-animate"
+>
+ <div
+ class="settings-header"
+ >
+ <h4>
+ <div
+ data-testid="title-slot"
+ />
+ </h4>
+
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
+ icon=""
+ size="medium"
+ variant="default"
+ >
+
+ Expand
+
+ </gl-button-stub>
+
+ <p>
+ <div
+ data-testid="description-slot"
+ />
+ </p>
+ </div>
+
+ <div
+ class="settings-content"
+ >
+ <div
+ data-testid="default-slot"
+ />
+ </div>
+</section>
+`;
diff --git a/spec/frontend/vue_shared/components/settings/settings_block_spec.js b/spec/frontend/vue_shared/components/settings/settings_block_spec.js
new file mode 100644
index 00000000000..b550c4cfcc3
--- /dev/null
+++ b/spec/frontend/vue_shared/components/settings/settings_block_spec.js
@@ -0,0 +1,86 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
+import component from '~/vue_shared/components/settings/settings_block.vue';
+
+describe('Settings Block', () => {
+ let wrapper;
+
+ const mountComponent = (propsData) => {
+ wrapper = shallowMount(component, {
+ propsData,
+ slots: {
+ title: '<div data-testid="title-slot"></div>',
+ description: '<div data-testid="description-slot"></div>',
+ default: '<div data-testid="default-slot"></div>',
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findDefaultSlot = () => wrapper.find('[data-testid="default-slot"]');
+ const findTitleSlot = () => wrapper.find('[data-testid="title-slot"]');
+ const findDescriptionSlot = () => wrapper.find('[data-testid="description-slot"]');
+ const findExpandButton = () => wrapper.find(GlButton);
+
+ it('renders the correct markup', () => {
+ mountComponent();
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('has a default slot', () => {
+ mountComponent();
+
+ expect(findDefaultSlot().exists()).toBe(true);
+ });
+
+ it('has a title slot', () => {
+ mountComponent();
+
+ expect(findTitleSlot().exists()).toBe(true);
+ });
+
+ it('has a description slot', () => {
+ mountComponent();
+
+ expect(findDescriptionSlot().exists()).toBe(true);
+ });
+
+ describe('expanded behaviour', () => {
+ it('is collapsed by default', () => {
+ mountComponent();
+
+ expect(wrapper.classes('expanded')).toBe(false);
+ });
+
+ it('adds expanded class when the expand button is clicked', async () => {
+ mountComponent();
+
+ expect(wrapper.classes('expanded')).toBe(false);
+ expect(findExpandButton().text()).toBe('Expand');
+
+ await findExpandButton().vm.$emit('click');
+
+ expect(wrapper.classes('expanded')).toBe(true);
+ expect(findExpandButton().text()).toBe('Collapse');
+ });
+
+ it('is expanded when `defaultExpanded` is true no matter what', async () => {
+ mountComponent({ defaultExpanded: true });
+
+ expect(wrapper.classes('expanded')).toBe(true);
+
+ await findExpandButton().vm.$emit('click');
+
+ expect(wrapper.classes('expanded')).toBe(true);
+
+ await findExpandButton().vm.$emit('click');
+
+ expect(wrapper.classes('expanded')).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/todo_button_spec.js b/spec/frontend/vue_shared/components/todo_button_spec.js
index 1f8a214d632..2066c3bd671 100644
--- a/spec/frontend/vue_shared/components/todo_button_spec.js
+++ b/spec/frontend/vue_shared/components/todo_button_spec.js
@@ -33,7 +33,7 @@ describe('Todo Button', () => {
it.each`
label | isTodo
${'Mark as done'} | ${true}
- ${'Add a To Do'} | ${false}
+ ${'Add a to do'} | ${false}
`('sets correct label when isTodo is $isTodo', ({ label, isTodo }) => {
createComponent({ isTodo });
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js
index d151cd15bc4..3eae707daf8 100644
--- a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js
@@ -1,7 +1,7 @@
import { each } from 'lodash';
-import { trimText } from 'helpers/text_helper';
import { shallowMount } from '@vue/test-utils';
import { GlLink } from '@gitlab/ui';
+import { trimText } from 'helpers/text_helper';
import { TEST_HOST } from 'spec/test_constants';
import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue';
import UserAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_image.vue';
diff --git a/spec/frontend/vue_shared/directives/track_event_spec.js b/spec/frontend/vue_shared/directives/track_event_spec.js
index 8d867c8e3fc..c42169440ed 100644
--- a/spec/frontend/vue_shared/directives/track_event_spec.js
+++ b/spec/frontend/vue_shared/directives/track_event_spec.js
@@ -5,7 +5,7 @@ import TrackEvent from '~/vue_shared/directives/track_event';
jest.mock('~/tracking');
-const Component = Vue.component('dummy-element', {
+const Component = Vue.component('DummyElement', {
directives: {
TrackEvent,
},
diff --git a/spec/frontend/vue_shared/security_reports/mock_data.js b/spec/frontend/vue_shared/security_reports/mock_data.js
index b3ff7daef2b..7918f70d702 100644
--- a/spec/frontend/vue_shared/security_reports/mock_data.js
+++ b/spec/frontend/vue_shared/security_reports/mock_data.js
@@ -322,6 +322,23 @@ export const secretScanningDiffSuccessMock = {
head_report_created_at: '2020-01-10T10:00:00.000Z',
};
+export const securityReportDownloadPathsQueryNoArtifactsResponse = {
+ project: {
+ mergeRequest: {
+ headPipeline: {
+ id: 'gid://gitlab/Ci::Pipeline/176',
+ jobs: {
+ nodes: [],
+ __typename: 'CiJobConnection',
+ },
+ __typename: 'Pipeline',
+ },
+ __typename: 'MergeRequest',
+ },
+ __typename: 'Project',
+ },
+};
+
export const securityReportDownloadPathsQueryResponse = {
project: {
mergeRequest: {
diff --git a/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js b/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
index 50d1d130675..bd66142ab9e 100644
--- a/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
+++ b/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
@@ -1,6 +1,7 @@
-import { mount, createLocalVue } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { merge } from 'lodash';
+import Vue from 'vue';
import VueApollo from 'vue-apollo';
import Vuex from 'vuex';
import createMockApollo from 'helpers/mock_apollo_helper';
@@ -8,11 +9,11 @@ import { trimText } from 'helpers/text_helper';
import waitForPromises from 'helpers/wait_for_promises';
import {
expectedDownloadDropdownProps,
+ securityReportDownloadPathsQueryNoArtifactsResponse,
securityReportDownloadPathsQueryResponse,
sastDiffSuccessMock,
secretScanningDiffSuccessMock,
} from 'jest/vue_shared/security_reports/mock_data';
-import Api from '~/api';
import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
import {
@@ -26,8 +27,8 @@ import securityReportDownloadPathsQuery from '~/vue_shared/security_reports/quer
jest.mock('~/flash');
-const localVue = createLocalVue();
-localVue.use(Vuex);
+Vue.use(VueApollo);
+Vue.use(Vuex);
const SAST_COMPARISON_PATH = '/sast.json';
const SECRET_SCANNING_COMPARISON_PATH = '/secret_detection.json';
@@ -47,7 +48,6 @@ describe('Security reports app', () => {
SecurityReportsApp,
merge(
{
- localVue,
propsData: { ...props },
stubs: {
HelpIcon: true,
@@ -60,187 +60,94 @@ describe('Security reports app', () => {
const pendingHandler = () => new Promise(() => {});
const successHandler = () => Promise.resolve({ data: securityReportDownloadPathsQueryResponse });
+ const successEmptyHandler = () =>
+ Promise.resolve({ data: securityReportDownloadPathsQueryNoArtifactsResponse });
const failureHandler = () => Promise.resolve({ errors: [{ message: 'some error' }] });
const createMockApolloProvider = (handler) => {
- localVue.use(VueApollo);
-
const requestHandlers = [[securityReportDownloadPathsQuery, handler]];
return createMockApollo(requestHandlers);
};
- const anyParams = expect.any(Object);
-
const findDownloadDropdown = () => wrapper.find(SecurityReportDownloadDropdown);
- const findPipelinesTabAnchor = () => wrapper.find('[data-testid="show-pipelines"]');
const findHelpIconComponent = () => wrapper.find(HelpIcon);
- const setupMockJobArtifact = (reportType) => {
- jest
- .spyOn(Api, 'pipelineJobs')
- .mockResolvedValue({ data: [{ artifacts: [{ file_type: reportType }] }] });
- };
- const expectPipelinesTabAnchor = () => {
- const mrTabsMock = { tabShown: jest.fn() };
- window.mrTabs = mrTabsMock;
- findPipelinesTabAnchor().trigger('click');
- expect(mrTabsMock.tabShown.mock.calls).toEqual([['pipelines']]);
- };
afterEach(() => {
wrapper.destroy();
- delete window.mrTabs;
});
- describe.each([false, true])(
- 'given the coreSecurityMrWidgetCounts feature flag is %p',
- (coreSecurityMrWidgetCounts) => {
- const createComponentWithFlag = (options) =>
- createComponent(
- merge(
- {
- provide: {
- glFeatures: {
- coreSecurityMrWidgetCounts,
- },
- },
- },
- options,
- ),
- );
-
- describe.each(SecurityReportsApp.reportTypes)('given a report type %p', (reportType) => {
- beforeEach(() => {
- window.mrTabs = { tabShown: jest.fn() };
- setupMockJobArtifact(reportType);
- createComponentWithFlag();
- return wrapper.vm.$nextTick();
- });
-
- it('calls the pipelineJobs API correctly', () => {
- expect(Api.pipelineJobs).toHaveBeenCalledTimes(1);
- expect(Api.pipelineJobs).toHaveBeenCalledWith(
- props.projectId,
- props.pipelineId,
- anyParams,
- );
- });
-
- it('renders the expected message', () => {
- expect(wrapper.text()).toMatchInterpolatedText(
- SecurityReportsApp.i18n.scansHaveRunWithDownloadGuidance,
- );
- });
-
- describe('clicking the anchor to the pipelines tab', () => {
- it('calls the mrTabs.tabShown global', () => {
- expectPipelinesTabAnchor();
- });
- });
-
- it('renders a help link', () => {
- expect(findHelpIconComponent().props()).toEqual({
- helpPath: props.securityReportsDocsPath,
- discoverProjectSecurityPath: props.discoverProjectSecurityPath,
- });
- });
+ describe('given the artifacts query is loading', () => {
+ beforeEach(() => {
+ createComponent({
+ apolloProvider: createMockApolloProvider(pendingHandler),
});
+ });
- describe('given a report type "foo"', () => {
- beforeEach(() => {
- setupMockJobArtifact('foo');
- createComponentWithFlag();
- return wrapper.vm.$nextTick();
- });
-
- it('calls the pipelineJobs API correctly', () => {
- expect(Api.pipelineJobs).toHaveBeenCalledTimes(1);
- expect(Api.pipelineJobs).toHaveBeenCalledWith(
- props.projectId,
- props.pipelineId,
- anyParams,
- );
- });
+ // TODO: Remove this assertion as part of
+ // https://gitlab.com/gitlab-org/gitlab/-/issues/273431
+ it('initially renders nothing', () => {
+ expect(wrapper.html()).toBe('');
+ });
+ });
- it('renders nothing', () => {
- expect(wrapper.html()).toBe('');
- });
+ describe('given the artifacts query loads successfully', () => {
+ beforeEach(() => {
+ createComponent({
+ apolloProvider: createMockApolloProvider(successHandler),
});
+ });
- describe('security artifacts on last page of multi-page response', () => {
- const numPages = 3;
-
- beforeEach(() => {
- jest
- .spyOn(Api, 'pipelineJobs')
- .mockImplementation(async (projectId, pipelineId, { page }) => {
- const requestedPage = parseInt(page, 10);
- if (requestedPage < numPages) {
- return {
- // Some jobs with no relevant artifacts
- data: [{}, {}],
- headers: { 'x-next-page': String(requestedPage + 1) },
- };
- } else if (requestedPage === numPages) {
- return {
- data: [{ artifacts: [{ file_type: SecurityReportsApp.reportTypes[0] }] }],
- };
- }
-
- throw new Error('Test failed due to request of non-existent jobs page');
- });
-
- createComponentWithFlag();
- return wrapper.vm.$nextTick();
- });
+ it('renders the download dropdown', () => {
+ expect(findDownloadDropdown().props()).toEqual(expectedDownloadDropdownProps);
+ });
- it('fetches all pages', () => {
- expect(Api.pipelineJobs).toHaveBeenCalledTimes(numPages);
- });
+ it('renders the expected message', () => {
+ expect(wrapper.text()).toContain(SecurityReportsApp.i18n.scansHaveRun);
+ });
- it('renders the expected message', () => {
- expect(wrapper.text()).toMatchInterpolatedText(
- SecurityReportsApp.i18n.scansHaveRunWithDownloadGuidance,
- );
- });
+ it('renders a help link', () => {
+ expect(findHelpIconComponent().props()).toEqual({
+ helpPath: props.securityReportsDocsPath,
+ discoverProjectSecurityPath: props.discoverProjectSecurityPath,
});
+ });
+ });
- describe('given an error from the API', () => {
- let error;
-
- beforeEach(() => {
- error = new Error('an error');
- jest.spyOn(Api, 'pipelineJobs').mockRejectedValue(error);
- createComponentWithFlag();
- return wrapper.vm.$nextTick();
- });
+ describe('given the artifacts query loads successfully with no artifacts', () => {
+ beforeEach(() => {
+ createComponent({
+ apolloProvider: createMockApolloProvider(successEmptyHandler),
+ });
+ });
- it('calls the pipelineJobs API correctly', () => {
- expect(Api.pipelineJobs).toHaveBeenCalledTimes(1);
- expect(Api.pipelineJobs).toHaveBeenCalledWith(
- props.projectId,
- props.pipelineId,
- anyParams,
- );
- });
+ // TODO: Remove this assertion as part of
+ // https://gitlab.com/gitlab-org/gitlab/-/issues/273431
+ it('initially renders nothing', () => {
+ expect(wrapper.html()).toBe('');
+ });
+ });
- it('renders nothing', () => {
- expect(wrapper.html()).toBe('');
- });
+ describe('given the artifacts query fails', () => {
+ beforeEach(() => {
+ createComponent({
+ apolloProvider: createMockApolloProvider(failureHandler),
+ });
+ });
- it('calls createFlash correctly', () => {
- expect(createFlash.mock.calls).toEqual([
- [
- {
- message: SecurityReportsApp.i18n.apiError,
- captureError: true,
- error,
- },
- ],
- ]);
- });
+ it('calls createFlash correctly', () => {
+ expect(createFlash).toHaveBeenCalledWith({
+ message: SecurityReportsApp.i18n.apiError,
+ captureError: true,
+ error: expect.any(Error),
});
- },
- );
+ });
+
+ // TODO: Remove this assertion as part of
+ // https://gitlab.com/gitlab-org/gitlab/-/issues/273431
+ it('renders nothing', () => {
+ expect(wrapper.html()).toBe('');
+ });
+ });
describe('given the coreSecurityMrWidgetCounts feature flag is enabled', () => {
let mock;
@@ -253,6 +160,7 @@ describe('Security reports app', () => {
coreSecurityMrWidgetCounts: true,
},
},
+ apolloProvider: createMockApolloProvider(successHandler),
}),
);
@@ -274,11 +182,7 @@ describe('Security reports app', () => {
${REPORT_TYPE_SECRET_DETECTION} | ${'secretScanningComparisonPath'} | ${SECRET_SCANNING_COMPARISON_PATH} | ${secretScanningDiffSuccessMock} | ${SECRET_SCANNING_SUCCESS_MESSAGE}
`(
'given a $pathProp and $reportType artifact',
- ({ reportType, pathProp, path, successResponse, successMessage }) => {
- beforeEach(() => {
- setupMockJobArtifact(reportType);
- });
-
+ ({ pathProp, path, successResponse, successMessage }) => {
describe('when loading', () => {
beforeEach(() => {
mock = new MockAdapter(axios, { delayResponse: 1 });
@@ -294,11 +198,11 @@ describe('Security reports app', () => {
});
it('should have loading message', () => {
- expect(wrapper.text()).toBe('Security scanning is loading');
+ expect(wrapper.text()).toContain('Security scanning is loading');
});
- it('should not render the pipeline tab anchor', () => {
- expect(findPipelinesTabAnchor().exists()).toBe(false);
+ it('renders the download dropdown', () => {
+ expect(findDownloadDropdown().props()).toEqual(expectedDownloadDropdownProps);
});
});
@@ -319,8 +223,8 @@ describe('Security reports app', () => {
expect(trimText(wrapper.text())).toContain(successMessage);
});
- it('should render the pipeline tab anchor', () => {
- expectPipelinesTabAnchor();
+ it('renders the download dropdown', () => {
+ expect(findDownloadDropdown().props()).toEqual(expectedDownloadDropdownProps);
});
});
@@ -341,125 +245,25 @@ describe('Security reports app', () => {
expect(trimText(wrapper.text())).toContain('Loading resulted in an error');
});
- it('should render the pipeline tab anchor', () => {
- expectPipelinesTabAnchor();
+ it('renders the download dropdown', () => {
+ expect(findDownloadDropdown().props()).toEqual(expectedDownloadDropdownProps);
});
});
- },
- );
- });
-
- describe('given coreSecurityMrWidgetDownloads feature flag is enabled', () => {
- const createComponentWithFlagEnabled = (options) =>
- createComponent(
- merge(options, {
- provide: {
- glFeatures: {
- coreSecurityMrWidgetDownloads: true,
- },
- },
- }),
- );
-
- describe('given the query is loading', () => {
- beforeEach(() => {
- createComponentWithFlagEnabled({
- apolloProvider: createMockApolloProvider(pendingHandler),
- });
- });
-
- // TODO: Remove this assertion as part of
- // https://gitlab.com/gitlab-org/gitlab/-/issues/273431
- it('initially renders nothing', () => {
- expect(wrapper.html()).toBe('');
- });
- });
- describe('given the query loads successfully', () => {
- beforeEach(() => {
- createComponentWithFlagEnabled({
- apolloProvider: createMockApolloProvider(successHandler),
- });
- });
-
- it('renders the download dropdown', () => {
- expect(findDownloadDropdown().props()).toEqual(expectedDownloadDropdownProps);
- });
-
- it('renders the expected message', () => {
- const text = wrapper.text();
- expect(text).not.toContain(SecurityReportsApp.i18n.scansHaveRunWithDownloadGuidance);
- expect(text).toContain(SecurityReportsApp.i18n.scansHaveRun);
- });
+ describe('when the comparison endpoint is not provided', () => {
+ beforeEach(() => {
+ mock.onGet(path).replyOnce(500);
- it('should not render the pipeline tab anchor', () => {
- expect(findPipelinesTabAnchor().exists()).toBe(false);
- });
- });
+ createComponentWithFlagEnabled();
- describe('given the query fails', () => {
- beforeEach(() => {
- createComponentWithFlagEnabled({
- apolloProvider: createMockApolloProvider(failureHandler),
- });
- });
+ return waitForPromises();
+ });
- it('calls createFlash correctly', () => {
- expect(createFlash).toHaveBeenCalledWith({
- message: SecurityReportsApp.i18n.apiError,
- captureError: true,
- error: expect.any(Error),
+ it('renders the basic scansHaveRun message', () => {
+ expect(wrapper.text()).toContain(SecurityReportsApp.i18n.scansHaveRun);
+ });
});
- });
-
- // TODO: Remove this assertion as part of
- // https://gitlab.com/gitlab-org/gitlab/-/issues/273431
- it('renders nothing', () => {
- expect(wrapper.html()).toBe('');
- });
- });
- });
-
- describe('given coreSecurityMrWidgetCounts and coreSecurityMrWidgetDownloads feature flags are enabled', () => {
- let mock;
-
- beforeEach(() => {
- mock = new MockAdapter(axios);
- mock.onGet(SAST_COMPARISON_PATH).replyOnce(200, sastDiffSuccessMock);
- mock.onGet(SECRET_SCANNING_COMPARISON_PATH).replyOnce(200, secretScanningDiffSuccessMock);
- createComponent({
- propsData: {
- sastComparisonPath: SAST_COMPARISON_PATH,
- secretScanningComparisonPath: SECRET_SCANNING_COMPARISON_PATH,
- },
- provide: {
- glFeatures: {
- coreSecurityMrWidgetCounts: true,
- coreSecurityMrWidgetDownloads: true,
- },
- },
- apolloProvider: createMockApolloProvider(successHandler),
- });
-
- return waitForPromises();
- });
-
- afterEach(() => {
- mock.restore();
- });
-
- it('renders the download dropdown', () => {
- expect(findDownloadDropdown().props()).toEqual(expectedDownloadDropdownProps);
- });
-
- it('renders the expected counts message', () => {
- expect(trimText(wrapper.text())).toContain(
- 'Security scanning detected 3 potential vulnerabilities 2 Critical 1 High and 0 Others',
- );
- });
-
- it('should not render the pipeline tab anchor', () => {
- expect(findPipelinesTabAnchor().exists()).toBe(false);
- });
+ },
+ );
});
});
diff --git a/spec/frontend/whats_new/store/actions_spec.js b/spec/frontend/whats_new/store/actions_spec.js
index 82f17a2726f..b5e0b2a0364 100644
--- a/spec/frontend/whats_new/store/actions_spec.js
+++ b/spec/frontend/whats_new/store/actions_spec.js
@@ -1,6 +1,6 @@
+import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
-import MockAdapter from 'axios-mock-adapter';
import waitForPromises from 'helpers/wait_for_promises';
import actions from '~/whats_new/store/actions';
import * as types from '~/whats_new/store/mutation_types';
diff --git a/spec/frontend_integration/ide/helpers/start.js b/spec/frontend_integration/ide/helpers/start.js
index 43a996286e7..ed698b40a93 100644
--- a/spec/frontend_integration/ide/helpers/start.js
+++ b/spec/frontend_integration/ide/helpers/start.js
@@ -1,8 +1,8 @@
import { TEST_HOST } from 'helpers/test_constants';
import extendStore from '~/ide/stores/extend';
-import { IDE_DATASET } from './mock_data';
import { initIde } from '~/ide';
import Editor from '~/ide/lib/editor';
+import { IDE_DATASET } from './mock_data';
export default (container, { isRepoEmpty = false, path = '' } = {}) => {
global.jsdom.reconfigure({
diff --git a/spec/frontend_integration/ide/user_opens_file_spec.js b/spec/frontend_integration/ide/user_opens_file_spec.js
index 7fa6dcecc9e..2cb3363ef85 100644
--- a/spec/frontend_integration/ide/user_opens_file_spec.js
+++ b/spec/frontend_integration/ide/user_opens_file_spec.js
@@ -1,5 +1,5 @@
-import { useOverclockTimers } from 'test_helpers/utils/overclock_timers';
import { screen } from '@testing-library/dom';
+import { useOverclockTimers } from 'test_helpers/utils/overclock_timers';
import * as ideHelper from './helpers/ide_helper';
import startWebIDE from './helpers/start';
diff --git a/spec/frontend_integration/ide/user_opens_ide_spec.js b/spec/frontend_integration/ide/user_opens_ide_spec.js
index 502cb2e2c7d..f56cd008d1c 100644
--- a/spec/frontend_integration/ide/user_opens_ide_spec.js
+++ b/spec/frontend_integration/ide/user_opens_ide_spec.js
@@ -1,5 +1,5 @@
-import { useOverclockTimers } from 'test_helpers/utils/overclock_timers';
import { screen } from '@testing-library/dom';
+import { useOverclockTimers } from 'test_helpers/utils/overclock_timers';
import * as ideHelper from './helpers/ide_helper';
import startWebIDE from './helpers/start';
diff --git a/spec/frontend_integration/test_helpers/fixtures.js b/spec/frontend_integration/test_helpers/fixtures.js
index fde3fd8cb63..4c0143534c5 100644
--- a/spec/frontend_integration/test_helpers/fixtures.js
+++ b/spec/frontend_integration/test_helpers/fixtures.js
@@ -1,4 +1,10 @@
-/* eslint-disable global-require, import/no-unresolved */
+/* eslint-disable global-require */
+// We use "require" rather than `fs` so that this works in a browser environment.
+
+/* eslint "import/no-unresolved": 0 */
+// We don't want to require *all* fixtures to be generated (especially in a local environment).
+// We use an `eslint` rule-configuration comment instead of an `eslint-disable` comment, so that we also don't trigger an `Unused eslint-disable directive` warning when all fixtures are present.
+
import { memoize } from 'lodash';
const createFactoryWithDefault = (fn, defaultValue) => () => {
diff --git a/spec/graphql/mutations/concerns/mutations/can_mutate_spammable_spec.rb b/spec/graphql/mutations/concerns/mutations/can_mutate_spammable_spec.rb
new file mode 100644
index 00000000000..ee8db7a1f31
--- /dev/null
+++ b/spec/graphql/mutations/concerns/mutations/can_mutate_spammable_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::CanMutateSpammable do
+ let(:mutation_class) do
+ Class.new(Mutations::BaseMutation) do
+ include Mutations::CanMutateSpammable
+ end
+ end
+
+ let(:request) { double(:request) }
+ let(:query) { double(:query, schema: GitlabSchema) }
+ let(:context) { GraphQL::Query::Context.new(query: query, object: nil, values: { request: request }) }
+
+ subject(:mutation) { mutation_class.new(object: nil, context: context, field: nil) }
+
+ describe '#additional_spam_params' do
+ it 'returns additional spam-related params' do
+ expect(subject.send(:additional_spam_params)).to eq({ api: true, request: request })
+ end
+ end
+
+ describe '#with_spam_action_fields' do
+ let(:spam_log) { double(:spam_log, id: 1) }
+ let(:spammable) { double(:spammable, spam?: true, render_recaptcha?: true, spam_log: spam_log) }
+
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:recaptcha_site_key) { 'abc123' }
+ end
+
+ it 'merges in spam action fields from spammable' do
+ result = subject.send(:with_spam_action_fields, spammable) do
+ { other_field: true }
+ end
+ expect(result)
+ .to eq({
+ spam: true,
+ needs_captcha_response: true,
+ spam_log_id: 1,
+ captcha_site_key: 'abc123',
+ other_field: true
+ })
+ end
+ end
+end
diff --git a/spec/graphql/mutations/security/ci_configuration/configure_sast_spec.rb b/spec/graphql/mutations/security/ci_configuration/configure_sast_spec.rb
new file mode 100644
index 00000000000..ed03a1cb906
--- /dev/null
+++ b/spec/graphql/mutations/security/ci_configuration/configure_sast_spec.rb
@@ -0,0 +1,120 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Security::CiConfiguration::ConfigureSast do
+ subject(:mutation) { described_class.new(object: nil, context: context, field: nil) }
+
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let_it_be(:service_result_json) do
+ {
+ status: "success",
+ success_path: "http://127.0.0.1:3000/root/demo-historic-secrets/-/merge_requests/new?",
+ errors: nil
+ }
+ end
+
+ let_it_be(:service_error_result_json) do
+ {
+ status: "error",
+ success_path: nil,
+ errors: %w(error1 error2)
+ }
+ end
+
+ let(:context) do
+ GraphQL::Query::Context.new(
+ query: OpenStruct.new(schema: nil),
+ values: { current_user: user },
+ object: nil
+ )
+ end
+
+ specify { expect(described_class).to require_graphql_authorizations(:push_code) }
+
+ describe '#resolve' do
+ subject { mutation.resolve(project_path: project.full_path, configuration: {}) }
+
+ let(:result) { subject }
+
+ it 'raises an error if the resource is not accessible to the user' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+
+ context 'when user does not have enough permissions' do
+ before do
+ project.add_guest(user)
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+
+ context 'when user is a maintainer of a different project' do
+ before do
+ create(:project_empty_repo).add_maintainer(user)
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+
+ context 'when the user does not have permission to create a new branch' do
+ before_all do
+ project.add_developer(user)
+ end
+
+ let(:error_message) { 'You are not allowed to create protected branches on this project.' }
+
+ it 'returns an array of errors' do
+ allow_next_instance_of(::Files::MultiService) do |multi_service|
+ allow(multi_service).to receive(:execute).and_raise(Gitlab::Git::PreReceiveError.new("GitLab: #{error_message}"))
+ end
+
+ expect(result).to match(
+ status: :error,
+ success_path: nil,
+ errors: match_array([error_message])
+ )
+ end
+ end
+
+ context 'when the user can create a merge request' do
+ before_all do
+ project.add_developer(user)
+ end
+
+ context 'when service successfully generates a path to create a new merge request' do
+ it 'returns a success path' do
+ allow_next_instance_of(::Security::CiConfiguration::SastCreateService) do |service|
+ allow(service).to receive(:execute).and_return(service_result_json)
+ end
+
+ expect(result).to match(
+ status: 'success',
+ success_path: service_result_json[:success_path],
+ errors: []
+ )
+ end
+ end
+
+ context 'when service cannot generate any path to create a new merge request' do
+ it 'returns an array of errors' do
+ allow_next_instance_of(::Security::CiConfiguration::SastCreateService) do |service|
+ allow(service).to receive(:execute).and_return(service_error_result_json)
+ end
+
+ expect(result).to match(
+ status: 'error',
+ success_path: be_nil,
+ errors: match_array(service_error_result_json[:errors])
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/base_resolver_spec.rb b/spec/graphql/resolvers/base_resolver_spec.rb
index 8a24b69eb6f..8d2ae238bfe 100644
--- a/spec/graphql/resolvers/base_resolver_spec.rb
+++ b/spec/graphql/resolvers/base_resolver_spec.rb
@@ -277,8 +277,8 @@ RSpec.describe Resolvers::BaseResolver do
describe '#offset_pagination' do
let(:instance) { resolver_instance(resolver) }
- it 'is sugar for OffsetActiveRecordRelationConnection.new' do
- expect(instance.offset_pagination(User.none)).to be_a(::Gitlab::Graphql::Pagination::OffsetActiveRecordRelationConnection)
+ it 'is sugar for OffsetPaginatedRelation.new' do
+ expect(instance.offset_pagination(User.none)).to be_a(::Gitlab::Graphql::Pagination::OffsetPaginatedRelation)
end
end
end
diff --git a/spec/graphql/resolvers/board_list_issues_resolver_spec.rb b/spec/graphql/resolvers/board_list_issues_resolver_spec.rb
index e7c56a526f4..5eda840854a 100644
--- a/spec/graphql/resolvers/board_list_issues_resolver_spec.rb
+++ b/spec/graphql/resolvers/board_list_issues_resolver_spec.rb
@@ -23,19 +23,19 @@ RSpec.describe Resolvers::BoardListIssuesResolver do
it 'returns the issues in the correct order' do
# by relative_position and then ID
- issues = resolve_board_list_issues.items
+ issues = resolve_board_list_issues
expect(issues.map(&:id)).to eq [issue3.id, issue1.id, issue2.id]
end
it 'finds only issues matching filters' do
- result = resolve_board_list_issues(args: { filters: { label_name: [label.title], not: { label_name: [label2.title] } } }).items
+ result = resolve_board_list_issues(args: { filters: { label_name: [label.title], not: { label_name: [label2.title] } } })
expect(result).to match_array([issue1, issue3])
end
it 'finds only issues matching search param' do
- result = resolve_board_list_issues(args: { filters: { search: issue1.title } }).items
+ result = resolve_board_list_issues(args: { filters: { search: issue1.title } })
expect(result).to match_array([issue1])
end
diff --git a/spec/graphql/resolvers/board_lists_resolver_spec.rb b/spec/graphql/resolvers/board_lists_resolver_spec.rb
index 71ebec4dc7e..fdcebd30bb3 100644
--- a/spec/graphql/resolvers/board_lists_resolver_spec.rb
+++ b/spec/graphql/resolvers/board_lists_resolver_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe Resolvers::BoardListsResolver do
end
it 'does not create the backlog list' do
- lists = resolve_board_lists.items
+ lists = resolve_board_lists
expect(lists.count).to eq 1
expect(lists[0].list_type).to eq 'closed'
@@ -38,7 +38,7 @@ RSpec.describe Resolvers::BoardListsResolver do
let!(:backlog_list) { create(:backlog_list, board: board) }
it 'returns a list of board lists' do
- lists = resolve_board_lists.items
+ lists = resolve_board_lists
expect(lists.count).to eq 3
expect(lists.map(&:list_type)).to eq %w(backlog label closed)
@@ -50,7 +50,7 @@ RSpec.describe Resolvers::BoardListsResolver do
end
it 'returns the complete list of board lists for this user' do
- lists = resolve_board_lists.items
+ lists = resolve_board_lists
expect(lists.count).to eq 3
end
@@ -58,7 +58,7 @@ RSpec.describe Resolvers::BoardListsResolver do
context 'when querying for a single list' do
it 'returns specified list' do
- list = resolve_board_lists(args: { id: global_id_of(label_list) }).items
+ list = resolve_board_lists(args: { id: global_id_of(label_list) })
expect(list).to eq [label_list]
end
@@ -69,13 +69,13 @@ RSpec.describe Resolvers::BoardListsResolver do
external_label = create(:group_label, group: group)
external_list = create(:list, board: external_board, label: external_label)
- list = resolve_board_lists(args: { id: global_id_of(external_list) }).items
+ list = resolve_board_lists(args: { id: global_id_of(external_list) })
expect(list).to eq List.none
end
it 'raises an argument error if list ID is not valid' do
- expect { resolve_board_lists(args: { id: 'test' }).items }
+ expect { resolve_board_lists(args: { id: 'test' }) }
.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
end
end
diff --git a/spec/graphql/resolvers/issues_resolver_spec.rb b/spec/graphql/resolvers/issues_resolver_spec.rb
index 269ee9eabf9..8980f4aa19d 100644
--- a/spec/graphql/resolvers/issues_resolver_spec.rb
+++ b/spec/graphql/resolvers/issues_resolver_spec.rb
@@ -195,11 +195,11 @@ RSpec.describe Resolvers::IssuesResolver do
let_it_be(:priority_issue4) { create(:issue, project: project) }
it 'sorts issues ascending' do
- expect(resolve_issues(sort: :priority_asc).items).to eq([priority_issue3, priority_issue1, priority_issue2, priority_issue4])
+ expect(resolve_issues(sort: :priority_asc).to_a).to eq([priority_issue3, priority_issue1, priority_issue2, priority_issue4])
end
it 'sorts issues descending' do
- expect(resolve_issues(sort: :priority_desc).items).to eq([priority_issue1, priority_issue3, priority_issue2, priority_issue4])
+ expect(resolve_issues(sort: :priority_desc).to_a).to eq([priority_issue1, priority_issue3, priority_issue2, priority_issue4])
end
end
@@ -214,11 +214,11 @@ RSpec.describe Resolvers::IssuesResolver do
let_it_be(:label_issue4) { create(:issue, project: project) }
it 'sorts issues ascending' do
- expect(resolve_issues(sort: :label_priority_asc).items).to eq([label_issue3, label_issue1, label_issue2, label_issue4])
+ expect(resolve_issues(sort: :label_priority_asc).to_a).to eq([label_issue3, label_issue1, label_issue2, label_issue4])
end
it 'sorts issues descending' do
- expect(resolve_issues(sort: :label_priority_desc).items).to eq([label_issue2, label_issue3, label_issue1, label_issue4])
+ expect(resolve_issues(sort: :label_priority_desc).to_a).to eq([label_issue2, label_issue3, label_issue1, label_issue4])
end
end
@@ -231,11 +231,11 @@ RSpec.describe Resolvers::IssuesResolver do
let_it_be(:milestone_issue3) { create(:issue, project: project, milestone: late_milestone) }
it 'sorts issues ascending' do
- expect(resolve_issues(sort: :milestone_due_asc).items).to eq([milestone_issue2, milestone_issue3, milestone_issue1])
+ expect(resolve_issues(sort: :milestone_due_asc).to_a).to eq([milestone_issue2, milestone_issue3, milestone_issue1])
end
it 'sorts issues descending' do
- expect(resolve_issues(sort: :milestone_due_desc).items).to eq([milestone_issue3, milestone_issue2, milestone_issue1])
+ expect(resolve_issues(sort: :milestone_due_desc).to_a).to eq([milestone_issue3, milestone_issue2, milestone_issue1])
end
end
diff --git a/spec/graphql/resolvers/merge_requests_resolver_spec.rb b/spec/graphql/resolvers/merge_requests_resolver_spec.rb
index 50b9243efa5..c5c368fc88f 100644
--- a/spec/graphql/resolvers/merge_requests_resolver_spec.rb
+++ b/spec/graphql/resolvers/merge_requests_resolver_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Resolvers::MergeRequestsResolver do
include GraphqlHelpers
+ include SortingHelper
let_it_be(:project) { create(:project, :repository) }
let_it_be(:milestone) { create(:milestone, project: project) }
@@ -30,6 +31,16 @@ RSpec.describe Resolvers::MergeRequestsResolver do
end
describe '#resolve' do
+ # One for the initial auth, then MRs, and the load of project and project_feature (for further auth):
+ # SELECT MAX("project_authorizations"."access_level") AS maximum_access_level,
+ # "project_authorizations"."user_id" AS project_authorizations_user_id
+ # FROM "project_authorizations"
+ # WHERE "project_authorizations"."project_id" = 2 AND "project_authorizations"."user_id" = 2
+ # GROUP BY "project_authorizations"."user_id"
+ # SELECT "merge_requests".* FROM "merge_requests" WHERE "merge_requests"."target_project_id" = 2
+ # AND "merge_requests"."iid" = 1 ORDER BY "merge_requests"."id" DESC
+ # SELECT "projects".* FROM "projects" WHERE "projects"."id" = 2
+ # SELECT "project_features".* FROM "project_features" WHERE "project_features"."project_id" = 2
let(:queries_per_project) { 3 }
context 'no arguments' do
@@ -72,15 +83,17 @@ RSpec.describe Resolvers::MergeRequestsResolver do
expect(result).to contain_exactly(merge_request_1, merge_request_2, merge_request_3)
end
- it 'can batch-resolve merge requests from different projects' do
+ it 'can batch-resolve merge requests from different projects', :request_store, :use_clean_rails_memory_store_caching do
# 2 queries for project_authorizations, and 2 for merge_requests
- result = batch_sync(max_queries: queries_per_project * 2) do
- resolve_mr(project, iids: [iid_1]) +
- resolve_mr(project, iids: [iid_2]) +
- resolve_mr(other_project, iids: [other_iid])
+ results = batch_sync(max_queries: queries_per_project * 2) do
+ a = resolve_mr(project, iids: [iid_1])
+ b = resolve_mr(project, iids: [iid_2])
+ c = resolve_mr(other_project, iids: [other_iid])
+
+ [a, b, c].flat_map(&:to_a)
end
- expect(result).to contain_exactly(merge_request_1, merge_request_2, other_merge_request)
+ expect(results).to contain_exactly(merge_request_1, merge_request_2, other_merge_request)
end
it 'resolves an unknown iid to be empty' do
@@ -134,9 +147,9 @@ RSpec.describe Resolvers::MergeRequestsResolver do
it 'takes more than one argument' do
mrs = [merge_request_3, merge_request_4]
branches = mrs.map(&:target_branch)
- result = resolve_mr(project, target_branches: branches )
+ result = resolve_mr(project, target_branches: branches)
- expect(result.compact).to match_array(mrs)
+ expect(result).to match_array(mrs)
end
end
@@ -173,7 +186,7 @@ RSpec.describe Resolvers::MergeRequestsResolver do
it 'returns merge requests merged between the given period' do
result = resolve_mr(project, merged_after: 20.days.ago, merged_before: 5.days.ago)
- expect(result).to eq([merge_request_1])
+ expect(result).to contain_exactly(merge_request_1)
end
it 'does not return anything' do
@@ -187,7 +200,7 @@ RSpec.describe Resolvers::MergeRequestsResolver do
it 'filters merge requests by milestone title' do
result = resolve_mr(project, milestone_title: milestone.title)
- expect(result).to eq([merge_request_with_milestone])
+ expect(result).to contain_exactly(merge_request_with_milestone)
end
it 'does not find anything' do
@@ -203,18 +216,29 @@ RSpec.describe Resolvers::MergeRequestsResolver do
result = resolve_mr(project, source_branches: [merge_request_4.source_branch], state: 'locked')
- expect(result.compact).to contain_exactly(merge_request_4)
+ expect(result).to contain_exactly(merge_request_4)
end
end
describe 'sorting' do
+ let(:mrs) do
+ [
+ merge_request_with_milestone, merge_request_6, merge_request_5, merge_request_4,
+ merge_request_3, merge_request_2, merge_request_1
+ ]
+ end
+
context 'when sorting by created' do
it 'sorts merge requests ascending' do
- expect(resolve_mr(project, sort: 'created_asc')).to eq [merge_request_1, merge_request_2, merge_request_3, merge_request_4, merge_request_5, merge_request_6, merge_request_with_milestone]
+ expect(resolve_mr(project, sort: 'created_asc'))
+ .to match_array(mrs)
+ .and be_sorted(:created_at, :asc)
end
it 'sorts merge requests descending' do
- expect(resolve_mr(project, sort: 'created_desc')).to eq [merge_request_with_milestone, merge_request_6, merge_request_5, merge_request_4, merge_request_3, merge_request_2, merge_request_1]
+ expect(resolve_mr(project, sort: 'created_desc'))
+ .to match_array(mrs)
+ .and be_sorted(:created_at, :desc)
end
end
@@ -225,11 +249,19 @@ RSpec.describe Resolvers::MergeRequestsResolver do
end
it 'sorts merge requests ascending' do
- expect(resolve_mr(project, sort: :merged_at_asc)).to eq [merge_request_1, merge_request_3, merge_request_with_milestone, merge_request_6, merge_request_5, merge_request_4, merge_request_2]
+ expect(resolve_mr(project, sort: :merged_at_asc))
+ .to match_array(mrs)
+ .and be_sorted(->(mr) { [merged_at(mr), -mr.id] })
end
it 'sorts merge requests descending' do
- expect(resolve_mr(project, sort: :merged_at_desc)).to eq [merge_request_3, merge_request_1, merge_request_with_milestone, merge_request_6, merge_request_5, merge_request_4, merge_request_2]
+ expect(resolve_mr(project, sort: :merged_at_desc))
+ .to match_array(mrs)
+ .and be_sorted(->(mr) { [-merged_at(mr), -mr.id] })
+ end
+
+ def merged_at(mr)
+ nils_last(mr.metrics.merged_at)
end
context 'when label filter is given and the optimized_issuable_label_filter feature flag is off' do
diff --git a/spec/graphql/resolvers/package_details_resolver_spec.rb b/spec/graphql/resolvers/package_details_resolver_spec.rb
index 825b2aed40a..1bdc069b3bb 100644
--- a/spec/graphql/resolvers/package_details_resolver_spec.rb
+++ b/spec/graphql/resolvers/package_details_resolver_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Resolvers::PackageDetailsResolver do
include GraphqlHelpers
+ include ::Gitlab::Graphql::Laziness
let_it_be_with_reload(:project) { create(:project) }
let_it_be(:user) { project.owner }
@@ -11,10 +12,10 @@ RSpec.describe Resolvers::PackageDetailsResolver do
describe '#resolve' do
let(:args) do
- { id: package.to_global_id.to_s }
+ { id: global_id_of(package) }
end
- subject { resolve(described_class, ctx: { current_user: user }, args: args).sync }
+ subject { force(resolve(described_class, ctx: { current_user: user }, args: args)) }
it { is_expected.to eq(package) }
end
diff --git a/spec/graphql/resolvers/release_milestones_resolver_spec.rb b/spec/graphql/resolvers/release_milestones_resolver_spec.rb
index 5f66cba859d..b737aacc864 100644
--- a/spec/graphql/resolvers/release_milestones_resolver_spec.rb
+++ b/spec/graphql/resolvers/release_milestones_resolver_spec.rb
@@ -12,12 +12,12 @@ RSpec.describe Resolvers::ReleaseMilestonesResolver do
end
describe '#resolve' do
- it "returns an OffsetActiveRecordRelationConnection" do
- expect(resolved).to be_a(::Gitlab::Graphql::Pagination::OffsetActiveRecordRelationConnection)
+ it "uses offset-pagination" do
+ expect(resolved).to be_a(::Gitlab::Graphql::Pagination::OffsetPaginatedRelation)
end
it "includes the release's milestones in the returned OffsetActiveRecordRelationConnection" do
- expect(resolved.items).to eq(release.milestones.order_by_dates_and_title)
+ expect(resolved.to_a).to eq(release.milestones.order_by_dates_and_title)
end
end
end
diff --git a/spec/graphql/resolvers/terraform/states_resolver_spec.rb b/spec/graphql/resolvers/terraform/states_resolver_spec.rb
index 64b515528cd..91d48cd782b 100644
--- a/spec/graphql/resolvers/terraform/states_resolver_spec.rb
+++ b/spec/graphql/resolvers/terraform/states_resolver_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Resolvers::Terraform::StatesResolver do
include GraphqlHelpers
- it { expect(described_class.type).to eq(Types::Terraform::StateType) }
+ it { expect(described_class).to have_nullable_graphql_type(Types::Terraform::StateType.connection_type) }
it { expect(described_class.null).to be_truthy }
describe '#resolve' do
@@ -31,3 +31,21 @@ RSpec.describe Resolvers::Terraform::StatesResolver do
end
end
end
+
+RSpec.describe Resolvers::Terraform::StatesResolver.single do
+ it { expect(described_class).to be < Resolvers::Terraform::StatesResolver }
+
+ describe 'arguments' do
+ subject { described_class.arguments[argument] }
+
+ describe 'name' do
+ let(:argument) { 'name' }
+
+ it do
+ expect(subject).to be_present
+ expect(subject.type.to_s).to eq('String!')
+ expect(subject.description).to be_present
+ end
+ end
+ end
+end
diff --git a/spec/graphql/types/ci_configuration/sast/analyzers_entity_input_type_spec.rb b/spec/graphql/types/ci_configuration/sast/analyzers_entity_input_type_spec.rb
new file mode 100644
index 00000000000..ac18f8d53a1
--- /dev/null
+++ b/spec/graphql/types/ci_configuration/sast/analyzers_entity_input_type_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Types::CiConfiguration::Sast::AnalyzersEntityInputType do
+ it { expect(described_class.graphql_name).to eq('SastCiConfigurationAnalyzersEntityInput') }
+
+ it { expect(described_class.arguments.keys).to match_array(%w[enabled name variables]) }
+end
diff --git a/spec/graphql/types/ci_configuration/sast/analyzers_entity_type_spec.rb b/spec/graphql/types/ci_configuration/sast/analyzers_entity_type_spec.rb
new file mode 100644
index 00000000000..27f6703b429
--- /dev/null
+++ b/spec/graphql/types/ci_configuration/sast/analyzers_entity_type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['SastCiConfigurationAnalyzersEntity'] do
+ let(:fields) { %i[name label enabled description variables] }
+
+ it { expect(described_class.graphql_name).to eq('SastCiConfigurationAnalyzersEntity') }
+
+ it { expect(described_class).to have_graphql_fields(fields) }
+end
diff --git a/spec/graphql/types/ci_configuration/sast/entity_input_type_spec.rb b/spec/graphql/types/ci_configuration/sast/entity_input_type_spec.rb
new file mode 100644
index 00000000000..cefcf64164a
--- /dev/null
+++ b/spec/graphql/types/ci_configuration/sast/entity_input_type_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Types::CiConfiguration::Sast::EntityInputType do
+ it { expect(described_class.graphql_name).to eq('SastCiConfigurationEntityInput') }
+
+ it { expect(described_class.arguments.keys).to match_array(%w[field defaultValue value]) }
+end
diff --git a/spec/graphql/types/ci_configuration/sast/entity_type_spec.rb b/spec/graphql/types/ci_configuration/sast/entity_type_spec.rb
new file mode 100644
index 00000000000..762798670a5
--- /dev/null
+++ b/spec/graphql/types/ci_configuration/sast/entity_type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['SastCiConfigurationEntity'] do
+ let(:fields) { %i[field label description type options default_value value size] }
+
+ it { expect(described_class.graphql_name).to eq('SastCiConfigurationEntity') }
+
+ it { expect(described_class).to have_graphql_fields(fields) }
+end
diff --git a/spec/graphql/types/ci_configuration/sast/input_type_spec.rb b/spec/graphql/types/ci_configuration/sast/input_type_spec.rb
new file mode 100644
index 00000000000..9f9d1dea98f
--- /dev/null
+++ b/spec/graphql/types/ci_configuration/sast/input_type_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Types::CiConfiguration::Sast::InputType do
+ it { expect(described_class.graphql_name).to eq('SastCiConfigurationInput') }
+
+ it { expect(described_class.arguments.keys).to match_array(%w[global pipeline analyzers]) }
+end
diff --git a/spec/graphql/types/ci_configuration/sast/options_entity_spec.rb b/spec/graphql/types/ci_configuration/sast/options_entity_spec.rb
new file mode 100644
index 00000000000..c60c8b9c84a
--- /dev/null
+++ b/spec/graphql/types/ci_configuration/sast/options_entity_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['SastCiConfigurationOptionsEntity'] do
+ let(:fields) { %i[label value] }
+
+ it { expect(described_class.graphql_name).to eq('SastCiConfigurationOptionsEntity') }
+
+ it { expect(described_class).to have_graphql_fields(fields) }
+end
diff --git a/spec/graphql/types/ci_configuration/sast/type_spec.rb b/spec/graphql/types/ci_configuration/sast/type_spec.rb
new file mode 100644
index 00000000000..e7a8cd436e4
--- /dev/null
+++ b/spec/graphql/types/ci_configuration/sast/type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['SastCiConfiguration'] do
+ let(:fields) { %i[global pipeline analyzers] }
+
+ it { expect(described_class.graphql_name).to eq('SastCiConfiguration') }
+
+ it { expect(described_class).to have_graphql_fields(fields) }
+end
diff --git a/spec/graphql/types/ci_configuration/sast/ui_component_size_enum_spec.rb b/spec/graphql/types/ci_configuration/sast/ui_component_size_enum_spec.rb
new file mode 100644
index 00000000000..23184df809f
--- /dev/null
+++ b/spec/graphql/types/ci_configuration/sast/ui_component_size_enum_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::CiConfiguration::Sast::UiComponentSizeEnum do
+ specify { expect(described_class.graphql_name).to eq('SastUiComponentSize') }
+
+ it 'exposes all sizes of ui components' do
+ expect(described_class.values.keys).to include(*%w[SMALL MEDIUM LARGE])
+ end
+end
diff --git a/spec/graphql/types/packages/composer/details_type_spec.rb b/spec/graphql/types/packages/composer/details_type_spec.rb
deleted file mode 100644
index 2e4cb965ded..00000000000
--- a/spec/graphql/types/packages/composer/details_type_spec.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe GitlabSchema.types['PackageComposerDetails'] do
- it { expect(described_class.graphql_name).to eq('PackageComposerDetails') }
-
- it 'includes all the package fields' do
- expected_fields = %w[
- id name version created_at updated_at package_type tags project pipelines versions
- ]
-
- expect(described_class).to include_graphql_fields(*expected_fields)
- end
-
- it 'includes composer specific files' do
- expected_fields = %w[
- composer_metadatum
- ]
-
- expect(described_class).to include_graphql_fields(*expected_fields)
- end
-end
diff --git a/spec/graphql/types/packages/composer/metadatum_type_spec.rb b/spec/graphql/types/packages/composer/metadatum_type_spec.rb
index 0f47d8f1812..a950c10a41d 100644
--- a/spec/graphql/types/packages/composer/metadatum_type_spec.rb
+++ b/spec/graphql/types/packages/composer/metadatum_type_spec.rb
@@ -2,9 +2,7 @@
require 'spec_helper'
-RSpec.describe GitlabSchema.types['PackageComposerMetadatumType'] do
- it { expect(described_class.graphql_name).to eq('PackageComposerMetadatumType') }
-
+RSpec.describe GitlabSchema.types['ComposerMetadata'] do
it 'includes composer metadatum fields' do
expected_fields = %w[
target_sha composer_json
diff --git a/spec/graphql/types/packages/package_type_spec.rb b/spec/graphql/types/packages/package_type_spec.rb
index 7003a4d4d07..43289a019b3 100644
--- a/spec/graphql/types/packages/package_type_spec.rb
+++ b/spec/graphql/types/packages/package_type_spec.rb
@@ -3,11 +3,12 @@
require 'spec_helper'
RSpec.describe GitlabSchema.types['Package'] do
- it { expect(described_class.graphql_name).to eq('Package') }
-
it 'includes all the package fields' do
expected_fields = %w[
- id name version created_at updated_at package_type tags project pipelines versions
+ id name version package_type
+ created_at updated_at
+ project
+ tags pipelines versions
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/packages/package_without_versions_type_spec.rb b/spec/graphql/types/packages/package_without_versions_type_spec.rb
new file mode 100644
index 00000000000..faa79e588d5
--- /dev/null
+++ b/spec/graphql/types/packages/package_without_versions_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['PackageWithoutVersions'] do
+ it 'includes all the package fields' do
+ expected_fields = %w[
+ id name version created_at updated_at package_type tags project pipelines
+ ]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb
index 9d0d7a3918a..95c835773e1 100644
--- a/spec/graphql/types/project_type_spec.rb
+++ b/spec/graphql/types/project_type_spec.rb
@@ -31,12 +31,171 @@ RSpec.describe GitlabSchema.types['Project'] do
container_expiration_policy service_desk_enabled service_desk_address
issue_status_counts terraform_states alert_management_integrations
container_repositories container_repositories_count
- pipeline_analytics squash_read_only
+ pipeline_analytics squash_read_only sast_ci_configuration
]
expect(described_class).to include_graphql_fields(*expected_fields)
end
+ describe 'sast_ci_configuration' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ before do
+ stub_licensed_features(security_dashboard: true)
+ project.add_developer(user)
+ allow(project.repository).to receive(:blob_data_at).and_return(gitlab_ci_yml_content)
+ end
+
+ include_context 'read ci configuration for sast enabled project'
+
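+ # GraphQL query that reads the project's sastCiConfiguration: global and
+ # pipeline variables plus the analyzer entries exposed by the type.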
+ let(:query) do
+ %(
+ query {
+ project(fullPath: "#{project.full_path}") {
+ sastCiConfiguration {
+ global {
+ nodes {
+ type
+ options {
+ nodes {
+ label
+ value
+ }
+ }
+ field
+ label
+ defaultValue
+ value
+ size
+ }
+ }
+ pipeline {
+ nodes {
+ type
+ options {
+ nodes {
+ label
+ value
+ }
+ }
+ field
+ label
+ defaultValue
+ value
+ size
+ }
+ }
+ analyzers {
+ nodes {
+ name
+ label
+ enabled
+ }
+ }
+ }
+ }
+ }
+ )
+ end
+
+ subject { GitlabSchema.execute(query, context: { current_user: user }).as_json }
+
+ it "returns the project's sast configuration for global variables" do
+ secure_analyzers_prefix = subject.dig('data', 'project', 'sastCiConfiguration', 'global', 'nodes').first
+ expect(secure_analyzers_prefix['type']).to eq('string')
+ expect(secure_analyzers_prefix['field']).to eq('SECURE_ANALYZERS_PREFIX')
+ expect(secure_analyzers_prefix['label']).to eq('Image prefix')
+ expect(secure_analyzers_prefix['defaultValue']).to eq('registry.gitlab.com/gitlab-org/security-products/analyzers')
+ expect(secure_analyzers_prefix['value']).to eq('registry.gitlab.com/gitlab-org/security-products/analyzers')
+ expect(secure_analyzers_prefix['size']).to eq('LARGE')
+ expect(secure_analyzers_prefix['options']).to be_nil
+ end
+
+ it "returns the project's sast configuration for pipeline variables" do
+ pipeline_stage = subject.dig('data', 'project', 'sastCiConfiguration', 'pipeline', 'nodes').first
+ expect(pipeline_stage['type']).to eq('string')
+ expect(pipeline_stage['field']).to eq('stage')
+ expect(pipeline_stage['label']).to eq('Stage')
+ expect(pipeline_stage['defaultValue']).to eq('test')
+ expect(pipeline_stage['value']).to eq('test')
+ expect(pipeline_stage['size']).to eq('MEDIUM')
+ end
+
+ it "returns the project's sast configuration for analyzer variables" do
+ analyzer = subject.dig('data', 'project', 'sastCiConfiguration', 'analyzers', 'nodes').first
+ expect(analyzer['name']).to eq('brakeman')
+ expect(analyzer['label']).to eq('Brakeman')
+ expect(analyzer['enabled']).to eq(true)
+ end
+
+ context "with guest user" do
+ before do
+ project.add_guest(user)
+ end
+
+ context 'when project is private' do
+ let(:project) { create(:project, :private, :repository) }
+
+ it "returns no configuration" do
+ secure_analyzers_prefix = subject.dig('data', 'project', 'sastCiConfiguration')
+ expect(secure_analyzers_prefix).to be_nil
+ end
+ end
+
+ context 'when project is public' do
+ let(:project) { create(:project, :public, :repository) }
+
+ context 'when repository is accessible by everyone' do
+ it "returns the project's sast configuration for global variables" do
+ secure_analyzers_prefix = subject.dig('data', 'project', 'sastCiConfiguration', 'global', 'nodes').first
+
+ expect(secure_analyzers_prefix['type']).to eq('string')
+ expect(secure_analyzers_prefix['field']).to eq('SECURE_ANALYZERS_PREFIX')
+ end
+ end
+ end
+ end
+
+ context "with non-member user" do
+ before do
+ project.team.truncate
+ end
+
+ context 'when project is private' do
+ let(:project) { create(:project, :private, :repository) }
+
+ it "returns no configuration" do
+ secure_analyzers_prefix = subject.dig('data', 'project', 'sastCiConfiguration')
+ expect(secure_analyzers_prefix).to be_nil
+ end
+ end
+
+ context 'when project is public' do
+ let(:project) { create(:project, :public, :repository) }
+
+ context 'when repository is accessible by everyone' do
+ it "returns the project's sast configuration for global variables" do
+ secure_analyzers_prefix = subject.dig('data', 'project', 'sastCiConfiguration', 'global', 'nodes').first
+ expect(secure_analyzers_prefix['type']).to eq('string')
+ expect(secure_analyzers_prefix['field']).to eq('SECURE_ANALYZERS_PREFIX')
+ end
+ end
+
+ context 'when repository is accessible only by team members' do
+ it "returns no configuration" do
+ project.project_feature.update!(merge_requests_access_level: ProjectFeature::DISABLED,
+ builds_access_level: ProjectFeature::DISABLED,
+ repository_access_level: ProjectFeature::PRIVATE)
+
+ secure_analyzers_prefix = subject.dig('data', 'project', 'sastCiConfiguration')
+ expect(secure_analyzers_prefix).to be_nil
+ end
+ end
+ end
+ end
+ end
+
describe 'issue field' do
subject { described_class.fields['issue'] }
@@ -159,6 +318,13 @@ RSpec.describe GitlabSchema.types['Project'] do
it { is_expected.to have_graphql_type(Types::ContainerExpirationPolicyType) }
end
+ describe 'terraform state field' do
+ subject { described_class.fields['terraformState'] }
+
+ it { is_expected.to have_graphql_type(Types::Terraform::StateType) }
+ it { is_expected.to have_graphql_resolver(Resolvers::Terraform::StatesResolver.single) }
+ end
+
describe 'terraform states field' do
subject { described_class.fields['terraformStates'] }
diff --git a/spec/graphql/types/query_type_spec.rb b/spec/graphql/types/query_type_spec.rb
index 3e716865e56..fea0a3bd37e 100644
--- a/spec/graphql/types/query_type_spec.rb
+++ b/spec/graphql/types/query_type_spec.rb
@@ -95,9 +95,9 @@ RSpec.describe GitlabSchema.types['Query'] do
it { is_expected.to have_graphql_type(Types::ContainerRepositoryDetailsType) }
end
- describe 'package_composer_details field' do
- subject { described_class.fields['packageComposerDetails'] }
+ describe 'package field' do
+ subject { described_class.fields['package'] }
- it { is_expected.to have_graphql_type(Types::Packages::Composer::DetailsType) }
+ it { is_expected.to have_graphql_type(Types::Packages::PackageType) }
end
end
diff --git a/spec/helpers/commits_helper_spec.rb b/spec/helpers/commits_helper_spec.rb
index 8a570bf9a90..45485a12574 100644
--- a/spec/helpers/commits_helper_spec.rb
+++ b/spec/helpers/commits_helper_spec.rb
@@ -7,19 +7,38 @@ RSpec.describe CommitsHelper do
context 'when current_user exists' do
before do
allow(helper).to receive(:current_user).and_return(double('User'))
- allow(helper).to receive(:can_collaborate_with_project?).and_return(true)
end
it 'renders a div for Vue' do
- result = helper.revert_commit_link('_commit_', '_path_', pajamas: true)
+ result = helper.revert_commit_link
expect(result).to include('js-revert-commit-trigger')
end
+ end
+
+ context 'when current_user does not exist' do
+ before do
+ allow(helper).to receive(:current_user).and_return(nil)
+ end
+
+ it 'does not render anything' do
+ result = helper.revert_commit_link
- it 'does not render a div for Vue' do
- result = helper.revert_commit_link('_commit_', '_path_')
+ expect(result).to be_nil
+ end
+ end
+ end
+
+ describe '#cherry_pick_commit_link' do
+ context 'when current_user exists' do
+ before do
+ allow(helper).to receive(:current_user).and_return(double('User'))
+ end
+
+ it 'renders a div for Vue' do
+ result = helper.cherry_pick_commit_link
- expect(result).not_to include('js-revert-commit-trigger')
+ expect(result).to include('js-cherry-pick-commit-trigger')
end
end
@@ -29,7 +48,7 @@ RSpec.describe CommitsHelper do
end
it 'does not render anything' do
- result = helper.revert_commit_link(double('Commit'), '_path_')
+ result = helper.cherry_pick_commit_link
expect(result).to be_nil
end
diff --git a/spec/helpers/container_registry_helper_spec.rb b/spec/helpers/container_registry_helper_spec.rb
index 6e6e8137b3e..49e56113dd8 100644
--- a/spec/helpers/container_registry_helper_spec.rb
+++ b/spec/helpers/container_registry_helper_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
RSpec.describe ContainerRegistryHelper do
using RSpec::Parameterized::TableSyntax
- describe '#limit_delete_tags_service?' do
- subject { helper.limit_delete_tags_service? }
+ describe '#container_registry_expiration_policies_throttling?' do
+ subject { helper.container_registry_expiration_policies_throttling? }
where(:feature_flag_enabled, :client_support, :expected_result) do
true | true | true
diff --git a/spec/helpers/diff_helper_spec.rb b/spec/helpers/diff_helper_spec.rb
index 3580959fde0..4bc12aef755 100644
--- a/spec/helpers/diff_helper_spec.rb
+++ b/spec/helpers/diff_helper_spec.rb
@@ -169,9 +169,9 @@ RSpec.describe DiffHelper do
it "returns strings with marked inline diffs" do
marked_old_line, marked_new_line = mark_inline_diffs(old_line, new_line)
- expect(marked_old_line).to eq(%q{abc <span class="idiff left right deletion">&#39;def&#39;</span>})
+ expect(marked_old_line).to eq(%q{abc <span class="idiff left deletion">&#39;</span>def<span class="idiff right deletion">&#39;</span>})
expect(marked_old_line).to be_html_safe
- expect(marked_new_line).to eq(%q{abc <span class="idiff left right addition">&quot;def&quot;</span>})
+ expect(marked_new_line).to eq(%q{abc <span class="idiff left addition">&quot;</span>def<span class="idiff right addition">&quot;</span>})
expect(marked_new_line).to be_html_safe
end
@@ -358,4 +358,24 @@ RSpec.describe DiffHelper do
expect(diff_file_path_text(diff_file, max: 10)).to eq("...open.rb")
end
end
+
+ describe "#collapsed_diff_url" do
+ let(:params) do
+ {
+ controller: "projects/commit",
+ action: "show",
+ namespace_id: "foo",
+ project_id: "bar",
+ id: commit.sha
+ }
+ end
+
+ subject { helper.collapsed_diff_url(diff_file) }
+
+ it "returns a valid URL" do
+ allow(helper).to receive(:safe_params).and_return(params)
+
+ expect(subject).to match(/foo\/bar\/-\/commit\/#{commit.sha}\/diff_for_path/)
+ end
+ end
end
diff --git a/spec/helpers/enable_search_settings_helper_spec.rb b/spec/helpers/enable_search_settings_helper_spec.rb
new file mode 100644
index 00000000000..c55c549ea51
--- /dev/null
+++ b/spec/helpers/enable_search_settings_helper_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe EnableSearchSettingsHelper do
+ describe '#enable_search_settings' do
+ def before_content
+ helper.content_for(:before_content)
+ end
+
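+ # enable_search_settings should store the rendered shared/search_settings
+ # partial in content_for(:before_content), as asserted below.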
+ it 'sets content for before_content' do
+ expect(before_content).to be_nil
+
+ locals = { container_class: 'test-container-class' }
+
+ helper.enable_search_settings(locals: locals)
+
+ expect(before_content).to eql(helper.render("shared/search_settings", locals))
+ end
+ end
+end
diff --git a/spec/helpers/groups/group_members_helper_spec.rb b/spec/helpers/groups/group_members_helper_spec.rb
index d75124b6da7..99efc7963e6 100644
--- a/spec/helpers/groups/group_members_helper_spec.rb
+++ b/spec/helpers/groups/group_members_helper_spec.rb
@@ -23,13 +23,13 @@ RSpec.describe Groups::GroupMembersHelper do
end
end
- describe '#linked_groups_data_json' do
+ describe '#group_group_links_data_json' do
include_context 'group_group_link'
it 'matches json schema' do
- json = helper.linked_groups_data_json(shared_group.shared_with_group_links)
+ json = helper.group_group_links_data_json(shared_group.shared_with_group_links)
- expect(json).to match_schema('group_group_links')
+ expect(json).to match_schema('group_link/group_group_links')
end
end
@@ -81,13 +81,13 @@ RSpec.describe Groups::GroupMembersHelper do
expect(helper.group_members_list_data_attributes(group, present_members([group_member]))).to include({
members: helper.members_data_json(group, present_members([group_member])),
member_path: '/groups/foo-bar/-/group_members/:id',
- group_id: group.id,
+ source_id: group.id,
can_manage_members: 'true'
})
end
end
- describe '#linked_groups_list_data_attributes' do
+ describe '#group_group_links_list_data_attributes' do
include_context 'group_group_link'
before do
@@ -95,10 +95,10 @@ RSpec.describe Groups::GroupMembersHelper do
end
it 'returns expected hash' do
- expect(helper.linked_groups_list_data_attributes(shared_group)).to include({
- members: helper.linked_groups_data_json(shared_group.shared_with_group_links),
+ expect(helper.group_group_links_list_data_attributes(shared_group)).to include({
+ members: helper.group_group_links_data_json(shared_group.shared_with_group_links),
member_path: '/groups/foo-bar/-/group_links/:id',
- group_id: shared_group.id
+ source_id: shared_group.id
})
end
end
diff --git a/spec/helpers/groups_helper_spec.rb b/spec/helpers/groups_helper_spec.rb
index 8eb1b7b3b3d..61aaa618c45 100644
--- a/spec/helpers/groups_helper_spec.rb
+++ b/spec/helpers/groups_helper_spec.rb
@@ -444,4 +444,82 @@ RSpec.describe GroupsHelper do
end
end
end
+
+ describe '#group_open_issues_count' do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:count_service) { Groups::OpenIssuesCountService }
+
+ before do
+ allow(helper).to receive(:current_user) { current_user }
+ end
+
+ context 'when cached_sidebar_open_issues_count feature flag is enabled' do
+ before do
+ stub_feature_flags(cached_sidebar_open_issues_count: true)
+ end
+
+ it 'returns count value from cache' do
+ allow_next_instance_of(count_service) do |service|
+ allow(service).to receive(:count).and_return(2500)
+ end
+
+ expect(helper.group_open_issues_count(group)).to eq('2.5k')
+ end
+ end
+
+ context 'when cached_sidebar_open_issues_count feature flag is disabled' do
+ before do
+ stub_feature_flags(cached_sidebar_open_issues_count: false)
+ end
+
+ it 'returns the uncached issues count' do
+ allow(helper).to receive(:group_issues_count).and_return(2500)
+
+ expect(helper.group_open_issues_count(group)).to eq('2,500')
+ end
+ end
+ end
+
+ describe '#cached_open_group_issues_count' do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:group) { create(:group, name: 'group') }
+ let_it_be(:count_service) { Groups::OpenIssuesCountService }
+
+ before do
+ allow(helper).to receive(:current_user) { current_user }
+ end
+
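+ # cached_open_group_issues_count is expected to shorten large values:
+ # 999 stays '999', 2300 becomes '2.3k', 12560 becomes '12.6k'.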
+ it 'returns all digits for count value under 1000' do
+ allow_next_instance_of(count_service) do |service|
+ allow(service).to receive(:count).and_return(999)
+ end
+
+ expect(helper.cached_open_group_issues_count(group)).to eq('999')
+ end
+
+ it 'returns truncated digits for count value over 1000' do
+ allow_next_instance_of(count_service) do |service|
+ allow(service).to receive(:count).and_return(2300)
+ end
+
+ expect(helper.cached_open_group_issues_count(group)).to eq('2.3k')
+ end
+
+ it 'returns truncated digits for count value over 10000' do
+ allow_next_instance_of(count_service) do |service|
+ allow(service).to receive(:count).and_return(12560)
+ end
+
+ expect(helper.cached_open_group_issues_count(group)).to eq('12.6k')
+ end
+
+ it 'returns truncated digits for count value over 100000' do
+ allow_next_instance_of(count_service) do |service|
+ allow(service).to receive(:count).and_return(112560)
+ end
+
+ expect(helper.cached_open_group_issues_count(group)).to eq('112.6k')
+ end
+ end
end
diff --git a/spec/helpers/invite_members_helper_spec.rb b/spec/helpers/invite_members_helper_spec.rb
index 914d0931476..576021b37b3 100644
--- a/spec/helpers/invite_members_helper_spec.rb
+++ b/spec/helpers/invite_members_helper_spec.rb
@@ -7,11 +7,49 @@ RSpec.describe InviteMembersHelper do
let_it_be(:developer) { create(:user, developer_projects: [project]) }
let(:owner) { project.owner }
+ before do
+ helper.extend(Gitlab::Experimentation::ControllerConcern)
+ end
+
context 'with project' do
before do
assign(:project, project)
end
+ describe "#can_invite_members_for_project?" do
+ context 'when the user can_import_members' do
+ before do
+ allow(helper).to receive(:can_import_members?).and_return(true)
+ end
+
+ it 'returns true' do
+ expect(helper.can_invite_members_for_project?(project)).to eq true
+ expect(helper).to have_received(:can_import_members?)
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(invite_members_group_modal: false)
+ end
+
+ it 'returns false' do
+ expect(helper.can_invite_members_for_project?(project)).to eq false
+ expect(helper).not_to have_received(:can_import_members?)
+ end
+ end
+ end
+
+ context 'when the user can not invite members' do
+ before do
+ expect(helper).to receive(:can_import_members?).and_return(false)
+ end
+
+ it 'returns false' do
+ expect(helper.can_invite_members_for_project?(project)).to eq false
+ end
+ end
+ end
+
describe "#directly_invite_members?" do
context 'when the user is an owner' do
before do
@@ -80,6 +118,51 @@ RSpec.describe InviteMembersHelper do
context 'with group' do
let_it_be(:group) { create(:group) }
+ describe "#can_invite_members_for_group?" do
+ include Devise::Test::ControllerHelpers
+
+ let_it_be(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ allow(helper).to receive(:current_user) { user }
+ end
+
+ context 'when the user can_import_members' do
+ before do
+ allow(helper).to receive(:can?).with(user, :admin_group_member, group).and_return(true)
+ end
+
+ it 'returns true' do
+ expect(helper.can_invite_members_for_group?(group)).to eq true
+ expect(helper).to have_received(:can?).with(user, :admin_group_member, group)
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(invite_members_group_modal: false)
+ end
+
+ it 'returns false' do
+ stub_feature_flags(invite_members_group_modal: false)
+
+ expect(helper.can_invite_members_for_group?(group)).to eq false
+ expect(helper).not_to have_received(:can?)
+ end
+ end
+ end
+
+ context 'when the user can not invite members' do
+ before do
+ expect(helper).to receive(:can?).with(user, :admin_group_member, group).and_return(false)
+ end
+
+ it 'returns false' do
+ expect(helper.can_invite_members_for_group?(group)).to eq false
+ end
+ end
+ end
+
describe "#invite_group_members?" do
context 'when the user is an owner' do
before do
@@ -123,7 +206,6 @@ RSpec.describe InviteMembersHelper do
before do
allow(helper).to receive(:experiment_tracking_category_and_group) { '_track_property_' }
- allow(helper).to receive(:tracking_label).with(owner)
allow(helper).to receive(:current_user) { owner }
end
@@ -132,8 +214,7 @@ RSpec.describe InviteMembersHelper do
helper.dropdown_invite_members_link(form_model)
- expect(helper).to have_received(:experiment_tracking_category_and_group)
- .with(:invite_members_new_dropdown, subject: owner)
+ expect(helper).to have_received(:experiment_tracking_category_and_group).with(:invite_members_new_dropdown)
end
context 'with experiment enabled' do
diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb
index 57845904d32..7c7838e0053 100644
--- a/spec/helpers/issuables_helper_spec.rb
+++ b/spec/helpers/issuables_helper_spec.rb
@@ -72,28 +72,38 @@ RSpec.describe IssuablesHelper do
let(:user) { create(:user) }
describe 'state text' do
- before do
- allow(helper).to receive(:issuables_count_for_state).and_return(42)
- end
-
- it 'returns "Open" when state is :opened' do
- expect(helper.issuables_state_counter_text(:issues, :opened, true))
- .to eq('<span>Open</span> <span class="badge badge-pill">42</span>')
- end
+ context 'when number of issuables can be generated' do
+ before do
+ allow(helper).to receive(:issuables_count_for_state).and_return(42)
+ end
- it 'returns "Closed" when state is :closed' do
- expect(helper.issuables_state_counter_text(:issues, :closed, true))
- .to eq('<span>Closed</span> <span class="badge badge-pill">42</span>')
+ it 'returns navigation with badges' do
+ expect(helper.issuables_state_counter_text(:issues, :opened, true))
+ .to eq('<span>Open</span> <span class="badge badge-pill">42</span>')
+ expect(helper.issuables_state_counter_text(:issues, :closed, true))
+ .to eq('<span>Closed</span> <span class="badge badge-pill">42</span>')
+ expect(helper.issuables_state_counter_text(:merge_requests, :merged, true))
+ .to eq('<span>Merged</span> <span class="badge badge-pill">42</span>')
+ expect(helper.issuables_state_counter_text(:merge_requests, :all, true))
+ .to eq('<span>All</span> <span class="badge badge-pill">42</span>')
+ end
end
- it 'returns "Merged" when state is :merged' do
- expect(helper.issuables_state_counter_text(:merge_requests, :merged, true))
- .to eq('<span>Merged</span> <span class="badge badge-pill">42</span>')
- end
+ context 'when count cannot be generated' do
+ before do
+ allow(helper).to receive(:issuables_count_for_state).and_return(-1)
+ end
- it 'returns "All" when state is :all' do
- expect(helper.issuables_state_counter_text(:merge_requests, :all, true))
- .to eq('<span>All</span> <span class="badge badge-pill">42</span>')
+ it 'returns navigation without badges' do
+ expect(helper.issuables_state_counter_text(:issues, :opened, true))
+ .to eq('<span>Open</span>')
+ expect(helper.issuables_state_counter_text(:issues, :closed, true))
+ .to eq('<span>Closed</span>')
+ expect(helper.issuables_state_counter_text(:merge_requests, :merged, true))
+ .to eq('<span>Merged</span>')
+ expect(helper.issuables_state_counter_text(:merge_requests, :all, true))
+ .to eq('<span>All</span>')
+ end
end
end
end
diff --git a/spec/helpers/issues_helper_spec.rb b/spec/helpers/issues_helper_spec.rb
index 1ed61bd3144..07e55e9b016 100644
--- a/spec/helpers/issues_helper_spec.rb
+++ b/spec/helpers/issues_helper_spec.rb
@@ -254,4 +254,31 @@ RSpec.describe IssuesHelper do
expect(helper.use_startup_call?).to eq(true)
end
end
+
+ describe '#issue_header_actions_data' do
+ let(:current_user) { create(:user) }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(current_user)
+ allow(helper).to receive(:can?).and_return(true)
+ end
+
+ it 'returns expected result' do
+ expected = {
+ can_create_issue: "true",
+ can_reopen_issue: "true",
+ can_report_spam: "false",
+ can_update_issue: "true",
+ iid: issue.iid,
+ is_issue_author: "false",
+ issue_type: "issue",
+ new_issue_path: new_project_issue_path(project),
+ project_path: project.full_path,
+ report_abuse_path: new_abuse_report_path(user_id: issue.author.id, ref_url: issue_url(issue)),
+ submit_as_spam_path: mark_as_spam_project_issue_path(project, issue)
+ }
+
+ expect(helper.issue_header_actions_data(project, issue, current_user)).to include(expected)
+ end
+ end
end
diff --git a/spec/helpers/jira_connect_helper_spec.rb b/spec/helpers/jira_connect_helper_spec.rb
index a99072527c8..9695bed948b 100644
--- a/spec/helpers/jira_connect_helper_spec.rb
+++ b/spec/helpers/jira_connect_helper_spec.rb
@@ -4,12 +4,43 @@ require 'spec_helper'
RSpec.describe JiraConnectHelper do
describe '#jira_connect_app_data' do
- subject { helper.jira_connect_app_data }
+ let_it_be(:subscription) { create(:jira_connect_subscription) }
+ let(:user) { create(:user) }
- it 'includes Jira Connect app attributes' do
- is_expected.to include(
- :groups_path
- )
+ subject { helper.jira_connect_app_data([subscription]) }
+
+ context 'user is not logged in' do
+ before do
+ allow(view).to receive(:current_user).and_return(nil)
+ end
+
+ it 'includes Jira Connect app attributes' do
+ is_expected.to include(
+ :groups_path,
+ :subscriptions_path,
+ :users_path
+ )
+ end
+
+ it 'assigns users_path with value' do
+ expect(subject[:users_path]).to eq(jira_connect_users_path)
+ end
+
+ it 'passes group as "skip_groups" param' do
+ skip_groups_param = CGI.escape('skip_groups[]')
+
+ expect(subject[:groups_path]).to include("#{skip_groups_param}=#{subscription.namespace.id}")
+ end
+ end
+
+ context 'user is logged in' do
+ before do
+ allow(view).to receive(:current_user).and_return(user)
+ end
+
+ it 'assigns users_path to nil' do
+ expect(subject[:users_path]).to be_nil
+ end
end
end
end
diff --git a/spec/helpers/notes_helper_spec.rb b/spec/helpers/notes_helper_spec.rb
index f9b3b535334..b8502cdf25e 100644
--- a/spec/helpers/notes_helper_spec.rb
+++ b/spec/helpers/notes_helper_spec.rb
@@ -316,4 +316,15 @@ RSpec.describe NotesHelper do
end
end
end
+
+ describe '#notes_data' do
+ let(:issue) { create(:issue, project: project) }
+
+ it 'sets last_fetched_at to 0 when start_at_zero is true' do
+ @project = project
+ @noteable = issue
+
+ expect(helper.notes_data(issue, true)[:lastFetchedAt]).to eq(0)
+ end
+ end
end
diff --git a/spec/helpers/operations_helper_spec.rb b/spec/helpers/operations_helper_spec.rb
index 801d5de79b1..5b0ce00063f 100644
--- a/spec/helpers/operations_helper_spec.rb
+++ b/spec/helpers/operations_helper_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe OperationsHelper do
it 'returns the correct values' do
expect(subject).to eq(
- 'alerts_setup_url' => help_page_path('operations/incident_management/alert_integrations.md', anchor: 'generic-http-endpoint'),
+ 'alerts_setup_url' => help_page_path('operations/incident_management/integrations.md', anchor: 'configuration'),
'alerts_usage_url' => project_alert_management_index_path(project),
'prometheus_form_path' => project_service_path(project, prometheus_service),
'prometheus_reset_key_path' => reset_alerting_token_project_settings_operations_path(project),
diff --git a/spec/helpers/projects/project_members_helper_spec.rb b/spec/helpers/projects/project_members_helper_spec.rb
index cc290367e34..5e0b4df7f7f 100644
--- a/spec/helpers/projects/project_members_helper_spec.rb
+++ b/spec/helpers/projects/project_members_helper_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Projects::ProjectMembersHelper do
+ include MembersPresentation
+
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
@@ -142,4 +144,58 @@ RSpec.describe Projects::ProjectMembersHelper do
it { is_expected.to be(false) }
end
end
+
+ describe 'project members' do
+ let_it_be(:project_members) { create_list(:project_member, 1, project: project) }
+
+ describe '#project_members_data_json' do
+ it 'matches json schema' do
+ expect(helper.project_members_data_json(project, present_members(project_members))).to match_schema('members')
+ end
+ end
+
+ describe '#project_members_list_data_attributes' do
+ let(:allow_admin_project) { true }
+
+ before do
+ allow(helper).to receive(:project_project_member_path).with(project, ':id').and_return('/foo-bar/-/project_members/:id')
+ end
+
+ it 'returns expected hash' do
+ expect(helper.project_members_list_data_attributes(project, present_members(project_members))).to include({
+ members: helper.project_members_data_json(project, present_members(project_members)),
+ member_path: '/foo-bar/-/project_members/:id',
+ source_id: project.id,
+ can_manage_members: true
+ })
+ end
+ end
+ end
+
+ describe 'project group links' do
+ let_it_be(:project_group_links) { create_list(:project_group_link, 1, project: project) }
+ let(:allow_admin_project) { true }
+
+ describe '#project_group_links_data_json' do
+ it 'matches json schema' do
+ expect(helper.project_group_links_data_json(project_group_links)).to match_schema('group_link/project_group_links')
+ end
+ end
+
+ describe '#project_group_links_list_data_attributes' do
+ before do
+ allow(helper).to receive(:project_group_link_path).with(project, ':id').and_return('/foo-bar/-/group_links/:id')
+ allow(helper).to receive(:can?).with(current_user, :admin_project_member, project).and_return(true)
+ end
+
+ it 'returns expected hash' do
+ expect(helper.project_group_links_list_data_attributes(project, project_group_links)).to include({
+ members: helper.project_group_links_data_json(project_group_links),
+ member_path: '/foo-bar/-/group_links/:id',
+ source_id: project.id,
+ can_manage_members: true
+ })
+ end
+ end
+ end
end
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index b920e2e5600..b61db537159 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -398,6 +398,45 @@ RSpec.describe ProjectsHelper do
helper.send(:get_project_nav_tabs, project, user)
end
+ context 'Security & Compliance tabs' do
+ before do
+ stub_feature_flags(secure_security_and_compliance_configuration_page_on_ce: feature_flag_enabled)
+ allow(helper).to receive(:can?).with(user, :read_security_configuration, project).and_return(can_read_security_configuration)
+ end
+
+ context 'when user cannot read security configuration' do
+ let(:can_read_security_configuration) { false }
+
+ context 'when feature flag is disabled' do
+ let(:feature_flag_enabled) { false }
+
+ it { is_expected.not_to include(:security_configuration) }
+ end
+
+ context 'when feature flag is enabled' do
+ let(:feature_flag_enabled) { true }
+
+ it { is_expected.not_to include(:security_configuration) }
+ end
+ end
+
+ context 'when user can read security configuration' do
+ let(:can_read_security_configuration) { true }
+
+ context 'when feature flag is disabled' do
+ let(:feature_flag_enabled) { false }
+
+ it { is_expected.not_to include(:security_configuration) }
+ end
+
+ context 'when feature flag is enabled' do
+ let(:feature_flag_enabled) { true }
+
+ it { is_expected.to include(:security_configuration) }
+ end
+ end
+ end
+
context 'when builds feature is enabled' do
before do
allow(project).to receive(:builds_enabled?).and_return(true)
@@ -657,31 +696,6 @@ RSpec.describe ProjectsHelper do
end
end
- describe 'link_to_filter_repo' do
- subject { helper.link_to_filter_repo }
-
- it 'generates a hardcoded link to git filter-repo' do
- result = helper.link_to_filter_repo
- doc = Nokogiri::HTML.fragment(result)
-
- expect(doc.children.size).to eq(1)
-
- link = doc.children.first
-
- aggregate_failures do
- expect(result).to be_html_safe
-
- expect(link.name).to eq('a')
- expect(link[:target]).to eq('_blank')
- expect(link[:rel]).to eq('noopener noreferrer')
- expect(link[:href]).to eq('https://github.com/newren/git-filter-repo')
- expect(link.inner_html).to eq('git filter-repo')
-
- expect(result).to be_html_safe
- end
- end
- end
-
describe '#explore_projects_tab?' do
subject { helper.explore_projects_tab? }
@@ -854,16 +868,36 @@ RSpec.describe ProjectsHelper do
end
describe '#can_import_members?' do
- let(:owner) { project.owner }
+ context 'when user is project owner' do
+ before do
+ allow(helper).to receive(:current_user) { project.owner }
+ end
- it 'returns false if user cannot admin_project_member' do
- allow(helper).to receive(:current_user) { user }
- expect(helper.can_import_members?).to eq false
+ it 'returns true for owner of project' do
+ expect(helper.can_import_members?).to eq true
+ end
end
- it 'returns true if user can admin_project_member' do
- allow(helper).to receive(:current_user) { owner }
- expect(helper.can_import_members?).to eq true
+ context 'when user is not a project owner' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:user_project_role, :can_import) do
+ :maintainer | true
+ :developer | false
+ :reporter | false
+ :guest | false
+ end
+
+ with_them do
+ before do
+ project.add_role(user, user_project_role)
+ allow(helper).to receive(:current_user) { user }
+ end
+
+ it 'resolves whether the user can import members' do
+ expect(helper.can_import_members?).to eq can_import
+ end
+ end
end
end
diff --git a/spec/helpers/search_helper_spec.rb b/spec/helpers/search_helper_spec.rb
index 2cb9d66ac63..80401635b95 100644
--- a/spec/helpers/search_helper_spec.rb
+++ b/spec/helpers/search_helper_spec.rb
@@ -610,4 +610,25 @@ RSpec.describe SearchHelper do
end
end
end
+
+ describe '#search_sort_options' do
+ let(:user) { create(:user) }
+
+ mock_created_sort = {
+ title: _('Created date'),
+ sortable: true,
+ sortParam: {
+ asc: 'created_asc',
+ desc: 'created_desc'
+ }
+ }
+
+ before do
+ allow(self).to receive(:current_user).and_return(user)
+ end
+
+ it 'returns the correct data' do
+ expect(search_sort_options).to eq([mock_created_sort])
+ end
+ end
end
diff --git a/spec/helpers/sorting_helper_spec.rb b/spec/helpers/sorting_helper_spec.rb
index 2d581dfba37..f976fb098a8 100644
--- a/spec/helpers/sorting_helper_spec.rb
+++ b/spec/helpers/sorting_helper_spec.rb
@@ -50,24 +50,6 @@ RSpec.describe SortingHelper do
end
end
- describe '#search_sort_direction_button' do
- before do
- set_sorting_url 'test_label'
- end
-
- it 'keeps label filter param' do
- expect(search_sort_direction_button('created_asc')).to include('label_name=test_label')
- end
-
- it 'returns icon with sort-lowest when sort is asc' do
- expect(search_sort_direction_button('created_asc')).to include('sort-lowest')
- end
-
- it 'returns icon with sort-highest when sort is desc' do
- expect(search_sort_direction_button('created_desc')).to include('sort-highest')
- end
- end
-
def stub_controller_path(value)
allow(helper.controller).to receive(:controller_path).and_return(value)
end
diff --git a/spec/helpers/stat_anchors_helper_spec.rb b/spec/helpers/stat_anchors_helper_spec.rb
index c6556647bc8..0615baac3cb 100644
--- a/spec/helpers/stat_anchors_helper_spec.rb
+++ b/spec/helpers/stat_anchors_helper_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe StatAnchorsHelper do
let(:anchor) { anchor_klass.new(false, nil, nil, 'default') }
it 'returns the proper attributes' do
- expect(subject[:class]).to include('btn btn-default')
+ expect(subject[:class]).to include('gl-button btn btn-default')
end
end
@@ -29,7 +29,7 @@ RSpec.describe StatAnchorsHelper do
let(:anchor) { anchor_klass.new(false) }
it 'returns the proper attributes' do
- expect(subject[:class]).to include('btn btn-missing')
+ expect(subject[:class]).to include('gl-button btn btn-dashed')
end
end
end
diff --git a/spec/helpers/tree_helper_spec.rb b/spec/helpers/tree_helper_spec.rb
index 6cb9894e306..bc25a2fcdfc 100644
--- a/spec/helpers/tree_helper_spec.rb
+++ b/spec/helpers/tree_helper_spec.rb
@@ -19,94 +19,6 @@ RSpec.describe TreeHelper do
)
end
- describe '.render_tree' do
- before do
- @id = sha
- @path = ""
- @project = project
- @lfs_blob_ids = []
- end
-
- it 'displays all entries without a warning' do
- tree = repository.tree(sha, 'files')
-
- html = render_tree(tree)
-
- expect(html).not_to have_selector('.tree-truncated-warning')
- end
-
- it 'truncates entries and adds a warning' do
- stub_const('TreeHelper::FILE_LIMIT', 1)
- tree = repository.tree(sha, 'files')
-
- html = render_tree(tree)
-
- expect(html).to have_selector('.tree-truncated-warning', count: 1)
- expect(html).to have_selector('.tree-item-file-name', count: 1)
- end
- end
-
- describe '.fast_project_blob_path' do
- it 'generates the same path as project_blob_path' do
- blob_path = repository.tree(sha, 'with space').entries.first.path
- fast_path = fast_project_blob_path(project, blob_path)
- std_path = project_blob_path(project, blob_path)
-
- expect(fast_path).to eq(std_path)
- end
-
- it 'generates the same path with encoded file names' do
- tree = repository.tree(sha, 'encoding')
- blob_path = tree.entries.find { |entry| entry.path == 'encoding/テスト.txt' }.path
- fast_path = fast_project_blob_path(project, blob_path)
- std_path = project_blob_path(project, blob_path)
-
- expect(fast_path).to eq(std_path)
- end
-
- it 'respects a configured relative URL' do
- allow(Gitlab.config.gitlab).to receive(:relative_url_root).and_return('/gitlab/root')
- blob_path = repository.tree(sha, '').entries.first.path
- fast_path = fast_project_blob_path(project, blob_path)
-
- expect(fast_path).to start_with('/gitlab/root')
- end
-
- it 'encodes files starting with #' do
- filename = '#test-file'
- create_file(filename)
-
- fast_path = fast_project_blob_path(project, filename)
-
- expect(fast_path).to end_with('%23test-file')
- end
- end
-
- describe '.fast_project_tree_path' do
- let(:tree_path) { repository.tree(sha, 'with space').path }
- let(:fast_path) { fast_project_tree_path(project, tree_path) }
- let(:std_path) { project_tree_path(project, tree_path) }
-
- it 'generates the same path as project_tree_path' do
- expect(fast_path).to eq(std_path)
- end
-
- it 'respects a configured relative URL' do
- allow(Gitlab.config.gitlab).to receive(:relative_url_root).and_return('/gitlab/root')
-
- expect(fast_path).to start_with('/gitlab/root')
- end
-
- it 'encodes files starting with #' do
- filename = '#test-file'
- create_file(filename)
-
- fast_path = fast_project_tree_path(project, filename)
-
- expect(fast_path).to end_with('%23test-file')
- end
- end
-
describe 'flatten_tree' do
let(:tree) { repository.tree(sha, 'files') }
let(:root_path) { 'files' }
diff --git a/spec/helpers/user_callouts_helper_spec.rb b/spec/helpers/user_callouts_helper_spec.rb
index 250aedda906..b6607182461 100644
--- a/spec/helpers/user_callouts_helper_spec.rb
+++ b/spec/helpers/user_callouts_helper_spec.rb
@@ -222,4 +222,24 @@ RSpec.describe UserCalloutsHelper do
it { is_expected.to be true }
end
end
+
+ describe '.show_unfinished_tag_cleanup_callout?' do
+ subject { helper.show_unfinished_tag_cleanup_callout? }
+
+ before do
+ allow(helper).to receive(:user_dismissed?).with(described_class::UNFINISHED_TAG_CLEANUP_CALLOUT) { dismissed }
+ end
+
+ context 'when user has not dismissed' do
+ let(:dismissed) { false }
+
+ it { is_expected.to be true }
+ end
+
+ context 'when user dismissed' do
+ let(:dismissed) { true }
+
+ it { is_expected.to be false }
+ end
+ end
end
diff --git a/spec/initializers/validate_puma_spec.rb b/spec/initializers/validate_puma_spec.rb
new file mode 100644
index 00000000000..2c02bde2bb3
--- /dev/null
+++ b/spec/initializers/validate_puma_spec.rb
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'validate puma' do
+ subject do
+ load Rails.root.join('config/initializers/validate_puma.rb')
+ end
+
+ before do
+ stub_env('PUMA_SKIP_CLUSTER_VALIDATION', skip_validation)
+ stub_const('Puma', double)
+ allow(Gitlab::Runtime).to receive(:puma?).and_return(true)
+ allow(Puma).to receive_message_chain(:cli_config, :options).and_return(workers: workers)
+ end
+
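+ # On .com a zero worker count should always raise; in other environments the
+ # check can be bypassed with PUMA_SKIP_CLUSTER_VALIDATION.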
+ context 'for .com' do
+ before do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ end
+
+ context 'when worker count is 0' do
+ let(:workers) { 0 }
+
+ context 'and PUMA_SKIP_CLUSTER_VALIDATION is true' do
+ let(:skip_validation) { true }
+
+ specify { expect { subject }.to raise_error(String) }
+ end
+
+ context 'and PUMA_SKIP_CLUSTER_VALIDATION is false' do
+ let(:skip_validation) { false }
+
+ specify { expect { subject }.to raise_error(String) }
+ end
+ end
+
+ context 'when worker count is > 0' do
+ let(:workers) { 2 }
+
+ context 'and PUMA_SKIP_CLUSTER_VALIDATION is true' do
+ let(:skip_validation) { true }
+
+ specify { expect { subject }.not_to raise_error }
+ end
+
+ context 'and PUMA_SKIP_CLUSTER_VALIDATION is false' do
+ let(:skip_validation) { false }
+
+ specify { expect { subject }.not_to raise_error }
+ end
+ end
+ end
+
+ context 'for other environments' do
+ before do
+ allow(Gitlab).to receive(:com?).and_return(false)
+ end
+
+ context 'when worker count is 0' do
+ let(:workers) { 0 }
+
+ context 'and PUMA_SKIP_CLUSTER_VALIDATION is true' do
+ let(:skip_validation) { true }
+
+ specify { expect { subject }.not_to raise_error }
+ end
+
+ context 'and PUMA_SKIP_CLUSTER_VALIDATION is false' do
+ let(:skip_validation) { false }
+
+ specify { expect { subject }.to raise_error(String) }
+ end
+ end
+
+ context 'when worker count is > 0' do
+ let(:workers) { 2 }
+
+ context 'and PUMA_SKIP_CLUSTER_VALIDATION is true' do
+ let(:skip_validation) { true }
+
+ specify { expect { subject }.not_to raise_error }
+ end
+
+ context 'and PUMA_SKIP_CLUSTER_VALIDATION is false' do
+ let(:skip_validation) { false }
+
+ specify { expect { subject }.not_to raise_error }
+ end
+ end
+ end
+end
diff --git a/spec/javascripts/test_bundle.js b/spec/javascripts/test_bundle.js
index 59136de0b0d..faa07775aaa 100644
--- a/spec/javascripts/test_bundle.js
+++ b/spec/javascripts/test_bundle.js
@@ -81,14 +81,6 @@ window.addEventListener('unhandledrejection', (event) => {
console.error(event.reason.stack || event.reason);
});
-// HACK: Chrome 59 disconnects if there are too many synchronous tests in a row
-// because it appears to lock up the thread that communicates to Karma's socket
-// This async beforeEach gets called on every spec and releases the JS thread long
-// enough for the socket to continue to communicate.
-// The downside is that it creates a minor performance penalty in the time it takes
-// to run our unit tests.
-beforeEach((done) => done());
-
let longRunningTestTimeoutHandle;
beforeEach((done) => {
diff --git a/spec/lib/api/entities/user_spec.rb b/spec/lib/api/entities/user_spec.rb
index 99ffe0eb925..e35deeb6263 100644
--- a/spec/lib/api/entities/user_spec.rb
+++ b/spec/lib/api/entities/user_spec.rb
@@ -23,4 +23,16 @@ RSpec.describe API::Entities::User do
expect(subject).not_to include(:created_at)
end
+
+ it 'exposes user as not a bot' do
+ expect(subject[:bot]).to be_falsey
+ end
+
+ context 'with bot user' do
+ let(:user) { create(:user, :security_bot) }
+
+ it 'exposes user as a bot' do
+ expect(subject[:bot]).to eq(true)
+ end
+ end
end
diff --git a/spec/lib/atlassian/jira_connect/client_spec.rb b/spec/lib/atlassian/jira_connect/client_spec.rb
index 21ee40f22fe..5c8d4282118 100644
--- a/spec/lib/atlassian/jira_connect/client_spec.rb
+++ b/spec/lib/atlassian/jira_connect/client_spec.rb
@@ -18,15 +18,15 @@ RSpec.describe Atlassian::JiraConnect::Client do
end
end
- around do |example|
- freeze_time { example.run }
- end
-
describe '.generate_update_sequence_id' do
- it 'returns monotonic_time converted it to integer' do
- allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(1.0)
+ it 'returns unix time in microseconds as integer', :aggregate_failures do
+ travel_to(Time.utc(1970, 1, 1, 0, 0, 1)) do
+ expect(described_class.generate_update_sequence_id).to eq(1000)
+ end
- expect(described_class.generate_update_sequence_id).to eq(1)
+ travel_to(Time.utc(1970, 1, 1, 0, 0, 5)) do
+ expect(described_class.generate_update_sequence_id).to eq(5000)
+ end
end
end
@@ -238,22 +238,6 @@ RSpec.describe Atlassian::JiraConnect::Client do
expect(response['errorMessages']).to eq(%w(X Y Z))
end
end
-
- it 'does not call the API if the feature flag is not enabled' do
- stub_feature_flags(jira_sync_deployments: false)
-
- expect(subject).not_to receive(:post)
-
- subject.send(:store_deploy_info, project: project, deployments: deployments)
- end
-
- it 'does call the API if the feature flag enabled for the project' do
- stub_feature_flags(jira_sync_deployments: project)
-
- expect(subject).to receive(:post).with('/rest/deployments/0.1/bulk', { deployments: Array }).and_call_original
-
- subject.send(:store_deploy_info, project: project, deployments: deployments)
- end
end
describe '#store_ff_info' do
@@ -319,24 +303,6 @@ RSpec.describe Atlassian::JiraConnect::Client do
expect(response['errorMessages']).to eq(['a: X', 'a: Y', 'b: Z'])
end
end
-
- it 'does not call the API if the feature flag is not enabled' do
- stub_feature_flags(jira_sync_feature_flags: false)
-
- expect(subject).not_to receive(:post)
-
- subject.send(:store_ff_info, project: project, feature_flags: feature_flags)
- end
-
- it 'does call the API if the feature flag enabled for the project' do
- stub_feature_flags(jira_sync_feature_flags: project)
-
- expect(subject).to receive(:post).with('/rest/featureflags/0.1/bulk', {
- flags: Array, properties: Hash
- }).and_call_original
-
- subject.send(:store_ff_info, project: project, feature_flags: feature_flags)
- end
end
describe '#store_build_info' do
@@ -384,24 +350,6 @@ RSpec.describe Atlassian::JiraConnect::Client do
subject.send(:store_build_info, project: project, pipelines: pipelines.take(1))
end
- it 'does not call the API if the feature flag is not enabled' do
- stub_feature_flags(jira_sync_builds: false)
-
- expect(subject).not_to receive(:post)
-
- subject.send(:store_build_info, project: project, pipelines: pipelines)
- end
-
- it 'does call the API if the feature flag enabled for the project' do
- stub_feature_flags(jira_sync_builds: project)
-
- expect(subject).to receive(:post)
- .with('/rest/builds/0.1/bulk', { builds: Array })
- .and_call_original
-
- subject.send(:store_build_info, project: project, pipelines: pipelines)
- end
-
context 'there are errors' do
let(:failures) do
[{ errors: [{ message: 'X' }, { message: 'Y' }] }, { errors: [{ message: 'Z' }] }]
diff --git a/spec/lib/atlassian/jira_connect/serializers/feature_flag_entity_spec.rb b/spec/lib/atlassian/jira_connect/serializers/feature_flag_entity_spec.rb
index 964801338cf..2d12cd1ed0a 100644
--- a/spec/lib/atlassian/jira_connect/serializers/feature_flag_entity_spec.rb
+++ b/spec/lib/atlassian/jira_connect/serializers/feature_flag_entity_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Atlassian::JiraConnect::Serializers::FeatureFlagEntity do
subject { described_class.represent(feature_flag) }
context 'when the feature flag does not belong to any Jira issue' do
- let_it_be(:feature_flag) { create(:operations_feature_flag) }
+ let_it_be(:feature_flag) { create(:operations_feature_flag, project: project) }
describe '#issue_keys' do
it 'is empty' do
@@ -30,7 +30,7 @@ RSpec.describe Atlassian::JiraConnect::Serializers::FeatureFlagEntity do
context 'when the feature flag does belong to a Jira issue' do
let(:feature_flag) do
- create(:operations_feature_flag, description: 'THING-123')
+ create(:operations_feature_flag, project: project, description: 'THING-123')
end
describe '#issue_keys' do
@@ -66,6 +66,7 @@ RSpec.describe Atlassian::JiraConnect::Serializers::FeatureFlagEntity do
end
it 'has the correct summary' do
+ expect(entity.dig('summary', 'url')).to eq "http://localhost/#{project.full_path}/-/feature_flags/#{feature_flag.iid}/edit"
expect(entity.dig('summary', 'status')).to eq(
'enabled' => true,
'defaultValue' => '',
diff --git a/spec/lib/backup/files_spec.rb b/spec/lib/backup/files_spec.rb
index 450e396a389..90cd08ae9cc 100644
--- a/spec/lib/backup/files_spec.rb
+++ b/spec/lib/backup/files_spec.rb
@@ -150,7 +150,7 @@ RSpec.describe Backup::Files do
it 'excludes tmp dirs from rsync' do
expect(Gitlab::Popen).to receive(:popen)
- .with(%w(rsync -a --delete --exclude=lost+found --exclude=/@pages.tmp /var/gitlab-pages /var/gitlab-backup))
+ .with(%w(rsync -a --delete --exclude=lost+found --exclude=/gitlab-pages/@pages.tmp /var/gitlab-pages /var/gitlab-backup))
.and_return(['', 0])
subject.dump
@@ -158,7 +158,7 @@ RSpec.describe Backup::Files do
it 'retries if rsync fails due to vanishing files' do
expect(Gitlab::Popen).to receive(:popen)
- .with(%w(rsync -a --delete --exclude=lost+found --exclude=/@pages.tmp /var/gitlab-pages /var/gitlab-backup))
+ .with(%w(rsync -a --delete --exclude=lost+found --exclude=/gitlab-pages/@pages.tmp /var/gitlab-pages /var/gitlab-backup))
.and_return(['rsync failed', 24], ['', 0])
expect do
@@ -168,7 +168,7 @@ RSpec.describe Backup::Files do
it 'raises an error and outputs an error message if rsync failed' do
allow(Gitlab::Popen).to receive(:popen)
- .with(%w(rsync -a --delete --exclude=lost+found --exclude=/@pages.tmp /var/gitlab-pages /var/gitlab-backup))
+ .with(%w(rsync -a --delete --exclude=lost+found --exclude=/gitlab-pages/@pages.tmp /var/gitlab-pages /var/gitlab-backup))
.and_return(['rsync failed', 1])
expect do
@@ -186,8 +186,8 @@ RSpec.describe Backup::Files do
expect(subject.exclude_dirs(:tar)).to eq(['--exclude=lost+found', '--exclude=./@pages.tmp'])
end
- it 'prepends a leading slash to rsync excludes' do
- expect(subject.exclude_dirs(:rsync)).to eq(['--exclude=lost+found', '--exclude=/@pages.tmp'])
+ it 'prepends a leading slash and app_files_dir basename to rsync excludes' do
+ expect(subject.exclude_dirs(:rsync)).to eq(['--exclude=lost+found', '--exclude=/gitlab-pages/@pages.tmp'])
end
end
diff --git a/spec/lib/banzai/filter/asset_proxy_filter_spec.rb b/spec/lib/banzai/filter/asset_proxy_filter_spec.rb
index 1f886059bf6..81aa8d35ebc 100644
--- a/spec/lib/banzai/filter/asset_proxy_filter_spec.rb
+++ b/spec/lib/banzai/filter/asset_proxy_filter_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Banzai::Filter::AssetProxyFilter do
stub_application_setting(asset_proxy_enabled: true)
stub_application_setting(asset_proxy_secret_key: 'shared-secret')
stub_application_setting(asset_proxy_url: 'https://assets.example.com')
- stub_application_setting(asset_proxy_whitelist: %w(gitlab.com *.mydomain.com))
+ stub_application_setting(asset_proxy_allowlist: %w(gitlab.com *.mydomain.com))
described_class.initialize_settings
@@ -39,16 +39,26 @@ RSpec.describe Banzai::Filter::AssetProxyFilter do
expect(Gitlab.config.asset_proxy.domain_regexp).to eq(/^(gitlab\.com|.*?\.mydomain\.com)$/i)
end
- context 'when whitelist is empty' do
+ context 'when allowlist is empty' do
it 'defaults to the install domain' do
stub_application_setting(asset_proxy_enabled: true)
- stub_application_setting(asset_proxy_whitelist: [])
+ stub_application_setting(asset_proxy_allowlist: [])
described_class.initialize_settings
expect(Gitlab.config.asset_proxy.allowlist).to eq [Gitlab.config.gitlab.host]
end
end
+
+ it 'supports deprecated whitelist settings' do
+ stub_application_setting(asset_proxy_enabled: true)
+ stub_application_setting(asset_proxy_whitelist: %w(foo.com bar.com))
+ stub_application_setting(asset_proxy_allowlist: [])
+
+ described_class.initialize_settings
+
+ expect(Gitlab.config.asset_proxy.allowlist).to eq %w(foo.com bar.com)
+ end
end
context 'when properly configured' do
diff --git a/spec/lib/banzai/filter/truncate_source_filter_spec.rb b/spec/lib/banzai/filter/truncate_source_filter_spec.rb
index b0c6d91daa8..d5eb8b738b1 100644
--- a/spec/lib/banzai/filter/truncate_source_filter_spec.rb
+++ b/spec/lib/banzai/filter/truncate_source_filter_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Banzai::Filter::TruncateSourceFilter do
it 'truncates UTF-8 text by bytes, on a character boundary' do
utf8_text = '日本語の文字が大きい'
- truncated = '日…'
+ truncated = '日...'
expect(filter(utf8_text, limit: truncated.bytesize)).to eq(truncated)
expect(filter(utf8_text, limit: utf8_text.bytesize)).to eq(utf8_text)
diff --git a/spec/lib/banzai/pipeline/full_pipeline_spec.rb b/spec/lib/banzai/pipeline/full_pipeline_spec.rb
index 9391ca386cf..bcee6f8f65d 100644
--- a/spec/lib/banzai/pipeline/full_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/full_pipeline_spec.rb
@@ -131,4 +131,16 @@ RSpec.describe Banzai::Pipeline::FullPipeline do
expect(output).to include("test [[<em>TOC</em>]]")
end
end
+
+ describe 'backslash escapes' do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:issue) { create(:issue, project: project) }
+
+ it 'does not convert an escaped reference' do
+ markdown = "\\#{issue.to_reference}"
+ output = described_class.to_html(markdown, project: project)
+
+ expect(output).to include("<span>#</span>#{issue.iid}")
+ end
+ end
end
diff --git a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb
new file mode 100644
index 00000000000..241d6db4f11
--- /dev/null
+++ b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb
@@ -0,0 +1,118 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline do
+ using RSpec::Parameterized::TableSyntax
+
+ describe 'backslash escapes' do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:issue) { create(:issue, project: project) }
+
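+ # Runs the pipeline on the given markdown, asserts the rendered HTML includes
+ # the expected fragment, and returns the pipeline result.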
+ def correct_html_included(markdown, expected)
+ result = described_class.call(markdown, {})
+
+ expect(result[:output].to_html).to include(expected)
+
+ result
+ end
+
+ context 'when feature flag honor_escaped_markdown is disabled' do
+ before do
+ stub_feature_flags(honor_escaped_markdown: false)
+ end
+
+ it 'does not escape the markdown' do
+ result = described_class.call(%q(\!), project: project)
+ output = result[:output].to_html
+
+ expect(output).to eq('<p data-sourcepos="1:1-1:2">!</p>')
+ expect(result[:escaped_literals]).to be_falsey
+ end
+ end
+
+ # Test strings taken from https://spec.commonmark.org/0.29/#backslash-escapes
+ describe 'CommonMark tests', :aggregate_failures do
+ it 'converts all ASCII punctuation to literals' do
+ markdown = %q(\!\"\#\$\%\&\'\*\+\,\-\.\/\:\;\<\=\>\?\@\[\]\^\_\`\{\|\}\~) + %q[\(\)\\\\]
+ punctuation = %w(! " # $ % &amp; ' * + , - . / : ; &lt; = &gt; ? @ [ \\ ] ^ _ ` { | } ~) + %w[( )]
+
+ result = described_class.call(markdown, project: project)
+ output = result[:output].to_html
+
+ punctuation.each { |char| expect(output).to include("<span>#{char}</span>") }
+ expect(result[:escaped_literals]).to be_truthy
+ end
+
+ it 'does not convert other characters to literals' do
+ markdown = %q(\→\A\a\ \3\φ\«)
+ expected = '\→\A\a\ \3\φ\«'
+
+ result = correct_html_included(markdown, expected)
+ expect(result[:escaped_literals]).to be_falsey
+ end
+
+ describe 'escaped characters are treated as regular characters and do not have their usual Markdown meanings' do
+ where(:markdown, :expected) do
+ %q(\*not emphasized*) | %q(<span>*</span>not emphasized*)
+ %q(\<br/> not a tag) | %q(<span>&lt;</span>br/&gt; not a tag)
+ %q!\[not a link](/foo)! | %q!<span>[</span>not a link](/foo)!
+ %q(\`not code`) | %q(<span>`</span>not code`)
+ %q(1\. not a list) | %q(1<span>.</span> not a list)
+ %q(\# not a heading) | %q(<span>#</span> not a heading)
+ %q(\[foo]: /url "not a reference") | %q(<span>[</span>foo]: /url "not a reference")
+ %q(\&ouml; not a character entity) | %q(<span>&amp;</span>ouml; not a character entity)
+ end
+
+ with_them do
+ it 'keeps them as literals' do
+ correct_html_included(markdown, expected)
+ end
+ end
+ end
+
+ it 'escapes the backslash itself but not the following character' do
+ markdown = %q(\\\\*emphasis*)
+ expected = %q(<span>\</span><em>emphasis</em>)
+
+ correct_html_included(markdown, expected)
+ end
+
+ it 'backslash at the end of the line is a hard line break' do
+ markdown = <<~MARKDOWN
+ foo\\
+ bar
+ MARKDOWN
+ expected = "foo<br>\nbar"
+
+ correct_html_included(markdown, expected)
+ end
+
+ describe 'backslash escapes do not work in code blocks, code spans, autolinks, or raw HTML' do
+ where(:markdown, :expected) do
+ %q(`` \[\` ``) | %q(<code>\[\`</code>)
+ %q( \[\]) | %Q(<code>\\[\\]\n</code>)
+ %Q(~~~\n\\[\\]\n~~~) | %Q(<code>\\[\\]\n</code>)
+ %q(<http://example.com?find=\*>) | %q(<a href="http://example.com?find=%5C*">http://example.com?find=\*</a>)
+ %q[<a href="/bar\/)">] | %q[<a href="/bar%5C/)">]
+ end
+
+ with_them do
+ it { correct_html_included(markdown, expected) }
+ end
+ end
+
+ describe 'backslash escapes work in all other contexts, including URLs and link titles, link references, and info strings in fenced code blocks' do
+ where(:markdown, :expected) do
+ %q![foo](/bar\* "ti\*tle")! | %q(<a href="/bar*" title="ti*tle">foo</a>)
+ %Q![foo]\n\n[foo]: /bar\\* "ti\\*tle"! | %q(<a href="/bar*" title="ti*tle">foo</a>)
+ %Q(``` foo\\+bar\nfoo\n```) | %Q(<code lang="foo+bar">foo\n</code>)
+ end
+
+ with_them do
+ it { correct_html_included(markdown, expected) }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/banzai/pipeline/pre_process_pipeline_spec.rb b/spec/lib/banzai/pipeline/pre_process_pipeline_spec.rb
index f0498f41b61..c628d8d5b41 100644
--- a/spec/lib/banzai/pipeline/pre_process_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/pre_process_pipeline_spec.rb
@@ -30,6 +30,6 @@ RSpec.describe Banzai::Pipeline::PreProcessPipeline do
result = described_class.call(text, limit: 12)
- expect(result[:output]).to eq('foo foo f…')
+ expect(result[:output]).to eq('foo foo f...')
end
end
diff --git a/spec/lib/bulk_imports/common/extractors/graphql_extractor_spec.rb b/spec/lib/bulk_imports/common/extractors/graphql_extractor_spec.rb
index 2abd3df20fd..80607485b6e 100644
--- a/spec/lib/bulk_imports/common/extractors/graphql_extractor_spec.rb
+++ b/spec/lib/bulk_imports/common/extractors/graphql_extractor_spec.rb
@@ -5,8 +5,18 @@ require 'spec_helper'
RSpec.describe BulkImports::Common::Extractors::GraphqlExtractor do
let(:graphql_client) { instance_double(BulkImports::Clients::Graphql) }
let(:import_entity) { create(:bulk_import_entity) }
- let(:response) { double(original_hash: { foo: :bar }) }
- let(:query) { { query: double(to_s: 'test', variables: {}) } }
+ let(:response) { double(original_hash: { 'data' => { 'foo' => 'bar' }, 'page_info' => {} }) }
+ let(:options) do
+ {
+ query: double(
+ to_s: 'test',
+ variables: {},
+ data_path: %w[data foo],
+ page_info_path: %w[data page_info]
+ )
+ }
+ end
+
let(:context) do
instance_double(
BulkImports::Pipeline::Context,
@@ -14,58 +24,20 @@ RSpec.describe BulkImports::Common::Extractors::GraphqlExtractor do
)
end
- subject { described_class.new(query) }
-
- before do
- allow(subject).to receive(:graphql_client).and_return(graphql_client)
- allow(graphql_client).to receive(:parse)
- end
+ subject { described_class.new(options) }
describe '#extract' do
before do
- allow(subject).to receive(:query_variables).and_return({})
- allow(graphql_client).to receive(:execute).and_return(response)
- end
-
- it 'returns original hash' do
- expect(subject.extract(context)).to eq({ foo: :bar })
- end
- end
-
- describe 'query variables' do
- before do
+ allow(subject).to receive(:graphql_client).and_return(graphql_client)
+ allow(graphql_client).to receive(:parse)
allow(graphql_client).to receive(:execute).and_return(response)
end
- context 'when variables are present' do
- let(:variables) { { foo: :bar } }
- let(:query) { { query: double(to_s: 'test', variables: variables) } }
-
- it 'builds graphql query variables for import entity' do
- expect(graphql_client).to receive(:execute).with(anything, variables)
-
- subject.extract(context).first
- end
- end
-
- context 'when no variables are present' do
- let(:query) { { query: double(to_s: 'test', variables: nil) } }
-
- it 'returns empty hash' do
- expect(graphql_client).to receive(:execute).with(anything, nil)
-
- subject.extract(context).first
- end
- end
-
- context 'when variables are empty hash' do
- let(:query) { { query: double(to_s: 'test', variables: {}) } }
-
- it 'makes graphql request with empty hash' do
- expect(graphql_client).to receive(:execute).with(anything, {})
+ it 'returns ExtractedData' do
+ extracted_data = subject.extract(context)
- subject.extract(context).first
- end
+ expect(extracted_data).to be_instance_of(BulkImports::Pipeline::ExtractedData)
+ expect(extracted_data.data).to contain_exactly('bar')
end
end
end
diff --git a/spec/lib/bulk_imports/common/loaders/entity_loader_spec.rb b/spec/lib/bulk_imports/common/loaders/entity_loader_spec.rb
index 4de7d95172f..57ffdfa9aee 100644
--- a/spec/lib/bulk_imports/common/loaders/entity_loader_spec.rb
+++ b/spec/lib/bulk_imports/common/loaders/entity_loader_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe BulkImports::Common::Loaders::EntityLoader do
it "creates entities for the given data" do
group = create(:group, path: "imported-group")
parent_entity = create(:bulk_import_entity, group: group, bulk_import: create(:bulk_import))
- context = instance_double(BulkImports::Pipeline::Context, entity: parent_entity)
+ context = BulkImports::Pipeline::Context.new(parent_entity)
data = {
source_type: :group_entity,
diff --git a/spec/lib/bulk_imports/common/transformers/hash_key_digger_spec.rb b/spec/lib/bulk_imports/common/transformers/hash_key_digger_spec.rb
deleted file mode 100644
index 2b33701653e..00000000000
--- a/spec/lib/bulk_imports/common/transformers/hash_key_digger_spec.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Common::Transformers::HashKeyDigger do
- describe '#transform' do
- it 'when the key_path is an array' do
- data = { foo: { bar: :value } }
- key_path = %i[foo bar]
- transformed = described_class.new(key_path: key_path).transform(nil, data)
-
- expect(transformed).to eq(:value)
- end
-
- it 'when the key_path is not an array' do
- data = { foo: { bar: :value } }
- key_path = :foo
- transformed = described_class.new(key_path: key_path).transform(nil, data)
-
- expect(transformed).to eq({ bar: :value })
- end
-
- it "when the data is not a hash" do
- expect { described_class.new(key_path: nil).transform(nil, nil) }
- .to raise_error(ArgumentError, "Given data must be a Hash")
- end
- end
-end
diff --git a/spec/lib/bulk_imports/common/transformers/underscorify_keys_transformer_spec.rb b/spec/lib/bulk_imports/common/transformers/underscorify_keys_transformer_spec.rb
deleted file mode 100644
index cdffa750694..00000000000
--- a/spec/lib/bulk_imports/common/transformers/underscorify_keys_transformer_spec.rb
+++ /dev/null
@@ -1,27 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Common::Transformers::UnderscorifyKeysTransformer do
- describe '#transform' do
- it 'deep underscorifies hash keys' do
- data = {
- 'fullPath' => 'Foo',
- 'snakeKeys' => {
- 'snakeCaseKey' => 'Bar',
- 'moreKeys' => {
- 'anotherSnakeCaseKey' => 'Test'
- }
- }
- }
-
- transformed_data = described_class.new.transform(nil, data)
-
- expect(transformed_data).to have_key('full_path')
- expect(transformed_data).to have_key('snake_keys')
- expect(transformed_data['snake_keys']).to have_key('snake_case_key')
- expect(transformed_data['snake_keys']).to have_key('more_keys')
- expect(transformed_data.dig('snake_keys', 'more_keys')).to have_key('another_snake_case_key')
- end
- end
-end
diff --git a/spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb b/spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb
new file mode 100644
index 00000000000..627247c04ab
--- /dev/null
+++ b/spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Groups::Extractors::SubgroupsExtractor do
+ describe '#extract' do
+ it 'returns ExtractedData response' do
+ bulk_import = create(:bulk_import)
+ create(:bulk_import_configuration, bulk_import: bulk_import)
+ entity = create(:bulk_import_entity, bulk_import: bulk_import)
+ response = [{ 'test' => 'group' }]
+ context = BulkImports::Pipeline::Context.new(entity)
+
+ allow_next_instance_of(BulkImports::Clients::Http) do |client|
+ allow(client).to receive(:each_page).and_return(response)
+ end
+
+ extracted_data = subject.extract(context)
+
+ expect(extracted_data).to be_instance_of(BulkImports::Pipeline::ExtractedData)
+ expect(extracted_data.data).to eq(response)
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/groups/graphql/get_group_query_spec.rb b/spec/lib/bulk_imports/groups/graphql/get_group_query_spec.rb
new file mode 100644
index 00000000000..78f99c19346
--- /dev/null
+++ b/spec/lib/bulk_imports/groups/graphql/get_group_query_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Groups::Graphql::GetGroupQuery do
+ describe '#variables' do
+ let(:entity) { double(source_full_path: 'test') }
+
+ it 'returns query variables based on entity information' do
+ expected = { full_path: entity.source_full_path }
+
+ expect(described_class.variables(entity)).to eq(expected)
+ end
+ end
+
+ describe '#data_path' do
+ it 'returns data path' do
+ expected = %w[data group]
+
+ expect(described_class.data_path).to eq(expected)
+ end
+ end
+
+ describe '#page_info_path' do
+ it 'returns pagination information path' do
+ expected = %w[data group page_info]
+
+ expect(described_class.page_info_path).to eq(expected)
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb b/spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb
new file mode 100644
index 00000000000..2d8f4fb399a
--- /dev/null
+++ b/spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Groups::Graphql::GetLabelsQuery do
+ describe '#variables' do
+ let(:entity) { double(source_full_path: 'test', next_page_for: 'next_page') }
+
+ it 'returns query variables based on entity information' do
+ expected = { full_path: entity.source_full_path, cursor: entity.next_page_for }
+
+ expect(described_class.variables(entity)).to eq(expected)
+ end
+ end
+
+ describe '#data_path' do
+ it 'returns data path' do
+ expected = %w[data group labels nodes]
+
+ expect(described_class.data_path).to eq(expected)
+ end
+ end
+
+ describe '#page_info_path' do
+ it 'returns pagination information path' do
+ expected = %w[data group labels page_info]
+
+ expect(described_class.page_info_path).to eq(expected)
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb b/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb
index b14dfc615a9..183292722d2 100644
--- a/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb
+++ b/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb
@@ -7,21 +7,20 @@ RSpec.describe BulkImports::Groups::Loaders::GroupLoader do
let(:user) { create(:user) }
let(:data) { { foo: :bar } }
let(:service_double) { instance_double(::Groups::CreateService) }
- let(:entity) { create(:bulk_import_entity) }
- let(:context) do
- instance_double(
- BulkImports::Pipeline::Context,
- entity: entity,
- current_user: user
- )
- end
+ let(:bulk_import) { create(:bulk_import, user: user) }
+ let(:entity) { create(:bulk_import_entity, bulk_import: bulk_import) }
+ let(:context) { BulkImports::Pipeline::Context.new(entity) }
subject { described_class.new }
context 'when user can create group' do
shared_examples 'calls Group Create Service to create a new group' do
it 'calls Group Create Service to create a new group' do
- expect(::Groups::CreateService).to receive(:new).with(context.current_user, data).and_return(service_double)
+ expect(::Groups::CreateService)
+ .to receive(:new)
+ .with(context.current_user, data)
+ .and_return(service_double)
+
expect(service_double).to receive(:execute)
expect(entity).to receive(:update!)
diff --git a/spec/lib/bulk_imports/groups/loaders/labels_loader_spec.rb b/spec/lib/bulk_imports/groups/loaders/labels_loader_spec.rb
new file mode 100644
index 00000000000..ac2f9c8cb1d
--- /dev/null
+++ b/spec/lib/bulk_imports/groups/loaders/labels_loader_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Groups::Loaders::LabelsLoader do
+ describe '#load' do
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:entity) { create(:bulk_import_entity, group: group) }
+ let(:context) { BulkImports::Pipeline::Context.new(entity) }
+
+ let(:data) do
+ {
+ 'title' => 'label',
+ 'description' => 'description',
+ 'color' => '#FFFFFF'
+ }
+ end
+
+ it 'creates the label' do
+ expect { subject.load(context, data) }.to change(Label, :count).by(1)
+
+ label = group.labels.first
+
+ expect(label.title).to eq(data['title'])
+ expect(label.description).to eq(data['description'])
+ expect(label.color).to eq(data['color'])
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb
index 1a91f3d7a78..1a6a955544a 100644
--- a/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb
@@ -6,42 +6,33 @@ RSpec.describe BulkImports::Groups::Pipelines::GroupPipeline do
describe '#run' do
let(:user) { create(:user) }
let(:parent) { create(:group) }
+ let(:bulk_import) { create(:bulk_import, user: user) }
let(:entity) do
create(
:bulk_import_entity,
+ bulk_import: bulk_import,
source_full_path: 'source/full/path',
destination_name: 'My Destination Group',
destination_namespace: parent.full_path
)
end
- let(:context) do
- BulkImports::Pipeline::Context.new(
- current_user: user,
- entity: entity
- )
- end
+ let(:context) { BulkImports::Pipeline::Context.new(entity) }
let(:group_data) do
{
- 'data' => {
- 'group' => {
- 'name' => 'source_name',
- 'fullPath' => 'source/full/path',
- 'visibility' => 'private',
- 'projectCreationLevel' => 'developer',
- 'subgroupCreationLevel' => 'maintainer',
- 'description' => 'Group Description',
- 'emailsDisabled' => true,
- 'lfsEnabled' => false,
- 'mentionsDisabled' => true
- }
- }
+ 'name' => 'source_name',
+ 'full_path' => 'source/full/path',
+ 'visibility' => 'private',
+ 'project_creation_level' => 'developer',
+ 'subgroup_creation_level' => 'maintainer',
+ 'description' => 'Group Description',
+ 'emails_disabled' => true,
+ 'lfs_enabled' => false,
+ 'mentions_disabled' => true
}
end
- subject { described_class.new }
-
before do
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
allow(extractor).to receive(:extract).and_return([group_data])
@@ -60,13 +51,13 @@ RSpec.describe BulkImports::Groups::Pipelines::GroupPipeline do
expect(imported_group).not_to be_nil
expect(imported_group.parent).to eq(parent)
expect(imported_group.path).to eq(group_path)
- expect(imported_group.description).to eq(group_data.dig('data', 'group', 'description'))
- expect(imported_group.visibility).to eq(group_data.dig('data', 'group', 'visibility'))
- expect(imported_group.project_creation_level).to eq(Gitlab::Access.project_creation_string_options[group_data.dig('data', 'group', 'projectCreationLevel')])
- expect(imported_group.subgroup_creation_level).to eq(Gitlab::Access.subgroup_creation_string_options[group_data.dig('data', 'group', 'subgroupCreationLevel')])
- expect(imported_group.lfs_enabled?).to eq(group_data.dig('data', 'group', 'lfsEnabled'))
- expect(imported_group.emails_disabled?).to eq(group_data.dig('data', 'group', 'emailsDisabled'))
- expect(imported_group.mentions_disabled?).to eq(group_data.dig('data', 'group', 'mentionsDisabled'))
+ expect(imported_group.description).to eq(group_data['description'])
+ expect(imported_group.visibility).to eq(group_data['visibility'])
+ expect(imported_group.project_creation_level).to eq(Gitlab::Access.project_creation_string_options[group_data['project_creation_level']])
+ expect(imported_group.subgroup_creation_level).to eq(Gitlab::Access.subgroup_creation_string_options[group_data['subgroup_creation_level']])
+ expect(imported_group.lfs_enabled?).to eq(group_data['lfs_enabled'])
+ expect(imported_group.emails_disabled?).to eq(group_data['emails_disabled'])
+ expect(imported_group.mentions_disabled?).to eq(group_data['mentions_disabled'])
end
end
@@ -87,8 +78,6 @@ RSpec.describe BulkImports::Groups::Pipelines::GroupPipeline do
it 'has transformers' do
expect(described_class.transformers)
.to contain_exactly(
- { klass: BulkImports::Common::Transformers::HashKeyDigger, options: { key_path: %w[data group] } },
- { klass: BulkImports::Common::Transformers::UnderscorifyKeysTransformer, options: nil },
{ klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil },
{ klass: BulkImports::Groups::Transformers::GroupAttributesTransformer, options: nil }
)
diff --git a/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb
new file mode 100644
index 00000000000..7c96967971f
--- /dev/null
+++ b/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb
@@ -0,0 +1,116 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:cursor) { 'cursor' }
+ let(:entity) do
+ create(
+ :bulk_import_entity,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Group',
+ destination_namespace: group.full_path,
+ group: group
+ )
+ end
+
+ let(:context) { BulkImports::Pipeline::Context.new(entity) }
+
+ def extractor_data(title:, has_next_page:, cursor: nil)
+ data = [
+ {
+ 'title' => title,
+ 'description' => 'desc',
+ 'color' => '#428BCA'
+ }
+ ]
+
+ page_info = {
+ 'end_cursor' => cursor,
+ 'has_next_page' => has_next_page
+ }
+
+ BulkImports::Pipeline::ExtractedData.new(data: data, page_info: page_info)
+ end
+
+ describe '#run' do
+ it 'imports group labels' do
+ first_page = extractor_data(title: 'label1', has_next_page: true, cursor: cursor)
+ last_page = extractor_data(title: 'label2', has_next_page: false)
+
+ allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
+ allow(extractor)
+ .to receive(:extract)
+ .and_return(first_page, last_page)
+ end
+
+ expect { subject.run(context) }.to change(Label, :count).by(2)
+
+ label = group.labels.order(:created_at).last
+
+ expect(label.title).to eq('label2')
+ expect(label.description).to eq('desc')
+ expect(label.color).to eq('#428BCA')
+ end
+ end
+
+ describe '#after_run' do
+ context 'when extracted data has next page' do
+ it 'updates tracker information and runs pipeline again' do
+ data = extractor_data(title: 'label', has_next_page: true, cursor: cursor)
+
+ expect(subject).to receive(:run).with(context)
+
+ subject.after_run(context, data)
+
+ tracker = entity.trackers.find_by(relation: :labels)
+
+ expect(tracker.has_next_page).to eq(true)
+ expect(tracker.next_page).to eq(cursor)
+ end
+ end
+
+ context 'when extracted data has no next page' do
+ it 'updates tracker information and does not run pipeline' do
+ data = extractor_data(title: 'label', has_next_page: false)
+
+ expect(subject).not_to receive(:run).with(context)
+
+ subject.after_run(context, data)
+
+ tracker = entity.trackers.find_by(relation: :labels)
+
+ expect(tracker.has_next_page).to eq(false)
+ expect(tracker.next_page).to be_nil
+ end
+ end
+ end
+
+ describe 'pipeline parts' do
+ it { expect(described_class).to include_module(BulkImports::Pipeline) }
+ it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
+
+ it 'has extractors' do
+ expect(described_class.get_extractor)
+ .to eq(
+ klass: BulkImports::Common::Extractors::GraphqlExtractor,
+ options: {
+ query: BulkImports::Groups::Graphql::GetLabelsQuery
+ }
+ )
+ end
+
+ it 'has transformers' do
+ expect(described_class.transformers)
+ .to contain_exactly(
+ { klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil }
+ )
+ end
+
+ it 'has loaders' do
+ expect(described_class.get_loader).to eq(klass: BulkImports::Groups::Loaders::LabelsLoader, options: nil)
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb
index e5a8ed7f47d..71d1ffdeba3 100644
--- a/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb
@@ -14,13 +14,7 @@ RSpec.describe BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline do
)
end
- let(:context) do
- instance_double(
- BulkImports::Pipeline::Context,
- current_user: user,
- entity: parent_entity
- )
- end
+ let(:context) { BulkImports::Pipeline::Context.new(parent_entity) }
let(:subgroup_data) do
[
diff --git a/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb b/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb
index 28a7859915d..5a7a51675d6 100644
--- a/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb
+++ b/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb
@@ -7,22 +7,18 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer do
let(:user) { create(:user) }
let(:parent) { create(:group) }
let(:group) { create(:group, name: 'My Source Group', parent: parent) }
+ let(:bulk_import) { create(:bulk_import, user: user) }
let(:entity) do
- instance_double(
- BulkImports::Entity,
+ create(
+ :bulk_import_entity,
+ bulk_import: bulk_import,
source_full_path: 'source/full/path',
destination_name: group.name,
destination_namespace: parent.full_path
)
end
- let(:context) do
- instance_double(
- BulkImports::Pipeline::Context,
- current_user: user,
- entity: entity
- )
- end
+ let(:context) { BulkImports::Pipeline::Context.new(entity) }
let(:data) do
{
@@ -85,16 +81,16 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer do
end
context 'when destination namespace is user namespace' do
- let(:entity) do
- instance_double(
- BulkImports::Entity,
+ it 'does not set parent id' do
+ entity = create(
+ :bulk_import_entity,
+ bulk_import: bulk_import,
source_full_path: 'source/full/path',
destination_name: group.name,
destination_namespace: user.namespace.full_path
)
- end
+ context = BulkImports::Pipeline::Context.new(entity)
- it 'does not set parent id' do
transformed_data = subject.transform(context, data)
expect(transformed_data).not_to have_key('parent_id')
diff --git a/spec/lib/bulk_imports/importers/group_importer_spec.rb b/spec/lib/bulk_imports/importers/group_importer_spec.rb
index 87baf1b8026..0884a51ce7d 100644
--- a/spec/lib/bulk_imports/importers/group_importer_spec.rb
+++ b/spec/lib/bulk_imports/importers/group_importer_spec.rb
@@ -7,23 +7,18 @@ RSpec.describe BulkImports::Importers::GroupImporter do
let(:bulk_import) { create(:bulk_import) }
let(:bulk_import_entity) { create(:bulk_import_entity, :started, bulk_import: bulk_import) }
let(:bulk_import_configuration) { create(:bulk_import_configuration, bulk_import: bulk_import) }
- let(:context) do
- BulkImports::Pipeline::Context.new(
- current_user: user,
- entity: bulk_import_entity,
- configuration: bulk_import_configuration
- )
- end
-
- subject { described_class.new(bulk_import_entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(bulk_import_entity) }
before do
allow(BulkImports::Pipeline::Context).to receive(:new).and_return(context)
end
+ subject { described_class.new(bulk_import_entity) }
+
describe '#execute' do
it 'starts the entity and runs its pipelines' do
expect_to_run_pipeline BulkImports::Groups::Pipelines::GroupPipeline, context: context
+ expect_to_run_pipeline BulkImports::Groups::Pipelines::LabelsPipeline, context: context
expect_to_run_pipeline('EE::BulkImports::Groups::Pipelines::EpicsPipeline'.constantize, context: context) if Gitlab.ee?
expect_to_run_pipeline BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline, context: context
diff --git a/spec/lib/bulk_imports/pipeline/context_spec.rb b/spec/lib/bulk_imports/pipeline/context_spec.rb
index e9af6313ca4..c8c3fe3a861 100644
--- a/spec/lib/bulk_imports/pipeline/context_spec.rb
+++ b/spec/lib/bulk_imports/pipeline/context_spec.rb
@@ -3,25 +3,29 @@
require 'spec_helper'
RSpec.describe BulkImports::Pipeline::Context do
- describe '#initialize' do
- it 'initializes with permitted attributes' do
- args = {
- current_user: create(:user),
- entity: create(:bulk_import_entity),
- configuration: create(:bulk_import_configuration)
- }
+ let(:group) { instance_double(Group) }
+ let(:user) { instance_double(User) }
+ let(:bulk_import) { instance_double(BulkImport, user: user, configuration: :config) }
- context = described_class.new(args)
+ let(:entity) do
+ instance_double(
+ BulkImports::Entity,
+ bulk_import: bulk_import,
+ group: group
+ )
+ end
+
+ subject { described_class.new(entity) }
- args.each do |k, v|
- expect(context.public_send(k)).to eq(v)
- end
- end
+ describe '#group' do
+ it { expect(subject.group).to eq(group) }
+ end
+
+ describe '#current_user' do
+ it { expect(subject.current_user).to eq(user) }
+ end
- context 'when invalid argument is passed' do
- it 'raises NoMethodError' do
- expect { described_class.new(test: 'test').test }.to raise_exception(NoMethodError)
- end
- end
+ describe '#configuration' do
+ it { expect(subject.configuration).to eq(bulk_import.configuration) }
end
end
diff --git a/spec/lib/bulk_imports/pipeline/extracted_data_spec.rb b/spec/lib/bulk_imports/pipeline/extracted_data_spec.rb
new file mode 100644
index 00000000000..25c5178227a
--- /dev/null
+++ b/spec/lib/bulk_imports/pipeline/extracted_data_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Pipeline::ExtractedData do
+ let(:data) { 'data' }
+ let(:has_next_page) { true }
+ let(:cursor) { 'cursor' }
+ let(:page_info) do
+ {
+ 'has_next_page' => has_next_page,
+ 'end_cursor' => cursor
+ }
+ end
+
+ subject { described_class.new(data: data, page_info: page_info) }
+
+ describe '#has_next_page?' do
+ context 'when next page is present' do
+ it 'returns true' do
+ expect(subject.has_next_page?).to eq(true)
+ end
+ end
+
+ context 'when next page is not present' do
+ let(:has_next_page) { false }
+
+ it 'returns false' do
+ expect(subject.has_next_page?).to eq(false)
+ end
+ end
+ end
+
+ describe '#next_page' do
+ it 'returns next page cursor information' do
+ expect(subject.next_page).to eq(cursor)
+ end
+ end
+
+ describe '#each' do
+ context 'when block is present' do
+ it 'yields each data item' do
+ expect { |b| subject.each(&b) }.to yield_control
+ end
+ end
+
+ context 'when block is not present' do
+ it 'returns enumerator' do
+ expect(subject.each).to be_instance_of(Enumerator)
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/pipeline/runner_spec.rb b/spec/lib/bulk_imports/pipeline/runner_spec.rb
index 60833e83dcc..3f6a172beee 100644
--- a/spec/lib/bulk_imports/pipeline/runner_spec.rb
+++ b/spec/lib/bulk_imports/pipeline/runner_spec.rb
@@ -45,42 +45,71 @@ RSpec.describe BulkImports::Pipeline::Runner do
end
context 'when entity is not marked as failed' do
- let(:context) do
- instance_double(
- BulkImports::Pipeline::Context,
- entity: instance_double(BulkImports::Entity, id: 1, source_type: 'group', failed?: false)
- )
- end
+ let(:entity) { create(:bulk_import_entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(entity) }
it 'runs pipeline extractor, transformer, loader' do
- entries = [{ foo: :bar }]
+ extracted_data = BulkImports::Pipeline::ExtractedData.new(data: { foo: :bar })
expect_next_instance_of(BulkImports::Extractor) do |extractor|
- expect(extractor).to receive(:extract).with(context).and_return(entries)
+ expect(extractor)
+ .to receive(:extract)
+ .with(context)
+ .and_return(extracted_data)
end
expect_next_instance_of(BulkImports::Transformer) do |transformer|
- expect(transformer).to receive(:transform).with(context, entries.first).and_return(entries.first)
+ expect(transformer)
+ .to receive(:transform)
+ .with(context, extracted_data.data.first)
+ .and_return(extracted_data.data.first)
end
expect_next_instance_of(BulkImports::Loader) do |loader|
- expect(loader).to receive(:load).with(context, entries.first)
+ expect(loader)
+ .to receive(:load)
+ .with(context, extracted_data.data.first)
end
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger).to receive(:info)
.with(
+ bulk_import_entity_id: entity.id,
+ bulk_import_entity_type: 'group_entity',
message: 'Pipeline started',
+ pipeline_class: 'BulkImports::MyPipeline'
+ )
+ expect(logger).to receive(:info)
+ .with(
+ bulk_import_entity_id: entity.id,
+ bulk_import_entity_type: 'group_entity',
pipeline_class: 'BulkImports::MyPipeline',
- bulk_import_entity_id: 1,
- bulk_import_entity_type: 'group'
+ pipeline_step: :extractor,
+ step_class: 'BulkImports::Extractor'
)
expect(logger).to receive(:info)
- .with(bulk_import_entity_id: 1, bulk_import_entity_type: 'group', extractor: 'BulkImports::Extractor')
+ .with(
+ bulk_import_entity_id: entity.id,
+ bulk_import_entity_type: 'group_entity',
+ pipeline_class: 'BulkImports::MyPipeline',
+ pipeline_step: :transformer,
+ step_class: 'BulkImports::Transformer'
+ )
expect(logger).to receive(:info)
- .with(bulk_import_entity_id: 1, bulk_import_entity_type: 'group', transformer: 'BulkImports::Transformer')
+ .with(
+ bulk_import_entity_id: entity.id,
+ bulk_import_entity_type: 'group_entity',
+ pipeline_class: 'BulkImports::MyPipeline',
+ pipeline_step: :loader,
+ step_class: 'BulkImports::Loader'
+ )
expect(logger).to receive(:info)
- .with(bulk_import_entity_id: 1, bulk_import_entity_type: 'group', loader: 'BulkImports::Loader')
+ .with(
+ bulk_import_entity_id: entity.id,
+ bulk_import_entity_type: 'group_entity',
+ message: 'Pipeline finished',
+ pipeline_class: 'BulkImports::MyPipeline'
+ )
end
BulkImports::MyPipeline.new.run(context)
@@ -88,7 +117,7 @@ RSpec.describe BulkImports::Pipeline::Runner do
context 'when exception is raised' do
let(:entity) { create(:bulk_import_entity, :created) }
- let(:context) { BulkImports::Pipeline::Context.new(entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(entity) }
before do
allow_next_instance_of(BulkImports::Extractor) do |extractor|
@@ -103,6 +132,7 @@ RSpec.describe BulkImports::Pipeline::Runner do
expect(failure).to be_present
expect(failure.pipeline_class).to eq('BulkImports::MyPipeline')
+ expect(failure.pipeline_step).to eq('extractor')
expect(failure.exception_class).to eq('StandardError')
expect(failure.exception_message).to eq('Error!')
end
@@ -144,21 +174,19 @@ RSpec.describe BulkImports::Pipeline::Runner do
end
context 'when entity is marked as failed' do
- let(:context) do
- instance_double(
- BulkImports::Pipeline::Context,
- entity: instance_double(BulkImports::Entity, id: 1, source_type: 'group', failed?: true)
- )
- end
+ let(:entity) { create(:bulk_import_entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(entity) }
it 'logs and returns without execution' do
+ allow(entity).to receive(:failed?).and_return(true)
+
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger).to receive(:info)
.with(
message: 'Skipping due to failed pipeline status',
pipeline_class: 'BulkImports::MyPipeline',
- bulk_import_entity_id: 1,
- bulk_import_entity_type: 'group'
+ bulk_import_entity_id: entity.id,
+ bulk_import_entity_type: 'group_entity'
)
end
diff --git a/spec/lib/gitlab/access/branch_protection_spec.rb b/spec/lib/gitlab/access/branch_protection_spec.rb
index 9b736a30c7e..44c30d1f596 100644
--- a/spec/lib/gitlab/access/branch_protection_spec.rb
+++ b/spec/lib/gitlab/access/branch_protection_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe Gitlab::Access::BranchProtection do
- describe '#any?' do
- using RSpec::Parameterized::TableSyntax
+ using RSpec::Parameterized::TableSyntax
+ describe '#any?' do
where(:level, :result) do
Gitlab::Access::PROTECTION_NONE | false
Gitlab::Access::PROTECTION_DEV_CAN_PUSH | true
@@ -19,8 +19,6 @@ RSpec.describe Gitlab::Access::BranchProtection do
end
describe '#developer_can_push?' do
- using RSpec::Parameterized::TableSyntax
-
where(:level, :result) do
Gitlab::Access::PROTECTION_NONE | false
Gitlab::Access::PROTECTION_DEV_CAN_PUSH | true
@@ -36,8 +34,6 @@ RSpec.describe Gitlab::Access::BranchProtection do
end
describe '#developer_can_merge?' do
- using RSpec::Parameterized::TableSyntax
-
where(:level, :result) do
Gitlab::Access::PROTECTION_NONE | false
Gitlab::Access::PROTECTION_DEV_CAN_PUSH | false
@@ -53,8 +49,6 @@ RSpec.describe Gitlab::Access::BranchProtection do
end
describe '#fully_protected?' do
- using RSpec::Parameterized::TableSyntax
-
where(:level, :result) do
Gitlab::Access::PROTECTION_NONE | false
Gitlab::Access::PROTECTION_DEV_CAN_PUSH | false
diff --git a/spec/lib/gitlab/auth/ip_rate_limiter_spec.rb b/spec/lib/gitlab/auth/ip_rate_limiter_spec.rb
index 3d782272d7e..f23fdd3fbcb 100644
--- a/spec/lib/gitlab/auth/ip_rate_limiter_spec.rb
+++ b/spec/lib/gitlab/auth/ip_rate_limiter_spec.rb
@@ -19,6 +19,9 @@ RSpec.describe Gitlab::Auth::IpRateLimiter, :use_clean_rails_memory_store_cachin
before do
stub_rack_attack_setting(options)
+ Rack::Attack.reset!
+ Rack::Attack.clear_configuration
+ Gitlab::RackAttack.configure(Rack::Attack)
end
after do
diff --git a/spec/lib/gitlab/auth/u2f_webauthn_converter_spec.rb b/spec/lib/gitlab/auth/u2f_webauthn_converter_spec.rb
new file mode 100644
index 00000000000..deddc7f5294
--- /dev/null
+++ b/spec/lib/gitlab/auth/u2f_webauthn_converter_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Auth::U2fWebauthnConverter do
+ let_it_be(:u2f_registration) do
+ device = U2F::FakeU2F.new(FFaker::BaconIpsum.characters(5))
+ create(:u2f_registration, name: 'u2f_device',
+ certificate: Base64.strict_encode64(device.cert_raw),
+ key_handle: U2F.urlsafe_encode64(device.key_handle_raw),
+ public_key: Base64.strict_encode64(device.origin_public_key_raw))
+ end
+
+ it 'converts u2f registration' do
+ webauthn_credential = WebAuthn::U2fMigrator.new(
+ app_id: Gitlab.config.gitlab.url,
+ certificate: u2f_registration.certificate,
+ key_handle: u2f_registration.key_handle,
+ public_key: u2f_registration.public_key,
+ counter: u2f_registration.counter
+ ).credential
+
+ converted_webauthn = described_class.new(u2f_registration).convert
+
+ expect(converted_webauthn).to(
+ include(user_id: u2f_registration.user_id,
+ credential_xid: Base64.strict_encode64(webauthn_credential.id)))
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/remove_duplicate_vulnerabilities_findings_spec.rb b/spec/lib/gitlab/background_migration/remove_duplicate_vulnerabilities_findings_spec.rb
new file mode 100644
index 00000000000..47e1d4620cd
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/remove_duplicate_vulnerabilities_findings_spec.rb
@@ -0,0 +1,135 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::RemoveDuplicateVulnerabilitiesFindings do
+ let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
+ let(:users) { table(:users) }
+ let(:user) { create_user! }
+ let(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) }
+ let(:scanners) { table(:vulnerability_scanners) }
+ let!(:scanner) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
+ let!(:scanner2) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') }
+ let!(:scanner3) { scanners.create!(project_id: project.id, external_id: 'test 3', name: 'test scanner 3') }
+ let!(:unrelated_scanner) { scanners.create!(project_id: project.id, external_id: 'unrelated_scanner', name: 'unrelated scanner') }
+ let(:vulnerabilities) { table(:vulnerabilities) }
+ let(:vulnerability_findings) { table(:vulnerability_occurrences) }
+ let(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
+ let(:vulnerability_identifier) do
+ vulnerability_identifiers.create!(
+ project_id: project.id,
+ external_type: 'vulnerability-identifier',
+ external_id: 'vulnerability-identifier',
+ fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a',
+ name: 'vulnerability identifier')
+ end
+
+ let!(:first_finding) do
+ create_finding!(
+ uuid: "test1",
+ vulnerability_id: nil,
+ report_type: 0,
+ location_fingerprint: '2bda3014914481791847d8eca38d1a8d13b6ad76',
+ primary_identifier_id: vulnerability_identifier.id,
+ scanner_id: scanner.id,
+ project_id: project.id
+ )
+ end
+
+ let!(:first_duplicate) do
+ create_finding!(
+ uuid: "test2",
+ vulnerability_id: nil,
+ report_type: 0,
+ location_fingerprint: '2bda3014914481791847d8eca38d1a8d13b6ad76',
+ primary_identifier_id: vulnerability_identifier.id,
+ scanner_id: scanner2.id,
+ project_id: project.id
+ )
+ end
+
+ let!(:second_duplicate) do
+ create_finding!(
+ uuid: "test3",
+ vulnerability_id: nil,
+ report_type: 0,
+ location_fingerprint: '2bda3014914481791847d8eca38d1a8d13b6ad76',
+ primary_identifier_id: vulnerability_identifier.id,
+ scanner_id: scanner3.id,
+ project_id: project.id
+ )
+ end
+
+ let!(:unrelated_finding) do
+ create_finding!(
+ uuid: "unreleated_finding",
+ vulnerability_id: nil,
+ report_type: 1,
+ location_fingerprint: 'random_location_fingerprint',
+ primary_identifier_id: vulnerability_identifier.id,
+ scanner_id: unrelated_scanner.id,
+ project_id: project.id
+ )
+ end
+
+ subject { described_class.new.perform(first_finding.id, unrelated_finding.id) }
+
+ before do
+ stub_const("#{described_class}::DELETE_BATCH_SIZE", 1)
+ end
+
+ it "removes entries which would result in duplicate UUIDv5" do
+ expect(vulnerability_findings.count).to eq(4)
+
+ expect { subject }.to change { vulnerability_findings.count }.from(4).to(2)
+
+ expect(vulnerability_findings.pluck(:id)).to eq([second_duplicate.id, unrelated_finding.id])
+ end
+
+ private
+
+ def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
+ vulnerabilities.create!(
+ project_id: project_id,
+ author_id: author_id,
+ title: title,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type
+ )
+ end
+
+ # rubocop:disable Metrics/ParameterLists
+ def create_finding!(
+ vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:,
+ name: "test", severity: 7, confidence: 7, report_type: 0,
+ project_fingerprint: '123qweasdzxc', location_fingerprint: 'test',
+ metadata_version: 'test', raw_metadata: 'test', uuid: 'test')
+ vulnerability_findings.create!(
+ vulnerability_id: vulnerability_id,
+ project_id: project_id,
+ name: name,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type,
+ project_fingerprint: project_fingerprint,
+ scanner_id: scanner_id,
+ primary_identifier_id: primary_identifier_id,
+ location_fingerprint: location_fingerprint,
+ metadata_version: metadata_version,
+ raw_metadata: raw_metadata,
+ uuid: uuid
+ )
+ end
+ # rubocop:enable Metrics/ParameterLists
+
+ def create_user!(name: "Example User", email: "user@example.com", user_type: nil, created_at: Time.zone.now, confirmed_at: Time.zone.now)
+ users.create!(
+ name: name,
+ email: email,
+ username: name,
+ projects_limit: 0,
+ user_type: user_type,
+ confirmed_at: confirmed_at
+ )
+ end
+end
diff --git a/spec/lib/gitlab/background_migration_spec.rb b/spec/lib/gitlab/background_migration_spec.rb
index 052a01a8dd8..5b20572578c 100644
--- a/spec/lib/gitlab/background_migration_spec.rb
+++ b/spec/lib/gitlab/background_migration_spec.rb
@@ -55,7 +55,7 @@ RSpec.describe Gitlab::BackgroundMigration do
expect(described_class).to receive(:perform)
.with('Foo', [10, 20])
- described_class.steal('Foo') { |(arg1, arg2)| arg1 == 10 && arg2 == 20 }
+ described_class.steal('Foo') { |job| job.args.second.first == 10 && job.args.second.second == 20 }
end
it 'does not steal jobs that do not match the predicate' do
diff --git a/spec/lib/gitlab/changelog/committer_spec.rb b/spec/lib/gitlab/changelog/committer_spec.rb
new file mode 100644
index 00000000000..f0d6bc2b6b5
--- /dev/null
+++ b/spec/lib/gitlab/changelog/committer_spec.rb
@@ -0,0 +1,128 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Changelog::Committer do
+ let(:project) { create(:project, :repository) }
+ let(:user) { project.creator }
+ let(:committer) { described_class.new(project, user) }
+ let(:config) { Gitlab::Changelog::Config.new(project) }
+
+ describe '#commit' do
+ context "when the release isn't in the changelog" do
+ it 'commits the changes' do
+ release = Gitlab::Changelog::Release
+ .new(version: '1.0.0', date: Time.utc(2020, 1, 1), config: config)
+
+ committer.commit(
+ release: release,
+ file: 'CHANGELOG.md',
+ branch: 'master',
+ message: 'Test commit'
+ )
+
+ content = project.repository.blob_at('master', 'CHANGELOG.md').data
+
+ expect(content).to eq(<<~MARKDOWN)
+ ## 1.0.0 (2020-01-01)
+
+ No changes.
+ MARKDOWN
+ end
+ end
+
+ context 'when the release is already in the changelog' do
+ it "doesn't commit the changes" do
+ release = Gitlab::Changelog::Release
+ .new(version: '1.0.0', date: Time.utc(2020, 1, 1), config: config)
+
+ 2.times do
+ committer.commit(
+ release: release,
+ file: 'CHANGELOG.md',
+ branch: 'master',
+ message: 'Test commit'
+ )
+ end
+
+ content = project.repository.blob_at('master', 'CHANGELOG.md').data
+
+ expect(content).to eq(<<~MARKDOWN)
+ ## 1.0.0 (2020-01-01)
+
+ No changes.
+ MARKDOWN
+ end
+ end
+
+ context 'when committing the changes fails' do
+ it 'retries the operation' do
+ release = Gitlab::Changelog::Release
+ .new(version: '1.0.0', date: Time.utc(2020, 1, 1), config: config)
+
+ service = instance_spy(Files::MultiService)
+ errored = false
+
+ allow(Files::MultiService)
+ .to receive(:new)
+ .and_return(service)
+
+ allow(service).to receive(:execute) do
+ if errored
+ { status: :success }
+ else
+ errored = true
+ { status: :error }
+ end
+ end
+
+ expect do
+ committer.commit(
+ release: release,
+ file: 'CHANGELOG.md',
+ branch: 'master',
+ message: 'Test commit'
+ )
+ end.not_to raise_error
+ end
+ end
+
+ context "when the changelog changes before saving the changes" do
+ it 'raises a CommitError' do
+ release1 = Gitlab::Changelog::Release
+ .new(version: '1.0.0', date: Time.utc(2020, 1, 1), config: config)
+
+ release2 = Gitlab::Changelog::Release
+ .new(version: '2.0.0', date: Time.utc(2020, 1, 1), config: config)
+
+ # This creates the initial commit we'll later use to see if the
+ # changelog changed before saving our changes.
+ committer.commit(
+ release: release1,
+ file: 'CHANGELOG.md',
+ branch: 'master',
+ message: 'Initial commit'
+ )
+
+ allow(Gitlab::Git::Commit)
+ .to receive(:last_for_path)
+ .with(
+ project.repository,
+ 'master',
+ 'CHANGELOG.md',
+ literal_pathspec: true
+ )
+ .and_return(double(:commit, sha: 'foo'))
+
+ expect do
+ committer.commit(
+ release: release2,
+ file: 'CHANGELOG.md',
+ branch: 'master',
+ message: 'Test commit'
+ )
+ end.to raise_error(described_class::CommitError)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/changelog/config_spec.rb b/spec/lib/gitlab/changelog/config_spec.rb
new file mode 100644
index 00000000000..adf82fa3ac2
--- /dev/null
+++ b/spec/lib/gitlab/changelog/config_spec.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Changelog::Config do
+ let(:project) { build_stubbed(:project) }
+
+ describe '.from_git' do
+ it 'retrieves the configuration from Git' do
+ allow(project.repository)
+ .to receive(:changelog_config)
+ .and_return("---\ndate_format: '%Y'")
+
+ expect(described_class)
+ .to receive(:from_hash)
+ .with(project, 'date_format' => '%Y')
+
+ described_class.from_git(project)
+ end
+
+ it 'returns the default configuration when no YAML file exists in Git' do
+ allow(project.repository)
+ .to receive(:changelog_config)
+ .and_return(nil)
+
+ expect(described_class)
+ .to receive(:new)
+ .with(project)
+
+ described_class.from_git(project)
+ end
+ end
+
+ describe '.from_hash' do
+ it 'sets the configuration according to a Hash' do
+ config = described_class.from_hash(
+ project,
+ 'date_format' => 'foo',
+ 'template' => 'bar',
+ 'categories' => { 'foo' => 'bar' }
+ )
+
+ expect(config.date_format).to eq('foo')
+ expect(config.template).to be_instance_of(Gitlab::Changelog::Template::Template)
+ expect(config.categories).to eq({ 'foo' => 'bar' })
+ end
+
+ it 'raises ConfigError when the categories are not a Hash' do
+ expect { described_class.from_hash(project, 'categories' => 10) }
+ .to raise_error(described_class::ConfigError)
+ end
+ end
+
+ describe '#contributor?' do
+ it 'returns true if a user is a contributor' do
+ user = build_stubbed(:author)
+
+ allow(project.team).to receive(:contributor?).with(user).and_return(true)
+
+ expect(described_class.new(project).contributor?(user)).to eq(true)
+ end
+
+ it "returns true if a user isn't a contributor" do
+ user = build_stubbed(:author)
+
+ allow(project.team).to receive(:contributor?).with(user).and_return(false)
+
+ expect(described_class.new(project).contributor?(user)).to eq(false)
+ end
+ end
+
+ describe '#category' do
+ it 'returns the name of a category' do
+ config = described_class.new(project)
+
+ config.categories['foo'] = 'Foo'
+
+ expect(config.category('foo')).to eq('Foo')
+ end
+
+ it 'returns the raw category name when no alternative name is configured' do
+ config = described_class.new(project)
+
+ expect(config.category('bla')).to eq('bla')
+ end
+ end
+
+ describe '#format_date' do
+ it 'formats a date according to the configured date format' do
+ config = described_class.new(project)
+ time = Time.utc(2021, 1, 5)
+
+ expect(config.format_date(time)).to eq('2021-01-05')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/changelog/generator_spec.rb b/spec/lib/gitlab/changelog/generator_spec.rb
new file mode 100644
index 00000000000..bc4a7c5dd6b
--- /dev/null
+++ b/spec/lib/gitlab/changelog/generator_spec.rb
@@ -0,0 +1,164 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Changelog::Generator do
+ describe '#add' do
+ let(:project) { build_stubbed(:project) }
+ let(:author) { build_stubbed(:user) }
+ let(:commit) { build_stubbed(:commit) }
+ let(:config) { Gitlab::Changelog::Config.new(project) }
+
+ it 'generates the Markdown for the first release' do
+ release = Gitlab::Changelog::Release.new(
+ version: '1.0.0',
+ date: Time.utc(2021, 1, 5),
+ config: config
+ )
+
+ release.add_entry(
+ title: 'This is a new change',
+ commit: commit,
+ category: 'added',
+ author: author
+ )
+
+ gen = described_class.new('')
+
+ expect(gen.add(release)).to eq(<<~MARKDOWN)
+ ## 1.0.0 (2021-01-05)
+
+ ### added (1 change)
+
+ - [This is a new change](#{commit.to_reference(full: true)})
+ MARKDOWN
+ end
+
+ it 'generates the Markdown for a newer release' do
+ release = Gitlab::Changelog::Release.new(
+ version: '2.0.0',
+ date: Time.utc(2021, 1, 5),
+ config: config
+ )
+
+ release.add_entry(
+ title: 'This is a new change',
+ commit: commit,
+ category: 'added',
+ author: author
+ )
+
+ gen = described_class.new(<<~MARKDOWN)
+ This is a changelog file.
+
+ ## 1.0.0
+
+ This is the changelog for version 1.0.0.
+ MARKDOWN
+
+ expect(gen.add(release)).to eq(<<~MARKDOWN)
+ This is a changelog file.
+
+ ## 2.0.0 (2021-01-05)
+
+ ### added (1 change)
+
+ - [This is a new change](#{commit.to_reference(full: true)})
+
+ ## 1.0.0
+
+ This is the changelog for version 1.0.0.
+ MARKDOWN
+ end
+
+ it 'generates the Markdown for a patch release' do
+ release = Gitlab::Changelog::Release.new(
+ version: '1.1.0',
+ date: Time.utc(2021, 1, 5),
+ config: config
+ )
+
+ release.add_entry(
+ title: 'This is a new change',
+ commit: commit,
+ category: 'added',
+ author: author
+ )
+
+ gen = described_class.new(<<~MARKDOWN)
+ This is a changelog file.
+
+ ## 2.0.0
+
+ This is another release.
+
+ ## 1.0.0
+
+ This is the changelog for version 1.0.0.
+ MARKDOWN
+
+ expect(gen.add(release)).to eq(<<~MARKDOWN)
+ This is a changelog file.
+
+ ## 2.0.0
+
+ This is another release.
+
+ ## 1.1.0 (2021-01-05)
+
+ ### added (1 change)
+
+ - [This is a new change](#{commit.to_reference(full: true)})
+
+ ## 1.0.0
+
+ This is the changelog for version 1.0.0.
+ MARKDOWN
+ end
+
+ it 'generates the Markdown for an old release' do
+ release = Gitlab::Changelog::Release.new(
+ version: '0.5.0',
+ date: Time.utc(2021, 1, 5),
+ config: config
+ )
+
+ release.add_entry(
+ title: 'This is a new change',
+ commit: commit,
+ category: 'added',
+ author: author
+ )
+
+ gen = described_class.new(<<~MARKDOWN)
+ This is a changelog file.
+
+ ## 2.0.0
+
+ This is another release.
+
+ ## 1.0.0
+
+ This is the changelog for version 1.0.0.
+ MARKDOWN
+
+ expect(gen.add(release)).to eq(<<~MARKDOWN)
+ This is a changelog file.
+
+ ## 2.0.0
+
+ This is another release.
+
+ ## 1.0.0
+
+ This is the changelog for version 1.0.0.
+
+ ## 0.5.0 (2021-01-05)
+
+ ### added (1 change)
+
+ - [This is a new change](#{commit.to_reference(full: true)})
+ MARKDOWN
+ end
+ end
+end
diff --git a/spec/lib/gitlab/changelog/release_spec.rb b/spec/lib/gitlab/changelog/release_spec.rb
new file mode 100644
index 00000000000..50a23d23299
--- /dev/null
+++ b/spec/lib/gitlab/changelog/release_spec.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Changelog::Release do
+ describe '#to_markdown' do
+ let(:config) { Gitlab::Changelog::Config.new(build_stubbed(:project)) }
+ let(:commit) { build_stubbed(:commit) }
+ let(:author) { build_stubbed(:user) }
+ let(:mr) { build_stubbed(:merge_request) }
+ let(:release) do
+ described_class
+ .new(version: '1.0.0', date: Time.utc(2021, 1, 5), config: config)
+ end
+
+ context 'when there are no entries' do
+ it 'includes a notice about the lack of entries' do
+ expect(release.to_markdown).to eq(<<~OUT)
+ ## 1.0.0 (2021-01-05)
+
+ No changes.
+
+ OUT
+ end
+ end
+
+ context 'when all data is present' do
+ it 'includes all data' do
+ allow(config).to receive(:contributor?).with(author).and_return(true)
+
+ release.add_entry(
+ title: 'Entry title',
+ commit: commit,
+ category: 'fixed',
+ author: author,
+ merge_request: mr
+ )
+
+ expect(release.to_markdown).to eq(<<~OUT)
+ ## 1.0.0 (2021-01-05)
+
+ ### fixed (1 change)
+
+ - [Entry title](#{commit.to_reference(full: true)}) \
+ by #{author.to_reference(full: true)} \
+ ([merge request](#{mr.to_reference(full: true)}))
+
+ OUT
+ end
+ end
+
+ context 'when no merge request is present' do
+ it "doesn't include a merge request link" do
+ allow(config).to receive(:contributor?).with(author).and_return(true)
+
+ release.add_entry(
+ title: 'Entry title',
+ commit: commit,
+ category: 'fixed',
+ author: author
+ )
+
+ expect(release.to_markdown).to eq(<<~OUT)
+ ## 1.0.0 (2021-01-05)
+
+ ### fixed (1 change)
+
+ - [Entry title](#{commit.to_reference(full: true)}) \
+ by #{author.to_reference(full: true)}
+
+ OUT
+ end
+ end
+
+ context 'when the author is not a contributor' do
+ it "doesn't include the author" do
+ allow(config).to receive(:contributor?).with(author).and_return(false)
+
+ release.add_entry(
+ title: 'Entry title',
+ commit: commit,
+ category: 'fixed',
+ author: author
+ )
+
+ expect(release.to_markdown).to eq(<<~OUT)
+ ## 1.0.0 (2021-01-05)
+
+ ### fixed (1 change)
+
+ - [Entry title](#{commit.to_reference(full: true)})
+
+ OUT
+ end
+ end
+ end
+
+ describe '#header_start_pattern' do
+ it 'returns a regular expression for finding the start of a release section' do
+ config = Gitlab::Changelog::Config.new(build_stubbed(:project))
+ release = described_class
+ .new(version: '1.0.0', date: Time.utc(2021, 1, 5), config: config)
+
+ expect(release.header_start_pattern).to eq(/^##\s*1\.0\.0/)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/changelog/template/compiler_spec.rb b/spec/lib/gitlab/changelog/template/compiler_spec.rb
new file mode 100644
index 00000000000..8b09bc90529
--- /dev/null
+++ b/spec/lib/gitlab/changelog/template/compiler_spec.rb
@@ -0,0 +1,136 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Changelog::Template::Compiler do
+ def compile(template, data = {})
+ Gitlab::Changelog::Template::Compiler.new.compile(template).render(data)
+ end
+
+ describe '#compile' do
+ it 'compiles an empty template' do
+ expect(compile('')).to eq('')
+ end
+
+ it 'compiles a template with an undefined variable' do
+ expect(compile('{{number}}')).to eq('')
+ end
+
+ it 'compiles a template with a defined variable' do
+ expect(compile('{{number}}', 'number' => 42)).to eq('42')
+ end
+
+ it 'compiles a template with the special "it" variable' do
+ expect(compile('{{it}}', 'values' => 10)).to eq({ 'values' => 10 }.to_s)
+ end
+
+ it 'compiles a template containing an if statement' do
+ expect(compile('{% if foo %}yes{% end %}', 'foo' => true)).to eq('yes')
+ end
+
+ it 'compiles a template containing an if/else statement' do
+ expect(compile('{% if foo %}yes{% else %}no{% end %}', 'foo' => false))
+ .to eq('no')
+ end
+
+ it 'compiles a template that iterates over an Array' do
+ expect(compile('{% each numbers %}{{it}}{% end %}', 'numbers' => [1, 2, 3]))
+ .to eq('123')
+ end
+
+ it 'compiles a template that iterates over a Hash' do
+ output = compile(
+ '{% each pairs %}{{0}}={{1}}{% end %}',
+ 'pairs' => { 'key' => 'value' }
+ )
+
+ expect(output).to eq('key=value')
+ end
+
+ it 'compiles a template that iterates over a Hash of Arrays' do
+ output = compile(
+ '{% each values %}{{key}}{% end %}',
+ 'values' => [{ 'key' => 'value' }]
+ )
+
+ expect(output).to eq('value')
+ end
+
+ it 'compiles a template with a variable path' do
+ output = compile('{{foo.bar}}', 'foo' => { 'bar' => 10 })
+
+ expect(output).to eq('10')
+ end
+
+ it 'compiles a template with a variable path that uses an Array index' do
+ output = compile('{{foo.values.1}}', 'foo' => { 'values' => [10, 20] })
+
+ expect(output).to eq('20')
+ end
+
+ it 'compiles a template with a variable path that uses a Hash and a numeric index' do
+ output = compile('{{foo.1}}', 'foo' => { 'key' => 'value' })
+
+ expect(output).to eq('')
+ end
+
+ it 'compiles a template with a variable path that uses an Array and a String based index' do
+ output = compile('{{foo.numbers.bla}}', 'foo' => { 'numbers' => [10, 20] })
+
+ expect(output).to eq('')
+ end
+
+ it 'ignores ERB tags provided by the user' do
+ input = '<% exit %> <%= exit %> <%= foo -%>'
+
+ expect(compile(input)).to eq(input)
+ end
+
+ it 'removes newlines introduced by end statements on their own lines' do
+ output = compile(<<~TPL, 'foo' => true)
+ {% if foo %}
+ foo
+ {% end %}
+ TPL
+
+ expect(output).to eq("foo\n")
+ end
+
+ it 'supports escaping of trailing newlines' do
+ output = compile(<<~TPL)
+ foo \
+ bar\
+ baz
+ TPL
+
+ expect(output).to eq("foo barbaz\n")
+ end
+
+ # rubocop: disable Lint/InterpolationCheck
+ it 'ignores embedded Ruby expressions' do
+ input = '#{exit}'
+
+ expect(compile(input)).to eq(input)
+ end
+ # rubocop: enable Lint/InterpolationCheck
+
+ it 'ignores ERB tags inside variable tags' do
+ input = '{{<%= exit %>}}'
+
+ expect(compile(input)).to eq(input)
+ end
+
+ it 'ignores malicious code that tries to escape a variable' do
+ input = "{{') ::Kernel.exit # '}}"
+
+ expect(compile(input)).to eq(input)
+ end
+
+ it 'ignores malicious code that makes use of whitespace' do
+ input = "x<\\\n%::Kernel.system(\"id\")%>"
+
+ expect(Kernel).not_to receive(:system).with('id')
+ expect(compile(input)).to eq('x<%::Kernel.system("id")%>')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/badge/coverage/metadata_spec.rb b/spec/lib/gitlab/ci/badge/coverage/metadata_spec.rb
index 725ae03ad74..6d272f060ab 100644
--- a/spec/lib/gitlab/badge/coverage/metadata_spec.rb
+++ b/spec/lib/gitlab/ci/badge/coverage/metadata_spec.rb
@@ -1,9 +1,9 @@
# frozen_string_literal: true
require 'spec_helper'
-require 'lib/gitlab/badge/shared/metadata'
+require 'lib/gitlab/ci/badge/shared/metadata'
-RSpec.describe Gitlab::Badge::Coverage::Metadata do
+RSpec.describe Gitlab::Ci::Badge::Coverage::Metadata do
let(:badge) do
double(project: create(:project), ref: 'feature', job: 'test')
end
diff --git a/spec/lib/gitlab/badge/coverage/report_spec.rb b/spec/lib/gitlab/ci/badge/coverage/report_spec.rb
index 3b5ea3291e4..13696d815aa 100644
--- a/spec/lib/gitlab/badge/coverage/report_spec.rb
+++ b/spec/lib/gitlab/ci/badge/coverage/report_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Badge::Coverage::Report do
+RSpec.describe Gitlab::Ci::Badge::Coverage::Report do
let_it_be(:project) { create(:project) }
let_it_be(:success_pipeline) { create(:ci_pipeline, :success, project: project) }
let_it_be(:running_pipeline) { create(:ci_pipeline, :running, project: project) }
diff --git a/spec/lib/gitlab/badge/coverage/template_spec.rb b/spec/lib/gitlab/ci/badge/coverage/template_spec.rb
index ba5c1b2ce6e..f010d1bce50 100644
--- a/spec/lib/gitlab/badge/coverage/template_spec.rb
+++ b/spec/lib/gitlab/ci/badge/coverage/template_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Badge::Coverage::Template do
+RSpec.describe Gitlab::Ci::Badge::Coverage::Template do
let(:badge) { double(entity: 'coverage', status: 90.00, customization: {}) }
let(:template) { described_class.new(badge) }
diff --git a/spec/lib/gitlab/badge/pipeline/metadata_spec.rb b/spec/lib/gitlab/ci/badge/pipeline/metadata_spec.rb
index c8ed0c8ea29..2f677237fad 100644
--- a/spec/lib/gitlab/badge/pipeline/metadata_spec.rb
+++ b/spec/lib/gitlab/ci/badge/pipeline/metadata_spec.rb
@@ -1,9 +1,9 @@
# frozen_string_literal: true
require 'spec_helper'
-require 'lib/gitlab/badge/shared/metadata'
+require 'lib/gitlab/ci/badge/shared/metadata'
-RSpec.describe Gitlab::Badge::Pipeline::Metadata do
+RSpec.describe Gitlab::Ci::Badge::Pipeline::Metadata do
let(:badge) { double(project: create(:project), ref: 'feature') }
let(:metadata) { described_class.new(badge) }
diff --git a/spec/lib/gitlab/badge/pipeline/status_spec.rb b/spec/lib/gitlab/ci/badge/pipeline/status_spec.rb
index b5dabca0477..45d0d781090 100644
--- a/spec/lib/gitlab/badge/pipeline/status_spec.rb
+++ b/spec/lib/gitlab/ci/badge/pipeline/status_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Badge::Pipeline::Status do
+RSpec.describe Gitlab::Ci::Badge::Pipeline::Status do
let(:project) { create(:project, :repository) }
let(:sha) { project.commit.sha }
let(:branch) { 'master' }
diff --git a/spec/lib/gitlab/badge/pipeline/template_spec.rb b/spec/lib/gitlab/ci/badge/pipeline/template_spec.rb
index c78e95852f3..696bb62b4d6 100644
--- a/spec/lib/gitlab/badge/pipeline/template_spec.rb
+++ b/spec/lib/gitlab/ci/badge/pipeline/template_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Badge::Pipeline::Template do
+RSpec.describe Gitlab::Ci::Badge::Pipeline::Template do
let(:badge) { double(entity: 'pipeline', status: 'success', customization: {}) }
let(:template) { described_class.new(badge) }
diff --git a/spec/lib/gitlab/badge/shared/metadata.rb b/spec/lib/gitlab/ci/badge/shared/metadata.rb
index c99a65bb2f4..c99a65bb2f4 100644
--- a/spec/lib/gitlab/badge/shared/metadata.rb
+++ b/spec/lib/gitlab/ci/badge/shared/metadata.rb
diff --git a/spec/lib/gitlab/ci/build/credentials/registry/dependency_proxy_spec.rb b/spec/lib/gitlab/ci/build/credentials/registry/dependency_proxy_spec.rb
new file mode 100644
index 00000000000..f50c6e99e99
--- /dev/null
+++ b/spec/lib/gitlab/ci/build/credentials/registry/dependency_proxy_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Build::Credentials::Registry::DependencyProxy do
+ let(:build) { create(:ci_build, name: 'spinach', stage: 'test', stage_idx: 0) }
+ let(:gitlab_url) { 'gitlab.example.com:443' }
+
+ subject { described_class.new(build) }
+
+ before do
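+ # Stub the instance host and port; the expected credential URL above is derived from them.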
+ stub_config_setting(host: 'gitlab.example.com', port: 443)
+ end
+
+ it 'contains valid dependency proxy credentials' do
+ expect(subject).to be_kind_of(described_class)
+
+ expect(subject.username).to eq 'gitlab-ci-token'
+ expect(subject.password).to eq build.token
+ expect(subject.url).to eq gitlab_url
+ expect(subject.type).to eq 'registry'
+ end
+
+ describe '.valid?' do
+ subject { described_class.new(build).valid? }
+
+ context 'when dependency proxy is enabled' do
+ before do
+ stub_config(dependency_proxy: { enabled: true })
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when dependency proxy is disabled' do
+ before do
+ stub_config(dependency_proxy: { enabled: false })
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/build/credentials/registry_spec.rb b/spec/lib/gitlab/ci/build/credentials/registry/gitlab_registry_spec.rb
index c0a76973f60..43913e91085 100644
--- a/spec/lib/gitlab/ci/build/credentials/registry_spec.rb
+++ b/spec/lib/gitlab/ci/build/credentials/registry/gitlab_registry_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Build::Credentials::Registry do
+RSpec.describe Gitlab::Ci::Build::Credentials::Registry::GitlabRegistry do
let(:build) { create(:ci_build, name: 'spinach', stage: 'test', stage_idx: 0) }
let(:registry_url) { 'registry.example.com:5005' }
diff --git a/spec/lib/gitlab/ci/build/rules_spec.rb b/spec/lib/gitlab/ci/build/rules_spec.rb
index a1af5b75f87..0b50def05d4 100644
--- a/spec/lib/gitlab/ci/build/rules_spec.rb
+++ b/spec/lib/gitlab/ci/build/rules_spec.rb
@@ -201,40 +201,13 @@ RSpec.describe Gitlab::Ci::Build::Rules do
end
describe '#build_attributes' do
- let(:seed_attributes) { {} }
-
subject(:build_attributes) do
- result.build_attributes(seed_attributes)
+ result.build_attributes
end
it 'compacts nil values' do
is_expected.to eq(options: {}, when: 'on_success')
end
-
- context 'when there are variables in rules' do
- let(:variables) { { VAR1: 'new var 1', VAR3: 'var 3' } }
-
- context 'when there are seed variables' do
- let(:seed_attributes) do
- { yaml_variables: [{ key: 'VAR1', value: 'var 1', public: true },
- { key: 'VAR2', value: 'var 2', public: true }] }
- end
-
- it 'returns yaml_variables with override' do
- is_expected.to include(
- yaml_variables: [{ key: 'VAR1', value: 'new var 1', public: true },
- { key: 'VAR2', value: 'var 2', public: true },
- { key: 'VAR3', value: 'var 3', public: true }]
- )
- end
- end
-
- context 'when there is not seed variables' do
- it 'does not return yaml_variables' do
- is_expected.not_to have_key(:yaml_variables)
- end
- end
- end
end
describe '#pass?' do
diff --git a/spec/lib/gitlab/ci/charts_spec.rb b/spec/lib/gitlab/ci/charts_spec.rb
index cfc2019a89b..46d7d4a58f0 100644
--- a/spec/lib/gitlab/ci/charts_spec.rb
+++ b/spec/lib/gitlab/ci/charts_spec.rb
@@ -9,6 +9,10 @@ RSpec.describe Gitlab::Ci::Charts do
subject { chart.to }
+ before do
+ create(:ci_empty_pipeline, project: project, duration: 120)
+ end
+
it 'goes until the end of the current month (including the whole last day of the month)' do
is_expected.to eq(Date.today.end_of_month.end_of_day)
end
@@ -20,6 +24,10 @@ RSpec.describe Gitlab::Ci::Charts do
it 'uses %B %Y as labels format' do
expect(chart.labels).to include(chart.from.strftime('%B %Y'))
end
+
+ it 'returns count of pipelines run each day in the current year' do
+ expect(chart.total.sum).to eq(1)
+ end
end
context 'monthchart' do
@@ -28,6 +36,10 @@ RSpec.describe Gitlab::Ci::Charts do
subject { chart.to }
+ before do
+ create(:ci_empty_pipeline, project: project, duration: 120)
+ end
+
it 'includes the whole current day' do
is_expected.to eq(Date.today.end_of_day)
end
@@ -39,6 +51,10 @@ RSpec.describe Gitlab::Ci::Charts do
it 'uses %d %B as labels format' do
expect(chart.labels).to include(chart.from.strftime('%d %B'))
end
+
+ it 'returns count of pipelines run each day in the current month' do
+ expect(chart.total.sum).to eq(1)
+ end
end
context 'weekchart' do
@@ -47,6 +63,10 @@ RSpec.describe Gitlab::Ci::Charts do
subject { chart.to }
+ before do
+ create(:ci_empty_pipeline, project: project, duration: 120)
+ end
+
it 'includes the whole current day' do
is_expected.to eq(Date.today.end_of_day)
end
@@ -58,6 +78,68 @@ RSpec.describe Gitlab::Ci::Charts do
it 'uses %d %B as labels format' do
expect(chart.labels).to include(chart.from.strftime('%d %B'))
end
+
+ it 'returns count of pipelines run each day in the current week' do
+ expect(chart.total.sum).to eq(1)
+ end
+ end
+
+ context 'weekchart_utc' do
+ today = Date.today
+ end_of_today = Time.use_zone(Time.find_zone('UTC')) { today.end_of_day }
+
+ let(:project) { create(:project) }
+ let(:chart) do
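+ # Pin Date.today and its end_of_day so the chart range uses the UTC time computed above.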
+ allow(Date).to receive(:today).and_return(today)
+ allow(today).to receive(:end_of_day).and_return(end_of_today)
+ Gitlab::Ci::Charts::WeekChart.new(project)
+ end
+
+ subject { chart.total }
+
+ before do
+ create(:ci_empty_pipeline, project: project, duration: 120)
+ end
+
+ it 'uses a utc time zone for range times' do
+ expect(chart.to.zone).to eq(end_of_today.zone)
+ expect(chart.from.zone).to eq(end_of_today.zone)
+ end
+
+ it 'returns count of pipelines run each day in the current week' do
+ expect(chart.total.sum).to eq(1)
+ end
+ end
+
+ context 'weekchart_non_utc' do
+ today = Date.today
+ end_of_today = Time.use_zone(Time.find_zone('Asia/Dubai')) { today.end_of_day }
+
+ let(:project) { create(:project) }
+ let(:chart) do
+ allow(Date).to receive(:today).and_return(today)
+ allow(today).to receive(:end_of_day).and_return(end_of_today)
+ Gitlab::Ci::Charts::WeekChart.new(project)
+ end
+
+ subject { chart.total }
+
+ before do
+ # The DB uses UTC always, so our use of a Time Zone in the application
+ # can cause the creation date of the pipeline to go unmatched depending
+ # on the offset. We can work around this by requesting the pipeline be
+ # created with the `created_at` field set to a day ago in the same week.
+ create(:ci_empty_pipeline, project: project, duration: 120, created_at: today - 1.day)
+ end
+
+ it 'uses a non-utc time zone for range times' do
+ expect(chart.to.zone).to eq(end_of_today.zone)
+ expect(chart.from.zone).to eq(end_of_today.zone)
+ end
+
+ it 'returns count of pipelines run each day in the current week' do
+ expect(chart.total.sum).to eq(1)
+ end
end
context 'pipeline_times' do
diff --git a/spec/lib/gitlab/ci/config/entry/cache_spec.rb b/spec/lib/gitlab/ci/config/entry/cache_spec.rb
index 80427eaa6ee..247f4b63910 100644
--- a/spec/lib/gitlab/ci/config/entry/cache_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/cache_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::Entry::Cache do
+ using RSpec::Parameterized::TableSyntax
+
subject(:entry) { described_class.new(config) }
describe 'validations' do
@@ -56,8 +58,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Cache do
end
context 'with `policy`' do
- using RSpec::Parameterized::TableSyntax
-
where(:policy, :result) do
'pull-push' | 'pull-push'
'push' | 'push'
@@ -77,8 +77,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Cache do
end
context 'with `when`' do
- using RSpec::Parameterized::TableSyntax
-
where(:when_config, :result) do
'on_success' | 'on_success'
'on_failure' | 'on_failure'
@@ -109,8 +107,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Cache do
end
context 'with `policy`' do
- using RSpec::Parameterized::TableSyntax
-
where(:policy, :valid) do
'pull-push' | true
'push' | true
@@ -126,8 +122,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Cache do
end
context 'with `when`' do
- using RSpec::Parameterized::TableSyntax
-
where(:when_config, :valid) do
'on_success' | true
'on_failure' | true
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index 7834a1a94f2..a3b5f32b9f9 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -763,16 +763,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
it 'returns allow_failure_criteria' do
expect(entry.value[:allow_failure_criteria]).to match(exit_codes: [42])
end
-
- context 'with ci_allow_failure_with_exit_codes disabled' do
- before do
- stub_feature_flags(ci_allow_failure_with_exit_codes: false)
- end
-
- it 'does not return allow_failure_criteria' do
- expect(entry.value.key?(:allow_failure_criteria)).to be_falsey
- end
- end
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/processable_spec.rb b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
index aadf94365c6..04e80450263 100644
--- a/spec/lib/gitlab/ci/config/entry/processable_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
@@ -73,6 +73,15 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
end
end
+ context 'when resource_group key is not a string' do
+ let(:config) { { resource_group: 123 } }
+
+ it 'returns error about wrong value type' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include "job resource group should be a string"
+ end
+ end
+
context 'when it uses both "when:" and "rules:"' do
let(:config) do
{
@@ -340,6 +349,26 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
end
end
+ context 'with resource group' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:resource_group, :result) do
+ 'iOS' | 'iOS'
+ 'review/$CI_COMMIT_REF_NAME' | 'review/$CI_COMMIT_REF_NAME'
+ nil | nil
+ end
+
+ with_them do
+ let(:config) { { script: 'ls', resource_group: resource_group }.compact }
+
+ it do
+ entry.compose!(deps)
+
+ expect(entry.resource_group).to eq(result)
+ end
+ end
+ end
+
context 'with inheritance' do
context 'of variables' do
let(:config) do
diff --git a/spec/lib/gitlab/ci/config/external/mapper_spec.rb b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
index 4fdaaca8316..99f546ceb37 100644
--- a/spec/lib/gitlab/ci/config/external/mapper_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
@@ -323,20 +323,6 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
expect { subject }.to raise_error(described_class::AmbigiousSpecificationError)
end
end
-
- context 'when feature flag is turned off' do
- let(:values) do
- { include: full_local_file_path }
- end
-
- before do
- stub_feature_flags(variables_in_include_section_ci: false)
- end
-
- it 'does not expand the variables' do
- expect(subject[0].location).to eq('$CI_PROJECT_PATH' + local_file)
- end
- end
end
end
end
diff --git a/spec/lib/gitlab/ci/cron_parser_spec.rb b/spec/lib/gitlab/ci/cron_parser_spec.rb
index dd27b4045c9..15293429354 100644
--- a/spec/lib/gitlab/ci/cron_parser_spec.rb
+++ b/spec/lib/gitlab/ci/cron_parser_spec.rb
@@ -63,6 +63,17 @@ RSpec.describe Gitlab::Ci::CronParser do
end
end
+ context 'when range and slash used' do
+ let(:cron) { '3-59/10 * * * *' }
+ let(:cron_timezone) { 'UTC' }
+
+ it_behaves_like returns_time_for_epoch
+
+ it 'returns specific time' do
+ expect(subject.min).to be_in([3, 13, 23, 33, 43, 53])
+ end
+ end
+
context 'when cron_timezone is TZInfo format' do
before do
allow(Time).to receive(:zone)
diff --git a/spec/lib/gitlab/ci/parsers/instrumentation_spec.rb b/spec/lib/gitlab/ci/parsers/instrumentation_spec.rb
new file mode 100644
index 00000000000..30bcce21be2
--- /dev/null
+++ b/spec/lib/gitlab/ci/parsers/instrumentation_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Parsers::Instrumentation do
+ describe '#parse!' do
+ let(:parser_class) do
+ Class.new do
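+ # Minimal parser stub; prepending Instrumentation wraps #parse! so its duration is recorded.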
+ prepend Gitlab::Ci::Parsers::Instrumentation
+
+ def parse!(arg1, arg2)
+ "parse #{arg1} #{arg2}"
+ end
+ end
+ end
+
+ it 'sets metrics for duration of parsing' do
+ result = parser_class.new.parse!('hello', 'world')
+
+ expect(result).to eq('parse hello world')
+
+ metrics = Gitlab::Metrics.registry.get(:ci_report_parser_duration_seconds).get({ parser: parser_class.name })
+
+ expect(metrics.keys).to match_array(described_class::BUCKETS)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/parsers_spec.rb b/spec/lib/gitlab/ci/parsers_spec.rb
index b932cd81272..c9891c06507 100644
--- a/spec/lib/gitlab/ci/parsers_spec.rb
+++ b/spec/lib/gitlab/ci/parsers_spec.rb
@@ -54,4 +54,12 @@ RSpec.describe Gitlab::Ci::Parsers do
end
end
end
+
+ describe '.instrument!' do
+ it 'prepends the Instrumentation module into each parser' do
+ expect(described_class.parsers.values).to all(receive(:prepend).with(Gitlab::Ci::Parsers::Instrumentation))
+
+ described_class.instrument!
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
index 3eaecb11ae0..1d17244e519 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
@@ -58,20 +58,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines do
expect(build_statuses(child_pipeline)).to contain_exactly('canceled')
end
-
- context 'when FF ci_auto_cancel_all_pipelines is disabled' do
- before do
- stub_feature_flags(ci_auto_cancel_all_pipelines: false)
- end
-
- it 'does not cancel interruptible builds of child pipeline' do
- expect(build_statuses(child_pipeline)).to contain_exactly('running')
-
- perform
-
- expect(build_statuses(child_pipeline)).to contain_exactly('running')
- end
- end
end
context 'when the child pipeline has not an interruptible job' do
diff --git a/spec/lib/gitlab/ci/reports/codequality_mr_diff_spec.rb b/spec/lib/gitlab/ci/reports/codequality_mr_diff_spec.rb
new file mode 100644
index 00000000000..8b177fa7fc1
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/codequality_mr_diff_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::CodequalityMrDiff do
+ let(:codequality_report) { Gitlab::Ci::Reports::CodequalityReports.new }
+ let(:degradation_1) { build(:codequality_degradation_1) }
+ let(:degradation_2) { build(:codequality_degradation_2) }
+ let(:degradation_3) { build(:codequality_degradation_3) }
+
+ describe '#initialize!' do
+ subject(:report) { described_class.new(codequality_report) }
+
+ context 'when quality has degradations' do
+ context 'with several degradations on the same line' do
+ before do
+ codequality_report.add_degradation(degradation_1)
+ codequality_report.add_degradation(degradation_2)
+ end
+
+ it 'generates quality report for mr diff' do
+ expect(report.files).to match(
+ "file_a.rb" => [
+ { line: 10, description: "Avoid parameter lists longer than 5 parameters. [12/5]", severity: "major" },
+ { line: 10, description: "Method `new_array` has 12 arguments (exceeds 4 allowed). Consider refactoring.", severity: "major" }
+ ]
+ )
+ end
+ end
+
+ context 'with several degradations on several files' do
+ before do
+ codequality_report.add_degradation(degradation_1)
+ codequality_report.add_degradation(degradation_2)
+ codequality_report.add_degradation(degradation_3)
+ end
+
+ it 'returns quality report for mr diff' do
+ expect(report.files).to match(
+ "file_a.rb" => [
+ { line: 10, description: "Avoid parameter lists longer than 5 parameters. [12/5]", severity: "major" },
+ { line: 10, description: "Method `new_array` has 12 arguments (exceeds 4 allowed). Consider refactoring.", severity: "major" }
+ ],
+ "file_b.rb" => [
+ { line: 10, description: "Avoid parameter lists longer than 5 parameters. [12/5]", severity: "minor" }
+ ]
+ )
+ end
+ end
+ end
+
+ context 'when quality has no degradation' do
+ it 'returns an empty hash' do
+ expect(report.files).to match({})
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb b/spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb
index 7053d54381b..90188b56f5a 100644
--- a/spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb
+++ b/spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb
@@ -6,62 +6,8 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
let(:comparer) { described_class.new(base_report, head_report) }
let(:base_report) { Gitlab::Ci::Reports::CodequalityReports.new }
let(:head_report) { Gitlab::Ci::Reports::CodequalityReports.new }
- let(:degradation_1) do
- {
- "categories": [
- "Complexity"
- ],
- "check_name": "argument_count",
- "content": {
- "body": ""
- },
- "description": "Method `new_array` has 12 arguments (exceeds 4 allowed). Consider refactoring.",
- "fingerprint": "15cdb5c53afd42bc22f8ca366a08d547",
- "location": {
- "path": "foo.rb",
- "lines": {
- "begin": 10,
- "end": 10
- }
- },
- "other_locations": [],
- "remediation_points": 900000,
- "severity": "major",
- "type": "issue",
- "engine_name": "structure"
- }.with_indifferent_access
- end
-
- let(:degradation_2) do
- {
- "type": "Issue",
- "check_name": "Rubocop/Metrics/ParameterLists",
- "description": "Avoid parameter lists longer than 5 parameters. [12/5]",
- "categories": [
- "Complexity"
- ],
- "remediation_points": 550000,
- "location": {
- "path": "foo.rb",
- "positions": {
- "begin": {
- "column": 14,
- "line": 10
- },
- "end": {
- "column": 39,
- "line": 10
- }
- }
- },
- "content": {
- "body": "This cop checks for methods with too many parameters.\nThe maximum number of parameters is configurable.\nKeyword arguments can optionally be excluded from the total count."
- },
- "engine_name": "rubocop",
- "fingerprint": "ab5f8b935886b942d621399f5a2ca16e",
- "severity": "minor"
- }.with_indifferent_access
- end
+ let(:degradation_1) { build(:codequality_degradation_1) }
+ let(:degradation_2) { build(:codequality_degradation_2) }
describe '#status' do
subject(:report_status) { comparer.status }
diff --git a/spec/lib/gitlab/ci/reports/codequality_reports_spec.rb b/spec/lib/gitlab/ci/reports/codequality_reports_spec.rb
index 44e67259369..ae9b2f2c62b 100644
--- a/spec/lib/gitlab/ci/reports/codequality_reports_spec.rb
+++ b/spec/lib/gitlab/ci/reports/codequality_reports_spec.rb
@@ -4,62 +4,8 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Reports::CodequalityReports do
let(:codequality_report) { described_class.new }
- let(:degradation_1) do
- {
- "categories": [
- "Complexity"
- ],
- "check_name": "argument_count",
- "content": {
- "body": ""
- },
- "description": "Method `new_array` has 12 arguments (exceeds 4 allowed). Consider refactoring.",
- "fingerprint": "15cdb5c53afd42bc22f8ca366a08d547",
- "location": {
- "path": "foo.rb",
- "lines": {
- "begin": 10,
- "end": 10
- }
- },
- "other_locations": [],
- "remediation_points": 900000,
- "severity": "major",
- "type": "issue",
- "engine_name": "structure"
- }.with_indifferent_access
- end
-
- let(:degradation_2) do
- {
- "type": "Issue",
- "check_name": "Rubocop/Metrics/ParameterLists",
- "description": "Avoid parameter lists longer than 5 parameters. [12/5]",
- "categories": [
- "Complexity"
- ],
- "remediation_points": 550000,
- "location": {
- "path": "foo.rb",
- "positions": {
- "begin": {
- "column": 14,
- "line": 10
- },
- "end": {
- "column": 39,
- "line": 10
- }
- }
- },
- "content": {
- "body": "This cop checks for methods with too many parameters.\nThe maximum number of parameters is configurable.\nKeyword arguments can optionally be excluded from the total count."
- },
- "engine_name": "rubocop",
- "fingerprint": "ab5f8b935886b942d621399f5a2ca16e",
- "severity": "minor"
- }.with_indifferent_access
- end
+ let(:degradation_1) { build(:codequality_degradation_1) }
+ let(:degradation_2) { build(:codequality_degradation_2) }
it { expect(codequality_report.degradations).to eq({}) }
diff --git a/spec/lib/gitlab/ci/trace/chunked_io_spec.rb b/spec/lib/gitlab/ci/trace/chunked_io_spec.rb
index a2903391c6f..f09e03b4d55 100644
--- a/spec/lib/gitlab/ci/trace/chunked_io_spec.rb
+++ b/spec/lib/gitlab/ci/trace/chunked_io_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
let(:chunked_io) { described_class.new(build) }
before do
- stub_feature_flags(ci_enable_live_trace: true)
+ stub_feature_flags(ci_enable_live_trace: true, gitlab_ci_trace_read_consistency: true)
end
describe "#initialize" do
diff --git a/spec/lib/gitlab/ci/variables/helpers_spec.rb b/spec/lib/gitlab/ci/variables/helpers_spec.rb
new file mode 100644
index 00000000000..b45abf8c0e1
--- /dev/null
+++ b/spec/lib/gitlab/ci/variables/helpers_spec.rb
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Ci::Variables::Helpers do
+ describe '.merge_variables' do
+ let(:current_variables) do
+ [{ key: 'key1', value: 'value1' },
+ { key: 'key2', value: 'value2' }]
+ end
+
+ let(:new_variables) do
+ [{ key: 'key2', value: 'value22' },
+ { key: 'key3', value: 'value3' }]
+ end
+
+ let(:result) do
+ [{ key: 'key1', value: 'value1', public: true },
+ { key: 'key2', value: 'value22', public: true },
+ { key: 'key3', value: 'value3', public: true }]
+ end
+
+ subject { described_class.merge_variables(current_variables, new_variables) }
+
+ it { is_expected.to eq(result) }
+
+ context 'when new variables is a hash' do
+ let(:new_variables) do
+ { 'key2' => 'value22', 'key3' => 'value3' }
+ end
+
+ it { is_expected.to eq(result) }
+ end
+
+ context 'when new variables is a hash with symbol keys' do
+ let(:new_variables) do
+ { key2: 'value22', key3: 'value3' }
+ end
+
+ it { is_expected.to eq(result) }
+ end
+
+ context 'when new variables is nil' do
+ let(:new_variables) {}
+ let(:result) do
+ [{ key: 'key1', value: 'value1', public: true },
+ { key: 'key2', value: 'value2', public: true }]
+ end
+
+ it { is_expected.to eq(result) }
+ end
+ end
+
+ describe '.transform_to_yaml_variables' do
+ let(:variables) do
+ { 'key1' => 'value1', 'key2' => 'value2' }
+ end
+
+ let(:result) do
+ [{ key: 'key1', value: 'value1', public: true },
+ { key: 'key2', value: 'value2', public: true }]
+ end
+
+ subject { described_class.transform_to_yaml_variables(variables) }
+
+ it { is_expected.to eq(result) }
+
+ context 'when variables is nil' do
+ let(:variables) {}
+
+ it { is_expected.to eq([]) }
+ end
+ end
+
+ describe '.transform_from_yaml_variables' do
+ let(:variables) do
+ [{ key: 'key1', value: 'value1', public: true },
+ { key: 'key2', value: 'value2', public: true }]
+ end
+
+ let(:result) do
+ { 'key1' => 'value1', 'key2' => 'value2' }
+ end
+
+ subject { described_class.transform_from_yaml_variables(variables) }
+
+ it { is_expected.to eq(result) }
+
+ context 'when variables is nil' do
+ let(:variables) {}
+
+ it { is_expected.to eq({}) }
+ end
+
+ context 'when variables is a hash' do
+ let(:variables) do
+ { key1: 'value1', 'key2' => 'value2' }
+ end
+
+ it { is_expected.to eq(result) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/cleanup/orphan_job_artifact_files_spec.rb b/spec/lib/gitlab/cleanup/orphan_job_artifact_files_spec.rb
index 8a7425a4156..b5adb603dab 100644
--- a/spec/lib/gitlab/cleanup/orphan_job_artifact_files_spec.rb
+++ b/spec/lib/gitlab/cleanup/orphan_job_artifact_files_spec.rb
@@ -42,7 +42,8 @@ RSpec.describe Gitlab::Cleanup::OrphanJobArtifactFiles do
end
it 'stops when limit is reached' do
- cleanup = described_class.new(limit: 1)
+ stub_env('LIMIT', 1)
+ cleanup = described_class.new
mock_artifacts_found(cleanup, 'tmp/foo/bar/1', 'tmp/foo/bar/2')
diff --git a/spec/lib/gitlab/cluster/lifecycle_events_spec.rb b/spec/lib/gitlab/cluster/lifecycle_events_spec.rb
new file mode 100644
index 00000000000..4ed68d54680
--- /dev/null
+++ b/spec/lib/gitlab/cluster/lifecycle_events_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rspec-parameterized'
+
+RSpec.describe Gitlab::Cluster::LifecycleEvents do
+ # we create a new instance to ensure that we do not touch existing hooks
+ let(:replica) { Class.new(described_class) }
+
+ context 'hooks execution' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:method, :hook_names) do
+ :do_worker_start | %i[worker_start_hooks]
+ :do_before_fork | %i[before_fork_hooks]
+ :do_before_graceful_shutdown | %i[master_blackout_period master_graceful_shutdown]
+ :do_before_master_restart | %i[master_restart_hooks]
+ end
+
+ before do
+ # disable the blackout period to speed up tests
+ stub_config(shutdown: { blackout_seconds: 0 })
+ end
+
+ with_them do
+ subject { replica.public_send(method) }
+
+ it 'executes all hooks' do
+ hook_names.each do |hook_name|
+ hook = double
+ replica.instance_variable_set(:"@#{hook_name}", [hook])
+
+ # ensure that proper hooks are called
+ expect(hook).to receive(:call)
+ expect(replica).to receive(:call).with(hook_name, anything).and_call_original
+ end
+
+ subject
+ end
+ end
+ end
+
+ describe '#call' do
+ let(:name) { :my_hooks }
+
+ subject { replica.send(:call, name, hooks) }
+
+ context 'when multiple hooks raise exceptions' do
+ let(:hooks) do
+ [
+ -> { raise 'Exception A' },
+ -> { raise 'Exception B' }
+ ]
+ end
+
+ context 'when USE_FATAL_LIFECYCLE_EVENTS is set to the default' do
+ it 'only executes the first hook and raises a fatal error' do
+ expect(hooks[0]).to receive(:call).and_call_original
+ expect(hooks[1]).not_to receive(:call)
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).and_call_original
+ expect(replica).to receive(:warn).with('ERROR: The hook my_hooks failed with exception (RuntimeError) "Exception A".')
+
+ expect { subject }.to raise_error(described_class::FatalError, 'Exception A')
+ end
+ end
+
+ context 'when USE_FATAL_LIFECYCLE_EVENTS is disabled' do
+ before do
+ stub_const('Gitlab::Cluster::LifecycleEvents::USE_FATAL_LIFECYCLE_EVENTS', false)
+ end
+
+ it 'executes all hooks and logs all exceptions' do
+ expect(hooks[0]).to receive(:call).and_call_original
+ expect(hooks[1]).to receive(:call).and_call_original
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).twice.and_call_original
+ expect(replica).to receive(:warn).twice.and_call_original
+
+ expect { subject }.not_to raise_error
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/composer/cache_spec.rb b/spec/lib/gitlab/composer/cache_spec.rb
new file mode 100644
index 00000000000..00318ac14f9
--- /dev/null
+++ b/spec/lib/gitlab/composer/cache_spec.rb
@@ -0,0 +1,133 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Composer::Cache do
+ let_it_be(:package_name) { 'sample-project' }
+ let_it_be(:json) { { 'name' => package_name } }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :custom_repo, files: { 'composer.json' => json.to_json }, group: group) }
+ let(:branch) { project.repository.find_branch('master') }
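+ # version_cache_sha values are SHA256 hex digests (64 hex characters)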
+ let(:sha_regex) { /^[A-Fa-f0-9]{64}$/ }
+
+ shared_examples 'Composer create cache page' do
+ let(:expected_json) { ::Gitlab::Composer::VersionIndex.new(packages).to_json }
+
+ before do
+ stub_composer_cache_object_storage
+ end
+
+ it 'creates the cached page' do
+ expect { subject }.to change { Packages::Composer::CacheFile.count }.by(1)
+ cache_file = Packages::Composer::CacheFile.last
+ expect(cache_file.file_sha256).to eq package.reload.composer_metadatum.version_cache_sha
+ expect(cache_file.file.read).to eq expected_json
+ end
+ end
+
+ shared_examples 'Composer marks cache page for deletion' do
+ it 'marks the page for deletion' do
+ cache_file = Packages::Composer::CacheFile.last
+
+ freeze_time do
+ expect { subject }.to change { cache_file.reload.delete_at }.from(nil).to(1.day.from_now)
+ end
+ end
+ end
+
+ describe '#execute' do
+ subject { described_class.new(project: project, name: package_name).execute }
+
+ context 'creating packages' do
+ context 'with a pre-existing package' do
+ let(:package) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '1.0.0', json: json) }
+ let(:package2) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '2.0.0', json: json) }
+ let(:packages) { [package, package2] }
+
+ before do
+ package
+ described_class.new(project: project, name: package_name).execute
+ package.reload
+ package2
+ end
+
+ it 'updates the sha and creates the cache page' do
+ expect { subject }.to change { package2.reload.composer_metadatum.version_cache_sha }.from(nil).to(sha_regex)
+ .and change { package.reload.composer_metadatum.version_cache_sha }.to(sha_regex)
+ end
+
+ it_behaves_like 'Composer create cache page'
+ it_behaves_like 'Composer marks cache page for deletion'
+ end
+
+ context 'first package' do
+ let!(:package) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '1.0.0', json: json) }
+ let(:packages) { [package] }
+
+ it 'updates the sha and creates the cache page' do
+ expect { subject }.to change { package.reload.composer_metadatum.version_cache_sha }.from(nil).to(sha_regex)
+ end
+
+ it_behaves_like 'Composer create cache page'
+ end
+ end
+
+ context 'updating packages' do
+ let(:package) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '1.0.0', json: json) }
+ let(:package2) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '2.0.0', json: json) }
+ let(:packages) { [package, package2] }
+
+ before do
+ packages
+
+ described_class.new(project: project, name: package_name).execute
+
+ package.update!(version: '1.2.0')
+ package.reload
+ end
+
+ it_behaves_like 'Composer create cache page'
+ it_behaves_like 'Composer marks cache page for deletion'
+ end
+
+ context 'deleting packages' do
+ context 'when it is not the last package' do
+ let(:package) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '1.0.0', json: json) }
+ let(:package2) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '2.0.0', json: json) }
+ let(:packages) { [package] }
+
+ before do
+ package
+ package2
+
+ described_class.new(project: project, name: package_name).execute
+
+ package2.destroy!
+ end
+
+ it_behaves_like 'Composer create cache page'
+ it_behaves_like 'Composer marks cache page for deletion'
+ end
+
+ context 'when it is the last package' do
+ let!(:package) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '1.0.0', json: json) }
+ let!(:last_sha) do
+ described_class.new(project: project, name: package_name).execute
+ package.reload.composer_metadatum.version_cache_sha
+ end
+
+ before do
+ package.destroy!
+ end
+
+ subject { described_class.new(project: project, name: package_name, last_page_sha: last_sha).execute }
+
+ it_behaves_like 'Composer marks cache page for deletion'
+
+ it 'does not create a new page' do
+ expect { subject }.not_to change { Packages::Composer::CacheFile.count }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/composer/version_index_spec.rb b/spec/lib/gitlab/composer/version_index_spec.rb
index 4c4742d9f59..7b0ed703f42 100644
--- a/spec/lib/gitlab/composer/version_index_spec.rb
+++ b/spec/lib/gitlab/composer/version_index_spec.rb
@@ -15,7 +15,9 @@ RSpec.describe Gitlab::Composer::VersionIndex do
let(:packages) { [package1, package2] }
describe '#as_json' do
- subject(:index) { described_class.new(packages).as_json }
+ subject(:package_index) { index['packages'][package_name] }
+
+ let(:index) { described_class.new(packages).as_json }
def expected_json(package)
{
@@ -32,10 +34,16 @@ RSpec.describe Gitlab::Composer::VersionIndex do
end
it 'returns the packages json' do
- packages = index['packages'][package_name]
+ expect(package_index['1.0.0']).to eq(expected_json(package1))
+ expect(package_index['2.0.0']).to eq(expected_json(package2))
+ end
+
+ context 'with an unordered list of packages' do
+ let(:packages) { [package2, package1] }
- expect(packages['1.0.0']).to eq(expected_json(package1))
- expect(packages['2.0.0']).to eq(expected_json(package2))
+ it 'returns the packages sorted by version' do
+ expect(package_index.keys).to eq ['1.0.0', '2.0.0']
+ end
end
end
diff --git a/spec/lib/gitlab/conan_token_spec.rb b/spec/lib/gitlab/conan_token_spec.rb
index be1d3e757f5..00683cf6e47 100644
--- a/spec/lib/gitlab/conan_token_spec.rb
+++ b/spec/lib/gitlab/conan_token_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::ConanToken do
let(:jwt_secret) do
OpenSSL::HMAC.hexdigest(
- OpenSSL::Digest::SHA256.new,
+ OpenSSL::Digest.new('SHA256'),
base_secret,
described_class::HMAC_KEY
)
diff --git a/spec/lib/gitlab/crypto_helper_spec.rb b/spec/lib/gitlab/crypto_helper_spec.rb
index c07089d8ef0..024564ea213 100644
--- a/spec/lib/gitlab/crypto_helper_spec.rb
+++ b/spec/lib/gitlab/crypto_helper_spec.rb
@@ -19,21 +19,85 @@ RSpec.describe Gitlab::CryptoHelper do
expect(encrypted).to match %r{\A[A-Za-z0-9+/=]+\z}
expect(encrypted).not_to include "\n"
end
+
+ it 'does not save hashed token with iv value in database' do
+ expect { described_class.aes256_gcm_encrypt('some-value') }.not_to change { TokenWithIv.count }
+ end
+
+ it 'encrypts using static iv' do
+ expect(Encryptor).to receive(:encrypt).with(described_class::AES256_GCM_OPTIONS.merge(value: 'some-value', iv: described_class::AES256_GCM_IV_STATIC)).and_return('hashed_value')
+
+ described_class.aes256_gcm_encrypt('some-value')
+ end
end
describe '.aes256_gcm_decrypt' do
- let(:encrypted) { described_class.aes256_gcm_encrypt('some-value') }
+ before do
+ stub_feature_flags(dynamic_nonce_creation: false)
+ end
+
+ context 'when token was encrypted using static nonce' do
+ let(:encrypted) { described_class.aes256_gcm_encrypt('some-value', nonce: described_class::AES256_GCM_IV_STATIC) }
+
+ it 'correctly decrypts encrypted string' do
+ decrypted = described_class.aes256_gcm_decrypt(encrypted)
+
+ expect(decrypted).to eq 'some-value'
+ end
+
+ it 'decrypts a value when it ends with a new line character' do
+ decrypted = described_class.aes256_gcm_decrypt(encrypted + "\n")
- it 'correctly decrypts encrypted string' do
- decrypted = described_class.aes256_gcm_decrypt(encrypted)
+ expect(decrypted).to eq 'some-value'
+ end
- expect(decrypted).to eq 'some-value'
+ it 'does not save hashed token with iv value in database' do
+ expect { described_class.aes256_gcm_decrypt(encrypted) }.not_to change { TokenWithIv.count }
+ end
+
+ context 'with feature flag switched on' do
+ before do
+ stub_feature_flags(dynamic_nonce_creation: true)
+ end
+
+ it 'correctly decrypts encrypted string' do
+ decrypted = described_class.aes256_gcm_decrypt(encrypted)
+
+ expect(decrypted).to eq 'some-value'
+ end
+ end
end
- it 'decrypts a value when it ends with a new line character' do
- decrypted = described_class.aes256_gcm_decrypt(encrypted + "\n")
+ context 'when token was encrypted using random nonce' do
+ let(:value) { 'random-value' }
+
+ # for compatibility with tokens encrypted using a dynamic nonce
+ let!(:encrypted) do
+ iv = create_nonce
+ encrypted_token = described_class.create_encrypted_token(value, iv)
+ TokenWithIv.create!(hashed_token: Digest::SHA256.digest(encrypted_token), hashed_plaintext_token: Digest::SHA256.digest(encrypted_token), iv: iv)
+ encrypted_token
+ end
+
+ before do
+ stub_feature_flags(dynamic_nonce_creation: true)
+ end
- expect(decrypted).to eq 'some-value'
+ it 'correctly decrypts encrypted string' do
+ decrypted = described_class.aes256_gcm_decrypt(encrypted)
+
+ expect(decrypted).to eq value
+ end
+
+ it 'does not save hashed token with iv value in database' do
+ expect { described_class.aes256_gcm_decrypt(encrypted) }.not_to change { TokenWithIv.count }
+ end
end
end
+
+ def create_nonce
+ cipher = OpenSSL::Cipher.new('aes-256-gcm')
+ cipher.encrypt # Required before '#random_iv' can be called
+ cipher.random_iv # Ensures that the IV is the correct length for the algorithm used.
+ end
end
diff --git a/spec/lib/gitlab/current_settings_spec.rb b/spec/lib/gitlab/current_settings_spec.rb
index 786db23ffc4..01aceec12c5 100644
--- a/spec/lib/gitlab/current_settings_spec.rb
+++ b/spec/lib/gitlab/current_settings_spec.rb
@@ -194,4 +194,32 @@ RSpec.describe Gitlab::CurrentSettings do
end
end
end
+
+ describe '.current_application_settings?', :use_clean_rails_memory_store_caching do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:current_application_settings?).and_call_original
+ end
+
+ it 'returns true when settings exist' do
+ create(:application_setting,
+ home_page_url: 'http://mydomain.com',
+ signup_enabled: false)
+
+ expect(described_class.current_application_settings?).to eq(true)
+ end
+
+ it 'returns false when settings do not exist' do
+ expect(described_class.current_application_settings?).to eq(false)
+ end
+
+ context 'with cache', :request_store do
+ include_context 'with settings in cache'
+
+ it 'returns true without calling ApplicationSetting.current' do
+ expect(ApplicationSetting).not_to receive(:current)
+
+ expect(described_class.current_application_settings?).to eq(true)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/data_builder/build_spec.rb b/spec/lib/gitlab/data_builder/build_spec.rb
index 2f74e766a11..4242469b3db 100644
--- a/spec/lib/gitlab/data_builder/build_spec.rb
+++ b/spec/lib/gitlab/data_builder/build_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::DataBuilder::Build do
- let(:runner) { create(:ci_runner, :instance) }
+ let!(:tag_names) { %w(tag-1 tag-2) }
+ let(:runner) { create(:ci_runner, :instance, tag_list: tag_names.map { |n| ActsAsTaggableOn::Tag.create!(name: n) }) }
let(:user) { create(:user) }
let(:build) { create(:ci_build, :running, runner: runner, user: user) }
@@ -35,6 +36,7 @@ RSpec.describe Gitlab::DataBuilder::Build do
}
it { expect(data[:commit][:id]).to eq(build.pipeline.id) }
it { expect(data[:runner][:id]).to eq(build.runner.id) }
+ it { expect(data[:runner][:tags]).to match_array(tag_names) }
it { expect(data[:runner][:description]).to eq(build.runner.description) }
context 'commit author_url' do
diff --git a/spec/lib/gitlab/data_builder/pipeline_spec.rb b/spec/lib/gitlab/data_builder/pipeline_spec.rb
index 297d87708d8..32619fc4c37 100644
--- a/spec/lib/gitlab/data_builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/data_builder/pipeline_spec.rb
@@ -51,13 +51,15 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
context 'build with runner' do
let!(:build) { create(:ci_build, pipeline: pipeline, runner: ci_runner) }
- let(:ci_runner) { create(:ci_runner) }
+ let!(:tag_names) { %w(tag-1 tag-2) }
+ let(:ci_runner) { create(:ci_runner, tag_list: tag_names.map { |n| ActsAsTaggableOn::Tag.create!(name: n) }) }
it 'has runner attributes', :aggregate_failures do
expect(runner_data[:id]).to eq(ci_runner.id)
expect(runner_data[:description]).to eq(ci_runner.description)
expect(runner_data[:active]).to eq(ci_runner.active)
expect(runner_data[:is_shared]).to eq(ci_runner.instance_type?)
+ expect(runner_data[:tags]).to match_array(tag_names)
end
end
diff --git a/spec/lib/gitlab/database/migration_helpers/v2_spec.rb b/spec/lib/gitlab/database/migration_helpers/v2_spec.rb
new file mode 100644
index 00000000000..f132ecbf13b
--- /dev/null
+++ b/spec/lib/gitlab/database/migration_helpers/v2_spec.rb
@@ -0,0 +1,221 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::MigrationHelpers::V2 do
+ include Database::TriggerHelpers
+
+ let(:migration) do
+ ActiveRecord::Migration.new.extend(described_class)
+ end
+
+ before do
+ allow(migration).to receive(:puts)
+ end
+
+ shared_examples_for 'Setting up to rename a column' do
+ let(:model) { Class.new(ActiveRecord::Base) }
+
+ before do
+ model.table_name = :test_table
+ end
+
+ context 'when called inside a transaction block' do
+ before do
+ allow(migration).to receive(:transaction_open?).and_return(true)
+ end
+
+ it 'raises an error' do
+ expect do
+ migration.public_send(operation, :test_table, :original, :renamed)
+ end.to raise_error("#{operation} can not be run inside a transaction")
+ end
+ end
+
+ context 'when the existing column has a default value' do
+ before do
+ migration.change_column_default :test_table, existing_column, 'default value'
+ end
+
+ it 'raises an error' do
+ expect do
+ migration.public_send(operation, :test_table, :original, :renamed)
+ end.to raise_error("#{operation} does not currently support columns with default values")
+ end
+ end
+
+ context 'when passing a batch column' do
+ context 'when the batch column does not exist' do
+ it 'raises an error' do
+ expect do
+ migration.public_send(operation, :test_table, :original, :renamed, batch_column_name: :missing)
+ end.to raise_error('Column missing does not exist on test_table')
+ end
+ end
+
+ context 'when the batch column does exist' do
+ it 'passes it when creating the column' do
+ expect(migration).to receive(:create_column_from)
+ .with(:test_table, existing_column, added_column, type: nil, batch_column_name: :status)
+ .and_call_original
+
+ migration.public_send(operation, :test_table, :original, :renamed, batch_column_name: :status)
+ end
+ end
+ end
+
+ it 'creates the renamed column, syncing existing data' do
+ existing_record_1 = model.create!(status: 0, existing_column => 'existing')
+ existing_record_2 = model.create!(status: 0, existing_column => nil)
+
+ migration.send(operation, :test_table, :original, :renamed)
+ model.reset_column_information
+
+ expect(migration.column_exists?(:test_table, added_column)).to eq(true)
+
+ expect(existing_record_1.reload).to have_attributes(status: 0, original: 'existing', renamed: 'existing')
+ expect(existing_record_2.reload).to have_attributes(status: 0, original: nil, renamed: nil)
+ end
+
+ it 'installs triggers to sync new data' do
+ migration.public_send(operation, :test_table, :original, :renamed)
+ model.reset_column_information
+
+ new_record_1 = model.create!(status: 1, original: 'first')
+ new_record_2 = model.create!(status: 1, renamed: 'second')
+
+ expect(new_record_1.reload).to have_attributes(status: 1, original: 'first', renamed: 'first')
+ expect(new_record_2.reload).to have_attributes(status: 1, original: 'second', renamed: 'second')
+
+ new_record_1.update!(original: 'updated')
+ new_record_2.update!(renamed: nil)
+
+ expect(new_record_1.reload).to have_attributes(status: 1, original: 'updated', renamed: 'updated')
+ expect(new_record_2.reload).to have_attributes(status: 1, original: nil, renamed: nil)
+ end
+ end
+
+ describe '#rename_column_concurrently' do
+ before do
+ allow(migration).to receive(:transaction_open?).and_return(false)
+
+ migration.create_table :test_table do |t|
+ t.integer :status, null: false
+ t.text :original
+ t.text :other_column
+ end
+ end
+
+ it_behaves_like 'Setting up to rename a column' do
+ let(:operation) { :rename_column_concurrently }
+ let(:existing_column) { :original }
+ let(:added_column) { :renamed }
+ end
+
+ context 'when the column to rename does not exist' do
+ it 'raises an error' do
+ expect do
+ migration.rename_column_concurrently :test_table, :missing_column, :renamed
+ end.to raise_error('Column missing_column does not exist on test_table')
+ end
+ end
+ end
+
+ describe '#undo_cleanup_concurrent_column_rename' do
+ before do
+ allow(migration).to receive(:transaction_open?).and_return(false)
+
+ migration.create_table :test_table do |t|
+ t.integer :status, null: false
+ t.text :other_column
+ t.text :renamed
+ end
+ end
+
+ it_behaves_like 'Setting up to rename a column' do
+ let(:operation) { :undo_cleanup_concurrent_column_rename }
+ let(:existing_column) { :renamed }
+ let(:added_column) { :original }
+ end
+
+ context 'when the renamed column does not exist' do
+ it 'raises an error' do
+ expect do
+ migration.undo_cleanup_concurrent_column_rename :test_table, :original, :missing_column
+ end.to raise_error('Column missing_column does not exist on test_table')
+ end
+ end
+ end
+
+ shared_examples_for 'Cleaning up from renaming a column' do
+ let(:connection) { migration.connection }
+
+ before do
+ allow(migration).to receive(:transaction_open?).and_return(false)
+
+ migration.create_table :test_table do |t|
+ t.integer :status, null: false
+ t.text :original
+ t.text :other_column
+ end
+
+ migration.rename_column_concurrently :test_table, :original, :renamed
+ end
+
+ context 'when the helper is called repeatedly' do
+ before do
+ migration.public_send(operation, :test_table, :original, :renamed)
+ end
+
+ it 'does not make repeated attempts to cleanup' do
+ expect(migration).not_to receive(:remove_column)
+
+ expect do
+ migration.public_send(operation, :test_table, :original, :renamed)
+ end.not_to raise_error
+ end
+ end
+
+ context 'when the renamed column exists' do
+ let(:triggers) do
+ [
+ ['trigger_7cc71f92fd63', 'function_for_trigger_7cc71f92fd63', before: 'insert'],
+ ['trigger_f1a1f619636a', 'function_for_trigger_f1a1f619636a', before: 'update'],
+ ['trigger_769a49938884', 'function_for_trigger_769a49938884', before: 'update']
+ ]
+ end
+
+ it 'removes the sync triggers and renamed columns' do
+ triggers.each do |(trigger_name, function_name, event)|
+ expect_function_to_exist(function_name)
+ expect_valid_function_trigger(:test_table, trigger_name, function_name, event)
+ end
+
+ expect(migration.column_exists?(:test_table, added_column)).to eq(true)
+
+ migration.public_send(operation, :test_table, :original, :renamed)
+
+ expect(migration.column_exists?(:test_table, added_column)).to eq(false)
+
+ triggers.each do |(trigger_name, function_name, _)|
+ expect_trigger_not_to_exist(:test_table, trigger_name)
+ expect_function_not_to_exist(function_name)
+ end
+ end
+ end
+ end
+
+ describe '#undo_rename_column_concurrently' do
+ it_behaves_like 'Cleaning up from renaming a column' do
+ let(:operation) { :undo_rename_column_concurrently }
+ let(:added_column) { :renamed }
+ end
+ end
+
+ describe '#cleanup_concurrent_column_rename' do
+ it_behaves_like 'Cleaning up from renaming a column' do
+ let(:operation) { :cleanup_concurrent_column_rename }
+ let(:added_column) { :original }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 6b709cba5b3..6de7fc3a50e 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -1874,7 +1874,6 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
has_internal_id :iid,
scope: :project,
init: ->(s, _scope) { s&.project&.issues&.maximum(:iid) },
- backfill: true,
presence: false
end
end
@@ -1928,258 +1927,6 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(issue_b.iid).to eq(3)
end
- context 'when the new code creates a row post deploy but before the migration runs' do
- it 'does not change the row iid' do
- project = setup
- issue = Issue.create!(project_id: project.id)
-
- model.backfill_iids('issues')
-
- expect(issue.reload.iid).to eq(1)
- end
-
- it 'backfills iids for rows already in the database' do
- project = setup
- issue_a = issues.create!(project_id: project.id)
- issue_b = issues.create!(project_id: project.id)
- issue_c = Issue.create!(project_id: project.id)
-
- model.backfill_iids('issues')
-
- expect(issue_a.reload.iid).to eq(1)
- expect(issue_b.reload.iid).to eq(2)
- expect(issue_c.reload.iid).to eq(3)
- end
-
- it 'backfills iids across multiple projects' do
- project_a = setup
- project_b = setup
- issue_a = issues.create!(project_id: project_a.id)
- issue_b = issues.create!(project_id: project_b.id)
- issue_c = Issue.create!(project_id: project_a.id)
- issue_d = Issue.create!(project_id: project_b.id)
-
- model.backfill_iids('issues')
-
- expect(issue_a.reload.iid).to eq(1)
- expect(issue_b.reload.iid).to eq(1)
- expect(issue_c.reload.iid).to eq(2)
- expect(issue_d.reload.iid).to eq(2)
- end
-
- it 'generates iids properly for models created after the migration' do
- project = setup
- issue_a = issues.create!(project_id: project.id)
- issue_b = issues.create!(project_id: project.id)
- issue_c = Issue.create!(project_id: project.id)
-
- model.backfill_iids('issues')
-
- issue_d = Issue.create!(project_id: project.id)
- issue_e = Issue.create!(project_id: project.id)
-
- expect(issue_a.reload.iid).to eq(1)
- expect(issue_b.reload.iid).to eq(2)
- expect(issue_c.reload.iid).to eq(3)
- expect(issue_d.iid).to eq(4)
- expect(issue_e.iid).to eq(5)
- end
-
- it 'backfills iids and properly generates iids for new models across multiple projects' do
- project_a = setup
- project_b = setup
- issue_a = issues.create!(project_id: project_a.id)
- issue_b = issues.create!(project_id: project_b.id)
- issue_c = Issue.create!(project_id: project_a.id)
- issue_d = Issue.create!(project_id: project_b.id)
-
- model.backfill_iids('issues')
-
- issue_e = Issue.create!(project_id: project_a.id)
- issue_f = Issue.create!(project_id: project_b.id)
- issue_g = Issue.create!(project_id: project_a.id)
-
- expect(issue_a.reload.iid).to eq(1)
- expect(issue_b.reload.iid).to eq(1)
- expect(issue_c.reload.iid).to eq(2)
- expect(issue_d.reload.iid).to eq(2)
- expect(issue_e.iid).to eq(3)
- expect(issue_f.iid).to eq(3)
- expect(issue_g.iid).to eq(4)
- end
- end
-
- context 'when the new code creates a model and then old code creates a model post deploy but before the migration runs' do
- it 'backfills iids' do
- project = setup
- issue_a = issues.create!(project_id: project.id)
- issue_b = Issue.create!(project_id: project.id)
- issue_c = issues.create!(project_id: project.id)
-
- model.backfill_iids('issues')
-
- expect(issue_a.reload.iid).to eq(1)
- expect(issue_b.reload.iid).to eq(2)
- expect(issue_c.reload.iid).to eq(3)
- end
-
- it 'generates an iid for a new model after the migration' do
- project = setup
- issue_a = issues.create!(project_id: project.id)
- issue_b = issues.create!(project_id: project.id)
- issue_c = Issue.create!(project_id: project.id)
- issue_d = issues.create!(project_id: project.id)
-
- model.backfill_iids('issues')
-
- issue_e = Issue.create!(project_id: project.id)
-
- expect(issue_a.reload.iid).to eq(1)
- expect(issue_b.reload.iid).to eq(2)
- expect(issue_c.reload.iid).to eq(3)
- expect(issue_d.reload.iid).to eq(4)
- expect(issue_e.iid).to eq(5)
- end
- end
-
- context 'when the new code and old code alternate creating models post deploy but before the migration runs' do
- it 'backfills iids' do
- project = setup
- issue_a = issues.create!(project_id: project.id)
- issue_b = Issue.create!(project_id: project.id)
- issue_c = issues.create!(project_id: project.id)
- issue_d = Issue.create!(project_id: project.id)
-
- model.backfill_iids('issues')
-
- expect(issue_a.reload.iid).to eq(1)
- expect(issue_b.reload.iid).to eq(2)
- expect(issue_c.reload.iid).to eq(3)
- expect(issue_d.reload.iid).to eq(4)
- end
-
- it 'generates an iid for a new model after the migration' do
- project = setup
- issue_a = issues.create!(project_id: project.id)
- issue_b = issues.create!(project_id: project.id)
- issue_c = Issue.create!(project_id: project.id)
- issue_d = issues.create!(project_id: project.id)
- issue_e = Issue.create!(project_id: project.id)
-
- model.backfill_iids('issues')
-
- issue_f = Issue.create!(project_id: project.id)
-
- expect(issue_a.reload.iid).to eq(1)
- expect(issue_b.reload.iid).to eq(2)
- expect(issue_c.reload.iid).to eq(3)
- expect(issue_d.reload.iid).to eq(4)
- expect(issue_e.reload.iid).to eq(5)
- expect(issue_f.iid).to eq(6)
- end
- end
-
- context 'when the new code creates and deletes a model post deploy but before the migration runs' do
- it 'backfills iids for rows already in the database' do
- project = setup
- issue_a = issues.create!(project_id: project.id)
- issue_b = issues.create!(project_id: project.id)
- issue_c = Issue.create!(project_id: project.id)
- issue_c.delete
-
- model.backfill_iids('issues')
-
- expect(issue_a.reload.iid).to eq(1)
- expect(issue_b.reload.iid).to eq(2)
- end
-
- it 'successfully creates a new model after the migration' do
- project = setup
- issue_a = issues.create!(project_id: project.id)
- issue_b = issues.create!(project_id: project.id)
- issue_c = Issue.create!(project_id: project.id)
- issue_c.delete
-
- model.backfill_iids('issues')
-
- issue_d = Issue.create!(project_id: project.id)
-
- expect(issue_a.reload.iid).to eq(1)
- expect(issue_b.reload.iid).to eq(2)
- expect(issue_d.iid).to eq(3)
- end
- end
-
- context 'when the new code creates and deletes a model and old code creates a model post deploy but before the migration runs' do
- it 'backfills iids' do
- project = setup
- issue_a = issues.create!(project_id: project.id)
- issue_b = issues.create!(project_id: project.id)
- issue_c = Issue.create!(project_id: project.id)
- issue_c.delete
- issue_d = issues.create!(project_id: project.id)
-
- model.backfill_iids('issues')
-
- expect(issue_a.reload.iid).to eq(1)
- expect(issue_b.reload.iid).to eq(2)
- expect(issue_d.reload.iid).to eq(3)
- end
-
- it 'successfully creates a new model after the migration' do
- project = setup
- issue_a = issues.create!(project_id: project.id)
- issue_b = issues.create!(project_id: project.id)
- issue_c = Issue.create!(project_id: project.id)
- issue_c.delete
- issue_d = issues.create!(project_id: project.id)
-
- model.backfill_iids('issues')
-
- issue_e = Issue.create!(project_id: project.id)
-
- expect(issue_a.reload.iid).to eq(1)
- expect(issue_b.reload.iid).to eq(2)
- expect(issue_d.reload.iid).to eq(3)
- expect(issue_e.iid).to eq(4)
- end
- end
-
- context 'when the new code creates and deletes a model and then creates another model post deploy but before the migration runs' do
- it 'successfully generates an iid for a new model after the migration' do
- project = setup
- issue_a = issues.create!(project_id: project.id)
- issue_b = issues.create!(project_id: project.id)
- issue_c = Issue.create!(project_id: project.id)
- issue_c.delete
- issue_d = Issue.create!(project_id: project.id)
-
- model.backfill_iids('issues')
-
- expect(issue_a.reload.iid).to eq(1)
- expect(issue_b.reload.iid).to eq(2)
- expect(issue_d.reload.iid).to eq(3)
- end
-
- it 'successfully generates an iid for a new model after the migration' do
- project = setup
- issue_a = issues.create!(project_id: project.id)
- issue_b = issues.create!(project_id: project.id)
- issue_c = Issue.create!(project_id: project.id)
- issue_c.delete
- issue_d = Issue.create!(project_id: project.id)
-
- model.backfill_iids('issues')
-
- issue_e = Issue.create!(project_id: project.id)
-
- expect(issue_a.reload.iid).to eq(1)
- expect(issue_b.reload.iid).to eq(2)
- expect(issue_d.reload.iid).to eq(3)
- expect(issue_e.iid).to eq(4)
- end
- end
-
context 'when the first model is created for a project after the migration' do
it 'generates an iid' do
project_a = setup
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
index b50e02c7043..b5d741fc5e9 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
@@ -513,6 +513,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
context 'finishing pending background migration jobs' do
let(:source_table_double) { double('table name') }
let(:raw_arguments) { [1, 50_000, source_table_double, partitioned_table, source_column] }
+ let(:background_job) { double('background job', args: ['background jobs', raw_arguments]) }
before do
allow(migration).to receive(:table_exists?).with(partitioned_table).and_return(true)
@@ -528,7 +529,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
expect(Gitlab::BackgroundMigration).to receive(:steal)
.with(described_class::MIGRATION_CLASS_NAME)
- .and_yield(raw_arguments)
+ .and_yield(background_job)
expect(source_table_double).to receive(:==).with(source_table.to_s)
diff --git a/spec/lib/gitlab/database/with_lock_retries_spec.rb b/spec/lib/gitlab/database/with_lock_retries_spec.rb
index 220ae705e71..563399ff0d9 100644
--- a/spec/lib/gitlab/database/with_lock_retries_spec.rb
+++ b/spec/lib/gitlab/database/with_lock_retries_spec.rb
@@ -54,6 +54,10 @@ RSpec.describe Gitlab::Database::WithLockRetries do
lock_fiber.resume # start the transaction and lock the table
end
+ after do
+ lock_fiber.resume if lock_fiber.alive?
+ end
+
context 'lock_fiber' do
it 'acquires lock successfully' do
check_exclusive_lock_query = """
diff --git a/spec/lib/gitlab/diff/char_diff_spec.rb b/spec/lib/gitlab/diff/char_diff_spec.rb
new file mode 100644
index 00000000000..e4e2a3ba050
--- /dev/null
+++ b/spec/lib/gitlab/diff/char_diff_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'diff_match_patch'
+
+RSpec.describe Gitlab::Diff::CharDiff do
+ let(:old_string) { "Helo \n Worlld" }
+ let(:new_string) { "Hello \n World" }
+
+ subject(:diff) { described_class.new(old_string, new_string) }
+
+ describe '#generate_diff' do
+ context 'when old string is nil' do
+ let(:old_string) { nil }
+
+ it 'does not raise an error' do
+ expect { subject.generate_diff }.not_to raise_error
+ end
+
+ it 'treats nil values as blank strings' do
+ changes = subject.generate_diff
+
+ expect(changes).to eq([
+ [:insert, "Hello \n World"]
+ ])
+ end
+ end
+
+ it 'generates an array of changes' do
+ changes = subject.generate_diff
+
+ expect(changes).to eq([
+ [:equal, "Hel"],
+ [:insert, "l"],
+ [:equal, "o \n Worl"],
+ [:delete, "l"],
+ [:equal, "d"]
+ ])
+ end
+ end
+
+ describe '#changed_ranges' do
+ subject { diff.changed_ranges }
+
+ context 'when old string is nil' do
+ let(:old_string) { nil }
+
+ it 'returns lists of changes' do
+ old_diffs, new_diffs = subject
+
+ expect(old_diffs).to eq([])
+ expect(new_diffs).to eq([0..12])
+ end
+ end
+
+ it 'returns ranges of changes' do
+ old_diffs, new_diffs = subject
+
+ expect(old_diffs).to eq([11..11])
+ expect(new_diffs).to eq([3..3])
+ end
+ end
+
+ describe '#to_html' do
+ it 'returns an HTML representation of the diff' do
+ subject.generate_diff
+
+ expect(subject.to_html).to eq(
+ '<span class="idiff">Hel</span>' \
+ '<span class="idiff addition">l</span>' \
+ "<span class=\"idiff\">o \n Worl</span>" \
+ '<span class="idiff deletion">l</span>' \
+ '<span class="idiff">d</span>'
+ )
+ end
+ end
+end
diff --git a/spec/lib/gitlab/diff/file_collection_sorter_spec.rb b/spec/lib/gitlab/diff/file_collection_sorter_spec.rb
index 8822fc55c6e..9ba9271cefc 100644
--- a/spec/lib/gitlab/diff/file_collection_sorter_spec.rb
+++ b/spec/lib/gitlab/diff/file_collection_sorter_spec.rb
@@ -5,11 +5,14 @@ require 'spec_helper'
RSpec.describe Gitlab::Diff::FileCollectionSorter do
let(:diffs) do
[
+ double(new_path: 'README', old_path: 'README'),
double(new_path: '.dir/test', old_path: '.dir/test'),
double(new_path: '', old_path: '.file'),
double(new_path: '1-folder/A-file.ext', old_path: '1-folder/A-file.ext'),
+ double(new_path: '1-folder/README', old_path: '1-folder/README'),
double(new_path: nil, old_path: '1-folder/M-file.ext'),
double(new_path: '1-folder/Z-file.ext', old_path: '1-folder/Z-file.ext'),
+ double(new_path: '1-folder/README', old_path: '1-folder/README'),
double(new_path: '', old_path: '1-folder/nested/A-file.ext'),
double(new_path: '1-folder/nested/M-file.ext', old_path: '1-folder/nested/M-file.ext'),
double(new_path: nil, old_path: '1-folder/nested/Z-file.ext'),
@@ -19,7 +22,8 @@ RSpec.describe Gitlab::Diff::FileCollectionSorter do
double(new_path: nil, old_path: '2-folder/nested/A-file.ext'),
double(new_path: 'A-file.ext', old_path: 'A-file.ext'),
double(new_path: '', old_path: 'M-file.ext'),
- double(new_path: 'Z-file.ext', old_path: 'Z-file.ext')
+ double(new_path: 'Z-file.ext', old_path: 'Z-file.ext'),
+ double(new_path: 'README', old_path: 'README')
]
end
@@ -36,6 +40,8 @@ RSpec.describe Gitlab::Diff::FileCollectionSorter do
'1-folder/nested/Z-file.ext',
'1-folder/A-file.ext',
'1-folder/M-file.ext',
+ '1-folder/README',
+ '1-folder/README',
'1-folder/Z-file.ext',
'2-folder/nested/A-file.ext',
'2-folder/A-file.ext',
@@ -44,6 +50,8 @@ RSpec.describe Gitlab::Diff::FileCollectionSorter do
'.file',
'A-file.ext',
'M-file.ext',
+ 'README',
+ 'README',
'Z-file.ext'
])
end
diff --git a/spec/lib/gitlab/diff/highlight_cache_spec.rb b/spec/lib/gitlab/diff/highlight_cache_spec.rb
index f6810d7a966..94717152488 100644
--- a/spec/lib/gitlab/diff/highlight_cache_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_cache_spec.rb
@@ -233,4 +233,22 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
cache.write_if_empty
end
end
+
+ describe '#key' do
+ subject { cache.key }
+
+ it 'returns the next version of the cache' do
+ is_expected.to start_with("highlighted-diff-files:#{cache.diffable.cache_key}:2")
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(improved_merge_diff_highlighting: false)
+ end
+
+ it 'returns the original version of the cache' do
+ is_expected.to start_with("highlighted-diff-files:#{cache.diffable.cache_key}:1")
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/diff/inline_diff_spec.rb b/spec/lib/gitlab/diff/inline_diff_spec.rb
index 35284e952f7..dce655d5690 100644
--- a/spec/lib/gitlab/diff/inline_diff_spec.rb
+++ b/spec/lib/gitlab/diff/inline_diff_spec.rb
@@ -37,6 +37,33 @@ RSpec.describe Gitlab::Diff::InlineDiff do
it 'can handle unchanged empty lines' do
expect { described_class.for_lines(['- bar', '+ baz', '']) }.not_to raise_error
end
+
+ context 'when lines have multiple changes' do
+ let(:diff) do
+ <<~EOF
+ - Hello, how are you?
+ + Hi, how are you doing?
+ EOF
+ end
+
+ let(:subject) { described_class.for_lines(diff.lines) }
+
+ it 'finds all inline diffs' do
+ expect(subject[0]).to eq([3..6])
+ expect(subject[1]).to eq([3..3, 17..22])
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(improved_merge_diff_highlighting: false)
+ end
+
+ it 'finds all inline diffs' do
+ expect(subject[0]).to eq([3..19])
+ expect(subject[1]).to eq([3..22])
+ end
+ end
+ end
end
describe "#inline_diffs" do
diff --git a/spec/lib/gitlab/experimentation/controller_concern_spec.rb b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
index c47f71c207d..1cebe37bea5 100644
--- a/spec/lib/gitlab/experimentation/controller_concern_spec.rb
+++ b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
@@ -10,6 +10,10 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
use_backwards_compatible_subject_index: true
},
test_experiment: {
+ tracking_category: 'Team',
+ rollout_strategy: rollout_strategy
+ },
+ my_experiment: {
tracking_category: 'Team'
}
}
@@ -20,6 +24,7 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
end
let(:enabled_percentage) { 10 }
+ let(:rollout_strategy) { nil }
controller(ApplicationController) do
include Gitlab::Experimentation::ControllerConcern
@@ -117,6 +122,7 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
end
context 'when subject is given' do
+ let(:rollout_strategy) { :user }
let(:user) { build(:user) }
it 'uses the subject' do
@@ -244,6 +250,7 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
it "provides the subject's hashed global_id as label" do
experiment_subject = double(:subject, to_global_id: 'abc')
+ allow(Gitlab::Experimentation).to receive(:valid_subject_for_rollout_strategy?).and_return(true)
controller.track_experiment_event(:test_experiment, 'start', 1, subject: experiment_subject)
@@ -420,6 +427,26 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
controller.record_experiment_user(:test_experiment, context)
end
+
+ context 'with a cookie based rollout strategy' do
+ it 'calls tracking_group with a nil subject' do
+ expect(controller).to receive(:tracking_group).with(:test_experiment, nil, subject: nil).and_return(:experimental)
+ allow(::Experiment).to receive(:add_user).with(:test_experiment, :experimental, user, context)
+
+ controller.record_experiment_user(:test_experiment, context)
+ end
+ end
+
+ context 'with a user based rollout strategy' do
+ let(:rollout_strategy) { :user }
+
+ it 'calls tracking_group with a user subject' do
+ expect(controller).to receive(:tracking_group).with(:test_experiment, nil, subject: user).and_return(:experimental)
+ allow(::Experiment).to receive(:add_user).with(:test_experiment, :experimental, user, context)
+
+ controller.record_experiment_user(:test_experiment, context)
+ end
+ end
end
context 'the user is part of the control group' do
diff --git a/spec/lib/gitlab/experimentation/experiment_spec.rb b/spec/lib/gitlab/experimentation/experiment_spec.rb
index 008e6699597..94dbf1d7e4b 100644
--- a/spec/lib/gitlab/experimentation/experiment_spec.rb
+++ b/spec/lib/gitlab/experimentation/experiment_spec.rb
@@ -9,7 +9,8 @@ RSpec.describe Gitlab::Experimentation::Experiment do
let(:params) do
{
tracking_category: 'Category1',
- use_backwards_compatible_subject_index: true
+ use_backwards_compatible_subject_index: true,
+ rollout_strategy: nil
}
end
diff --git a/spec/lib/gitlab/experimentation_spec.rb b/spec/lib/gitlab/experimentation_spec.rb
index b503960b8c7..4ef8a75de4f 100644
--- a/spec/lib/gitlab/experimentation_spec.rb
+++ b/spec/lib/gitlab/experimentation_spec.rb
@@ -15,8 +15,7 @@ RSpec.describe Gitlab::Experimentation::EXPERIMENTS do
:invite_members_empty_group_version_a,
:contact_sales_btn_in_app,
:customize_homepage,
- :group_only_trials,
- :default_to_issues_board
+ :group_only_trials
]
backwards_compatible_experiment_keys = described_class.filter { |_, v| v[:use_backwards_compatible_subject_index] }.keys
@@ -27,6 +26,8 @@ RSpec.describe Gitlab::Experimentation::EXPERIMENTS do
end
RSpec.describe Gitlab::Experimentation do
+ using RSpec::Parameterized::TableSyntax
+
before do
stub_const('Gitlab::Experimentation::EXPERIMENTS', {
backwards_compatible_test_experiment: {
@@ -35,6 +36,10 @@ RSpec.describe Gitlab::Experimentation do
},
test_experiment: {
tracking_category: 'Team'
+ },
+ tabular_experiment: {
+ tracking_category: 'Team',
+ rollout_strategy: rollout_strategy
}
})
@@ -46,6 +51,7 @@ RSpec.describe Gitlab::Experimentation do
end
let(:enabled_percentage) { 10 }
+ let(:rollout_strategy) { nil }
describe '.get_experiment' do
subject { described_class.get_experiment(:test_experiment) }
@@ -175,4 +181,59 @@ RSpec.describe Gitlab::Experimentation do
end
end
end
+
+ describe '.log_invalid_rollout' do
+ subject { described_class.log_invalid_rollout(:test_experiment, 1) }
+
+ before do
+ allow(described_class).to receive(:valid_subject_for_rollout_strategy?).and_return(valid)
+ end
+
+ context 'subject is not valid for experiment' do
+ let(:valid) { false }
+
+ it 'logs a warning message' do
+ expect_next_instance_of(Gitlab::ExperimentationLogger) do |logger|
+ expect(logger)
+ .to receive(:warn)
+ .with(
+ message: 'Subject must conform to the rollout strategy',
+ experiment_key: :test_experiment,
+ subject: 'Integer',
+ rollout_strategy: :cookie
+ )
+ end
+
+ subject
+ end
+ end
+
+ context 'subject is valid for experiment' do
+ let(:valid) { true }
+
+ it 'does not log a warning message' do
+ expect(Gitlab::ExperimentationLogger).not_to receive(:build)
+
+ subject
+ end
+ end
+ end
+
+ describe '.valid_subject_for_rollout_strategy?' do
+ subject { described_class.valid_subject_for_rollout_strategy?(:tabular_experiment, experiment_subject) }
+
+ where(:rollout_strategy, :experiment_subject, :result) do
+ :cookie | nil | true
+ nil | nil | true
+ :cookie | 'string' | true
+ nil | User.new | false
+ :user | User.new | true
+ :group | User.new | false
+ :group | Group.new | true
+ end
+
+ with_them do
+ it { is_expected.to be(result) }
+ end
+ end
end
diff --git a/spec/lib/gitlab/file_finder_spec.rb b/spec/lib/gitlab/file_finder_spec.rb
index 8d6df62b3f6..0b5303f22b4 100644
--- a/spec/lib/gitlab/file_finder_spec.rb
+++ b/spec/lib/gitlab/file_finder_spec.rb
@@ -53,6 +53,14 @@ RSpec.describe Gitlab::FileFinder do
end
end
+ context 'with white space in the path' do
+ it 'filters by path correctly' do
+ results = subject.find('directory path:"with space/README.md"')
+
+ expect(results.count).to eq(1)
+ end
+ end
+
it 'does not cause N+1 query' do
expect(Gitlab::GitalyClient).to receive(:call).at_most(10).times.and_call_original
diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb
index 8961cdcae7d..49f1e6e994f 100644
--- a/spec/lib/gitlab/git/commit_spec.rb
+++ b/spec/lib/gitlab/git/commit_spec.rb
@@ -720,7 +720,8 @@ RSpec.describe Gitlab::Git::Commit, :seed_helper do
committer_name: "Dmitriy Zaporozhets",
id: SeedRepo::Commit::ID,
message: "tree css fixes",
- parent_ids: ["874797c3a73b60d2187ed6e2fcabd289ff75171e"]
+ parent_ids: ["874797c3a73b60d2187ed6e2fcabd289ff75171e"],
+ trailers: {}
}
end
end
diff --git a/spec/lib/gitlab/git/diff_spec.rb b/spec/lib/gitlab/git/diff_spec.rb
index 783f0a9ccf7..17bb83d0f2f 100644
--- a/spec/lib/gitlab/git/diff_spec.rb
+++ b/spec/lib/gitlab/git/diff_spec.rb
@@ -100,6 +100,13 @@ EOT
expect(diff.diff).to be_empty
expect(diff).to be_too_large
end
+
+ it 'logs the event' do
+ expect(Gitlab::Metrics).to receive(:add_event)
+ .with(:patch_hard_limit_bytes_hit)
+
+ diff
+ end
end
context 'using a collapsable diff that is too large' do
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index ef9b5a30c86..cc1b1ceadcf 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -1894,8 +1894,11 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
it 'removes the remote' do
repository_rugged.remotes.create(remote_name, url)
- repository.remove_remote(remote_name)
+ expect(repository.remove_remote(remote_name)).to be true
+ # Since we deleted the remote via Gitaly, Rugged doesn't know
+ # this changed underneath it. Let's refresh the Rugged repo.
+ repository_rugged = Rugged::Repository.new(repository_path)
expect(repository_rugged.remotes[remote_name]).to be_nil
end
end
diff --git a/spec/lib/gitlab/graphql/pagination/connections_spec.rb b/spec/lib/gitlab/graphql/pagination/connections_spec.rb
new file mode 100644
index 00000000000..e89e5c17644
--- /dev/null
+++ b/spec/lib/gitlab/graphql/pagination/connections_spec.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# Tests that our connections are correctly mapped.
+RSpec.describe ::Gitlab::Graphql::Pagination::Connections do
+ include GraphqlHelpers
+
+ before(:all) do
+ ActiveRecord::Schema.define do
+ create_table :testing_pagination_nodes, force: true do |t|
+ t.integer :value, null: false
+ end
+ end
+ end
+
+ after(:all) do
+ ActiveRecord::Schema.define do
+ drop_table :testing_pagination_nodes, force: true
+ end
+ end
+
+ let_it_be(:node_model) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'testing_pagination_nodes'
+ end
+ end
+
+ let(:query_string) { 'query { items(first: 2) { nodes { value } } }' }
+ let(:user) { nil }
+
+ let(:node) { Struct.new(:value) }
+ let(:node_type) do
+ Class.new(::GraphQL::Schema::Object) do
+ graphql_name 'Node'
+ field :value, GraphQL::INT_TYPE, null: false
+ end
+ end
+
+ let(:query_type) do
+ item_values = nodes
+
+ query_factory do |t|
+ t.field :items, node_type.connection_type, null: true
+
+ t.define_method :items do
+ item_values
+ end
+ end
+ end
+
+ shared_examples 'it maps to a specific connection class' do |connection_type|
+ let(:raw_values) { [1, 7, 42] }
+
+ it "maps to #{connection_type.name}" do
+ expect(connection_type).to receive(:new).and_call_original
+
+ results = execute_query(query_type).to_h
+
+ expect(graphql_dig_at(results, :data, :items, :nodes, :value)).to eq [1, 7]
+ end
+ end
+
+ describe 'OffsetPaginatedRelation' do
+ before do
+ # Expect to be ordered by an explicit ordering.
+ raw_values.each_with_index { |value, id| node_model.create!(id: id, value: value) }
+ end
+
+ let(:nodes) { ::Gitlab::Graphql::Pagination::OffsetPaginatedRelation.new(node_model.order(value: :asc)) }
+
+ include_examples 'it maps to a specific connection class', Gitlab::Graphql::Pagination::OffsetActiveRecordRelationConnection
+ end
+
+ describe 'ActiveRecord::Relation' do
+ before do
+ # Expect to be ordered by ID descending
+ [3, 2, 1].zip(raw_values) { |id, value| node_model.create!(id: id, value: value) }
+ end
+
+ let(:nodes) { node_model.all }
+
+ include_examples 'it maps to a specific connection class', Gitlab::Graphql::Pagination::Keyset::Connection
+ end
+
+ describe 'ExternallyPaginatedArray' do
+ let(:nodes) { ::Gitlab::Graphql::ExternallyPaginatedArray.new(nil, nil, node.new(1), node.new(7)) }
+
+ include_examples 'it maps to a specific connection class', Gitlab::Graphql::Pagination::ExternallyPaginatedArrayConnection
+ end
+
+ describe 'Array' do
+ let(:nodes) { raw_values.map { |x| node.new(x) } }
+
+ include_examples 'it maps to a specific connection class', Gitlab::Graphql::Pagination::ArrayConnection
+ end
+end
diff --git a/spec/lib/gitlab/hook_data/group_builder_spec.rb b/spec/lib/gitlab/hook_data/group_builder_spec.rb
new file mode 100644
index 00000000000..d7347ff99d4
--- /dev/null
+++ b/spec/lib/gitlab/hook_data/group_builder_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::HookData::GroupBuilder do
+ let_it_be(:group) { create(:group) }
+
+ describe '#build' do
+ let(:data) { described_class.new(group).build(event) }
+ let(:event_name) { data[:event_name] }
+ let(:attributes) do
+ [
+ :event_name, :created_at, :updated_at, :name, :path, :full_path, :group_id
+ ]
+ end
+
+ context 'data' do
+ shared_examples_for 'includes the required attributes' do
+ it 'includes the required attributes' do
+ expect(data).to include(*attributes)
+
+ expect(data[:name]).to eq(group.name)
+ expect(data[:path]).to eq(group.path)
+ expect(data[:full_path]).to eq(group.full_path)
+ expect(data[:group_id]).to eq(group.id)
+ expect(data[:created_at]).to eq(group.created_at.xmlschema)
+ expect(data[:updated_at]).to eq(group.updated_at.xmlschema)
+ end
+ end
+
+ shared_examples_for 'does not include old path attributes' do
+ it 'does not include old path attributes' do
+ expect(data).not_to include(:old_path, :old_full_path)
+ end
+ end
+
+ context 'on create' do
+ let(:event) { :create }
+
+ it { expect(event_name).to eq('group_create') }
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'does not include old path attributes'
+ end
+
+ context 'on destroy' do
+ let(:event) { :destroy }
+
+ it { expect(event_name).to eq('group_destroy') }
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'does not include old path attributes'
+ end
+
+ context 'on rename' do
+ let(:event) { :rename }
+
+ it { expect(event_name).to eq('group_rename') }
+ it_behaves_like 'includes the required attributes'
+
+ it 'includes old path details' do
+ allow(group).to receive(:path_before_last_save).and_return('old-path')
+
+ expect(data[:old_path]).to eq(group.path_before_last_save)
+ expect(data[:old_full_path]).to eq(group.path_before_last_save)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/hook_data/subgroup_builder_spec.rb b/spec/lib/gitlab/hook_data/subgroup_builder_spec.rb
new file mode 100644
index 00000000000..89e5dffd7b4
--- /dev/null
+++ b/spec/lib/gitlab/hook_data/subgroup_builder_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::HookData::SubgroupBuilder do
+ let_it_be(:parent_group) { create(:group) }
+ let_it_be(:subgroup) { create(:group, parent: parent_group) }
+
+ describe '#build' do
+ let(:data) { described_class.new(subgroup).build(event) }
+ let(:event_name) { data[:event_name] }
+ let(:attributes) do
+ [
+ :event_name, :created_at, :updated_at, :name, :path, :full_path, :group_id,
+ :parent_group_id, :parent_name, :parent_path, :parent_full_path
+ ]
+ end
+
+ context 'data' do
+ shared_examples_for 'includes the required attributes' do
+ it 'includes the required attributes' do
+ expect(data).to include(*attributes)
+
+ expect(data[:name]).to eq(subgroup.name)
+ expect(data[:path]).to eq(subgroup.path)
+ expect(data[:full_path]).to eq(subgroup.full_path)
+ expect(data[:group_id]).to eq(subgroup.id)
+ expect(data[:created_at]).to eq(subgroup.created_at.xmlschema)
+ expect(data[:updated_at]).to eq(subgroup.updated_at.xmlschema)
+ expect(data[:parent_name]).to eq(parent_group.name)
+ expect(data[:parent_path]).to eq(parent_group.path)
+ expect(data[:parent_full_path]).to eq(parent_group.full_path)
+ expect(data[:parent_group_id]).to eq(parent_group.id)
+ end
+ end
+
+ context 'on create' do
+ let(:event) { :create }
+
+ it { expect(event_name).to eq('subgroup_create') }
+ it_behaves_like 'includes the required attributes'
+ end
+
+ context 'on destroy' do
+ let(:event) { :destroy }
+
+ it { expect(event_name).to eq('subgroup_destroy') }
+ it_behaves_like 'includes the required attributes'
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 825513bdfc5..2d616ec8862 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -146,6 +146,7 @@ merge_requests:
- merge_user
- merge_request_diffs
- merge_request_diff
+- merge_head_diff
- merge_request_context_commits
- merge_request_context_commit_diff_files
- events
@@ -544,7 +545,7 @@ project:
- daily_build_group_report_results
- jira_imports
- compliance_framework_setting
-- compliance_management_frameworks
+- compliance_management_framework
- metrics_users_starred_dashboards
- alert_management_alerts
- repository_storage_moves
@@ -561,6 +562,7 @@ project:
- exported_protected_branches
- incident_management_oncall_schedules
- debian_distributions
+- merge_request_metrics
award_emoji:
- awardable
- user
@@ -589,6 +591,7 @@ lfs_file_locks:
project_badges:
- project
metrics:
+- target_project
- merge_request
- latest_closed_by
- merged_by
diff --git a/spec/lib/gitlab/import_export/design_repo_restorer_spec.rb b/spec/lib/gitlab/import_export/design_repo_restorer_spec.rb
index b311a02833c..6680f4e7a03 100644
--- a/spec/lib/gitlab/import_export/design_repo_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/design_repo_restorer_spec.rb
@@ -11,12 +11,12 @@ RSpec.describe Gitlab::ImportExport::DesignRepoRestorer do
let!(:project) { create(:project) }
let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
let(:shared) { project.import_export_shared }
- let(:bundler) { Gitlab::ImportExport::DesignRepoSaver.new(project: project_with_design_repo, shared: shared) }
+ let(:bundler) { Gitlab::ImportExport::DesignRepoSaver.new(exportable: project_with_design_repo, shared: shared) }
let(:bundle_path) { File.join(shared.export_path, Gitlab::ImportExport.design_repo_bundle_filename) }
let(:restorer) do
described_class.new(path_to_bundle: bundle_path,
shared: shared,
- project: project)
+ importable: project)
end
before do
diff --git a/spec/lib/gitlab/import_export/design_repo_saver_spec.rb b/spec/lib/gitlab/import_export/design_repo_saver_spec.rb
index 2575d209db5..5501e3dee5a 100644
--- a/spec/lib/gitlab/import_export/design_repo_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/design_repo_saver_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Gitlab::ImportExport::DesignRepoSaver do
let!(:project) { create(:project, :design_repo) }
let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
let(:shared) { project.import_export_shared }
- let(:design_bundler) { described_class.new(project: project, shared: shared) }
+ let(:design_bundler) { described_class.new(exportable: project, shared: shared) }
before do
project.add_maintainer(user)
diff --git a/spec/lib/gitlab/import_export/fork_spec.rb b/spec/lib/gitlab/import_export/fork_spec.rb
index ef7394053b9..65c28a8b8a2 100644
--- a/spec/lib/gitlab/import_export/fork_spec.rb
+++ b/spec/lib/gitlab/import_export/fork_spec.rb
@@ -12,11 +12,11 @@ RSpec.describe 'forked project import' do
let(:shared) { project.import_export_shared }
let(:forked_from_project) { create(:project, :repository) }
let(:forked_project) { fork_project(project_with_repo, nil, repository: true) }
- let(:repo_saver) { Gitlab::ImportExport::RepoSaver.new(project: project_with_repo, shared: shared) }
+ let(:repo_saver) { Gitlab::ImportExport::RepoSaver.new(exportable: project_with_repo, shared: shared) }
let(:bundle_path) { File.join(shared.export_path, Gitlab::ImportExport.project_bundle_filename) }
let(:repo_restorer) do
- Gitlab::ImportExport::RepoRestorer.new(path_to_bundle: bundle_path, shared: shared, project: project)
+ Gitlab::ImportExport::RepoRestorer.new(path_to_bundle: bundle_path, shared: shared, importable: project)
end
let!(:merge_request) do
diff --git a/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
index 2794acb8980..d2153221e8f 100644
--- a/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
@@ -21,6 +21,7 @@ RSpec.describe Gitlab::ImportExport::Group::TreeRestorer do
group_tree_restorer = described_class.new(user: user, shared: @shared, group: @group)
expect(group_tree_restorer.restore).to be_truthy
+ expect(group_tree_restorer.groups_mapping).not_to be_empty
end
end
diff --git a/spec/lib/gitlab/import_export/importer_spec.rb b/spec/lib/gitlab/import_export/importer_spec.rb
index 75db3167ebc..20f0f6af6f3 100644
--- a/spec/lib/gitlab/import_export/importer_spec.rb
+++ b/spec/lib/gitlab/import_export/importer_spec.rb
@@ -69,8 +69,8 @@ RSpec.describe Gitlab::ImportExport::Importer do
repo_path = File.join(shared.export_path, Gitlab::ImportExport.project_bundle_filename)
restorer = double(Gitlab::ImportExport::RepoRestorer)
- expect(Gitlab::ImportExport::RepoRestorer).to receive(:new).with(path_to_bundle: repo_path, shared: shared, project: project).and_return(restorer)
- expect(Gitlab::ImportExport::RepoRestorer).to receive(:new).with(path_to_bundle: wiki_repo_path, shared: shared, project: ProjectWiki.new(project)).and_return(restorer)
+ expect(Gitlab::ImportExport::RepoRestorer).to receive(:new).with(path_to_bundle: repo_path, shared: shared, importable: project).and_return(restorer)
+ expect(Gitlab::ImportExport::RepoRestorer).to receive(:new).with(path_to_bundle: wiki_repo_path, shared: shared, importable: ProjectWiki.new(project)).and_return(restorer)
expect(Gitlab::ImportExport::RepoRestorer).to receive(:new).and_call_original
expect(restorer).to receive(:restore).and_return(true).twice
diff --git a/spec/lib/gitlab/import_export/repo_restorer_spec.rb b/spec/lib/gitlab/import_export/repo_restorer_spec.rb
index a6b917457c2..fe43a23e242 100644
--- a/spec/lib/gitlab/import_export/repo_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/repo_restorer_spec.rb
@@ -27,10 +27,10 @@ RSpec.describe Gitlab::ImportExport::RepoRestorer do
end
describe 'bundle a project Git repo' do
- let(:bundler) { Gitlab::ImportExport::RepoSaver.new(project: project_with_repo, shared: shared) }
+ let(:bundler) { Gitlab::ImportExport::RepoSaver.new(exportable: project_with_repo, shared: shared) }
let(:bundle_path) { File.join(shared.export_path, Gitlab::ImportExport.project_bundle_filename) }
- subject { described_class.new(path_to_bundle: bundle_path, shared: shared, project: project) }
+ subject { described_class.new(path_to_bundle: bundle_path, shared: shared, importable: project) }
after do
Gitlab::Shell.new.remove_repository(project.repository_storage, project.disk_path)
@@ -62,10 +62,10 @@ RSpec.describe Gitlab::ImportExport::RepoRestorer do
end
describe 'restore a wiki Git repo' do
- let(:bundler) { Gitlab::ImportExport::WikiRepoSaver.new(project: project_with_repo, shared: shared) }
+ let(:bundler) { Gitlab::ImportExport::WikiRepoSaver.new(exportable: project_with_repo, shared: shared) }
let(:bundle_path) { File.join(shared.export_path, Gitlab::ImportExport.wiki_repo_bundle_filename) }
- subject { described_class.new(path_to_bundle: bundle_path, shared: shared, project: ProjectWiki.new(project)) }
+ subject { described_class.new(path_to_bundle: bundle_path, shared: shared, importable: ProjectWiki.new(project)) }
after do
Gitlab::Shell.new.remove_repository(project.wiki.repository_storage, project.wiki.disk_path)
@@ -83,7 +83,7 @@ RSpec.describe Gitlab::ImportExport::RepoRestorer do
describe 'no wiki in the bundle' do
let!(:project_without_wiki) { create(:project) }
- let(:bundler) { Gitlab::ImportExport::WikiRepoSaver.new(project: project_without_wiki, shared: shared) }
+ let(:bundler) { Gitlab::ImportExport::WikiRepoSaver.new(exportable: project_without_wiki, shared: shared) }
it 'does not create an empty wiki' do
expect(subject.restore).to be true
diff --git a/spec/lib/gitlab/import_export/repo_saver_spec.rb b/spec/lib/gitlab/import_export/repo_saver_spec.rb
index 73d51000c67..52001e778d6 100644
--- a/spec/lib/gitlab/import_export/repo_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/repo_saver_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::ImportExport::RepoSaver do
let!(:project) { create(:project, :repository) }
let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
let(:shared) { project.import_export_shared }
- let(:bundler) { described_class.new(project: project, shared: shared) }
+ let(:bundler) { described_class.new(exportable: project, shared: shared) }
before do
project.add_maintainer(user)
@@ -25,6 +25,14 @@ RSpec.describe Gitlab::ImportExport::RepoSaver do
expect(bundler.save).to be true
end
+ it 'creates the directory for the repository' do
+ allow(bundler).to receive(:bundle_full_path).and_return('/foo/bar/file.tar.gz')
+
+ expect(FileUtils).to receive(:mkdir_p).with('/foo/bar', anything)
+
+ bundler.save # rubocop:disable Rails/SaveBang
+ end
+
context 'when the repo is empty' do
let!(:project) { create(:project) }
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index a93ee051ccf..e301be47d68 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -220,6 +220,7 @@ MergeRequestDiff:
- commits_count
- files_count
- sorted
+- diff_type
MergeRequestDiffCommit:
- merge_request_diff_id
- relative_order
@@ -231,6 +232,7 @@ MergeRequestDiffCommit:
- committer_name
- committer_email
- message
+- trailers
MergeRequestDiffFile:
- merge_request_diff_id
- relative_order
@@ -255,6 +257,7 @@ MergeRequestContextCommit:
- committer_email
- message
- merge_request_id
+- trailers
MergeRequestContextCommitDiffFile:
- sha
- relative_order
@@ -580,6 +583,7 @@ ProjectFeature:
- requirements_access_level
- analytics_access_level
- operations_access_level
+- security_and_compliance_access_level
- created_at
- updated_at
ProtectedBranch::MergeAccessLevel:
diff --git a/spec/lib/gitlab/import_export/saver_spec.rb b/spec/lib/gitlab/import_export/saver_spec.rb
index 865c7e57b5a..877474dd862 100644
--- a/spec/lib/gitlab/import_export/saver_spec.rb
+++ b/spec/lib/gitlab/import_export/saver_spec.rb
@@ -6,7 +6,8 @@ require 'fileutils'
RSpec.describe Gitlab::ImportExport::Saver do
let!(:project) { create(:project, :public, name: 'project') }
let(:base_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
- let(:export_path) { "#{base_path}/project_tree_saver_spec/export" }
+ let(:archive_path) { "#{base_path}/archive" }
+ let(:export_path) { "#{archive_path}/export" }
let(:shared) { project.import_export_shared }
subject { described_class.new(exportable: project, shared: shared) }
@@ -35,10 +36,13 @@ RSpec.describe Gitlab::ImportExport::Saver do
.to match(%r[\/uploads\/-\/system\/import_export_upload\/export_file.*])
end
- it 'removes tmp files' do
+ it 'removes archive path and keeps base path untouched' do
+ allow(shared).to receive(:archive_path).and_return(archive_path)
+
subject.save
- expect(FileUtils).to have_received(:rm_rf).with(base_path)
- expect(Dir.exist?(base_path)).to eq(false)
+ expect(FileUtils).not_to have_received(:rm_rf).with(base_path)
+ expect(FileUtils).to have_received(:rm_rf).with(archive_path)
+ expect(Dir.exist?(archive_path)).to eq(false)
end
end
diff --git a/spec/lib/gitlab/import_export/wiki_repo_saver_spec.rb b/spec/lib/gitlab/import_export/wiki_repo_saver_spec.rb
index 778d0859bf1..540f90e7804 100644
--- a/spec/lib/gitlab/import_export/wiki_repo_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/wiki_repo_saver_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::ImportExport::WikiRepoSaver do
let_it_be(:project) { create(:project, :wiki_repo) }
let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
let(:shared) { project.import_export_shared }
- let(:wiki_bundler) { described_class.new(project: project, shared: shared) }
+ let(:wiki_bundler) { described_class.new(exportable: project, shared: shared) }
let!(:project_wiki) { ProjectWiki.new(project, user) }
before do
diff --git a/spec/lib/gitlab/instrumentation/redis_cluster_validator_spec.rb b/spec/lib/gitlab/instrumentation/redis_cluster_validator_spec.rb
index 2ca7465e775..e4af3f77d5d 100644
--- a/spec/lib/gitlab/instrumentation/redis_cluster_validator_spec.rb
+++ b/spec/lib/gitlab/instrumentation/redis_cluster_validator_spec.rb
@@ -53,6 +53,7 @@ RSpec.describe Gitlab::Instrumentation::RedisClusterValidator do
:del | [%w(foo bar)] | true # Arguments can be a nested array
:del | %w(foo foo) | false
:hset | %w(foo bar) | false # Not a multi-key command
+ :mget | [] | false # This is invalid, but not because it's a cross-slot command
end
with_them do
diff --git a/spec/lib/gitlab/instrumentation_helper_spec.rb b/spec/lib/gitlab/instrumentation_helper_spec.rb
index c00b0fdf043..48b76c4cdbf 100644
--- a/spec/lib/gitlab/instrumentation_helper_spec.rb
+++ b/spec/lib/gitlab/instrumentation_helper_spec.rb
@@ -37,7 +37,11 @@ RSpec.describe Gitlab::InstrumentationHelper do
:redis_shared_state_write_bytes,
:db_count,
:db_write_count,
- :db_cached_count
+ :db_cached_count,
+ :external_http_count,
+ :external_http_duration_s,
+ :rack_attack_redis_count,
+ :rack_attack_redis_duration_s
]
expect(described_class.keys).to eq(expected_keys)
diff --git a/spec/lib/gitlab/kas_spec.rb b/spec/lib/gitlab/kas_spec.rb
index ce22f36e9fd..01ced407883 100644
--- a/spec/lib/gitlab/kas_spec.rb
+++ b/spec/lib/gitlab/kas_spec.rb
@@ -58,4 +58,48 @@ RSpec.describe Gitlab::Kas do
end
end
end
+
+ describe '.included_in_gitlab_com_rollout?' do
+ let_it_be(:project) { create(:project) }
+
+ context 'not GitLab.com' do
+ before do
+ allow(Gitlab).to receive(:com?).and_return(false)
+ end
+
+ it 'returns true' do
+ expect(described_class.included_in_gitlab_com_rollout?(project)).to be_truthy
+ end
+ end
+
+ context 'GitLab.com' do
+ before do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ end
+
+ context 'kubernetes_agent_on_gitlab_com feature flag disabled' do
+ before do
+ stub_feature_flags(kubernetes_agent_on_gitlab_com: false)
+ end
+
+ it 'returns false' do
+ expect(described_class.included_in_gitlab_com_rollout?(project)).to be_falsey
+ end
+ end
+
+ context 'kubernetes_agent_on_gitlab_com feature flag enabled' do
+ before do
+ stub_feature_flags(kubernetes_agent_on_gitlab_com: project)
+ end
+
+ it 'returns true' do
+ expect(described_class.included_in_gitlab_com_rollout?(project)).to be_truthy
+ end
+
+ it 'returns false for another project' do
+ expect(described_class.included_in_gitlab_com_rollout?(create(:project))).to be_falsey
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/metrics/subscribers/external_http_spec.rb b/spec/lib/gitlab/metrics/subscribers/external_http_spec.rb
new file mode 100644
index 00000000000..5bcaf8fbc47
--- /dev/null
+++ b/spec/lib/gitlab/metrics/subscribers/external_http_spec.rb
@@ -0,0 +1,172 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Metrics::Subscribers::ExternalHttp, :request_store do
+ let(:transaction) { Gitlab::Metrics::Transaction.new }
+ let(:subscriber) { described_class.new }
+
+ let(:event_1) do
+ double(:event, payload: {
+ method: 'POST', code: "200", duration: 0.321,
+ scheme: 'https', host: 'gitlab.com', port: 80, path: '/api/v4/projects',
+ query: 'current=true'
+ })
+ end
+
+ let(:event_2) do
+ double(:event, payload: {
+ method: 'GET', code: "301", duration: 0.12,
+ scheme: 'http', host: 'gitlab.com', port: 80, path: '/api/v4/projects/2',
+ query: 'current=true'
+ })
+ end
+
+ let(:event_3) do
+ double(:event, payload: {
+ method: 'POST', duration: 5.3,
+ scheme: 'http', host: 'gitlab.com', port: 80, path: '/api/v4/projects/2/issues',
+ query: 'current=true',
+ exception_object: Net::ReadTimeout.new
+ })
+ end
+
+ describe '.detail_store' do
+ context 'when external HTTP detail store is empty' do
+ before do
+ Gitlab::SafeRequestStore[:peek_enabled] = true
+ end
+
+ it 'returns an empty array' do
+ expect(described_class.detail_store).to eql([])
+ end
+ end
+
+ context 'when the performance bar is not enabled' do
+ it 'returns an empty array' do
+ expect(described_class.detail_store).to eql([])
+ end
+ end
+
+ context 'when external HTTP detail store has some values' do
+ before do
+ Gitlab::SafeRequestStore[:peek_enabled] = true
+ Gitlab::SafeRequestStore[:external_http_detail_store] = [{
+ method: 'POST', code: "200", duration: 0.321
+ }]
+ end
+
+ it 'returns the external http detailed store' do
+ expect(described_class.detail_store).to eql([{ method: 'POST', code: "200", duration: 0.321 }])
+ end
+ end
+ end
+
+ describe '.payload' do
+ context 'when SafeRequestStore does not have any item from external HTTP' do
+ it 'returns zeroed counters' do
+ expect(described_class.payload).to eql(external_http_count: 0, external_http_duration_s: 0.0)
+ end
+ end
+
+ context 'when external HTTP recorded some values' do
+ before do
+ Gitlab::SafeRequestStore[:external_http_count] = 7
+ Gitlab::SafeRequestStore[:external_http_duration_s] = 1.2
+ end
+
+ it 'returns the accumulated external HTTP counters' do
+ expect(described_class.payload).to eql(external_http_count: 7, external_http_duration_s: 1.2)
+ end
+ end
+ end
+
+ describe '#request' do
+ before do
+ Gitlab::SafeRequestStore[:peek_enabled] = true
+ allow(subscriber).to receive(:current_transaction).and_return(transaction)
+ end
+
+ it 'tracks external HTTP request count' do
+ expect(transaction).to receive(:increment)
+ .with(:gitlab_external_http_total, 1, { code: "200", method: "POST" })
+ expect(transaction).to receive(:increment)
+ .with(:gitlab_external_http_total, 1, { code: "301", method: "GET" })
+
+ subscriber.request(event_1)
+ subscriber.request(event_2)
+ end
+
+ it 'tracks external HTTP duration' do
+ expect(transaction).to receive(:observe)
+ .with(:gitlab_external_http_duration_seconds, 0.321)
+ expect(transaction).to receive(:observe)
+ .with(:gitlab_external_http_duration_seconds, 0.12)
+ expect(transaction).to receive(:observe)
+ .with(:gitlab_external_http_duration_seconds, 5.3)
+
+ subscriber.request(event_1)
+ subscriber.request(event_2)
+ subscriber.request(event_3)
+ end
+
+ it 'tracks external HTTP exceptions' do
+ expect(transaction).to receive(:increment)
+ .with(:gitlab_external_http_total, 1, { code: 'undefined', method: "POST" })
+ expect(transaction).to receive(:increment)
+ .with(:gitlab_external_http_exception_total, 1)
+
+ subscriber.request(event_3)
+ end
+
+ it 'stores per-request counters' do
+ subscriber.request(event_1)
+ subscriber.request(event_2)
+ subscriber.request(event_3)
+
+ expect(Gitlab::SafeRequestStore[:external_http_count]).to eq(3)
+ expect(Gitlab::SafeRequestStore[:external_http_duration_s]).to eq(5.741) # 0.321 + 0.12 + 5.3
+ end
+
+ it 'stores a portion of events into the detail store' do
+ subscriber.request(event_1)
+ subscriber.request(event_2)
+ subscriber.request(event_3)
+
+ expect(Gitlab::SafeRequestStore[:external_http_detail_store].length).to eq(3)
+ expect(Gitlab::SafeRequestStore[:external_http_detail_store][0]).to include(
+ method: 'POST', code: "200", duration: 0.321,
+ scheme: 'https', host: 'gitlab.com', port: 80, path: '/api/v4/projects',
+ query: 'current=true', exception_object: nil,
+ backtrace: be_a(Array)
+ )
+ expect(Gitlab::SafeRequestStore[:external_http_detail_store][1]).to include(
+ method: 'GET', code: "301", duration: 0.12,
+ scheme: 'http', host: 'gitlab.com', port: 80, path: '/api/v4/projects/2',
+ query: 'current=true', exception_object: nil,
+ backtrace: be_a(Array)
+ )
+ expect(Gitlab::SafeRequestStore[:external_http_detail_store][2]).to include(
+ method: 'POST', duration: 5.3,
+ scheme: 'http', host: 'gitlab.com', port: 80, path: '/api/v4/projects/2/issues',
+ query: 'current=true',
+ exception_object: be_a(Net::ReadTimeout),
+ backtrace: be_a(Array)
+ )
+ end
+
+ context 'when the performance bar is not enabled' do
+ before do
+ Gitlab::SafeRequestStore.delete(:peek_enabled)
+ end
+
+ it 'does not capture detail store' do
+ subscriber.request(event_1)
+ subscriber.request(event_2)
+ subscriber.request(event_3)
+
+ expect(Gitlab::SafeRequestStore[:external_http_detail_store]).to be(nil)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/subscribers/rack_attack_spec.rb b/spec/lib/gitlab/metrics/subscribers/rack_attack_spec.rb
new file mode 100644
index 00000000000..2d595632772
--- /dev/null
+++ b/spec/lib/gitlab/metrics/subscribers/rack_attack_spec.rb
@@ -0,0 +1,203 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Metrics::Subscribers::RackAttack, :request_store do
+ let(:subscriber) { described_class.new }
+
+ describe '.payload' do
+ context 'when the request store is empty' do
+ it 'returns empty data' do
+ expect(described_class.payload).to eql(
+ rack_attack_redis_count: 0,
+ rack_attack_redis_duration_s: 0.0
+ )
+ end
+ end
+
+ context 'when the request store already has data' do
+ before do
+ Gitlab::SafeRequestStore[:rack_attack_instrumentation] = {
+ rack_attack_redis_count: 10,
+ rack_attack_redis_duration_s: 9.0
+ }
+ end
+
+ it 'returns the accumulated data' do
+ expect(described_class.payload).to eql(
+ rack_attack_redis_count: 10,
+ rack_attack_redis_duration_s: 9.0
+ )
+ end
+ end
+ end
+
+ describe '#redis' do
+ it 'accumulates per-request RackAttack cache usage' do
+ freeze_time do
+ subscriber.redis(
+ ActiveSupport::Notifications::Event.new(
+ 'redis.rack_attack', Time.current, Time.current + 1.second, '1', { operation: 'fetch' }
+ )
+ )
+ subscriber.redis(
+ ActiveSupport::Notifications::Event.new(
+ 'redis.rack_attack', Time.current, Time.current + 2.seconds, '1', { operation: 'write' }
+ )
+ )
+ subscriber.redis(
+ ActiveSupport::Notifications::Event.new(
+ 'redis.rack_attack', Time.current, Time.current + 3.seconds, '1', { operation: 'read' }
+ )
+ )
+ end
+
+ expect(Gitlab::SafeRequestStore[:rack_attack_instrumentation]).to eql(
+ rack_attack_redis_count: 3,
+ rack_attack_redis_duration_s: 6.0
+ )
+ end
+ end
+
+ shared_examples 'log into auth logger' do
+ context 'when matched throttle does not require user information' do
+ let(:event) do
+ ActiveSupport::Notifications::Event.new(
+ event_name, Time.current, Time.current + 2.seconds, '1', request: double(
+ :request,
+ ip: '1.2.3.4',
+ request_method: 'GET',
+ fullpath: '/api/v4/internal/authorized_keys',
+ env: {
+ 'rack.attack.match_type' => match_type,
+ 'rack.attack.matched' => 'throttle_unauthenticated'
+ }
+ )
+ )
+ end
+
+ it 'logs request information' do
+ expect(Gitlab::AuthLogger).to receive(:error).with(
+ include(
+ message: 'Rack_Attack',
+ env: match_type,
+ remote_ip: '1.2.3.4',
+ request_method: 'GET',
+ path: '/api/v4/internal/authorized_keys',
+ matched: 'throttle_unauthenticated'
+ )
+ )
+ subscriber.send(match_type, event)
+ end
+ end
+
+ context 'when matched throttle requires user information' do
+ context 'when user not found' do
+ let(:event) do
+ ActiveSupport::Notifications::Event.new(
+ event_name, Time.current, Time.current + 2.seconds, '1', request: double(
+ :request,
+ ip: '1.2.3.4',
+ request_method: 'GET',
+ fullpath: '/api/v4/internal/authorized_keys',
+ env: {
+ 'rack.attack.match_type' => match_type,
+ 'rack.attack.matched' => 'throttle_authenticated_api',
+ 'rack.attack.match_discriminator' => 'not_exist_user_id'
+ }
+ )
+ )
+ end
+
+ it 'logs request information and user id' do
+ expect(Gitlab::AuthLogger).to receive(:error).with(
+ include(
+ message: 'Rack_Attack',
+ env: match_type,
+ remote_ip: '1.2.3.4',
+ request_method: 'GET',
+ path: '/api/v4/internal/authorized_keys',
+ matched: 'throttle_authenticated_api',
+ user_id: 'not_exist_user_id'
+ )
+ )
+ subscriber.send(match_type, event)
+ end
+ end
+
+ context 'when user found' do
+ let(:user) { create(:user) }
+ let(:event) do
+ ActiveSupport::Notifications::Event.new(
+ event_name, Time.current, Time.current + 2.seconds, '1', request: double(
+ :request,
+ ip: '1.2.3.4',
+ request_method: 'GET',
+ fullpath: '/api/v4/internal/authorized_keys',
+ env: {
+ 'rack.attack.match_type' => match_type,
+ 'rack.attack.matched' => 'throttle_authenticated_api',
+ 'rack.attack.match_discriminator' => user.id
+ }
+ )
+ )
+ end
+
+ it 'logs request information and user meta' do
+ expect(Gitlab::AuthLogger).to receive(:error).with(
+ include(
+ message: 'Rack_Attack',
+ env: match_type,
+ remote_ip: '1.2.3.4',
+ request_method: 'GET',
+ path: '/api/v4/internal/authorized_keys',
+ matched: 'throttle_authenticated_api',
+ user_id: user.id,
+ 'meta.user' => user.username
+ )
+ )
+ subscriber.send(match_type, event)
+ end
+ end
+ end
+ end
+
+ describe '#throttle' do
+ let(:match_type) { :throttle }
+ let(:event_name) { 'throttle.rack_attack' }
+
+ it_behaves_like 'log into auth logger'
+ end
+
+ describe '#blocklist' do
+ let(:match_type) { :blocklist }
+ let(:event_name) { 'blocklist.rack_attack' }
+
+ it_behaves_like 'log into auth logger'
+ end
+
+ describe '#track' do
+ let(:match_type) { :track }
+ let(:event_name) { 'track.rack_attack' }
+
+ it_behaves_like 'log into auth logger'
+ end
+
+ describe '#safelist' do
+ let(:event) do
+ ActiveSupport::Notifications::Event.new(
+ 'safelist.rack_attack', Time.current, Time.current + 2.seconds, '1', request: double(
+ :request,
+ env: {
+ 'rack.attack.matched' => 'throttle_unauthenticated'
+ }
+ )
+ )
+ end
+
+ it 'adds the matched name to safe request store' do
+ subscriber.safelist(event)
+ expect(Gitlab::SafeRequestStore[:instrumentation_throttle_safelist]).to eql('throttle_unauthenticated')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/patch/prependable_spec.rb b/spec/lib/gitlab/patch/prependable_spec.rb
index 8feab57a8f3..5b01bb99fc8 100644
--- a/spec/lib/gitlab/patch/prependable_spec.rb
+++ b/spec/lib/gitlab/patch/prependable_spec.rb
@@ -231,4 +231,22 @@ RSpec.describe Gitlab::Patch::Prependable do
.to raise_error(described_class::MultiplePrependedBlocks)
end
end
+
+ describe 'the extra hack for override verification' do
+ context 'when ENV["STATIC_VERIFICATION"] is not defined' do
+ it 'does not extend ClassMethods onto the defining module' do
+ expect(ee).not_to respond_to(:class_name)
+ end
+ end
+
+ context 'when ENV["STATIC_VERIFICATION"] is defined' do
+ before do
+ stub_env('STATIC_VERIFICATION', 'true')
+ end
+
+ it 'does extend ClassMethods onto the defining module' do
+ expect(ee).to respond_to(:class_name)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/performance_bar/stats_spec.rb b/spec/lib/gitlab/performance_bar/stats_spec.rb
index c34c6f7b31f..ad11eca56d1 100644
--- a/spec/lib/gitlab/performance_bar/stats_spec.rb
+++ b/spec/lib/gitlab/performance_bar/stats_spec.rb
@@ -22,10 +22,12 @@ RSpec.describe Gitlab::PerformanceBar::Stats do
expect(logger).to receive(:info)
.with({ duration_ms: 1.096, filename: 'lib/gitlab/pagination/offset_pagination.rb',
- filenum: 53, method: 'add_pagination_headers', request_id: 'foo', type: :sql })
+ method_path: 'lib/gitlab/pagination/offset_pagination.rb:add_pagination_headers',
+ count: 1, request_id: 'foo', type: :sql })
expect(logger).to receive(:info)
- .with({ duration_ms: 0.817, filename: 'lib/api/helpers.rb',
- filenum: 112, method: 'find_project', request_id: 'foo', type: :sql }).twice
+ .with({ duration_ms: 1.634, filename: 'lib/api/helpers.rb',
+ method_path: 'lib/api/helpers.rb:find_project',
+ count: 2, request_id: 'foo', type: :sql })
subject
end
diff --git a/spec/lib/gitlab/rack_attack/instrumented_cache_store_spec.rb b/spec/lib/gitlab/rack_attack/instrumented_cache_store_spec.rb
new file mode 100644
index 00000000000..2cb31b00f39
--- /dev/null
+++ b/spec/lib/gitlab/rack_attack/instrumented_cache_store_spec.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::RackAttack::InstrumentedCacheStore do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:store) { ::ActiveSupport::Cache::NullStore.new }
+
+ subject { described_class.new(upstream_store: store)}
+
+ where(:operation, :params, :test_proc) do
+ :fetch | [:key] | ->(s) { s.fetch(:key) }
+ :read | [:key] | ->(s) { s.read(:key) }
+ :read_multi | [:key_1, :key_2, :key_3] | ->(s) { s.read_multi(:key_1, :key_2, :key_3) }
+ :write_multi | [{ key_1: 1, key_2: 2, key_3: 3 }] | ->(s) { s.write_multi(key_1: 1, key_2: 2, key_3: 3) }
+ :fetch_multi | [:key_1, :key_2, :key_3] | ->(s) { s.fetch_multi(:key_1, :key_2, :key_3) {} }
+ :write | [:key, :value, { option_1: 1 }] | ->(s) { s.write(:key, :value, option_1: 1) }
+ :delete | [:key] | ->(s) { s.delete(:key) }
+ :exist? | [:key, { option_1: 1 }] | ->(s) { s.exist?(:key, option_1: 1) }
+ :delete_matched | [/^key$/, { option_1: 1 }] | ->(s) { s.delete_matched(/^key$/, option_1: 1 ) }
+ :increment | [:key, 1] | ->(s) { s.increment(:key, 1) }
+ :decrement | [:key, 1] | ->(s) { s.decrement(:key, 1) }
+ :cleanup | [] | ->(s) { s.cleanup }
+ :clear | [] | ->(s) { s.clear }
+ end
+
+ with_them do
+ it 'publishes a notification' do
+ event = nil
+
+ begin
+ subscriber = ActiveSupport::Notifications.subscribe("redis.rack_attack") do |*args|
+ event = ActiveSupport::Notifications::Event.new(*args)
+ end
+
+ test_proc.call(subject)
+ ensure
+ ActiveSupport::Notifications.unsubscribe(subscriber) if subscriber
+ end
+
+ expect(event).not_to be_nil
+ expect(event.name).to eq("redis.rack_attack")
+ expect(event.duration).to be_a(Float).and(be > 0.0)
+ expect(event.payload[:operation]).to eql(operation)
+ end
+
+ it 'publishes a notification even if the cache store raises an error' do
+ allow(store).to receive(operation).and_raise('Something went wrong')
+
+ event = nil
+ exception = nil
+
+ begin
+ subscriber = ActiveSupport::Notifications.subscribe("redis.rack_attack") do |*args|
+ event = ActiveSupport::Notifications::Event.new(*args)
+ end
+
+ begin
+ test_proc.call(subject)
+ rescue => e
+ exception = e
+ end
+ ensure
+ ActiveSupport::Notifications.unsubscribe(subscriber) if subscriber
+ end
+
+ expect(event).not_to be_nil
+ expect(event.name).to eq("redis.rack_attack")
+ expect(event.duration).to be_a(Float).and(be > 0.0)
+ expect(event.payload[:operation]).to eql(operation)
+
+ expect(exception).not_to be_nil
+ expect(exception.message).to eql('Something went wrong')
+ end
+
+ it 'delegates to the upstream store' do
+ allow(store).to receive(operation).and_call_original
+
+ if params.empty?
+ expect(store).to receive(operation).with(no_args)
+ else
+ expect(store).to receive(operation).with(*params)
+ end
+
+ test_proc.call(subject)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/rack_attack_spec.rb b/spec/lib/gitlab/rack_attack_spec.rb
index 5748e1e49e5..788d2eac61f 100644
--- a/spec/lib/gitlab/rack_attack_spec.rb
+++ b/spec/lib/gitlab/rack_attack_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::RackAttack, :aggregate_failures do
describe '.configure' do
let(:fake_rack_attack) { class_double("Rack::Attack") }
let(:fake_rack_attack_request) { class_double("Rack::Attack::Request") }
+ let(:fake_cache) { instance_double("Rack::Attack::Cache") }
let(:throttles) do
{
@@ -27,6 +28,8 @@ RSpec.describe Gitlab::RackAttack, :aggregate_failures do
allow(fake_rack_attack).to receive(:track)
allow(fake_rack_attack).to receive(:safelist)
allow(fake_rack_attack).to receive(:blocklist)
+ allow(fake_rack_attack).to receive(:cache).and_return(fake_cache)
+ allow(fake_cache).to receive(:store=)
end
it 'extends the request class' do
diff --git a/spec/lib/gitlab/repository_cache_adapter_spec.rb b/spec/lib/gitlab/repository_cache_adapter_spec.rb
index 4c57665b41f..625dcf11546 100644
--- a/spec/lib/gitlab/repository_cache_adapter_spec.rb
+++ b/spec/lib/gitlab/repository_cache_adapter_spec.rb
@@ -292,12 +292,11 @@ RSpec.describe Gitlab::RepositoryCacheAdapter do
describe '#expire_method_caches' do
it 'expires the caches of the given methods' do
- expect(cache).to receive(:expire).with(:rendered_readme)
expect(cache).to receive(:expire).with(:branch_names)
- expect(redis_set_cache).to receive(:expire).with(:rendered_readme, :branch_names)
- expect(redis_hash_cache).to receive(:delete).with(:rendered_readme, :branch_names)
+ expect(redis_set_cache).to receive(:expire).with(:branch_names)
+ expect(redis_hash_cache).to receive(:delete).with(:branch_names)
- repository.expire_method_caches(%i(rendered_readme branch_names))
+ repository.expire_method_caches(%i(branch_names))
end
it 'does not expire caches for non-existent methods' do
diff --git a/spec/lib/gitlab/search/query_spec.rb b/spec/lib/gitlab/search/query_spec.rb
index dd2f23a7e47..234b683ba1f 100644
--- a/spec/lib/gitlab/search/query_spec.rb
+++ b/spec/lib/gitlab/search/query_spec.rb
@@ -46,4 +46,22 @@ RSpec.describe Gitlab::Search::Query do
expect(subject.filters).to all(include(negated: true))
end
end
+
+ context 'with filter value in quotes' do
+ let(:query) { '"foo bar" name:"my test script.txt"' }
+
+ it 'does not break the filter value in quotes' do
+ expect(subject.term).to eq('"foo bar"')
+ expect(subject.filters[0]).to include(name: :name, negated: false, value: "MY TEST SCRIPT.TXT")
+ end
+ end
+
+ context 'with extra white spaces between the query words' do
+ let(:query) { ' foo = bar name:"my test.txt"' }
+
+ it 'removes the extra whitespace between tokens' do
+ expect(subject.term).to eq('foo = bar')
+ expect(subject.filters[0]).to include(name: :name, negated: false, value: "MY TEST.TXT")
+ end
+ end
end
diff --git a/spec/lib/gitlab/search_results_spec.rb b/spec/lib/gitlab/search_results_spec.rb
index c437b6bcceb..158d472f7ea 100644
--- a/spec/lib/gitlab/search_results_spec.rb
+++ b/spec/lib/gitlab/search_results_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::SearchResults do
include ProjectForksHelper
include SearchHelpers
+ using RSpec::Parameterized::TableSyntax
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, name: 'foo') }
@@ -41,8 +42,6 @@ RSpec.describe Gitlab::SearchResults do
end
describe '#formatted_count' do
- using RSpec::Parameterized::TableSyntax
-
where(:scope, :count_method, :expected) do
'projects' | :limited_projects_count | max_limited_count
'issues' | :limited_issues_count | max_limited_count
@@ -61,8 +60,6 @@ RSpec.describe Gitlab::SearchResults do
end
describe '#highlight_map' do
- using RSpec::Parameterized::TableSyntax
-
where(:scope, :expected) do
'projects' | {}
'issues' | {}
@@ -80,8 +77,6 @@ RSpec.describe Gitlab::SearchResults do
end
describe '#formatted_limited_count' do
- using RSpec::Parameterized::TableSyntax
-
where(:count, :expected) do
23 | '23'
99 | '99'
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index b99a5352717..856ae87c5bf 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -38,7 +38,8 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
'pid' => Process.pid,
'created_at' => created_at.to_f,
'enqueued_at' => created_at.to_f,
- 'scheduling_latency_s' => scheduling_latency_s
+ 'scheduling_latency_s' => scheduling_latency_s,
+ 'job_size_bytes' => be > 0
)
end
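
The new job_size_bytes key is asserted loosely (be > 0) rather than as an exact value, since it depends on the serialized payload. One plausible derivation (an assumption, not shown in this diff) is the byte size of the JSON-encoded job hash:

  # Hypothetical derivation of the logged field.
  job_size_bytes = Sidekiq.dump_json(job).bytesize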
diff --git a/spec/lib/gitlab/suggestions/commit_message_spec.rb b/spec/lib/gitlab/suggestions/commit_message_spec.rb
index 1411f64f8b7..965960f0c3e 100644
--- a/spec/lib/gitlab/suggestions/commit_message_spec.rb
+++ b/spec/lib/gitlab/suggestions/commit_message_spec.rb
@@ -72,6 +72,17 @@ RSpec.describe Gitlab::Suggestions::CommitMessage do
end
end
+ context 'when a custom commit message is specified' do
+ let(:message) { "i'm a project message. a user's custom message takes precedence over me :(" }
+ let(:custom_message) { "hello there! i'm a cool custom commit message." }
+
+ it 'shows the custom commit message' do
+ expect(Gitlab::Suggestions::CommitMessage
+ .new(user, suggestion_set, custom_message)
+ .message).to eq(custom_message)
+ end
+ end
+
context 'is specified and includes all placeholders' do
let(:message) do
'*** %{branch_name} %{files_count} %{file_paths} %{project_name} %{project_path} %{user_full_name} %{username} %{suggestions_count} ***'
diff --git a/spec/lib/gitlab/template/finders/global_template_finder_spec.rb b/spec/lib/gitlab/template/finders/global_template_finder_spec.rb
index e2751d194d3..38ec28c2b9a 100644
--- a/spec/lib/gitlab/template/finders/global_template_finder_spec.rb
+++ b/spec/lib/gitlab/template/finders/global_template_finder_spec.rb
@@ -15,9 +15,19 @@ RSpec.describe Gitlab::Template::Finders::GlobalTemplateFinder do
FileUtils.rm_rf(base_dir)
end
- subject(:finder) { described_class.new(base_dir, '', { 'General' => '', 'Bar' => 'Bar' }, excluded_patterns: excluded_patterns) }
+ subject(:finder) do
+ described_class.new(base_dir, '',
+ { 'General' => '', 'Bar' => 'Bar' },
+ include_categories_for_file,
+ excluded_patterns: excluded_patterns)
+ end
let(:excluded_patterns) { [] }
+ let(:include_categories_for_file) do
+ {
+ "SAST" => { "Security" => "Security" }
+ }
+ end
describe '.find' do
context 'with a non-prefixed General template' do
@@ -60,6 +70,7 @@ RSpec.describe Gitlab::Template::Finders::GlobalTemplateFinder do
context 'with a prefixed template' do
before do
create_template!('Bar/test-template')
+ create_template!('Security/SAST')
end
it 'finds the template with a prefix' do
@@ -76,6 +87,16 @@ RSpec.describe Gitlab::Template::Finders::GlobalTemplateFinder do
expect { finder.find('../foo') }.to raise_error(/Invalid path/)
end
+ context 'with include_categories_for_file being present' do
+ it 'finds the template with a prefix' do
+ expect(finder.find('SAST')).to be_present
+ end
+
+ it 'does not find any template that is missing from include_categories_for_file' do
+ expect(finder.find('DAST')).to be_nil
+ end
+ end
+
context 'while listed as an exclusion' do
let(:excluded_patterns) { [%r{^Bar/test-template$}] }
diff --git a/spec/lib/gitlab/terraform/state_migration_helper_spec.rb b/spec/lib/gitlab/terraform/state_migration_helper_spec.rb
new file mode 100644
index 00000000000..36c9c060e98
--- /dev/null
+++ b/spec/lib/gitlab/terraform/state_migration_helper_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Terraform::StateMigrationHelper do
+ before do
+ stub_terraform_state_object_storage
+ end
+
+ describe '.migrate_to_remote_storage' do
+ let!(:local_version) { create(:terraform_state_version, file_store: Terraform::StateUploader::Store::LOCAL) }
+
+ subject { described_class.migrate_to_remote_storage }
+
+ it 'migrates local files to remote storage' do
+ subject
+
+ expect(local_version.reload.file_store).to eq(Terraform::StateUploader::Store::REMOTE)
+ end
+ end
+end
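
A rough sketch of what a helper like .migrate_to_remote_storage typically does in GitLab upload-migration code (assumed, not the actual implementation; the scope name below is illustrative):

  module Gitlab
    module Terraform
      module StateMigrationHelper
        def self.migrate_to_remote_storage
          # `with_files_stored_locally` is a hypothetical scope selecting LOCAL-store versions.
          ::Terraform::StateVersion.with_files_stored_locally.find_each(batch_size: 10) do |version|
            version.file.migrate!(::Terraform::StateUploader::Store::REMOTE)
          end
        end
      end
    end
  end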
diff --git a/spec/lib/gitlab/tracking/standard_context_spec.rb b/spec/lib/gitlab/tracking/standard_context_spec.rb
index acf7aeb303a..b12a0310dac 100644
--- a/spec/lib/gitlab/tracking/standard_context_spec.rb
+++ b/spec/lib/gitlab/tracking/standard_context_spec.rb
@@ -9,11 +9,37 @@ RSpec.describe Gitlab::Tracking::StandardContext do
let(:snowplow_context) { subject.to_context }
describe '#to_context' do
- context 'with no arguments' do
- it 'creates a Snowplow context with no data' do
- snowplow_context.to_json[:data].each do |_, v|
- expect(v).to be_nil
+ context 'default fields' do
+ context 'environment' do
+ shared_examples 'contains environment' do |expected_environment|
+ it 'contains environment' do
+ expect(snowplow_context.to_json.dig(:data, :environment)).to eq(expected_environment)
+ end
end
+
+ context 'development or test' do
+ include_examples 'contains environment', 'development'
+ end
+
+ context 'staging' do
+ before do
+ allow(Gitlab).to receive(:staging?).and_return(true)
+ end
+
+ include_examples 'contains environment', 'staging'
+ end
+
+ context 'production' do
+ before do
+ allow(Gitlab).to receive(:com_and_canary?).and_return(true)
+ end
+
+ include_examples 'contains environment', 'production'
+ end
+ end
+
+ it 'contains source' do
+ expect(snowplow_context.to_json.dig(:data, :source)).to eq(described_class::GITLAB_RAILS_SOURCE)
end
end
@@ -28,8 +54,8 @@ RSpec.describe Gitlab::Tracking::StandardContext do
context 'with namespace' do
subject { described_class.new(namespace: namespace) }
- it 'creates a Snowplow context using the given data' do
- expect(snowplow_context.to_json.dig(:data, :namespace_id)).to eq(namespace.id)
+ it 'creates a Snowplow context without namespace and project' do
+ expect(snowplow_context.to_json.dig(:data, :namespace_id)).to be_nil
expect(snowplow_context.to_json.dig(:data, :project_id)).to be_nil
end
end
@@ -37,18 +63,18 @@ RSpec.describe Gitlab::Tracking::StandardContext do
context 'with project' do
subject { described_class.new(project: project) }
- it 'creates a Snowplow context using the given data' do
- expect(snowplow_context.to_json.dig(:data, :namespace_id)).to eq(project.namespace.id)
- expect(snowplow_context.to_json.dig(:data, :project_id)).to eq(project.id)
+ it 'creates a Snowplow context without namespace and project' do
+ expect(snowplow_context.to_json.dig(:data, :namespace_id)).to be_nil
+ expect(snowplow_context.to_json.dig(:data, :project_id)).to be_nil
end
end
context 'with project and namespace' do
subject { described_class.new(namespace: namespace, project: project) }
- it 'creates a Snowplow context using the given data' do
- expect(snowplow_context.to_json.dig(:data, :namespace_id)).to eq(namespace.id)
- expect(snowplow_context.to_json.dig(:data, :project_id)).to eq(project.id)
+ it 'creates a Snowplow context without namespace and project' do
+ expect(snowplow_context.to_json.dig(:data, :namespace_id)).to be_nil
+ expect(snowplow_context.to_json.dig(:data, :project_id)).to be_nil
end
end
end
diff --git a/spec/lib/gitlab/url_blocker_spec.rb b/spec/lib/gitlab/url_blocker_spec.rb
index 686382dc262..fa01d4e48df 100644
--- a/spec/lib/gitlab/url_blocker_spec.rb
+++ b/spec/lib/gitlab/url_blocker_spec.rb
@@ -302,36 +302,36 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
it 'does not block urls from private networks' do
local_ips.each do |ip|
stub_domain_resolv(fake_domain, ip) do
- expect(described_class).not_to be_blocked_url("http://#{fake_domain}", url_blocker_attributes)
+ expect(described_class).not_to be_blocked_url("http://#{fake_domain}", **url_blocker_attributes)
end
- expect(described_class).not_to be_blocked_url("http://#{ip}", url_blocker_attributes)
+ expect(described_class).not_to be_blocked_url("http://#{ip}", **url_blocker_attributes)
end
end
it 'allows localhost endpoints' do
- expect(described_class).not_to be_blocked_url('http://0.0.0.0', url_blocker_attributes)
- expect(described_class).not_to be_blocked_url('http://localhost', url_blocker_attributes)
- expect(described_class).not_to be_blocked_url('http://127.0.0.1', url_blocker_attributes)
+ expect(described_class).not_to be_blocked_url('http://0.0.0.0', **url_blocker_attributes)
+ expect(described_class).not_to be_blocked_url('http://localhost', **url_blocker_attributes)
+ expect(described_class).not_to be_blocked_url('http://127.0.0.1', **url_blocker_attributes)
end
it 'allows loopback endpoints' do
- expect(described_class).not_to be_blocked_url('http://127.0.0.2', url_blocker_attributes)
+ expect(described_class).not_to be_blocked_url('http://127.0.0.2', **url_blocker_attributes)
end
it 'allows IPv4 link-local endpoints' do
- expect(described_class).not_to be_blocked_url('http://169.254.169.254', url_blocker_attributes)
- expect(described_class).not_to be_blocked_url('http://169.254.168.100', url_blocker_attributes)
+ expect(described_class).not_to be_blocked_url('http://169.254.169.254', **url_blocker_attributes)
+ expect(described_class).not_to be_blocked_url('http://169.254.168.100', **url_blocker_attributes)
end
it 'allows IPv6 link-local endpoints' do
- expect(described_class).not_to be_blocked_url('http://[0:0:0:0:0:ffff:169.254.169.254]', url_blocker_attributes)
- expect(described_class).not_to be_blocked_url('http://[::ffff:169.254.169.254]', url_blocker_attributes)
- expect(described_class).not_to be_blocked_url('http://[::ffff:a9fe:a9fe]', url_blocker_attributes)
- expect(described_class).not_to be_blocked_url('http://[0:0:0:0:0:ffff:169.254.168.100]', url_blocker_attributes)
- expect(described_class).not_to be_blocked_url('http://[::ffff:169.254.168.100]', url_blocker_attributes)
- expect(described_class).not_to be_blocked_url('http://[::ffff:a9fe:a864]', url_blocker_attributes)
- expect(described_class).not_to be_blocked_url('http://[fe80::c800:eff:fe74:8]', url_blocker_attributes)
+ expect(described_class).not_to be_blocked_url('http://[0:0:0:0:0:ffff:169.254.169.254]', **url_blocker_attributes)
+ expect(described_class).not_to be_blocked_url('http://[::ffff:169.254.169.254]', **url_blocker_attributes)
+ expect(described_class).not_to be_blocked_url('http://[::ffff:a9fe:a9fe]', **url_blocker_attributes)
+ expect(described_class).not_to be_blocked_url('http://[0:0:0:0:0:ffff:169.254.168.100]', **url_blocker_attributes)
+ expect(described_class).not_to be_blocked_url('http://[::ffff:169.254.168.100]', **url_blocker_attributes)
+ expect(described_class).not_to be_blocked_url('http://[::ffff:a9fe:a864]', **url_blocker_attributes)
+ expect(described_class).not_to be_blocked_url('http://[fe80::c800:eff:fe74:8]', **url_blocker_attributes)
end
end
@@ -416,11 +416,11 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
attrs = url_blocker_attributes.merge(dns_rebind_protection: false)
stub_domain_resolv('example.com', '192.168.1.2') do
- expect(described_class).not_to be_blocked_url(url, attrs)
+ expect(described_class).not_to be_blocked_url(url, **attrs)
end
stub_domain_resolv('example.com', '192.168.1.3') do
- expect(described_class).to be_blocked_url(url, attrs)
+ expect(described_class).to be_blocked_url(url, **attrs)
end
end
end
@@ -442,18 +442,18 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
stub_domain_resolv(domain, '192.168.1.1') do
expect(described_class).not_to be_blocked_url("http://#{domain}",
- url_blocker_attributes)
+ **url_blocker_attributes)
end
stub_domain_resolv(subdomain1, '192.168.1.1') do
expect(described_class).not_to be_blocked_url("http://#{subdomain1}",
- url_blocker_attributes)
+ **url_blocker_attributes)
end
# subdomain2 is not part of the allowlist so it should be blocked
stub_domain_resolv(subdomain2, '192.168.1.1') do
expect(described_class).to be_blocked_url("http://#{subdomain2}",
- url_blocker_attributes)
+ **url_blocker_attributes)
end
end
@@ -463,12 +463,12 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
stub_domain_resolv(unicode_domain, '192.168.1.1') do
expect(described_class).not_to be_blocked_url("http://#{unicode_domain}",
- url_blocker_attributes)
+ **url_blocker_attributes)
end
stub_domain_resolv(idna_encoded_domain, '192.168.1.1') do
expect(described_class).not_to be_blocked_url("http://#{idna_encoded_domain}",
- url_blocker_attributes)
+ **url_blocker_attributes)
end
end
@@ -525,7 +525,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
it 'allows domain with port when resolved ip has port allowed' do
stub_domain_resolv("www.resolve-domain.com", '127.0.0.1') do
- expect(described_class).not_to be_blocked_url("http://www.resolve-domain.com:2000", url_blocker_attributes)
+ expect(described_class).not_to be_blocked_url("http://www.resolve-domain.com:2000", **url_blocker_attributes)
end
end
end
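
The pervasive **url_blocker_attributes change reflects Ruby's keyword-argument separation (a deprecation warning on 2.7, an error on 3.0): a trailing hash is no longer implicitly converted into keyword arguments, so it must be splatted explicitly. Illustrative example (signature simplified, not the real method):

  def blocked_url?(url, allow_localhost: false, allow_local_network: true)
    # ...
  end

  attrs = { allow_localhost: true, allow_local_network: true }
  blocked_url?('http://127.0.0.1', attrs)   # ArgumentError on Ruby 3: the hash is positional
  blocked_url?('http://127.0.0.1', **attrs) # explicit keyword splat works on 2.7 and 3.x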
diff --git a/spec/lib/gitlab/url_blockers/url_allowlist_spec.rb b/spec/lib/gitlab/url_blockers/url_allowlist_spec.rb
index d9e44e9b85c..4c4248b143e 100644
--- a/spec/lib/gitlab/url_blockers/url_allowlist_spec.rb
+++ b/spec/lib/gitlab/url_blockers/url_allowlist_spec.rb
@@ -37,19 +37,19 @@ RSpec.describe Gitlab::UrlBlockers::UrlAllowlist do
let(:allowlist) { ['example.io:3000'] }
it 'returns true if domain and ports present in allowlist' do
- parsed_allowlist = [['example.io', { port: 3000 }]]
+ parsed_allowlist = [['example.io', 3000]]
not_allowed = [
'example.io',
- ['example.io', { port: 3001 }]
+ ['example.io', 3001]
]
aggregate_failures do
- parsed_allowlist.each do |domain_and_port|
- expect(described_class).to be_domain_allowed(*domain_and_port)
+ parsed_allowlist.each do |domain, port|
+ expect(described_class).to be_domain_allowed(domain, port: port)
end
- not_allowed.each do |domain_and_port|
- expect(described_class).not_to be_domain_allowed(*domain_and_port)
+ not_allowed.each do |domain, port|
+ expect(described_class).not_to be_domain_allowed(domain, port: port)
end
end
end
@@ -139,23 +139,23 @@ RSpec.describe Gitlab::UrlBlockers::UrlAllowlist do
it 'returns true if ip and ports present in allowlist' do
parsed_allowlist = [
- ['127.0.0.9', { port: 3000 }],
- ['[2001:db8:85a3:8d3:1319:8a2e:370:7348]', { port: 443 }]
+ ['127.0.0.9', 3000],
+ ['[2001:db8:85a3:8d3:1319:8a2e:370:7348]', 443]
]
not_allowed = [
'127.0.0.9',
- ['127.0.0.9', { port: 3001 }],
+ ['127.0.0.9', 3001],
'[2001:db8:85a3:8d3:1319:8a2e:370:7348]',
- ['[2001:db8:85a3:8d3:1319:8a2e:370:7348]', { port: 3001 }]
+ ['[2001:db8:85a3:8d3:1319:8a2e:370:7348]', 3001]
]
aggregate_failures do
- parsed_allowlist.each do |ip_and_port|
- expect(described_class).to be_ip_allowed(*ip_and_port)
+ parsed_allowlist.each do |ip, port|
+ expect(described_class).to be_ip_allowed(ip, port: port)
end
- not_allowed.each do |ip_and_port|
- expect(described_class).not_to be_ip_allowed(*ip_and_port)
+ not_allowed.each do |ip, port|
+ expect(described_class).not_to be_ip_allowed(ip, port: port)
end
end
end
diff --git a/spec/lib/gitlab/usage/docs/renderer_spec.rb b/spec/lib/gitlab/usage/docs/renderer_spec.rb
new file mode 100644
index 00000000000..e62861cd677
--- /dev/null
+++ b/spec/lib/gitlab/usage/docs/renderer_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Docs::Renderer do
+ describe 'contents' do
+ let(:dictionary_path) { Gitlab::Usage::Docs::Renderer::DICTIONARY_PATH }
+ let(:items) { Gitlab::Usage::MetricDefinition.definitions }
+
+ it 'generates dictionary for given items' do
+ generated_dictionary = described_class.new(items).contents
+ generated_dictionary_keys = RDoc::Markdown
+ .parse(generated_dictionary)
+ .table_of_contents
+ .select { |metric_doc| metric_doc.level == 2 && !metric_doc.text.start_with?('info:') }
+ .map(&:text)
+
+ expect(generated_dictionary_keys).to match_array(items.keys)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/docs/value_formatter_spec.rb b/spec/lib/gitlab/usage/docs/value_formatter_spec.rb
new file mode 100644
index 00000000000..ceb00867c95
--- /dev/null
+++ b/spec/lib/gitlab/usage/docs/value_formatter_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Docs::ValueFormatter do
+ describe '.format' do
+ using RSpec::Parameterized::TableSyntax
+ where(:key, :value, :expected_value) do
+ :group | 'growth::product intelligence' | '`growth::product intelligence`'
+ :data_source | 'redis' | 'Redis'
+ :data_source | 'ruby' | 'Ruby'
+ :introduced_by_url | 'http://test.com' | '[Introduced by](http://test.com)'
+ :tier | %w(gold premium) | 'gold, premium'
+ :distribution | %w(ce ee) | 'ce, ee'
+ :key_path | 'key.path' | '**key.path**'
+ :milestone | '13.4' | '13.4'
+ :status | 'data_available' | 'data_available'
+ end
+
+ with_them do
+ subject { described_class.format(key, value) }
+
+ it { is_expected.to eq(expected_value) }
+ end
+ end
+end
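
The table above implies a formatter roughly of this shape (a sketch consistent with the expected values, not necessarily the exact source):

  module Gitlab
    module Usage
      module Docs
        module ValueFormatter
          def self.format(key, value)
            case key
            when :key_path            then "**#{value}**"
            when :group               then "`#{value}`"
            when :data_source         then value.to_s.capitalize
            when :introduced_by_url   then "[Introduced by](#{value})"
            when :tier, :distribution then Array(value).join(', ')
            else value
            end
          end
        end
      end
    end
  end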
diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb
index e101f837324..bc64bdfbf56 100644
--- a/spec/lib/gitlab/usage/metric_definition_spec.rb
+++ b/spec/lib/gitlab/usage/metric_definition_spec.rb
@@ -5,17 +5,13 @@ require 'spec_helper'
RSpec.describe Gitlab::Usage::MetricDefinition do
let(:attributes) do
{
- name: 'uuid',
description: 'GitLab instance unique identifier',
value_type: 'string',
product_category: 'collection',
stage: 'growth',
status: 'data_available',
default_generation: 'generation_1',
- full_path: {
- generation_1: 'uuid',
- generation_2: 'license.uuid'
- },
+ key_path: 'uuid',
group: 'group::product analytics',
time_frame: 'none',
data_source: 'database',
@@ -44,12 +40,11 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
using RSpec::Parameterized::TableSyntax
where(:attribute, :value) do
- :name | nil
:description | nil
:value_type | nil
:value_type | 'test'
:status | nil
- :default_generation | nil
+ :key_path | nil
:group | nil
:time_frame | nil
:time_frame | '29d'
@@ -70,6 +65,20 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
described_class.new(path, attributes).validate!
end
+
+ context 'with skip_validation' do
+ it 'raises an exception when skip_validation is false' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::Metric::InvalidMetricError))
+
+ described_class.new(path, attributes.merge(skip_validation: false)).validate!
+ end
+
+ it 'does not raise an exception when skip_validation is true' do
+ expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
+
+ described_class.new(path, attributes.merge(skip_validation: true)).validate!
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/usage/metric_spec.rb b/spec/lib/gitlab/usage/metric_spec.rb
index 40671d980d6..d4a789419a4 100644
--- a/spec/lib/gitlab/usage/metric_spec.rb
+++ b/spec/lib/gitlab/usage/metric_spec.rb
@@ -4,15 +4,15 @@ require 'spec_helper'
RSpec.describe Gitlab::Usage::Metric do
describe '#definition' do
- it 'returns generation_1 metric definiton' do
- expect(described_class.new(default_generation_path: 'uuid').definition).to be_an(Gitlab::Usage::MetricDefinition)
+ it 'returns key_path metric definition' do
+ expect(described_class.new(key_path: 'uuid').definition).to be_an(Gitlab::Usage::MetricDefinition)
end
end
describe '#unflatten_default_path' do
using RSpec::Parameterized::TableSyntax
- where(:default_generation_path, :value, :expected_hash) do
+ where(:key_path, :value, :expected_hash) do
'uuid' | nil | { uuid: nil }
'uuid' | '1111' | { uuid: '1111' }
'counts.issues' | nil | { counts: { issues: nil } }
@@ -21,7 +21,7 @@ RSpec.describe Gitlab::Usage::Metric do
end
with_them do
- subject { described_class.new(default_generation_path: default_generation_path, value: value).unflatten_default_path }
+ subject { described_class.new(key_path: key_path, value: value).unflatten_key_path }
it { is_expected.to eq(expected_hash) }
end
diff --git a/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb b/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb
new file mode 100644
index 00000000000..a391872c030
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb
@@ -0,0 +1,256 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redis_shared_state do
+ let(:entity1) { 'dfb9d2d2-f56c-4c77-8aeb-6cddc4a1f857' }
+ let(:entity2) { '1dd9afb2-a3ee-4de1-8ae3-a405579c8584' }
+ let(:entity3) { '34rfjuuy-ce56-sa35-ds34-dfer567dfrf2' }
+ let(:entity4) { '8b9a2671-2abf-4bec-a682-22f6a8f7bf31' }
+
+ around do |example|
+ # We need to freeze to a reference time
+ # because visits are grouped by the week number in the year
+ # Without freezing the time, the test may behave inconsistently
+ # depending on which day of the week the test is run.
+ # Monday, 1st of June 2020
+ reference_time = Time.utc(2020, 6, 1)
+ travel_to(reference_time) { example.run }
+ end
+
+ context 'aggregated_metrics_data' do
+ let(:known_events) do
+ [
+ { name: 'event1_slot', redis_slot: "slot", category: 'category1', aggregation: "weekly" },
+ { name: 'event2_slot', redis_slot: "slot", category: 'category2', aggregation: "weekly" },
+ { name: 'event3_slot', redis_slot: "slot", category: 'category3', aggregation: "weekly" },
+ { name: 'event5_slot', redis_slot: "slot", category: 'category4', aggregation: "weekly" },
+ { name: 'event4', category: 'category2', aggregation: "weekly" }
+ ].map(&:with_indifferent_access)
+ end
+
+ before do
+ allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:known_events).and_return(known_events)
+ end
+
+ shared_examples 'aggregated_metrics_data' do
+ context 'when no aggregated metrics are defined' do
+ it 'returns an empty hash' do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:aggregated_metrics).and_return([])
+ end
+
+ expect(aggregated_metrics_data).to eq({})
+ end
+ end
+
+ context 'there are aggregated metrics defined' do
+ before do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:aggregated_metrics).and_return(aggregated_metrics)
+ end
+ end
+
+ context 'with AND operator' do
+ let(:aggregated_metrics) do
+ [
+ { name: 'gmau_1', events: %w[event1_slot event2_slot], operator: "AND" },
+ { name: 'gmau_2', events: %w[event1_slot event2_slot event3_slot], operator: "AND" },
+ { name: 'gmau_3', events: %w[event1_slot event2_slot event3_slot event5_slot], operator: "AND" },
+ { name: 'gmau_4', events: %w[event4], operator: "AND" }
+ ].map(&:with_indifferent_access)
+ end
+
+ it 'returns the number of unique events for all known events' do
+ results = {
+ 'gmau_1' => 3,
+ 'gmau_2' => 2,
+ 'gmau_3' => 1,
+ 'gmau_4' => 3
+ }
+
+ expect(aggregated_metrics_data).to eq(results)
+ end
+ end
+
+ context 'with OR operator' do
+ let(:aggregated_metrics) do
+ [
+ { name: 'gmau_1', events: %w[event3_slot event5_slot], operator: "OR" },
+ { name: 'gmau_2', events: %w[event1_slot event2_slot event3_slot event5_slot], operator: "OR" },
+ { name: 'gmau_3', events: %w[event4], operator: "OR" }
+ ].map(&:with_indifferent_access)
+ end
+
+ it 'returns the number of unique events for all known events' do
+ results = {
+ 'gmau_1' => 2,
+ 'gmau_2' => 3,
+ 'gmau_3' => 3
+ }
+
+ expect(aggregated_metrics_data).to eq(results)
+ end
+ end
+
+ context 'hidden behind feature flag' do
+ let(:enabled_feature_flag) { 'test_ff_enabled' }
+ let(:disabled_feature_flag) { 'test_ff_disabled' }
+ let(:aggregated_metrics) do
+ [
+ # represents stable aggregated metrics that have been fully released
+ { name: 'gmau_without_ff', events: %w[event3_slot event5_slot], operator: "OR" },
+ # represents new aggregated metric that is under performance testing on gitlab.com
+ { name: 'gmau_enabled', events: %w[event4], operator: "AND", feature_flag: enabled_feature_flag },
+ # represents an aggregated metric that is under development and shouldn't yet be collected, even on gitlab.com
+ { name: 'gmau_disabled', events: %w[event4], operator: "AND", feature_flag: disabled_feature_flag }
+ ].map(&:with_indifferent_access)
+ end
+
+ it 'returns the number of unique events for all known events' do
+ skip_feature_flags_yaml_validation
+ stub_feature_flags(enabled_feature_flag => true, disabled_feature_flag => false)
+
+ expect(aggregated_metrics_data).to eq('gmau_without_ff' => 2, 'gmau_enabled' => 3)
+ end
+ end
+ end
+
+ context 'error handling' do
+ context 'development and test environment' do
+ it 'raises error when unknown aggregation operator is used' do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:aggregated_metrics)
+ .and_return([{ name: 'gmau_1', events: %w[event1_slot], operator: "SUM" }])
+ end
+
+ expect { aggregated_metrics_data }.to raise_error Gitlab::Usage::Metrics::Aggregates::UnknownAggregationOperator
+ end
+
+ it 're-raises Gitlab::UsageDataCounters::HLLRedisCounter::EventError' do
+ error = Gitlab::UsageDataCounters::HLLRedisCounter::EventError
+ allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:calculate_events_union).and_raise(error)
+
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:aggregated_metrics)
+ .and_return([{ name: 'gmau_1', events: %w[event1_slot], operator: "OR" }])
+ end
+
+ expect { aggregated_metrics_data }.to raise_error error
+ end
+ end
+
+ context 'production' do
+ before do
+ stub_rails_env('production')
+ end
+
+ it 'rescues unknown aggregation operator error' do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:aggregated_metrics)
+ .and_return([{ name: 'gmau_1', events: %w[event1_slot], operator: "SUM" }])
+ end
+
+ expect(aggregated_metrics_data).to eq('gmau_1' => -1)
+ end
+
+ it 'rescues Gitlab::UsageDataCounters::HLLRedisCounter::EventError' do
+ error = Gitlab::UsageDataCounters::HLLRedisCounter::EventError
+ allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:calculate_events_union).and_raise(error)
+
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:aggregated_metrics)
+ .and_return([{ name: 'gmau_1', events: %w[event1_slot], operator: "OR" }])
+ end
+
+ expect(aggregated_metrics_data).to eq('gmau_1' => -1)
+ end
+ end
+ end
+ end
+
+ describe '.aggregated_metrics_weekly_data' do
+ subject(:aggregated_metrics_data) { described_class.new.weekly_data }
+
+ before do
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event1_slot', values: entity1, time: 2.days.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event1_slot', values: entity2, time: 2.days.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event1_slot', values: entity3, time: 2.days.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event2_slot', values: entity1, time: 2.days.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event2_slot', values: entity2, time: 3.days.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event2_slot', values: entity3, time: 3.days.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event3_slot', values: entity1, time: 3.days.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event3_slot', values: entity2, time: 3.days.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event5_slot', values: entity2, time: 3.days.ago)
+
+ # events out of time scope
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event2_slot', values: entity3, time: 8.days.ago)
+
+ # events in different slots
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event4', values: entity1, time: 2.days.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event4', values: entity2, time: 2.days.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event4', values: entity4, time: 2.days.ago)
+ end
+
+ it_behaves_like 'aggregated_metrics_data'
+ end
+
+ describe '.aggregated_metrics_monthly_data' do
+ subject(:aggregated_metrics_data) { described_class.new.monthly_data }
+
+ it_behaves_like 'aggregated_metrics_data' do
+ before do
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event1_slot', values: entity1, time: 2.days.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event1_slot', values: entity2, time: 2.days.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event1_slot', values: entity3, time: 2.days.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event2_slot', values: entity1, time: 2.days.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event2_slot', values: entity2, time: 3.days.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event2_slot', values: entity3, time: 3.days.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event3_slot', values: entity1, time: 3.days.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event3_slot', values: entity2, time: 10.days.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event5_slot', values: entity2, time: 4.weeks.ago.advance(days: 1))
+
+ # events out of time scope
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event5_slot', values: entity1, time: 4.weeks.ago.advance(days: -1))
+
+ # events in different slots
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event4', values: entity1, time: 2.days.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event4', values: entity2, time: 2.days.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('event4', values: entity4, time: 2.days.ago)
+ end
+ end
+
+ context 'Redis calls' do
+ let(:aggregated_metrics) do
+ [
+ { name: 'gmau_3', events: %w[event1_slot event2_slot event3_slot event5_slot], operator: "AND" }
+ ].map(&:with_indifferent_access)
+ end
+
+ it 'caches intermediate operations' do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:aggregated_metrics).and_return(aggregated_metrics)
+ end
+
+ aggregated_metrics[0][:events].each do |event|
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:calculate_events_union)
+ .with(event_names: event, start_date: 4.weeks.ago.to_date, end_date: Date.current)
+ .once
+ .and_return(0)
+ end
+
+ 2.upto(4) do |subset_size|
+ aggregated_metrics[0][:events].combination(subset_size).each do |events|
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:calculate_events_union)
+ .with(event_names: events, start_date: 4.weeks.ago.to_date, end_date: Date.current)
+ .once
+ .and_return(0)
+ end
+ end
+
+ aggregated_metrics_data
+ end
+ end
+ end
+ end
+end
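
The 'caches intermediate operations' example encodes how an AND aggregate can be computed from HyperLogLog data, which only supports unions: the size of an intersection is recovered from the unions of every non-empty subset of events via inclusion-exclusion, |A1 ∩ … ∩ An| = Σ over non-empty subsets S of (−1)^(|S|+1) · |∪ S|, with each union memoized so Redis is queried once per subset. A hedged sketch of that idea (method name is illustrative, not the class under test):

  def calculate_events_intersection(event_names:, start_date:, end_date:)
    1.upto(event_names.size).sum do |subset_size|
      sign = (-1)**(subset_size + 1)
      event_names.combination(subset_size).sum do |events|
        sign * calculate_events_union(event_names: events, start_date: start_date, end_date: end_date)
      end
    end
  end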
diff --git a/spec/lib/gitlab/usage_data_counters/aggregated_metrics_spec.rb b/spec/lib/gitlab/usage_data_counters/aggregated_metrics_spec.rb
index c0deb2aa00c..6ca7039b3de 100644
--- a/spec/lib/gitlab/usage_data_counters/aggregated_metrics_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/aggregated_metrics_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe 'aggregated metrics' do
Gitlab::UsageDataCounters::HLLRedisCounter.known_events
end
- Gitlab::UsageDataCounters::HLLRedisCounter.aggregated_metrics.tap do |aggregated_metrics|
+ Gitlab::Usage::Metrics::Aggregates::Aggregate.new.send(:aggregated_metrics).tap do |aggregated_metrics|
it 'all events has unique name' do
event_names = aggregated_metrics&.map { |event| event[:name] }
@@ -37,7 +37,7 @@ RSpec.describe 'aggregated metrics' do
end
it "uses allowed aggregation operators" do
- expect(Gitlab::UsageDataCounters::HLLRedisCounter::ALLOWED_METRICS_AGGREGATIONS).to include aggregate[:operator]
+ expect(Gitlab::Usage::Metrics::Aggregates::ALLOWED_METRICS_AGGREGATIONS).to include aggregate[:operator]
end
it "uses events from the same Redis slot" do
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index b8eddc0ca7f..f0b8ce6c2fb 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -39,7 +39,9 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
'snippets',
'code_review',
'terraform',
- 'ci_templates'
+ 'ci_templates',
+ 'quickactions',
+ 'pipeline_authoring'
)
end
end
@@ -425,182 +427,59 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
end
end
- context 'aggregated_metrics_data' do
+ describe '.calculate_events_union' do
+ let(:time_range) { { start_date: 7.days.ago, end_date: DateTime.current } }
let(:known_events) do
[
{ name: 'event1_slot', redis_slot: "slot", category: 'category1', aggregation: "weekly" },
{ name: 'event2_slot', redis_slot: "slot", category: 'category2', aggregation: "weekly" },
{ name: 'event3_slot', redis_slot: "slot", category: 'category3', aggregation: "weekly" },
- { name: 'event5_slot', redis_slot: "slot", category: 'category4', aggregation: "weekly" },
+ { name: 'event5_slot', redis_slot: "slot", category: 'category4', aggregation: "daily" },
{ name: 'event4', category: 'category2', aggregation: "weekly" }
].map(&:with_indifferent_access)
end
before do
allow(described_class).to receive(:known_events).and_return(known_events)
- end
-
- shared_examples 'aggregated_metrics_data' do
- context 'no aggregated metrics is defined' do
- it 'returns empty hash' do
- allow(described_class).to receive(:aggregated_metrics).and_return([])
- expect(aggregated_metrics_data).to eq({})
- end
- end
-
- context 'there are aggregated metrics defined' do
- before do
- allow(described_class).to receive(:aggregated_metrics).and_return(aggregated_metrics)
- end
-
- context 'with AND operator' do
- let(:aggregated_metrics) do
- [
- { name: 'gmau_1', events: %w[event1_slot event2_slot], operator: "AND" },
- { name: 'gmau_2', events: %w[event1_slot event2_slot event3_slot], operator: "AND" },
- { name: 'gmau_3', events: %w[event1_slot event2_slot event3_slot event5_slot], operator: "AND" },
- { name: 'gmau_4', events: %w[event4], operator: "AND" }
- ].map(&:with_indifferent_access)
- end
-
- it 'returns the number of unique events for all known events' do
- results = {
- 'gmau_1' => 3,
- 'gmau_2' => 2,
- 'gmau_3' => 1,
- 'gmau_4' => 3
- }
-
- expect(aggregated_metrics_data).to eq(results)
- end
- end
-
- context 'with OR operator' do
- let(:aggregated_metrics) do
- [
- { name: 'gmau_1', events: %w[event3_slot event5_slot], operator: "OR" },
- { name: 'gmau_2', events: %w[event1_slot event2_slot event3_slot event5_slot], operator: "OR" },
- { name: 'gmau_3', events: %w[event4], operator: "OR" }
- ].map(&:with_indifferent_access)
- end
-
- it 'returns the number of unique events for all known events' do
- results = {
- 'gmau_1' => 2,
- 'gmau_2' => 3,
- 'gmau_3' => 3
- }
-
- expect(aggregated_metrics_data).to eq(results)
- end
- end
-
- context 'hidden behind feature flag' do
- let(:enabled_feature_flag) { 'test_ff_enabled' }
- let(:disabled_feature_flag) { 'test_ff_disabled' }
- let(:aggregated_metrics) do
- [
- # represents stable aggregated metrics that has been fully released
- { name: 'gmau_without_ff', events: %w[event3_slot event5_slot], operator: "OR" },
- # represents new aggregated metric that is under performance testing on gitlab.com
- { name: 'gmau_enabled', events: %w[event4], operator: "AND", feature_flag: enabled_feature_flag },
- # represents aggregated metric that is under development and shouldn't be yet collected even on gitlab.com
- { name: 'gmau_disabled', events: %w[event4], operator: "AND", feature_flag: disabled_feature_flag }
- ].map(&:with_indifferent_access)
- end
-
- it 'returns the number of unique events for all known events' do
- skip_feature_flags_yaml_validation
- stub_feature_flags(enabled_feature_flag => true, disabled_feature_flag => false)
+ described_class.track_event('event1_slot', values: entity1, time: 2.days.ago)
+ described_class.track_event('event1_slot', values: entity2, time: 2.days.ago)
+ described_class.track_event('event1_slot', values: entity3, time: 2.days.ago)
+ described_class.track_event('event2_slot', values: entity1, time: 2.days.ago)
+ described_class.track_event('event2_slot', values: entity2, time: 3.days.ago)
+ described_class.track_event('event2_slot', values: entity3, time: 3.days.ago)
+ described_class.track_event('event3_slot', values: entity1, time: 3.days.ago)
+ described_class.track_event('event3_slot', values: entity2, time: 3.days.ago)
+ described_class.track_event('event5_slot', values: entity2, time: 3.days.ago)
+
+ # events out of time scope
+ described_class.track_event('event2_slot', values: entity4, time: 8.days.ago)
- expect(aggregated_metrics_data).to eq('gmau_without_ff' => 2, 'gmau_enabled' => 3)
- end
- end
- end
+ # events in different slots
+ described_class.track_event('event4', values: entity1, time: 2.days.ago)
+ described_class.track_event('event4', values: entity2, time: 2.days.ago)
end
- describe '.aggregated_metrics_weekly_data' do
- subject(:aggregated_metrics_data) { described_class.aggregated_metrics_weekly_data }
-
- before do
- described_class.track_event('event1_slot', values: entity1, time: 2.days.ago)
- described_class.track_event('event1_slot', values: entity2, time: 2.days.ago)
- described_class.track_event('event1_slot', values: entity3, time: 2.days.ago)
- described_class.track_event('event2_slot', values: entity1, time: 2.days.ago)
- described_class.track_event('event2_slot', values: entity2, time: 3.days.ago)
- described_class.track_event('event2_slot', values: entity3, time: 3.days.ago)
- described_class.track_event('event3_slot', values: entity1, time: 3.days.ago)
- described_class.track_event('event3_slot', values: entity2, time: 3.days.ago)
- described_class.track_event('event5_slot', values: entity2, time: 3.days.ago)
-
- # events out of time scope
- described_class.track_event('event2_slot', values: entity3, time: 8.days.ago)
-
- # events in different slots
- described_class.track_event('event4', values: entity1, time: 2.days.ago)
- described_class.track_event('event4', values: entity2, time: 2.days.ago)
- described_class.track_event('event4', values: entity4, time: 2.days.ago)
- end
-
- it_behaves_like 'aggregated_metrics_data'
+ it 'calculates the union of the given events', :aggregate_failures do
+ expect(described_class.calculate_events_union(**time_range.merge(event_names: %w[event4]))).to eq 2
+ expect(described_class.calculate_events_union(**time_range.merge(event_names: %w[event1_slot event2_slot event3_slot]))).to eq 3
end
- describe '.aggregated_metrics_monthly_data' do
- subject(:aggregated_metrics_data) { described_class.aggregated_metrics_monthly_data }
-
- it_behaves_like 'aggregated_metrics_data' do
- before do
- described_class.track_event('event1_slot', values: entity1, time: 2.days.ago)
- described_class.track_event('event1_slot', values: entity2, time: 2.days.ago)
- described_class.track_event('event1_slot', values: entity3, time: 2.days.ago)
- described_class.track_event('event2_slot', values: entity1, time: 2.days.ago)
- described_class.track_event('event2_slot', values: entity2, time: 3.days.ago)
- described_class.track_event('event2_slot', values: entity3, time: 3.days.ago)
- described_class.track_event('event3_slot', values: entity1, time: 3.days.ago)
- described_class.track_event('event3_slot', values: entity2, time: 10.days.ago)
- described_class.track_event('event5_slot', values: entity2, time: 4.weeks.ago.advance(days: 1))
-
- # events out of time scope
- described_class.track_event('event5_slot', values: entity1, time: 4.weeks.ago.advance(days: -1))
-
- # events in different slots
- described_class.track_event('event4', values: entity1, time: 2.days.ago)
- described_class.track_event('event4', values: entity2, time: 2.days.ago)
- described_class.track_event('event4', values: entity4, time: 2.days.ago)
- end
- end
-
- context 'Redis calls' do
- let(:aggregated_metrics) do
- [
- { name: 'gmau_3', events: %w[event1_slot event2_slot event3_slot event5_slot], operator: "AND" }
- ].map(&:with_indifferent_access)
- end
-
- let(:known_events) do
- [
- { name: 'event1_slot', redis_slot: "slot", category: 'category1', aggregation: "weekly" },
- { name: 'event2_slot', redis_slot: "slot", category: 'category2', aggregation: "weekly" },
- { name: 'event3_slot', redis_slot: "slot", category: 'category3', aggregation: "weekly" },
- { name: 'event5_slot', redis_slot: "slot", category: 'category4', aggregation: "weekly" }
- ].map(&:with_indifferent_access)
- end
-
- it 'caches intermediate operations' do
- allow(described_class).to receive(:known_events).and_return(known_events)
- allow(described_class).to receive(:aggregated_metrics).and_return(aggregated_metrics)
+ it 'validates and raises an exception if events have a mismatched slot or aggregation', :aggregate_failures do
+ expect { described_class.calculate_events_union(**time_range.merge(event_names: %w[event1_slot event4])) }.to raise_error described_class::SlotMismatch
+ expect { described_class.calculate_events_union(**time_range.merge(event_names: %w[event5_slot event3_slot])) }.to raise_error described_class::AggregationMismatch
+ end
+ end
- 4.downto(1) do |subset_size|
- known_events.combination(subset_size).each do |events|
- keys = described_class.send(:weekly_redis_keys, events: events, start_date: 4.weeks.ago.to_date, end_date: Date.current)
- expect(Gitlab::Redis::HLL).to receive(:count).with(keys: keys).once.and_return(0)
- end
- end
+ describe '.weekly_time_range' do
+ it 'returns a hash with weekly time range boundaries' do
+ expect(described_class.weekly_time_range).to eq(start_date: 7.days.ago.to_date, end_date: Date.current)
+ end
+ end
- subject
- end
- end
+ describe '.monthly_time_range' do
+ it 'returns a hash with monthly time range boundaries' do
+ expect(described_class.monthly_time_range).to eq(start_date: 4.weeks.ago.to_date, end_date: Date.current)
end
end
end
diff --git a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
index c7b208cfb31..509ba43ef32 100644
--- a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
@@ -73,6 +73,22 @@ RSpec.describe Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter, :cl
end
end
+ describe '.track_resolve_thread_action' do
+ subject { described_class.track_resolve_thread_action(user: user) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_RESOLVE_THREAD_ACTION }
+ end
+ end
+
+ describe '.track_unresolve_thread_action' do
+ subject { described_class.track_unresolve_thread_action(user: user) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_UNRESOLVE_THREAD_ACTION }
+ end
+ end
+
describe '.track_create_comment_action' do
subject { described_class.track_create_comment_action(note: note) }
@@ -148,4 +164,36 @@ RSpec.describe Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter, :cl
let(:action) { described_class::MR_PUBLISH_REVIEW_ACTION }
end
end
+
+ describe '.track_add_suggestion_action' do
+ subject { described_class.track_add_suggestion_action(user: user) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_ADD_SUGGESTION_ACTION }
+ end
+ end
+
+ describe '.track_apply_suggestion_action' do
+ subject { described_class.track_apply_suggestion_action(user: user) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_APPLY_SUGGESTION_ACTION }
+ end
+ end
+
+ describe '.track_users_assigned_to_mr' do
+ subject { described_class.track_users_assigned_to_mr(users: [user]) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_ASSIGNED_USERS_ACTION }
+ end
+ end
+
+ describe '.track_users_review_requested' do
+ subject { described_class.track_users_review_requested(users: [user]) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_REVIEW_REQUESTED_USERS_ACTION }
+ end
+ end
end
diff --git a/spec/lib/gitlab/usage_data_counters/quick_action_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/quick_action_activity_unique_counter_spec.rb
new file mode 100644
index 00000000000..d4c423f57fe
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_counters/quick_action_activity_unique_counter_spec.rb
@@ -0,0 +1,163 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageDataCounters::QuickActionActivityUniqueCounter, :clean_gitlab_redis_shared_state do
+ let(:user) { build(:user, id: 1) }
+ let(:note) { build(:note, author: user) }
+ let(:args) { nil }
+
+ shared_examples_for 'a tracked quick action unique event' do
+ specify do
+ expect { 3.times { subject } }
+ .to change {
+ Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(
+ event_names: action,
+ start_date: 2.weeks.ago,
+ end_date: 2.weeks.from_now
+ )
+ }
+ .by(1)
+ end
+ end
+
+ subject { described_class.track_unique_action(quickaction_name, args: args, user: user) }
+
+ describe '.track_unique_action' do
+ let(:quickaction_name) { 'approve' }
+
+ it_behaves_like 'a tracked quick action unique event' do
+ let(:action) { 'i_quickactions_approve' }
+ end
+ end
+
+ context 'tracking assigns' do
+ let(:quickaction_name) { 'assign' }
+
+ context 'single assignee' do
+ let(:args) { '@one' }
+
+ it_behaves_like 'a tracked quick action unique event' do
+ let(:action) { 'i_quickactions_assign_single' }
+ end
+ end
+
+ context 'multiple assignees' do
+ let(:args) { '@one @two' }
+
+ it_behaves_like 'a tracked quick action unique event' do
+ let(:action) { 'i_quickactions_assign_multiple' }
+ end
+ end
+
+ context 'assigning "me"' do
+ let(:args) { 'me' }
+
+ it_behaves_like 'a tracked quick action unique event' do
+ let(:action) { 'i_quickactions_assign_self' }
+ end
+ end
+
+ context 'assigning a reviewer' do
+ let(:quickaction_name) { 'assign_reviewer' }
+
+ it_behaves_like 'a tracked quick action unique event' do
+ let(:action) { 'i_quickactions_assign_reviewer' }
+ end
+ end
+
+ context 'assigning a reviewer with request review alias' do
+ let(:quickaction_name) { 'request_review' }
+
+ it_behaves_like 'a tracked quick action unique event' do
+ let(:action) { 'i_quickactions_assign_reviewer' }
+ end
+ end
+ end
+
+ context 'tracking copy_metadata' do
+ let(:quickaction_name) { 'copy_metadata' }
+
+ context 'for issues' do
+ let(:args) { '#123' }
+
+ it_behaves_like 'a tracked quick action unique event' do
+ let(:action) { 'i_quickactions_copy_metadata_issue' }
+ end
+ end
+
+ context 'for merge requests' do
+ let(:args) { '!123' }
+
+ it_behaves_like 'a tracked quick action unique event' do
+ let(:action) { 'i_quickactions_copy_metadata_merge_request' }
+ end
+ end
+ end
+
+ context 'tracking spend' do
+ let(:quickaction_name) { 'spend' }
+
+ context 'adding time' do
+ let(:args) { '1d' }
+
+ it_behaves_like 'a tracked quick action unique event' do
+ let(:action) { 'i_quickactions_spend_add' }
+ end
+ end
+
+ context 'removing time' do
+ let(:args) { '-1d' }
+
+ it_behaves_like 'a tracked quick action unique event' do
+ let(:action) { 'i_quickactions_spend_subtract' }
+ end
+ end
+ end
+
+ context 'tracking unassign' do
+ let(:quickaction_name) { 'unassign' }
+
+ context 'unassigning everyone' do
+ it_behaves_like 'a tracked quick action unique event' do
+ let(:action) { 'i_quickactions_unassign_all' }
+ end
+ end
+
+ context 'unassigning specific users' do
+ let(:args) { '@hello' }
+
+ it_behaves_like 'a tracked quick action unique event' do
+ let(:action) { 'i_quickactions_unassign_specific' }
+ end
+ end
+ end
+
+ context 'tracking unlabel' do
+ context 'called as unlabel' do
+ let(:quickaction_name) { 'unlabel' }
+
+ context 'removing all labels' do
+ it_behaves_like 'a tracked quick action unique event' do
+ let(:action) { 'i_quickactions_unlabel_all' }
+ end
+ end
+
+ context 'removing specific labels' do
+ let(:args) { '~wow' }
+
+ it_behaves_like 'a tracked quick action unique event' do
+ let(:action) { 'i_quickactions_unlabel_specific' }
+ end
+ end
+ end
+
+ context 'called as remove_label' do
+ let(:quickaction_name) { 'remove_label' }
+
+ it_behaves_like 'a tracked quick action unique event' do
+ let(:action) { 'i_quickactions_unlabel_all' }
+ end
+ end
+ end
+end
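
The contexts above imply a dispatch that maps the quick action name, and sometimes its arguments, onto a specific HLL event before tracking it. A hedged, partial sketch of that shape (assign and unassign only; the real class handles more cases):

  def self.track_unique_action(name, args:, user:)
    event =
      case name.to_s
      when 'assign'
        if args == 'me'
          'i_quickactions_assign_self'
        elsif args.to_s.split.size > 1
          'i_quickactions_assign_multiple'
        else
          'i_quickactions_assign_single'
        end
      when 'unassign'
        args.present? ? 'i_quickactions_unassign_specific' : 'i_quickactions_unassign_all'
      else
        "i_quickactions_#{name}"
      end

    Gitlab::UsageDataCounters::HLLRedisCounter.track_event(event, values: user.id)
  end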
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index fd02521622c..602f6640d72 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -228,11 +228,32 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
)
end
- it 'includes imports usage data' do
+ it 'includes import gmau usage data' do
for_defined_days_back do
user = create(:user)
+ group = create(:group)
+ group.add_owner(user)
+
+ create(:project, import_type: :github, creator_id: user.id)
+ create(:jira_import_state, :finished, project: create(:project, creator_id: user.id))
+ create(:issue_csv_import, user: user)
+ create(:group_import_state, group: group, user: user)
create(:bulk_import, user: user)
+ end
+
+ expect(described_class.usage_activity_by_stage_manage({})).to include(
+ unique_users_all_imports: 10
+ )
+
+ expect(described_class.usage_activity_by_stage_manage(described_class.last_28_days_time_period)).to include(
+ unique_users_all_imports: 5
+ )
+ end
+
+ it 'includes imports usage data' do
+ for_defined_days_back do
+ user = create(:user)
%w(gitlab_project gitlab github bitbucket bitbucket_server gitea git manifest fogbugz phabricator).each do |type|
create(:project, import_type: type, creator_id: user.id)
@@ -242,72 +263,113 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
create(:jira_import_state, :finished, project: jira_project)
create(:issue_csv_import, user: user)
+
+ group = create(:group)
+ group.add_owner(user)
+ create(:group_import_state, group: group, user: user)
+
+ bulk_import = create(:bulk_import, user: user)
+ create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
+ create(:bulk_import_entity, :project_entity, bulk_import: bulk_import)
end
expect(described_class.usage_activity_by_stage_manage({})).to include(
{
bulk_imports: {
- gitlab: 2
+ gitlab_v1: 2,
+ gitlab: Gitlab::UsageData::DEPRECATED_VALUE
},
- projects_imported: {
- total: 2,
- gitlab_project: 2,
- gitlab: 2,
- github: 2,
+ project_imports: {
bitbucket: 2,
bitbucket_server: 2,
- gitea: 2,
git: 2,
+ gitea: 2,
+ github: 2,
+ gitlab: 2,
+ gitlab_migration: 2,
+ gitlab_project: 2,
manifest: 2
},
- issues_imported: {
+ issue_imports: {
jira: 2,
fogbugz: 2,
phabricator: 2,
csv: 2
- }
+ },
+ group_imports: {
+ group_import: 2,
+ gitlab_migration: 2
+ },
+ projects_imported: {
+ total: Gitlab::UsageData::DEPRECATED_VALUE,
+ gitlab_project: Gitlab::UsageData::DEPRECATED_VALUE,
+ gitlab: Gitlab::UsageData::DEPRECATED_VALUE,
+ github: Gitlab::UsageData::DEPRECATED_VALUE,
+ bitbucket: Gitlab::UsageData::DEPRECATED_VALUE,
+ bitbucket_server: Gitlab::UsageData::DEPRECATED_VALUE,
+ gitea: Gitlab::UsageData::DEPRECATED_VALUE,
+ git: Gitlab::UsageData::DEPRECATED_VALUE,
+ manifest: Gitlab::UsageData::DEPRECATED_VALUE
+ },
+ issues_imported: {
+ jira: Gitlab::UsageData::DEPRECATED_VALUE,
+ fogbugz: Gitlab::UsageData::DEPRECATED_VALUE,
+ phabricator: Gitlab::UsageData::DEPRECATED_VALUE,
+ csv: Gitlab::UsageData::DEPRECATED_VALUE
+ },
+ groups_imported: Gitlab::UsageData::DEPRECATED_VALUE
}
)
expect(described_class.usage_activity_by_stage_manage(described_class.last_28_days_time_period)).to include(
{
bulk_imports: {
- gitlab: 1
+ gitlab_v1: 1,
+ gitlab: Gitlab::UsageData::DEPRECATED_VALUE
},
- projects_imported: {
- total: 1,
- gitlab_project: 1,
- gitlab: 1,
- github: 1,
+ project_imports: {
bitbucket: 1,
bitbucket_server: 1,
- gitea: 1,
git: 1,
+ gitea: 1,
+ github: 1,
+ gitlab: 1,
+ gitlab_migration: 1,
+ gitlab_project: 1,
manifest: 1
},
- issues_imported: {
+ issue_imports: {
jira: 1,
fogbugz: 1,
phabricator: 1,
csv: 1
- }
+ },
+ group_imports: {
+ group_import: 1,
+ gitlab_migration: 1
+ },
+ projects_imported: {
+ total: Gitlab::UsageData::DEPRECATED_VALUE,
+ gitlab_project: Gitlab::UsageData::DEPRECATED_VALUE,
+ gitlab: Gitlab::UsageData::DEPRECATED_VALUE,
+ github: Gitlab::UsageData::DEPRECATED_VALUE,
+ bitbucket: Gitlab::UsageData::DEPRECATED_VALUE,
+ bitbucket_server: Gitlab::UsageData::DEPRECATED_VALUE,
+ gitea: Gitlab::UsageData::DEPRECATED_VALUE,
+ git: Gitlab::UsageData::DEPRECATED_VALUE,
+ manifest: Gitlab::UsageData::DEPRECATED_VALUE
+ },
+ issues_imported: {
+ jira: Gitlab::UsageData::DEPRECATED_VALUE,
+ fogbugz: Gitlab::UsageData::DEPRECATED_VALUE,
+ phabricator: Gitlab::UsageData::DEPRECATED_VALUE,
+ csv: Gitlab::UsageData::DEPRECATED_VALUE
+ },
+ groups_imported: Gitlab::UsageData::DEPRECATED_VALUE
+
}
)
end
- it 'includes group imports usage data' do
- for_defined_days_back do
- user = create(:user)
- group = create(:group)
- group.add_owner(user)
- create(:group_import_state, group: group, user: user)
- end
-
- expect(described_class.usage_activity_by_stage_manage({}))
- .to include(groups_imported: 2)
- expect(described_class.usage_activity_by_stage_manage(described_class.last_28_days_time_period))
- .to include(groups_imported: 1)
- end
-
def omniauth_providers
[
OpenStruct.new(name: 'google_oauth2'),
@@ -1262,7 +1324,9 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
subject { described_class.redis_hll_counters }
let(:categories) { ::Gitlab::UsageDataCounters::HLLRedisCounter.categories }
- let(:ineligible_total_categories) { %w[source_code ci_secrets_management incident_management_alerts snippets terraform] }
+ let(:ineligible_total_categories) do
+ %w[source_code ci_secrets_management incident_management_alerts snippets terraform pipeline_authoring]
+ end
it 'has all known_events' do
expect(subject).to have_key(:redis_hll_counters)
@@ -1286,8 +1350,10 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
describe '.aggregated_metrics_weekly' do
subject(:aggregated_metrics_payload) { described_class.aggregated_metrics_weekly }
- it 'uses ::Gitlab::UsageDataCounters::HLLRedisCounter#aggregated_metrics_data', :aggregate_failures do
- expect(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:aggregated_metrics_weekly_data).and_return(global_search_gmau: 123)
+ it 'uses ::Gitlab::Usage::Metrics::Aggregates::Aggregate#weekly_data', :aggregate_failures do
+ expect_next_instance_of(::Gitlab::Usage::Metrics::Aggregates::Aggregate) do |instance|
+ expect(instance).to receive(:weekly_data).and_return(global_search_gmau: 123)
+ end
expect(aggregated_metrics_payload).to eq(aggregated_metrics: { global_search_gmau: 123 })
end
end
@@ -1295,8 +1361,10 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
describe '.aggregated_metrics_monthly' do
subject(:aggregated_metrics_payload) { described_class.aggregated_metrics_monthly }
- it 'uses ::Gitlab::UsageDataCounters::HLLRedisCounter#aggregated_metrics_data', :aggregate_failures do
- expect(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:aggregated_metrics_monthly_data).and_return(global_search_gmau: 123)
+ it 'uses ::Gitlab::Usage::Metrics::Aggregates::Aggregate#monthly_data', :aggregate_failures do
+ expect_next_instance_of(::Gitlab::Usage::Metrics::Aggregates::Aggregate) do |instance|
+ expect(instance).to receive(:monthly_data).and_return(global_search_gmau: 123)
+ end
expect(aggregated_metrics_payload).to eq(aggregated_metrics: { global_search_gmau: 123 })
end
end
diff --git a/spec/lib/gitlab/utils/markdown_spec.rb b/spec/lib/gitlab/utils/markdown_spec.rb
index 93d91f7ed90..acc5bd47c8c 100644
--- a/spec/lib/gitlab/utils/markdown_spec.rb
+++ b/spec/lib/gitlab/utils/markdown_spec.rb
@@ -53,33 +53,23 @@ RSpec.describe Gitlab::Utils::Markdown do
end
context 'when string has a product suffix' do
- let(:string) { 'My Header (ULTIMATE)' }
-
- it 'ignores a product suffix' do
- is_expected.to eq 'my-header'
- end
-
- context 'with only modifier' do
- let(:string) { 'My Header (STARTER ONLY)' }
-
- it 'ignores a product suffix' do
- is_expected.to eq 'my-header'
- end
- end
-
- context 'with "*" around a product suffix' do
- let(:string) { 'My Header **(STARTER)**' }
-
- it 'ignores a product suffix' do
- is_expected.to eq 'my-header'
- end
- end
-
- context 'with "*" around a product suffix and only modifier' do
- let(:string) { 'My Header **(STARTER ONLY)**' }
-
- it 'ignores a product suffix' do
- is_expected.to eq 'my-header'
+ %w[CORE STARTER PREMIUM ULTIMATE FREE BRONZE SILVER GOLD].each do |tier|
+ ['', ' ONLY', ' SELF', ' SASS'].each do |modifier|
+ context "#{tier}#{modifier}" do
+ let(:string) { "My Header (#{tier}#{modifier})" }
+
+ it 'ignores a product suffix' do
+ is_expected.to eq 'my-header'
+ end
+
+ context 'with "*" around a product suffix' do
+ let(:string) { "My Header **(#{tier}#{modifier})**" }
+
+ it 'ignores a product suffix' do
+ is_expected.to eq 'my-header'
+ end
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/utils/override_spec.rb b/spec/lib/gitlab/utils/override_spec.rb
index 7ba7392df0f..a5e53c1dfc1 100644
--- a/spec/lib/gitlab/utils/override_spec.rb
+++ b/spec/lib/gitlab/utils/override_spec.rb
@@ -2,6 +2,9 @@
require 'fast_spec_helper'
+# Patching ActiveSupport::Concern
+require_relative '../../../../config/initializers/0_as_concern'
+
RSpec.describe Gitlab::Utils::Override do
let(:base) do
Struct.new(:good) do
@@ -164,6 +167,70 @@ RSpec.describe Gitlab::Utils::Override do
it_behaves_like 'checking as intended, nothing was overridden'
end
+
+ context 'when ActiveSupport::Concern and class_methods are used' do
+ # We need to give the modules names before using Override
+ let(:base) { stub_const('Base', Module.new) }
+ let(:extension) { stub_const('Extension', Module.new) }
+
+ def define_base(method_name:)
+ base.module_eval do
+ extend ActiveSupport::Concern
+
+ class_methods do
+ define_method(method_name) do
+ :f
+ end
+ end
+ end
+ end
+
+ def define_extension(method_name:)
+ extension.module_eval do
+ extend ActiveSupport::Concern
+
+ class_methods do
+ extend Gitlab::Utils::Override
+
+ override method_name
+ define_method(method_name) do
+ :g
+ end
+ end
+ end
+ end
+
+ context 'when it is defining an overriding method' do
+ before do
+ define_base(method_name: :f)
+ define_extension(method_name: :f)
+
+ base.prepend(extension)
+ end
+
+ it 'verifies' do
+ expect(base.f).to eq(:g)
+
+ described_class.verify!
+ end
+ end
+
+ context 'when it is not defining an overriding method' do
+ before do
+ define_base(method_name: :f)
+ define_extension(method_name: :g)
+
+ base.prepend(extension)
+ end
+
+ it 'raises NotImplementedError' do
+ expect(base.f).to eq(:f)
+
+ expect { described_class.verify! }
+ .to raise_error(NotImplementedError)
+ end
+ end
+ end
end
context 'when STATIC_VERIFICATION is not set' do
diff --git a/spec/lib/gitlab/utils/usage_data_spec.rb b/spec/lib/gitlab/utils/usage_data_spec.rb
index dfc381d0ef2..27248d1d95a 100644
--- a/spec/lib/gitlab/utils/usage_data_spec.rb
+++ b/spec/lib/gitlab/utils/usage_data_spec.rb
@@ -58,6 +58,16 @@ RSpec.describe Gitlab::Utils::UsageData do
expect(described_class.estimate_batch_distinct_count(relation, 'column')).to eq(5)
end
+ it 'yields the provided block with PostgresHll::Buckets' do
+ buckets = Gitlab::Database::PostgresHll::Buckets.new
+
+ allow_next_instance_of(Gitlab::Database::PostgresHll::BatchDistinctCounter) do |instance|
+ allow(instance).to receive(:execute).and_return(buckets)
+ end
+
+ expect { |block| described_class.estimate_batch_distinct_count(relation, 'column', &block) }.to yield_with_args(buckets)
+ end
+
context 'quasi integration test for different counting parameters' do
# HyperLogLog http://algo.inria.fr/flajolet/Publications/FlFuGaMe07.pdf algorithm
# used in estimate_batch_distinct_count produces probabilistic
@@ -362,4 +372,97 @@ RSpec.describe Gitlab::Utils::UsageData do
end
end
end
+
+ describe '#save_aggregated_metrics', :clean_gitlab_redis_shared_state do
+ let(:timestamp) { Time.current.to_i }
+ let(:time_period) { { created_at: 7.days.ago..Date.current } }
+ let(:metric_name) { 'test_metric' }
+ let(:method_params) do
+ {
+ metric_name: metric_name,
+ time_period: time_period,
+ recorded_at_timestamp: timestamp,
+ data: data
+ }
+ end
+
+ context 'with compatible data argument' do
+ let(:data) { ::Gitlab::Database::PostgresHll::Buckets.new(141 => 1, 56 => 1) }
+
+ it 'persists serialized data in Redis' do
+ time_period_name = 'weekly'
+
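+ # The key combines the metric name, human-readable period and recording timestamp; buckets are stored as JSON with an 80-hour TTL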
+ expect(described_class).to receive(:time_period_to_human_name).with(time_period).and_return(time_period_name)
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis).to receive(:set).with("#{metric_name}_#{time_period_name}-#{timestamp}", '{"141":1,"56":1}', ex: 80.hours)
+ end
+
+ described_class.save_aggregated_metrics(**method_params)
+ end
+
+ context 'error handling' do
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_raise(::Redis::CommandError)
+ end
+
+ it 'rescues and re-raises ::Redis::CommandError for development and test environments' do
+ expect { described_class.save_aggregated_metrics(**method_params) }.to raise_error ::Redis::CommandError
+ end
+
+ context 'for environments other than development' do
+ before do
+ stub_rails_env('production')
+ end
+
+ it 'rescues ::Redis::CommandError' do
+ expect { described_class.save_aggregated_metrics(**method_params) }.not_to raise_error
+ end
+ end
+ end
+ end
+
+ context 'with incompatible data argument' do
+ let(:data) { 1 }
+
+ context 'for environments other than development' do
+ before do
+ stub_rails_env('production')
+ end
+
+ it 'does not persist data in Redis' do
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis).not_to receive(:set)
+ end
+
+ described_class.save_aggregated_metrics(**method_params)
+ end
+ end
+
+ it 'raises error for development environment' do
+ expect { described_class.save_aggregated_metrics(**method_params) }.to raise_error(/Unsupported data type/)
+ end
+ end
+ end
+
+ describe '#time_period_to_human_name' do
+ it 'translates empty time period as all_time' do
+ expect(described_class.time_period_to_human_name({})).to eql 'all_time'
+ end
+
+ it 'translates time period not longer than 7 days as weekly', :aggregate_failures do
+ days_6_time_period = 6.days.ago..Date.current
+ days_7_time_period = 7.days.ago..Date.current
+
+ expect(described_class.time_period_to_human_name(column_name: days_6_time_period)).to eql 'weekly'
+ expect(described_class.time_period_to_human_name(column_name: days_7_time_period)).to eql 'weekly'
+ end
+
+ it 'translates time period longer than 7 days as monthly', :aggregate_failures do
+ days_8_time_period = 8.days.ago..Date.current
+ days_31_time_period = 31.days.ago..Date.current
+
+ expect(described_class.time_period_to_human_name(column_name: days_8_time_period)).to eql 'monthly'
+ expect(described_class.time_period_to_human_name(column_name: days_31_time_period)).to eql 'monthly'
+ end
+ end
end
diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb
index 1052d4cbacc..665eebdfd9e 100644
--- a/spec/lib/gitlab/utils_spec.rb
+++ b/spec/lib/gitlab/utils_spec.rb
@@ -116,8 +116,6 @@ RSpec.describe Gitlab::Utils do
end
describe '.ms_to_round_sec' do
- using RSpec::Parameterized::TableSyntax
-
where(:original, :expected) do
1999.8999 | 1.9999
12384 | 12.384
@@ -169,8 +167,6 @@ RSpec.describe Gitlab::Utils do
end
describe '.remove_line_breaks' do
- using RSpec::Parameterized::TableSyntax
-
where(:original, :expected) do
"foo\nbar\nbaz" | "foobarbaz"
"foo\r\nbar\r\nbaz" | "foobarbaz"
@@ -281,8 +277,6 @@ RSpec.describe Gitlab::Utils do
end
describe '.append_path' do
- using RSpec::Parameterized::TableSyntax
-
where(:host, :path, :result) do
'http://test/' | '/foo/bar' | 'http://test/foo/bar'
'http://test/' | '//foo/bar' | 'http://test/foo/bar'
@@ -393,8 +387,6 @@ RSpec.describe Gitlab::Utils do
end
describe ".safe_downcase!" do
- using RSpec::Parameterized::TableSyntax
-
where(:str, :result) do
"test".freeze | "test"
"Test".freeze | "test"
diff --git a/spec/lib/gitlab_spec.rb b/spec/lib/gitlab_spec.rb
index e1b8323eb8e..5f945d5b9fc 100644
--- a/spec/lib/gitlab_spec.rb
+++ b/spec/lib/gitlab_spec.rb
@@ -95,6 +95,26 @@ RSpec.describe Gitlab do
end
end
+ describe '.com' do
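+ # Gitlab.com yields the given block only when running on GitLab.com, returning the block's result; otherwise it returns nil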
+ subject { described_class.com { true } }
+
+ before do
+ allow(described_class).to receive(:com?).and_return(gl_com)
+ end
+
+ context 'when on GitLab.com' do
+ let(:gl_com) { true }
+
+ it { is_expected.to be true }
+ end
+
+ context 'when not on GitLab.com' do
+ let(:gl_com) { false }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
describe '.staging?' do
subject { described_class.staging? }
@@ -332,13 +352,13 @@ RSpec.describe Gitlab do
describe '.maintenance_mode?' do
it 'returns true when maintenance mode is enabled' do
- stub_application_setting(maintenance_mode: true)
+ stub_maintenance_mode_setting(true)
expect(described_class.maintenance_mode?).to eq(true)
end
it 'returns false when maintenance mode is disabled' do
- stub_application_setting(maintenance_mode: false)
+ stub_maintenance_mode_setting(false)
expect(described_class.maintenance_mode?).to eq(false)
end
diff --git a/spec/lib/peek/views/external_http_spec.rb b/spec/lib/peek/views/external_http_spec.rb
new file mode 100644
index 00000000000..cc1813db622
--- /dev/null
+++ b/spec/lib/peek/views/external_http_spec.rb
@@ -0,0 +1,191 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Peek::Views::ExternalHttp, :request_store do
+ subject { described_class.new }
+
+ let(:subscriber) { Gitlab::Metrics::Subscribers::ExternalHttp.new }
+
+ before do
+ allow(Gitlab::PerformanceBar).to receive(:enabled_for_request?).and_return(true)
+ end
+
+ let(:event_1) do
+ double(:event, payload: {
+ method: 'POST', code: "200", duration: 0.03,
+ scheme: 'https', host: 'gitlab.com', port: 80, path: '/api/v4/projects',
+ query: 'current=true'
+ })
+ end
+
+ let(:event_2) do
+ double(:event, payload: {
+ method: 'POST', duration: 1.3,
+ scheme: 'http', host: 'gitlab.com', port: 80, path: '/api/v4/projects/2/issues',
+ query: 'current=true',
+ exception_object: Net::ReadTimeout.new
+ })
+ end
+
+ let(:event_3) do
+ double(:event, payload: {
+ method: 'GET', code: "301", duration: 0.005,
+ scheme: 'http', host: 'gitlab.com', port: 80, path: '/api/v4/projects/2',
+ query: 'current=true',
+ proxy_host: 'proxy.gitlab.com', proxy_port: 8080
+ })
+ end
+
+ it 'returns no results' do
+ expect(subject.results).to eq(
+ calls: 0, details: [], duration: "0ms", warnings: []
+ )
+ end
+
+ it 'returns aggregated results' do
+ subscriber.request(event_1)
+ subscriber.request(event_2)
+ subscriber.request(event_3)
+
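+ # Event durations are recorded in seconds and reported in milliseconds (0.03 + 1.3 + 0.005 => 1335.00ms)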
+ results = subject.results
+ expect(results[:calls]).to eq(3)
+ expect(results[:duration]).to eq("1335.00ms")
+ expect(results[:details].count).to eq(3)
+
+ expected = [
+ {
+ duration: 30.0,
+ label: "POST https://gitlab.com:80/api/v4/projects?current=true",
+ code: "Response status: 200",
+ proxy: nil,
+ error: nil,
+ warnings: []
+ },
+ {
+ duration: 1300,
+ label: "POST http://gitlab.com:80/api/v4/projects/2/issues?current=true",
+ code: nil,
+ proxy: nil,
+ error: "Exception: Net::ReadTimeout",
+ warnings: ["1300.0 over 100"]
+ },
+ {
+ duration: 5.0,
+ label: "GET http://gitlab.com:80/api/v4/projects/2?current=true",
+ code: "Response status: 301",
+ proxy: nil,
+ error: nil,
+ warnings: []
+ }
+ ]
+
+ expect(
+ results[:details].map { |data| data.slice(:duration, :label, :code, :proxy, :error, :warnings) }
+ ).to match_array(expected)
+ end
+
+ context 'when the host is in IPv4 format' do
+ it 'displays IPv4 in the label' do
+ subscriber.request(
+ double(:event, payload: {
+ method: 'POST', code: "200", duration: 0.03,
+ scheme: 'https', host: '1.2.3.4', port: 80, path: '/api/v4/projects',
+ query: 'current=true'
+ })
+ )
+ expect(
+ subject.results[:details].map { |data| data.slice(:duration, :label, :code, :proxy, :error, :warnings) }
+ ).to match_array(
+ [
+ {
+ duration: 30.0,
+ label: "POST https://1.2.3.4:80/api/v4/projects?current=true",
+ code: "Response status: 200",
+ proxy: nil,
+ error: nil,
+ warnings: []
+ }
+ ]
+ )
+ end
+ end
+
+ context 'when the host is in IPv6 format' do
+ it 'displays IPv6 in the label' do
+ subscriber.request(
+ double(:event, payload: {
+ method: 'POST', code: "200", duration: 0.03,
+ scheme: 'https', host: '2606:4700:90:0:f22e:fbec:5bed:a9b9', port: 80, path: '/api/v4/projects',
+ query: 'current=true'
+ })
+ )
+ expect(
+ subject.results[:details].map { |data| data.slice(:duration, :label, :code, :proxy, :error, :warnings) }
+ ).to match_array(
+ [
+ {
+ duration: 30.0,
+ label: "POST https://[2606:4700:90:0:f22e:fbec:5bed:a9b9]:80/api/v4/projects?current=true",
+ code: "Response status: 200",
+ proxy: nil,
+ error: nil,
+ warnings: []
+ }
+ ]
+ )
+ end
+ end
+
+ context 'when the host is invalid' do
+ it 'displays unknown in the label' do
+ subscriber.request(
+ double(:event, payload: {
+ method: 'POST', code: "200", duration: 0.03,
+ scheme: 'https', host: '!@#%!@#%!@#%', port: 80, path: '/api/v4/projects',
+ query: 'current=true'
+ })
+ )
+ expect(
+ subject.results[:details].map { |data| data.slice(:duration, :label, :code, :proxy, :error, :warnings) }
+ ).to match_array(
+ [
+ {
+ duration: 30.0,
+ label: "POST unknown",
+ code: "Response status: 200",
+ proxy: nil,
+ error: nil,
+ warnings: []
+ }
+ ]
+ )
+ end
+ end
+
+ context 'when another URI component is invalid' do
+ it 'displays unknown in the label' do
+ subscriber.request(
+ double(:event, payload: {
+ method: 'POST', code: "200", duration: 0.03,
+ scheme: 'https', host: 'invalid', port: 'invalid', path: '/api/v4/projects',
+ query: 'current=true'
+ })
+ )
+ expect(
+ subject.results[:details].map { |data| data.slice(:duration, :label, :code, :proxy, :error, :warnings) }
+ ).to match_array(
+ [
+ {
+ duration: 30.0,
+ label: "POST unknown",
+ code: "Response status: 200",
+ proxy: nil,
+ error: nil,
+ warnings: []
+ }
+ ]
+ )
+ end
+ end
+end
diff --git a/spec/lib/release_highlights/validator/entry_spec.rb b/spec/lib/release_highlights/validator/entry_spec.rb
index 648356e63ba..da44938f165 100644
--- a/spec/lib/release_highlights/validator/entry_spec.rb
+++ b/spec/lib/release_highlights/validator/entry_spec.rb
@@ -80,7 +80,7 @@ RSpec.describe ReleaseHighlights::Validator::Entry do
subject.valid?
- expect(subject.errors[:packages].first).to include("must be one of", "Core", "Starter", "Premium", "Ultimate")
+ expect(subject.errors[:packages].first).to include("must be one of", "Free", "Premium", "Ultimate")
end
end
end
diff --git a/spec/lib/release_highlights/validator_spec.rb b/spec/lib/release_highlights/validator_spec.rb
index e68d9145dcd..a423e8cc5f6 100644
--- a/spec/lib/release_highlights/validator_spec.rb
+++ b/spec/lib/release_highlights/validator_spec.rb
@@ -70,7 +70,7 @@ RSpec.describe ReleaseHighlights::Validator do
---------------------------------------------------------
Validation failed for spec/fixtures/whats_new/invalid.yml
---------------------------------------------------------
- * Packages must be one of ["Core", "Starter", "Premium", "Ultimate"] (line 6)
+ * Packages must be one of ["Free", "Premium", "Ultimate"] (line 6)
MESSAGE
end
diff --git a/spec/lib/security/ci_configuration/sast_build_actions_spec.rb b/spec/lib/security/ci_configuration/sast_build_actions_spec.rb
new file mode 100644
index 00000000000..c8f9430eff9
--- /dev/null
+++ b/spec/lib/security/ci_configuration/sast_build_actions_spec.rb
@@ -0,0 +1,539 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Security::CiConfiguration::SastBuildActions do
+ let(:default_sast_values) do
+ { 'global' =>
+ [
+ { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/gitlab-org/security-products/analyzers', 'value' => 'registry.gitlab.com/gitlab-org/security-products/analyzers' }
+ ],
+ 'pipeline' =>
+ [
+ { 'field' => 'stage', 'defaultValue' => 'test', 'value' => 'test' },
+ { 'field' => 'SEARCH_MAX_DEPTH', 'defaultValue' => 4, 'value' => 4 },
+ { 'field' => 'SAST_ANALYZER_IMAGE_TAG', 'defaultValue' => 2, 'value' => 2 },
+ { 'field' => 'SAST_EXCLUDED_PATHS', 'defaultValue' => 'spec, test, tests, tmp', 'value' => 'spec, test, tests, tmp' }
+ ] }
+ end
+
+ let(:params) do
+ { 'global' =>
+ [
+ { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/gitlab-org/security-products/analyzers', 'value' => 'new_registry' }
+ ],
+ 'pipeline' =>
+ [
+ { 'field' => 'stage', 'defaultValue' => 'test', 'value' => 'security' },
+ { 'field' => 'SEARCH_MAX_DEPTH', 'defaultValue' => 4, 'value' => 1 },
+ { 'field' => 'SAST_ANALYZER_IMAGE_TAG', 'defaultValue' => 2, 'value' => 2 },
+ { 'field' => 'SAST_EXCLUDED_PATHS', 'defaultValue' => 'spec, test, tests, tmp', 'value' => 'spec,docs' }
+ ] }
+ end
+
+ let(:params_with_analyzer_info) do
+ params.merge( { 'analyzers' =>
+ [
+ {
+ 'name' => "bandit",
+ 'enabled' => false
+ },
+ {
+ 'name' => "brakeman",
+ 'enabled' => true,
+ 'variables' => [
+ { 'field' => "SAST_BRAKEMAN_LEVEL",
+ 'defaultValue' => "1",
+ 'value' => "2" }
+ ]
+ },
+ {
+ 'name' => "flawfinder",
+ 'enabled' => true,
+ 'variables' => [
+ { 'field' => "SAST_FLAWFINDER_LEVEL",
+ 'defaultValue' => "1",
+ 'value' => "1" }
+ ]
+ }
+ ] }
+ )
+ end
+
+ let(:params_with_all_analyzers_enabled) do
+ params.merge( { 'analyzers' =>
+ [
+ {
+ 'name' => "flawfinder",
+ 'enabled' => true
+ },
+ {
+ 'name' => "brakeman",
+ 'enabled' => true
+ }
+ ] }
+ )
+ end
+
+ context 'with existing .gitlab-ci.yml' do
+ let(:auto_devops_enabled) { false }
+
+ context 'sast has not been included' do
+ context 'template includes are an array' do
+ let(:gitlab_ci_content) { existing_gitlab_ci_and_template_array_without_sast }
+
+ subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
+
+ it 'generates the correct YML' do
+ expect(result.first[:action]).to eq('update')
+ expect(result.first[:content]).to eq(sast_yaml_two_includes)
+ end
+ end
+
+ context 'template include is not an array' do
+ let(:gitlab_ci_content) { existing_gitlab_ci_and_single_template_without_sast }
+
+ subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
+
+ it 'generates the correct YML' do
+ expect(result.first[:action]).to eq('update')
+ expect(result.first[:content]).to eq(sast_yaml_two_includes)
+ end
+
+ it 'reports defaults have been overwritten' do
+ expect(result.first[:default_values_overwritten]).to eq(true)
+ end
+ end
+ end
+
+ context 'sast template include is not an array' do
+ let(:gitlab_ci_content) { existing_gitlab_ci_and_single_template_with_sast_and_default_stage }
+
+ subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
+
+ it 'generates the correct YML' do
+ expect(result.first[:action]).to eq('update')
+ expect(result.first[:content]).to eq(sast_yaml_all_params)
+ end
+ end
+
+ context 'with default values' do
+ let(:params) { default_sast_values }
+ let(:gitlab_ci_content) { existing_gitlab_ci_and_single_template_with_sast_and_default_stage }
+
+ subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
+
+ it 'generates the correct YML' do
+ expect(result.first[:content]).to eq(sast_yaml_with_no_variables_set)
+ end
+
+ it 'reports defaults have not been overwritten' do
+ expect(result.first[:default_values_overwritten]).to eq(false)
+ end
+
+ context 'analyzer section' do
+ let(:gitlab_ci_content) { existing_gitlab_ci_and_single_template_with_sast_and_default_stage }
+
+ subject(:result) { described_class.new(auto_devops_enabled, params_with_analyzer_info, gitlab_ci_content).generate }
+
+ it 'generates the correct YML' do
+ expect(result.first[:content]).to eq(sast_yaml_with_no_variables_set_but_analyzers)
+ end
+
+ context 'analyzers are disabled' do
+ let(:gitlab_ci_content) { existing_gitlab_ci_and_single_template_with_sast_and_default_stage }
+
+ subject(:result) { described_class.new(auto_devops_enabled, params_with_analyzer_info, gitlab_ci_content).generate }
+
+ it 'writes SAST_EXCLUDED_ANALYZERS' do
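+ # bandit is part of the stubbed default analyzer set but disabled in params, so it is written to SAST_EXCLUDED_ANALYZERS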
+ stub_const('Security::CiConfiguration::SastBuildActions::SAST_DEFAULT_ANALYZERS', 'bandit, brakeman, flawfinder')
+
+ expect(result.first[:content]).to eq(sast_yaml_with_no_variables_set_but_analyzers)
+ end
+ end
+
+ context 'all analyzers are enabled' do
+ let(:gitlab_ci_content) { existing_gitlab_ci_and_single_template_with_sast_and_default_stage }
+
+ subject(:result) { described_class.new(auto_devops_enabled, params_with_all_analyzers_enabled, gitlab_ci_content).generate }
+
+ it 'does not write SAST_DEFAULT_ANALYZERS or SAST_EXCLUDED_ANALYZERS' do
+ stub_const('Security::CiConfiguration::SastBuildActions::SAST_DEFAULT_ANALYZERS', 'brakeman, flawfinder')
+
+ expect(result.first[:content]).to eq(sast_yaml_with_no_variables_set)
+ end
+ end
+ end
+ end
+
+ context 'with updated stage and SEARCH_MAX_DEPTH, and SECURE_ANALYZERS_PREFIX set to default' do
+ let(:params) do
+ { 'global' =>
+ [
+ { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/gitlab-org/security-products/analyzers', 'value' => 'registry.gitlab.com/gitlab-org/security-products/analyzers' }
+ ],
+ 'pipeline' =>
+ [
+ { 'field' => 'stage', 'defaultValue' => 'test', 'value' => 'brand_new_stage' },
+ { 'field' => 'SEARCH_MAX_DEPTH', 'defaultValue' => 4, 'value' => 5 },
+ { 'field' => 'SAST_ANALYZER_IMAGE_TAG', 'defaultValue' => 2, 'value' => 2 },
+ { 'field' => 'SAST_EXCLUDED_PATHS', 'defaultValue' => 'spec, test, tests, tmp', 'value' => 'spec,docs' }
+ ] }
+ end
+
+ let(:gitlab_ci_content) { existing_gitlab_ci }
+
+ subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
+
+ it 'generates the correct YML' do
+ expect(result.first[:action]).to eq('update')
+ expect(result.first[:content]).to eq(sast_yaml_updated_stage)
+ end
+ end
+
+ context 'with no existing variables' do
+ let(:gitlab_ci_content) { existing_gitlab_ci_with_no_variables }
+
+ subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
+
+ it 'generates the correct YML' do
+ expect(result.first[:action]).to eq('update')
+ expect(result.first[:content]).to eq(sast_yaml_variable_section_added)
+ end
+ end
+
+ context 'with no existing sast config' do
+ let(:gitlab_ci_content) { existing_gitlab_ci_with_no_sast_section }
+
+ subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
+
+ it 'generates the correct YML' do
+ expect(result.first[:action]).to eq('update')
+ expect(result.first[:content]).to eq(sast_yaml_sast_section_added)
+ end
+ end
+
+ context 'with no existing sast variables' do
+ let(:gitlab_ci_content) { existing_gitlab_ci_with_no_sast_variables }
+
+ subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
+
+ it 'generates the correct YML' do
+ expect(result.first[:action]).to eq('update')
+ expect(result.first[:content]).to eq(sast_yaml_sast_variables_section_added)
+ end
+ end
+
+ def existing_gitlab_ci_and_template_array_without_sast
+ { "stages" => %w(test security),
+ "variables" => { "RANDOM" => "make sure this persists", "SECURE_ANALYZERS_PREFIX" => "localhost:5000/analyzers" },
+ "sast" => { "variables" => { "SAST_ANALYZER_IMAGE_TAG" => 2, "SEARCH_MAX_DEPTH" => 1 }, "stage" => "security" },
+ "include" => [{ "template" => "existing.yml" }] }
+ end
+
+ def existing_gitlab_ci_and_single_template_with_sast_and_default_stage
+ { "stages" => %w(test),
+ "variables" => { "SECURE_ANALYZERS_PREFIX" => "localhost:5000/analyzers" },
+ "sast" => { "variables" => { "SAST_ANALYZER_IMAGE_TAG" => 2, "SEARCH_MAX_DEPTH" => 1 }, "stage" => "test" },
+ "include" => { "template" => "Security/SAST.gitlab-ci.yml" } }
+ end
+
+ def existing_gitlab_ci_and_single_template_without_sast
+ { "stages" => %w(test security),
+ "variables" => { "RANDOM" => "make sure this persists", "SECURE_ANALYZERS_PREFIX" => "localhost:5000/analyzers" },
+ "sast" => { "variables" => { "SAST_ANALYZER_IMAGE_TAG" => 2, "SEARCH_MAX_DEPTH" => 1 }, "stage" => "security" },
+ "include" => { "template" => "existing.yml" } }
+ end
+
+ def existing_gitlab_ci_with_no_variables
+ { "stages" => %w(test security),
+ "sast" => { "variables" => { "SAST_ANALYZER_IMAGE_TAG" => 2, "SEARCH_MAX_DEPTH" => 1 }, "stage" => "security" },
+ "include" => [{ "template" => "Security/SAST.gitlab-ci.yml" }] }
+ end
+
+ def existing_gitlab_ci_with_no_sast_section
+ { "stages" => %w(test security),
+ "variables" => { "RANDOM" => "make sure this persists", "SECURE_ANALYZERS_PREFIX" => "localhost:5000/analyzers" },
+ "include" => [{ "template" => "Security/SAST.gitlab-ci.yml" }] }
+ end
+
+ def existing_gitlab_ci_with_no_sast_variables
+ { "stages" => %w(test security),
+ "variables" => { "RANDOM" => "make sure this persists", "SECURE_ANALYZERS_PREFIX" => "localhost:5000/analyzers" },
+ "sast" => { "stage" => "security" },
+ "include" => [{ "template" => "Security/SAST.gitlab-ci.yml" }] }
+ end
+
+ def existing_gitlab_ci
+ { "stages" => %w(test security),
+ "variables" => { "RANDOM" => "make sure this persists", "SECURE_ANALYZERS_PREFIX" => "bad_prefix" },
+ "sast" => { "variables" => { "SAST_ANALYZER_IMAGE_TAG" => 2, "SEARCH_MAX_DEPTH" => 1 }, "stage" => "security" },
+ "include" => [{ "template" => "Security/SAST.gitlab-ci.yml" }] }
+ end
+ end
+
+ context 'with no .gitlab-ci.yml' do
+ let(:gitlab_ci_content) { nil }
+
+ context 'autodevops disabled' do
+ let(:auto_devops_enabled) { false }
+
+ context 'with one empty parameter' do
+ let(:params) do
+ { 'global' =>
+ [
+ { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/gitlab-org/security-products/analyzers', 'value' => '' }
+ ] }
+ end
+
+ subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
+
+ it 'generates the correct YML' do
+ expect(result.first[:content]).to eq(sast_yaml_with_no_variables_set)
+ end
+ end
+
+ context 'with all parameters' do
+ subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
+
+ it 'generates the correct YML' do
+ expect(result.first[:content]).to eq(sast_yaml_all_params)
+ end
+ end
+ end
+
+ context 'with autodevops enabled' do
+ let(:auto_devops_enabled) { true }
+
+ subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
+
+ before do
+ allow_next_instance_of(described_class) do |sast_build_actions|
+ allow(sast_build_actions).to receive(:auto_devops_stages).and_return(fast_auto_devops_stages)
+ end
+ end
+
+ it 'generates the correct YML' do
+ expect(result.first[:content]).to eq(auto_devops_with_custom_stage)
+ end
+ end
+ end
+
+ describe 'Security::CiConfiguration::SastBuildActions::SAST_DEFAULT_ANALYZERS' do
+ subject(:variable) { Security::CiConfiguration::SastBuildActions::SAST_DEFAULT_ANALYZERS }
+
+ it 'is sorted alphabetically' do
+ sorted_variable = Security::CiConfiguration::SastBuildActions::SAST_DEFAULT_ANALYZERS
+ .split(',')
+ .map(&:strip)
+ .sort
+ .join(', ')
+
+ expect(variable).to eq(sorted_variable)
+ end
+ end
+
+ # stubbing this method allows this spec file to use fast_spec_helper
+ def fast_auto_devops_stages
+ auto_devops_template = YAML.safe_load(File.read('lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml'))
+ auto_devops_template['stages']
+ end
+
+ def sast_yaml_with_no_variables_set_but_analyzers
+ <<-CI_YML.strip_heredoc
+ # You can override the included template(s) by including variable overrides
+ # See https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # Note that environment variables can be set in several places
+ # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables
+ stages:
+ - test
+ sast:
+ variables:
+ SAST_EXCLUDED_ANALYZERS: bandit
+ SAST_BRAKEMAN_LEVEL: '2'
+ stage: test
+ include:
+ - template: Security/SAST.gitlab-ci.yml
+ CI_YML
+ end
+
+ def sast_yaml_with_no_variables_set
+ <<-CI_YML.strip_heredoc
+ # You can override the included template(s) by including variable overrides
+ # See https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # Note that environment variables can be set in several places
+ # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables
+ stages:
+ - test
+ sast:
+ stage: test
+ include:
+ - template: Security/SAST.gitlab-ci.yml
+ CI_YML
+ end
+
+ def sast_yaml_all_params
+ <<-CI_YML.strip_heredoc
+ # You can override the included template(s) by including variable overrides
+ # See https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # Note that environment variables can be set in several places
+ # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables
+ stages:
+ - test
+ - security
+ variables:
+ SECURE_ANALYZERS_PREFIX: new_registry
+ sast:
+ variables:
+ SAST_EXCLUDED_PATHS: spec,docs
+ SEARCH_MAX_DEPTH: 1
+ stage: security
+ include:
+ - template: Security/SAST.gitlab-ci.yml
+ CI_YML
+ end
+
+ def auto_devops_with_custom_stage
+ <<-CI_YML.strip_heredoc
+ # You can override the included template(s) by including variable overrides
+ # See https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # Note that environment variables can be set in several places
+ # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables
+ stages:
+ - build
+ - test
+ - deploy
+ - review
+ - dast
+ - staging
+ - canary
+ - production
+ - incremental rollout 10%
+ - incremental rollout 25%
+ - incremental rollout 50%
+ - incremental rollout 100%
+ - performance
+ - cleanup
+ - security
+ variables:
+ SECURE_ANALYZERS_PREFIX: new_registry
+ sast:
+ variables:
+ SAST_EXCLUDED_PATHS: spec,docs
+ SEARCH_MAX_DEPTH: 1
+ stage: security
+ include:
+ - template: Auto-DevOps.gitlab-ci.yml
+ CI_YML
+ end
+
+ def sast_yaml_two_includes
+ <<-CI_YML.strip_heredoc
+ # You can override the included template(s) by including variable overrides
+ # See https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # Note that environment variables can be set in several places
+ # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables
+ stages:
+ - test
+ - security
+ variables:
+ RANDOM: make sure this persists
+ SECURE_ANALYZERS_PREFIX: new_registry
+ sast:
+ variables:
+ SAST_EXCLUDED_PATHS: spec,docs
+ SEARCH_MAX_DEPTH: 1
+ stage: security
+ include:
+ - template: existing.yml
+ - template: Security/SAST.gitlab-ci.yml
+ CI_YML
+ end
+
+ def sast_yaml_variable_section_added
+ <<-CI_YML.strip_heredoc
+ # You can override the included template(s) by including variable overrides
+ # See https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # Note that environment variables can be set in several places
+ # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables
+ stages:
+ - test
+ - security
+ sast:
+ variables:
+ SAST_EXCLUDED_PATHS: spec,docs
+ SEARCH_MAX_DEPTH: 1
+ stage: security
+ include:
+ - template: Security/SAST.gitlab-ci.yml
+ variables:
+ SECURE_ANALYZERS_PREFIX: new_registry
+ CI_YML
+ end
+
+ def sast_yaml_sast_section_added
+ <<-CI_YML.strip_heredoc
+ # You can override the included template(s) by including variable overrides
+ # See https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # Note that environment variables can be set in several places
+ # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables
+ stages:
+ - test
+ - security
+ variables:
+ RANDOM: make sure this persists
+ SECURE_ANALYZERS_PREFIX: new_registry
+ include:
+ - template: Security/SAST.gitlab-ci.yml
+ sast:
+ variables:
+ SAST_EXCLUDED_PATHS: spec,docs
+ SEARCH_MAX_DEPTH: 1
+ stage: security
+ CI_YML
+ end
+
+ def sast_yaml_sast_variables_section_added
+ <<-CI_YML.strip_heredoc
+ # You can override the included template(s) by including variable overrides
+ # See https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # Note that environment variables can be set in several places
+ # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables
+ stages:
+ - test
+ - security
+ variables:
+ RANDOM: make sure this persists
+ SECURE_ANALYZERS_PREFIX: new_registry
+ sast:
+ stage: security
+ variables:
+ SAST_EXCLUDED_PATHS: spec,docs
+ SEARCH_MAX_DEPTH: 1
+ include:
+ - template: Security/SAST.gitlab-ci.yml
+ CI_YML
+ end
+
+ def sast_yaml_updated_stage
+ <<-CI_YML.strip_heredoc
+ # You can override the included template(s) by including variable overrides
+ # See https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # Note that environment variables can be set in several places
+ # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables
+ stages:
+ - test
+ - security
+ - brand_new_stage
+ variables:
+ RANDOM: make sure this persists
+ sast:
+ variables:
+ SAST_EXCLUDED_PATHS: spec,docs
+ SEARCH_MAX_DEPTH: 5
+ stage: brand_new_stage
+ include:
+ - template: Security/SAST.gitlab-ci.yml
+ CI_YML
+ end
+end
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index 53ce200eed5..110a13c2f54 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -905,6 +905,19 @@ RSpec.describe Notify do
is_expected.to have_body_text project.full_name
is_expected.to have_body_text project_member.human_access.downcase
is_expected.to have_body_text project_member.invite_token
+ is_expected.to have_link('Join now', href: invite_url(project_member.invite_token, invite_type: Members::InviteEmailExperiment::INVITE_TYPE))
+ end
+
+ it 'does not show the invitation text for the avatar variant', :experiment do
+ stub_experiments('members/invite_email': :avatar)
+
+ is_expected.not_to have_content('You are invited!')
+ end
+
+ it 'shows the invitation text for the control group' do
+ stub_experiments('members/invite_email': :control)
+
+ is_expected.to have_content('You are invited!')
end
end
diff --git a/spec/migrations/20201112130710_schedule_remove_duplicate_vulnerabilities_findings_spec.rb b/spec/migrations/20201112130710_schedule_remove_duplicate_vulnerabilities_findings_spec.rb
new file mode 100644
index 00000000000..ff27bdcf12d
--- /dev/null
+++ b/spec/migrations/20201112130710_schedule_remove_duplicate_vulnerabilities_findings_spec.rb
@@ -0,0 +1,140 @@
+# frozen_string_literal: true
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20201112130710_schedule_remove_duplicate_vulnerabilities_findings.rb')
+
+RSpec.describe ScheduleRemoveDuplicateVulnerabilitiesFindings, :migration do
+ let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
+ let(:users) { table(:users) }
+ let(:user) { create_user! }
+ let(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) }
+ let(:scanners) { table(:vulnerability_scanners) }
+ let!(:scanner) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
+ let!(:scanner2) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') }
+ let!(:scanner3) { scanners.create!(project_id: project.id, external_id: 'test 3', name: 'test scanner 3') }
+ let!(:unrelated_scanner) { scanners.create!(project_id: project.id, external_id: 'unrelated_scanner', name: 'unrelated scanner') }
+ let(:vulnerabilities) { table(:vulnerabilities) }
+ let(:vulnerability_findings) { table(:vulnerability_occurrences) }
+ let(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
+ let(:vulnerability_identifier) do
+ vulnerability_identifiers.create!(
+ project_id: project.id,
+ external_type: 'vulnerability-identifier',
+ external_id: 'vulnerability-identifier',
+ fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a',
+ name: 'vulnerability identifier')
+ end
+
+ let!(:first_finding) do
+ create_finding!(
+ uuid: "test1",
+ vulnerability_id: nil,
+ report_type: 0,
+ location_fingerprint: '2bda3014914481791847d8eca38d1a8d13b6ad76',
+ primary_identifier_id: vulnerability_identifier.id,
+ scanner_id: scanner.id,
+ project_id: project.id
+ )
+ end
+
+ let!(:first_duplicate) do
+ create_finding!(
+ uuid: "test2",
+ vulnerability_id: nil,
+ report_type: 0,
+ location_fingerprint: '2bda3014914481791847d8eca38d1a8d13b6ad76',
+ primary_identifier_id: vulnerability_identifier.id,
+ scanner_id: scanner2.id,
+ project_id: project.id
+ )
+ end
+
+ let!(:second_duplicate) do
+ create_finding!(
+ uuid: "test3",
+ vulnerability_id: nil,
+ report_type: 0,
+ location_fingerprint: '2bda3014914481791847d8eca38d1a8d13b6ad76',
+ primary_identifier_id: vulnerability_identifier.id,
+ scanner_id: scanner3.id,
+ project_id: project.id
+ )
+ end
+
+ let!(:unrelated_finding) do
+ create_finding!(
+ uuid: "unreleated_finding",
+ vulnerability_id: nil,
+ report_type: 1,
+ location_fingerprint: 'random_location_fingerprint',
+ primary_identifier_id: vulnerability_identifier.id,
+ scanner_id: unrelated_scanner.id,
+ project_id: project.id
+ )
+ end
+
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ end
+
+ around do |example|
+ freeze_time { Sidekiq::Testing.fake! { example.run } }
+ end
+
+ it 'schedules background migration' do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(4)
+ expect(described_class::MIGRATION).to be_scheduled_migration(first_finding.id, first_finding.id)
+ expect(described_class::MIGRATION).to be_scheduled_migration(first_duplicate.id, first_duplicate.id)
+ expect(described_class::MIGRATION).to be_scheduled_migration(second_duplicate.id, second_duplicate.id)
+ expect(described_class::MIGRATION).to be_scheduled_migration(unrelated_finding.id, unrelated_finding.id)
+ end
+
+ private
+
+ def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
+ vulnerabilities.create!(
+ project_id: project_id,
+ author_id: author_id,
+ title: title,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type
+ )
+ end
+
+ # rubocop:disable Metrics/ParameterLists
+ def create_finding!(
+ vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:,
+ name: "test", severity: 7, confidence: 7, report_type: 0,
+ project_fingerprint: '123qweasdzxc', location_fingerprint: 'test',
+ metadata_version: 'test', raw_metadata: 'test', uuid: 'test')
+ vulnerability_findings.create!(
+ vulnerability_id: vulnerability_id,
+ project_id: project_id,
+ name: name,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type,
+ project_fingerprint: project_fingerprint,
+ scanner_id: scanner_id,
+ primary_identifier_id: primary_identifier_id,
+ location_fingerprint: location_fingerprint,
+ metadata_version: metadata_version,
+ raw_metadata: raw_metadata,
+ uuid: uuid
+ )
+ end
+ # rubocop:enable Metrics/ParameterLists
+
+ def create_user!(name: "Example User", email: "user@example.com", user_type: nil, created_at: Time.now, confirmed_at: Time.now)
+ users.create!(
+ name: name,
+ email: email,
+ username: name,
+ projects_limit: 0,
+ user_type: user_type,
+ confirmed_at: confirmed_at
+ )
+ end
+end
diff --git a/spec/migrations/20210119122354_alter_vsa_issue_first_mentioned_in_commit_value_spec.rb b/spec/migrations/20210119122354_alter_vsa_issue_first_mentioned_in_commit_value_spec.rb
new file mode 100644
index 00000000000..33b2e009c8d
--- /dev/null
+++ b/spec/migrations/20210119122354_alter_vsa_issue_first_mentioned_in_commit_value_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20210119122354_alter_vsa_issue_first_mentioned_in_commit_value.rb')
+
+RSpec.describe AlterVsaIssueFirstMentionedInCommitValue, schema: 20210114033715 do
+ let(:group_stages) { table(:analytics_cycle_analytics_group_stages) }
+ let(:value_streams) { table(:analytics_cycle_analytics_group_value_streams) }
+ let(:namespaces) { table(:namespaces) }
+
+ let(:namespace) { namespaces.create!(id: 1, name: 'group', path: 'group') }
+ let(:value_stream) { value_streams.create!(name: 'test', group_id: namespace.id) }
+
+ let!(:stage_1) { group_stages.create!(group_value_stream_id: value_stream.id, group_id: namespace.id, name: 'stage 1', start_event_identifier: described_class::ISSUE_FIRST_MENTIONED_IN_COMMIT_EE, end_event_identifier: 1) }
+ let!(:stage_2) { group_stages.create!(group_value_stream_id: value_stream.id, group_id: namespace.id, name: 'stage 2', start_event_identifier: 2, end_event_identifier: described_class::ISSUE_FIRST_MENTIONED_IN_COMMIT_EE) }
+ let!(:stage_3) { group_stages.create!(group_value_stream_id: value_stream.id, group_id: namespace.id, name: 'stage 3', start_event_identifier: described_class::ISSUE_FIRST_MENTIONED_IN_COMMIT_FOSS, end_event_identifier: 3) }
+
+ describe '#up' do
+ it 'changes the EE specific identifier values to the FOSS version' do
+ migrate!
+
+ expect(stage_1.reload.start_event_identifier).to eq(described_class::ISSUE_FIRST_MENTIONED_IN_COMMIT_FOSS)
+ expect(stage_2.reload.end_event_identifier).to eq(described_class::ISSUE_FIRST_MENTIONED_IN_COMMIT_FOSS)
+ end
+
+ it 'does not change irrelevant records' do
+ expect { migrate! }.not_to change { stage_3.reload }
+ end
+ end
+end
diff --git a/spec/migrations/add_has_external_issue_tracker_trigger_spec.rb b/spec/migrations/add_has_external_issue_tracker_trigger_spec.rb
new file mode 100644
index 00000000000..72983c7cfbf
--- /dev/null
+++ b/spec/migrations/add_has_external_issue_tracker_trigger_spec.rb
@@ -0,0 +1,164 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe AddHasExternalIssueTrackerTrigger do
+ let(:migration) { described_class.new }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:services) { table(:services) }
+
+ before do
+ @namespace = namespaces.create!(name: 'foo', path: 'foo')
+ @project = projects.create!(namespace_id: @namespace.id)
+ end
+
+ describe '#up' do
+ before do
+ migrate!
+ end
+
+ describe 'INSERT trigger' do
+ it 'sets `has_external_issue_tracker` to true when an active `issue_tracker` is inserted' do
+ expect do
+ services.create!(category: 'issue_tracker', active: true, project_id: @project.id)
+ end.to change { @project.reload.has_external_issue_tracker }.to(true)
+ end
+
+ it 'does not set `has_external_issue_tracker` to true when service is for a different project' do
+ different_project = projects.create!(namespace_id: @namespace.id)
+
+ expect do
+ services.create!(category: 'issue_tracker', active: true, project_id: different_project.id)
+ end.not_to change { @project.reload.has_external_issue_tracker }
+ end
+
+ it 'does not set `has_external_issue_tracker` to true when an inactive `issue_tracker` is inserted' do
+ expect do
+ services.create!(category: 'issue_tracker', active: false, project_id: @project.id)
+ end.not_to change { @project.reload.has_external_issue_tracker }
+ end
+
+ it 'does not set `has_external_issue_tracker` to true when a non-`issue tracker` active service is inserted' do
+ expect do
+ services.create!(category: 'my_type', active: true, project_id: @project.id)
+ end.not_to change { @project.reload.has_external_issue_tracker }
+ end
+ end
+
+ describe 'UPDATE trigger' do
+ it 'sets `has_external_issue_tracker` to true when `issue_tracker` is made active' do
+ service = services.create!(category: 'issue_tracker', active: false, project_id: @project.id)
+
+ expect do
+ service.update!(active: true)
+ end.to change { @project.reload.has_external_issue_tracker }.to(true)
+ end
+
+ it 'sets `has_external_issue_tracker` to false when `issue_tracker` is made inactive' do
+ service = services.create!(category: 'issue_tracker', active: true, project_id: @project.id)
+
+ expect do
+ service.update!(active: false)
+ end.to change { @project.reload.has_external_issue_tracker }.to(false)
+ end
+
+ it 'sets `has_external_issue_tracker` to false when `issue_tracker` is made inactive, and an inactive `issue_tracker` exists' do
+ services.create!(category: 'issue_tracker', active: false, project_id: @project.id)
+ service = services.create!(category: 'issue_tracker', active: true, project_id: @project.id)
+
+ expect do
+ service.update!(active: false)
+ end.to change { @project.reload.has_external_issue_tracker }.to(false)
+ end
+
+ it 'does not change `has_external_issue_tracker` when `issue_tracker` is made inactive, if an active `issue_tracker` exists' do
+ services.create!(category: 'issue_tracker', active: true, project_id: @project.id)
+ service = services.create!(category: 'issue_tracker', active: true, project_id: @project.id)
+
+ expect do
+ service.update!(active: false)
+ end.not_to change { @project.reload.has_external_issue_tracker }
+ end
+
+ it 'does not change `has_external_issue_tracker` when service is for a different project' do
+ different_project = projects.create!(namespace_id: @namespace.id)
+ service = services.create!(category: 'issue_tracker', active: false, project_id: different_project.id)
+
+ expect do
+ service.update!(active: true)
+ end.not_to change { @project.reload.has_external_issue_tracker }
+ end
+ end
+
+ describe 'DELETE trigger' do
+ it 'sets `has_external_issue_tracker` to false when `issue_tracker` is deleted' do
+ service = services.create!(category: 'issue_tracker', active: true, project_id: @project.id)
+
+ expect do
+ service.delete
+ end.to change { @project.reload.has_external_issue_tracker }.to(false)
+ end
+
+ it 'sets `has_external_issue_tracker` to false when `issue_tracker` is deleted, if an inactive `issue_tracker` still exists' do
+ services.create!(category: 'issue_tracker', active: false, project_id: @project.id)
+ service = services.create!(category: 'issue_tracker', active: true, project_id: @project.id)
+
+ expect do
+ service.delete
+ end.to change { @project.reload.has_external_issue_tracker }.to(false)
+ end
+
+ it 'does not change `has_external_issue_tracker` when `issue_tracker` is deleted, if an active `issue_tracker` still exists' do
+ services.create!(category: 'issue_tracker', active: true, project_id: @project.id)
+ service = services.create!(category: 'issue_tracker', active: true, project_id: @project.id)
+
+ expect do
+ service.delete
+ end.not_to change { @project.reload.has_external_issue_tracker }
+ end
+
+ it 'does not change `has_external_issue_tracker` when service is for a different project' do
+ different_project = projects.create!(namespace_id: @namespace.id)
+ service = services.create!(category: 'issue_tracker', active: true, project_id: different_project.id)
+
+ expect do
+ service.delete
+ end.not_to change { @project.reload.has_external_issue_tracker }
+ end
+ end
+ end
+
+ describe '#down' do
+ before do
+ migration.up
+ migration.down
+ end
+
+ it 'drops the INSERT trigger' do
+ expect do
+ services.create!(category: 'issue_tracker', active: true, project_id: @project.id)
+ end.not_to change { @project.reload.has_external_issue_tracker }
+ end
+
+ it 'drops the UPDATE trigger' do
+ service = services.create!(category: 'issue_tracker', active: false, project_id: @project.id)
+ @project.update!(has_external_issue_tracker: false)
+
+ expect do
+ service.update!(active: true)
+ end.not_to change { @project.reload.has_external_issue_tracker }
+ end
+
+ it 'drops the DELETE trigger' do
+ service = services.create!(category: 'issue_tracker', active: true, project_id: @project.id)
+ @project.update!(has_external_issue_tracker: true)
+
+ expect do
+ service.delete
+ end.not_to change { @project.reload.has_external_issue_tracker }
+ end
+ end
+end
diff --git a/spec/migrations/encrypt_feature_flags_clients_tokens_spec.rb b/spec/migrations/encrypt_feature_flags_clients_tokens_spec.rb
index ad83119f324..c705515ce98 100644
--- a/spec/migrations/encrypt_feature_flags_clients_tokens_spec.rb
+++ b/spec/migrations/encrypt_feature_flags_clients_tokens_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe EncryptFeatureFlagsClientsTokens do
let(:feature_flags_clients) { table(:operations_feature_flags_clients) }
let(:projects) { table(:projects) }
let(:plaintext) { "secret-token" }
- let(:ciphertext) { Gitlab::CryptoHelper.aes256_gcm_encrypt(plaintext) }
+ let(:ciphertext) { Gitlab::CryptoHelper.aes256_gcm_encrypt(plaintext, nonce: Gitlab::CryptoHelper::AES256_GCM_IV_STATIC) }
describe '#up' do
it 'keeps plaintext token the same and populates token_encrypted if not present' do
diff --git a/spec/migrations/schedule_migrate_security_scans_spec.rb b/spec/migrations/schedule_migrate_security_scans_spec.rb
index 61b14f239ae..eb86a910611 100644
--- a/spec/migrations/schedule_migrate_security_scans_spec.rb
+++ b/spec/migrations/schedule_migrate_security_scans_spec.rb
@@ -3,7 +3,6 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200217225719_schedule_migrate_security_scans.rb')
-# rubocop: disable RSpec/FactoriesInMigrationSpecs
RSpec.describe ScheduleMigrateSecurityScans, :sidekiq do
let(:migration) { described_class.new }
let(:namespaces) { table(:namespaces) }
@@ -13,7 +12,7 @@ RSpec.describe ScheduleMigrateSecurityScans, :sidekiq do
let(:namespace) { namespaces.create!(name: "foo", path: "bar") }
let(:project) { projects.create!(namespace_id: namespace.id) }
- let(:build) { builds.create! }
+ let(:job) { builds.create! }
before do
stub_const("#{described_class.name}::BATCH_SIZE", 1)
@@ -35,8 +34,8 @@ RSpec.describe ScheduleMigrateSecurityScans, :sidekiq do
end
context 'has security job artifacts' do
- let!(:job_artifact_1) { job_artifacts.create!(project_id: project.id, job_id: build.id, file_type: 5) }
- let!(:job_artifact_2) { job_artifacts.create!(project_id: project.id, job_id: build.id, file_type: 8) }
+ let!(:job_artifact_1) { job_artifacts.create!(project_id: project.id, job_id: job.id, file_type: 5) }
+ let!(:job_artifact_2) { job_artifacts.create!(project_id: project.id, job_id: job.id, file_type: 8) }
it 'schedules migration of security scans' do
Sidekiq::Testing.fake! do
@@ -52,8 +51,8 @@ RSpec.describe ScheduleMigrateSecurityScans, :sidekiq do
end
context 'has non-security job artifacts' do
- let!(:job_artifact_1) { job_artifacts.create!(project_id: project.id, job_id: build.id, file_type: 4) }
- let!(:job_artifact_2) { job_artifacts.create!(project_id: project.id, job_id: build.id, file_type: 9) }
+ let!(:job_artifact_1) { job_artifacts.create!(project_id: project.id, job_id: job.id, file_type: 4) }
+ let!(:job_artifact_2) { job_artifacts.create!(project_id: project.id, job_id: job.id, file_type: 9) }
it 'schedules migration of security scans' do
Sidekiq::Testing.fake! do
diff --git a/spec/models/active_session_spec.rb b/spec/models/active_session_spec.rb
index f0bae3f29c0..51435cc4342 100644
--- a/spec/models/active_session_spec.rb
+++ b/spec/models/active_session_spec.rb
@@ -358,7 +358,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
it 'calls .destroy_sessions' do
expect(ActiveSession).to(
receive(:destroy_sessions)
- .with(anything, user, [active_session.public_id, rack_session.public_id, rack_session.private_id]))
+ .with(anything, user, [encrypted_active_session_id, rack_session.public_id, rack_session.private_id]))
subject
end
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 4755d700d72..bb2e7398195 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -635,28 +635,28 @@ RSpec.describe ApplicationSetting do
end
end
- describe '#asset_proxy_whitelist' do
+ describe '#asset_proxy_allowlist' do
context 'when given an Array' do
it 'sets the domains and adds current running host' do
- setting.asset_proxy_whitelist = ['example.com', 'assets.example.com']
- expect(setting.asset_proxy_whitelist).to eq(['example.com', 'assets.example.com', 'localhost'])
+ setting.asset_proxy_allowlist = ['example.com', 'assets.example.com']
+ expect(setting.asset_proxy_allowlist).to eq(['example.com', 'assets.example.com', 'localhost'])
end
end
context 'when given a String' do
it 'sets multiple domains with spaces' do
- setting.asset_proxy_whitelist = 'example.com *.example.com'
- expect(setting.asset_proxy_whitelist).to eq(['example.com', '*.example.com', 'localhost'])
+ setting.asset_proxy_allowlist = 'example.com *.example.com'
+ expect(setting.asset_proxy_allowlist).to eq(['example.com', '*.example.com', 'localhost'])
end
it 'sets multiple domains with newlines and a space' do
- setting.asset_proxy_whitelist = "example.com\n *.example.com"
- expect(setting.asset_proxy_whitelist).to eq(['example.com', '*.example.com', 'localhost'])
+ setting.asset_proxy_allowlist = "example.com\n *.example.com"
+ expect(setting.asset_proxy_allowlist).to eq(['example.com', '*.example.com', 'localhost'])
end
it 'sets multiple domains with commas' do
- setting.asset_proxy_whitelist = "example.com, *.example.com"
- expect(setting.asset_proxy_whitelist).to eq(['example.com', '*.example.com', 'localhost'])
+ setting.asset_proxy_allowlist = "example.com, *.example.com"
+ expect(setting.asset_proxy_allowlist).to eq(['example.com', '*.example.com', 'localhost'])
end
end
end
diff --git a/spec/models/bulk_imports/entity_spec.rb b/spec/models/bulk_imports/entity_spec.rb
index 0732b671729..e5ab96ca514 100644
--- a/spec/models/bulk_imports/entity_spec.rb
+++ b/spec/models/bulk_imports/entity_spec.rb
@@ -81,6 +81,37 @@ RSpec.describe BulkImports::Entity, type: :model do
expect(entity.errors).to include(:parent)
end
end
+
+ context 'validate destination namespace of a group_entity' do
+ it 'is invalid if destination namespace is the source namespace' do
+ group_a = create(:group, path: 'group_a')
+
+ entity = build(
+ :bulk_import_entity,
+ :group_entity,
+ source_full_path: group_a.full_path,
+ destination_namespace: group_a.full_path
+ )
+
+ expect(entity).not_to be_valid
+ expect(entity.errors).to include(:destination_namespace)
+ end
+
+ it 'is invalid if destination namespace is a descendant of the source' do
+ group_a = create(:group, path: 'group_a')
+ group_b = create(:group, parent: group_a, path: 'group_b')
+
+ entity = build(
+ :bulk_import_entity,
+ :group_entity,
+ source_full_path: group_a.full_path,
+ destination_namespace: group_b.full_path
+ )
+
+ expect(entity).not_to be_valid
+ expect(entity.errors).to include(:destination_namespace)
+ end
+ end
end
describe "#update_tracker_for" do
diff --git a/spec/models/ci/build_dependencies_spec.rb b/spec/models/ci/build_dependencies_spec.rb
index c5f56dbe5bc..e343ec0e698 100644
--- a/spec/models/ci/build_dependencies_spec.rb
+++ b/spec/models/ci/build_dependencies_spec.rb
@@ -18,6 +18,10 @@ RSpec.describe Ci::BuildDependencies do
let!(:rubocop_test) { create(:ci_build, pipeline: pipeline, name: 'rubocop', stage_idx: 1, stage: 'test') }
let!(:staging) { create(:ci_build, pipeline: pipeline, name: 'staging', stage_idx: 2, stage: 'deploy') }
+ before do
+ stub_feature_flags(ci_validate_build_dependencies_override: false)
+ end
+
describe '#local' do
subject { described_class.new(job).local }
@@ -360,4 +364,28 @@ RSpec.describe Ci::BuildDependencies do
expect(subject).to contain_exactly(1, 2, 3, 4)
end
end
+
+ describe '#valid?' do
+ subject { described_class.new(job).valid? }
+
+ let(:job) { rspec_test }
+
+ it { is_expected.to eq(true) }
+
+ context 'when a local dependency is invalid' do
+ before do
+ build.update_column(:erased_at, Time.current)
+ end
+
+ it { is_expected.to eq(false) }
+
+ context 'when ci_validate_build_dependencies_override feature flag is enabled' do
+ before do
+ stub_feature_flags(ci_validate_build_dependencies_override: job.project)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+ end
+ end
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index c2029b9240b..3d06fd6db4f 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -1185,60 +1185,6 @@ RSpec.describe Ci::Build do
end
end
- describe 'state transition with resource group' do
- let(:resource_group) { create(:ci_resource_group, project: project) }
-
- context 'when build status is created' do
- let(:build) { create(:ci_build, :created, project: project, resource_group: resource_group) }
-
- it 'is waiting for resource when build is enqueued' do
- expect(Ci::ResourceGroups::AssignResourceFromResourceGroupWorker).to receive(:perform_async).with(resource_group.id)
-
- expect { build.enqueue! }.to change { build.status }.from('created').to('waiting_for_resource')
-
- expect(build.waiting_for_resource_at).not_to be_nil
- end
-
- context 'when build is waiting for resource' do
- before do
- build.update_column(:status, 'waiting_for_resource')
- end
-
- it 'is enqueued when build requests resource' do
- expect { build.enqueue_waiting_for_resource! }.to change { build.status }.from('waiting_for_resource').to('pending')
- end
-
- it 'releases a resource when build finished' do
- expect(build.resource_group).to receive(:release_resource_from).with(build).and_call_original
- expect(Ci::ResourceGroups::AssignResourceFromResourceGroupWorker).to receive(:perform_async).with(build.resource_group_id)
-
- build.enqueue_waiting_for_resource!
- build.success!
- end
-
- context 'when build has prerequisites' do
- before do
- allow(build).to receive(:any_unmet_prerequisites?) { true }
- end
-
- it 'is preparing when build is enqueued' do
- expect { build.enqueue_waiting_for_resource! }.to change { build.status }.from('waiting_for_resource').to('preparing')
- end
- end
-
- context 'when there are no available resources' do
- before do
- resource_group.assign_resource_to(create(:ci_build))
- end
-
- it 'stays as waiting for resource when build requests resource' do
- expect { build.enqueue_waiting_for_resource }.not_to change { build.status }
- end
- end
- end
- end
- end
-
describe '#on_stop' do
subject { build.on_stop }
@@ -3605,7 +3551,7 @@ RSpec.describe Ci::Build do
context 'when validates for dependencies is enabled' do
before do
- stub_feature_flags(ci_disable_validates_dependencies: false)
+ stub_feature_flags(ci_validate_build_dependencies_override: false)
end
let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0) }
@@ -3633,7 +3579,7 @@ RSpec.describe Ci::Build do
let(:options) { { dependencies: ['test'] } }
before do
- stub_feature_flags(ci_disable_validates_dependencies: true)
+ stub_feature_flags(ci_validate_build_dependencies_override: true)
end
it_behaves_like 'validation is not active'
@@ -4922,14 +4868,6 @@ RSpec.describe Ci::Build do
it_behaves_like 'drops the build without changing allow_failure'
end
-
- context 'when ci_allow_failure_with_exit_codes is disabled' do
- before do
- stub_feature_flags(ci_allow_failure_with_exit_codes: false)
- end
-
- it_behaves_like 'drops the build without changing allow_failure'
- end
end
end
diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb
index 75ed5939724..3d728b9335e 100644
--- a/spec/models/ci/build_trace_chunk_spec.rb
+++ b/spec/models/ci/build_trace_chunk_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
it_behaves_like 'having unique enum values'
before do
- stub_feature_flags(ci_enable_live_trace: true)
+ stub_feature_flags(ci_enable_live_trace: true, gitlab_ci_trace_read_consistency: true)
stub_artifacts_object_storage
end
diff --git a/spec/models/ci/build_trace_chunks/fog_spec.rb b/spec/models/ci/build_trace_chunks/fog_spec.rb
index bc96e2584cf..d9e9533fb26 100644
--- a/spec/models/ci/build_trace_chunks/fog_spec.rb
+++ b/spec/models/ci/build_trace_chunks/fog_spec.rb
@@ -98,27 +98,6 @@ RSpec.describe Ci::BuildTraceChunks::Fog do
expect(data_store.data(model)).to eq new_data
end
-
- context 'when ci_live_trace_use_fog_attributes flag is disabled' do
- before do
- stub_feature_flags(ci_live_trace_use_fog_attributes: false)
- end
-
- it 'does not pass along Fog attributes' do
- expect_next_instance_of(Fog::AWS::Storage::Files) do |files|
- expect(files).to receive(:create).with(
- key: anything,
- body: new_data
- ).and_call_original
- end
-
- expect(data_store.data(model)).to be_nil
-
- data_store.set_data(model, new_data)
-
- expect(data_store.data(model)).to eq new_data
- end
- end
end
end
end
diff --git a/spec/models/ci/pipeline_artifact_spec.rb b/spec/models/ci/pipeline_artifact_spec.rb
index 8cbace845a9..1ae85b0a5ba 100644
--- a/spec/models/ci/pipeline_artifact_spec.rb
+++ b/spec/models/ci/pipeline_artifact_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Ci::PipelineArtifact, type: :model do
- let(:coverage_report) { create(:ci_pipeline_artifact) }
+ let(:coverage_report) { create(:ci_pipeline_artifact, :with_coverage_report) }
describe 'associations' do
it { is_expected.to belong_to(:pipeline) }
@@ -15,7 +15,7 @@ RSpec.describe Ci::PipelineArtifact, type: :model do
it_behaves_like 'UpdateProjectStatistics' do
let_it_be(:pipeline, reload: true) { create(:ci_pipeline) }
- subject { build(:ci_pipeline_artifact, pipeline: pipeline) }
+ subject { build(:ci_pipeline_artifact, :with_code_coverage_with_multiple_files, pipeline: pipeline) }
end
describe 'validations' do
@@ -51,7 +51,7 @@ RSpec.describe Ci::PipelineArtifact, type: :model do
end
describe 'file is being stored' do
- subject { create(:ci_pipeline_artifact) }
+ subject { create(:ci_pipeline_artifact, :with_coverage_report) }
context 'when existing object has local store' do
it_behaves_like 'mounted file in local store'
@@ -68,7 +68,7 @@ RSpec.describe Ci::PipelineArtifact, type: :model do
end
context 'when file contains multi-byte characters' do
- let(:coverage_report_multibyte) { create(:ci_pipeline_artifact, :with_multibyte_characters) }
+ let(:coverage_report_multibyte) { create(:ci_pipeline_artifact, :with_coverage_multibyte_characters) }
it 'sets the size in bytesize' do
expect(coverage_report_multibyte.size).to eq(14)
@@ -76,38 +76,98 @@ RSpec.describe Ci::PipelineArtifact, type: :model do
end
end
- describe '.has_code_coverage?' do
- subject { Ci::PipelineArtifact.has_code_coverage? }
+ describe '.has_report?' do
+ subject(:pipeline_artifact) { Ci::PipelineArtifact.has_report?(file_type) }
- context 'when pipeline artifact has a code coverage' do
- let!(:pipeline_artifact) { create(:ci_pipeline_artifact) }
+ context 'when file_type is code_coverage' do
+ let(:file_type) { :code_coverage }
+
+ context 'when pipeline artifact has a coverage report' do
+ let!(:pipeline_artifact) { create(:ci_pipeline_artifact, :with_coverage_report) }
+
+ it 'returns true' do
+ expect(pipeline_artifact).to be_truthy
+ end
+ end
+
+ context 'when pipeline artifact does not have a coverage report' do
+ it 'returns false' do
+ expect(pipeline_artifact).to be_falsey
+ end
+ end
+ end
+
+ context 'when file_type is code_quality_mr_diff' do
+ let(:file_type) { :code_quality_mr_diff }
+
+ context 'when pipeline artifact has a codequality mr diff report' do
+ let!(:pipeline_artifact) { create(:ci_pipeline_artifact, :with_codequality_mr_diff_report) }
- it 'returns true' do
- expect(subject).to be_truthy
+ it 'returns true' do
+ expect(pipeline_artifact).to be_truthy
+ end
+ end
+
+ context 'when pipeline artifact does not have a codequality mr diff report' do
+ it 'returns false' do
+ expect(pipeline_artifact).to be_falsey
+ end
end
end
- context 'when pipeline artifact does not have a code coverage' do
+ context 'when file_type is nil' do
+ let(:file_type) { nil }
+
it 'returns false' do
- expect(subject).to be_falsey
+ expect(pipeline_artifact).to be_falsey
end
end
end
- describe '.find_with_code_coverage' do
- subject { Ci::PipelineArtifact.find_with_code_coverage }
+ describe '.find_by_file_type' do
+ subject(:pipeline_artifact) { Ci::PipelineArtifact.find_by_file_type(file_type) }
- context 'when pipeline artifact has a coverage report' do
- let!(:coverage_report) { create(:ci_pipeline_artifact) }
+ context 'when file_type is code_coverage' do
+ let(:file_type) { :code_coverage }
+
+ context 'when pipeline artifact has a coverage report' do
+ let!(:coverage_report) { create(:ci_pipeline_artifact, :with_coverage_report) }
+
+ it 'returns a pipeline artifact with a coverage report' do
+ expect(pipeline_artifact.file_type).to eq('code_coverage')
+ end
+ end
+
+ context 'when pipeline artifact does not have a coverage report' do
+ it 'returns nil' do
+ expect(pipeline_artifact).to be_nil
+ end
+ end
+ end
+
+ context 'when file_type is code_quality_mr_diff' do
+ let(:file_type) { :code_quality_mr_diff }
+
+ context 'when pipeline artifact has a quality report' do
+ let!(:coverage_report) { create(:ci_pipeline_artifact, :with_codequality_mr_diff_report) }
- it 'returns a pipeline artifact with a code coverage' do
- expect(subject.file_type).to eq('code_coverage')
+ it 'returns a pipeline artifact with a quality report' do
+ expect(pipeline_artifact.file_type).to eq('code_quality_mr_diff')
+ end
+ end
+
+ context 'when pipeline artifact does not have a quality report' do
+ it 'returns nil' do
+ expect(pipeline_artifact).to be_nil
+ end
end
end
- context 'when pipeline artifact does not have a coverage report' do
+ context 'when file_type is nil' do
+ let(:file_type) { nil }
+
it 'returns nil' do
- expect(subject).to be_nil
+ expect(pipeline_artifact).to be_nil
end
end
end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index f5e824bb066..6f5f7d2e2fb 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -1261,26 +1261,6 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
pipeline.send(event)
end
-
- context 'the feature is disabled' do
- it 'does not trigger a worker' do
- stub_feature_flags(jira_sync_builds: false)
-
- expect(worker).not_to receive(:perform_async)
-
- pipeline.send(event)
- end
- end
-
- context 'the feature is enabled for this project' do
- it 'does trigger a worker' do
- stub_feature_flags(jira_sync_builds: pipeline.project)
-
- expect(worker).to receive(:perform_async)
-
- pipeline.send(event)
- end
- end
end
end
end
@@ -2016,13 +1996,34 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
is_expected.to be_falsey
end
end
+
+ context 'bridge which is allowed to fail fails' do
+ before do
+ create :ci_bridge, :allowed_to_fail, :failed, pipeline: pipeline, name: 'rubocop'
+ end
+
+ it 'returns true' do
+ is_expected.to be_truthy
+ end
+ end
+
+ context 'bridge which is allowed to fail is successful' do
+ before do
+ create :ci_bridge, :allowed_to_fail, :success, pipeline: pipeline, name: 'rubocop'
+ end
+
+ it 'returns false' do
+ is_expected.to be_falsey
+ end
+ end
end
describe '#number_of_warnings' do
it 'returns the number of warnings' do
create(:ci_build, :allowed_to_fail, :failed, pipeline: pipeline, name: 'rubocop')
+ create(:ci_bridge, :allowed_to_fail, :failed, pipeline: pipeline, name: 'rubocop')
- expect(pipeline.number_of_warnings).to eq(1)
+ expect(pipeline.number_of_warnings).to eq(2)
end
it 'supports eager loading of the number of warnings' do
@@ -2320,7 +2321,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
context 'on waiting for resource' do
before do
- allow(build).to receive(:requires_resource?) { true }
+ allow(build).to receive(:with_resource_group?) { true }
allow(Ci::ResourceGroups::AssignResourceFromResourceGroupWorker).to receive(:perform_async)
build.enqueue
@@ -3387,7 +3388,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
describe '#batch_lookup_report_artifact_for_file_type' do
context 'with code quality report artifact' do
- let(:pipeline) { create(:ci_pipeline, :with_codequality_report, project: project) }
+ let(:pipeline) { create(:ci_pipeline, :with_codequality_reports, project: project) }
it "returns the code quality artifact" do
expect(pipeline.batch_lookup_report_artifact_for_file_type(:codequality)).to eq(pipeline.job_artifacts.sample)
@@ -3509,6 +3510,66 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ describe '#has_codequality_mr_diff_report?' do
+ subject { pipeline.has_codequality_mr_diff_report? }
+
+ context 'when pipeline has a codequality mr diff report' do
+ let(:pipeline) { create(:ci_pipeline, :with_codequality_mr_diff_report, :running, project: project) }
+
+ it { expect(subject).to be_truthy }
+ end
+
+ context 'when pipeline does not have a codequality mr diff report' do
+ let(:pipeline) { create(:ci_pipeline, :success, project: project) }
+
+ it { expect(subject).to be_falsey }
+ end
+ end
+
+ describe '#can_generate_codequality_reports?' do
+ subject { pipeline.can_generate_codequality_reports? }
+
+ context 'when pipeline has builds with codequality reports' do
+ before do
+ create(:ci_build, :codequality_reports, pipeline: pipeline, project: project)
+ end
+
+ context 'when pipeline status is running' do
+ let(:pipeline) { create(:ci_pipeline, :running, project: project) }
+
+ it { expect(subject).to be_falsey }
+ end
+
+ context 'when pipeline status is success' do
+ let(:pipeline) { create(:ci_pipeline, :success, project: project) }
+
+ it 'can generate a codequality report' do
+ expect(subject).to be_truthy
+ end
+
+ context 'when feature is disabled' do
+ before do
+ stub_feature_flags(codequality_mr_diff: false)
+ end
+
+ it 'can not generate a codequality report' do
+ expect(subject).to be_falsey
+ end
+ end
+ end
+ end
+
+ context 'when pipeline does not have builds with codequality reports' do
+ before do
+ create(:ci_build, :artifacts, pipeline: pipeline, project: project)
+ end
+
+ let(:pipeline) { create(:ci_pipeline, :success, project: project) }
+
+ it { expect(subject).to be_falsey }
+ end
+ end
+
describe '#test_report_summary' do
subject { pipeline.test_report_summary }
diff --git a/spec/models/ci/processable_spec.rb b/spec/models/ci/processable_spec.rb
index 35764e2bbbe..6290f4aef16 100644
--- a/spec/models/ci/processable_spec.rb
+++ b/spec/models/ci/processable_spec.rb
@@ -122,4 +122,58 @@ RSpec.describe Ci::Processable do
it { is_expected.to be_empty }
end
end
+
+ describe 'state transition with resource group' do
+ let(:resource_group) { create(:ci_resource_group, project: project) }
+
+ context 'when build status is created' do
+ let(:build) { create(:ci_build, :created, project: project, resource_group: resource_group) }
+
+ it 'is waiting for resource when build is enqueued' do
+ expect(Ci::ResourceGroups::AssignResourceFromResourceGroupWorker).to receive(:perform_async).with(resource_group.id)
+
+ expect { build.enqueue! }.to change { build.status }.from('created').to('waiting_for_resource')
+
+ expect(build.waiting_for_resource_at).not_to be_nil
+ end
+
+ context 'when build is waiting for resource' do
+ before do
+ build.update_column(:status, 'waiting_for_resource')
+ end
+
+ it 'is enqueued when build requests resource' do
+ expect { build.enqueue_waiting_for_resource! }.to change { build.status }.from('waiting_for_resource').to('pending')
+ end
+
+ it 'releases a resource when build finished' do
+ expect(build.resource_group).to receive(:release_resource_from).with(build).and_call_original
+ expect(Ci::ResourceGroups::AssignResourceFromResourceGroupWorker).to receive(:perform_async).with(build.resource_group_id)
+
+ build.enqueue_waiting_for_resource!
+ build.success!
+ end
+
+ context 'when build has prerequisites' do
+ before do
+ allow(build).to receive(:any_unmet_prerequisites?) { true }
+ end
+
+ it 'is preparing when build is enqueued' do
+ expect { build.enqueue_waiting_for_resource! }.to change { build.status }.from('waiting_for_resource').to('preparing')
+ end
+ end
+
+ context 'when there are no available resources' do
+ before do
+ resource_group.assign_resource_to(create(:ci_build))
+ end
+
+ it 'stays as waiting for resource when build requests resource' do
+ expect { build.enqueue_waiting_for_resource }.not_to change { build.status }
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/ci/resource_group_spec.rb b/spec/models/ci/resource_group_spec.rb
index 9f72d1a82e5..50a786419f2 100644
--- a/spec/models/ci/resource_group_spec.rb
+++ b/spec/models/ci/resource_group_spec.rb
@@ -32,12 +32,12 @@ RSpec.describe Ci::ResourceGroup do
let(:build) { create(:ci_build) }
let(:resource_group) { create(:ci_resource_group) }
- it 'retains resource for the build' do
- expect(resource_group.resources.first.build).to be_nil
+ it 'retains resource for the processable' do
+ expect(resource_group.resources.first.processable).to be_nil
is_expected.to eq(true)
- expect(resource_group.resources.first.build).to eq(build)
+ expect(resource_group.resources.first.processable).to eq(build)
end
context 'when there are no free resources' do
@@ -51,7 +51,7 @@ RSpec.describe Ci::ResourceGroup do
end
context 'when the build has already retained a resource' do
- let!(:another_resource) { create(:ci_resource, resource_group: resource_group, build: build) }
+ let!(:another_resource) { create(:ci_resource, resource_group: resource_group, processable: build) }
it 'fails to retain resource' do
expect { subject }.to raise_error(ActiveRecord::RecordNotUnique)
@@ -71,11 +71,11 @@ RSpec.describe Ci::ResourceGroup do
end
it 'releases resource from the build' do
- expect(resource_group.resources.first.build).to eq(build)
+ expect(resource_group.resources.first.processable).to eq(build)
is_expected.to eq(true)
- expect(resource_group.resources.first.build).to be_nil
+ expect(resource_group.resources.first.processable).to be_nil
end
end
diff --git a/spec/models/ci/resource_spec.rb b/spec/models/ci/resource_spec.rb
index 90f26ef2b31..5574f6f82b2 100644
--- a/spec/models/ci/resource_spec.rb
+++ b/spec/models/ci/resource_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Ci::Resource do
subject { described_class.retained_by(build) }
let(:build) { create(:ci_build) }
- let!(:resource) { create(:ci_resource, build: build) }
+ let!(:resource) { create(:ci_resource, processable: build) }
it 'returns retained resources' do
is_expected.to eq([resource])
diff --git a/spec/models/ci/stage_spec.rb b/spec/models/ci/stage_spec.rb
index 3d873a1b9c1..0afc491dc73 100644
--- a/spec/models/ci/stage_spec.rb
+++ b/spec/models/ci/stage_spec.rb
@@ -288,6 +288,7 @@ RSpec.describe Ci::Stage, :models do
context 'when stage has warnings' do
before do
create(:ci_build, :failed, :allowed_to_fail, stage_id: stage.id)
+ create(:ci_bridge, :failed, :allowed_to_fail, stage_id: stage.id)
end
describe '#has_warnings?' do
@@ -310,7 +311,7 @@ RSpec.describe Ci::Stage, :models do
expect(synced_queries.count).to eq 1
expect(stage.number_of_warnings.inspect).to include 'BatchLoader'
- expect(stage.number_of_warnings).to eq 1
+ expect(stage.number_of_warnings).to eq 2
end
end
end
diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb
index acbabee9383..a5f02b61132 100644
--- a/spec/models/commit_spec.rb
+++ b/spec/models/commit_spec.rb
@@ -400,6 +400,19 @@ eos
allow(commit).to receive(:safe_message).and_return(message + "\n" + message)
expect(commit.full_title).to eq(message)
end
+
+ it 'truncates html representation if more than 1KiB' do
+ # Commit title is over 2KiB on a single line
+ huge_commit_title = ('panic ' * 350) + 'trailing text'
+
+ allow(commit).to receive(:safe_message).and_return(huge_commit_title)
+
+ commit.refresh_markdown_cache
+ full_title_html = commit.full_title_html
+
+ expect(full_title_html.bytesize).to be < 2.kilobytes
+ expect(full_title_html).not_to include('trailing text')
+ end
end
describe 'description' do
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index 532f68c2f18..52f6b4f2586 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -725,22 +725,6 @@ RSpec.describe CommitStatus do
let(:commit_status) { create(:commit_status) }
it { is_expected.to eq(true) }
-
- context 'when build requires a resource' do
- before do
- allow(commit_status).to receive(:requires_resource?) { true }
- end
-
- it { is_expected.to eq(false) }
- end
-
- context 'when build has a prerequisite' do
- before do
- allow(commit_status).to receive(:any_unmet_prerequisites?) { true }
- end
-
- it { is_expected.to eq(false) }
- end
end
describe '#enqueue' do
@@ -748,7 +732,6 @@ RSpec.describe CommitStatus do
before do
allow(Time).to receive(:now).and_return(current_time)
- expect(commit_status.any_unmet_prerequisites?).to eq false
end
shared_examples 'commit status enqueued' do
diff --git a/spec/models/concerns/atomic_internal_id_spec.rb b/spec/models/concerns/atomic_internal_id_spec.rb
index 5ee3c012dc9..35b0f107676 100644
--- a/spec/models/concerns/atomic_internal_id_spec.rb
+++ b/spec/models/concerns/atomic_internal_id_spec.rb
@@ -87,6 +87,158 @@ RSpec.describe AtomicInternalId do
end
end
+ describe '#clear_scope_iid!' do
+ context 'when no ensure_if condition is given' do
+ it 'clears automatically set IIDs' do
+ expect(milestone).to receive(:clear_project_iid!).and_call_original
+
+ expect_iid_to_be_set_and_rollback(milestone)
+
+ expect(milestone.iid).to be_nil
+ end
+
+ it 'does not clear manually set IIDs' do
+ milestone.iid = external_iid
+
+ expect(milestone).to receive(:clear_project_iid!).and_call_original
+
+ expect_iid_to_be_set_and_rollback(milestone)
+
+ expect(milestone.iid).to eq(external_iid)
+ end
+ end
+
+ context 'when an ensure_if condition is given' do
+ let(:test_class) do
+ Class.new(ApplicationRecord) do
+ include AtomicInternalId
+ include Importable
+
+ self.table_name = :milestones
+
+ belongs_to :project
+
+ has_internal_id :iid, scope: :project, track_if: -> { !importing }, ensure_if: -> { !importing }
+
+ def self.name
+ 'TestClass'
+ end
+ end
+ end
+
+ let(:instance) { test_class.new(milestone.attributes) }
+
+ context 'when the ensure_if condition evaluates to true' do
+ it 'clears automatically set IIDs' do
+ expect(instance).to receive(:clear_project_iid!).and_call_original
+
+ expect_iid_to_be_set_and_rollback(instance)
+
+ expect(instance.iid).to be_nil
+ end
+
+ it 'does not clear manually set IIDs' do
+ instance.iid = external_iid
+
+ expect(instance).to receive(:clear_project_iid!).and_call_original
+
+ expect_iid_to_be_set_and_rollback(instance)
+
+ expect(instance.iid).to eq(external_iid)
+ end
+ end
+
+ context 'when the ensure_if condition evaluates to false' do
+ before do
+ instance.importing = true
+ end
+
+ it 'does not clear IIDs' do
+ instance.iid = external_iid
+
+ expect(instance).not_to receive(:clear_project_iid!)
+
+ expect_iid_to_be_set_and_rollback(instance)
+
+ expect(instance.iid).to eq(external_iid)
+ end
+ end
+ end
+
+ def expect_iid_to_be_set_and_rollback(instance)
+ ActiveRecord::Base.transaction(requires_new: true) do
+ instance.save!
+
+ expect(instance.iid).not_to be_nil
+
+ raise ActiveRecord::Rollback
+ end
+ end
+ end
+
+ describe '#validate_scope_iid_exists!' do
+ let(:test_class) do
+ Class.new(ApplicationRecord) do
+ include AtomicInternalId
+ include Importable
+
+ self.table_name = :milestones
+
+ belongs_to :project
+
+ def self.name
+ 'TestClass'
+ end
+ end
+ end
+
+ let(:instance) { test_class.new(milestone.attributes) }
+
+ before do
+ test_class.has_internal_id :iid, scope: :project, presence: presence, ensure_if: -> { !importing }
+
+ instance.importing = true
+ end
+
+ context 'when the presence flag is set' do
+ let(:presence) { true }
+
+ it 'raises an error for blank iids on create' do
+ expect do
+ instance.save!
+ end.to raise_error(described_class::MissingValueError, 'iid was unexpectedly blank!')
+ end
+
+ it 'raises an error for blank iids on update' do
+ instance.iid = 100
+ instance.save!
+
+ instance.iid = nil
+
+ expect do
+ instance.save!
+ end.to raise_error(described_class::MissingValueError, 'iid was unexpectedly blank!')
+ end
+ end
+
+ context 'when the presence flag is not set' do
+ let(:presence) { false }
+
+ it 'does not raise an error for blank iids on create' do
+ expect { instance.save! }.not_to raise_error
+ end
+
+ it 'does not raise an error for blank iids on update' do
+ instance.iid = 100
+ instance.save!
+
+ instance.iid = nil
+
+ expect { instance.save! }.not_to raise_error
+ end
+ end
+ end
+
describe '.with_project_iid_supply' do
let(:iid) { 100 }
diff --git a/spec/models/concerns/bulk_insert_safe_spec.rb b/spec/models/concerns/bulk_insert_safe_spec.rb
index 82b0c00b396..e40b0cf11ff 100644
--- a/spec/models/concerns/bulk_insert_safe_spec.rb
+++ b/spec/models/concerns/bulk_insert_safe_spec.rb
@@ -44,7 +44,6 @@ RSpec.describe BulkInsertSafe do
insecure_mode: false
default_value_for :enum_value, 'case_1'
- default_value_for :secret_value, 'my-secret'
default_value_for :sha_value, '2fd4e1c67a2d28fced849ee1bb76e7391b93eb12'
default_value_for :jsonb_value, { "key" => "value" }
@@ -53,11 +52,11 @@ RSpec.describe BulkInsertSafe do
end
def self.valid_list(count)
- Array.new(count) { |n| new(name: "item-#{n}") }
+ Array.new(count) { |n| new(name: "item-#{n}", secret_value: 'my-secret') }
end
def self.invalid_list(count)
- Array.new(count) { new }
+ Array.new(count) { new(secret_value: 'my-secret') }
end
end
end
@@ -102,7 +101,7 @@ RSpec.describe BulkInsertSafe do
context 'primary keys' do
it 'raises error if primary keys are set prior to insertion' do
- item = bulk_insert_item_class.new(name: 'valid', id: 10)
+ item = bulk_insert_item_class.new(name: 'valid', id: 10, secret_value: 'my-secret')
expect { bulk_insert_item_class.bulk_insert!([item]) }
.to raise_error(bulk_insert_item_class::PrimaryKeySetError)
diff --git a/spec/models/concerns/featurable_spec.rb b/spec/models/concerns/featurable_spec.rb
index 99acc563950..b550d22f686 100644
--- a/spec/models/concerns/featurable_spec.rb
+++ b/spec/models/concerns/featurable_spec.rb
@@ -134,22 +134,6 @@ RSpec.describe Featurable do
expect(project.feature_available?(:issues, user)).to eq(true)
end
end
-
- context 'when feature is disabled by a feature flag' do
- it 'returns false' do
- stub_feature_flags(issues: false)
-
- expect(project.feature_available?(:issues, user)).to eq(false)
- end
- end
-
- context 'when feature is enabled by a feature flag' do
- it 'returns true' do
- stub_feature_flags(issues: true)
-
- expect(project.feature_available?(:issues, user)).to eq(true)
- end
- end
end
describe '#*_enabled?' do
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index ff5b270cf33..3545c8e9686 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Issuable do
include ProjectForksHelper
+ using RSpec::Parameterized::TableSyntax
let(:issuable_class) { Issue }
let(:issue) { create(:issue, title: 'An issue', description: 'A description') }
@@ -45,13 +46,17 @@ RSpec.describe Issuable do
end
it { is_expected.to validate_presence_of(:project) }
- it { is_expected.to validate_presence_of(:iid) }
it { is_expected.to validate_presence_of(:author) }
it { is_expected.to validate_presence_of(:title) }
it { is_expected.to validate_length_of(:title).is_at_most(described_class::TITLE_LENGTH_MAX) }
it { is_expected.to validate_length_of(:description).is_at_most(described_class::DESCRIPTION_LENGTH_MAX).on(:create) }
- it_behaves_like 'validates description length with custom validation'
+ it_behaves_like 'validates description length with custom validation' do
+ before do
+ allow(InternalId).to receive(:generate_next).and_call_original
+ end
+ end
+
it_behaves_like 'truncates the description to its allowed maximum length on import'
end
end
@@ -820,8 +825,6 @@ RSpec.describe Issuable do
end
describe '#supports_time_tracking?' do
- using RSpec::Parameterized::TableSyntax
-
where(:issuable_type, :supports_time_tracking) do
:issue | true
:incident | true
@@ -838,8 +841,6 @@ RSpec.describe Issuable do
end
describe '#supports_severity?' do
- using RSpec::Parameterized::TableSyntax
-
where(:issuable_type, :supports_severity) do
:issue | false
:incident | true
@@ -856,8 +857,6 @@ RSpec.describe Issuable do
end
describe '#incident?' do
- using RSpec::Parameterized::TableSyntax
-
where(:issuable_type, :incident) do
:issue | false
:incident | true
@@ -874,8 +873,6 @@ RSpec.describe Issuable do
end
describe '#supports_issue_type?' do
- using RSpec::Parameterized::TableSyntax
-
where(:issuable_type, :supports_issue_type) do
:issue | true
:merge_request | false
@@ -894,8 +891,6 @@ RSpec.describe Issuable do
subject { issuable.severity }
context 'when issuable is not an incident' do
- using RSpec::Parameterized::TableSyntax
-
where(:issuable_type, :severity) do
:issue | 'unknown'
:merge_request | 'unknown'
diff --git a/spec/models/concerns/spammable_spec.rb b/spec/models/concerns/spammable_spec.rb
index d4fcb2e99eb..3c5f3b2d2ad 100644
--- a/spec/models/concerns/spammable_spec.rb
+++ b/spec/models/concerns/spammable_spec.rb
@@ -120,6 +120,61 @@ RSpec.describe Spammable do
end
end
+ describe '#render_recaptcha?' do
+ before do
+ allow(Gitlab::Recaptcha).to receive(:enabled?) { recaptcha_enabled }
+ end
+
+ context 'when recaptcha is not enabled' do
+ let(:recaptcha_enabled) { false }
+
+ it 'returns false' do
+ expect(issue.render_recaptcha?).to eq(false)
+ end
+ end
+
+ context 'when recaptcha is enabled' do
+ let(:recaptcha_enabled) { true }
+
+ context 'when there are two or more errors' do
+ before do
+ issue.errors.add(:base, 'a spam error')
+ issue.errors.add(:base, 'some other error')
+ end
+
+ it 'returns false' do
+ expect(issue.render_recaptcha?).to eq(false)
+ end
+ end
+
+ context 'when there are less than two errors' do
+ before do
+ issue.errors.add(:base, 'a spam error')
+ end
+
+ context 'when spammable does not need recaptcha' do
+ before do
+ issue.needs_recaptcha = false
+ end
+
+ it 'returns false' do
+ expect(issue.render_recaptcha?).to eq(false)
+ end
+ end
+
+ context 'when spammable needs recaptcha' do
+ before do
+ issue.needs_recaptcha!
+ end
+
+ it 'returns true' do
+ expect(issue.render_recaptcha?).to eq(true)
+ end
+ end
+ end
+ end
+ end
+
describe '#clear_spam_flags!' do
it 'clears spam and recaptcha flags' do
issue.spam = true
diff --git a/spec/models/concerns/token_authenticatable_spec.rb b/spec/models/concerns/token_authenticatable_spec.rb
index d8b77e1cd0d..2df76684d71 100644
--- a/spec/models/concerns/token_authenticatable_spec.rb
+++ b/spec/models/concerns/token_authenticatable_spec.rb
@@ -54,7 +54,7 @@ RSpec.describe ApplicationSetting, 'TokenAuthenticatable' do
it 'persists new token as an encrypted string' do
expect(subject).to eq settings.reload.runners_registration_token
expect(settings.read_attribute('runners_registration_token_encrypted'))
- .to eq Gitlab::CryptoHelper.aes256_gcm_encrypt(subject)
+ .to eq Gitlab::CryptoHelper.aes256_gcm_encrypt(subject, nonce: Gitlab::CryptoHelper::AES256_GCM_IV_STATIC)
expect(settings).to be_persisted
end
@@ -243,7 +243,7 @@ RSpec.describe Ci::Build, 'TokenAuthenticatable' do
it 'persists new token as an encrypted string' do
build.ensure_token!
- encrypted = Gitlab::CryptoHelper.aes256_gcm_encrypt(build.token)
+ encrypted = Gitlab::CryptoHelper.aes256_gcm_encrypt(build.token, nonce: Gitlab::CryptoHelper::AES256_GCM_IV_STATIC)
expect(build.read_attribute('token_encrypted')).to eq encrypted
end
diff --git a/spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb b/spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb
index f6b8cf7def4..1e1cd97e410 100644
--- a/spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb
+++ b/spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb
@@ -68,6 +68,10 @@ RSpec.describe TokenAuthenticatableStrategies::Encrypted do
context 'when using optional strategy' do
let(:options) { { encrypted: :optional } }
+ before do
+ stub_feature_flags(dynamic_nonce_creation: false)
+ end
+
it 'returns decrypted token when an encrypted token is present' do
allow(instance).to receive(:read_attribute)
.with('some_field_encrypted')
@@ -124,7 +128,7 @@ RSpec.describe TokenAuthenticatableStrategies::Encrypted do
it 'writes encrypted token and removes plaintext token and returns it' do
expect(instance).to receive(:[]=)
- .with('some_field_encrypted', encrypted)
+ .with('some_field_encrypted', any_args)
expect(instance).to receive(:[]=)
.with('some_field', nil)
@@ -137,7 +141,7 @@ RSpec.describe TokenAuthenticatableStrategies::Encrypted do
it 'writes encrypted token and writes plaintext token' do
expect(instance).to receive(:[]=)
- .with('some_field_encrypted', encrypted)
+ .with('some_field_encrypted', any_args)
expect(instance).to receive(:[]=)
.with('some_field', 'my-value')
diff --git a/spec/models/dependency_proxy/manifest_spec.rb b/spec/models/dependency_proxy/manifest_spec.rb
index aa2e73356dd..4203644c003 100644
--- a/spec/models/dependency_proxy/manifest_spec.rb
+++ b/spec/models/dependency_proxy/manifest_spec.rb
@@ -29,24 +29,32 @@ RSpec.describe DependencyProxy::Manifest, type: :model do
end
end
- describe '.find_or_initialize_by_file_name' do
- subject { DependencyProxy::Manifest.find_or_initialize_by_file_name(file_name) }
+ describe '.find_or_initialize_by_file_name_or_digest' do
+ let_it_be(:file_name) { 'foo' }
+ let_it_be(:digest) { 'bar' }
- context 'no manifest exists' do
- let_it_be(:file_name) { 'foo' }
+ subject { DependencyProxy::Manifest.find_or_initialize_by_file_name_or_digest(file_name: file_name, digest: digest) }
+ context 'no manifest exists' do
it 'initializes a manifest' do
- expect(DependencyProxy::Manifest).to receive(:new).with(file_name: file_name)
+ expect(DependencyProxy::Manifest).to receive(:new).with(file_name: file_name, digest: digest)
subject
end
end
- context 'manifest exists' do
+ context 'manifest exists and matches file_name' do
let_it_be(:dependency_proxy_manifest) { create(:dependency_proxy_manifest) }
let_it_be(:file_name) { dependency_proxy_manifest.file_name }
it { is_expected.to eq(dependency_proxy_manifest) }
end
+
+ context 'manifest exists and matches digest' do
+ let_it_be(:dependency_proxy_manifest) { create(:dependency_proxy_manifest) }
+ let_it_be(:digest) { dependency_proxy_manifest.digest }
+
+ it { is_expected.to eq(dependency_proxy_manifest) }
+ end
end
end
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index 5bc61db6d21..a2624a54668 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -254,26 +254,6 @@ RSpec.describe Deployment do
deployment.send(event)
end
-
- context 'the feature is disabled' do
- it 'does not trigger a worker' do
- stub_feature_flags(jira_sync_deployments: false)
-
- expect(worker).not_to receive(:perform_async)
-
- deployment.send(event)
- end
- end
-
- context 'the feature is enabled for this project' do
- it 'does trigger a worker' do
- stub_feature_flags(jira_sync_deployments: deployment.project)
-
- expect(worker).to receive(:perform_async)
-
- deployment.send(event)
- end
- end
end
end
end
diff --git a/spec/models/experiment_spec.rb b/spec/models/experiment_spec.rb
index 171bfd116d3..22bbf2df8fd 100644
--- a/spec/models/experiment_spec.rb
+++ b/spec/models/experiment_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Experiment do
+ include AfterNextHelpers
+
subject { build(:experiment) }
describe 'associations' do
@@ -67,6 +69,33 @@ RSpec.describe Experiment do
end
end
+ describe '.add_group' do
+ let_it_be(:experiment_name) { :experiment_key }
+ let_it_be(:variant) { :control }
+ let_it_be(:group) { build(:group) }
+
+ subject(:add_group) { described_class.add_group(experiment_name, variant: variant, group: group) }
+
+ context 'when an experiment with the provided name does not exist' do
+ it 'creates a new experiment record' do
+ allow_next(described_class, name: :experiment_key)
+ .to receive(:record_group_and_variant!).with(group, variant)
+
+ expect { add_group }.to change(described_class, :count).by(1)
+ end
+ end
+
+ context 'when an experiment with the provided name already exists' do
+ before do
+ create(:experiment, name: experiment_name)
+ end
+
+ it 'does not create a new experiment record' do
+ expect { add_group }.not_to change(described_class, :count)
+ end
+ end
+ end
+
describe '.record_conversion_event' do
let_it_be(:user) { build(:user) }
@@ -136,6 +165,34 @@ RSpec.describe Experiment do
end
end
+ describe '#record_group_and_variant!' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:variant) { :control }
+ let_it_be(:experiment) { create(:experiment) }
+
+ subject(:record_group_and_variant!) { experiment.record_group_and_variant!(group, variant) }
+
+ context 'when no existing experiment_subject record exists for the given group' do
+ it 'creates an experiment_subject record' do
+ expect_next(ExperimentSubject).to receive(:update!).with(variant: variant).and_call_original
+
+ expect { record_group_and_variant! }.to change(ExperimentSubject, :count).by(1)
+ end
+ end
+
+ context 'when an existing experiment_subject exists for the given group' do
+ context 'but it belonged to a different variant' do
+ let!(:experiment_subject) do
+ create(:experiment_subject, experiment: experiment, group: group, user: nil, variant: :experimental)
+ end
+
+ it 'updates the variant value' do
+ expect { record_group_and_variant! }.to change { experiment_subject.reload.variant }.to('control')
+ end
+ end
+ end
+ end
+
describe '#record_user_and_group' do
let_it_be(:experiment) { create(:experiment) }
let_it_be(:user) { create(:user) }
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 0acf2b96b74..2ac00b6231e 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -410,7 +410,7 @@ RSpec.describe Group do
it "is false if avatar is html page" do
group.update_attribute(:avatar, 'uploads/avatar.html')
- expect(group.avatar_type).to eq(["file format is not supported. Please try one of the following supported formats: png, jpg, jpeg, gif, bmp, tiff, ico"])
+ expect(group.avatar_type).to eq(["file format is not supported. Please try one of the following supported formats: png, jpg, jpeg, gif, bmp, tiff, ico, webp"])
end
end
@@ -1492,6 +1492,28 @@ RSpec.describe Group do
end
end
+ describe '.preset_root_ancestor_for' do
+ let_it_be(:rootgroup, reload: true) { create(:group) }
+ let_it_be(:subgroup, reload: true) { create(:group, parent: rootgroup) }
+ let_it_be(:subgroup2, reload: true) { create(:group, parent: subgroup) }
+
+ it 'does nothing for single group' do
+ expect(subgroup).not_to receive(:self_and_ancestors)
+
+ described_class.preset_root_ancestor_for([subgroup])
+ end
+
+ it 'sets the same root_ancestor for multiple groups' do
+ expect(subgroup).not_to receive(:self_and_ancestors)
+ expect(subgroup2).not_to receive(:self_and_ancestors)
+
+ described_class.preset_root_ancestor_for([rootgroup, subgroup, subgroup2])
+
+ expect(subgroup.root_ancestor).to eq(rootgroup)
+ expect(subgroup2.root_ancestor).to eq(rootgroup)
+ end
+ end
+
def subject_and_reload(*models)
subject
models.map(&:reload)
diff --git a/spec/models/issue_link_spec.rb b/spec/models/issue_link_spec.rb
index ef41108ebea..433b51b8a70 100644
--- a/spec/models/issue_link_spec.rb
+++ b/spec/models/issue_link_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe IssueLink do
end
describe 'link_type' do
- it { is_expected.to define_enum_for(:link_type).with_values(relates_to: 0, blocks: 1, is_blocked_by: 2) }
+ it { is_expected.to define_enum_for(:link_type).with_values(relates_to: 0, blocks: 1) }
it 'provides the "related" as default link_type' do
expect(create(:issue_link).link_type).to eq 'relates_to'
diff --git a/spec/models/merge_request/metrics_spec.rb b/spec/models/merge_request/metrics_spec.rb
index 760eaf1ac7f..13ff239a306 100644
--- a/spec/models/merge_request/metrics_spec.rb
+++ b/spec/models/merge_request/metrics_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe MergeRequest::Metrics do
describe 'associations' do
it { is_expected.to belong_to(:merge_request) }
+ it { is_expected.to belong_to(:target_project).class_name('Project') }
it { is_expected.to belong_to(:latest_closed_by).class_name('User') }
it { is_expected.to belong_to(:merged_by).class_name('User') }
end
@@ -36,5 +37,15 @@ RSpec.describe MergeRequest::Metrics do
is_expected.not_to include([metrics_2])
end
end
+
+ describe '.by_target_project' do
+ let(:target_project) { metrics_1.target_project }
+
+ subject { described_class.by_target_project(target_project) }
+
+ it 'finds metrics record with the associated target project' do
+ is_expected.to eq([metrics_1])
+ end
+ end
end
end
diff --git a/spec/models/merge_request_diff_commit_spec.rb b/spec/models/merge_request_diff_commit_spec.rb
index 2edf44ecdc4..a24628b0f9d 100644
--- a/spec/models/merge_request_diff_commit_spec.rb
+++ b/spec/models/merge_request_diff_commit_spec.rb
@@ -48,7 +48,8 @@ RSpec.describe MergeRequestDiffCommit do
"committer_email": "dmitriy.zaporozhets@gmail.com",
"merge_request_diff_id": merge_request_diff_id,
"relative_order": 0,
- "sha": Gitlab::Database::ShaAttribute.serialize("5937ac0a7beb003549fc5fd26fc247adbce4a52e")
+ "sha": Gitlab::Database::ShaAttribute.serialize("5937ac0a7beb003549fc5fd26fc247adbce4a52e"),
+ "trailers": {}.to_json
},
{
"message": "Change some files\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
@@ -60,7 +61,8 @@ RSpec.describe MergeRequestDiffCommit do
"committer_email": "dmitriy.zaporozhets@gmail.com",
"merge_request_diff_id": merge_request_diff_id,
"relative_order": 1,
- "sha": Gitlab::Database::ShaAttribute.serialize("570e7b2abdd848b95f2f578043fc23bd6f6fd24d")
+ "sha": Gitlab::Database::ShaAttribute.serialize("570e7b2abdd848b95f2f578043fc23bd6f6fd24d"),
+ "trailers": {}.to_json
}
]
end
@@ -92,7 +94,8 @@ RSpec.describe MergeRequestDiffCommit do
"committer_email": "alejorro70@gmail.com",
"merge_request_diff_id": merge_request_diff_id,
"relative_order": 0,
- "sha": Gitlab::Database::ShaAttribute.serialize("ba3343bc4fa403a8dfbfcab7fc1a8c29ee34bd69")
+ "sha": Gitlab::Database::ShaAttribute.serialize("ba3343bc4fa403a8dfbfcab7fc1a8c29ee34bd69"),
+ "trailers": {}.to_json
}]
end
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index a5493d1650b..83024c587fa 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -16,6 +16,8 @@ RSpec.describe MergeRequestDiff do
describe 'validations' do
subject { diff_with_commits }
+ it { is_expected.not_to validate_uniqueness_of(:diff_type).scoped_to(:merge_request_id) }
+
it 'checks sha format of base_commit_sha, head_commit_sha and start_commit_sha' do
subject.base_commit_sha = subject.head_commit_sha = subject.start_commit_sha = 'foobar'
@@ -23,6 +25,24 @@ RSpec.describe MergeRequestDiff do
expect(subject.errors.count).to eq 3
expect(subject.errors).to all(include('is not a valid SHA'))
end
+
+ it 'does not validate uniqueness by default' do
+ expect(build(:merge_request_diff, merge_request: subject.merge_request)).to be_valid
+ end
+
+ context 'when merge request diff is a merge_head type' do
+ it 'is valid' do
+ expect(build(:merge_request_diff, :merge_head, merge_request: subject.merge_request)).to be_valid
+ end
+
+ context 'when merge_head diff exists' do
+ let(:existing_merge_head_diff) { create(:merge_request_diff, :merge_head) }
+
+ it 'validates uniqueness' do
+ expect(build(:merge_request_diff, :merge_head, merge_request: existing_merge_head_diff.merge_request)).not_to be_valid
+ end
+ end
+ end
end
describe 'create new record' do
@@ -35,6 +55,26 @@ RSpec.describe MergeRequestDiff do
it { expect(subject.head_commit_sha).to eq('b83d6e391c22777fca1ed3012fce84f633d7fed0') }
it { expect(subject.base_commit_sha).to eq('ae73cb07c9eeaf35924a10f713b364d32b2dd34f') }
it { expect(subject.start_commit_sha).to eq('0b4bc9a49b562e85de7cc9e834518ea6828729b9') }
+
+ context 'when diff_type is merge_head' do
+ let_it_be(:merge_request) { create(:merge_request) }
+
+ let_it_be(:merge_head) do
+ MergeRequests::MergeToRefService
+ .new(merge_request.project, merge_request.author)
+ .execute(merge_request)
+
+ merge_request.create_merge_head_diff
+ end
+
+ it { expect(merge_head).to be_valid }
+ it { expect(merge_head).to be_persisted }
+ it { expect(merge_head.commits.count).to eq(30) }
+ it { expect(merge_head.diffs.count).to eq(20) }
+ it { expect(merge_head.head_commit_sha).to eq(merge_request.merge_ref_head.diff_refs.head_sha) }
+ it { expect(merge_head.base_commit_sha).to eq(merge_request.merge_ref_head.diff_refs.base_sha) }
+ it { expect(merge_head.start_commit_sha).to eq(merge_request.target_branch_sha) }
+ end
end
describe '.by_commit_sha' do
@@ -63,6 +103,7 @@ RSpec.describe MergeRequestDiff do
let_it_be(:merge_request) { create(:merge_request) }
let_it_be(:outdated) { merge_request.merge_request_diff }
let_it_be(:latest) { merge_request.create_merge_request_diff }
+ let_it_be(:merge_head) { merge_request.create_merge_head_diff }
let_it_be(:closed_mr) { create(:merge_request, :closed_last_month) }
let(:closed) { closed_mr.merge_request_diff }
@@ -103,14 +144,14 @@ RSpec.describe MergeRequestDiff do
stub_external_diffs_setting(enabled: true)
end
- it { is_expected.to contain_exactly(outdated.id, latest.id, closed.id, merged.id, closed_recently.id, merged_recently.id) }
+ it { is_expected.to contain_exactly(outdated.id, latest.id, closed.id, merged.id, closed_recently.id, merged_recently.id, merge_head.id) }
it 'ignores diffs with 0 files' do
MergeRequestDiffFile.where(merge_request_diff_id: [closed_recently.id, merged_recently.id]).delete_all
closed_recently.update!(files_count: 0)
merged_recently.update!(files_count: 0)
- is_expected.to contain_exactly(outdated.id, latest.id, closed.id, merged.id)
+ is_expected.to contain_exactly(outdated.id, latest.id, closed.id, merged.id, merge_head.id)
end
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 1cf197322f5..aef3aa34425 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -271,8 +271,6 @@ RSpec.describe MergeRequest, factory_default: :keep do
stub_feature_flags(stricter_mr_branch_name: false)
end
- using RSpec::Parameterized::TableSyntax
-
where(:branch_name, :valid) do
'foo' | true
'foo:bar' | false
@@ -451,6 +449,17 @@ RSpec.describe MergeRequest, factory_default: :keep do
it { is_expected.to be_empty }
end
+
+ context 'when commit is part of the merge request and a squash commit at the same time' do
+ let!(:merge_request) { create(:merge_request, :with_diffs) }
+ let(:sha) { merge_request.commits.first.id }
+
+ before do
+ merge_request.update!(squash_commit_sha: sha)
+ end
+
+ it { is_expected.to eq([merge_request]) }
+ end
end
describe '.by_cherry_pick_sha' do
@@ -480,6 +489,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
create(:merge_request, params).tap do |mr|
diffs.times { mr.merge_request_diffs.create }
+ mr.create_merge_head_diff
end
end
@@ -705,6 +715,10 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
context 'when external issue tracker is enabled' do
+ let(:project) { create(:project, :repository) }
+
+ subject { create(:merge_request, source_project: project) }
+
before do
subject.project.has_external_issue_tracker = true
subject.project.save!
@@ -754,9 +768,8 @@ RSpec.describe MergeRequest, factory_default: :keep do
context 'when both internal and external issue trackers are enabled' do
before do
- subject.project.has_external_issue_tracker = true
- subject.project.save!
create(:jira_service, project: subject.project)
+ subject.project.reload
end
it 'does not cache issues from external trackers' do
@@ -779,6 +792,10 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
context 'when only external issue tracker enabled' do
+ let(:project) { create(:project, :repository) }
+
+ subject { create(:merge_request, source_project: project) }
+
before do
subject.project.has_external_issue_tracker = true
subject.project.issues_enabled = false
@@ -1263,7 +1280,6 @@ RSpec.describe MergeRequest, factory_default: :keep do
describe '#issues_mentioned_but_not_closing' do
let(:closing_issue) { create :issue, project: subject.project }
let(:mentioned_issue) { create :issue, project: subject.project }
-
let(:commit) { double('commit', safe_message: "Fixes #{closing_issue.to_reference}") }
it 'detects issues mentioned in description but not closed' do
@@ -1279,13 +1295,12 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
context 'when the project has an external issue tracker' do
- subject { create(:merge_request, source_project: create(:project, :repository)) }
-
before do
subject.project.add_developer(subject.author)
commit = double(:commit, safe_message: 'Fixes TEST-3')
create(:jira_service, project: subject.project)
+ subject.project.reload
allow(subject).to receive(:commits).and_return([commit])
allow(subject).to receive(:description).and_return('Is related to TEST-2 and TEST-3')
@@ -1645,7 +1660,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
it_behaves_like 'an editable mentionable' do
- subject { create(:merge_request, :simple) }
+ subject { create(:merge_request, :simple, source_project: create(:project, :repository)) }
let(:backref_text) { "merge request #{subject.to_reference}" }
let(:set_mentionable_text) { ->(txt) { subject.description = txt } }
@@ -1971,6 +1986,30 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
+ describe '#has_codequality_mr_diff_report?' do
+ subject { merge_request.has_codequality_mr_diff_report? }
+
+ context 'when head pipeline has codequality mr diff report' do
+ let(:merge_request) { create(:merge_request, :with_codequality_mr_diff_reports) }
+
+ it { is_expected.to be_truthy }
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(codequality_mr_diff: false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'when head pipeline does not have codequality mr diff report' do
+ let(:merge_request) { create(:merge_request) }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
describe '#has_codequality_reports?' do
subject { merge_request.has_codequality_reports? }
@@ -1983,7 +2022,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
context 'when feature flag is disabled' do
before do
- stub_feature_flags(codequality_mr_diff: false)
+ stub_feature_flags(codequality_backend_comparison: false)
end
it { is_expected.to be_falsey }
@@ -2144,6 +2183,54 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
+ describe '#find_codequality_mr_diff_reports' do
+ let(:project) { create(:project, :repository) }
+ let(:merge_request) { create(:merge_request, :with_codequality_mr_diff_reports, source_project: project) }
+ let(:pipeline) { merge_request.head_pipeline }
+
+ subject(:mr_diff_report) { merge_request.find_codequality_mr_diff_reports }
+
+ context 'when head pipeline has codequality mr diff reports' do
+ context 'when reactive cache worker is parsing results asynchronously' do
+ it 'returns status' do
+ expect(mr_diff_report[:status]).to eq(:parsing)
+ end
+ end
+
+ context 'when reactive cache worker is inline' do
+ before do
+ synchronous_reactive_cache(merge_request)
+ end
+
+ it 'returns status and data' do
+ expect(mr_diff_report[:status]).to eq(:parsed)
+ end
+
+ context 'when an error occurs' do
+ before do
+ merge_request.update!(head_pipeline: nil)
+ end
+
+ it 'returns an error message' do
+ expect(mr_diff_report[:status]).to eq(:error)
+ end
+ end
+
+ context 'when cached results are not the latest' do
+ before do
+ allow_next_instance_of(Ci::GenerateCodequalityMrDiffReportService) do |service|
+ allow(service).to receive(:latest?).and_return(false)
+ end
+ end
+
+ it 'raises an InvalidateReactiveCache error' do
+ expect { mr_diff_report }.to raise_error(ReactiveCaching::InvalidateReactiveCache)
+ end
+ end
+ end
+ end
+ end
+
describe '#compare_test_reports' do
subject { merge_request.compare_test_reports }
@@ -2765,8 +2852,6 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
context 'with skip_ci_check option' do
- using RSpec::Parameterized::TableSyntax
-
before do
allow(subject).to receive_messages(check_mergeability: nil,
can_be_merged?: true,
@@ -2790,8 +2875,6 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
context 'with skip_discussions_check option' do
- using RSpec::Parameterized::TableSyntax
-
before do
allow(subject).to receive_messages(mergeable_ci_state?: true,
check_mergeability: nil,
@@ -3345,6 +3428,10 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
context 'when resolve_outdated_diff_discussions is set' do
+ let(:project) { create(:project, :repository) }
+
+ subject { create(:merge_request, source_project: project) }
+
before do
discussion
@@ -3365,7 +3452,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
describe '#branch_merge_base_commit' do
let(:project) { create(:project, :repository) }
- subject { create(:merge_request, :with_diffs, source_project: project) }
+ subject { create(:merge_request, source_project: project) }
context 'source and target branch exist' do
it { expect(subject.branch_merge_base_commit.sha).to eq('ae73cb07c9eeaf35924a10f713b364d32b2dd34f') }
@@ -3388,7 +3475,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
context "with diffs" do
let(:project) { create(:project, :repository) }
- subject { create(:merge_request, :with_diffs, source_project: project) }
+ subject { create(:merge_request, source_project: project) }
let(:expected_diff_refs) do
Gitlab::Diff::DiffRefs.new(
@@ -3792,7 +3879,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
describe '#fetch_ref!' do
let(:project) { create(:project, :repository) }
- subject { create(:merge_request, :with_diffs, source_project: project) }
+ subject { create(:merge_request, source_project: project) }
it 'fetches the ref correctly' do
expect { subject.target_project.repository.delete_refs(subject.ref_path) }.not_to raise_error
@@ -4367,37 +4454,41 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
describe '#diffable_merge_ref?' do
+ let(:merge_request) { create(:merge_request) }
+
context 'merge request can be merged' do
- context 'merge_to_ref is not calculated' do
+ context 'merge_head diff is not created' do
it 'returns false' do
- expect(subject.diffable_merge_ref?).to eq(false)
+ expect(merge_request.diffable_merge_ref?).to eq(false)
end
end
- context 'merge_to_ref is calculated' do
+ context 'merge_head diff is created' do
before do
- MergeRequests::MergeToRefService.new(subject.project, subject.author).execute(subject)
+ create(:merge_request_diff, :merge_head, merge_request: merge_request)
end
it 'returns true' do
- expect(subject.diffable_merge_ref?).to eq(true)
+ expect(merge_request.diffable_merge_ref?).to eq(true)
end
context 'merge request is merged' do
- subject { build_stubbed(:merge_request, :merged, project: project) }
+ before do
+ merge_request.mark_as_merged!
+ end
it 'returns false' do
- expect(subject.diffable_merge_ref?).to eq(false)
+ expect(merge_request.diffable_merge_ref?).to eq(false)
end
end
context 'merge request cannot be merged' do
before do
- subject.mark_as_unchecked!
+ merge_request.mark_as_unchecked!
end
it 'returns true' do
- expect(subject.diffable_merge_ref?).to eq(true)
+ expect(merge_request.diffable_merge_ref?).to eq(true)
end
context 'display_merge_conflicts_in_diff is disabled' do
@@ -4406,7 +4497,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
it 'returns false' do
- expect(subject.diffable_merge_ref?).to eq(false)
+ expect(merge_request.diffable_merge_ref?).to eq(false)
end
end
end
diff --git a/spec/models/onboarding_progress_spec.rb b/spec/models/onboarding_progress_spec.rb
index bd951846bb8..02fe0015a14 100644
--- a/spec/models/onboarding_progress_spec.rb
+++ b/spec/models/onboarding_progress_spec.rb
@@ -29,6 +29,67 @@ RSpec.describe OnboardingProgress do
end
end
+ describe 'scopes' do
+ describe '.incomplete_actions' do
+ subject { described_class.incomplete_actions(actions) }
+
+ let!(:no_actions_completed) { create(:onboarding_progress) }
+ let!(:one_action_completed_one_action_incompleted) { create(:onboarding_progress, "#{action}_at" => Time.current) }
+
+ context 'when given one action' do
+ let(:actions) { action }
+
+ it { is_expected.to eq [no_actions_completed] }
+ end
+
+ context 'when given an array of actions' do
+ let(:actions) { [action, :git_write] }
+
+ it { is_expected.to eq [no_actions_completed] }
+ end
+ end
+
+ describe '.completed_actions' do
+ subject { described_class.completed_actions(actions) }
+
+ let!(:one_action_completed_one_action_incompleted) { create(:onboarding_progress, "#{action}_at" => Time.current) }
+ let!(:both_actions_completed) { create(:onboarding_progress, "#{action}_at" => Time.current, git_write_at: Time.current) }
+
+ context 'when given one action' do
+ let(:actions) { action }
+
+ it { is_expected.to eq [one_action_completed_one_action_incompleted, both_actions_completed] }
+ end
+
+ context 'when given an array of actions' do
+ let(:actions) { [action, :git_write] }
+
+ it { is_expected.to eq [both_actions_completed] }
+ end
+ end
+
+ describe '.completed_actions_with_latest_in_range' do
+ subject { described_class.completed_actions_with_latest_in_range(actions, 1.day.ago.beginning_of_day..1.day.ago.end_of_day) }
+
+ let!(:one_action_completed_in_range_one_action_incompleted) { create(:onboarding_progress, "#{action}_at" => 1.day.ago.middle_of_day) }
+ let!(:git_write_action_completed_in_range) { create(:onboarding_progress, git_write_at: 1.day.ago.middle_of_day) }
+ let!(:both_actions_completed_latest_action_out_of_range) { create(:onboarding_progress, "#{action}_at" => 1.day.ago.middle_of_day, git_write_at: Time.current) }
+ let!(:both_actions_completed_latest_action_in_range) { create(:onboarding_progress, "#{action}_at" => 1.day.ago.middle_of_day, git_write_at: 2.days.ago.middle_of_day) }
+
+ context 'when given one action' do
+ let(:actions) { :git_write }
+
+ it { is_expected.to eq [git_write_action_completed_in_range] }
+ end
+
+ context 'when given an array of actions' do
+ let(:actions) { [action, :git_write] }
+
+ it { is_expected.to eq [both_actions_completed_latest_action_in_range] }
+ end
+ end
+ end
+
describe '.onboard' do
subject(:onboard) { described_class.onboard(namespace) }
@@ -104,4 +165,10 @@ RSpec.describe OnboardingProgress do
end
end
end
+
+ describe '.column_name' do
+ subject { described_class.column_name(action) }
+
+ it { is_expected.to eq(:subscription_created_at) }
+ end
end
diff --git a/spec/models/packages/composer/cache_file_spec.rb b/spec/models/packages/composer/cache_file_spec.rb
new file mode 100644
index 00000000000..ef9818f0930
--- /dev/null
+++ b/spec/models/packages/composer/cache_file_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Composer::CacheFile, type: :model do
+ describe 'relationships' do
+ it { is_expected.to belong_to(:group) }
+ it { is_expected.to belong_to(:namespace) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:namespace) }
+ end
+
+ describe 'scopes' do
+ let_it_be(:group1) { create(:group) }
+ let_it_be(:group2) { create(:group) }
+ let_it_be(:cache_file1) { create(:composer_cache_file, file_sha256: '123456', group: group1) }
+ let_it_be(:cache_file2) { create(:composer_cache_file, file_sha256: '456778', group: group2) }
+
+ describe '.with_namespace' do
+ subject { described_class.with_namespace(group1) }
+
+ it { is_expected.to eq [cache_file1] }
+ end
+
+ describe '.with_sha' do
+ subject { described_class.with_sha('123456') }
+
+ it { is_expected.to eq [cache_file1] }
+ end
+ end
+end
diff --git a/spec/models/packages/composer/metadatum_spec.rb b/spec/models/packages/composer/metadatum_spec.rb
index ae53532696b..1c888f1563c 100644
--- a/spec/models/packages/composer/metadatum_spec.rb
+++ b/spec/models/packages/composer/metadatum_spec.rb
@@ -11,4 +11,20 @@ RSpec.describe Packages::Composer::Metadatum, type: :model do
it { is_expected.to validate_presence_of(:target_sha) }
it { is_expected.to validate_presence_of(:composer_json) }
end
+
+ describe 'scopes' do
+ let_it_be(:package_name) { 'sample-project' }
+ let_it_be(:json) { { 'name' => package_name } }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :custom_repo, files: { 'composer.json' => json.to_json }, group: group) }
+ let_it_be(:package1) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '1.0.0', json: json) }
+ let_it_be(:package2) { create(:composer_package, :with_metadatum, project: project, name: 'other-name', version: '1.0.0', json: json) }
+ let_it_be(:package3) { create(:pypi_package, name: package_name, project: project) }
+
+ describe '.for_package' do
+ subject { described_class.for_package(package_name, project.id) }
+
+ it { is_expected.to eq [package1.composer_metadatum] }
+ end
+ end
end
diff --git a/spec/models/packages/debian/group_component_spec.rb b/spec/models/packages/debian/group_component_spec.rb
new file mode 100644
index 00000000000..f288ebbe5df
--- /dev/null
+++ b/spec/models/packages/debian/group_component_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::GroupComponent do
+ it_behaves_like 'Debian Distribution Component', :debian_group_component, :group, false
+end
diff --git a/spec/models/packages/debian/project_component_spec.rb b/spec/models/packages/debian/project_component_spec.rb
new file mode 100644
index 00000000000..4b041068b8d
--- /dev/null
+++ b/spec/models/packages/debian/project_component_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::ProjectComponent do
+ it_behaves_like 'Debian Distribution Component', :debian_project_component, :project, true
+end
diff --git a/spec/models/pages/lookup_path_spec.rb b/spec/models/pages/lookup_path_spec.rb
index 30712af6b32..d95a33aabcc 100644
--- a/spec/models/pages/lookup_path_spec.rb
+++ b/spec/models/pages/lookup_path_spec.rb
@@ -56,6 +56,15 @@ RSpec.describe Pages::LookupPath do
include_examples 'uses disk storage'
+ it 'returns nil when legacy storage is disabled and there is no deployment' do
+ stub_feature_flags(pages_serve_from_legacy_storage: false)
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(described_class::LegacyStorageDisabledError)
+ .and_call_original
+
+ expect(source).to eq(nil)
+ end
+
context 'when there is pages deployment' do
let(:deployment) { create(:pages_deployment, project: project) }
@@ -116,6 +125,44 @@ RSpec.describe Pages::LookupPath do
include_examples 'uses disk storage'
end
end
+
+ context 'when deployment was created during migration' do
+ before do
+ FileUtils.mkdir_p File.join(project.pages_path, "public")
+ File.open(File.join(project.pages_path, "public/index.html"), "w") do |f|
+ f.write("Hello!")
+ end
+
+ expect(::Pages::MigrateLegacyStorageToDeploymentService.new(project).execute[:status]).to eq(:success)
+
+ project.reload
+ end
+
+ let(:deployment) { project.pages_metadatum.pages_deployment }
+
+ it 'uses deployment from object storage' do
+ freeze_time do
+ expect(source).to(
+ eq({
+ type: 'zip',
+ path: deployment.file.url(expire_at: 1.day.from_now),
+ global_id: "gid://gitlab/PagesDeployment/#{deployment.id}",
+ sha256: deployment.file_sha256,
+ file_size: deployment.size,
+ file_count: deployment.file_count
+ })
+ )
+ end
+ end
+
+ context 'when pages_serve_from_migrated_zip feature flag is disabled' do
+ before do
+ stub_feature_flags(pages_serve_from_migrated_zip: false)
+ end
+
+ include_examples 'uses disk storage'
+ end
+ end
end
describe '#prefix' do
diff --git a/spec/models/pages/virtual_domain_spec.rb b/spec/models/pages/virtual_domain_spec.rb
index 38f5f4d2538..29c14cbeb3e 100644
--- a/spec/models/pages/virtual_domain_spec.rb
+++ b/spec/models/pages/virtual_domain_spec.rb
@@ -26,31 +26,34 @@ RSpec.describe Pages::VirtualDomain do
describe '#lookup_paths' do
let(:project_a) { instance_double(Project) }
- let(:project_z) { instance_double(Project) }
- let(:pages_lookup_path_a) { instance_double(Pages::LookupPath, prefix: 'aaa') }
- let(:pages_lookup_path_z) { instance_double(Pages::LookupPath, prefix: 'zzz') }
+ let(:project_b) { instance_double(Project) }
+ let(:project_c) { instance_double(Project) }
+ let(:pages_lookup_path_a) { instance_double(Pages::LookupPath, prefix: 'aaa', source: { type: 'zip', path: 'https://example.com' }) }
+ let(:pages_lookup_path_b) { instance_double(Pages::LookupPath, prefix: 'bbb', source: { type: 'zip', path: 'https://example.com' }) }
+ let(:pages_lookup_path_without_source) { instance_double(Pages::LookupPath, prefix: 'ccc', source: nil) }
context 'when there is pages domain provided' do
let(:domain) { instance_double(PagesDomain) }
- subject(:virtual_domain) { described_class.new([project_a, project_z], domain: domain) }
+ subject(:virtual_domain) { described_class.new([project_a, project_b, project_c], domain: domain) }
it 'returns collection of projects pages lookup paths sorted by prefix in reverse' do
expect(project_a).to receive(:pages_lookup_path).with(domain: domain, trim_prefix: nil).and_return(pages_lookup_path_a)
- expect(project_z).to receive(:pages_lookup_path).with(domain: domain, trim_prefix: nil).and_return(pages_lookup_path_z)
+ expect(project_b).to receive(:pages_lookup_path).with(domain: domain, trim_prefix: nil).and_return(pages_lookup_path_b)
+ expect(project_c).to receive(:pages_lookup_path).with(domain: domain, trim_prefix: nil).and_return(pages_lookup_path_without_source)
- expect(virtual_domain.lookup_paths).to eq([pages_lookup_path_z, pages_lookup_path_a])
+ expect(virtual_domain.lookup_paths).to eq([pages_lookup_path_b, pages_lookup_path_a])
end
end
context 'when there is trim_prefix provided' do
- subject(:virtual_domain) { described_class.new([project_a, project_z], trim_prefix: 'group/') }
+ subject(:virtual_domain) { described_class.new([project_a, project_b], trim_prefix: 'group/') }
it 'returns collection of projects pages lookup paths sorted by prefix in reverse' do
expect(project_a).to receive(:pages_lookup_path).with(trim_prefix: 'group/', domain: nil).and_return(pages_lookup_path_a)
- expect(project_z).to receive(:pages_lookup_path).with(trim_prefix: 'group/', domain: nil).and_return(pages_lookup_path_z)
+ expect(project_b).to receive(:pages_lookup_path).with(trim_prefix: 'group/', domain: nil).and_return(pages_lookup_path_b)
- expect(virtual_domain.lookup_paths).to eq([pages_lookup_path_z, pages_lookup_path_a])
+ expect(virtual_domain.lookup_paths).to eq([pages_lookup_path_b, pages_lookup_path_a])
end
end
end
diff --git a/spec/models/project_services/chat_notification_service_spec.rb b/spec/models/project_services/chat_notification_service_spec.rb
index 77a1377c138..476d99364b6 100644
--- a/spec/models/project_services/chat_notification_service_spec.rb
+++ b/spec/models/project_services/chat_notification_service_spec.rb
@@ -75,6 +75,39 @@ RSpec.describe ChatNotificationService do
end
end
+ context 'when the data object has a label' do
+ let(:label) { create(:label, project: project, name: 'Bug') }
+ let(:issue) { create(:labeled_issue, project: project, labels: [label]) }
+ let(:note) { create(:note, noteable: issue, project: project) }
+ let(:data) { Gitlab::DataBuilder::Note.build(note, user) }
+
+ it 'notifies the chat service' do
+ expect(chat_service).to receive(:notify).with(any_args)
+
+ chat_service.execute(data)
+ end
+
+ context 'and the chat_service has a label filter that does not match the label' do
+ subject(:chat_service) { described_class.new(labels_to_be_notified: '~some random label') }
+
+ it 'does not notify the chat service' do
+ expect(chat_service).not_to receive(:notify)
+
+ chat_service.execute(data)
+ end
+ end
+
+ context 'and the chat_service has a label filter that matches the label' do
+ subject(:chat_service) { described_class.new(labels_to_be_notified: '~Backend, ~Bug') }
+
+ it 'notifies the chat service' do
+ expect(chat_service).to receive(:notify).with(any_args)
+
+ chat_service.execute(data)
+ end
+ end
+ end
+
context 'with "channel" property' do
before do
allow(chat_service).to receive(:channel).and_return(channel)
diff --git a/spec/models/project_services/confluence_service_spec.rb b/spec/models/project_services/confluence_service_spec.rb
index 5d153b17070..6c7ba2c9f32 100644
--- a/spec/models/project_services/confluence_service_spec.rb
+++ b/spec/models/project_services/confluence_service_spec.rb
@@ -43,13 +43,13 @@ RSpec.describe ConfluenceService do
end
end
- describe '#detailed_description' do
+ describe '#help' do
it 'can correctly return a link to the project wiki when active' do
project = create(:project)
subject.project = project
subject.active = true
- expect(subject.detailed_description).to include(Gitlab::Routing.url_helpers.project_wikis_url(project))
+ expect(subject.help).to include(Gitlab::Routing.url_helpers.project_wikis_url(project))
end
context 'when the project wiki is not enabled' do
@@ -60,7 +60,7 @@ RSpec.describe ConfluenceService do
[true, false].each do |active|
subject.active = active
- expect(subject.detailed_description).to be_nil
+ expect(subject.help).to be_nil
end
end
end
diff --git a/spec/models/project_services/datadog_service_spec.rb b/spec/models/project_services/datadog_service_spec.rb
index 1d9f49e4824..d15ea1f351b 100644
--- a/spec/models/project_services/datadog_service_spec.rb
+++ b/spec/models/project_services/datadog_service_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe DatadogService, :model do
let(:active) { true }
let(:dd_site) { 'datadoghq.com' }
let(:default_url) { 'https://webhooks-http-intake.logs.datadoghq.com/v1/input/' }
- let(:api_url) { nil }
+ let(:api_url) { '' }
let(:api_key) { SecureRandom.hex(32) }
let(:dd_env) { 'ci' }
let(:dd_service) { 'awesome-gitlab' }
@@ -22,13 +22,11 @@ RSpec.describe DatadogService, :model do
described_class.new(
active: active,
project: project,
- properties: {
- datadog_site: dd_site,
- api_url: api_url,
- api_key: api_key,
- datadog_env: dd_env,
- datadog_service: dd_service
- }
+ datadog_site: dd_site,
+ api_url: api_url,
+ api_key: api_key,
+ datadog_env: dd_env,
+ datadog_service: dd_service
)
end
@@ -58,7 +56,7 @@ RSpec.describe DatadogService, :model do
context 'when selecting site' do
let(:dd_site) { 'datadoghq.com' }
- let(:api_url) { nil }
+ let(:api_url) { '' }
it { is_expected.to validate_presence_of(:datadog_site) }
it { is_expected.not_to validate_presence_of(:api_url) }
@@ -66,7 +64,7 @@ RSpec.describe DatadogService, :model do
end
context 'with custom api_url' do
- let(:dd_site) { nil }
+ let(:dd_site) { '' }
let(:api_url) { 'https://webhooks-http-intake.logs.datad0g.com/v1/input/' }
it { is_expected.not_to validate_presence_of(:datadog_site) }
@@ -76,13 +74,21 @@ RSpec.describe DatadogService, :model do
end
context 'when missing site and api_url' do
- let(:dd_site) { nil }
- let(:api_url) { nil }
+ let(:dd_site) { '' }
+ let(:api_url) { '' }
it { is_expected.not_to be_valid }
it { is_expected.to validate_presence_of(:datadog_site) }
it { is_expected.to validate_presence_of(:api_url) }
end
+
+ context 'when providing both site and api_url' do
+ let(:dd_site) { 'datadoghq.com' }
+ let(:api_url) { default_url }
+
+ it { is_expected.not_to allow_value('datadog hq.com').for(:datadog_site) }
+ it { is_expected.not_to allow_value('example.com').for(:api_url) }
+ end
end
context 'when service is not active' do
@@ -113,8 +119,8 @@ RSpec.describe DatadogService, :model do
end
context 'without optional params' do
- let(:dd_service) { nil }
- let(:dd_env) { nil }
+ let(:dd_service) { '' }
+ let(:dd_env) { '' }
it { is_expected.to eq(default_url + api_key) }
end
@@ -126,7 +132,7 @@ RSpec.describe DatadogService, :model do
it { is_expected.to eq("https://app.#{dd_site}/account/settings#api") }
context 'with unset datadog_site' do
- let(:dd_site) { nil }
+ let(:dd_site) { '' }
it { is_expected.to eq("https://docs.datadoghq.com/account_management/api-app-keys/") }
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index a2b51684d4d..02ebf9447c0 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -21,6 +21,7 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_many(:services) }
it { is_expected.to have_many(:events) }
it { is_expected.to have_many(:merge_requests) }
+ it { is_expected.to have_many(:merge_request_metrics).class_name('MergeRequest::Metrics') }
it { is_expected.to have_many(:issues) }
it { is_expected.to have_many(:milestones) }
it { is_expected.to have_many(:iterations) }
@@ -1002,103 +1003,125 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '#external_issue_tracker' do
- let(:project) { create(:project) }
- let(:ext_project) { create(:redmine_project) }
+ describe '#has_wiki?' do
+ let(:no_wiki_project) { create(:project, :wiki_disabled, has_external_wiki: false) }
+ let(:wiki_enabled_project) { create(:project) }
+ let(:external_wiki_project) { create(:project, has_external_wiki: true) }
- context 'on existing projects with no value for has_external_issue_tracker' do
- before do
- project.update_column(:has_external_issue_tracker, nil)
- ext_project.update_column(:has_external_issue_tracker, nil)
+ it 'returns true if project is wiki enabled or has external wiki' do
+ expect(wiki_enabled_project).to have_wiki
+ expect(external_wiki_project).to have_wiki
+ expect(no_wiki_project).not_to have_wiki
+ end
+ end
+
+ describe '#default_owner' do
+ let_it_be(:owner) { create(:user) }
+ let_it_be(:namespace) { create(:namespace, owner: owner) }
+
+ context 'the project does not have a group' do
+ let(:project) { build(:project, namespace: namespace) }
+
+ it 'is the namespace owner' do
+ expect(project.default_owner).to eq(owner)
end
+ end
- it 'updates the has_external_issue_tracker boolean' do
- expect do
- project.external_issue_tracker
- end.to change { project.reload.has_external_issue_tracker }.to(false)
+ context 'the project is in a group' do
+ let(:group) { build(:group) }
+ let(:project) { build(:project, group: group, namespace: namespace) }
- expect do
- ext_project.external_issue_tracker
- end.to change { ext_project.reload.has_external_issue_tracker }.to(true)
+ it 'is the group owner' do
+ allow(group).to receive(:default_owner).and_return(Object.new)
+
+ expect(project.default_owner).to eq(group.default_owner)
end
end
+ end
+
+ describe '#external_issue_tracker' do
+ it 'sets Project#has_external_issue_tracker when it is nil' do
+ project_with_no_tracker = create(:project, has_external_issue_tracker: nil)
+ project_with_tracker = create(:redmine_project, has_external_issue_tracker: nil)
+
+ expect do
+ project_with_no_tracker.external_issue_tracker
+ end.to change { project_with_no_tracker.reload.has_external_issue_tracker }.from(nil).to(false)
+
+ expect do
+ project_with_tracker.external_issue_tracker
+ end.to change { project_with_tracker.reload.has_external_issue_tracker }.from(nil).to(true)
+ end
it 'returns nil and does not query services when there is no external issue tracker' do
- expect(project).not_to receive(:services)
+ project = create(:project)
+ expect(project).not_to receive(:services)
expect(project.external_issue_tracker).to eq(nil)
end
it 'retrieves external_issue_tracker querying services and cache it when there is external issue tracker' do
- ext_project.reload # Factory returns a project with changed attributes
- expect(ext_project).to receive(:services).once.and_call_original
+ project = create(:redmine_project)
- 2.times { expect(ext_project.external_issue_tracker).to be_a_kind_of(RedmineService) }
+ expect(project).to receive(:services).once.and_call_original
+ 2.times { expect(project.external_issue_tracker).to be_a_kind_of(RedmineService) }
end
end
- describe '#cache_has_external_issue_tracker' do
- let_it_be(:project) { create(:project, has_external_issue_tracker: nil) }
-
- it 'stores true if there is any external_issue_tracker' do
- services = double(:service, external_issue_trackers: [RedmineService.new])
- expect(project).to receive(:services).and_return(services)
+ describe '#has_external_issue_tracker' do
+ let_it_be(:project) { create(:project) }
- expect do
- project.cache_has_external_issue_tracker
- end.to change { project.has_external_issue_tracker}.to(true)
+ def subject
+ project.reload.has_external_issue_tracker
end
- it 'stores false if there is no external_issue_tracker' do
- services = double(:service, external_issue_trackers: [])
- expect(project).to receive(:services).and_return(services)
+ it 'is false when external issue tracker service is not active' do
+ create(:service, project: project, category: 'issue_tracker', active: false)
- expect do
- project.cache_has_external_issue_tracker
- end.to change { project.has_external_issue_tracker}.to(false)
+ is_expected.to eq(false)
end
- it 'does not cache data when in a read-only GitLab instance' do
- allow(Gitlab::Database).to receive(:read_only?) { true }
+ it 'is false when other service is active' do
+ create(:service, project: project, category: 'not_issue_tracker', active: true)
- expect do
- project.cache_has_external_issue_tracker
- end.not_to change { project.has_external_issue_tracker }
+ is_expected.to eq(false)
end
- end
-
- describe '#has_wiki?' do
- let(:no_wiki_project) { create(:project, :wiki_disabled, has_external_wiki: false) }
- let(:wiki_enabled_project) { create(:project) }
- let(:external_wiki_project) { create(:project, has_external_wiki: true) }
- it 'returns true if project is wiki enabled or has external wiki' do
- expect(wiki_enabled_project).to have_wiki
- expect(external_wiki_project).to have_wiki
- expect(no_wiki_project).not_to have_wiki
- end
- end
+ context 'when there is an active external issue tracker service' do
+ let!(:service) do
+ create(:service, project: project, type: 'JiraService', category: 'issue_tracker', active: true)
+ end
- describe '#default_owner' do
- let_it_be(:owner) { create(:user) }
- let_it_be(:namespace) { create(:namespace, owner: owner) }
+ specify { is_expected.to eq(true) }
- context 'the project does not have a group' do
- let(:project) { build(:project, namespace: namespace) }
+ it 'becomes false when external issue tracker service is destroyed' do
+ expect do
+ Service.find(service.id).delete
+ end.to change { subject }.to(false)
+ end
- it 'is the namespace owner' do
- expect(project.default_owner).to eq(owner)
+ it 'becomes false when external issue tracker service becomes inactive' do
+ expect do
+ service.update_column(:active, false)
+ end.to change { subject }.to(false)
end
- end
- context 'the project is in a group' do
- let(:group) { build(:group) }
- let(:project) { build(:project, group: group, namespace: namespace) }
+ context 'when there are two active external issue tracker services' do
+ let_it_be(:second_service) do
+ create(:service, project: project, type: 'CustomIssueTracker', category: 'issue_tracker', active: true)
+ end
- it 'is the group owner' do
- allow(group).to receive(:default_owner).and_return(Object.new)
+ it 'does not become false when external issue tracker service is destroyed' do
+ expect do
+ Service.find(service.id).delete
+ end.not_to change { subject }
+ end
- expect(project.default_owner).to eq(group.default_owner)
+ it 'does not become false when external issue tracker service becomes inactive' do
+ expect do
+ service.update_column(:active, false)
+ end.not_to change { subject }
+ end
end
end
end
@@ -1234,7 +1257,7 @@ RSpec.describe Project, factory_default: :keep do
it 'is false if avatar is html page' do
project.update_attribute(:avatar, 'uploads/avatar.html')
- expect(project.avatar_type).to eq(['file format is not supported. Please try one of the following supported formats: png, jpg, jpeg, gif, bmp, tiff, ico'])
+ expect(project.avatar_type).to eq(['file format is not supported. Please try one of the following supported formats: png, jpg, jpeg, gif, bmp, tiff, ico, webp'])
end
end
@@ -1529,10 +1552,7 @@ RSpec.describe Project, factory_default: :keep do
let(:project) { build(:project) }
it 'picks storage from ApplicationSetting' do
- expect_next_instance_of(ApplicationSetting) do |instance|
- expect(instance).to receive(:pick_repository_storage).and_return('picked')
- end
- expect(described_class).to receive(:pick_repository_storage).and_call_original
+ expect(Repository).to receive(:pick_storage_shard).and_return('picked')
expect(project.repository_storage).to eq('picked')
end
@@ -2227,8 +2247,6 @@ RSpec.describe Project, factory_default: :keep do
end
describe '#ci_config_path=' do
- using RSpec::Parameterized::TableSyntax
-
let(:project) { build_stubbed(:project) }
where(:default_ci_config_path, :project_ci_config_path, :expected_ci_config_path) do
@@ -2980,6 +2998,7 @@ RSpec.describe Project, factory_default: :keep do
it_behaves_like 'can housekeep repository' do
let(:resource) { build_stubbed(:project) }
let(:resource_key) { 'projects' }
+ let(:expected_worker_class) { Projects::GitGarbageCollectWorker }
end
describe '#deployment_variables' do
@@ -3926,7 +3945,6 @@ RSpec.describe Project, factory_default: :keep do
describe '.filter_by_feature_visibility' do
include_context 'ProjectPolicyTable context'
include ProjectHelpers
- using RSpec::Parameterized::TableSyntax
let_it_be(:group) { create(:group) }
let!(:project) { create(:project, project_level, namespace: group ) }
@@ -4176,8 +4194,6 @@ RSpec.describe Project, factory_default: :keep do
end
describe '#git_transfer_in_progress?' do
- using RSpec::Parameterized::TableSyntax
-
let(:project) { build(:project) }
subject { project.git_transfer_in_progress? }
@@ -5068,10 +5084,8 @@ RSpec.describe Project, factory_default: :keep do
it 'executes services with the specified scope' do
data = 'any data'
- expect(SlackService).to receive(:allocate).and_wrap_original do |method|
- method.call.tap do |instance|
- expect(instance).to receive(:async_execute).with(data).once
- end
+ expect_next_found_instance_of(SlackService) do |instance|
+ expect(instance).to receive(:async_execute).with(data).once
end
service.project.execute_services(data, :push_hooks)
@@ -5801,8 +5815,6 @@ RSpec.describe Project, factory_default: :keep do
end
describe 'validation #changing_shared_runners_enabled_is_allowed' do
- using RSpec::Parameterized::TableSyntax
-
where(:shared_runners_setting, :project_shared_runners_enabled, :valid_record) do
'enabled' | true | true
'enabled' | false | true
@@ -6025,8 +6037,6 @@ RSpec.describe Project, factory_default: :keep do
end
describe '#closest_setting' do
- using RSpec::Parameterized::TableSyntax
-
shared_examples_for 'fetching closest setting' do
let!(:namespace) { create(:namespace) }
let!(:project) { create(:project, namespace: namespace) }
diff --git a/spec/models/project_wiki_spec.rb b/spec/models/project_wiki_spec.rb
index 8001d009901..c04fc70deca 100644
--- a/spec/models/project_wiki_spec.rb
+++ b/spec/models/project_wiki_spec.rb
@@ -47,5 +47,6 @@ RSpec.describe ProjectWiki do
let_it_be(:resource) { create(:project_wiki) }
let(:resource_key) { 'project_wikis' }
+ let(:expected_worker_class) { Wikis::GitGarbageCollectWorker }
end
end
diff --git a/spec/models/prometheus_metric_spec.rb b/spec/models/prometheus_metric_spec.rb
index 9588167bbcc..a20f4edcf4a 100644
--- a/spec/models/prometheus_metric_spec.rb
+++ b/spec/models/prometheus_metric_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe PrometheusMetric do
+ using RSpec::Parameterized::TableSyntax
+
subject { build(:prometheus_metric) }
it_behaves_like 'having unique enum values'
@@ -14,8 +16,6 @@ RSpec.describe PrometheusMetric do
it { is_expected.to validate_uniqueness_of(:identifier).scoped_to(:project_id).allow_nil }
describe 'common metrics' do
- using RSpec::Parameterized::TableSyntax
-
where(:common, :with_project, :result) do
false | true | true
false | false | false
@@ -34,8 +34,6 @@ RSpec.describe PrometheusMetric do
end
describe '#query_series' do
- using RSpec::Parameterized::TableSyntax
-
where(:legend, :type) do
'Some other legend' | NilClass
'Status Code' | Array
@@ -72,8 +70,6 @@ RSpec.describe PrometheusMetric do
end
describe '#priority' do
- using RSpec::Parameterized::TableSyntax
-
where(:group, :priority) do
:nginx_ingress_vts | 10
:nginx_ingress | 10
@@ -97,8 +93,6 @@ RSpec.describe PrometheusMetric do
end
describe '#required_metrics' do
- using RSpec::Parameterized::TableSyntax
-
where(:group, :required_metrics) do
:nginx_ingress_vts | %w(nginx_upstream_responses_total nginx_upstream_response_msecs_avg)
:nginx_ingress | %w(nginx_ingress_controller_requests nginx_ingress_controller_ingress_upstream_latency_seconds_sum)
diff --git a/spec/models/readme_blob_spec.rb b/spec/models/readme_blob_spec.rb
deleted file mode 100644
index 95622d55254..00000000000
--- a/spec/models/readme_blob_spec.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ReadmeBlob do
- include FakeBlobHelpers
-
- describe 'policy' do
- let(:project) { build(:project, :repository) }
-
- subject { described_class.new(fake_blob(path: 'README.md'), project.repository) }
-
- it 'works with policy' do
- expect(Ability.allowed?(project.creator, :read_blob, subject)).to be_truthy
- end
- end
-end
diff --git a/spec/models/release_spec.rb b/spec/models/release_spec.rb
index b436c2e1088..209ac471210 100644
--- a/spec/models/release_spec.rb
+++ b/spec/models/release_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Release do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public, :repository) }
- let_it_be(:release) { create(:release, project: project, author: user) }
+ let(:release) { create(:release, project: project, author: user) }
it { expect(release).to be_valid }
@@ -89,6 +89,61 @@ RSpec.describe Release do
end
end
+ describe '#update' do
+ subject { release.update(params) }
+
+ context 'when links do not exist' do
+ context 'when params are specified for creation' do
+ let(:params) do
+ { links_attributes: [{ name: 'test', url: 'https://www.google.com/' }] }
+ end
+
+ it 'creates a link successfully' do
+ is_expected.to eq(true)
+
+ expect(release.links.count).to eq(1)
+ expect(release.links.first.name).to eq('test')
+ expect(release.links.first.url).to eq('https://www.google.com/')
+ end
+ end
+ end
+
+ context 'when a link exists' do
+ let!(:link1) { create(:release_link, release: release, name: 'test1', url: 'https://www.google1.com/') }
+ let!(:link2) { create(:release_link, release: release, name: 'test2', url: 'https://www.google2.com/') }
+
+ before do
+ release.reload
+ end
+
+ context 'when params are specified for update' do
+ let(:params) do
+ { links_attributes: [{ id: link1.id, name: 'new' }] }
+ end
+
+ it 'updates the link successfully' do
+ is_expected.to eq(true)
+
+ expect(release.links.count).to eq(2)
+ expect(release.links.first.name).to eq('new')
+ end
+ end
+
+ context 'when params are specified for deletion' do
+ let(:params) do
+ { links_attributes: [{ id: link1.id, _destroy: true }] }
+ end
+
+ it 'removes the link successfully' do
+ is_expected.to eq(true)
+
+ expect(release.links.count).to eq(1)
+ expect(release.links.first.name).to eq(link2.name)
+ end
+ end
+ end
+ end
+
describe '#sources' do
subject { release.sources }
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index dd54a701282..9e1729e8de4 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -483,12 +483,6 @@ RSpec.describe Repository do
it { is_expected.to be_an_instance_of(::Blob) }
end
- context 'readme blob on HEAD' do
- subject { repository.blob_at(repository.head_commit.sha, 'README.md') }
-
- it { is_expected.to be_an_instance_of(::ReadmeBlob) }
- end
-
context 'readme blob not on HEAD' do
subject { repository.blob_at(repository.find_branch('feature').target, 'README.md') }
@@ -1142,11 +1136,11 @@ RSpec.describe Repository do
expect(repository.license_key).to be_nil
end
- it 'returns nil when the content is not recognizable' do
+ it 'returns other when the content is not recognizable' do
repository.create_file(user, 'LICENSE', 'Gitlab B.V.',
message: 'Add LICENSE', branch_name: 'master')
- expect(repository.license_key).to be_nil
+ expect(repository.license_key).to eq('other')
end
it 'returns nil when the commit SHA does not exist' do
@@ -1186,11 +1180,12 @@ RSpec.describe Repository do
expect(repository.license).to be_nil
end
- it 'returns nil when the content is not recognizable' do
+ it 'returns other when the content is not recognizable' do
+ license = Licensee::License.new('other')
repository.create_file(user, 'LICENSE', 'Gitlab B.V.',
message: 'Add LICENSE', branch_name: 'master')
- expect(repository.license).to be_nil
+ expect(repository.license).to eq(license)
end
it 'returns the license' do
@@ -1938,7 +1933,6 @@ RSpec.describe Repository do
expect(repository).to receive(:expire_method_caches).with([
:size,
:commit_count,
- :rendered_readme,
:readme_path,
:contribution_guide,
:changelog,
@@ -2314,14 +2308,6 @@ RSpec.describe Repository do
expect(repository.readme).to be_nil
end
end
-
- context 'when a README exists' do
- let(:project) { create(:project, :repository) }
-
- it 'returns the README' do
- expect(repository.readme).to be_an_instance_of(ReadmeBlob)
- end
- end
end
end
@@ -2527,9 +2513,8 @@ RSpec.describe Repository do
describe '#refresh_method_caches' do
it 'refreshes the caches of the given types' do
expect(repository).to receive(:expire_method_caches)
- .with(%i(rendered_readme readme_path license_blob license_key license))
+ .with(%i(readme_path license_blob license_key license))
- expect(repository).to receive(:rendered_readme)
expect(repository).to receive(:readme_path)
expect(repository).to receive(:license_blob)
expect(repository).to receive(:license_key)
@@ -3049,4 +3034,51 @@ RSpec.describe Repository do
end
end
end
+
+ describe '.pick_storage_shard', :request_store do
+ before do
+ storages = {
+ 'default' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/repositories'),
+ 'picked' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/repositories')
+ }
+
+ allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
+ stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
+ Gitlab::CurrentSettings.current_application_settings
+
+ update_storages({ 'picked' => 0, 'default' => 100 })
+ end
+
+ context 'when expire is false' do
+ it 'does not expire existing repository storage value' do
+ previous_storage = described_class.pick_storage_shard
+ expect(previous_storage).to eq('default')
+ expect(Gitlab::CurrentSettings).not_to receive(:expire_current_application_settings)
+
+ update_storages({ 'picked' => 100, 'default' => 0 })
+
+ new_storage = described_class.pick_storage_shard(expire: false)
+ expect(new_storage).to eq(previous_storage)
+ end
+ end
+
+ context 'when expire is true' do
+ it 'expires existing repository storage value' do
+ previous_storage = described_class.pick_storage_shard
+ expect(previous_storage).to eq('default')
+ expect(Gitlab::CurrentSettings).to receive(:expire_current_application_settings).and_call_original
+
+ update_storages({ 'picked' => 100, 'default' => 0 })
+
+ new_storage = described_class.pick_storage_shard(expire: true)
+ expect(new_storage).to eq('picked')
+ end
+ end
+
+ def update_storages(storage_hash)
+ settings = ApplicationSetting.last
+ settings.repository_storages_weighted = storage_hash
+ settings.save!
+ end
+ end
end
diff --git a/spec/models/service_spec.rb b/spec/models/service_spec.rb
index 04b3920cd6c..9ffefd4bbf7 100644
--- a/spec/models/service_spec.rb
+++ b/spec/models/service_spec.rb
@@ -39,35 +39,29 @@ RSpec.describe Service do
end
end
- context 'with an existing service template' do
- before do
+ context 'with existing services' do
+ before_all do
create(:service, :template)
+ create(:service, :instance)
+ create(:service, project: project)
+ create(:service, group: group, project: nil)
end
- it 'validates only one service template per type' do
+ it 'allows only one service template per type' do
expect(build(:service, :template)).to be_invalid
end
- end
- context 'with an existing instance service' do
- before do
- create(:service, :instance)
- end
-
- it 'validates only one service instance per type' do
+ it 'allows only one instance service per type' do
expect(build(:service, :instance)).to be_invalid
end
- end
- it 'validates uniqueness of type and project_id on create' do
- expect(create(:service, project: project, type: 'Service')).to be_valid
- expect(build(:service, project: project, type: 'Service').valid?(:create)).to eq(false)
- expect(build(:service, project: project, type: 'Service').valid?(:update)).to eq(true)
- end
+ it 'allows only one project service per type' do
+ expect(build(:service, project: project)).to be_invalid
+ end
- it 'validates uniqueness of type and group_id' do
- expect(create(:service, group_id: group.id, project_id: nil, type: 'Service')).to be_valid
- expect(build(:service, group_id: group.id, project_id: nil, type: 'Service')).to be_invalid
+ it 'allows only one group service per type' do
+ expect(build(:service, group: group, project: nil)).to be_invalid
+ end
end
end
@@ -753,38 +747,6 @@ RSpec.describe Service do
end
end
- describe "callbacks" do
- let!(:service) do
- RedmineService.new(
- project: project,
- active: true,
- properties: {
- project_url: 'http://redmine/projects/project_name_in_redmine',
- issues_url: "http://redmine/#{project.id}/project_name_in_redmine/:id",
- new_issue_url: 'http://redmine/projects/project_name_in_redmine/issues/new'
- }
- )
- end
-
- describe "on create" do
- it "updates the has_external_issue_tracker boolean" do
- expect do
- service.save!
- end.to change { service.project.has_external_issue_tracker }.from(false).to(true)
- end
- end
-
- describe "on update" do
- it "updates the has_external_issue_tracker boolean" do
- service.save!
-
- expect do
- service.update(active: false)
- end.to change { service.project.has_external_issue_tracker }.from(true).to(false)
- end
- end
- end
-
describe '#api_field_names' do
let(:fake_service) do
Class.new(Service) do
@@ -864,20 +826,6 @@ RSpec.describe Service do
end
end
- describe '#external_issue_tracker?' do
- where(:category, :active, :result) do
- :issue_tracker | true | true
- :issue_tracker | false | false
- :common | true | false
- end
-
- with_them do
- it 'returns the right result' do
- expect(build(:service, category: category, active: active).external_issue_tracker?).to eq(result)
- end
- end
- end
-
describe '#external_wiki?' do
where(:type, :active, :result) do
'ExternalWikiService' | true | true
diff --git a/spec/models/snippet_spec.rb b/spec/models/snippet_spec.rb
index 68d183d5d55..623767d19e0 100644
--- a/spec/models/snippet_spec.rb
+++ b/spec/models/snippet_spec.rb
@@ -630,14 +630,10 @@ RSpec.describe Snippet do
subject { snippet.repository_storage }
before do
- expect_next_instance_of(ApplicationSetting) do |instance|
- expect(instance).to receive(:pick_repository_storage).and_return('picked')
- end
+ expect(Repository).to receive(:pick_storage_shard).and_return('picked')
end
it 'returns repository storage from ApplicationSetting' do
- expect(described_class).to receive(:pick_repository_storage).and_call_original
-
expect(subject).to eq 'picked'
end
diff --git a/spec/models/terraform/state_spec.rb b/spec/models/terraform/state_spec.rb
index ed311314086..1319e2adb03 100644
--- a/spec/models/terraform/state_spec.rb
+++ b/spec/models/terraform/state_spec.rb
@@ -8,8 +8,11 @@ RSpec.describe Terraform::State do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:locked_by_user).class_name('User') }
+ it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_presence_of(:project_id) }
+ it { is_expected.to validate_uniqueness_of(:name).scoped_to(:project_id) }
+
describe 'scopes' do
describe '.ordered_by_name' do
let_it_be(:project) { create(:project) }
@@ -25,6 +28,15 @@ RSpec.describe Terraform::State do
it { expect(subject.map(&:name)).to eq(names.sort) }
end
+
+ describe '.with_name' do
+ let_it_be(:matching_name) { create(:terraform_state, name: 'matching-name') }
+ let_it_be(:other_name) { create(:terraform_state, name: 'other-name') }
+
+ subject { described_class.with_name(matching_name.name) }
+
+ it { is_expected.to contain_exactly(matching_name) }
+ end
end
describe '#destroy' do
diff --git a/spec/models/terraform/state_version_spec.rb b/spec/models/terraform/state_version_spec.rb
index 97ac77d5e7b..ac2e8d167b3 100644
--- a/spec/models/terraform/state_version_spec.rb
+++ b/spec/models/terraform/state_version_spec.rb
@@ -24,6 +24,24 @@ RSpec.describe Terraform::StateVersion do
it { expect(subject.map(&:version)).to eq(versions.sort.reverse) }
end
+
+ describe '.with_files_stored_locally' do
+ subject { described_class.with_files_stored_locally }
+
+ it 'includes states with local storage' do
+ create_list(:terraform_state_version, 5)
+
+ expect(subject).to have_attributes(count: 5)
+ end
+
+ it 'excludes states without local storage' do
+ stub_terraform_state_object_storage
+
+ create_list(:terraform_state_version, 5)
+
+ expect(subject).to have_attributes(count: 0)
+ end
+ end
end
context 'file storage' do
diff --git a/spec/models/token_with_iv_spec.rb b/spec/models/token_with_iv_spec.rb
new file mode 100644
index 00000000000..8dbccc19217
--- /dev/null
+++ b/spec/models/token_with_iv_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe TokenWithIv do
+ describe 'validations' do
+ it { is_expected.to validate_presence_of :hashed_token }
+ it { is_expected.to validate_presence_of :iv }
+ it { is_expected.to validate_presence_of :hashed_plaintext_token }
+ end
+
+ describe '.find_by_hashed_token' do
+ it 'only includes matching record' do
+ matching_record = create(:token_with_iv, hashed_token: ::Digest::SHA256.digest('hashed-token'))
+ create(:token_with_iv)
+
+ expect(described_class.find_by_hashed_token('hashed-token')).to eq(matching_record)
+ end
+ end
+
+ describe '.find_by_plaintext_token' do
+ it 'only includes matching record' do
+ matching_record = create(:token_with_iv, hashed_plaintext_token: ::Digest::SHA256.digest('hashed-token'))
+ create(:token_with_iv)
+
+ expect(described_class.find_by_plaintext_token('hashed-token')).to eq(matching_record)
+ end
+ end
+end
diff --git a/spec/models/u2f_registration_spec.rb b/spec/models/u2f_registration_spec.rb
new file mode 100644
index 00000000000..1f2e4d1e447
--- /dev/null
+++ b/spec/models/u2f_registration_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe U2fRegistration do
+ let_it_be(:user) { create(:user) }
+ let(:u2f_registration) do
+ device = U2F::FakeU2F.new(FFaker::BaconIpsum.characters(5))
+ create(:u2f_registration, name: 'u2f_device',
+ user: user,
+ certificate: Base64.strict_encode64(device.cert_raw),
+ key_handle: U2F.urlsafe_encode64(device.key_handle_raw),
+ public_key: Base64.strict_encode64(device.origin_public_key_raw))
+ end
+
+ describe 'callbacks' do
+ describe '#create_webauthn_registration' do
+ it 'creates webauthn registration' do
+ u2f_registration.save!
+
+ webauthn_registration = WebauthnRegistration.where(u2f_registration_id: u2f_registration.id)
+ expect(webauthn_registration).to exist
+ end
+
+ it 'logs error' do
+ allow(Gitlab::Auth::U2fWebauthnConverter).to receive(:new).and_raise('boom!')
+ expect(Gitlab::AppJsonLogger).to(
+ receive(:error).with(a_hash_including(event: 'u2f_migration',
+ error: 'RuntimeError',
+ message: 'U2F to WebAuthn conversion failed'))
+ )
+
+ u2f_registration.save!
+ end
+ end
+ end
+end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 0935d3576a4..2ce6dad1c61 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -2477,7 +2477,7 @@ RSpec.describe User do
it 'is false if avatar is html page' do
user.update_attribute(:avatar, 'uploads/avatar.html')
- expect(user.avatar_type).to eq(['file format is not supported. Please try one of the following supported formats: png, jpg, jpeg, gif, bmp, tiff, ico'])
+ expect(user.avatar_type).to eq(['file format is not supported. Please try one of the following supported formats: png, jpg, jpeg, gif, bmp, tiff, ico, webp'])
end
end
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index e6650549f7f..efa1afb758f 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -468,6 +468,49 @@ RSpec.describe ProjectPolicy do
end
end
+ context "project bots" do
+ let(:project_bot) { create(:user, :project_bot) }
+ let(:user) { create(:user) }
+
+ context "project_bot_access" do
+ context "when regular user and part of the project" do
+ let(:current_user) { user }
+
+ before do
+ project.add_developer(user)
+ end
+
+ it { is_expected.not_to be_allowed(:project_bot_access) }
+ end
+
+ context "when project bot and not part of the project" do
+ let(:current_user) { project_bot }
+
+ it { is_expected.not_to be_allowed(:project_bot_access) }
+ end
+
+ context "when project bot and part of the project" do
+ let(:current_user) { project_bot }
+
+ before do
+ project.add_developer(project_bot)
+ end
+
+ it { is_expected.to be_allowed(:project_bot_access) }
+ end
+ end
+
+ context 'with resource access tokens' do
+ let(:current_user) { project_bot }
+
+ before do
+ project.add_maintainer(project_bot)
+ end
+
+ it { is_expected.not_to be_allowed(:admin_resource_access_tokens) }
+ end
+ end
+
describe 'read_prometheus_alerts' do
context 'with admin' do
let(:current_user) { admin }
@@ -822,6 +865,28 @@ RSpec.describe ProjectPolicy do
end
end
+ context 'security configuration feature' do
+ %w(guest reporter).each do |role|
+ context role do
+ let(:current_user) { send(role) }
+
+ it 'prevents reading security configuration' do
+ expect_disallowed(:read_security_configuration)
+ end
+ end
+ end
+
+ %w(developer maintainer owner).each do |role|
+ context role do
+ let(:current_user) { send(role) }
+
+ it 'allows reading security configuration' do
+ expect_allowed(:read_security_configuration)
+ end
+ end
+ end
+ end
+
describe 'design permissions' do
let(:current_user) { guest }
diff --git a/spec/presenters/ci/build_runner_presenter_spec.rb b/spec/presenters/ci/build_runner_presenter_spec.rb
index f78ad38f4e8..1e6c33484ec 100644
--- a/spec/presenters/ci/build_runner_presenter_spec.rb
+++ b/spec/presenters/ci/build_runner_presenter_spec.rb
@@ -196,16 +196,6 @@ RSpec.describe Ci::BuildRunnerPresenter do
expect(subject[0]).to match(/^\+[0-9a-f]{40}:refs\/pipelines\/[0-9]+$/)
end
- context 'when the scalability_ci_fetch_sha feature flag is disabled' do
- before do
- stub_feature_flags(scalability_ci_fetch_sha: false)
- end
-
- it 'fetches the ref by name' do
- expect(subject[0]).to eq("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}")
- end
- end
-
context 'when ref is tag' do
let(:build) { create(:ci_build, :tag) }
diff --git a/spec/presenters/ci/pipeline_artifacts/code_quality_mr_diff_presenter_spec.rb b/spec/presenters/ci/pipeline_artifacts/code_quality_mr_diff_presenter_spec.rb
new file mode 100644
index 00000000000..06d5422eed3
--- /dev/null
+++ b/spec/presenters/ci/pipeline_artifacts/code_quality_mr_diff_presenter_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::PipelineArtifacts::CodeQualityMrDiffPresenter do
+ let(:pipeline_artifact) { create(:ci_pipeline_artifact, :with_codequality_mr_diff_report) }
+
+ subject(:presenter) { described_class.new(pipeline_artifact) }
+
+ describe '#for_files' do
+ subject(:quality_data) { presenter.for_files(filenames) }
+
+ context 'when code quality has data' do
+ context 'when filenames is empty' do
+ let(:filenames) { %w() }
+
+ it 'returns hash without quality' do
+ expect(quality_data).to match(files: {})
+ end
+ end
+
+ context 'when filenames do not match code quality data' do
+ let(:filenames) { %w(demo.rb) }
+
+ it 'returns hash without quality' do
+ expect(quality_data).to match(files: {})
+ end
+ end
+
+ context 'when filenames matches code quality data' do
+ context 'when asking for one filename' do
+ let(:filenames) { %w(file_a.rb) }
+
+ it 'returns quality for the given filename' do
+ expect(quality_data).to match(
+ files: {
+ "file_a.rb" => [
+ { line: 10, description: "Avoid parameter lists longer than 5 parameters. [12/5]", severity: "major" },
+ { line: 10, description: "Method `new_array` has 12 arguments (exceeds 4 allowed). Consider refactoring.", severity: "minor" }
+ ]
+ }
+ )
+ end
+ end
+
+ context 'when asking for multiple filenames' do
+ let(:filenames) { %w(file_a.rb file_b.rb) }
+
+ it 'returns quality for the given filenames' do
+ expect(quality_data).to match(
+ files: {
+ "file_a.rb" => [
+ { line: 10, description: "Avoid parameter lists longer than 5 parameters. [12/5]", severity: "major" },
+ { line: 10, description: "Method `new_array` has 12 arguments (exceeds 4 allowed). Consider refactoring.", severity: "minor" }
+ ],
+ "file_b.rb" => [
+ { line: 10, description: "This cop checks for methods with too many parameters.\nThe maximum number of parameters is configurable.\nKeyword arguments can optionally be excluded from the total count.", severity: "minor" }
+ ]
+ }
+ )
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/presenters/gitlab/whats_new/item_presenter_spec.rb b/spec/presenters/gitlab/whats_new/item_presenter_spec.rb
deleted file mode 100644
index 9b04741aa60..00000000000
--- a/spec/presenters/gitlab/whats_new/item_presenter_spec.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::WhatsNew::ItemPresenter do
- let(:present) { Gitlab::WhatsNew::ItemPresenter.present(item) }
- let(:item) { { "packages" => %w(Core Starter Premium Ultimate) } }
- let(:gitlab_com) { true }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(gitlab_com)
- end
-
- describe '.present' do
- context 'when on Gitlab.com' do
- it 'transforms package names to gitlab.com friendly package names' do
- expect(present).to eq({ "packages" => %w(Free Bronze Silver Gold) })
- end
- end
-
- context 'when not on Gitlab.com' do
- let(:gitlab_com) { false }
-
- it 'does not transform package names' do
- expect(present).to eq({ "packages" => %w(Core Starter Premium Ultimate) })
- end
- end
- end
-end
diff --git a/spec/presenters/project_presenter_spec.rb b/spec/presenters/project_presenter_spec.rb
index a9050c233af..98bcbd8384b 100644
--- a/spec/presenters/project_presenter_spec.rb
+++ b/spec/presenters/project_presenter_spec.rb
@@ -350,7 +350,7 @@ RSpec.describe ProjectPresenter do
is_link: false,
label: a_string_including("New file"),
link: presenter.project_new_blob_path(project, 'master'),
- class_modifier: 'missing'
+ class_modifier: 'dashed'
)
end
diff --git a/spec/requests/api/api_spec.rb b/spec/requests/api/api_spec.rb
index 9fd30213133..8bd6049e6fa 100644
--- a/spec/requests/api/api_spec.rb
+++ b/spec/requests/api/api_spec.rb
@@ -36,6 +36,12 @@ RSpec.describe API::API do
expect(response).to have_gitlab_http_status(:ok)
end
+ it 'does authorize user for head request' do
+ head api('/groups', personal_access_token: token)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
it 'does not authorize user for revoked token' do
revoked = create(:personal_access_token, :revoked, user: user, scopes: [:read_api])
@@ -126,4 +132,34 @@ RSpec.describe API::API do
get(api('/users'))
end
end
+
+ describe 'supported content-types' do
+ context 'GET /user/:id.txt' do
+ let_it_be(:user) { create(:user) }
+
+ subject { get api("/users/#{user.id}.txt", user) }
+
+ it 'returns application/json' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq('application/json')
+ expect(response.body).to include('{"id":')
+ end
+
+ context 'when api_always_use_application_json is disabled' do
+ before do
+ stub_feature_flags(api_always_use_application_json: false)
+ end
+
+ it 'returns text/plain' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq('text/plain')
+ expect(response.body).to include('#<API::Entities::User:')
+ end
+ end
+ end
+ end
end
diff --git a/spec/requests/api/applications_spec.rb b/spec/requests/api/applications_spec.rb
index 63fbf6e32dd..ca09f5524ca 100644
--- a/spec/requests/api/applications_spec.rb
+++ b/spec/requests/api/applications_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe API::Applications, :api do
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response).to be_a Hash
- expect(json_response['message']['redirect_uri'][0]).to eq('must be an absolute URI.')
+ expect(json_response['message']['redirect_uri'][0]).to eq('must be a valid URI.')
end
it 'does not allow creating an application with a forbidden URI format' do
diff --git a/spec/requests/api/debian_group_packages_spec.rb b/spec/requests/api/debian_group_packages_spec.rb
index 8a05d20fb33..9d63d675a02 100644
--- a/spec/requests/api/debian_group_packages_spec.rb
+++ b/spec/requests/api/debian_group_packages_spec.rb
@@ -6,32 +6,32 @@ RSpec.describe API::DebianGroupPackages do
include WorkhorseHelpers
include_context 'Debian repository shared context', :group do
- describe 'GET groups/:id/-/packages/debian/dists/*distribution/Release.gpg' do
- let(:url) { "/groups/#{group.id}/-/packages/debian/dists/#{distribution}/Release.gpg" }
+ describe 'GET groups/:id/packages/debian/dists/*distribution/Release.gpg' do
+ let(:url) { "/groups/#{group.id}/packages/debian/dists/#{distribution}/Release.gpg" }
it_behaves_like 'Debian group repository GET endpoint', :not_found, nil
end
- describe 'GET groups/:id/-/packages/debian/dists/*distribution/Release' do
- let(:url) { "/groups/#{group.id}/-/packages/debian/dists/#{distribution}/Release" }
+ describe 'GET groups/:id/packages/debian/dists/*distribution/Release' do
+ let(:url) { "/groups/#{group.id}/packages/debian/dists/#{distribution}/Release" }
it_behaves_like 'Debian group repository GET endpoint', :success, 'TODO Release'
end
- describe 'GET groups/:id/-/packages/debian/dists/*distribution/InRelease' do
- let(:url) { "/groups/#{group.id}/-/packages/debian/dists/#{distribution}/InRelease" }
+ describe 'GET groups/:id/packages/debian/dists/*distribution/InRelease' do
+ let(:url) { "/groups/#{group.id}/packages/debian/dists/#{distribution}/InRelease" }
it_behaves_like 'Debian group repository GET endpoint', :not_found, nil
end
- describe 'GET groups/:id/-/packages/debian/dists/*distribution/:component/binary-:architecture/Packages' do
- let(:url) { "/groups/#{group.id}/-/packages/debian/dists/#{distribution}/#{component}/binary-#{architecture}/Packages" }
+ describe 'GET groups/:id/packages/debian/dists/*distribution/:component/binary-:architecture/Packages' do
+ let(:url) { "/groups/#{group.id}/packages/debian/dists/#{distribution}/#{component}/binary-#{architecture}/Packages" }
it_behaves_like 'Debian group repository GET endpoint', :success, 'TODO Packages'
end
- describe 'GET groups/:id/-/packages/debian/pool/:component/:letter/:source_package/:file_name' do
- let(:url) { "/groups/#{group.id}/-/packages/debian/pool/#{component}/#{letter}/#{source_package}/#{package_name}_#{package_version}_#{architecture}.deb" }
+ describe 'GET groups/:id/packages/debian/pool/:component/:letter/:source_package/:file_name' do
+ let(:url) { "/groups/#{group.id}/packages/debian/pool/#{component}/#{letter}/#{source_package}/#{package_name}_#{package_version}_#{architecture}.deb" }
it_behaves_like 'Debian group repository GET endpoint', :success, 'TODO File'
end
diff --git a/spec/requests/api/debian_project_packages_spec.rb b/spec/requests/api/debian_project_packages_spec.rb
index 663b69b1b76..4941f2a77f4 100644
--- a/spec/requests/api/debian_project_packages_spec.rb
+++ b/spec/requests/api/debian_project_packages_spec.rb
@@ -6,46 +6,46 @@ RSpec.describe API::DebianProjectPackages do
include WorkhorseHelpers
include_context 'Debian repository shared context', :project do
- describe 'GET projects/:id/-/packages/debian/dists/*distribution/Release.gpg' do
- let(:url) { "/projects/#{project.id}/-/packages/debian/dists/#{distribution}/Release.gpg" }
+ describe 'GET projects/:id/packages/debian/dists/*distribution/Release.gpg' do
+ let(:url) { "/projects/#{project.id}/packages/debian/dists/#{distribution}/Release.gpg" }
it_behaves_like 'Debian project repository GET endpoint', :not_found, nil
end
- describe 'GET projects/:id/-/packages/debian/dists/*distribution/Release' do
- let(:url) { "/projects/#{project.id}/-/packages/debian/dists/#{distribution}/Release" }
+ describe 'GET projects/:id/packages/debian/dists/*distribution/Release' do
+ let(:url) { "/projects/#{project.id}/packages/debian/dists/#{distribution}/Release" }
it_behaves_like 'Debian project repository GET endpoint', :success, 'TODO Release'
end
- describe 'GET projects/:id/-/packages/debian/dists/*distribution/InRelease' do
- let(:url) { "/projects/#{project.id}/-/packages/debian/dists/#{distribution}/InRelease" }
+ describe 'GET projects/:id/packages/debian/dists/*distribution/InRelease' do
+ let(:url) { "/projects/#{project.id}/packages/debian/dists/#{distribution}/InRelease" }
it_behaves_like 'Debian project repository GET endpoint', :not_found, nil
end
- describe 'GET projects/:id/-/packages/debian/dists/*distribution/:component/binary-:architecture/Packages' do
- let(:url) { "/projects/#{project.id}/-/packages/debian/dists/#{distribution}/#{component}/binary-#{architecture}/Packages" }
+ describe 'GET projects/:id/packages/debian/dists/*distribution/:component/binary-:architecture/Packages' do
+ let(:url) { "/projects/#{project.id}/packages/debian/dists/#{distribution}/#{component}/binary-#{architecture}/Packages" }
it_behaves_like 'Debian project repository GET endpoint', :success, 'TODO Packages'
end
- describe 'GET projects/:id/-/packages/debian/pool/:component/:letter/:source_package/:file_name' do
- let(:url) { "/projects/#{project.id}/-/packages/debian/pool/#{component}/#{letter}/#{source_package}/#{package_name}_#{package_version}_#{architecture}.deb" }
+ describe 'GET projects/:id/packages/debian/pool/:component/:letter/:source_package/:file_name' do
+ let(:url) { "/projects/#{project.id}/packages/debian/pool/#{component}/#{letter}/#{source_package}/#{package_name}_#{package_version}_#{architecture}.deb" }
it_behaves_like 'Debian project repository GET endpoint', :success, 'TODO File'
end
- describe 'PUT projects/:id/-/packages/debian/incoming/:file_name' do
+ describe 'PUT projects/:id/packages/debian/:file_name' do
let(:method) { :put }
- let(:url) { "/projects/#{project.id}/-/packages/debian/incoming/#{file_name}" }
+ let(:url) { "/projects/#{project.id}/packages/debian/#{file_name}" }
it_behaves_like 'Debian project repository PUT endpoint', :created, nil
end
- describe 'PUT projects/:id/-/packages/debian/incoming/:file_name/authorize' do
+ describe 'PUT projects/:id/packages/debian/:file_name/authorize' do
let(:method) { :put }
- let(:url) { "/projects/#{project.id}/-/packages/debian/incoming/#{file_name}/authorize" }
+ let(:url) { "/projects/#{project.id}/packages/debian/#{file_name}/authorize" }
it_behaves_like 'Debian project repository PUT endpoint', :created, nil, is_authorize: true
end
diff --git a/spec/requests/api/deploy_tokens_spec.rb b/spec/requests/api/deploy_tokens_spec.rb
index 8ec4f888e2e..7a31ff725c8 100644
--- a/spec/requests/api/deploy_tokens_spec.rb
+++ b/spec/requests/api/deploy_tokens_spec.rb
@@ -10,24 +10,12 @@ RSpec.describe API::DeployTokens do
let!(:deploy_token) { create(:deploy_token, projects: [project]) }
let!(:group_deploy_token) { create(:deploy_token, :group, groups: [group]) }
- shared_examples 'with feature flag disabled' do
- context 'disabled feature flag' do
- before do
- stub_feature_flags(deploy_tokens_api: false)
- end
-
- it { is_expected.to have_gitlab_http_status(:service_unavailable) }
- end
- end
-
describe 'GET /deploy_tokens' do
subject do
get api('/deploy_tokens', user)
response
end
- it_behaves_like 'with feature flag disabled'
-
context 'when unauthenticated' do
let(:user) { nil }
@@ -81,8 +69,6 @@ RSpec.describe API::DeployTokens do
project.add_maintainer(user)
end
- it_behaves_like 'with feature flag disabled'
-
it { is_expected.to have_gitlab_http_status(:ok) }
it 'returns all deploy tokens for the project' do
@@ -128,8 +114,6 @@ RSpec.describe API::DeployTokens do
group.add_maintainer(user)
end
- it_behaves_like 'with feature flag disabled'
-
it { is_expected.to have_gitlab_http_status(:ok) }
it 'returns all deploy tokens for the group' do
diff --git a/spec/requests/api/events_spec.rb b/spec/requests/api/events_spec.rb
index 6a8d5f91abd..110d6e2f99e 100644
--- a/spec/requests/api/events_spec.rb
+++ b/spec/requests/api/events_spec.rb
@@ -55,6 +55,12 @@ RSpec.describe API::Events do
expect(json_response).to be_an Array
expect(json_response.size).to eq(1)
end
+
+ it 'returns "200" response on head request' do
+ head api('/events?action=closed&target_type=issue&after=2016-12-1&before=2016-12-31', personal_access_token: token)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
context 'when the requesting token does not have "read_user" or "api" scope' do
diff --git a/spec/requests/api/generic_packages_spec.rb b/spec/requests/api/generic_packages_spec.rb
index d162d288129..648d899f1a8 100644
--- a/spec/requests/api/generic_packages_spec.rb
+++ b/spec/requests/api/generic_packages_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe API::GenericPackages do
include HttpBasicAuthHelpers
+ using RSpec::Parameterized::TableSyntax
let_it_be(:personal_access_token) { create(:personal_access_token) }
let_it_be(:project, reload: true) { create(:project) }
@@ -76,8 +77,6 @@ RSpec.describe API::GenericPackages do
describe 'PUT /api/v4/projects/:id/packages/generic/:package_name/:package_version/:file_name/authorize' do
context 'with valid project' do
- using RSpec::Parameterized::TableSyntax
-
where(:project_visibility, :user_role, :member?, :authenticate_with, :expected_status) do
'PUBLIC' | :developer | true | :personal_access_token | :success
'PUBLIC' | :guest | true | :personal_access_token | :forbidden
@@ -194,8 +193,6 @@ RSpec.describe API::GenericPackages do
let(:params) { { file: file_upload } }
context 'authentication' do
- using RSpec::Parameterized::TableSyntax
-
where(:project_visibility, :user_role, :member?, :authenticate_with, :expected_status) do
'PUBLIC' | :guest | true | :personal_access_token | :forbidden
'PUBLIC' | :guest | true | :user_basic_auth | :forbidden
@@ -373,8 +370,6 @@ RSpec.describe API::GenericPackages do
end
context 'application security' do
- using RSpec::Parameterized::TableSyntax
-
where(:param_name, :param_value) do
:package_name | 'my-package/../'
:package_name | 'my-package%2f%2e%2e%2f'
@@ -404,8 +399,6 @@ RSpec.describe API::GenericPackages do
end
describe 'GET /api/v4/projects/:id/packages/generic/:package_name/:package_version/:file_name' do
- using RSpec::Parameterized::TableSyntax
-
let_it_be(:package) { create(:generic_package, project: project) }
let_it_be(:package_file) { create(:package_file, :generic, package: package) }
@@ -527,8 +520,6 @@ RSpec.describe API::GenericPackages do
end
context 'application security' do
- using RSpec::Parameterized::TableSyntax
-
where(:param_name, :param_value) do
:package_name | 'my-package/../'
:package_name | 'my-package%2f%2e%2e%2f'
diff --git a/spec/requests/api/graphql/issue/issue_spec.rb b/spec/requests/api/graphql/issue/issue_spec.rb
index 42c8e0cc9c0..09e89f65882 100644
--- a/spec/requests/api/graphql/issue/issue_spec.rb
+++ b/spec/requests/api/graphql/issue/issue_spec.rb
@@ -24,6 +24,20 @@ RSpec.describe 'Query.issue(id)' do
end
end
+ it_behaves_like 'a noteable graphql type we can query' do
+ let(:noteable) { issue }
+ let(:project) { issue.project }
+ let(:path_to_noteable) { [:issue] }
+
+ before do
+ project.add_guest(current_user)
+ end
+
+ def query(fields)
+ graphql_query_for('issue', issue_params, fields)
+ end
+ end
+
context 'when the user does not have access to the issue' do
it 'returns nil' do
project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE)
diff --git a/spec/requests/api/graphql/mutations/alert_management/http_integration/update_spec.rb b/spec/requests/api/graphql/mutations/alert_management/http_integration/update_spec.rb
index bf7eb3d980c..18cbb7d8b00 100644
--- a/spec/requests/api/graphql/mutations/alert_management/http_integration/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/alert_management/http_integration/update_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'Updating an existing HTTP Integration' do
include GraphqlHelpers
- let_it_be(:user) { create(:user) }
+ let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:integration) { create(:alert_management_http_integration, project: project) }
@@ -32,18 +32,8 @@ RSpec.describe 'Updating an existing HTTP Integration' do
let(:mutation_response) { graphql_mutation_response(:http_integration_update) }
before do
- project.add_maintainer(user)
+ project.add_maintainer(current_user)
end
- it 'updates the integration' do
- post_graphql_mutation(mutation, current_user: user)
-
- integration_response = mutation_response['integration']
-
- expect(response).to have_gitlab_http_status(:success)
- expect(integration_response['id']).to eq(GitlabSchema.id_from_object(integration).to_s)
- expect(integration_response['name']).to eq('Modified Name')
- expect(integration_response['active']).to be_falsey
- expect(integration_response['url']).to include('modified-name')
- end
+ it_behaves_like 'updating an existing HTTP integration'
end
diff --git a/spec/requests/api/graphql/mutations/merge_requests/reviewer_rereview_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/reviewer_rereview_spec.rb
new file mode 100644
index 00000000000..2e4f35cbcde
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/merge_requests/reviewer_rereview_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Requesting a re-review of a merge request' do
+ include GraphqlHelpers
+
+ let(:current_user) { create(:user) }
+ let(:merge_request) { create(:merge_request, reviewers: [user]) }
+ let(:project) { merge_request.project }
+ let(:user) { create(:user) }
+ let(:input) { { user_id: global_id_of(user) } }
+
+ let(:mutation) do
+ variables = {
+ project_path: project.full_path,
+ iid: merge_request.iid.to_s
+ }
+ graphql_mutation(:merge_request_reviewer_rereview, variables.merge(input),
+ <<-QL.strip_heredoc
+ clientMutationId
+ errors
+ QL
+ )
+ end
+
+ def mutation_response
+ graphql_mutation_response(:merge_request_reviewer_rereview)
+ end
+
+ def mutation_errors
+ mutation_response['errors']
+ end
+
+ before do
+ project.add_developer(current_user)
+ project.add_developer(user)
+ end
+
+ it 'returns an error if the user is not allowed to update the merge request' do
+ post_graphql_mutation(mutation, current_user: create(:user))
+
+ expect(graphql_errors).not_to be_empty
+ end
+
+ describe 'reviewer does not exist' do
+ let(:input) { { user_id: global_id_of(create(:user)) } }
+
+ it 'returns an error' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_errors).not_to be_empty
+ end
+ end
+
+ describe 'reviewer exists' do
+ it 'does not return an error' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_errors).to be_empty
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/releases/create_spec.rb b/spec/requests/api/graphql/mutations/releases/create_spec.rb
index 79bdcec7944..a4918cd560c 100644
--- a/spec/requests/api/graphql/mutations/releases/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/releases/create_spec.rb
@@ -309,10 +309,7 @@ RSpec.describe 'Creation of a new release' do
let(:asset_link_2) { { name: 'My link', url: 'https://example.com/2' } }
let(:assets) { { links: [asset_link_1, asset_link_2] } }
- # Right now the raw Postgres error message is sent to the user as the validation message.
- # We should catch this validation error and return a nicer message:
- # https://gitlab.com/gitlab-org/gitlab/-/issues/277087
- it_behaves_like 'errors-as-data with message', 'PG::UniqueViolation'
+ it_behaves_like 'errors-as-data with message', %r{Validation failed: Links have duplicate values \(My link\)}
end
context 'when two release assets share the same URL' do
@@ -320,8 +317,7 @@ RSpec.describe 'Creation of a new release' do
let(:asset_link_2) { { name: 'My second link', url: 'https://example.com' } }
let(:assets) { { links: [asset_link_1, asset_link_2] } }
- # Same note as above about the ugly error message
- it_behaves_like 'errors-as-data with message', 'PG::UniqueViolation'
+ it_behaves_like 'errors-as-data with message', %r{Validation failed: Links have duplicate values \(https://example.com\)}
end
context 'when the provided tag name is HEAD' do
diff --git a/spec/requests/api/graphql/mutations/snippets/create_spec.rb b/spec/requests/api/graphql/mutations/snippets/create_spec.rb
index fd0dc98a8d3..1c2260070ec 100644
--- a/spec/requests/api/graphql/mutations/snippets/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/create_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe 'Creating a Snippet' do
let(:actions) { [{ action: action }.merge(file_1), { action: action }.merge(file_2)] }
let(:project_path) { nil }
let(:uploaded_files) { nil }
+ let(:spam_mutation_vars) { {} }
let(:mutation_vars) do
{
description: description,
@@ -25,7 +26,7 @@ RSpec.describe 'Creating a Snippet' do
project_path: project_path,
uploaded_files: uploaded_files,
blob_actions: actions
- }
+ }.merge(spam_mutation_vars)
end
let(:mutation) do
@@ -77,7 +78,20 @@ RSpec.describe 'Creating a Snippet' do
expect(mutation_response['snippet']).to be_nil
end
- it_behaves_like 'spam flag is present'
+ context 'when snippet_spam flag is disabled' do
+ before do
+ stub_feature_flags(snippet_spam: false)
+ end
+
+ it 'passes disable_spam_action_service param to service' do
+ expect(::Snippets::CreateService)
+ .to receive(:new)
+ .with(anything, anything, hash_including(disable_spam_action_service: true))
+ .and_call_original
+
+ subject
+ end
+ end
end
shared_examples 'creates snippet' do
@@ -98,15 +112,24 @@ RSpec.describe 'Creating a Snippet' do
end
context 'when action is invalid' do
- let(:file_1) { { filePath: 'example_file1' }}
+ let(:file_1) { { filePath: 'example_file1' } }
it_behaves_like 'a mutation that returns errors in the response', errors: ['Snippet actions have invalid data']
it_behaves_like 'does not create snippet'
end
it_behaves_like 'snippet edit usage data counters'
- it_behaves_like 'spam flag is present'
- it_behaves_like 'can raise spam flag' do
+
+ it_behaves_like 'a mutation which can mutate a spammable' do
+ let(:captcha_response) { 'abc123' }
+ let(:spam_log_id) { 1234 }
+ let(:spam_mutation_vars) do
+ {
+ captcha_response: captcha_response,
+ spam_log_id: spam_log_id
+ }
+ end
+
let(:service) { Snippets::CreateService }
end
end
@@ -148,9 +171,6 @@ RSpec.describe 'Creating a Snippet' do
it_behaves_like 'a mutation that returns errors in the response', errors: ["Title can't be blank"]
it_behaves_like 'does not create snippet'
- it_behaves_like 'can raise spam flag' do
- let(:service) { Snippets::CreateService }
- end
end
context 'when there are non-ActiveRecord errors' do
diff --git a/spec/requests/api/graphql/mutations/snippets/update_spec.rb b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
index 21d403c6f73..43dc8d8bc44 100644
--- a/spec/requests/api/graphql/mutations/snippets/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe 'Updating a Snippet' do
let(:updated_file) { 'CHANGELOG' }
let(:deleted_file) { 'README' }
let(:snippet_gid) { GitlabSchema.id_from_object(snippet).to_s }
+ let(:spam_mutation_vars) { {} }
let(:mutation_vars) do
{
id: snippet_gid,
@@ -26,7 +27,7 @@ RSpec.describe 'Updating a Snippet' do
{ action: :update, filePath: updated_file, content: updated_content },
{ action: :delete, filePath: deleted_file }
]
- }
+ }.merge(spam_mutation_vars)
end
let(:mutation) do
@@ -81,11 +82,20 @@ RSpec.describe 'Updating a Snippet' do
end
end
- it_behaves_like 'can raise spam flag' do
- let(:service) { Snippets::UpdateService }
- end
+ context 'when snippet_spam flag is disabled' do
+ before do
+ stub_feature_flags(snippet_spam: false)
+ end
- it_behaves_like 'spam flag is present'
+ it 'passes disable_spam_action_service param to service' do
+ expect(::Snippets::UpdateService)
+ .to receive(:new)
+ .with(anything, anything, hash_including(disable_spam_action_service: true))
+ .and_call_original
+
+ subject
+ end
+ end
context 'when there are ActiveRecord validation errors' do
let(:updated_title) { '' }
@@ -112,11 +122,19 @@ RSpec.describe 'Updating a Snippet' do
expect(mutation_response['snippet']['visibilityLevel']).to eq('private')
end
end
+ end
- it_behaves_like 'spam flag is present'
- it_behaves_like 'can raise spam flag' do
- let(:service) { Snippets::UpdateService }
+ it_behaves_like 'a mutation which can mutate a spammable' do
+ let(:captcha_response) { 'abc123' }
+ let(:spam_log_id) { 1234 }
+ let(:spam_mutation_vars) do
+ {
+ captcha_response: captcha_response,
+ spam_log_id: spam_log_id
+ }
end
+
+ let(:service) { Snippets::UpdateService }
end
def blob_at(filename)
diff --git a/spec/requests/api/graphql/packages/package_composer_details_spec.rb b/spec/requests/api/graphql/packages/package_composer_details_spec.rb
deleted file mode 100644
index 1a2cf4a972a..00000000000
--- a/spec/requests/api/graphql/packages/package_composer_details_spec.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe 'package composer details' do
- using RSpec::Parameterized::TableSyntax
- include GraphqlHelpers
-
- let_it_be(:project) { create(:project) }
- let_it_be(:package) { create(:composer_package, project: project) }
- let_it_be(:composer_metadatum) do
- # we are forced to manually create the metadatum, without using the factory to force the sha to be a string
- # and avoid an error where gitaly can't find the repository
- create(:composer_metadatum, package: package, target_sha: 'foo_sha', composer_json: { name: 'name', type: 'type', license: 'license', version: 1 })
- end
-
- let(:query) do
- graphql_query_for(
- 'packageComposerDetails',
- { id: package_global_id },
- all_graphql_fields_for('PackageComposerDetails', max_depth: 2)
- )
- end
-
- let(:user) { project.owner }
- let(:package_global_id) { package.to_global_id.to_s }
- let(:package_composer_details_response) { graphql_data.dig('packageComposerDetails') }
-
- subject { post_graphql(query, current_user: user) }
-
- it_behaves_like 'a working graphql query' do
- before do
- subject
- end
-
- it 'matches the JSON schema' do
- expect(package_composer_details_response).to match_schema('graphql/packages/package_composer_details')
- end
- end
-end
diff --git a/spec/requests/api/graphql/packages/package_spec.rb b/spec/requests/api/graphql/packages/package_spec.rb
new file mode 100644
index 00000000000..3def9b7d19d
--- /dev/null
+++ b/spec/requests/api/graphql/packages/package_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'package details' do
+ using RSpec::Parameterized::TableSyntax
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:package) { create(:composer_package, project: project) }
+ let_it_be(:composer_json) { { name: 'name', type: 'type', license: 'license', version: 1 } }
+ let_it_be(:composer_metadatum) do
+ # we are forced to manually create the metadatum, without using the factory, to force the sha to be a string
+ # and to avoid an error where gitaly can't find the repository
+ create(:composer_metadatum, package: package, target_sha: 'foo_sha', composer_json: composer_json)
+ end
+
+ let(:depth) { 3 }
+ let(:excluded) { ['metadata'] }
+
+ let(:query) do
+ graphql_query_for(:package, { id: package_global_id }, <<~FIELDS)
+ #{all_graphql_fields_for('Package', max_depth: depth, excluded: excluded)}
+ metadata {
+ #{query_graphql_fragment('ComposerMetadata')}
+ }
+ FIELDS
+ end
+
+ let(:user) { project.owner }
+ let(:package_global_id) { global_id_of(package) }
+ let(:package_details) { graphql_data_at(:package) }
+
+ subject { post_graphql(query, current_user: user) }
+
+ it_behaves_like 'a working graphql query' do
+ before do
+ subject
+ end
+
+ it 'matches the JSON schema' do
+ expect(package_details).to match_schema('graphql/packages/package_details')
+ end
+
+ it 'includes the fields of the correct package' do
+ expect(package_details).to include(
+ 'id' => package_global_id,
+ 'metadata' => {
+ 'targetSha' => 'foo_sha',
+ 'composerJson' => composer_json.transform_keys(&:to_s).transform_values(&:to_s)
+ }
+ )
+ end
+ end
+
+ context 'there are other versions of this package' do
+ let(:depth) { 3 }
+ let(:excluded) { %w[metadata project tags pipelines] } # to limit the query complexity
+
+ let_it_be(:siblings) { create_list(:composer_package, 2, project: project, name: package.name) }
+
+ it 'includes the sibling versions' do
+ subject
+
+ expect(graphql_data_at(:package, :versions, :nodes)).to match_array(
+ siblings.map { |p| a_hash_including('id' => global_id_of(p)) }
+ )
+ end
+
+ context 'going deeper' do
+ let(:depth) { 6 }
+
+ it 'does not create a cycle of versions' do
+ subject
+
+ expect(graphql_data_at(:package, :versions, :nodes, :version)).to be_present
+ expect(graphql_data_at(:package, :versions, :nodes, :versions)).not_to be_present
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/project/issue/designs/notes_spec.rb b/spec/requests/api/graphql/project/issue/designs/notes_spec.rb
index a671ddc7ab1..7148750b6cb 100644
--- a/spec/requests/api/graphql/project/issue/designs/notes_spec.rb
+++ b/spec/requests/api/graphql/project/issue/designs/notes_spec.rb
@@ -22,6 +22,23 @@ RSpec.describe 'Getting designs related to an issue' do
end
end
+ it_behaves_like 'a noteable graphql type we can query' do
+ let(:noteable) { design }
+ let(:note_factory) { :diff_note_on_design }
+ let(:discussion_factory) { :diff_note_on_design }
+ let(:path_to_noteable) { [:issue, :design_collection, :designs, :nodes, 0] }
+
+ before do
+ project.add_developer(current_user)
+ end
+
+ def query(fields)
+ graphql_query_for(:issue, { id: global_id_of(issue) }, <<~FIELDS)
+ designCollection { designs { nodes { #{fields} } } }
+ FIELDS
+ end
+ end
+
it 'is not too deep for anonymous users' do
note_fields = <<~FIELDS
id
@@ -37,7 +54,7 @@ RSpec.describe 'Getting designs related to an issue' do
expect(note_data['id']).to eq(note.to_global_id.to_s)
end
- def query(note_fields = all_graphql_fields_for(Note))
+ def query(note_fields = all_graphql_fields_for(Note, max_depth: 1))
design_node = <<~NODE
designs {
nodes {
diff --git a/spec/requests/api/graphql/project/merge_requests_spec.rb b/spec/requests/api/graphql/project/merge_requests_spec.rb
index c85cb8b2ffe..a11e758c136 100644
--- a/spec/requests/api/graphql/project/merge_requests_spec.rb
+++ b/spec/requests/api/graphql/project/merge_requests_spec.rb
@@ -196,17 +196,18 @@ RSpec.describe 'getting merge request listings nested in a project' do
end
context 'when requesting `commit_count`' do
- let(:requested_fields) { [:commit_count] }
+ let(:merge_request_with_commits) { create(:merge_request, source_project: project) }
+ let(:search_params) { { iids: [merge_request_a.iid.to_s, merge_request_with_commits.iid.to_s] } }
+ let(:requested_fields) { [:iid, :commit_count] }
it 'exposes `commit_count`' do
- merge_request_a.metrics.update!(commits_count: 5)
-
execute_query
- expect(results).to include(a_hash_including('commitCount' => 5))
+ expect(results).to match_array([
+ { "iid" => merge_request_a.iid.to_s, "commitCount" => 0 },
+ { "iid" => merge_request_with_commits.iid.to_s, "commitCount" => 29 }
+ ])
end
-
- include_examples 'N+1 query check'
end
context 'when requesting `merged_at`' do
@@ -396,4 +397,85 @@ RSpec.describe 'getting merge request listings nested in a project' do
end
end
end
+
+ context 'when only the count is requested' do
+ context 'when merged at filter is present' do
+ let_it_be(:merge_request) do
+ create(:merge_request, :unique_branches, source_project: project).tap do |mr|
+ mr.metrics.update!(merged_at: Time.new(2020, 1, 3))
+ end
+ end
+
+ let(:query) do
+ graphql_query_for(:project, { full_path: project.full_path },
+ <<~QUERY
+ mergeRequests(mergedAfter: "2020-01-01", mergedBefore: "2020-01-05", first: 0) {
+ count
+ }
+ QUERY
+ )
+ end
+
+ shared_examples 'count examples' do
+ it 'returns the correct count' do
+ post_graphql(query, current_user: current_user)
+
+ count = graphql_data.dig('project', 'mergeRequests', 'count')
+ expect(count).to eq(1)
+ end
+ end
+
+ context 'when "optimized_merge_request_count_with_merged_at_filter" feature flag is enabled' do
+ before do
+ stub_feature_flags(optimized_merge_request_count_with_merged_at_filter: true)
+ end
+
+ it 'does not query the merge requests table for the count' do
+ query_recorder = ActiveRecord::QueryRecorder.new { post_graphql(query, current_user: current_user) }
+
+ queries = query_recorder.data.each_value.first[:occurrences]
+ expect(queries).not_to include(match(/SELECT COUNT\(\*\) FROM "merge_requests"/))
+ expect(queries).to include(match(/SELECT COUNT\(\*\) FROM "merge_request_metrics"/))
+ end
+
+ context 'when total_time_to_merge and count is queried' do
+ let(:query) do
+ graphql_query_for(:project, { full_path: project.full_path },
+ <<~QUERY
+ mergeRequests(mergedAfter: "2020-01-01", mergedBefore: "2020-01-05", first: 0) {
+ totalTimeToMerge
+ count
+ }
+ QUERY
+ )
+ end
+
+ it 'does not query the merge requests table for the total_time_to_merge' do
+ query_recorder = ActiveRecord::QueryRecorder.new { post_graphql(query, current_user: current_user) }
+
+ queries = query_recorder.data.each_value.first[:occurrences]
+ expect(queries).to include(match(/SELECT.+SUM.+FROM "merge_request_metrics" WHERE/))
+ end
+ end
+
+ it_behaves_like 'count examples'
+
+ context 'when "optimized_merge_request_count_with_merged_at_filter" feature flag is disabled' do
+ before do
+ stub_feature_flags(optimized_merge_request_count_with_merged_at_filter: false)
+ end
+
+ it 'queries the merge requests table for the count' do
+ query_recorder = ActiveRecord::QueryRecorder.new { post_graphql(query, current_user: current_user) }
+
+ queries = query_recorder.data.each_value.first[:occurrences]
+ expect(queries).to include(match(/SELECT COUNT\(\*\) FROM "merge_requests"/))
+ expect(queries).not_to include(match(/SELECT COUNT\(\*\) FROM "merge_request_metrics"/))
+ end
+
+ it_behaves_like 'count examples'
+ end
+ end
+ end
+ end
end
diff --git a/spec/requests/api/graphql/project/packages_spec.rb b/spec/requests/api/graphql/project/packages_spec.rb
index 5df98ed1e6b..b20c96d54c8 100644
--- a/spec/requests/api/graphql/project/packages_spec.rb
+++ b/spec/requests/api/graphql/project/packages_spec.rb
@@ -5,16 +5,27 @@ require 'spec_helper'
RSpec.describe 'getting a package list for a project' do
include GraphqlHelpers
- let_it_be(:project) { create(:project) }
+ let_it_be(:project) { create(:project, :repository) }
let_it_be(:current_user) { create(:user) }
+
let_it_be(:package) { create(:package, project: project) }
- let(:packages_data) { graphql_data['project']['packages']['edges'] }
+ let_it_be(:maven_package) { create(:maven_package, project: project) }
+ let_it_be(:debian_package) { create(:debian_package, project: project) }
+ let_it_be(:composer_package) { create(:composer_package, project: project) }
+ let_it_be(:composer_metadatum) do
+ create(:composer_metadatum, package: composer_package,
+ target_sha: 'afdeh',
+ composer_json: { name: 'x', type: 'y', license: 'z', version: 1 })
+ end
+
+ let(:package_names) { graphql_data_at(:project, :packages, :edges, :node, :name) }
let(:fields) do
<<~QUERY
edges {
node {
- #{all_graphql_fields_for('packages'.classify)}
+ #{all_graphql_fields_for('packages'.classify, excluded: ['project'])}
+ metadata { #{query_graphql_fragment('ComposerMetadata')} }
}
}
QUERY
@@ -37,7 +48,17 @@ RSpec.describe 'getting a package list for a project' do
it_behaves_like 'a working graphql query'
it 'returns packages successfully' do
- expect(packages_data[0]['node']['name']).to eq package.name
+ expect(package_names).to contain_exactly(
+ package.name,
+ maven_package.name,
+ debian_package.name,
+ composer_package.name
+ )
+ end
+
+ it 'deals with metadata' do
+ target_shas = graphql_data_at(:project, :packages, :edges, :node, :metadata, :target_sha)
+ expect(target_shas).to contain_exactly(composer_metadatum.target_sha)
end
end
@@ -53,7 +74,7 @@ RSpec.describe 'getting a package list for a project' do
end
end
- context 'when the user is not autenthicated' do
+ context 'when the user is not authenticated' do
before do
post_graphql(query)
end
diff --git a/spec/requests/api/graphql/project/release_spec.rb b/spec/requests/api/graphql/project/release_spec.rb
index ccc2825da25..963b594a202 100644
--- a/spec/requests/api/graphql/project/release_spec.rb
+++ b/spec/requests/api/graphql/project/release_spec.rb
@@ -283,7 +283,7 @@ RSpec.describe 'Query.project(fullPath).release(tagName)' do
let_it_be(:project) { create(:project, :repository, :private) }
let_it_be(:milestone_1) { create(:milestone, project: project) }
let_it_be(:milestone_2) { create(:milestone, project: project) }
- let_it_be(:release) { create(:release, :with_evidence, project: project, milestones: [milestone_1, milestone_2], released_at: released_at) }
+ let_it_be(:release, reload: true) { create(:release, :with_evidence, project: project, milestones: [milestone_1, milestone_2], released_at: released_at) }
let_it_be(:release_link_1) { create(:release_link, release: release) }
let_it_be(:release_link_2) { create(:release_link, release: release, filepath: link_filepath) }
@@ -324,7 +324,7 @@ RSpec.describe 'Query.project(fullPath).release(tagName)' do
let_it_be(:project) { create(:project, :repository, :public) }
let_it_be(:milestone_1) { create(:milestone, project: project) }
let_it_be(:milestone_2) { create(:milestone, project: project) }
- let_it_be(:release) { create(:release, :with_evidence, project: project, milestones: [milestone_1, milestone_2], released_at: released_at) }
+ let_it_be(:release, reload: true) { create(:release, :with_evidence, project: project, milestones: [milestone_1, milestone_2], released_at: released_at) }
let_it_be(:release_link_1) { create(:release_link, release: release) }
let_it_be(:release_link_2) { create(:release_link, release: release, filepath: link_filepath) }
diff --git a/spec/requests/api/graphql/project/terraform/state_spec.rb b/spec/requests/api/graphql/project/terraform/state_spec.rb
new file mode 100644
index 00000000000..9f1d9ab204a
--- /dev/null
+++ b/spec/requests/api/graphql/project/terraform/state_spec.rb
@@ -0,0 +1,83 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'query a single terraform state' do
+ include GraphqlHelpers
+ include ::API::Helpers::RelatedResourcesHelpers
+
+ let_it_be(:terraform_state) { create(:terraform_state, :with_version, :locked) }
+
+ let(:latest_version) { terraform_state.latest_version }
+ let(:project) { terraform_state.project }
+ let(:current_user) { project.creator }
+ let(:data) { graphql_data.dig('project', 'terraformState') }
+
+ let(:query) do
+ graphql_query_for(
+ :project,
+ { fullPath: project.full_path },
+ query_graphql_field(
+ :terraformState,
+ { name: terraform_state.name },
+ %{
+ id
+ name
+ lockedAt
+ createdAt
+ updatedAt
+
+ latestVersion {
+ id
+ serial
+ createdAt
+ updatedAt
+
+ createdByUser {
+ id
+ }
+
+ job {
+ name
+ }
+ }
+
+ lockedByUser {
+ id
+ }
+ }
+ )
+ )
+ end
+
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ it 'returns terraform state data' do
+ expect(data).to match(a_hash_including({
+ 'id' => global_id_of(terraform_state),
+ 'name' => terraform_state.name,
+ 'lockedAt' => terraform_state.locked_at.iso8601,
+ 'createdAt' => terraform_state.created_at.iso8601,
+ 'updatedAt' => terraform_state.updated_at.iso8601,
+ 'lockedByUser' => { 'id' => global_id_of(terraform_state.locked_by_user) },
+ 'latestVersion' => {
+ 'id' => eq(global_id_of(latest_version)),
+ 'serial' => eq(latest_version.version),
+ 'createdAt' => eq(latest_version.created_at.iso8601),
+ 'updatedAt' => eq(latest_version.updated_at.iso8601),
+ 'createdByUser' => { 'id' => eq(global_id_of(latest_version.created_by_user)) },
+ 'job' => { 'name' => eq(latest_version.build.name) }
+ }
+ }))
+ end
+
+ context 'unauthorized users' do
+ let(:current_user) { nil }
+
+ it { expect(data).to be_nil }
+ end
+end
diff --git a/spec/requests/api/group_labels_spec.rb b/spec/requests/api/group_labels_spec.rb
index 72621e2ce5e..c677e68b285 100644
--- a/spec/requests/api/group_labels_spec.rb
+++ b/spec/requests/api/group_labels_spec.rb
@@ -3,13 +3,19 @@
require 'spec_helper'
RSpec.describe API::GroupLabels do
+ let_it_be(:valid_group_label_title_1) { 'Label foo & bar:subgroup::v.1' }
+ let_it_be(:valid_group_label_title_1_esc) { ERB::Util.url_encode(valid_group_label_title_1) }
+ let_it_be(:valid_group_label_title_2) { 'Bar & foo:subgroup::v.2' }
+ let_it_be(:valid_subgroup_label_title_1) { 'Support label foobar:sub::v.1' }
+ let_it_be(:valid_new_label_title) { 'New & foo:feature::v.3' }
+
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:subgroup) { create(:group, parent: group) }
let!(:group_member) { create(:group_member, group: group, user: user) }
- let!(:group_label1) { create(:group_label, title: 'feature-label', group: group) }
- let!(:group_label2) { create(:group_label, title: 'bug', group: group) }
- let!(:subgroup_label) { create(:group_label, title: 'support-label', group: subgroup) }
+ let!(:group_label1) { create(:group_label, title: valid_group_label_title_1, group: group) }
+ let!(:group_label2) { create(:group_label, title: valid_group_label_title_2, group: group) }
+ let!(:subgroup_label) { create(:group_label, title: valid_subgroup_label_title_1, group: subgroup) }
describe 'GET :id/labels' do
context 'get current group labels' do
@@ -104,7 +110,7 @@ RSpec.describe API::GroupLabels do
describe 'GET :id/labels/:label_id' do
it 'returns a single label for the group' do
- get api("/groups/#{group.id}/labels/#{group_label1.name}", user)
+ get api("/groups/#{group.id}/labels/#{valid_group_label_title_1_esc}", user)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(group_label1.name)
@@ -117,13 +123,13 @@ RSpec.describe API::GroupLabels do
it 'returns created label when all params are given' do
post api("/groups/#{group.id}/labels", user),
params: {
- name: 'Foo',
+ name: valid_new_label_title,
color: '#FFAABB',
description: 'test'
}
expect(response).to have_gitlab_http_status(:created)
- expect(json_response['name']).to eq('Foo')
+ expect(json_response['name']).to eq(valid_new_label_title)
expect(json_response['color']).to eq('#FFAABB')
expect(json_response['description']).to eq('test')
end
@@ -131,12 +137,12 @@ RSpec.describe API::GroupLabels do
it 'returns created label when only required params are given' do
post api("/groups/#{group.id}/labels", user),
params: {
- name: 'Foo & Bar',
+ name: valid_new_label_title,
color: '#FFAABB'
}
expect(response).to have_gitlab_http_status(:created)
- expect(json_response['name']).to eq('Foo & Bar')
+ expect(json_response['name']).to eq(valid_new_label_title)
expect(json_response['color']).to eq('#FFAABB')
expect(json_response['description']).to be_nil
end
@@ -204,7 +210,7 @@ RSpec.describe API::GroupLabels do
describe 'DELETE /groups/:id/labels/:label_id' do
it 'returns 204 for existing label' do
- delete api("/groups/#{group.id}/labels/#{group_label1.name}", user)
+ delete api("/groups/#{group.id}/labels/#{valid_group_label_title_1_esc}", user)
expect(response).to have_gitlab_http_status(:no_content)
end
@@ -228,7 +234,7 @@ RSpec.describe API::GroupLabels do
end
it_behaves_like '412 response' do
- let(:request) { api("/groups/#{group.id}/labels/#{group_label1.name}", user) }
+ let(:request) { api("/groups/#{group.id}/labels/#{valid_group_label_title_1_esc}", user) }
end
end
@@ -237,13 +243,13 @@ RSpec.describe API::GroupLabels do
put api("/groups/#{group.id}/labels", user),
params: {
name: group_label1.name,
- new_name: 'New Label',
+ new_name: valid_new_label_title,
color: '#FFFFFF',
description: 'test'
}
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['name']).to eq('New Label')
+ expect(json_response['name']).to eq(valid_new_label_title)
expect(json_response['color']).to eq('#FFFFFF')
expect(json_response['description']).to eq('test')
end
@@ -255,11 +261,11 @@ RSpec.describe API::GroupLabels do
put api("/groups/#{subgroup.id}/labels", user),
params: {
name: subgroup_label.name,
- new_name: 'New Label'
+ new_name: valid_new_label_title
}
expect(response).to have_gitlab_http_status(:ok)
- expect(subgroup.labels[0].name).to eq('New Label')
+ expect(subgroup.labels[0].name).to eq(valid_new_label_title)
expect(group_label1.name).to eq(group_label1.title)
end
@@ -267,7 +273,7 @@ RSpec.describe API::GroupLabels do
put api("/groups/#{group.id}/labels", user),
params: {
name: 'not_exists',
- new_name: 'label3'
+ new_name: valid_new_label_title
}
expect(response).to have_gitlab_http_status(:not_found)
@@ -291,15 +297,15 @@ RSpec.describe API::GroupLabels do
describe 'PUT /groups/:id/labels/:label_id' do
it 'returns 200 if name and colors and description are changed' do
- put api("/groups/#{group.id}/labels/#{group_label1.name}", user),
+ put api("/groups/#{group.id}/labels/#{valid_group_label_title_1_esc}", user),
params: {
- new_name: 'New Label',
+ new_name: valid_new_label_title,
color: '#FFFFFF',
description: 'test'
}
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['name']).to eq('New Label')
+ expect(json_response['name']).to eq(valid_new_label_title)
expect(json_response['color']).to eq('#FFFFFF')
expect(json_response['description']).to eq('test')
end
@@ -310,25 +316,25 @@ RSpec.describe API::GroupLabels do
put api("/groups/#{subgroup.id}/labels/#{subgroup_label.name}", user),
params: {
- new_name: 'New Label'
+ new_name: valid_new_label_title
}
expect(response).to have_gitlab_http_status(:ok)
- expect(subgroup.labels[0].name).to eq('New Label')
+ expect(subgroup.labels[0].name).to eq(valid_new_label_title)
expect(group_label1.name).to eq(group_label1.title)
end
it 'returns 404 if label does not exist' do
put api("/groups/#{group.id}/labels/not_exists", user),
params: {
- new_name: 'label3'
+ new_name: valid_new_label_title
}
expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 400 if no new parameters given' do
- put api("/groups/#{group.id}/labels/#{group_label1.name}", user)
+ put api("/groups/#{group.id}/labels/#{valid_group_label_title_1_esc}", user)
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('new_name, color, description are missing, '\
@@ -339,7 +345,7 @@ RSpec.describe API::GroupLabels do
describe 'POST /groups/:id/labels/:label_id/subscribe' do
context 'when label_id is a label title' do
it 'subscribes to the label' do
- post api("/groups/#{group.id}/labels/#{group_label1.title}/subscribe", user)
+ post api("/groups/#{group.id}/labels/#{valid_group_label_title_1_esc}/subscribe", user)
expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(group_label1.title)
@@ -385,7 +391,7 @@ RSpec.describe API::GroupLabels do
context 'when label_id is a label title' do
it 'unsubscribes from the label' do
- post api("/groups/#{group.id}/labels/#{group_label1.title}/unsubscribe", user)
+ post api("/groups/#{group.id}/labels/#{valid_group_label_title_1_esc}/unsubscribe", user)
expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(group_label1.title)
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index c7756a4fae5..1c359b6e50f 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -54,7 +54,7 @@ RSpec.describe API::Groups do
it_behaves_like 'invalid file upload request'
end
- context 'when file format is not supported' do
+ context 'when file is too large' do
let(:file_path) { 'spec/fixtures/big-image.png' }
let(:message) { 'is too big' }
@@ -661,6 +661,7 @@ RSpec.describe API::Groups do
describe 'PUT /groups/:id' do
let(:new_group_name) { 'New Group'}
+ let(:file_path) { 'spec/fixtures/dk.png' }
it_behaves_like 'group avatar upload' do
def make_upload_request
@@ -678,7 +679,8 @@ RSpec.describe API::Groups do
request_access_enabled: true,
project_creation_level: "noone",
subgroup_creation_level: "maintainer",
- default_branch_protection: ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS
+ default_branch_protection: ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS,
+ avatar: fixture_file_upload(file_path)
}
expect(response).to have_gitlab_http_status(:ok)
@@ -701,6 +703,7 @@ RSpec.describe API::Groups do
expect(json_response['shared_projects']).to be_an Array
expect(json_response['shared_projects'].length).to eq(0)
expect(json_response['default_branch_protection']).to eq(::Gitlab::Access::MAINTAINER_PROJECT_ACCESS)
+ expect(json_response['avatar_url']).to end_with('dk.png')
end
context 'updating the `default_branch_protection` attribute' do
diff --git a/spec/requests/api/internal/base_spec.rb b/spec/requests/api/internal/base_spec.rb
index e04f63befd0..be4ecd0a734 100644
--- a/spec/requests/api/internal/base_spec.rb
+++ b/spec/requests/api/internal/base_spec.rb
@@ -1094,6 +1094,104 @@ RSpec.describe API::Internal::Base do
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
+
+ context 'admin mode' do
+ shared_examples 'pushes succeed for ssh and http' do
+ it 'accepts the SSH push' do
+ push(key, project)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'accepts the HTTP push' do
+ push(key, project, 'http')
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ shared_examples 'pushes fail for ssh and http' do
+ it 'rejects the SSH push' do
+ push(key, project)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'rejects the HTTP push' do
+ push(key, project, 'http')
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'feature flag :user_mode_in_session is enabled' do
+ context 'with an admin user' do
+ let(:user) { create(:admin) }
+
+ context 'is member of the project' do
+ before do
+ project.add_developer(user)
+ end
+
+ it_behaves_like 'pushes succeed for ssh and http'
+ end
+
+ context 'is not member of the project' do
+ it_behaves_like 'pushes succeed for ssh and http'
+ end
+ end
+
+ context 'with a regular user' do
+ context 'is member of the project' do
+ before do
+ project.add_developer(user)
+ end
+
+ it_behaves_like 'pushes succeed for ssh and http'
+ end
+
+ context 'is not member of the project' do
+ it_behaves_like 'pushes fail for ssh and http'
+ end
+ end
+ end
+
+ context 'feature flag :user_mode_in_session is disabled' do
+ before do
+ stub_feature_flags(user_mode_in_session: false)
+ end
+
+ context 'with an admin user' do
+ let(:user) { create(:admin) }
+
+ context 'is member of the project' do
+ before do
+ project.add_developer(user)
+ end
+
+ it_behaves_like 'pushes succeed for ssh and http'
+ end
+
+ context 'is not member of the project' do
+ it_behaves_like 'pushes succeed for ssh and http'
+ end
+ end
+
+ context 'with a regular user' do
+ context 'is member of the project' do
+ before do
+ project.add_developer(user)
+ end
+
+ it_behaves_like 'pushes succeed for ssh and http'
+ end
+
+ context 'is not member of the project' do
+ it_behaves_like 'pushes fail for ssh and http'
+ end
+ end
+ end
+ end
end
describe 'POST /internal/post_receive', :clean_gitlab_redis_shared_state do
diff --git a/spec/requests/api/internal/kubernetes_spec.rb b/spec/requests/api/internal/kubernetes_spec.rb
index afff3647b91..2e13016a0a6 100644
--- a/spec/requests/api/internal/kubernetes_spec.rb
+++ b/spec/requests/api/internal/kubernetes_spec.rb
@@ -62,25 +62,25 @@ RSpec.describe API::Internal::Kubernetes do
let!(:agent_token) { create(:cluster_agent_token) }
it 'returns no_content for valid gitops_sync_count' do
- send_request(params: { gitops_sync_count: 10 }, headers: { 'Authorization' => "Bearer #{agent_token.token}" })
+ send_request(params: { gitops_sync_count: 10 })
expect(response).to have_gitlab_http_status(:no_content)
end
it 'returns no_content 0 gitops_sync_count' do
- send_request(params: { gitops_sync_count: 0 }, headers: { 'Authorization' => "Bearer #{agent_token.token}" })
+ send_request(params: { gitops_sync_count: 0 })
expect(response).to have_gitlab_http_status(:no_content)
end
it 'returns 400 for non number' do
- send_request(params: { gitops_sync_count: 'string' }, headers: { 'Authorization' => "Bearer #{agent_token.token}" })
+ send_request(params: { gitops_sync_count: 'string' })
expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 400 for negative number' do
- send_request(params: { gitops_sync_count: '-1' }, headers: { 'Authorization' => "Bearer #{agent_token.token}" })
+ send_request(params: { gitops_sync_count: '-1' })
expect(response).to have_gitlab_http_status(:bad_request)
end
@@ -125,6 +125,36 @@ RSpec.describe API::Internal::Kubernetes do
)
)
end
+
+ context 'on GitLab.com' do
+ before do
+ allow(::Gitlab).to receive(:com?).and_return(true)
+ end
+
+ context 'kubernetes_agent_on_gitlab_com feature flag disabled' do
+ before do
+ stub_feature_flags(kubernetes_agent_on_gitlab_com: false)
+ end
+
+ it 'returns 403' do
+ send_request(headers: { 'Authorization' => "Bearer #{agent_token.token}" })
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'kubernetes_agent_on_gitlab_com feature flag enabled' do
+ before do
+ stub_feature_flags(kubernetes_agent_on_gitlab_com: agent_token.agent.project)
+ end
+
+ it 'returns success' do
+ send_request(headers: { 'Authorization' => "Bearer #{agent_token.token}" })
+
+ expect(response).to have_gitlab_http_status(:success)
+ end
+ end
+ end
end
end
@@ -174,6 +204,36 @@ RSpec.describe API::Internal::Kubernetes do
expect(response).to have_gitlab_http_status(:not_found)
end
end
+
+ context 'on GitLab.com' do
+ before do
+ allow(::Gitlab).to receive(:com?).and_return(true)
+ end
+
+ context 'kubernetes_agent_on_gitlab_com feature flag disabled' do
+ before do
+ stub_feature_flags(kubernetes_agent_on_gitlab_com: false)
+ end
+
+ it 'returns 403' do
+ send_request(params: { id: project.id }, headers: { 'Authorization' => "Bearer #{agent_token.token}" })
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'kubernetes_agent_on_gitlab_com feature flag enabled' do
+ before do
+ stub_feature_flags(kubernetes_agent_on_gitlab_com: agent_token.agent.project)
+ end
+
+ it 'returns success' do
+ send_request(params: { id: project.id }, headers: { 'Authorization' => "Bearer #{agent_token.token}" })
+
+ expect(response).to have_gitlab_http_status(:success)
+ end
+ end
+ end
end
context 'project is private' do
diff --git a/spec/requests/api/labels_spec.rb b/spec/requests/api/labels_spec.rb
index 6db6de4b533..e3fffd3e3fd 100644
--- a/spec/requests/api/labels_spec.rb
+++ b/spec/requests/api/labels_spec.rb
@@ -10,14 +10,19 @@ RSpec.describe API::Labels do
else
label_id = spec_params[:name] || spec_params[:label_id]
- put api("/projects/#{project.id}/labels/#{label_id}", user),
+ put api("/projects/#{project.id}/labels/#{ERB::Util.url_encode(label_id)}", user),
params: request_params.merge(spec_params.except(:name, :id))
end
end
+ let_it_be(:valid_label_title_1) { 'Label foo & bar:subgroup::v.1' }
+ let_it_be(:valid_label_title_1_esc) { ERB::Util.url_encode(valid_label_title_1) }
+ let_it_be(:valid_label_title_2) { 'Label bar & foo:subgroup::v.2' }
+ let_it_be(:valid_group_label_title_1) { 'Group label foobar:sub::v.1' }
+
let(:user) { create(:user) }
let(:project) { create(:project, creator_id: user.id, namespace: user.namespace) }
- let!(:label1) { create(:label, description: 'the best label', title: 'label1', project: project) }
+ let!(:label1) { create(:label, description: 'the best label v.1', title: valid_label_title_1, project: project) }
let!(:priority_label) { create(:label, title: 'bug', project: project, priority: 3) }
route_types = [:deprecated, :rest]
@@ -25,10 +30,10 @@ RSpec.describe API::Labels do
shared_examples 'label update API' do
route_types.each do |route_type|
it "returns 200 if name is changed (#{route_type} route)" do
- put_labels_api(route_type, user, spec_params, new_name: 'New Label')
+ put_labels_api(route_type, user, spec_params, new_name: valid_label_title_2)
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['name']).to eq('New Label')
+ expect(json_response['name']).to eq(valid_label_title_2)
expect(json_response['color']).to eq(label1.color)
end
@@ -77,10 +82,10 @@ RSpec.describe API::Labels do
end
it "returns 200 if name and colors and description are changed (#{route_type} route)" do
- put_labels_api(route_type, user, spec_params, new_name: 'New Label', color: '#FFFFFF', description: 'test')
+ put_labels_api(route_type, user, spec_params, new_name: valid_label_title_2, color: '#FFFFFF', description: 'test')
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['name']).to eq('New Label')
+ expect(json_response['name']).to eq(valid_label_title_2)
expect(json_response['color']).to eq('#FFFFFF')
expect(json_response['description']).to eq('test')
end
@@ -141,7 +146,7 @@ RSpec.describe API::Labels do
priority: nil
}.merge(spec_params.except(:name, :id))
- put api("/projects/#{project.id}/labels/#{label_id}", user),
+ put api("/projects/#{project.id}/labels/#{ERB::Util.url_encode(label_id)}", user),
params: request_params
expect(response).to have_gitlab_http_status(:ok)
@@ -167,7 +172,7 @@ RSpec.describe API::Labels do
it 'returns 204 for existing label (rest route)' do
label_id = spec_params[:name] || spec_params[:label_id]
- delete api("/projects/#{project.id}/labels/#{label_id}", user), params: spec_params.except(:name, :label_id)
+ delete api("/projects/#{project.id}/labels/#{ERB::Util.url_encode(label_id)}", user), params: spec_params.except(:name, :label_id)
expect(response).to have_gitlab_http_status(:no_content)
end
@@ -179,7 +184,7 @@ RSpec.describe API::Labels do
describe 'GET /projects/:id/labels' do
let_it_be(:group) { create(:group) }
- let_it_be(:group_label) { create(:group_label, title: 'feature label', group: group) }
+ let_it_be(:group_label) { create(:group_label, title: valid_group_label_title_1, group: group) }
before do
project.update!(group: group)
@@ -219,7 +224,7 @@ RSpec.describe API::Labels do
'closed_issues_count' => 1,
'open_merge_requests_count' => 0,
'name' => label1.name,
- 'description' => 'the best label',
+ 'description' => label1.description,
'color' => a_string_matching(/^#\h{6}$/),
'text_color' => a_string_matching(/^#\h{6}$/),
'priority' => nil,
@@ -293,14 +298,14 @@ RSpec.describe API::Labels do
it 'returns created label when all params' do
post api("/projects/#{project.id}/labels", user),
params: {
- name: 'Foo',
+ name: valid_label_title_2,
color: '#FFAABB',
description: 'test',
priority: 2
}
expect(response).to have_gitlab_http_status(:created)
- expect(json_response['name']).to eq('Foo')
+ expect(json_response['name']).to eq(valid_label_title_2)
expect(json_response['color']).to eq('#FFAABB')
expect(json_response['description']).to eq('test')
expect(json_response['priority']).to eq(2)
@@ -309,12 +314,12 @@ RSpec.describe API::Labels do
it 'returns created label when only required params' do
post api("/projects/#{project.id}/labels", user),
params: {
- name: 'Foo & Bar',
+ name: valid_label_title_2,
color: '#FFAABB'
}
expect(response).to have_gitlab_http_status(:created)
- expect(json_response['name']).to eq('Foo & Bar')
+ expect(json_response['name']).to eq(valid_label_title_2)
expect(json_response['color']).to eq('#FFAABB')
expect(json_response['description']).to be_nil
expect(json_response['priority']).to be_nil
@@ -323,13 +328,13 @@ RSpec.describe API::Labels do
it 'creates a prioritized label' do
post api("/projects/#{project.id}/labels", user),
params: {
- name: 'Foo & Bar',
+ name: valid_label_title_2,
color: '#FFAABB',
priority: 3
}
expect(response).to have_gitlab_http_status(:created)
- expect(json_response['name']).to eq('Foo & Bar')
+ expect(json_response['name']).to eq(valid_label_title_2)
expect(json_response['color']).to eq('#FFAABB')
expect(json_response['description']).to be_nil
expect(json_response['priority']).to eq(3)
@@ -348,7 +353,7 @@ RSpec.describe API::Labels do
it 'returns 400 for invalid color' do
post api("/projects/#{project.id}/labels", user),
params: {
- name: 'Foo',
+ name: valid_label_title_2,
color: '#FFAA'
}
expect(response).to have_gitlab_http_status(:bad_request)
@@ -358,7 +363,7 @@ RSpec.describe API::Labels do
it 'returns 400 for too long color code' do
post api("/projects/#{project.id}/labels", user),
params: {
- name: 'Foo',
+ name: valid_label_title_2,
color: '#FFAAFFFF'
}
expect(response).to have_gitlab_http_status(:bad_request)
@@ -393,7 +398,7 @@ RSpec.describe API::Labels do
it 'returns 400 for invalid priority' do
post api("/projects/#{project.id}/labels", user),
params: {
- name: 'Foo',
+ name: valid_label_title_2,
color: '#FFAAFFFF',
priority: 'foo'
}
@@ -404,7 +409,7 @@ RSpec.describe API::Labels do
it 'returns 409 if label already exists in project' do
post api("/projects/#{project.id}/labels", user),
params: {
- name: 'label1',
+ name: valid_label_title_1,
color: '#FFAABB'
}
expect(response).to have_gitlab_http_status(:conflict)
@@ -414,7 +419,7 @@ RSpec.describe API::Labels do
describe 'DELETE /projects/:id/labels' do
it_behaves_like 'label delete API' do
- let(:spec_params) { { name: 'label1' } }
+ let(:spec_params) { { name: valid_label_title_1 } }
end
it_behaves_like 'label delete API' do
@@ -422,7 +427,7 @@ RSpec.describe API::Labels do
end
it 'returns 404 for non existing label' do
- delete api("/projects/#{project.id}/labels", user), params: { name: 'label2' }
+ delete api("/projects/#{project.id}/labels", user), params: { name: 'unknown' }
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Label Not Found')
@@ -446,14 +451,14 @@ RSpec.describe API::Labels do
it_behaves_like '412 response' do
let(:request) { api("/projects/#{project.id}/labels", user) }
- let(:params) { { name: 'label1' } }
+ let(:params) { { name: valid_label_title_1 } }
end
end
describe 'PUT /projects/:id/labels' do
context 'when using name' do
it_behaves_like 'label update API' do
- let(:spec_params) { { name: 'label1' } }
+ let(:spec_params) { { name: valid_label_title_1 } }
let(:expected_response_label_id) { label1.id }
end
end
@@ -468,7 +473,7 @@ RSpec.describe API::Labels do
it 'returns 404 if label does not exist' do
put api("/projects/#{project.id}/labels", user),
params: {
- name: 'label2',
+ name: valid_label_title_2,
new_name: 'label3'
}
@@ -571,7 +576,7 @@ RSpec.describe API::Labels do
describe "POST /projects/:id/labels/:label_id/subscribe" do
context "when label_id is a label title" do
it "subscribes to the label" do
- post api("/projects/#{project.id}/labels/#{label1.title}/subscribe", user)
+ post api("/projects/#{project.id}/labels/#{valid_label_title_1_esc}/subscribe", user)
expect(response).to have_gitlab_http_status(:created)
expect(json_response["name"]).to eq(label1.title)
@@ -617,7 +622,7 @@ RSpec.describe API::Labels do
context "when label_id is a label title" do
it "unsubscribes from the label" do
- post api("/projects/#{project.id}/labels/#{label1.title}/unsubscribe", user)
+ post api("/projects/#{project.id}/labels/#{valid_label_title_1_esc}/unsubscribe", user)
expect(response).to have_gitlab_http_status(:created)
expect(json_response["name"]).to eq(label1.title)
diff --git a/spec/requests/api/maven_packages_spec.rb b/spec/requests/api/maven_packages_spec.rb
index e5d11fb1218..5c85909a851 100644
--- a/spec/requests/api/maven_packages_spec.rb
+++ b/spec/requests/api/maven_packages_spec.rb
@@ -32,6 +32,7 @@ RSpec.describe API::MavenPackages do
end
let(:version) { '1.0-SNAPSHOT' }
+ let(:param_path) { "#{package_name}/#{version}"}
before do
project.add_developer(user)
@@ -695,6 +696,14 @@ RSpec.describe API::MavenPackages do
expect(json_response['message']).to include('Duplicate package is not allowed')
end
+ context 'when uploading to the versionless package which contains metadata about all versions' do
+ let(:version) { nil }
+ let(:param_path) { package_name }
+ let!(:package) { create(:maven_package, project: project, version: version, name: project.full_path) }
+
+ it_behaves_like 'storing the package file'
+ end
+
context 'when uploading different non-duplicate files to the same package' do
let!(:package) { create(:maven_package, project: project, name: project.full_path) }
@@ -744,7 +753,7 @@ RSpec.describe API::MavenPackages do
end
def upload_file(params: {}, request_headers: headers, file_extension: 'jar')
- url = "/projects/#{project.id}/packages/maven/#{package_name}/#{version}/my-app-1.0-20180724.124855-1.#{file_extension}"
+ url = "/projects/#{project.id}/packages/maven/#{param_path}/my-app-1.0-20180724.124855-1.#{file_extension}"
workhorse_finalize(
api(url),
method: :put,
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 3a3eae73932..1d7476d6f23 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -1542,9 +1542,9 @@ RSpec.describe API::MergeRequests do
end
end
- context 'when access_raw_diffs is passed as an option' do
+ context 'when access_raw_diffs is true' do
it_behaves_like 'accesses diffs via raw_diffs' do
- let(:params) { { access_raw_diffs: true } }
+ let(:params) { { access_raw_diffs: "true" } }
end
end
end
@@ -1736,6 +1736,36 @@ RSpec.describe API::MergeRequests do
end
end
+ context 'accepts reviewer_ids' do
+ let(:params) do
+ {
+ title: 'Test merge request',
+ source_branch: 'feature_conflict',
+ target_branch: 'master',
+ author_id: user.id,
+ reviewer_ids: [user2.id]
+ }
+ end
+
+ it 'creates a new merge request with a reviewer' do
+ post api("/projects/#{project.id}/merge_requests", user), params: params
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['title']).to eq('Test merge request')
+ expect(json_response['reviewers'].first['name']).to eq(user2.name)
+ end
+
+ it 'creates a new merge request with no reviewer' do
+ params[:reviewer_ids] = []
+
+ post api("/projects/#{project.id}/merge_requests", user), params: params
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['title']).to eq('Test merge request')
+ expect(json_response['reviewers']).to be_empty
+ end
+ end
+
context 'between branches projects' do
context 'different labels' do
let(:params) do
@@ -2081,6 +2111,34 @@ RSpec.describe API::MergeRequests do
it_behaves_like 'issuable update endpoint' do
let(:entity) { merge_request }
end
+
+ context 'accepts reviewer_ids' do
+ let(:params) do
+ {
+ title: 'Updated merge request',
+ reviewer_ids: [user2.id]
+ }
+ end
+
+ it 'adds a reviewer to the existing merge request' do
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['title']).to eq('Updated merge request')
+ expect(json_response['reviewers'].first['name']).to eq(user2.name)
+ end
+
+ it 'removes a reviewer from the existing merge request' do
+ merge_request.reviewers = [user2]
+ params[:reviewer_ids] = []
+
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['title']).to eq('Updated merge request')
+ expect(json_response['reviewers']).to be_empty
+ end
+ end
end
describe "POST /projects/:id/merge_requests/:merge_request_iid/context_commits" do
diff --git a/spec/requests/api/oauth_tokens_spec.rb b/spec/requests/api/oauth_tokens_spec.rb
index 23d5df873d4..52c7408545f 100644
--- a/spec/requests/api/oauth_tokens_spec.rb
+++ b/spec/requests/api/oauth_tokens_spec.rb
@@ -26,17 +26,14 @@ RSpec.describe 'OAuth tokens' do
end
context 'when user does not have 2FA enabled' do
- # NOTE: using ROPS grant flow without client credentials will be deprecated
- # and removed in the next version of Doorkeeper.
- # See https://gitlab.com/gitlab-org/gitlab/-/issues/219137
context 'when no client credentials provided' do
- it 'creates an access token' do
+ it 'does not create an access token' do
user = create(:user)
request_oauth_token(user)
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['access_token']).not_to be_nil
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ expect(json_response['access_token']).to be_nil
end
end
@@ -54,15 +51,11 @@ RSpec.describe 'OAuth tokens' do
context 'with invalid credentials' do
it 'does not create an access token' do
- # NOTE: remove this after update to Doorkeeper 5.5 or newer, see
- # https://gitlab.com/gitlab-org/gitlab/-/issues/219137
- pending 'Enable this example after upgrading Doorkeeper to 5.5 or newer'
-
user = create(:user)
request_oauth_token(user, basic_auth_header(client.uid, 'invalid secret'))
- expect(response).to have_gitlab_http_status(:bad_request)
+ expect(response).to have_gitlab_http_status(:unauthorized)
expect(json_response['error']).to eq('invalid_client')
end
end
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 4acd0eea448..6d8cdde2c4f 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -1139,7 +1139,7 @@ RSpec.describe API::Projects do
let!(:public_project) { create(:project, :public, name: 'public_project', creator_id: user4.id, namespace: user4.namespace) }
it 'returns error when user not found' do
- get api('/users/0/projects/')
+ get api("/users/#{non_existing_record_id}/projects/")
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
@@ -1687,7 +1687,7 @@ RSpec.describe API::Projects do
end
it 'returns a 404 error if not found' do
- get api('/projects/42', user)
+ get api("/projects/#{non_existing_record_id}", user)
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Project Not Found')
end
@@ -2048,7 +2048,7 @@ RSpec.describe API::Projects do
end
it 'returns a 404 error if not found' do
- get api('/projects/42/users', user)
+ get api("/projects/#{non_existing_record_id}/users", user)
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Project Not Found')
@@ -2154,7 +2154,7 @@ RSpec.describe API::Projects do
end
it 'fails if forked_from project which does not exist' do
- post api("/projects/#{project_fork_target.id}/fork/0", admin)
+ post api("/projects/#{project_fork_target.id}/fork/#{non_existing_record_id}", admin)
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -2398,7 +2398,7 @@ RSpec.describe API::Projects do
end
it 'returns a 404 error when project does not exist' do
- delete api("/projects/123/share/#{non_existing_record_id}", user)
+ delete api("/projects/#{non_existing_record_id}/share/#{non_existing_record_id}", user)
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -2955,7 +2955,7 @@ RSpec.describe API::Projects do
end
it 'returns the proper security headers' do
- get api('/projects/1/starrers', current_user)
+ get api("/projects/#{public_project.id}/starrers", current_user)
expect(response).to include_security_headers
end
@@ -3028,7 +3028,7 @@ RSpec.describe API::Projects do
end
it 'returns not_found(404) for not existing project' do
- get api("/projects/0/languages", user)
+ get api("/projects/#{non_existing_record_id}/languages", user)
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -3079,7 +3079,7 @@ RSpec.describe API::Projects do
end
it 'does not remove a non existing project' do
- delete api('/projects/1328', user)
+ delete api("/projects/#{non_existing_record_id}", user)
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -3098,7 +3098,7 @@ RSpec.describe API::Projects do
end
it 'does not remove a non existing project' do
- delete api('/projects/1328', admin)
+ delete api("/projects/#{non_existing_record_id}", admin)
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -3177,7 +3177,7 @@ RSpec.describe API::Projects do
end
it 'fails if project to fork from does not exist' do
- post api('/projects/424242/fork', user)
+ post api("/projects/#{non_existing_record_id}/fork", user)
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Project Not Found')
@@ -3211,7 +3211,7 @@ RSpec.describe API::Projects do
end
it 'fails if trying to fork to non-existent namespace' do
- post api("/projects/#{project.id}/fork", user2), params: { namespace: 42424242 }
+ post api("/projects/#{project.id}/fork", user2), params: { namespace: non_existing_record_id }
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Namespace Not Found')
@@ -3328,8 +3328,8 @@ RSpec.describe API::Projects do
expect(json_response['message']['path']).to eq(['has already been taken'])
end
- it 'accepts a name for the target project' do
- post api("/projects/#{project.id}/fork", user2), params: { name: 'My Random Project' }
+ it 'accepts custom parameters for the target project' do
+ post api("/projects/#{project.id}/fork", user2), params: { name: 'My Random Project', description: 'A description', visibility: 'private' }
expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq('My Random Project')
@@ -3337,6 +3337,8 @@ RSpec.describe API::Projects do
expect(json_response['owner']['id']).to eq(user2.id)
expect(json_response['namespace']['id']).to eq(user2.namespace.id)
expect(json_response['forked_from_project']['id']).to eq(project.id)
+ expect(json_response['description']).to eq('A description')
+ expect(json_response['visibility']).to eq('private')
expect(json_response['import_status']).to eq('scheduled')
expect(json_response).to include("import_error")
end
@@ -3368,6 +3370,13 @@ RSpec.describe API::Projects do
expect(json_response['message']['path']).to eq(['has already been taken'])
expect(json_response['message']['name']).to eq(['has already been taken'])
end
+
+ it 'fails to fork with an unknown visibility level' do
+ post api("/projects/#{project.id}/fork", user2), params: { visibility: 'something' }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('visibility does not have a valid value')
+ end
end
context 'when unauthenticated' do
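
Note: the projects_spec hunks above replace hard-coded IDs such as /projects/42 with the non_existing_record_id spec helper so the 404 examples cannot collide with records created by other examples. A minimal sketch of that idea follows; the helper name comes from the diff, but the module name and the constant value below are assumptions for illustration only, not GitLab's real implementation.

    # Hypothetical helper, shown only to illustrate the pattern; GitLab's real
    # non_existing_record_id implementation may differ.
    module NonExistingRecordsHelpers
      # An ID well above anything the test database will assign, so lookups
      # against it reliably return 404.
      def non_existing_record_id
        2_147_483_647 # assumption: integer column max, used only as an example
      end
    end

    # Usage, matching the requests rewritten above:
    #   get api("/projects/#{non_existing_record_id}", user)
    #   expect(response).to have_gitlab_http_status(:not_found)
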
diff --git a/spec/requests/api/pypi_packages_spec.rb b/spec/requests/api/pypi_packages_spec.rb
index 72a470dca4b..94ecd177890 100644
--- a/spec/requests/api/pypi_packages_spec.rb
+++ b/spec/requests/api/pypi_packages_spec.rb
@@ -5,6 +5,7 @@ RSpec.describe API::PypiPackages do
include WorkhorseHelpers
include PackagesManagerApiSpecHelpers
include HttpBasicAuthHelpers
+ using RSpec::Parameterized::TableSyntax
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, :public) }
@@ -20,8 +21,6 @@ RSpec.describe API::PypiPackages do
subject { get api(url) }
context 'with valid project' do
- using RSpec::Parameterized::TableSyntax
-
where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
'PUBLIC' | :developer | true | true | 'PyPI package versions' | :success
'PUBLIC' | :guest | true | true | 'PyPI package versions' | :success
@@ -83,8 +82,6 @@ RSpec.describe API::PypiPackages do
subject { post api(url), headers: headers }
context 'with valid project' do
- using RSpec::Parameterized::TableSyntax
-
where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
'PUBLIC' | :developer | true | true | 'process PyPI api request' | :success
'PUBLIC' | :guest | true | true | 'process PyPI api request' | :forbidden
@@ -149,8 +146,6 @@ RSpec.describe API::PypiPackages do
end
context 'with valid project' do
- using RSpec::Parameterized::TableSyntax
-
where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
'PUBLIC' | :developer | true | true | 'PyPI package creation' | :created
'PUBLIC' | :guest | true | true | 'process PyPI api request' | :forbidden
@@ -239,8 +234,6 @@ RSpec.describe API::PypiPackages do
subject { get api(url) }
context 'with valid project' do
- using RSpec::Parameterized::TableSyntax
-
where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
'PUBLIC' | :developer | true | true | 'PyPI package download' | :success
'PUBLIC' | :guest | true | true | 'PyPI package download' | :success
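
Note: the pypi_packages_spec change above hoists `using RSpec::Parameterized::TableSyntax` to the top-level describe block instead of repeating it in each context. For readers unfamiliar with the syntax, a minimal standalone sketch (hypothetical spec, not part of this diff; it assumes the rspec-parameterized gem is already loaded, as GitLab's spec_helper does):

    # Hypothetical parameterized spec illustrating where/with_them.
    RSpec.describe 'a parameterized request' do
      using RSpec::Parameterized::TableSyntax

      # Each table row becomes one generated example; the column names are
      # available inside with_them as let-style helpers.
      where(:visibility, :member, :expected_status) do
        'PUBLIC'  | true  | :success
        'PUBLIC'  | false | :success
        'PRIVATE' | false | :not_found
      end

      with_them do
        it 'exposes the row values' do
          expect(expected_status).to be_a(Symbol)
          expect([visibility, member].size).to eq(2)
        end
      end
    end
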
diff --git a/spec/requests/api/repositories_spec.rb b/spec/requests/api/repositories_spec.rb
index 45bce8c8a5c..a05f1730708 100644
--- a/spec/requests/api/repositories_spec.rb
+++ b/spec/requests/api/repositories_spec.rb
@@ -610,4 +610,102 @@ RSpec.describe API::Repositories do
end
end
end
+
+ describe 'POST /projects/:id/repository/changelog' do
+ context 'when the changelog_api feature flag is enabled' do
+ it 'generates the changelog for a version' do
+ spy = instance_spy(Repositories::ChangelogService)
+
+ allow(Repositories::ChangelogService)
+ .to receive(:new)
+ .with(
+ project,
+ user,
+ version: '1.0.0',
+ from: 'foo',
+ to: 'bar',
+ date: DateTime.new(2020, 1, 1),
+ branch: 'kittens',
+ trailer: 'Foo',
+ file: 'FOO.md',
+ message: 'Commit message'
+ )
+ .and_return(spy)
+
+ allow(spy).to receive(:execute)
+
+ post(
+ api("/projects/#{project.id}/repository/changelog", user),
+ params: {
+ version: '1.0.0',
+ from: 'foo',
+ to: 'bar',
+ date: '2020-01-01',
+ branch: 'kittens',
+ trailer: 'Foo',
+ file: 'FOO.md',
+ message: 'Commit message'
+ }
+ )
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'produces an error when generating the changelog fails' do
+ spy = instance_spy(Repositories::ChangelogService)
+
+ allow(Repositories::ChangelogService)
+ .to receive(:new)
+ .with(
+ project,
+ user,
+ version: '1.0.0',
+ from: 'foo',
+ to: 'bar',
+ date: DateTime.new(2020, 1, 1),
+ branch: 'kittens',
+ trailer: 'Foo',
+ file: 'FOO.md',
+ message: 'Commit message'
+ )
+ .and_return(spy)
+
+ allow(spy)
+ .to receive(:execute)
+ .and_raise(Gitlab::Changelog::Committer::CommitError.new('oops'))
+
+ post(
+ api("/projects/#{project.id}/repository/changelog", user),
+ params: {
+ version: '1.0.0',
+ from: 'foo',
+ to: 'bar',
+ date: '2020-01-01',
+ branch: 'kittens',
+ trailer: 'Foo',
+ file: 'FOO.md',
+ message: 'Commit message'
+ }
+ )
+
+ expect(response).to have_gitlab_http_status(:internal_server_error)
+ expect(json_response['message']).to eq('Failed to generate the changelog: oops')
+ end
+ end
+
+ context 'when the changelog_api feature flag is disabled' do
+ before do
+ stub_feature_flags(changelog_api: false)
+ end
+
+ it 'responds with a 404 Not Found' do
+ post(
+ api("/projects/#{project.id}/repository/changelog", user),
+ params: { version: '1.0.0', from: 'foo', to: 'bar' }
+ )
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
end
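
Note: the changelog endpoint specs above stub the service layer with an instance spy, so the request spec verifies only that the API passes the right arguments through, not changelog generation itself. The general pattern, reduced to a sketch (the service class below is a placeholder; only instance_spy/allow are the real rspec-mocks calls used above):

    # Hypothetical service, used only to illustrate the instance_spy pattern.
    class GreetingService
      def initialize(name)
        @name = name
      end

      def execute
        "Hello, #{@name}!"
      end
    end

    RSpec.describe GreetingService do
      it 'is replaced by a spy so only the wiring is exercised' do
        spy = instance_spy(GreetingService)

        # Every construction with these arguments now returns the spy.
        allow(GreetingService).to receive(:new).with('world').and_return(spy)
        allow(spy).to receive(:execute).and_return('stubbed')

        expect(GreetingService.new('world').execute).to eq('stubbed')
      end
    end
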
diff --git a/spec/requests/api/resource_access_tokens_spec.rb b/spec/requests/api/resource_access_tokens_spec.rb
new file mode 100644
index 00000000000..9fd7eb2177d
--- /dev/null
+++ b/spec/requests/api/resource_access_tokens_spec.rb
@@ -0,0 +1,293 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe API::ResourceAccessTokens do
+ context "when the resource is a project" do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:other_project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ describe "GET projects/:id/access_tokens" do
+ subject(:get_tokens) { get api("/projects/#{project_id}/access_tokens", user) }
+
+ context "when the user has maintainer permissions" do
+ let_it_be(:project_bot) { create(:user, :project_bot) }
+ let_it_be(:access_tokens) { create_list(:personal_access_token, 3, user: project_bot) }
+ let_it_be(:project_id) { project.id }
+
+ before do
+ project.add_maintainer(user)
+ project.add_maintainer(project_bot)
+ end
+
+ it "gets a list of access tokens for the specified project" do
+ get_tokens
+
+ token_ids = json_response.map { |token| token['id'] }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(token_ids).to match_array(access_tokens.pluck(:id))
+ end
+
+ context "when using a project access token to GET other project access tokens" do
+ let_it_be(:token) { access_tokens.first }
+
+ it "gets a list of access tokens for the specified project" do
+ get api("/projects/#{project_id}/access_tokens", personal_access_token: token)
+
+ token_ids = json_response.map { |token| token['id'] }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(token_ids).to match_array(access_tokens.pluck(:id))
+ end
+ end
+
+ context "when tokens belong to a different project" do
+ let_it_be(:bot) { create(:user, :project_bot) }
+ let_it_be(:token) { create(:personal_access_token, user: bot) }
+
+ before do
+ other_project.add_maintainer(bot)
+ other_project.add_maintainer(user)
+ end
+
+ it "does not return tokens from a different project" do
+ get_tokens
+
+ token_ids = json_response.map { |token| token['id'] }
+
+ expect(token_ids).not_to include(token.id)
+ end
+ end
+
+ context "when the project has no access tokens" do
+ let(:project_id) { other_project.id }
+
+ before do
+ other_project.add_maintainer(user)
+ end
+
+ it 'returns an empty array' do
+ get_tokens
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq([])
+ end
+ end
+
+ context "when trying to get the tokens of a different project" do
+ let_it_be(:project_id) { other_project.id }
+
+ it "returns 404" do
+ get_tokens
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context "when the project does not exist" do
+ let(:project_id) { non_existing_record_id }
+
+ it "returns 404" do
+ get_tokens
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context "when the user does not have valid permissions" do
+ let_it_be(:project_bot) { create(:user, :project_bot) }
+ let_it_be(:access_tokens) { create_list(:personal_access_token, 3, user: project_bot) }
+ let_it_be(:project_id) { project.id }
+
+ before do
+ project.add_developer(user)
+ project.add_maintainer(project_bot)
+ end
+
+ it "returns 401" do
+ get_tokens
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+ end
+
+ describe "DELETE projects/:id/access_tokens/:token_id", :sidekiq_inline do
+ subject(:delete_token) { delete api("/projects/#{project_id}/access_tokens/#{token_id}", user) }
+
+ let_it_be(:project_bot) { create(:user, :project_bot) }
+ let_it_be(:token) { create(:personal_access_token, user: project_bot) }
+ let_it_be(:project_id) { project.id }
+ let_it_be(:token_id) { token.id }
+
+ before do
+ project.add_maintainer(project_bot)
+ end
+
+ context "when the user has maintainer permissions" do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it "deletes the project access token from the project" do
+ delete_token
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(User.exists?(project_bot.id)).to be_falsy
+ end
+
+ context "when attempting to delete a non-existent project access token" do
+ let_it_be(:token_id) { non_existing_record_id }
+
+ it "does not delete the token, and returns 404" do
+ delete_token
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(response.body).to include("Could not find project access token with token_id: #{token_id}")
+ end
+ end
+
+ context "when attempting to delete a token that does not belong to the specified project" do
+ let_it_be(:project_id) { other_project.id }
+
+ before do
+ other_project.add_maintainer(user)
+ end
+
+ it "does not delete the token, and returns 404" do
+ delete_token
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(response.body).to include("Could not find project access token with token_id: #{token_id}")
+ end
+ end
+ end
+
+ context "when the user does not have valid permissions" do
+ before do
+ project.add_developer(user)
+ end
+
+ it "does not delete the token, and returns 400", :aggregate_failures do
+ delete_token
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(User.exists?(project_bot.id)).to be_truthy
+ expect(response.body).to include("#{user.name} cannot delete #{token.user.name}")
+ end
+ end
+ end
+
+ describe "POST projects/:id/access_tokens" do
+ let_it_be(:params) { { name: "test", scopes: ["api"], expires_at: Date.today + 1.month } }
+
+ subject(:create_token) { post api("/projects/#{project_id}/access_tokens", user), params: params }
+
+ context "when the user has maintainer permissions" do
+ let_it_be(:project_id) { project.id }
+ let_it_be(:expires_at) { 1.month.from_now }
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ context "with valid params" do
+ context "with full params" do
+ it "creates a project access token with the params", :aggregate_failures do
+ create_token
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response["name"]).to eq("test")
+ expect(json_response["scopes"]).to eq(["api"])
+ expect(json_response["expires_at"]).to eq(expires_at.to_date.iso8601)
+ end
+ end
+
+ context "when 'expires_at' is not set" do
+ let_it_be(:params) { { name: "test", scopes: ["api"] } }
+
+ it "creates a project access token with the params", :aggregate_failures do
+ create_token
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response["name"]).to eq("test")
+ expect(json_response["scopes"]).to eq(["api"])
+ expect(json_response["expires_at"]).to eq(nil)
+ end
+ end
+ end
+
+ context "with invalid params" do
+ context "when missing the 'name' param" do
+ let_it_be(:params) { { scopes: ["api"], expires_at: 5.days.from_now } }
+
+ it "does not create a project access token without 'name'" do
+ create_token
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(response.body).to include("name is missing")
+ end
+ end
+
+ context "when missing the 'scopes' param" do
+ let_it_be(:params) { { name: "test", expires_at: 5.days.from_now } }
+
+ it "does not create a project access token without 'scopes'" do
+ create_token
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(response.body).to include("scopes is missing")
+ end
+ end
+ end
+
+ context "when trying to create a token in a different project" do
+ let_it_be(:project_id) { other_project.id }
+
+ it "does not create the token, and returns the project not found error" do
+ create_token
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(response.body).to include("Project Not Found")
+ end
+ end
+ end
+
+ context "when the user does not have valid permissions" do
+ let_it_be(:project_id) { project.id }
+
+ context "when the user is a developer" do
+ before do
+ project.add_developer(user)
+ end
+
+ it "does not create the token, and returns the permission error" do
+ create_token
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(response.body).to include("User does not have permission to create project access token")
+ end
+ end
+
+ context "when a project access token tries to create another project access token" do
+ let_it_be(:project_bot) { create(:user, :project_bot) }
+ let_it_be(:user) { project_bot }
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ it "does not allow a project access token to create another project access token" do
+ create_token
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(response.body).to include("User does not have permission to create project access token")
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index 8fb0f8fc51a..3b84c812010 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -199,6 +199,14 @@ RSpec.describe API::Settings, 'Settings' do
expect(json_response['allow_local_requests_from_hooks_and_services']).to eq(true)
end
+ it 'supports legacy asset_proxy_whitelist' do
+ put api("/application/settings", admin),
+ params: { asset_proxy_whitelist: ['example.com', '*.example.com'] }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['asset_proxy_allowlist']).to eq(['example.com', '*.example.com', 'localhost'])
+ end
+
it 'disables ability to switch to legacy storage' do
put api("/application/settings", admin),
params: { hashed_storage_enabled: false }
@@ -362,24 +370,24 @@ RSpec.describe API::Settings, 'Settings' do
asset_proxy_enabled: true,
asset_proxy_url: 'http://assets.example.com',
asset_proxy_secret_key: 'shared secret',
- asset_proxy_whitelist: ['example.com', '*.example.com']
+ asset_proxy_allowlist: ['example.com', '*.example.com']
}
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['asset_proxy_enabled']).to be(true)
expect(json_response['asset_proxy_url']).to eq('http://assets.example.com')
expect(json_response['asset_proxy_secret_key']).to be_nil
- expect(json_response['asset_proxy_whitelist']).to eq(['example.com', '*.example.com', 'localhost'])
+ expect(json_response['asset_proxy_allowlist']).to eq(['example.com', '*.example.com', 'localhost'])
end
- it 'allows a string for asset_proxy_whitelist' do
+ it 'allows a string for asset_proxy_allowlist' do
put api('/application/settings', admin),
params: {
- asset_proxy_whitelist: 'example.com, *.example.com'
+ asset_proxy_allowlist: 'example.com, *.example.com'
}
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['asset_proxy_whitelist']).to eq(['example.com', '*.example.com', 'localhost'])
+ expect(json_response['asset_proxy_allowlist']).to eq(['example.com', '*.example.com', 'localhost'])
end
end
diff --git a/spec/requests/api/suggestions_spec.rb b/spec/requests/api/suggestions_spec.rb
index 78a2688ac5e..7f53d379af5 100644
--- a/spec/requests/api/suggestions_spec.rb
+++ b/spec/requests/api/suggestions_spec.rb
@@ -65,6 +65,19 @@ RSpec.describe API::Suggestions do
end
end
+ context 'when a custom commit message is included' do
+ it 'renders an ok response and returns json content' do
+ project.add_maintainer(user)
+
+ message = "cool custom commit message!"
+
+ put api(url, user), params: { commit_message: message }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(project.repository.commit.message).to eq(message)
+ end
+ end
+
context 'when not able to apply patch' do
let(:url) { "/suggestions/#{unappliable_suggestion.id}/apply" }
@@ -113,9 +126,11 @@ RSpec.describe API::Suggestions do
let(:url) { "/suggestions/batch_apply" }
context 'when successfully applies multiple patches as a batch' do
- it 'renders an ok response and returns json content' do
+ before do
project.add_maintainer(user)
+ end
+ it 'renders an ok response and returns json content' do
put api(url, user), params: { ids: [suggestion.id, suggestion2.id] }
expect(response).to have_gitlab_http_status(:ok)
@@ -123,6 +138,16 @@ RSpec.describe API::Suggestions do
'appliable', 'applied',
'from_content', 'to_content'))
end
+
+ it 'provides a custom commit message' do
+ message = "cool custom commit message!"
+
+ put api(url, user), params: { ids: [suggestion.id, suggestion2.id],
+ commit_message: message }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(project.repository.commit.message).to eq(message)
+ end
end
context 'when not able to apply one or more of the patches' do
diff --git a/spec/requests/api/templates_spec.rb b/spec/requests/api/templates_spec.rb
index e1c5bfd82c4..adb37c62dc3 100644
--- a/spec/requests/api/templates_spec.rb
+++ b/spec/requests/api/templates_spec.rb
@@ -65,7 +65,9 @@ RSpec.describe API::Templates do
expect(json_response['nickname']).to be_nil
expect(json_response['popular']).to be true
expect(json_response['html_url']).to eq('http://choosealicense.com/licenses/mit/')
- expect(json_response['source_url']).to eq('https://opensource.org/licenses/MIT')
+ # This was dropped:
+ # https://github.com/github/choosealicense.com/commit/325806b42aa3d5b78e84120327ec877bc936dbdd#diff-66df8f1997786f7052d29010f2cbb4c66391d60d24ca624c356acc0ab986f139
+ expect(json_response['source_url']).to be_nil
expect(json_response['description']).to include('A short and simple permissive license with conditions')
expect(json_response['conditions']).to eq(%w[include-copyright])
expect(json_response['permissions']).to eq(%w[commercial-use modifications distribution private-use])
@@ -81,7 +83,7 @@ RSpec.describe API::Templates do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
- expect(json_response.size).to eq(12)
+ expect(json_response.size).to eq(13)
expect(json_response.map { |l| l['key'] }).to include('agpl-3.0')
end
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index 94fba451860..66d6c29cd4d 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -2565,7 +2565,7 @@ RSpec.describe API::Users do
it 'does not approve a deactivated user' do
expect { approve }.not_to change { deactivated_user.reload.state }
expect(response).to have_gitlab_http_status(:conflict)
- expect(json_response['message']).to eq('The user you are trying to approve is not pending an approval')
+ expect(json_response['message']).to eq('The user you are trying to approve is not pending approval')
end
end
@@ -2585,7 +2585,7 @@ RSpec.describe API::Users do
it 'returns 201' do
expect { approve }.not_to change { user.reload.state }
expect(response).to have_gitlab_http_status(:conflict)
- expect(json_response['message']).to eq('The user you are trying to approve is not pending an approval')
+ expect(json_response['message']).to eq('The user you are trying to approve is not pending approval')
end
end
@@ -2595,7 +2595,7 @@ RSpec.describe API::Users do
it 'returns 403' do
expect { approve }.not_to change { blocked_user.reload.state }
expect(response).to have_gitlab_http_status(:conflict)
- expect(json_response['message']).to eq('The user you are trying to approve is not pending an approval')
+ expect(json_response['message']).to eq('The user you are trying to approve is not pending approval')
end
end
@@ -2605,7 +2605,7 @@ RSpec.describe API::Users do
it 'returns 403' do
expect { approve }.not_to change { ldap_blocked_user.reload.state }
expect(response).to have_gitlab_http_status(:conflict)
- expect(json_response['message']).to eq('The user you are trying to approve is not pending an approval')
+ expect(json_response['message']).to eq('The user you are trying to approve is not pending approval')
end
end
diff --git a/spec/requests/api/version_spec.rb b/spec/requests/api/version_spec.rb
index a0a0f66c8d1..7abbaf4f9ec 100644
--- a/spec/requests/api/version_spec.rb
+++ b/spec/requests/api/version_spec.rb
@@ -33,6 +33,12 @@ RSpec.describe API::Version do
expect_version
end
+
+ it 'returns "200" response on head requests' do
+ head api('/version', personal_access_token: personal_access_token)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
context 'with read_user scope' do
@@ -43,6 +49,12 @@ RSpec.describe API::Version do
expect_version
end
+
+ it 'returns "200" response on head requests' do
+ head api('/version', personal_access_token: personal_access_token)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
context 'with neither api nor read_user scope' do
diff --git a/spec/requests/groups/email_campaigns_controller_spec.rb b/spec/requests/groups/email_campaigns_controller_spec.rb
new file mode 100644
index 00000000000..930e645f6c0
--- /dev/null
+++ b/spec/requests/groups/email_campaigns_controller_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Groups::EmailCampaignsController do
+ include InProductMarketingHelper
+ using RSpec::Parameterized::TableSyntax
+
+ describe 'GET #index', :snowplow do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:user) { create(:user) }
+ let(:track) { 'create' }
+ let(:series) { '0' }
+ let(:schema) { described_class::EMAIL_CAMPAIGNS_SCHEMA_URL }
+ let(:data) do
+ {
+ namespace_id: group.id,
+ track: track.to_sym,
+ series: series.to_i,
+ subject_line: subject_line(track.to_sym, series.to_i)
+ }
+ end
+
+ before do
+ sign_in(user)
+ group.add_developer(user)
+ allow(Gitlab::Tracking).to receive(:self_describing_event)
+ end
+
+ subject do
+ get group_email_campaigns_url(group, track: track, series: series)
+ response
+ end
+
+ shared_examples 'track and redirect' do
+ it do
+ is_expected.to track_self_describing_event(schema, data)
+ is_expected.to have_gitlab_http_status(:redirect)
+ end
+ end
+
+ shared_examples 'no track and 404' do
+ it do
+ is_expected.not_to track_self_describing_event
+ is_expected.to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ describe 'track parameter' do
+ context 'when valid' do
+ where(track: Namespaces::InProductMarketingEmailsService::TRACKS.keys)
+
+ with_them do
+ it_behaves_like 'track and redirect'
+ end
+ end
+
+ context 'when invalid' do
+ where(track: [nil, 'xxxx'])
+
+ with_them do
+ it_behaves_like 'no track and 404'
+ end
+ end
+ end
+
+ describe 'series parameter' do
+ context 'when valid' do
+ where(series: (0..Namespaces::InProductMarketingEmailsService::INTERVAL_DAYS.length - 1).to_a)
+
+ with_them do
+ it_behaves_like 'track and redirect'
+ end
+ end
+
+ context 'when invalid' do
+ where(series: [-1, nil, Namespaces::InProductMarketingEmailsService::INTERVAL_DAYS.length])
+
+ with_them do
+ it_behaves_like 'no track and 404'
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/oauth/tokens_controller_spec.rb b/spec/requests/oauth/tokens_controller_spec.rb
new file mode 100644
index 00000000000..c3cdae2cd21
--- /dev/null
+++ b/spec/requests/oauth/tokens_controller_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Oauth::TokensController do
+ it 'allows cross-origin POST requests' do
+ post '/oauth/token', headers: { 'Origin' => 'http://notgitlab.com' }
+
+ expect(response.headers['Access-Control-Allow-Origin']).to eq '*'
+ expect(response.headers['Access-Control-Allow-Methods']).to eq 'POST'
+ expect(response.headers['Access-Control-Allow-Headers']).to be_nil
+ expect(response.headers['Access-Control-Allow-Credentials']).to be_nil
+ end
+end
diff --git a/spec/requests/projects/ci/prometheus_metrics/histograms_controller_spec.rb b/spec/requests/projects/ci/prometheus_metrics/histograms_controller_spec.rb
new file mode 100644
index 00000000000..5d2f3e98bb4
--- /dev/null
+++ b/spec/requests/projects/ci/prometheus_metrics/histograms_controller_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Projects::Ci::PrometheusMetrics::HistogramsController' do
+ let_it_be(:project) { create(:project, :public) }
+
+ describe 'POST /*namespace_id/:project_id/-/ci/prometheus_metrics/histograms' do
+ context 'with known histograms' do
+ it 'returns 201 Created' do
+ post histograms_route(histograms: [
+ { name: :pipeline_graph_link_calculation_duration_seconds, value: 1 },
+ { name: :pipeline_graph_links_total, value: 10 }
+ ])
+
+ expect(response).to have_gitlab_http_status(:created)
+ end
+ end
+
+ context 'with unknown histograms' do
+ it 'returns 404 Not Found' do
+ post histograms_route(histograms: [{ name: :chunky_bacon, value: 5 }])
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'with the feature flag disabled' do
+ before do
+ stub_feature_flags(ci_accept_frontend_prometheus_metrics: false)
+ end
+
+ it 'returns 202 Accepted' do
+ post histograms_route(histograms: [
+ { name: :pipeline_graph_link_calculation_duration_seconds, value: 1 }
+ ])
+
+ expect(response).to have_gitlab_http_status(:accepted)
+ end
+ end
+ end
+
+ def histograms_route(params = {})
+ namespace_project_ci_prometheus_metrics_histograms_path(namespace_id: project.namespace, project_id: project, **params)
+ end
+end
diff --git a/spec/requests/projects/noteable_notes_spec.rb b/spec/requests/projects/noteable_notes_spec.rb
index 2bf1ffb2edc..5ae2aadaa84 100644
--- a/spec/requests/projects/noteable_notes_spec.rb
+++ b/spec/requests/projects/noteable_notes_spec.rb
@@ -18,9 +18,7 @@ RSpec.describe 'Project noteable notes' do
login_as(user)
end
- it 'does not set a Gitlab::EtagCaching ETag if there is a note' do
- create(:note_on_merge_request, noteable: merge_request, project: merge_request.project)
-
+ it 'does not set a Gitlab::EtagCaching ETag' do
get notes_path
expect(response).to have_gitlab_http_status(:ok)
@@ -29,12 +27,5 @@ RSpec.describe 'Project noteable notes' do
# interfere with notes pagination
expect(response_etag).not_to eq(stored_etag)
end
-
- it 'sets a Gitlab::EtagCaching ETag if there is no note' do
- get notes_path
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response_etag).to eq(stored_etag)
- end
end
end
diff --git a/spec/requests/rack_attack_global_spec.rb b/spec/requests/rack_attack_global_spec.rb
index 34f34c0b850..1bb260b5ea1 100644
--- a/spec/requests/rack_attack_global_spec.rb
+++ b/spec/requests/rack_attack_global_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Rack Attack global throttles' do
+RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_caching do
include RackAttackSpecHelpers
let(:settings) { Gitlab::CurrentSettings.current_application_settings }
@@ -149,14 +149,14 @@ RSpec.describe 'Rack Attack global throttles' do
expect(response).to have_gitlab_http_status(:ok)
end
- arguments = {
+ arguments = a_hash_including({
message: 'Rack_Attack',
env: :throttle,
remote_ip: '127.0.0.1',
request_method: 'GET',
path: '/users/sign_in',
matched: 'throttle_unauthenticated'
- }
+ })
expect(Gitlab::AuthLogger).to receive(:error).with(arguments)
diff --git a/spec/requests/users_controller_spec.rb b/spec/requests/users_controller_spec.rb
index b224ef87229..f3ddcbef1c2 100644
--- a/spec/requests/users_controller_spec.rb
+++ b/spec/requests/users_controller_spec.rb
@@ -268,6 +268,14 @@ RSpec.describe UsersController do
end
it_behaves_like 'renders all public keys'
+
+ context 'when public visibility is restricted' do
+ before do
+ stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
+ end
+
+ it_behaves_like 'renders all public keys'
+ end
end
end
end
diff --git a/spec/requests/whats_new_controller_spec.rb b/spec/requests/whats_new_controller_spec.rb
index 8005d38dbb0..b17fa7e4a26 100644
--- a/spec/requests/whats_new_controller_spec.rb
+++ b/spec/requests/whats_new_controller_spec.rb
@@ -7,59 +7,39 @@ RSpec.describe WhatsNewController do
let(:item) { double(:item) }
let(:highlights) { double(:highlight, items: [item], map: [item].map, next_page: 2) }
- context 'with whats_new_drawer feature enabled' do
- before do
- stub_feature_flags(whats_new_drawer: true)
- end
-
- context 'with no page param' do
- it 'responds with paginated data and headers' do
- allow(ReleaseHighlight).to receive(:paginated).with(page: 1).and_return(highlights)
- allow(Gitlab::WhatsNew::ItemPresenter).to receive(:present).with(item).and_return(item)
+ context 'with no page param' do
+ it 'responds with paginated data and headers' do
+ allow(ReleaseHighlight).to receive(:paginated).with(page: 1).and_return(highlights)
- get whats_new_path, xhr: true
+ get whats_new_path, xhr: true
- expect(response.body).to eq(highlights.items.to_json)
- expect(response.headers['X-Next-Page']).to eq(2)
- end
+ expect(response.body).to eq(highlights.items.to_json)
+ expect(response.headers['X-Next-Page']).to eq(2)
end
+ end
- context 'with page param' do
- it 'passes the page parameter' do
- expect(ReleaseHighlight).to receive(:paginated).with(page: 2).and_call_original
-
- get whats_new_path(page: 2), xhr: true
- end
-
- it 'returns a 404 if page param is negative' do
- get whats_new_path(page: -1), xhr: true
+ context 'with page param' do
+ it 'passes the page parameter' do
+ expect(ReleaseHighlight).to receive(:paginated).with(page: 2).and_call_original
- expect(response).to have_gitlab_http_status(:not_found)
- end
+ get whats_new_path(page: 2), xhr: true
end
- context 'with version param' do
- it 'returns items without pagination headers' do
- allow(ReleaseHighlight).to receive(:for_version).with(version: '42').and_return(highlights)
- allow(Gitlab::WhatsNew::ItemPresenter).to receive(:present).with(item).and_return(item)
+ it 'returns a 404 if page param is negative' do
+ get whats_new_path(page: -1), xhr: true
- get whats_new_path(version: 42), xhr: true
-
- expect(response.body).to eq(highlights.items.to_json)
- expect(response.headers['X-Next-Page']).to be_nil
- end
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
- context 'with whats_new_drawer feature disabled' do
- before do
- stub_feature_flags(whats_new_drawer: false)
- end
+ context 'with version param' do
+ it 'returns items without pagination headers' do
+ allow(ReleaseHighlight).to receive(:for_version).with(version: '42').and_return(highlights)
- it 'returns a 404' do
- get whats_new_path, xhr: true
+ get whats_new_path(version: 42), xhr: true
- expect(response).to have_gitlab_http_status(:not_found)
+ expect(response.body).to eq(highlights.items.to_json)
+ expect(response.headers['X-Next-Page']).to be_nil
end
end
end
diff --git a/spec/routing/admin_routing_spec.rb b/spec/routing/admin_routing_spec.rb
index 9374df0c4a2..8c36d7d4668 100644
--- a/spec/routing/admin_routing_spec.rb
+++ b/spec/routing/admin_routing_spec.rb
@@ -141,13 +141,6 @@ RSpec.describe Admin::DevOpsReportController, "routing" do
end
end
-# admin_cohorts GET /admin/cohorts(.:format) admin/cohorst#index
-RSpec.describe Admin::CohortsController, "routing" do
- it "to #index" do
- expect(get("/admin/cohorts")).to route_to('admin/cohorts#index')
- end
-end
-
RSpec.describe Admin::GroupsController, "routing" do
let(:name) { 'complex.group-namegit' }
diff --git a/spec/routing/projects/security/configuration_controller_routing_spec.rb b/spec/routing/projects/security/configuration_controller_routing_spec.rb
new file mode 100644
index 00000000000..c2b10a49dea
--- /dev/null
+++ b/spec/routing/projects/security/configuration_controller_routing_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::Security::ConfigurationController, 'routing' do
+ let(:base_params) { { namespace_id: 'gitlab', project_id: 'gitlabhq' } }
+
+ before do
+ allow(Project).to receive(:find_by_full_path).with('gitlab/gitlabhq', any_args).and_return(true)
+ end
+
+ it 'routes to #show' do
+ expect(get('/gitlab/gitlabhq/-/security/configuration')).to route_to('projects/security/configuration#show', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ end
+end
diff --git a/spec/rubocop/code_reuse_helpers_spec.rb b/spec/rubocop/code_reuse_helpers_spec.rb
index 574a4a85a34..4c3dd8f8167 100644
--- a/spec/rubocop/code_reuse_helpers_spec.rb
+++ b/spec/rubocop/code_reuse_helpers_spec.rb
@@ -6,7 +6,7 @@ require 'parser/current'
require_relative '../../rubocop/code_reuse_helpers'
RSpec.describe RuboCop::CodeReuseHelpers do
- def parse_source(source, path = 'foo.rb')
+ def build_and_parse_source(source, path = 'foo.rb')
buffer = Parser::Source::Buffer.new(path)
buffer.source = source
@@ -24,13 +24,13 @@ RSpec.describe RuboCop::CodeReuseHelpers do
describe '#send_to_constant?' do
it 'returns true when sending to a constant' do
- node = parse_source('Foo.bar')
+ node = build_and_parse_source('Foo.bar')
expect(cop.send_to_constant?(node)).to eq(true)
end
it 'returns false when sending to something other than a constant' do
- node = parse_source('10')
+ node = build_and_parse_source('10')
expect(cop.send_to_constant?(node)).to eq(false)
end
@@ -38,13 +38,13 @@ RSpec.describe RuboCop::CodeReuseHelpers do
describe '#send_receiver_name_ends_with?' do
it 'returns true when the receiver ends with a suffix' do
- node = parse_source('FooFinder.new')
+ node = build_and_parse_source('FooFinder.new')
expect(cop.send_receiver_name_ends_with?(node, 'Finder')).to eq(true)
end
it 'returns false when the receiver is the same as a suffix' do
- node = parse_source('Finder.new')
+ node = build_and_parse_source('Finder.new')
expect(cop.send_receiver_name_ends_with?(node, 'Finder')).to eq(false)
end
@@ -52,7 +52,7 @@ RSpec.describe RuboCop::CodeReuseHelpers do
describe '#file_path_for_node' do
it 'returns the file path of a node' do
- node = parse_source('10')
+ node = build_and_parse_source('10')
path = cop.file_path_for_node(node)
expect(path).to eq('foo.rb')
@@ -61,7 +61,7 @@ RSpec.describe RuboCop::CodeReuseHelpers do
describe '#name_of_constant' do
it 'returns the name of a constant' do
- node = parse_source('Foo')
+ node = build_and_parse_source('Foo')
expect(cop.name_of_constant(node)).to eq(:Foo)
end
@@ -69,13 +69,13 @@ RSpec.describe RuboCop::CodeReuseHelpers do
describe '#in_finder?' do
it 'returns true for a node in the finders directory' do
- node = parse_source('10', rails_root_join('app', 'finders', 'foo.rb'))
+ node = build_and_parse_source('10', rails_root_join('app', 'finders', 'foo.rb'))
expect(cop.in_finder?(node)).to eq(true)
end
it 'returns false for a node outside the finders directory' do
- node = parse_source('10', rails_root_join('app', 'foo', 'foo.rb'))
+ node = build_and_parse_source('10', rails_root_join('app', 'foo', 'foo.rb'))
expect(cop.in_finder?(node)).to eq(false)
end
@@ -83,13 +83,13 @@ RSpec.describe RuboCop::CodeReuseHelpers do
describe '#in_model?' do
it 'returns true for a node in the models directory' do
- node = parse_source('10', rails_root_join('app', 'models', 'foo.rb'))
+ node = build_and_parse_source('10', rails_root_join('app', 'models', 'foo.rb'))
expect(cop.in_model?(node)).to eq(true)
end
it 'returns false for a node outside the models directory' do
- node = parse_source('10', rails_root_join('app', 'foo', 'foo.rb'))
+ node = build_and_parse_source('10', rails_root_join('app', 'foo', 'foo.rb'))
expect(cop.in_model?(node)).to eq(false)
end
@@ -97,13 +97,13 @@ RSpec.describe RuboCop::CodeReuseHelpers do
describe '#in_service_class?' do
it 'returns true for a node in the services directory' do
- node = parse_source('10', rails_root_join('app', 'services', 'foo.rb'))
+ node = build_and_parse_source('10', rails_root_join('app', 'services', 'foo.rb'))
expect(cop.in_service_class?(node)).to eq(true)
end
it 'returns false for a node outside the services directory' do
- node = parse_source('10', rails_root_join('app', 'foo', 'foo.rb'))
+ node = build_and_parse_source('10', rails_root_join('app', 'foo', 'foo.rb'))
expect(cop.in_service_class?(node)).to eq(false)
end
@@ -111,13 +111,13 @@ RSpec.describe RuboCop::CodeReuseHelpers do
describe '#in_presenter?' do
it 'returns true for a node in the presenters directory' do
- node = parse_source('10', rails_root_join('app', 'presenters', 'foo.rb'))
+ node = build_and_parse_source('10', rails_root_join('app', 'presenters', 'foo.rb'))
expect(cop.in_presenter?(node)).to eq(true)
end
it 'returns false for a node outside the presenters directory' do
- node = parse_source('10', rails_root_join('app', 'foo', 'foo.rb'))
+ node = build_and_parse_source('10', rails_root_join('app', 'foo', 'foo.rb'))
expect(cop.in_presenter?(node)).to eq(false)
end
@@ -125,13 +125,13 @@ RSpec.describe RuboCop::CodeReuseHelpers do
describe '#in_serializer?' do
it 'returns true for a node in the serializers directory' do
- node = parse_source('10', rails_root_join('app', 'serializers', 'foo.rb'))
+ node = build_and_parse_source('10', rails_root_join('app', 'serializers', 'foo.rb'))
expect(cop.in_serializer?(node)).to eq(true)
end
it 'returns false for a node outside the serializers directory' do
- node = parse_source('10', rails_root_join('app', 'foo', 'foo.rb'))
+ node = build_and_parse_source('10', rails_root_join('app', 'foo', 'foo.rb'))
expect(cop.in_serializer?(node)).to eq(false)
end
@@ -139,13 +139,13 @@ RSpec.describe RuboCop::CodeReuseHelpers do
describe '#in_worker?' do
it 'returns true for a node in the workers directory' do
- node = parse_source('10', rails_root_join('app', 'workers', 'foo.rb'))
+ node = build_and_parse_source('10', rails_root_join('app', 'workers', 'foo.rb'))
expect(cop.in_worker?(node)).to eq(true)
end
it 'returns false for a node outside the workers directory' do
- node = parse_source('10', rails_root_join('app', 'foo', 'foo.rb'))
+ node = build_and_parse_source('10', rails_root_join('app', 'foo', 'foo.rb'))
expect(cop.in_worker?(node)).to eq(false)
end
@@ -153,13 +153,13 @@ RSpec.describe RuboCop::CodeReuseHelpers do
describe '#in_api?' do
it 'returns true for a node in the API directory' do
- node = parse_source('10', rails_root_join('lib', 'api', 'foo.rb'))
+ node = build_and_parse_source('10', rails_root_join('lib', 'api', 'foo.rb'))
expect(cop.in_api?(node)).to eq(true)
end
it 'returns false for a node outside the API directory' do
- node = parse_source('10', rails_root_join('lib', 'foo', 'foo.rb'))
+ node = build_and_parse_source('10', rails_root_join('lib', 'foo', 'foo.rb'))
expect(cop.in_api?(node)).to eq(false)
end
@@ -167,21 +167,21 @@ RSpec.describe RuboCop::CodeReuseHelpers do
describe '#in_directory?' do
it 'returns true for a directory in the CE app/ directory' do
- node = parse_source('10', rails_root_join('app', 'models', 'foo.rb'))
+ node = build_and_parse_source('10', rails_root_join('app', 'models', 'foo.rb'))
expect(cop.in_directory?(node, 'models')).to eq(true)
end
it 'returns true for a directory in the EE app/ directory' do
node =
- parse_source('10', rails_root_join('ee', 'app', 'models', 'foo.rb'))
+ build_and_parse_source('10', rails_root_join('ee', 'app', 'models', 'foo.rb'))
expect(cop.in_directory?(node, 'models')).to eq(true)
end
it 'returns false for a directory in the lib/ directory' do
node =
- parse_source('10', rails_root_join('lib', 'models', 'foo.rb'))
+ build_and_parse_source('10', rails_root_join('lib', 'models', 'foo.rb'))
expect(cop.in_directory?(node, 'models')).to eq(false)
end
@@ -189,7 +189,7 @@ RSpec.describe RuboCop::CodeReuseHelpers do
describe '#name_of_receiver' do
it 'returns the name of a send receiver' do
- node = parse_source('Foo.bar')
+ node = build_and_parse_source('Foo.bar')
expect(cop.name_of_receiver(node)).to eq('Foo')
end
@@ -197,7 +197,7 @@ RSpec.describe RuboCop::CodeReuseHelpers do
describe '#each_class_method' do
it 'yields every class method to the supplied block' do
- node = parse_source(<<~RUBY)
+ node = build_and_parse_source(<<~RUBY)
class Foo
class << self
def first
@@ -220,7 +220,7 @@ RSpec.describe RuboCop::CodeReuseHelpers do
describe '#each_send_node' do
it 'yields every send node to the supplied block' do
- node = parse_source("foo\nbar")
+ node = build_and_parse_source("foo\nbar")
nodes = cop.each_send_node(node).to_a
expect(nodes.length).to eq(2)
@@ -231,7 +231,7 @@ RSpec.describe RuboCop::CodeReuseHelpers do
describe '#disallow_send_to' do
it 'disallows sending a message to a constant' do
- def_node = parse_source(<<~RUBY)
+ def_node = build_and_parse_source(<<~RUBY)
def foo
FooFinder.new
end
diff --git a/spec/rubocop/cop/active_record_association_reload_spec.rb b/spec/rubocop/cop/active_record_association_reload_spec.rb
index 8dbe6daeeca..f28c4e60f3c 100644
--- a/spec/rubocop/cop/active_record_association_reload_spec.rb
+++ b/spec/rubocop/cop/active_record_association_reload_spec.rb
@@ -5,8 +5,6 @@ require 'rubocop'
require_relative '../../../rubocop/cop/active_record_association_reload'
RSpec.describe RuboCop::Cop::ActiveRecordAssociationReload do
- include CopHelper
-
subject(:cop) { described_class.new }
context 'when using ActiveRecord::Base' do
diff --git a/spec/rubocop/cop/api/base_spec.rb b/spec/rubocop/cop/api/base_spec.rb
index de05ab93874..ec646b9991b 100644
--- a/spec/rubocop/cop/api/base_spec.rb
+++ b/spec/rubocop/cop/api/base_spec.rb
@@ -2,12 +2,9 @@
require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/api/base'
RSpec.describe RuboCop::Cop::API::Base do
- include CopHelper
-
subject(:cop) { described_class.new }
let(:corrected) do
@@ -17,7 +14,7 @@ RSpec.describe RuboCop::Cop::API::Base do
CORRECTED
end
- ['Grape::API', '::Grape::API', 'Grape::API::Instance', '::Grape::API::Instance'].each do |offense|
+ %w[Grape::API ::Grape::API Grape::API::Instance ::Grape::API::Instance].each do |offense|
it "adds an offense when inheriting from #{offense}" do
expect_offense(<<~CODE)
class SomeAPI < #{offense}
diff --git a/spec/rubocop/cop/api/grape_array_missing_coerce_spec.rb b/spec/rubocop/cop/api/grape_array_missing_coerce_spec.rb
index c7bb8255398..b50866b54b3 100644
--- a/spec/rubocop/cop/api/grape_array_missing_coerce_spec.rb
+++ b/spec/rubocop/cop/api/grape_array_missing_coerce_spec.rb
@@ -5,36 +5,38 @@ require 'rubocop'
require_relative '../../../../rubocop/cop/api/grape_array_missing_coerce'
RSpec.describe RuboCop::Cop::API::GrapeArrayMissingCoerce do
- include CopHelper
+ let(:msg) do
+ "This Grape parameter defines an Array but is missing a coerce_with definition. " \
+ "For more details, see " \
+ "https://github.com/ruby-grape/grape/blob/master/UPGRADING.md#ensure-that-array-types-have-explicit-coercions"
+ end
subject(:cop) { described_class.new }
it 'adds an offense with a required parameter' do
- inspect_source(<<~CODE)
+ expect_offense(<<~TYPE)
class SomeAPI < Grape::API::Instance
params do
requires :values, type: Array[String]
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{msg}
end
end
- CODE
-
- expect(cop.offenses.size).to eq(1)
+ TYPE
end
it 'adds an offense with an optional parameter' do
- inspect_source(<<~CODE)
+ expect_offense(<<~TYPE)
class SomeAPI < Grape::API::Instance
params do
optional :values, type: Array[String]
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{msg}
end
end
- CODE
-
- expect(cop.offenses.size).to eq(1)
+ TYPE
end
it 'does not add an offense' do
- inspect_source(<<~CODE)
+ expect_no_offenses(<<~CODE)
class SomeAPI < Grape::API::Instance
params do
requires :values, type: Array[String], coerce_with: ->(val) { val.split(',').map(&:strip) }
@@ -44,19 +46,15 @@ RSpec.describe RuboCop::Cop::API::GrapeArrayMissingCoerce do
end
end
CODE
-
- expect(cop.offenses.size).to be_zero
end
it 'does not add an offense for unrelated classes' do
- inspect_source(<<~CODE)
+ expect_no_offenses(<<~CODE)
class SomeClass
params do
requires :values, type: Array[String]
end
end
CODE
-
- expect(cop.offenses.size).to be_zero
end
end
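
Note: this hunk (and the RuboCop cop specs that follow) migrates from `inspect_source` plus manual offense counting to `expect_offense`/`expect_no_offenses`, where caret lines under the source mark the expected offense range and message. A minimal sketch of the annotation style for a hypothetical cop (it assumes RuboCop's RSpec support, which provides expect_offense, is loaded as it is for these specs):

    # Hypothetical cop spec, shown only to illustrate the expect_offense
    # annotation style adopted throughout these files.
    RSpec.describe RuboCop::Cop::MyDepartment::NoSleep do
      subject(:cop) { described_class.new }

      it 'flags Kernel#sleep' do
        expect_offense(<<~RUBY)
          sleep 5
          ^^^^^^^ Do not call sleep in application code.
        RUBY
      end

      it 'accepts code without sleep' do
        expect_no_offenses(<<~RUBY)
          do_work
        RUBY
      end
    end
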
diff --git a/spec/rubocop/cop/avoid_becomes_spec.rb b/spec/rubocop/cop/avoid_becomes_spec.rb
index 07cf374faf5..401c694f373 100644
--- a/spec/rubocop/cop/avoid_becomes_spec.rb
+++ b/spec/rubocop/cop/avoid_becomes_spec.rb
@@ -2,33 +2,31 @@
require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/avoid_becomes'
RSpec.describe RuboCop::Cop::AvoidBecomes do
- include CopHelper
-
subject(:cop) { described_class.new }
it 'flags the use of becomes with a constant parameter' do
- inspect_source('foo.becomes(Project)')
-
- expect(cop.offenses.size).to eq(1)
+ expect_offense(<<~CODE)
+ foo.becomes(Project)
+ ^^^^^^^^^^^^^^^^^^^^ Avoid the use of becomes(SomeConstant), [...]
+ CODE
end
it 'flags the use of becomes with a namespaced constant parameter' do
- inspect_source('foo.becomes(Namespace::Group)')
-
- expect(cop.offenses.size).to eq(1)
+ expect_offense(<<~CODE)
+ foo.becomes(Namespace::Group)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Avoid the use of becomes(SomeConstant), [...]
+ CODE
end
it 'flags the use of becomes with a dynamic parameter' do
- inspect_source(<<~RUBY)
- model = Namespace
- project = Project.first
- project.becomes(model)
- RUBY
-
- expect(cop.offenses.size).to eq(1)
+ expect_offense(<<~CODE)
+ model = Namespace
+ project = Project.first
+ project.becomes(model)
+ ^^^^^^^^^^^^^^^^^^^^^^ Avoid the use of becomes(SomeConstant), [...]
+ CODE
end
end
diff --git a/spec/rubocop/cop/avoid_break_from_strong_memoize_spec.rb b/spec/rubocop/cop/avoid_break_from_strong_memoize_spec.rb
index 3c3aa5b7b5c..ac59d36db3f 100644
--- a/spec/rubocop/cop/avoid_break_from_strong_memoize_spec.rb
+++ b/spec/rubocop/cop/avoid_break_from_strong_memoize_spec.rb
@@ -5,8 +5,6 @@ require 'rubocop'
require_relative '../../../rubocop/cop/avoid_break_from_strong_memoize'
RSpec.describe RuboCop::Cop::AvoidBreakFromStrongMemoize do
- include CopHelper
-
subject(:cop) { described_class.new }
it 'flags violation for break inside strong_memoize' do
@@ -56,7 +54,7 @@ RSpec.describe RuboCop::Cop::AvoidBreakFromStrongMemoize do
call do
strong_memoize(:result) do
break if something
-
+ ^^^^^ Do not use break inside strong_memoize, use next instead.
do_an_heavy_calculation
end
end
@@ -65,7 +63,7 @@ RSpec.describe RuboCop::Cop::AvoidBreakFromStrongMemoize do
expect(instance).to receive(:add_offense).once
end
- inspect_source(source)
+ expect_offense(source)
end
it "doesn't check when block is empty" do
diff --git a/spec/rubocop/cop/avoid_keyword_arguments_in_sidekiq_workers_spec.rb b/spec/rubocop/cop/avoid_keyword_arguments_in_sidekiq_workers_spec.rb
index 1e1fe851840..460a0b13458 100644
--- a/spec/rubocop/cop/avoid_keyword_arguments_in_sidekiq_workers_spec.rb
+++ b/spec/rubocop/cop/avoid_keyword_arguments_in_sidekiq_workers_spec.rb
@@ -2,18 +2,15 @@
require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/avoid_keyword_arguments_in_sidekiq_workers'
RSpec.describe RuboCop::Cop::AvoidKeywordArgumentsInSidekiqWorkers do
- include CopHelper
-
subject(:cop) { described_class.new }
it 'flags violation for keyword arguments usage in perform method signature' do
expect_offense(<<~RUBY)
def perform(id:)
- ^^^^^^^^^^^^^^^^ Do not use keyword arguments in Sidekiq workers. For details, check https://github.com/mperham/sidekiq/issues/2372
+ ^^^^^^^^^^^^^^^^ Do not use keyword arguments in Sidekiq workers. For details, [...]
end
RUBY
end
@@ -21,7 +18,7 @@ RSpec.describe RuboCop::Cop::AvoidKeywordArgumentsInSidekiqWorkers do
it 'flags violation for optional keyword arguments usage in perform method signature' do
expect_offense(<<~RUBY)
def perform(id: nil)
- ^^^^^^^^^^^^^^^^^^^^ Do not use keyword arguments in Sidekiq workers. For details, check https://github.com/mperham/sidekiq/issues/2372
+ ^^^^^^^^^^^^^^^^^^^^ Do not use keyword arguments in Sidekiq workers. For details, [...]
end
RUBY
end
diff --git a/spec/rubocop/cop/ban_catch_throw_spec.rb b/spec/rubocop/cop/ban_catch_throw_spec.rb
index 4f669bad4af..b3c4ad8688c 100644
--- a/spec/rubocop/cop/ban_catch_throw_spec.rb
+++ b/spec/rubocop/cop/ban_catch_throw_spec.rb
@@ -1,30 +1,28 @@
# frozen_string_literal: true
require 'fast_spec_helper'
-
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/ban_catch_throw'
RSpec.describe RuboCop::Cop::BanCatchThrow do
- include CopHelper
-
subject(:cop) { described_class.new }
it 'registers an offense when `catch` or `throw` are used' do
- inspect_source("catch(:foo) {\n throw(:foo)\n}")
-
- aggregate_failures do
- expect(cop.offenses.size).to eq(2)
- expect(cop.offenses.map(&:line)).to eq([1, 2])
- expect(cop.highlights).to eq(['catch(:foo)', 'throw(:foo)'])
- end
+ expect_offense(<<~CODE)
+ catch(:foo) {
+ ^^^^^^^^^^^ Do not use catch or throw unless a gem's API demands it.
+ throw(:foo)
+ ^^^^^^^^^^^ Do not use catch or throw unless a gem's API demands it.
+ }
+ CODE
end
it 'does not register an offense for a method called catch or throw' do
- inspect_source("foo.catch(:foo) {\n foo.throw(:foo)\n}")
-
- expect(cop.offenses).to be_empty
+ expect_no_offenses(<<~CODE)
+ foo.catch(:foo) {
+ foo.throw(:foo)
+ }
+ CODE
end
end
diff --git a/spec/rubocop/cop/code_reuse/finder_spec.rb b/spec/rubocop/cop/code_reuse/finder_spec.rb
index 6f04d5e0d60..484a1549a89 100644
--- a/spec/rubocop/cop/code_reuse/finder_spec.rb
+++ b/spec/rubocop/cop/code_reuse/finder_spec.rb
@@ -2,12 +2,9 @@
require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/code_reuse/finder'
RSpec.describe RuboCop::Cop::CodeReuse::Finder do
- include CopHelper
-
subject(:cop) { described_class.new }
it 'flags the use of a Finder inside another Finder' do
@@ -23,8 +20,6 @@ RSpec.describe RuboCop::Cop::CodeReuse::Finder do
end
end
SOURCE
-
- expect(cop.offenses.size).to eq(1)
end
it 'flags the use of a Finder inside a model class method' do
diff --git a/spec/rubocop/cop/code_reuse/presenter_spec.rb b/spec/rubocop/cop/code_reuse/presenter_spec.rb
index 8efd4da8aa1..4639854588e 100644
--- a/spec/rubocop/cop/code_reuse/presenter_spec.rb
+++ b/spec/rubocop/cop/code_reuse/presenter_spec.rb
@@ -2,12 +2,9 @@
require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/code_reuse/presenter'
RSpec.describe RuboCop::Cop::CodeReuse::Presenter do
- include CopHelper
-
subject(:cop) { described_class.new }
it 'flags the use of a Presenter in a Service class' do
diff --git a/spec/rubocop/cop/code_reuse/serializer_spec.rb b/spec/rubocop/cop/code_reuse/serializer_spec.rb
index 74999df5859..84db2e62b41 100644
--- a/spec/rubocop/cop/code_reuse/serializer_spec.rb
+++ b/spec/rubocop/cop/code_reuse/serializer_spec.rb
@@ -2,12 +2,9 @@
require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/code_reuse/serializer'
RSpec.describe RuboCop::Cop::CodeReuse::Serializer do
- include CopHelper
-
subject(:cop) { described_class.new }
it 'flags the use of a Serializer in a Service class' do
diff --git a/spec/rubocop/cop/code_reuse/service_class_spec.rb b/spec/rubocop/cop/code_reuse/service_class_spec.rb
index 4870daf72dc..b6d94dd749f 100644
--- a/spec/rubocop/cop/code_reuse/service_class_spec.rb
+++ b/spec/rubocop/cop/code_reuse/service_class_spec.rb
@@ -2,12 +2,9 @@
require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/code_reuse/service_class'
RSpec.describe RuboCop::Cop::CodeReuse::ServiceClass do
- include CopHelper
-
subject(:cop) { described_class.new }
it 'flags the use of a Service class in a Finder' do
diff --git a/spec/rubocop/cop/code_reuse/worker_spec.rb b/spec/rubocop/cop/code_reuse/worker_spec.rb
index 9e015f286d8..42c9303a93b 100644
--- a/spec/rubocop/cop/code_reuse/worker_spec.rb
+++ b/spec/rubocop/cop/code_reuse/worker_spec.rb
@@ -2,12 +2,9 @@
require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/code_reuse/worker'
RSpec.describe RuboCop::Cop::CodeReuse::Worker do
- include CopHelper
-
subject(:cop) { described_class.new }
it 'flags the use of a worker in a controller' do
diff --git a/spec/rubocop/cop/default_scope_spec.rb b/spec/rubocop/cop/default_scope_spec.rb
index fee1895603c..506843e030e 100644
--- a/spec/rubocop/cop/default_scope_spec.rb
+++ b/spec/rubocop/cop/default_scope_spec.rb
@@ -2,47 +2,44 @@
require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/default_scope'
RSpec.describe RuboCop::Cop::DefaultScope do
- include CopHelper
-
subject(:cop) { described_class.new }
it 'does not flag the use of default_scope with a send receiver' do
- inspect_source('foo.default_scope')
-
- expect(cop.offenses.size).to eq(0)
+ expect_no_offenses('foo.default_scope')
end
it 'flags the use of default_scope with a constant receiver' do
- inspect_source('User.default_scope')
-
- expect(cop.offenses.size).to eq(1)
+ expect_offense(<<~SOURCE)
+ User.default_scope
+ ^^^^^^^^^^^^^^^^^^ Do not use `default_scope`, [...]
+ SOURCE
end
it 'flags the use of default_scope with a nil receiver' do
- inspect_source('class Foo ; default_scope ; end')
-
- expect(cop.offenses.size).to eq(1)
+ expect_offense(<<~SOURCE)
+ class Foo ; default_scope ; end
+ ^^^^^^^^^^^^^ Do not use `default_scope`, [...]
+ SOURCE
end
it 'flags the use of default_scope when passing arguments' do
- inspect_source('class Foo ; default_scope(:foo) ; end')
-
- expect(cop.offenses.size).to eq(1)
+ expect_offense(<<~SOURCE)
+ class Foo ; default_scope(:foo) ; end
+ ^^^^^^^^^^^^^^^^^^^ Do not use `default_scope`, [...]
+ SOURCE
end
it 'flags the use of default_scope when passing a block' do
- inspect_source('class Foo ; default_scope { :foo } ; end')
-
- expect(cop.offenses.size).to eq(1)
+ expect_offense(<<~SOURCE)
+ class Foo ; default_scope { :foo } ; end
+ ^^^^^^^^^^^^^ Do not use `default_scope`, [...]
+ SOURCE
end
it 'ignores the use of default_scope with a local variable receiver' do
- inspect_source('users = User.all ; users.default_scope')
-
- expect(cop.offenses.size).to eq(0)
+ expect_no_offenses('users = User.all ; users.default_scope')
end
end
diff --git a/spec/rubocop/cop/gitlab/avoid_uploaded_file_from_params_spec.rb b/spec/rubocop/cop/gitlab/avoid_uploaded_file_from_params_spec.rb
index 2db03898e01..f96e25c59e7 100644
--- a/spec/rubocop/cop/gitlab/avoid_uploaded_file_from_params_spec.rb
+++ b/spec/rubocop/cop/gitlab/avoid_uploaded_file_from_params_spec.rb
@@ -2,19 +2,16 @@
require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/avoid_uploaded_file_from_params'
RSpec.describe RuboCop::Cop::Gitlab::AvoidUploadedFileFromParams do
- include CopHelper
-
subject(:cop) { described_class.new }
- context 'UploadedFile.from_params' do
+ context 'when using UploadedFile.from_params' do
it 'flags its call' do
expect_offense(<<~SOURCE)
- UploadedFile.from_params(params)
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Use the `UploadedFile` set by `multipart.rb` instead of calling `UploadedFile.from_params` directly. See https://docs.gitlab.com/ee/development/uploads.html#how-to-add-a-new-upload-route
+ UploadedFile.from_params(params)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Use the `UploadedFile` set by `multipart.rb` instead of calling [...]
SOURCE
end
end
diff --git a/spec/rubocop/cop/gitlab/bulk_insert_spec.rb b/spec/rubocop/cop/gitlab/bulk_insert_spec.rb
index ad7e685e505..c280ab8fa8b 100644
--- a/spec/rubocop/cop/gitlab/bulk_insert_spec.rb
+++ b/spec/rubocop/cop/gitlab/bulk_insert_spec.rb
@@ -2,25 +2,22 @@
require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/bulk_insert'
RSpec.describe RuboCop::Cop::Gitlab::BulkInsert do
- include CopHelper
-
subject(:cop) { described_class.new }
it 'flags the use of Gitlab::Database.bulk_insert' do
expect_offense(<<~SOURCE)
- Gitlab::Database.bulk_insert('merge_request_diff_files', rows)
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{RuboCop::Cop::Gitlab::BulkInsert::MSG}
+ Gitlab::Database.bulk_insert('merge_request_diff_files', rows)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Use the `BulkInsertSafe` concern, [...]
SOURCE
end
it 'flags the use of ::Gitlab::Database.bulk_insert' do
expect_offense(<<~SOURCE)
- ::Gitlab::Database.bulk_insert('merge_request_diff_files', rows)
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{RuboCop::Cop::Gitlab::BulkInsert::MSG}
+ ::Gitlab::Database.bulk_insert('merge_request_diff_files', rows)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Use the `BulkInsertSafe` concern, [...]
SOURCE
end
end
diff --git a/spec/rubocop/cop/gitlab/change_timezone_spec.rb b/spec/rubocop/cop/gitlab/change_timezone_spec.rb
index 6abbc06bb1a..9cb822ec4f2 100644
--- a/spec/rubocop/cop/gitlab/change_timezone_spec.rb
+++ b/spec/rubocop/cop/gitlab/change_timezone_spec.rb
@@ -2,19 +2,16 @@
require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/change_timzone'
RSpec.describe RuboCop::Cop::Gitlab::ChangeTimezone do
- include CopHelper
-
subject(:cop) { described_class.new }
context 'Time.zone=' do
it 'registers an offense with no 2nd argument' do
expect_offense(<<~PATTERN)
Time.zone = 'Awkland'
- ^^^^^^^^^^^^^^^^^^^^^ Do not change timezone in the runtime (application or rspec), it could result in silently modifying other behavior.
+ ^^^^^^^^^^^^^^^^^^^^^ Do not change timezone in the runtime (application or rspec), it could result [...]
PATTERN
end
end
diff --git a/spec/rubocop/cop/gitlab/const_get_inherit_false_spec.rb b/spec/rubocop/cop/gitlab/const_get_inherit_false_spec.rb
index bed06ab2b17..19e5fe946be 100644
--- a/spec/rubocop/cop/gitlab/const_get_inherit_false_spec.rb
+++ b/spec/rubocop/cop/gitlab/const_get_inherit_false_spec.rb
@@ -2,78 +2,75 @@
require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/const_get_inherit_false'
RSpec.describe RuboCop::Cop::Gitlab::ConstGetInheritFalse do
- include CopHelper
-
subject(:cop) { described_class.new }
context 'Object.const_get' do
- it 'registers an offense with no 2nd argument' do
+ it 'registers an offense with no 2nd argument and corrects' do
expect_offense(<<~PATTERN)
Object.const_get(:CONSTANT)
^^^^^^^^^ Use inherit=false when using const_get.
PATTERN
- end
- it 'autocorrects' do
- expect(autocorrect_source('Object.const_get(:CONSTANT)')).to eq('Object.const_get(:CONSTANT, false)')
+ expect_correction(<<~PATTERN)
+ Object.const_get(:CONSTANT, false)
+ PATTERN
end
context 'inherit=false' do
it 'does not register an offense' do
expect_no_offenses(<<~PATTERN)
- Object.const_get(:CONSTANT, false)
+ Object.const_get(:CONSTANT, false)
PATTERN
end
end
context 'inherit=true' do
- it 'registers an offense' do
+ it 'registers an offense and corrects' do
expect_offense(<<~PATTERN)
- Object.const_get(:CONSTANT, true)
- ^^^^^^^^^ Use inherit=false when using const_get.
+ Object.const_get(:CONSTANT, true)
+ ^^^^^^^^^ Use inherit=false when using const_get.
PATTERN
- end
- it 'autocorrects' do
- expect(autocorrect_source('Object.const_get(:CONSTANT, true)')).to eq('Object.const_get(:CONSTANT, false)')
+ expect_correction(<<~PATTERN)
+ Object.const_get(:CONSTANT, false)
+ PATTERN
end
end
end
context 'const_get for a nested class' do
- it 'registers an offense on reload usage' do
+ it 'registers an offense on reload usage and corrects' do
expect_offense(<<~PATTERN)
Nested::Blog.const_get(:CONSTANT)
^^^^^^^^^ Use inherit=false when using const_get.
PATTERN
- end
- it 'autocorrects' do
- expect(autocorrect_source('Nested::Blag.const_get(:CONSTANT)')).to eq('Nested::Blag.const_get(:CONSTANT, false)')
+ expect_correction(<<~PATTERN)
+ Nested::Blog.const_get(:CONSTANT, false)
+ PATTERN
end
context 'inherit=false' do
it 'does not register an offense' do
expect_no_offenses(<<~PATTERN)
- Nested::Blog.const_get(:CONSTANT, false)
+ Nested::Blog.const_get(:CONSTANT, false)
PATTERN
end
end
context 'inherit=true' do
- it 'registers an offense if inherit is true' do
+ it 'registers an offense if inherit is true and corrects' do
expect_offense(<<~PATTERN)
- Nested::Blog.const_get(:CONSTANT, true)
- ^^^^^^^^^ Use inherit=false when using const_get.
+ Nested::Blog.const_get(:CONSTANT, true)
+ ^^^^^^^^^ Use inherit=false when using const_get.
PATTERN
- end
- it 'autocorrects' do
- expect(autocorrect_source('Nested::Blag.const_get(:CONSTANT, true)')).to eq('Nested::Blag.const_get(:CONSTANT, false)')
+ expect_correction(<<~PATTERN)
+ Nested::Blog.const_get(:CONSTANT, false)
+ PATTERN
end
end
end
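
The ConstGetInheritFalse changes above also fold the separate autocorrect_source assertions into expect_correction, which verifies the autocorrected output immediately after the annotated offense. A hedged, self-contained sketch of that pairing, with an invented PreferLast cop standing in for a real autocorrecting cop:

# Illustrative sketch only: expect_offense followed by expect_correction.
# Assumes the rubocop gem's RSpec helpers; PreferLast is hypothetical.
require 'rubocop'
require 'rubocop/rspec/support'

class PreferLast < RuboCop::Cop::Base
  extend RuboCop::Cop::AutoCorrector

  MSG = 'Use `.last` instead of `.reverse.first`.'

  def on_send(node)
    receiver = node.receiver
    return unless node.method?(:first) && receiver&.send_type? && receiver.method?(:reverse) && receiver.receiver

    add_offense(node) do |corrector|
      # Rewrite `foo.reverse.first` into `foo.last`.
      corrector.replace(node, "#{receiver.receiver.source}.last")
    end
  end
end

RSpec.describe PreferLast do
  include RuboCop::RSpec::ExpectOffense

  subject(:cop) { described_class.new }

  it 'registers an offense and corrects' do
    expect_offense(<<~RUBY)
      items.reverse.first
      ^^^^^^^^^^^^^^^^^^^ Use `.last` instead of `.reverse.first`.
    RUBY

    expect_correction(<<~RUBY)
      items.last
    RUBY
  end
end
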
diff --git a/spec/rubocop/cop/gitlab/duplicate_spec_location_spec.rb b/spec/rubocop/cop/gitlab/duplicate_spec_location_spec.rb
index 5804b03b641..a207155f432 100644
--- a/spec/rubocop/cop/gitlab/duplicate_spec_location_spec.rb
+++ b/spec/rubocop/cop/gitlab/duplicate_spec_location_spec.rb
@@ -6,8 +6,6 @@ require 'rubocop'
require_relative '../../../../rubocop/cop/gitlab/duplicate_spec_location'
RSpec.describe RuboCop::Cop::Gitlab::DuplicateSpecLocation do
- include CopHelper
-
subject(:cop) { described_class.new }
let(:rails_root) { '../../../../' }
diff --git a/spec/rubocop/cop/gitlab/except_spec.rb b/spec/rubocop/cop/gitlab/except_spec.rb
index 173e5943da5..7a122e3cf53 100644
--- a/spec/rubocop/cop/gitlab/except_spec.rb
+++ b/spec/rubocop/cop/gitlab/except_spec.rb
@@ -2,12 +2,9 @@
require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/except'
RSpec.describe RuboCop::Cop::Gitlab::Except do
- include CopHelper
-
subject(:cop) { described_class.new }
it 'flags the use of Gitlab::SQL::Except.new' do
diff --git a/spec/rubocop/cop/gitlab/finder_with_find_by_spec.rb b/spec/rubocop/cop/gitlab/finder_with_find_by_spec.rb
index db3bcf1dfdb..03d7fc5e8b1 100644
--- a/spec/rubocop/cop/gitlab/finder_with_find_by_spec.rb
+++ b/spec/rubocop/cop/gitlab/finder_with_find_by_spec.rb
@@ -1,46 +1,31 @@
# frozen_string_literal: true
require 'fast_spec_helper'
-
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/finder_with_find_by'
RSpec.describe RuboCop::Cop::Gitlab::FinderWithFindBy do
- include CopHelper
-
subject(:cop) { described_class.new }
context 'when calling execute.find' do
- let(:source) do
- <<~SRC
- DummyFinder.new(some_args)
- .execute
- .find_by!(1)
- SRC
- end
-
- let(:corrected_source) do
- <<~SRC
- DummyFinder.new(some_args)
- .find_by!(1)
- SRC
- end
-
- it 'registers an offence' do
- inspect_source(source)
-
- expect(cop.offenses.size).to eq(1)
- end
-
- it 'can autocorrect the source' do
- expect(autocorrect_source(source)).to eq(corrected_source)
+ it 'registers an offense and corrects' do
+ expect_offense(<<~CODE)
+ DummyFinder.new(some_args)
+ .execute
+ .find_by!(1)
+ ^^^^^^^^ Don't chain finders `#execute` method with [...]
+ CODE
+
+ expect_correction(<<~CODE)
+ DummyFinder.new(some_args)
+ .find_by!(1)
+ CODE
end
context 'when called within the `FinderMethods` module' do
- let(:source) do
- <<~SRC
+ it 'does not register an offense' do
+ expect_no_offenses(<<~SRC)
module FinderMethods
def find_by!(*args)
execute.find_by!(args)
@@ -48,12 +33,6 @@ RSpec.describe RuboCop::Cop::Gitlab::FinderWithFindBy do
end
SRC
end
-
- it 'does not register an offence' do
- inspect_source(source)
-
- expect(cop.offenses).to be_empty
- end
end
end
end
diff --git a/spec/rubocop/cop/gitlab/httparty_spec.rb b/spec/rubocop/cop/gitlab/httparty_spec.rb
index b112ac84bff..fcd18b0eb9b 100644
--- a/spec/rubocop/cop/gitlab/httparty_spec.rb
+++ b/spec/rubocop/cop/gitlab/httparty_spec.rb
@@ -2,46 +2,30 @@
require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/httparty'
RSpec.describe RuboCop::Cop::Gitlab::HTTParty do # rubocop:disable RSpec/FilePath
- include CopHelper
-
subject(:cop) { described_class.new }
- shared_examples('registering include offense') do |options|
- let(:offending_lines) { options[:offending_lines] }
-
+ shared_examples('registering include offense') do
it 'registers an offense when the class includes HTTParty' do
- inspect_source(source)
-
- aggregate_failures do
- expect(cop.offenses.size).to eq(offending_lines.size)
- expect(cop.offenses.map(&:line)).to eq(offending_lines)
- end
+ expect_offense(source)
end
end
- shared_examples('registering call offense') do |options|
- let(:offending_lines) { options[:offending_lines] }
-
+ shared_examples('registering call offense') do
it 'registers an offense when the class calls HTTParty' do
- inspect_source(source)
-
- aggregate_failures do
- expect(cop.offenses.size).to eq(offending_lines.size)
- expect(cop.offenses.map(&:line)).to eq(offending_lines)
- end
+ expect_offense(source)
end
end
context 'when source is a regular module' do
- it_behaves_like 'registering include offense', offending_lines: [2] do
+ it_behaves_like 'registering include offense' do
let(:source) do
<<~RUBY
module M
include HTTParty
+ ^^^^^^^^^^^^^^^^ Avoid including `HTTParty` directly. [...]
end
RUBY
end
@@ -49,11 +33,12 @@ RSpec.describe RuboCop::Cop::Gitlab::HTTParty do # rubocop:disable RSpec/FilePat
end
context 'when source is a regular class' do
- it_behaves_like 'registering include offense', offending_lines: [2] do
+ it_behaves_like 'registering include offense' do
let(:source) do
<<~RUBY
class Foo
include HTTParty
+ ^^^^^^^^^^^^^^^^ Avoid including `HTTParty` directly. [...]
end
RUBY
end
@@ -61,12 +46,13 @@ RSpec.describe RuboCop::Cop::Gitlab::HTTParty do # rubocop:disable RSpec/FilePat
end
context 'when HTTParty is called' do
- it_behaves_like 'registering call offense', offending_lines: [3] do
+ it_behaves_like 'registering call offense' do
let(:source) do
<<~RUBY
class Foo
def bar
HTTParty.get('http://example.com')
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Avoid calling `HTTParty` directly. [...]
end
end
RUBY
diff --git a/spec/rubocop/cop/gitlab/intersect_spec.rb b/spec/rubocop/cop/gitlab/intersect_spec.rb
index e724f47029c..6f0367591cd 100644
--- a/spec/rubocop/cop/gitlab/intersect_spec.rb
+++ b/spec/rubocop/cop/gitlab/intersect_spec.rb
@@ -2,12 +2,9 @@
require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/intersect'
RSpec.describe RuboCop::Cop::Gitlab::Intersect do
- include CopHelper
-
subject(:cop) { described_class.new }
it 'flags the use of Gitlab::SQL::Intersect.new' do
diff --git a/spec/rubocop/cop/gitlab/json_spec.rb b/spec/rubocop/cop/gitlab/json_spec.rb
index fc25f69a244..29c3b96cc1a 100644
--- a/spec/rubocop/cop/gitlab/json_spec.rb
+++ b/spec/rubocop/cop/gitlab/json_spec.rb
@@ -2,38 +2,21 @@
require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/json'
RSpec.describe RuboCop::Cop::Gitlab::Json do
- include CopHelper
-
subject(:cop) { described_class.new }
- shared_examples('registering call offense') do |options|
- let(:offending_lines) { options[:offending_lines] }
-
- it 'registers an offense when the class calls JSON' do
- inspect_source(source)
-
- aggregate_failures do
- expect(cop.offenses.size).to eq(offending_lines.size)
- expect(cop.offenses.map(&:line)).to eq(offending_lines)
- end
- end
- end
-
context 'when JSON is called' do
- it_behaves_like 'registering call offense', offending_lines: [3] do
- let(:source) do
- <<~RUBY
- class Foo
- def bar
- JSON.parse('{ "foo": "bar" }')
- end
+ it 'registers an offense' do
+ expect_offense(<<~RUBY)
+ class Foo
+ def bar
+ JSON.parse('{ "foo": "bar" }')
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Avoid calling `JSON` directly. [...]
end
- RUBY
- end
+ end
+ RUBY
end
end
end
diff --git a/spec/rubocop/cop/gitlab/module_with_instance_variables_spec.rb b/spec/rubocop/cop/gitlab/module_with_instance_variables_spec.rb
index 1d09c720bf7..08634d5753a 100644
--- a/spec/rubocop/cop/gitlab/module_with_instance_variables_spec.rb
+++ b/spec/rubocop/cop/gitlab/module_with_instance_variables_spec.rb
@@ -2,42 +2,33 @@
require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/module_with_instance_variables'
RSpec.describe RuboCop::Cop::Gitlab::ModuleWithInstanceVariables do
- include CopHelper
+ let(:msg) { "Do not use instance variables in a module. [...]" }
subject(:cop) { described_class.new }
- shared_examples('registering offense') do |options|
- let(:offending_lines) { options[:offending_lines] }
-
+ shared_examples('registering offense') do
it 'registers an offense when instance variable is used in a module' do
- inspect_source(source)
-
- aggregate_failures do
- expect(cop.offenses.size).to eq(offending_lines.size)
- expect(cop.offenses.map(&:line)).to eq(offending_lines)
- end
+ expect_offense(source)
end
end
shared_examples('not registering offense') do
it 'does not register offenses' do
- inspect_source(source)
-
- expect(cop.offenses).to be_empty
+ expect_no_offenses(source)
end
end
context 'when source is a regular module' do
- it_behaves_like 'registering offense', offending_lines: [3] do
+ it_behaves_like 'registering offense' do
let(:source) do
<<~RUBY
module M
def f
@f = true
+ ^^^^^^^^^ #{msg}
end
end
RUBY
@@ -46,13 +37,14 @@ RSpec.describe RuboCop::Cop::Gitlab::ModuleWithInstanceVariables do
end
context 'when source is a nested module' do
- it_behaves_like 'registering offense', offending_lines: [4] do
+ it_behaves_like 'registering offense' do
let(:source) do
<<~RUBY
module N
module M
def f
@f = true
+ ^^^^^^^^^ #{msg}
end
end
end
@@ -62,13 +54,14 @@ RSpec.describe RuboCop::Cop::Gitlab::ModuleWithInstanceVariables do
end
context 'when source is a nested module with multiple offenses' do
- it_behaves_like 'registering offense', offending_lines: [4, 12] do
+ it_behaves_like 'registering offense' do
let(:source) do
<<~RUBY
module N
module M
def f
@f = true
+ ^^^^^^^^^ #{msg}
end
def g
@@ -77,6 +70,7 @@ RSpec.describe RuboCop::Cop::Gitlab::ModuleWithInstanceVariables do
def h
@h = true
+ ^^^^^^^^^ #{msg}
end
end
end
@@ -129,12 +123,13 @@ RSpec.describe RuboCop::Cop::Gitlab::ModuleWithInstanceVariables do
end
context 'when source is using simple or ivar assignment with other ivar' do
- it_behaves_like 'registering offense', offending_lines: [3] do
+ it_behaves_like 'registering offense' do
let(:source) do
<<~RUBY
module M
def f
@f ||= g(@g)
+ ^^ #{msg}
end
end
RUBY
@@ -143,13 +138,15 @@ RSpec.describe RuboCop::Cop::Gitlab::ModuleWithInstanceVariables do
end
context 'when source is using or ivar assignment with something else' do
- it_behaves_like 'registering offense', offending_lines: [3, 4] do
+ it_behaves_like 'registering offense' do
let(:source) do
<<~RUBY
module M
def f
@f ||= true
+ ^^ #{msg}
@f.to_s
+ ^^ #{msg}
end
end
RUBY
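
A related pattern in the ModuleWithInstanceVariables spec above: the shared examples now receive source strings that already carry their ^^^ annotations and simply hand them to expect_offense, so the old offending_lines bookkeeping disappears. A small self-contained sketch of that shape, under the same assumptions, with an invented NoBindingPry cop:

# Illustrative sketch only. The NoBindingPry cop and its message are hypothetical.
require 'rubocop'
require 'rubocop/rspec/support'

class NoBindingPry < RuboCop::Cop::Base
  MSG = 'Remove debugger call.'

  def on_send(node)
    # Flag `binding.pry` calls left in the source.
    add_offense(node) if node.method?(:pry) && node.receiver&.source == 'binding'
  end
end

RSpec.describe NoBindingPry do
  include RuboCop::RSpec::ExpectOffense

  subject(:cop) { described_class.new }

  shared_examples 'registering offense' do
    it 'registers an offense' do
      expect_offense(source)
    end
  end

  context 'when the call appears inside a method' do
    it_behaves_like 'registering offense' do
      let(:source) do
        <<~RUBY
          def debug_me
            binding.pry
            ^^^^^^^^^^^ Remove debugger call.
          end
        RUBY
      end
    end
  end
end
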
diff --git a/spec/rubocop/cop/gitlab/namespaced_class_spec.rb b/spec/rubocop/cop/gitlab/namespaced_class_spec.rb
new file mode 100644
index 00000000000..d1f61aa5afb
--- /dev/null
+++ b/spec/rubocop/cop/gitlab/namespaced_class_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rubocop'
+require 'rubocop/rspec/support'
+require_relative '../../../../rubocop/cop/gitlab/namespaced_class'
+
+RSpec.describe RuboCop::Cop::Gitlab::NamespacedClass do
+ subject(:cop) { described_class.new }
+
+ it 'flags a class definition without namespace' do
+ expect_offense(<<~SOURCE)
+ class MyClass
+ ^^^^^^^^^^^^^ #{described_class::MSG}
+ end
+ SOURCE
+ end
+
+ it 'flags a class definition with inheritance without namespace' do
+ expect_offense(<<~SOURCE)
+ class MyClass < ApplicationRecord
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG}
+ def some_method
+ true
+ end
+ end
+ SOURCE
+ end
+
+ it 'does not flag the class definition with namespace in separate lines' do
+ expect_no_offenses(<<~SOURCE)
+ module MyModule
+ class MyClass < ApplicationRecord
+ end
+
+ class MyOtherClass
+ def other_method
+ 1 + 1
+ end
+ end
+ end
+ SOURCE
+ end
+
+ it 'does not flag the class definition with nested namespace in separate lines' do
+ expect_no_offenses(<<~SOURCE)
+ module TopLevelModule
+ module NestedModule
+ class MyClass
+ end
+ end
+ end
+ SOURCE
+ end
+
+ it 'does not flag the class definition nested inside namespaced class' do
+ expect_no_offenses(<<~SOURCE)
+ module TopLevelModule
+ class TopLevelClass
+ class MyClass
+ end
+ end
+ end
+ SOURCE
+ end
+
+ it 'does not flag a compact namespaced class definition' do
+ expect_no_offenses(<<~SOURCE)
+ class MyModule::MyClass < ApplicationRecord
+ end
+ SOURCE
+ end
+end
diff --git a/spec/rubocop/cop/gitlab/policy_rule_boolean_spec.rb b/spec/rubocop/cop/gitlab/policy_rule_boolean_spec.rb
index e6fb9ab9d57..6dbbcdd8324 100644
--- a/spec/rubocop/cop/gitlab/policy_rule_boolean_spec.rb
+++ b/spec/rubocop/cop/gitlab/policy_rule_boolean_spec.rb
@@ -2,12 +2,9 @@
require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/policy_rule_boolean'
RSpec.describe RuboCop::Cop::Gitlab::PolicyRuleBoolean do
- include CopHelper
-
subject(:cop) { described_class.new }
it 'registers offense for &&' do
diff --git a/spec/rubocop/cop/gitlab/predicate_memoization_spec.rb b/spec/rubocop/cop/gitlab/predicate_memoization_spec.rb
index 322c7c82968..071ddcf8b7d 100644
--- a/spec/rubocop/cop/gitlab/predicate_memoization_spec.rb
+++ b/spec/rubocop/cop/gitlab/predicate_memoization_spec.rb
@@ -2,55 +2,38 @@
require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/predicate_memoization'
RSpec.describe RuboCop::Cop::Gitlab::PredicateMemoization do
- include CopHelper
-
subject(:cop) { described_class.new }
- shared_examples('registering offense') do |options|
- let(:offending_lines) { options[:offending_lines] }
-
- it 'registers an offense when a predicate method is memoizing via ivar' do
- inspect_source(source)
-
- aggregate_failures do
- expect(cop.offenses.size).to eq(offending_lines.size)
- expect(cop.offenses.map(&:line)).to eq(offending_lines)
- end
- end
- end
-
shared_examples('not registering offense') do
it 'does not register offenses' do
- inspect_source(source)
-
- expect(cop.offenses).to be_empty
+ expect_no_offenses(source)
end
end
- context 'when source is a predicate method memoizing via ivar' do
- it_behaves_like 'registering offense', offending_lines: [3] do
+ context 'when source is a predicate method using ivar with assignment' do
+ it_behaves_like 'not registering offense' do
let(:source) do
<<~RUBY
class C
def really?
- @really ||= true
+ @really = true
end
end
RUBY
end
end
+ end
- it_behaves_like 'registering offense', offending_lines: [4] do
+ context 'when source is a predicate method using local with ||=' do
+ it_behaves_like 'not registering offense' do
let(:source) do
<<~RUBY
class C
def really?
- value = true
- @really ||= value
+ really ||= true
end
end
RUBY
@@ -58,13 +41,13 @@ RSpec.describe RuboCop::Cop::Gitlab::PredicateMemoization do
end
end
- context 'when source is a predicate method using ivar with assignment' do
+ context 'when source is a regular method memoizing via ivar' do
it_behaves_like 'not registering offense' do
let(:source) do
<<~RUBY
class C
- def really?
- @really = true
+ def really
+ @really ||= true
end
end
RUBY
@@ -72,30 +55,37 @@ RSpec.describe RuboCop::Cop::Gitlab::PredicateMemoization do
end
end
- context 'when source is a predicate method using local with ||=' do
- it_behaves_like 'not registering offense' do
- let(:source) do
- <<~RUBY
+ context 'when source is a predicate method memoizing via ivar' do
+ let(:msg) { "Avoid using `@value ||= query` [...]" }
+
+ context 'when assigning to boolean' do
+ it 'registers an offense' do
+ node = "@really ||= true"
+
+ expect_offense(<<~CODE, node: node, msg: msg)
class C
def really?
- really ||= true
+ %{node}
+ ^{node} %{msg}
end
end
- RUBY
+ CODE
end
end
- end
- context 'when source is a regular method memoizing via ivar' do
- it_behaves_like 'not registering offense' do
- let(:source) do
- <<~RUBY
+ context 'when assigning to another variable that is a boolean' do
+ it 'registers an offense' do
+ node = "@really ||= value"
+
+ expect_offense(<<~CODE, node: node, msg: msg)
class C
- def really
- @really ||= true
+ def really?
+ value = true
+ %{node}
+ ^{node} %{msg}
end
end
- RUBY
+ CODE
end
end
end
diff --git a/spec/rubocop/cop/gitlab/rails_logger_spec.rb b/spec/rubocop/cop/gitlab/rails_logger_spec.rb
index 768da243b02..7258b047191 100644
--- a/spec/rubocop/cop/gitlab/rails_logger_spec.rb
+++ b/spec/rubocop/cop/gitlab/rails_logger_spec.rb
@@ -2,37 +2,31 @@
require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/rails_logger'
RSpec.describe RuboCop::Cop::Gitlab::RailsLogger do
- include CopHelper
-
subject(:cop) { described_class.new }
described_class::LOG_METHODS.each do |method|
it "flags the use of Rails.logger.#{method} with a constant receiver" do
- inspect_source("Rails.logger.#{method}('some error')")
+ node = "Rails.logger.#{method}('some error')"
- expect(cop.offenses.size).to eq(1)
+ expect_offense(<<~CODE, node: node, msg: "Use a structured JSON logger instead of `Rails.logger`. [...]")
+ %{node}
+ ^{node} %{msg}
+ CODE
end
end
it 'does not flag the use of Rails.logger with a constant that is not Rails' do
- inspect_source("AppLogger.error('some error')")
-
- expect(cop.offenses.size).to eq(0)
+ expect_no_offenses("AppLogger.error('some error')")
end
it 'does not flag the use of logger with a send receiver' do
- inspect_source("file_logger.info('important info')")
-
- expect(cop.offenses.size).to eq(0)
+ expect_no_offenses("file_logger.info('important info')")
end
it 'does not flag the use of Rails.logger.level' do
- inspect_source("Rails.logger.level")
-
- expect(cop.offenses.size).to eq(0)
+ expect_no_offenses("Rails.logger.level")
end
end
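
The RailsLogger and PredicateMemoization specs above use the placeholder form of expect_offense: keyword arguments are interpolated into the annotated snippet, with %{name} inserting the value and ^{name} expanding to one caret per character, which is convenient when looping over generated sources. A self-contained sketch under the same assumptions, with an invented NoPuts cop:

# Illustrative sketch only: the %{} / ^{} placeholder style of expect_offense.
# Assumes the rubocop gem's RSpec helpers; NoPuts and its message are hypothetical.
require 'rubocop'
require 'rubocop/rspec/support'

class NoPuts < RuboCop::Cop::Base
  MSG = 'Use a logger instead of `puts`.'

  def on_send(node)
    # Flag bare `puts` calls (no explicit receiver).
    add_offense(node) if node.method?(:puts) && node.receiver.nil?
  end
end

RSpec.describe NoPuts do
  include RuboCop::RSpec::ExpectOffense

  subject(:cop) { described_class.new }

  %w[hello goodbye].each do |word|
    it "flags puts with #{word.inspect}" do
      node = "puts '#{word}'"

      expect_offense(<<~RUBY, node: node, msg: described_class::MSG)
        %{node}
        ^{node} %{msg}
      RUBY
    end
  end
end
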
diff --git a/spec/rubocop/cop/gitlab/union_spec.rb b/spec/rubocop/cop/gitlab/union_spec.rb
index 20364b1b901..04a3db8e7dd 100644
--- a/spec/rubocop/cop/gitlab/union_spec.rb
+++ b/spec/rubocop/cop/gitlab/union_spec.rb
@@ -2,12 +2,9 @@
require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/union'
RSpec.describe RuboCop::Cop::Gitlab::Union do
- include CopHelper
-
subject(:cop) { described_class.new }
it 'flags the use of Gitlab::SQL::Union.new' do
diff --git a/spec/rubocop/cop/graphql/authorize_types_spec.rb b/spec/rubocop/cop/graphql/authorize_types_spec.rb
index a1b7a3f3a9b..9242b865b20 100644
--- a/spec/rubocop/cop/graphql/authorize_types_spec.rb
+++ b/spec/rubocop/cop/graphql/authorize_types_spec.rb
@@ -6,21 +6,18 @@ require 'rubocop'
require_relative '../../../../rubocop/cop/graphql/authorize_types'
RSpec.describe RuboCop::Cop::Graphql::AuthorizeTypes do
- include CopHelper
-
subject(:cop) { described_class.new }
it 'adds an offense when there is no authorize call' do
- inspect_source(<<~TYPE)
+ expect_offense(<<~TYPE)
module Types
class AType < BaseObject
+ ^^^^^^^^^^^^^^^^^^^^^^^^ Add an `authorize :ability` call to the type: https://docs.gitlab.com/ee/development/api_graphql_styleguide.html#type-authorization
field :a_thing
field :another_thing
end
end
TYPE
-
- expect(cop.offenses.size).to eq 1
end
it 'does not add an offense for classes that have an authorize call' do
diff --git a/spec/rubocop/cop/graphql/descriptions_spec.rb b/spec/rubocop/cop/graphql/descriptions_spec.rb
index b44205b0920..9ad40fad83d 100644
--- a/spec/rubocop/cop/graphql/descriptions_spec.rb
+++ b/spec/rubocop/cop/graphql/descriptions_spec.rb
@@ -5,38 +5,34 @@ require 'rubocop'
require_relative '../../../../rubocop/cop/graphql/descriptions'
RSpec.describe RuboCop::Cop::Graphql::Descriptions do
- include CopHelper
-
subject(:cop) { described_class.new }
context 'fields' do
it 'adds an offense when there is no description' do
- inspect_source(<<~TYPE)
+ expect_offense(<<~TYPE)
module Types
class FakeType < BaseObject
field :a_thing,
+ ^^^^^^^^^^^^^^^ Please add a `description` property.
GraphQL::STRING_TYPE,
null: false
end
end
TYPE
-
- expect(cop.offenses.size).to eq 1
end
it 'adds an offense when description does not end in a period' do
- inspect_source(<<~TYPE)
+ expect_offense(<<~TYPE)
module Types
class FakeType < BaseObject
field :a_thing,
+ ^^^^^^^^^^^^^^^ `description` strings must end with a `.`.
GraphQL::STRING_TYPE,
null: false,
description: 'A descriptive description'
end
end
TYPE
-
- expect(cop.offenses.size).to eq 1
end
it 'does not add an offense when description is correct' do
@@ -55,32 +51,30 @@ RSpec.describe RuboCop::Cop::Graphql::Descriptions do
context 'arguments' do
it 'adds an offense when there is no description' do
- inspect_source(<<~TYPE)
+ expect_offense(<<~TYPE)
module Types
class FakeType < BaseObject
argument :a_thing,
+ ^^^^^^^^^^^^^^^^^^ Please add a `description` property.
GraphQL::STRING_TYPE,
null: false
end
end
TYPE
-
- expect(cop.offenses.size).to eq 1
end
it 'adds an offense when description does not end in a period' do
- inspect_source(<<~TYPE)
+ expect_offense(<<~TYPE)
module Types
class FakeType < BaseObject
argument :a_thing,
+ ^^^^^^^^^^^^^^^^^^ `description` strings must end with a `.`.
GraphQL::STRING_TYPE,
null: false,
description: 'Behold! A description'
end
end
TYPE
-
- expect(cop.offenses.size).to eq 1
end
it 'does not add an offense when description is correct' do
diff --git a/spec/rubocop/cop/graphql/gid_expected_type_spec.rb b/spec/rubocop/cop/graphql/gid_expected_type_spec.rb
index 8fd7ae03748..d9a129244d6 100644
--- a/spec/rubocop/cop/graphql/gid_expected_type_spec.rb
+++ b/spec/rubocop/cop/graphql/gid_expected_type_spec.rb
@@ -6,16 +6,13 @@ require 'rubocop'
require_relative '../../../../rubocop/cop/graphql/gid_expected_type'
RSpec.describe RuboCop::Cop::Graphql::GIDExpectedType do
- include CopHelper
-
subject(:cop) { described_class.new }
it 'adds an offense when there is no expected_type parameter' do
- inspect_source(<<~TYPE)
+ expect_offense(<<~TYPE)
GitlabSchema.object_from_id(received_id)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Add an expected_type parameter to #object_from_id calls if possible.
TYPE
-
- expect(cop.offenses.size).to eq 1
end
it 'does not add an offense for calls that have an expected_type parameter' do
diff --git a/spec/rubocop/cop/graphql/id_type_spec.rb b/spec/rubocop/cop/graphql/id_type_spec.rb
index 6135c9fef43..93c01cd7f06 100644
--- a/spec/rubocop/cop/graphql/id_type_spec.rb
+++ b/spec/rubocop/cop/graphql/id_type_spec.rb
@@ -6,16 +6,13 @@ require 'rubocop'
require_relative '../../../../rubocop/cop/graphql/id_type'
RSpec.describe RuboCop::Cop::Graphql::IDType do
- include CopHelper
-
subject(:cop) { described_class.new }
it 'adds an offense when GraphQL::ID_TYPE is used as a param to #argument' do
- inspect_source(<<~TYPE)
+ expect_offense(<<~TYPE)
argument :some_arg, GraphQL::ID_TYPE, some: other, params: do_not_matter
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Do not use GraphQL::ID_TYPE, use a specific GlobalIDType instead
TYPE
-
- expect(cop.offenses.size).to eq 1
end
context 'whitelisted arguments' do
diff --git a/spec/rubocop/cop/graphql/json_type_spec.rb b/spec/rubocop/cop/graphql/json_type_spec.rb
index 6d9f86e44d2..91838c1708e 100644
--- a/spec/rubocop/cop/graphql/json_type_spec.rb
+++ b/spec/rubocop/cop/graphql/json_type_spec.rb
@@ -5,29 +5,29 @@ require 'rubocop'
require_relative '../../../../rubocop/cop/graphql/json_type'
RSpec.describe RuboCop::Cop::Graphql::JSONType do
- include CopHelper
+ let(:msg) do
+ 'Avoid using GraphQL::Types::JSON. See: https://docs.gitlab.com/ee/development/api_graphql_styleguide.html#json'
+ end
subject(:cop) { described_class.new }
context 'fields' do
it 'adds an offense when GraphQL::Types::JSON is used' do
- inspect_source(<<~RUBY.strip)
+ expect_offense(<<~RUBY)
class MyType
field :some_field, GraphQL::Types::JSON
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{msg}
end
RUBY
-
- expect(cop.offenses.size).to eq(1)
end
it 'adds an offense when GraphQL::Types::JSON is used with other keywords' do
- inspect_source(<<~RUBY.strip)
+ expect_offense(<<~RUBY)
class MyType
field :some_field, GraphQL::Types::JSON, null: true, description: 'My description'
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{msg}
end
RUBY
-
- expect(cop.offenses.size).to eq(1)
end
it 'does not add an offense for other types' do
@@ -41,23 +41,21 @@ RSpec.describe RuboCop::Cop::Graphql::JSONType do
context 'arguments' do
it 'adds an offense when GraphQL::Types::JSON is used' do
- inspect_source(<<~RUBY.strip)
+ expect_offense(<<~RUBY)
class MyType
argument :some_arg, GraphQL::Types::JSON
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{msg}
end
RUBY
-
- expect(cop.offenses.size).to eq(1)
end
it 'adds an offense when GraphQL::Types::JSON is used with other keywords' do
- inspect_source(<<~RUBY.strip)
+ expect_offense(<<~RUBY)
class MyType
argument :some_arg, GraphQL::Types::JSON, null: true, description: 'My description'
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{msg}
end
RUBY
-
- expect(cop.offenses.size).to eq(1)
end
it 'does not add an offense for other types' do
diff --git a/spec/rubocop/cop/graphql/resolver_type_spec.rb b/spec/rubocop/cop/graphql/resolver_type_spec.rb
index 25213e30528..11c0ad284a9 100644
--- a/spec/rubocop/cop/graphql/resolver_type_spec.rb
+++ b/spec/rubocop/cop/graphql/resolver_type_spec.rb
@@ -6,24 +6,19 @@ require 'rubocop'
require_relative '../../../../rubocop/cop/graphql/resolver_type'
RSpec.describe RuboCop::Cop::Graphql::ResolverType do
- include CopHelper
-
subject(:cop) { described_class.new }
- it 'adds an offense when there is no type annotaion' do
- lacks_type = <<-SRC
+ it 'adds an offense when there is no type annotation' do
+ expect_offense(<<~SRC)
module Resolvers
class FooResolver < BaseResolver
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Missing type annotation: Please add `type` DSL method call. e.g: type UserType.connection_type, null: true
def resolve(**args)
[:thing]
end
end
end
SRC
-
- inspect_source(lacks_type)
-
- expect(cop.offenses.size).to eq 1
end
it 'does not add an offense for resolvers that have a type call' do
@@ -41,9 +36,10 @@ RSpec.describe RuboCop::Cop::Graphql::ResolverType do
end
it 'ignores type calls on other objects' do
- lacks_type = <<-SRC
+ expect_offense(<<~SRC)
module Resolvers
class FooResolver < BaseResolver
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Missing type annotation: Please add `type` DSL method call. e.g: type UserType.connection_type, null: true
class FalsePositive < BaseObject
type RedHerringType, null: true
end
@@ -54,10 +50,6 @@ RSpec.describe RuboCop::Cop::Graphql::ResolverType do
end
end
SRC
-
- inspect_source(lacks_type)
-
- expect(cop.offenses.size).to eq 1
end
it 'does not add an offense unless the class is named using the Resolver convention' do
diff --git a/spec/rubocop/cop/group_public_or_visible_to_user_spec.rb b/spec/rubocop/cop/group_public_or_visible_to_user_spec.rb
index ac6c481a7c3..b3ec426dc07 100644
--- a/spec/rubocop/cop/group_public_or_visible_to_user_spec.rb
+++ b/spec/rubocop/cop/group_public_or_visible_to_user_spec.rb
@@ -2,29 +2,28 @@
require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/group_public_or_visible_to_user'
RSpec.describe RuboCop::Cop::GroupPublicOrVisibleToUser do
- include CopHelper
+ let(:msg) do
+ "`Group.public_or_visible_to_user` should be used with extreme care. " \
+ "Please ensure that you are not using it on its own and that the amount of rows being filtered is reasonable."
+ end
subject(:cop) { described_class.new }
it 'flags the use of Group.public_or_visible_to_user with a constant receiver' do
- inspect_source('Group.public_or_visible_to_user')
-
- expect(cop.offenses.size).to eq(1)
+ expect_offense(<<~CODE)
+ Group.public_or_visible_to_user
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{msg}
+ CODE
end
- it 'does not flat the use of public_or_visible_to_user with a constant that is not Group' do
- inspect_source('Project.public_or_visible_to_user')
-
- expect(cop.offenses.size).to eq(0)
+ it 'does not flag the use of public_or_visible_to_user with a constant that is not Group' do
+ expect_no_offenses('Project.public_or_visible_to_user')
end
it 'does not flag the use of Group.public_or_visible_to_user with a send receiver' do
- inspect_source('foo.public_or_visible_to_user')
-
- expect(cop.offenses.size).to eq(0)
+ expect_no_offenses('foo.public_or_visible_to_user')
end
end
diff --git a/spec/rubocop/cop/inject_enterprise_edition_module_spec.rb b/spec/rubocop/cop/inject_enterprise_edition_module_spec.rb
index 47247006e42..2d293fd0a05 100644
--- a/spec/rubocop/cop/inject_enterprise_edition_module_spec.rb
+++ b/spec/rubocop/cop/inject_enterprise_edition_module_spec.rb
@@ -2,12 +2,9 @@
require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/inject_enterprise_edition_module'
RSpec.describe RuboCop::Cop::InjectEnterpriseEditionModule do
- include CopHelper
-
subject(:cop) { described_class.new }
it 'flags the use of `prepend_if_ee EE` in the middle of a file' do
@@ -185,18 +182,19 @@ RSpec.describe RuboCop::Cop::InjectEnterpriseEditionModule do
end
it 'autocorrects offenses by just disabling the Cop' do
- source = <<~SOURCE
- class Foo
- prepend_if_ee 'EE::Foo'
- include_if_ee 'Bar'
- end
+ expect_offense(<<~SOURCE)
+ class Foo
+ prepend_if_ee 'EE::Foo'
+ ^^^^^^^^^^^^^^^^^^^^^^^ Injecting EE modules must be done on the last line of this file, outside of any class or module definitions
+ include_if_ee 'Bar'
+ end
SOURCE
- expect(autocorrect_source(source)).to eq(<<~SOURCE)
- class Foo
- prepend_if_ee 'EE::Foo' # rubocop: disable Cop/InjectEnterpriseEditionModule
- include_if_ee 'Bar'
- end
+ expect_correction(<<~SOURCE)
+ class Foo
+ prepend_if_ee 'EE::Foo' # rubocop: disable Cop/InjectEnterpriseEditionModule
+ include_if_ee 'Bar'
+ end
SOURCE
end
diff --git a/spec/rubocop/cop/lint/last_keyword_argument_spec.rb b/spec/rubocop/cop/lint/last_keyword_argument_spec.rb
index 826c681a880..aac59f0db4c 100644
--- a/spec/rubocop/cop/lint/last_keyword_argument_spec.rb
+++ b/spec/rubocop/cop/lint/last_keyword_argument_spec.rb
@@ -5,8 +5,6 @@ require 'rubocop'
require_relative '../../../../rubocop/cop/lint/last_keyword_argument'
RSpec.describe RuboCop::Cop::Lint::LastKeywordArgument do
- include CopHelper
-
subject(:cop) { described_class.new }
before do
diff --git a/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb b/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb
index 97b9d0d1ee2..149fb0a48eb 100644
--- a/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb
+++ b/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb
@@ -28,6 +28,15 @@ RSpec.describe RuboCop::Cop::Migration::AddLimitToTextColumns do
^^^^ #{described_class::MSG}
end
+ create_table_with_constraints :test_text_limits_create do |t|
+ t.integer :test_id, null: false
+ t.text :title
+ t.text :description
+ ^^^^ #{described_class::MSG}
+
+ t.text_limit :title, 100
+ end
+
add_column :test_text_limits, :email, :text
^^^^^^^^^^ #{described_class::MSG}
@@ -57,6 +66,15 @@ RSpec.describe RuboCop::Cop::Migration::AddLimitToTextColumns do
t.text :name
end
+ create_table_with_constraints :test_text_limits_create do |t|
+ t.integer :test_id, null: false
+ t.text :title
+ t.text :description
+
+ t.text_limit :title, 100
+ t.text_limit :description, 255
+ end
+
add_column :test_text_limits, :email, :text
add_column_with_default :test_text_limits, :role, :text, default: 'default'
change_column_type_concurrently :test_text_limits, :test_id, :text
diff --git a/spec/rubocop/cop/put_project_routes_under_scope_spec.rb b/spec/rubocop/cop/put_project_routes_under_scope_spec.rb
index b0627af0e8b..eb783d22129 100644
--- a/spec/rubocop/cop/put_project_routes_under_scope_spec.rb
+++ b/spec/rubocop/cop/put_project_routes_under_scope_spec.rb
@@ -5,8 +5,6 @@ require 'rubocop'
require_relative '../../../rubocop/cop/put_project_routes_under_scope'
RSpec.describe RuboCop::Cop::PutProjectRoutesUnderScope do
- include CopHelper
-
subject(:cop) { described_class.new }
%w[resource resources get post put patch delete].each do |route_method|
diff --git a/spec/rubocop/cop/qa/ambiguous_page_object_name_spec.rb b/spec/rubocop/cop/qa/ambiguous_page_object_name_spec.rb
index 4876fcd5050..9332ab4186e 100644
--- a/spec/rubocop/cop/qa/ambiguous_page_object_name_spec.rb
+++ b/spec/rubocop/cop/qa/ambiguous_page_object_name_spec.rb
@@ -1,15 +1,11 @@
# frozen_string_literal: true
require 'fast_spec_helper'
-
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/qa/ambiguous_page_object_name'
RSpec.describe RuboCop::Cop::QA::AmbiguousPageObjectName do
- include CopHelper
-
let(:source_file) { 'qa/page.rb' }
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/qa/element_with_pattern_spec.rb b/spec/rubocop/cop/qa/element_with_pattern_spec.rb
index 6289b1a7c97..28c351ccf1e 100644
--- a/spec/rubocop/cop/qa/element_with_pattern_spec.rb
+++ b/spec/rubocop/cop/qa/element_with_pattern_spec.rb
@@ -1,15 +1,11 @@
# frozen_string_literal: true
require 'fast_spec_helper'
-
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/qa/element_with_pattern'
RSpec.describe RuboCop::Cop::QA::ElementWithPattern do
- include CopHelper
-
let(:source_file) { 'qa/page.rb' }
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/rspec/factories_in_migration_specs_spec.rb b/spec/rubocop/cop/rspec/factories_in_migration_specs_spec.rb
index fe9cea47a43..7cb8354a37c 100644
--- a/spec/rubocop/cop/rspec/factories_in_migration_specs_spec.rb
+++ b/spec/rubocop/cop/rspec/factories_in_migration_specs_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe RuboCop::Cop::RSpec::FactoriesInMigrationSpecs do
subject(:cop) { described_class.new }
shared_examples 'an offensive factory call' do |namespace|
- %i[build build_list create create_list].each do |forbidden_method|
+ %i[build build_list create create_list attributes_for].each do |forbidden_method|
namespaced_forbidden_method = "#{namespace}#{forbidden_method}(:user)"
it "registers an offense for #{namespaced_forbidden_method}" do
diff --git a/spec/rubocop/cop/ruby_interpolation_in_translation_spec.rb b/spec/rubocop/cop/ruby_interpolation_in_translation_spec.rb
index a6a44b3fa68..cacf0a1b67d 100644
--- a/spec/rubocop/cop/ruby_interpolation_in_translation_spec.rb
+++ b/spec/rubocop/cop/ruby_interpolation_in_translation_spec.rb
@@ -1,68 +1,51 @@
# frozen_string_literal: true
require 'fast_spec_helper'
-
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/ruby_interpolation_in_translation'
# Disabling interpolation check as we deliberately want to have #{} in strings.
# rubocop:disable Lint/InterpolationCheck
RSpec.describe RuboCop::Cop::RubyInterpolationInTranslation do
- subject(:cop) { described_class.new }
+ let(:msg) { "Don't use ruby interpolation \#{} inside translated strings, instead use %{}" }
- it 'does not add an offence for a regular messages' do
- inspect_source('_("Hello world")')
+ subject(:cop) { described_class.new }
- expect(cop.offenses).to be_empty
+ it 'does not add an offense for a regular messages' do
+ expect_no_offenses('_("Hello world")')
end
- it 'adds the correct offence when using interpolation in a string' do
- inspect_source('_("Hello #{world}")')
-
- offense = cop.offenses.first
-
- expect(offense.location.source).to eq('#{world}')
- expect(offense.message).to eq('Don\'t use ruby interpolation #{} inside translated strings, instead use %{}')
+ it 'adds the correct offense when using interpolation in a string' do
+ expect_offense(<<~CODE)
+ _("Hello \#{world}")
+ ^^^^^ #{msg}
+ ^^^^^^^^ #{msg}
+ CODE
end
it 'detects when using a ruby interpolation in the first argument of a pluralized string' do
- inspect_source('n_("Hello #{world}", "Hello world")')
-
- expect(cop.offenses).not_to be_empty
+ expect_offense(<<~CODE)
+ n_("Hello \#{world}", "Hello world")
+ ^^^^^ #{msg}
+ ^^^^^^^^ #{msg}
+ CODE
end
it 'detects when using a ruby interpolation in the second argument of a pluralized string' do
- inspect_source('n_("Hello world", "Hello #{world}")')
-
- expect(cop.offenses).not_to be_empty
+ expect_offense(<<~CODE)
+ n_("Hello world", "Hello \#{world}")
+ ^^^^^ #{msg}
+ ^^^^^^^^ #{msg}
+ CODE
end
it 'detects when using interpolation in a namespaced translation' do
- inspect_source('s_("Hello|#{world}")')
-
- expect(cop.offenses).not_to be_empty
- end
-
- it 'does not add an offence for messages defined over multiple lines' do
- source = <<~SRC
- _("Hello "\
- "world ")
- SRC
-
- inspect_source(source)
- expect(cop.offenses).to be_empty
- end
-
- it 'adds an offence for violations in a message defined over multiple lines' do
- source = <<~SRC
- _("Hello "\
- "\#{world} ")
- SRC
-
- inspect_source(source)
- expect(cop.offenses).not_to be_empty
+ expect_offense(<<~CODE)
+ s_("Hello|\#{world}")
+ ^^^^^ #{msg}
+ ^^^^^^^^ #{msg}
+ CODE
end
end
# rubocop:enable Lint/InterpolationCheck
diff --git a/spec/rubocop/cop/static_translation_definition_spec.rb b/spec/rubocop/cop/static_translation_definition_spec.rb
index 8a38a318999..8656b07a6e4 100644
--- a/spec/rubocop/cop/static_translation_definition_spec.rb
+++ b/spec/rubocop/cop/static_translation_definition_spec.rb
@@ -8,78 +8,76 @@ require 'rspec-parameterized'
require_relative '../../../rubocop/cop/static_translation_definition'
RSpec.describe RuboCop::Cop::StaticTranslationDefinition do
- include CopHelper
-
using RSpec::Parameterized::TableSyntax
+ let(:msg) do
+ "The text you're translating will be already in the translated form when it's assigned to the constant. " \
+ "When a users changes the locale, these texts won't be translated again. " \
+ "Consider moving the translation logic to a method."
+ end
+
subject(:cop) { described_class.new }
- shared_examples 'offense' do |code, highlight, line|
+ shared_examples 'offense' do |code|
it 'registers an offense' do
- inspect_source(code)
-
- expect(cop.offenses.size).to eq(1)
- expect(cop.offenses.map(&:line)).to eq([line])
- expect(cop.highlights).to eq([highlight])
+ expect_offense(code)
end
end
shared_examples 'no offense' do |code|
it 'does not register an offense' do
- inspect_source(code)
-
- expect(cop.offenses).to be_empty
+ expect_no_offenses(code)
end
end
describe 'offenses' do
- where(:code, :highlight, :line) do
+ where(:code) do
[
- ['A = _("a")', '_("a")', 1],
- ['B = s_("b")', 's_("b")', 1],
- ['C = n_("c")', 'n_("c")', 1],
- [
- <<~CODE,
- class MyClass
- def self.translations
- @cache ||= { hello: _("hello") }
- end
+ <<~CODE,
+ A = _("a")
+ ^^^^^^ #{msg}
+ CODE
+ <<~CODE,
+ B = s_("b")
+ ^^^^^^^ #{msg}
+ CODE
+ <<~CODE,
+ C = n_("c")
+ ^^^^^^^ #{msg}
+ CODE
+ <<~CODE,
+ class MyClass
+ def self.translations
+ @cache ||= { hello: _("hello") }
+ ^^^^^^^^^^ #{msg}
end
- CODE
- '_("hello")',
- 3
- ],
- [
- <<~CODE,
- module MyModule
- A = {
- b: {
- c: _("a")
- }
+ end
+ CODE
+ <<~CODE,
+ module MyModule
+ A = {
+ b: {
+ c: _("a")
+ ^^^^^^ #{msg}
}
- end
- CODE
- '_("a")',
- 4
- ],
- [
- <<~CODE,
- class MyClass
- B = [
- [
- s_("a")
- ]
+ }
+ end
+ CODE
+ <<~CODE
+ class MyClass
+ B = [
+ [
+ s_("a")
+ ^^^^^^^ #{msg}
]
- end
- CODE
- 's_("a")',
- 4
- ]
+ ]
+ end
+ CODE
]
end
with_them do
- include_examples 'offense', params[:code], params[:highlight], params[:line]
+ include_examples 'offense', params[:code]
end
end
diff --git a/spec/rubocop/cop/usage_data/distinct_count_by_large_foreign_key_spec.rb b/spec/rubocop/cop/usage_data/distinct_count_by_large_foreign_key_spec.rb
index 1c90df798a5..b6711effe9e 100644
--- a/spec/rubocop/cop/usage_data/distinct_count_by_large_foreign_key_spec.rb
+++ b/spec/rubocop/cop/usage_data/distinct_count_by_large_foreign_key_spec.rb
@@ -1,17 +1,13 @@
# frozen_string_literal: true
require 'fast_spec_helper'
-
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/usage_data/distinct_count_by_large_foreign_key'
RSpec.describe RuboCop::Cop::UsageData::DistinctCountByLargeForeignKey do
- include CopHelper
-
let(:allowed_foreign_keys) { [:author_id, :user_id, :'merge_requests.target_project_id'] }
-
+ let(:msg) { 'Avoid doing `distinct_count` on foreign keys for large tables having above 100 million rows.' }
let(:config) do
RuboCop::Config.new('UsageData/DistinctCountByLargeForeignKey' => {
'AllowedForeignKeys' => allowed_foreign_keys
@@ -21,36 +17,32 @@ RSpec.describe RuboCop::Cop::UsageData::DistinctCountByLargeForeignKey do
subject(:cop) { described_class.new(config) }
context 'when counting by disallowed key' do
- it 'registers an offence' do
- inspect_source('distinct_count(Issue, :creator_id)')
-
- expect(cop.offenses.size).to eq(1)
+ it 'registers an offense' do
+ expect_offense(<<~CODE)
+ distinct_count(Issue, :creator_id)
+ ^^^^^^^^^^^^^^ #{msg}
+ CODE
end
- it 'does not register an offence when batch is false' do
- inspect_source('distinct_count(Issue, :creator_id, batch: false)')
-
- expect(cop.offenses).to be_empty
+ it 'does not register an offense when batch is false' do
+ expect_no_offenses('distinct_count(Issue, :creator_id, batch: false)')
end
- it 'register an offence when batch is true' do
- inspect_source('distinct_count(Issue, :creator_id, batch: true)')
-
- expect(cop.offenses.size).to eq(1)
+ it 'registers an offense when batch is true' do
+ expect_offense(<<~CODE)
+ distinct_count(Issue, :creator_id, batch: true)
+ ^^^^^^^^^^^^^^ #{msg}
+ CODE
end
end
context 'when calling by allowed key' do
- it 'does not register an offence with symbol' do
- inspect_source('distinct_count(Issue, :author_id)')
-
- expect(cop.offenses).to be_empty
+ it 'does not register an offense with symbol' do
+ expect_no_offenses('distinct_count(Issue, :author_id)')
end
- it 'does not register an offence with string' do
- inspect_source("distinct_count(Issue, 'merge_requests.target_project_id')")
-
- expect(cop.offenses).to be_empty
+ it 'does not register an offense with string' do
+ expect_no_offenses("distinct_count(Issue, 'merge_requests.target_project_id')")
end
end
end
diff --git a/spec/rubocop/cop/usage_data/large_table_spec.rb b/spec/rubocop/cop/usage_data/large_table_spec.rb
index 638e8c67dc8..26bd4e61625 100644
--- a/spec/rubocop/cop/usage_data/large_table_spec.rb
+++ b/spec/rubocop/cop/usage_data/large_table_spec.rb
@@ -1,18 +1,15 @@
# frozen_string_literal: true
require 'fast_spec_helper'
-
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/usage_data/large_table'
RSpec.describe RuboCop::Cop::UsageData::LargeTable do
- include CopHelper
-
let(:large_tables) { %i[Rails Time] }
let(:count_methods) { %i[count distinct_count] }
let(:allowed_methods) { %i[minimum maximum] }
+ let(:msg) { 'Use one of the count, distinct_count methods for counting on' }
let(:config) do
RuboCop::Config.new('UsageData/LargeTable' => {
@@ -31,59 +28,54 @@ RSpec.describe RuboCop::Cop::UsageData::LargeTable do
context 'with large tables' do
context 'when calling Issue.count' do
- it 'register an offence' do
- inspect_source('Issue.count')
-
- expect(cop.offenses.size).to eq(1)
+ it 'registers an offense' do
+ expect_offense(<<~CODE)
+ Issue.count
+ ^^^^^^^^^^^ #{msg} Issue
+ CODE
end
end
context 'when calling Issue.active.count' do
- it 'register an offence' do
- inspect_source('Issue.active.count')
-
- expect(cop.offenses.size).to eq(1)
+ it 'registers an offense' do
+ expect_offense(<<~CODE)
+ Issue.active.count
+ ^^^^^^^^^^^^ #{msg} Issue
+ CODE
end
end
context 'when calling count(Issue)' do
- it 'does not register an offence' do
- inspect_source('count(Issue)')
-
- expect(cop.offenses).to be_empty
+ it 'does not register an offense' do
+ expect_no_offenses('count(Issue)')
end
end
context 'when calling count(Ci::Build.active)' do
- it 'does not register an offence' do
- inspect_source('count(Ci::Build.active)')
-
- expect(cop.offenses).to be_empty
+ it 'does not register an offense' do
+ expect_no_offenses('count(Ci::Build.active)')
end
end
context 'when calling Ci::Build.active.count' do
- it 'register an offence' do
- inspect_source('Ci::Build.active.count')
-
- expect(cop.offenses.size).to eq(1)
+ it 'registers an offense' do
+ expect_offense(<<~CODE)
+ Ci::Build.active.count
+ ^^^^^^^^^^^^^^^^ #{msg} Ci::Build
+ CODE
end
end
context 'when using allowed methods' do
- it 'does not register an offence' do
- inspect_source('Issue.minimum')
-
- expect(cop.offenses).to be_empty
+ it 'does not register an offense' do
+ expect_no_offenses('Issue.minimum')
end
end
end
context 'with non related class' do
- it 'does not register an offence' do
- inspect_source('Rails.count')
-
- expect(cop.offenses).to be_empty
+ it 'does not register an offense' do
+ expect_no_offenses('Rails.count')
end
end
end
diff --git a/spec/rubocop/qa_helpers_spec.rb b/spec/rubocop/qa_helpers_spec.rb
index 051817903a8..cf6d2f1a845 100644
--- a/spec/rubocop/qa_helpers_spec.rb
+++ b/spec/rubocop/qa_helpers_spec.rb
@@ -6,7 +6,7 @@ require 'parser/current'
require_relative '../../rubocop/qa_helpers'
RSpec.describe RuboCop::QAHelpers do
- def parse_source(source, path = 'foo.rb')
+ def build_and_parse_source(source, path = 'foo.rb')
buffer = Parser::Source::Buffer.new(path)
buffer.source = source
@@ -24,13 +24,13 @@ RSpec.describe RuboCop::QAHelpers do
describe '#in_qa_file?' do
it 'returns true for a node in the qa/ directory' do
- node = parse_source('10', rails_root_join('qa', 'qa', 'page', 'dashboard', 'groups.rb'))
+ node = build_and_parse_source('10', rails_root_join('qa', 'qa', 'page', 'dashboard', 'groups.rb'))
expect(cop.in_qa_file?(node)).to eq(true)
end
it 'returns false for a node outside the qa/ directory' do
- node = parse_source('10', rails_root_join('app', 'foo', 'foo.rb'))
+ node = build_and_parse_source('10', rails_root_join('app', 'foo', 'foo.rb'))
expect(cop.in_qa_file?(node)).to eq(false)
end
diff --git a/spec/serializers/admin/user_entity_spec.rb b/spec/serializers/admin/user_entity_spec.rb
index 7db49af09c3..42efe0eec54 100644
--- a/spec/serializers/admin/user_entity_spec.rb
+++ b/spec/serializers/admin/user_entity_spec.rb
@@ -22,6 +22,7 @@ RSpec.describe Admin::UserEntity do
:username,
:last_activity_on,
:avatar_url,
+ :note,
:badges,
:projects_count,
:actions
diff --git a/spec/serializers/admin/user_serializer_spec.rb b/spec/serializers/admin/user_serializer_spec.rb
index 719a90384c6..53a9457409c 100644
--- a/spec/serializers/admin/user_serializer_spec.rb
+++ b/spec/serializers/admin/user_serializer_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe Admin::UserSerializer do
:username,
:last_activity_on,
:avatar_url,
+ :note,
:badges,
:projects_count,
:actions
diff --git a/spec/serializers/ci/codequality_mr_diff_entity_spec.rb b/spec/serializers/ci/codequality_mr_diff_entity_spec.rb
new file mode 100644
index 00000000000..82708908d95
--- /dev/null
+++ b/spec/serializers/ci/codequality_mr_diff_entity_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::CodequalityMrDiffEntity do
+ let(:entity) { described_class.new(mr_diff_report) }
+ let(:mr_diff_report) { Gitlab::Ci::Reports::CodequalityMrDiff.new(codequality_report) }
+ let(:codequality_report) { Gitlab::Ci::Reports::CodequalityReports.new }
+ let(:degradation_1) { build(:codequality_degradation_1) }
+ let(:degradation_2) { build(:codequality_degradation_2) }
+
+ describe '#as_json' do
+ subject(:report) { entity.as_json }
+
+ context 'when quality report has degradations' do
+ before do
+ codequality_report.add_degradation(degradation_1)
+ codequality_report.add_degradation(degradation_2)
+ end
+
+ it 'contains correct codequality mr diff report', :aggregate_failures do
+ expect(report[:files].keys).to eq(["file_a.rb"])
+ expect(report[:files]["file_a.rb"].first).to include(:line, :description, :severity)
+ end
+ end
+ end
+end
diff --git a/spec/serializers/ci/codequality_mr_diff_report_serializer_spec.rb b/spec/serializers/ci/codequality_mr_diff_report_serializer_spec.rb
new file mode 100644
index 00000000000..906ca36041f
--- /dev/null
+++ b/spec/serializers/ci/codequality_mr_diff_report_serializer_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::CodequalityMrDiffReportSerializer do
+ let(:serializer) { described_class.new.represent(mr_diff_report) }
+ let(:mr_diff_report) { Gitlab::Ci::Reports::CodequalityMrDiff.new(codequality_report) }
+ let(:codequality_report) { Gitlab::Ci::Reports::CodequalityReports.new }
+ let(:degradation_1) { build(:codequality_degradation_1) }
+ let(:degradation_2) { build(:codequality_degradation_2) }
+
+ describe '#to_json' do
+ subject { serializer.as_json }
+
+ context 'when quality report has degradations' do
+ before do
+ codequality_report.add_degradation(degradation_1)
+ codequality_report.add_degradation(degradation_2)
+ end
+
+ it 'matches the schema' do
+ expect(subject).to match_schema('entities/codequality_mr_diff_report')
+ end
+ end
+
+ context 'when quality report has no degradations' do
+ it 'matches the schema' do
+ expect(subject).to match_schema('entities/codequality_mr_diff_report')
+ end
+ end
+ end
+end
diff --git a/spec/serializers/ci/pipeline_entity_spec.rb b/spec/serializers/ci/pipeline_entity_spec.rb
new file mode 100644
index 00000000000..6ce3cef5f44
--- /dev/null
+++ b/spec/serializers/ci/pipeline_entity_spec.rb
@@ -0,0 +1,261 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::PipelineEntity do
+ include Gitlab::Routing
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let(:request) { double('request', current_user: user) }
+ let(:entity) { described_class.represent(pipeline, request: request) }
+
+ describe '#as_json' do
+ subject { entity.as_json }
+
+ context 'when pipeline is empty' do
+ let(:pipeline) { create(:ci_empty_pipeline) }
+
+ it 'contains required fields' do
+ expect(subject).to include :id, :user, :path, :coverage, :source
+ expect(subject).to include :ref, :commit
+ expect(subject).to include :updated_at, :created_at
+ end
+
+ it 'excludes coverage data when disabled' do
+ entity = described_class
+ .represent(pipeline, request: request, disable_coverage: true)
+
+ expect(entity.as_json).not_to include(:coverage)
+ end
+
+ it 'contains details' do
+ expect(subject).to include :details
+ expect(subject[:details])
+ .to include :duration, :finished_at, :name
+ expect(subject[:details][:status]).to include :icon, :favicon, :text, :label, :tooltip
+ end
+
+ it 'contains flags' do
+ expect(subject).to include :flags
+ expect(subject[:flags])
+ .to include :stuck, :auto_devops, :yaml_errors,
+ :retryable, :cancelable, :merge_request
+ end
+ end
+
+ context 'when default branch not protected' do
+ before do
+ stub_not_protect_default_branch
+ end
+
+ context 'when pipeline is retryable' do
+ let_it_be(:pipeline) do
+ create(:ci_pipeline, status: :success, project: project)
+ end
+
+ before do
+ create(:ci_build, :failed, pipeline: pipeline)
+ end
+
+ it 'does not serialize stage builds' do
+ subject.with_indifferent_access.dig(:details, :stages, 0).tap do |stage|
+ expect(stage).not_to include(:groups, :latest_statuses, :retries)
+ end
+ end
+
+ context 'user has ability to retry pipeline' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'contains retry path' do
+ expect(subject[:retry_path]).to be_present
+ end
+ end
+
+ context 'user does not have ability to retry pipeline' do
+ it 'does not contain retry path' do
+ expect(subject).not_to have_key(:retry_path)
+ end
+ end
+ end
+
+ context 'when pipeline is cancelable' do
+ let_it_be(:pipeline) do
+ create(:ci_pipeline, status: :running, project: project)
+ end
+
+ before do
+ create(:ci_build, :pending, pipeline: pipeline)
+ end
+
+ it 'does not serialize stage builds' do
+ subject.with_indifferent_access.dig(:details, :stages, 0).tap do |stage|
+ expect(stage).not_to include(:groups, :latest_statuses, :retries)
+ end
+ end
+
+ context 'user has ability to cancel pipeline' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'contains cancel path' do
+ expect(subject[:cancel_path]).to be_present
+ end
+ end
+
+ context 'user does not have ability to cancel pipeline' do
+ it 'does not contain cancel path' do
+ expect(subject).not_to have_key(:cancel_path)
+ end
+ end
+ end
+ end
+
+ context 'delete path' do
+ context 'user has ability to delete pipeline' do
+ let(:project) { create(:project, namespace: user.namespace) }
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ it 'contains delete path' do
+ expect(subject[:delete_path]).to be_present
+ end
+ end
+
+ context 'user does not have ability to delete pipeline' do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ it 'does not contain delete path' do
+ expect(subject).not_to have_key(:delete_path)
+ end
+ end
+ end
+
+ context 'when pipeline ref is empty' do
+ let(:pipeline) { create(:ci_empty_pipeline) }
+
+ before do
+ allow(pipeline).to receive(:ref).and_return(nil)
+ end
+
+ it 'does not generate branch path' do
+ expect(subject[:ref][:path]).to be_nil
+ end
+ end
+
+ context 'when pipeline has a failure reason set' do
+ let(:pipeline) { create(:ci_empty_pipeline) }
+
+ before do
+ pipeline.drop!(:config_error)
+ end
+
+ it 'has a correct failure reason' do
+ expect(subject[:failure_reason])
+ .to eq 'CI/CD YAML configuration error!'
+ end
+ end
+
+ context 'when request has a project' do
+ before do
+ allow(request).to receive(:project).and_return(project)
+ end
+
+ context 'when pipeline is detached merge request pipeline' do
+ let_it_be(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
+ let(:project) { merge_request.target_project }
+ let(:pipeline) { merge_request.pipelines_for_merge_request.first }
+
+ it 'makes detached flag true' do
+ expect(subject[:flags][:detached_merge_request_pipeline]).to be_truthy
+ end
+
+ it 'does not expose source sha and target sha' do
+ expect(subject[:source_sha]).to be_nil
+ expect(subject[:target_sha]).to be_nil
+ end
+
+ context 'when user is a developer' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'has merge request information' do
+ expect(subject[:merge_request][:iid]).to eq(merge_request.iid)
+
+ expect(project_merge_request_path(project, merge_request))
+ .to include(subject[:merge_request][:path])
+
+ expect(subject[:merge_request][:title]).to eq(merge_request.title)
+
+ expect(subject[:merge_request][:source_branch])
+ .to eq(merge_request.source_branch)
+
+ expect(project_commits_path(project, merge_request.source_branch))
+ .to include(subject[:merge_request][:source_branch_path])
+
+ expect(subject[:merge_request][:target_branch])
+ .to eq(merge_request.target_branch)
+
+ expect(project_commits_path(project, merge_request.target_branch))
+ .to include(subject[:merge_request][:target_branch_path])
+ end
+ end
+
+ context 'when user is an external user' do
+ it 'has no merge request information' do
+ expect(subject[:merge_request]).to be_nil
+ end
+ end
+ end
+
+ context 'when pipeline is merge request pipeline' do
+ let_it_be(:merge_request) { create(:merge_request, :with_merge_request_pipeline, merge_sha: 'abc') }
+ let(:project) { merge_request.target_project }
+ let(:pipeline) { merge_request.pipelines_for_merge_request.first }
+
+ it 'makes detached flag false' do
+ expect(subject[:flags][:detached_merge_request_pipeline]).to be_falsy
+ end
+
+ it 'makes attached flag true' do
+ expect(subject[:flags][:merge_request_pipeline]).to be_truthy
+ end
+
+ it 'exposes source sha and target sha' do
+ expect(subject[:source_sha]).to be_present
+ expect(subject[:target_sha]).to be_present
+ end
+
+ it 'exposes merge request event type' do
+ expect(subject[:merge_request_event_type]).to be_present
+ end
+ end
+ end
+
+ context 'when pipeline has failed builds' do
+ let_it_be(:pipeline) { create(:ci_pipeline, user: user) }
+ let_it_be(:build) { create(:ci_build, :success, pipeline: pipeline) }
+ let_it_be(:failed_1) { create(:ci_build, :failed, pipeline: pipeline) }
+ let_it_be(:failed_2) { create(:ci_build, :failed, pipeline: pipeline) }
+
+ context 'when the user can retry the pipeline' do
+ it 'exposes these failed builds' do
+ allow(entity).to receive(:can_retry?).and_return(true)
+
+ expect(subject[:failed_builds].map { |b| b[:id] }).to contain_exactly(failed_1.id, failed_2.id)
+ end
+ end
+
+ context 'when the user cannot retry the pipeline' do
+ it 'is nil' do
+ allow(entity).to receive(:can_retry?).and_return(false)
+
+ expect(subject[:failed_builds]).to be_nil
+ end
+ end
+ end
+ end
+end
diff --git a/spec/serializers/codequality_degradation_entity_spec.rb b/spec/serializers/codequality_degradation_entity_spec.rb
index fc969195e35..315f00baa72 100644
--- a/spec/serializers/codequality_degradation_entity_spec.rb
+++ b/spec/serializers/codequality_degradation_entity_spec.rb
@@ -10,77 +10,24 @@ RSpec.describe CodequalityDegradationEntity do
context 'when codequality contains an error' do
context 'when line is included in location' do
- let(:codequality_degradation) do
- {
- "categories": [
- "Complexity"
- ],
- "check_name": "argument_count",
- "content": {
- "body": ""
- },
- "description": "Method `new_array` has 12 arguments (exceeds 4 allowed). Consider refactoring.",
- "fingerprint": "15cdb5c53afd42bc22f8ca366a08d547",
- "location": {
- "path": "foo.rb",
- "lines": {
- "begin": 10,
- "end": 10
- }
- },
- "other_locations": [],
- "remediation_points": 900000,
- "severity": "major",
- "type": "issue",
- "engine_name": "structure"
- }.with_indifferent_access
- end
+ let(:codequality_degradation) { build(:codequality_degradation_2) }
it 'contains correct codequality degradation details', :aggregate_failures do
expect(subject[:description]).to eq("Method `new_array` has 12 arguments (exceeds 4 allowed). Consider refactoring.")
expect(subject[:severity]).to eq("major")
- expect(subject[:file_path]).to eq("foo.rb")
+ expect(subject[:file_path]).to eq("file_a.rb")
expect(subject[:line]).to eq(10)
end
end
context 'when line is included in positions' do
- let(:codequality_degradation) do
- {
- "type": "Issue",
- "check_name": "Rubocop/Metrics/ParameterLists",
- "description": "Avoid parameter lists longer than 5 parameters. [12/5]",
- "categories": [
- "Complexity"
- ],
- "remediation_points": 550000,
- "location": {
- "path": "foo.rb",
- "positions": {
- "begin": {
- "column": 24,
- "line": 14
- },
- "end": {
- "column": 49,
- "line": 14
- }
- }
- },
- "content": {
- "body": "This cop checks for methods with too many parameters.\nThe maximum number of parameters is configurable.\nKeyword arguments can optionally be excluded from the total count."
- },
- "engine_name": "rubocop",
- "fingerprint": "ab5f8b935886b942d621399f5a2ca16e",
- "severity": "minor"
- }.with_indifferent_access
- end
+ let(:codequality_degradation) { build(:codequality_degradation_3) }
it 'contains correct codequality degradation details', :aggregate_failures do
expect(subject[:description]).to eq("Avoid parameter lists longer than 5 parameters. [12/5]")
expect(subject[:severity]).to eq("minor")
- expect(subject[:file_path]).to eq("foo.rb")
- expect(subject[:line]).to eq(14)
+ expect(subject[:file_path]).to eq("file_b.rb")
+ expect(subject[:line]).to eq(10)
end
end
end
diff --git a/spec/serializers/codequality_reports_comparer_entity_spec.rb b/spec/serializers/codequality_reports_comparer_entity_spec.rb
index 7a79c30cc3f..3c74c20ca19 100644
--- a/spec/serializers/codequality_reports_comparer_entity_spec.rb
+++ b/spec/serializers/codequality_reports_comparer_entity_spec.rb
@@ -7,62 +7,8 @@ RSpec.describe CodequalityReportsComparerEntity do
let(:comparer) { Gitlab::Ci::Reports::CodequalityReportsComparer.new(base_report, head_report) }
let(:base_report) { Gitlab::Ci::Reports::CodequalityReports.new }
let(:head_report) { Gitlab::Ci::Reports::CodequalityReports.new }
- let(:degradation_1) do
- {
- "categories": [
- "Complexity"
- ],
- "check_name": "argument_count",
- "content": {
- "body": ""
- },
- "description": "Method `new_array` has 12 arguments (exceeds 4 allowed). Consider refactoring.",
- "fingerprint": "15cdb5c53afd42bc22f8ca366a08d547",
- "location": {
- "path": "foo.rb",
- "lines": {
- "begin": 10,
- "end": 10
- }
- },
- "other_locations": [],
- "remediation_points": 900000,
- "severity": "major",
- "type": "issue",
- "engine_name": "structure"
- }.with_indifferent_access
- end
-
- let(:degradation_2) do
- {
- "type": "Issue",
- "check_name": "Rubocop/Metrics/ParameterLists",
- "description": "Avoid parameter lists longer than 5 parameters. [12/5]",
- "categories": [
- "Complexity"
- ],
- "remediation_points": 550000,
- "location": {
- "path": "foo.rb",
- "positions": {
- "begin": {
- "column": 14,
- "line": 10
- },
- "end": {
- "column": 39,
- "line": 10
- }
- }
- },
- "content": {
- "body": "This cop checks for methods with too many parameters.\nThe maximum number of parameters is configurable.\nKeyword arguments can optionally be excluded from the total count."
- },
- "engine_name": "rubocop",
- "fingerprint": "ab5f8b935886b942d621399f5a2ca16e",
- "severity": "minor"
- }.with_indifferent_access
- end
+ let(:degradation_1) { build(:codequality_degradation_1) }
+ let(:degradation_2) { build(:codequality_degradation_2) }
describe '#as_json' do
subject { entity.as_json }
diff --git a/spec/serializers/codequality_reports_comparer_serializer_spec.rb b/spec/serializers/codequality_reports_comparer_serializer_spec.rb
index 3c47bfb6adc..50c8a69737c 100644
--- a/spec/serializers/codequality_reports_comparer_serializer_spec.rb
+++ b/spec/serializers/codequality_reports_comparer_serializer_spec.rb
@@ -8,62 +8,8 @@ RSpec.describe CodequalityReportsComparerSerializer do
let(:comparer) { Gitlab::Ci::Reports::CodequalityReportsComparer.new(base_report, head_report) }
let(:base_report) { Gitlab::Ci::Reports::CodequalityReports.new }
let(:head_report) { Gitlab::Ci::Reports::CodequalityReports.new }
- let(:degradation_1) do
- {
- "categories": [
- "Complexity"
- ],
- "check_name": "argument_count",
- "content": {
- "body": ""
- },
- "description": "Method `new_array` has 12 arguments (exceeds 4 allowed). Consider refactoring.",
- "fingerprint": "15cdb5c53afd42bc22f8ca366a08d547",
- "location": {
- "path": "foo.rb",
- "lines": {
- "begin": 10,
- "end": 10
- }
- },
- "other_locations": [],
- "remediation_points": 900000,
- "severity": "major",
- "type": "issue",
- "engine_name": "structure"
- }.with_indifferent_access
- end
-
- let(:degradation_2) do
- {
- "type": "Issue",
- "check_name": "Rubocop/Metrics/ParameterLists",
- "description": "Avoid parameter lists longer than 5 parameters. [12/5]",
- "categories": [
- "Complexity"
- ],
- "remediation_points": 550000,
- "location": {
- "path": "foo.rb",
- "positions": {
- "begin": {
- "column": 14,
- "line": 10
- },
- "end": {
- "column": 39,
- "line": 10
- }
- }
- },
- "content": {
- "body": "This cop checks for methods with too many parameters.\nThe maximum number of parameters is configurable.\nKeyword arguments can optionally be excluded from the total count."
- },
- "engine_name": "rubocop",
- "fingerprint": "ab5f8b935886b942d621399f5a2ca16e",
- "severity": "minor"
- }.with_indifferent_access
- end
+ let(:degradation_1) { build(:codequality_degradation_1) }
+ let(:degradation_2) { build(:codequality_degradation_2) }
describe '#to_json' do
subject { serializer.as_json }
diff --git a/spec/serializers/diff_file_metadata_entity_spec.rb b/spec/serializers/diff_file_metadata_entity_spec.rb
new file mode 100644
index 00000000000..3ce1ea49677
--- /dev/null
+++ b/spec/serializers/diff_file_metadata_entity_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe DiffFileMetadataEntity do
+ let(:merge_request) { create(:merge_request_with_diffs) }
+ let(:raw_diff_file) { merge_request.merge_request_diff.diffs.raw_diff_files.first }
+ let(:entity) { described_class.new(raw_diff_file) }
+
+ context 'as json' do
+ subject { entity.as_json }
+
+ it 'exposes the expected fields' do
+ expect(subject.keys).to contain_exactly(
+ :added_lines,
+ :removed_lines,
+ :new_path,
+ :old_path,
+ :new_file,
+ :deleted_file,
+ :submodule,
+ :file_identifier_hash,
+ :file_hash
+ )
+ end
+ end
+end
diff --git a/spec/serializers/diffs_entity_spec.rb b/spec/serializers/diffs_entity_spec.rb
index 7569493573b..a7446f14745 100644
--- a/spec/serializers/diffs_entity_spec.rb
+++ b/spec/serializers/diffs_entity_spec.rb
@@ -3,10 +3,11 @@
require 'spec_helper'
RSpec.describe DiffsEntity do
- let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
+
let(:request) { EntityRequest.new(project: project, current_user: user) }
- let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
let(:merge_request_diffs) { merge_request.merge_request_diffs }
let(:options) do
{ request: request, merge_request: merge_request, merge_request_diffs: merge_request_diffs }
@@ -30,6 +31,14 @@ RSpec.describe DiffsEntity do
)
end
+ context 'broken merge request' do
+ let(:merge_request) { create(:merge_request, :invalid, target_project: project, source_project: project) }
+
+ it 'renders without errors' do
+ expect { subject }.not_to raise_error
+ end
+ end
+
context "when a commit_id is passed" do
let(:commits) { merge_request.commits }
let(:entity) do
diff --git a/spec/serializers/group_group_link_entity_spec.rb b/spec/serializers/group_link/group_group_link_entity_spec.rb
index 9affe4af381..15bcbbcb1d6 100644
--- a/spec/serializers/group_group_link_entity_spec.rb
+++ b/spec/serializers/group_link/group_group_link_entity_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GroupGroupLinkEntity do
+RSpec.describe GroupLink::GroupGroupLinkEntity do
include_context 'group_group_link'
let_it_be(:current_user) { create(:user) }
@@ -13,15 +13,15 @@ RSpec.describe GroupGroupLinkEntity do
end
it 'matches json schema' do
- expect(entity.to_json).to match_schema('entities/group_group_link')
+ expect(entity.to_json).to match_schema('group_link/group_group_link')
end
- context 'a user with :admin_group_member permissions' do
+ context 'when current user has `:admin_group_member` permissions' do
before do
allow(entity).to receive(:can?).with(current_user, :admin_group_member, shared_group).and_return(true)
end
- it 'sets `can_update` and `can_remove` to `true`' do
+ it 'exposes `can_update` and `can_remove` as `true`' do
json = entity.as_json
expect(json[:can_update]).to be true
diff --git a/spec/serializers/group_group_link_serializer_spec.rb b/spec/serializers/group_link/group_group_link_serializer_spec.rb
index 0d977ea0a9a..a4ca32dae7b 100644
--- a/spec/serializers/group_group_link_serializer_spec.rb
+++ b/spec/serializers/group_link/group_group_link_serializer_spec.rb
@@ -2,12 +2,12 @@
require 'spec_helper'
-RSpec.describe GroupGroupLinkSerializer do
+RSpec.describe GroupLink::GroupGroupLinkSerializer do
include_context 'group_group_link'
subject(:json) { described_class.new.represent(shared_group.shared_with_group_links).to_json }
it 'matches json schema' do
- expect(json).to match_schema('group_group_links')
+ expect(json).to match_schema('group_link/group_group_links')
end
end
diff --git a/spec/serializers/group_link/group_link_entity_spec.rb b/spec/serializers/group_link/group_link_entity_spec.rb
new file mode 100644
index 00000000000..941445feaa2
--- /dev/null
+++ b/spec/serializers/group_link/group_link_entity_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GroupLink::GroupLinkEntity do
+ include_context 'group_group_link'
+
+ let(:entity) { described_class.new(group_group_link) }
+ let(:entity_hash) { entity.as_json }
+
+ it 'matches json schema' do
+ expect(entity.to_json).to match_schema('group_link/group_link')
+ end
+
+ it 'correctly exposes `valid_roles`' do
+ expect(entity_hash[:valid_roles]).to include(Gitlab::Access.options_with_owner)
+ end
+
+ it 'correctly exposes `shared_with_group.avatar_url`' do
+ avatar_url = 'https://gitlab.com/uploads/-/system/group/avatar/24/foobar.png?width=40'
+ allow(shared_with_group).to receive(:avatar_url).with(only_path: false, size: Member::AVATAR_SIZE).and_return(avatar_url)
+
+ expect(entity_hash[:shared_with_group][:avatar_url]).to match(avatar_url)
+ end
+end
diff --git a/spec/serializers/group_link/project_group_link_entity_spec.rb b/spec/serializers/group_link/project_group_link_entity_spec.rb
new file mode 100644
index 00000000000..0bb3d06933b
--- /dev/null
+++ b/spec/serializers/group_link/project_group_link_entity_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GroupLink::ProjectGroupLinkEntity do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:project_group_link) { create(:project_group_link) }
+ let(:entity) { described_class.new(project_group_link) }
+
+ before do
+ allow(entity).to receive(:current_user).and_return(current_user)
+ end
+
+ it 'matches json schema' do
+ expect(entity.to_json).to match_schema('group_link/project_group_link')
+ end
+
+ context 'when current user has `admin_project_member` permissions' do
+ before do
+ allow(entity).to receive(:can?).with(current_user, :admin_project_member, project_group_link.project).and_return(true)
+ end
+
+ it 'exposes `can_update` and `can_remove` as `true`' do
+ json = entity.as_json
+
+ expect(json[:can_update]).to be true
+ expect(json[:can_remove]).to be true
+ end
+ end
+end
diff --git a/spec/serializers/group_link/project_group_link_serializer_spec.rb b/spec/serializers/group_link/project_group_link_serializer_spec.rb
new file mode 100644
index 00000000000..ecd5d4d70e4
--- /dev/null
+++ b/spec/serializers/group_link/project_group_link_serializer_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GroupLink::ProjectGroupLinkSerializer do
+ let_it_be(:project_group_links) { create_list(:project_group_link, 1) }
+
+ subject(:json) { described_class.new.represent(project_group_links).to_json }
+
+ it 'matches json schema' do
+ expect(json).to match_schema('group_link/project_group_links')
+ end
+end
diff --git a/spec/serializers/member_entity_spec.rb b/spec/serializers/member_entity_spec.rb
index f34434188c1..883cb511abc 100644
--- a/spec/serializers/member_entity_spec.rb
+++ b/spec/serializers/member_entity_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe MemberEntity do
let_it_be(:current_user) { create(:user) }
- let(:entity) { described_class.new(member, { current_user: current_user, group: group }) }
+ let(:entity) { described_class.new(member, { current_user: current_user, group: group, source: source }) }
let(:entity_hash) { entity.as_json }
shared_examples 'member.json' do
@@ -40,8 +40,27 @@ RSpec.describe MemberEntity do
end
end
+ shared_examples 'is_direct_member' do
+ context 'when `source` is the same as `member.source`' do
+ let(:source) { direct_member_source }
+
+ it 'exposes `is_direct_member` as `true`' do
+ expect(entity_hash[:is_direct_member]).to be(true)
+ end
+ end
+
+ context 'when `source` is not the same as `member.source`' do
+ let(:source) { inherited_member_source }
+
+ it 'exposes `is_direct_member` as `false`' do
+ expect(entity_hash[:is_direct_member]).to be(false)
+ end
+ end
+ end
+
context 'group member' do
let(:group) { create(:group) }
+ let(:source) { group }
let(:member) { GroupMemberPresenter.new(create(:group_member, group: group), current_user: current_user) }
it_behaves_like 'member.json'
@@ -52,11 +71,19 @@ RSpec.describe MemberEntity do
it_behaves_like 'member.json'
it_behaves_like 'invite'
end
+
+ context 'is_direct_member' do
+ let(:direct_member_source) { group }
+ let(:inherited_member_source) { create(:group) }
+
+ it_behaves_like 'is_direct_member'
+ end
end
context 'project member' do
let(:project) { create(:project) }
let(:group) { project.group }
+ let(:source) { project }
let(:member) { ProjectMemberPresenter.new(create(:project_member, project: project), current_user: current_user) }
it_behaves_like 'member.json'
@@ -67,5 +94,12 @@ RSpec.describe MemberEntity do
it_behaves_like 'member.json'
it_behaves_like 'invite'
end
+
+ context 'is_direct_member' do
+ let(:direct_member_source) { project }
+ let(:inherited_member_source) { group }
+
+ it_behaves_like 'is_direct_member'
+ end
end
end
diff --git a/spec/serializers/member_serializer_spec.rb b/spec/serializers/member_serializer_spec.rb
index d3ec45fe9c4..af209c0191f 100644
--- a/spec/serializers/member_serializer_spec.rb
+++ b/spec/serializers/member_serializer_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe MemberSerializer do
let_it_be(:current_user) { create(:user) }
- subject { described_class.new.represent(members, { current_user: current_user, group: group }) }
+ subject { described_class.new.represent(members, { current_user: current_user, group: group, source: source }) }
shared_examples 'members.json' do
it 'matches json schema' do
@@ -17,6 +17,7 @@ RSpec.describe MemberSerializer do
context 'group member' do
let(:group) { create(:group) }
+ let(:source) { group }
let(:members) { present_members(create_list(:group_member, 1, group: group)) }
it_behaves_like 'members.json'
@@ -24,6 +25,7 @@ RSpec.describe MemberSerializer do
context 'project member' do
let(:project) { create(:project) }
+ let(:source) { project }
let(:group) { project.group }
let(:members) { present_members(create_list(:project_member, 1, project: project)) }
diff --git a/spec/serializers/merge_request_user_entity_spec.rb b/spec/serializers/merge_request_user_entity_spec.rb
index a2ad8e72845..dcd4ef6acfb 100644
--- a/spec/serializers/merge_request_user_entity_spec.rb
+++ b/spec/serializers/merge_request_user_entity_spec.rb
@@ -17,5 +17,23 @@ RSpec.describe MergeRequestUserEntity do
it 'exposes needed attributes' do
expect(subject).to include(:id, :name, :username, :state, :avatar_url, :web_url, :can_merge)
end
+
+ context 'when `status` is not preloaded' do
+ it 'does not expose the availability attribute' do
+ expect(subject).not_to include(:availability)
+ end
+ end
+
+ context 'when `status` is preloaded' do
+ before do
+ user.create_status!(availability: :busy)
+
+ user.status # make sure `status` is loaded
+ end
+
+ it 'exposes the availability attribute' do
+ expect(subject[:availability]).to eq('busy')
+ end
+ end
end
end
diff --git a/spec/serializers/merge_request_widget_entity_spec.rb b/spec/serializers/merge_request_widget_entity_spec.rb
index 42d843af596..926b33e8e1f 100644
--- a/spec/serializers/merge_request_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_widget_entity_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe MergeRequestWidgetEntity do
end
describe 'codequality report artifacts', :request_store do
- let(:merge_base_pipeline) { create(:ci_pipeline, :with_codequality_report, project: project) }
+ let(:merge_base_pipeline) { create(:ci_pipeline, :with_codequality_reports, project: project) }
before do
project.add_developer(user)
@@ -90,7 +90,7 @@ RSpec.describe MergeRequestWidgetEntity do
end
context 'with report artifacts' do
- let(:pipeline) { create(:ci_pipeline, :with_codequality_report, project: project) }
+ let(:pipeline) { create(:ci_pipeline, :with_codequality_reports, project: project) }
let(:generic_job_id) { pipeline.builds.first.id }
let(:merge_base_job_id) { merge_base_pipeline.builds.first.id }
@@ -100,7 +100,7 @@ RSpec.describe MergeRequestWidgetEntity do
end
context 'on pipelines for merged results' do
- let(:pipeline) { create(:ci_pipeline, :merged_result_pipeline, :with_codequality_report, project: project) }
+ let(:pipeline) { create(:ci_pipeline, :merged_result_pipeline, :with_codequality_reports, project: project) }
it 'returns URLs from the head_pipeline and merge_base_pipeline' do
expect(subject[:codeclimate][:head_path]).to include("/jobs/#{generic_job_id}/artifacts/download?file_type=codequality")
diff --git a/spec/serializers/pipeline_details_entity_spec.rb b/spec/serializers/pipeline_details_entity_spec.rb
index 74e91cc9cdd..2f54f45866b 100644
--- a/spec/serializers/pipeline_details_entity_spec.rb
+++ b/spec/serializers/pipeline_details_entity_spec.rb
@@ -10,8 +10,8 @@ RSpec.describe PipelineDetailsEntity do
described_class.represent(pipeline, request: request)
end
- it 'inherrits from PipelineEntity' do
- expect(described_class).to be < PipelineEntity
+ it 'inherits from PipelineEntity' do
+ expect(described_class).to be < Ci::PipelineEntity
end
before do
diff --git a/spec/serializers/pipeline_entity_spec.rb b/spec/serializers/pipeline_entity_spec.rb
deleted file mode 100644
index d7cd13edec8..00000000000
--- a/spec/serializers/pipeline_entity_spec.rb
+++ /dev/null
@@ -1,264 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe PipelineEntity do
- include Gitlab::Routing
-
- let_it_be(:project) { create(:project) }
- let_it_be(:user) { create(:user) }
- let(:request) { double('request') }
-
- before do
- stub_not_protect_default_branch
-
- allow(request).to receive(:current_user).and_return(user)
- allow(request).to receive(:project).and_return(project)
- end
-
- let(:entity) do
- described_class.represent(pipeline, request: request)
- end
-
- describe '#as_json' do
- subject { entity.as_json }
-
- context 'when pipeline is empty' do
- let(:pipeline) { create(:ci_empty_pipeline) }
-
- it 'contains required fields' do
- expect(subject).to include :id, :user, :path, :coverage, :source
- expect(subject).to include :ref, :commit
- expect(subject).to include :updated_at, :created_at
- end
-
- it 'excludes coverage data when disabled' do
- entity = described_class
- .represent(pipeline, request: request, disable_coverage: true)
-
- expect(entity.as_json).not_to include(:coverage)
- end
-
- it 'contains details' do
- expect(subject).to include :details
- expect(subject[:details])
- .to include :duration, :finished_at, :name
- expect(subject[:details][:status]).to include :icon, :favicon, :text, :label, :tooltip
- end
-
- it 'contains flags' do
- expect(subject).to include :flags
- expect(subject[:flags])
- .to include :stuck, :auto_devops, :yaml_errors,
- :retryable, :cancelable, :merge_request
- end
- end
-
- context 'when pipeline is retryable' do
- let(:project) { create(:project) }
-
- let(:pipeline) do
- create(:ci_pipeline, status: :success, project: project)
- end
-
- before do
- create(:ci_build, :failed, pipeline: pipeline)
- end
-
- it 'does not serialize stage builds' do
- subject.with_indifferent_access.dig(:details, :stages, 0).tap do |stage|
- expect(stage).not_to include(:groups, :latest_statuses, :retries)
- end
- end
-
- context 'user has ability to retry pipeline' do
- before do
- project.add_developer(user)
- end
-
- it 'contains retry path' do
- expect(subject[:retry_path]).to be_present
- end
- end
-
- context 'user does not have ability to retry pipeline' do
- it 'does not contain retry path' do
- expect(subject).not_to have_key(:retry_path)
- end
- end
- end
-
- context 'when pipeline is cancelable' do
- let(:project) { create(:project) }
-
- let(:pipeline) do
- create(:ci_pipeline, status: :running, project: project)
- end
-
- before do
- create(:ci_build, :pending, pipeline: pipeline)
- end
-
- it 'does not serialize stage builds' do
- subject.with_indifferent_access.dig(:details, :stages, 0).tap do |stage|
- expect(stage).not_to include(:groups, :latest_statuses, :retries)
- end
- end
-
- context 'user has ability to cancel pipeline' do
- before do
- project.add_developer(user)
- end
-
- it 'contains cancel path' do
- expect(subject[:cancel_path]).to be_present
- end
- end
-
- context 'user does not have ability to cancel pipeline' do
- it 'does not contain cancel path' do
- expect(subject).not_to have_key(:cancel_path)
- end
- end
- end
-
- context 'delete path' do
- context 'user has ability to delete pipeline' do
- let(:project) { create(:project, namespace: user.namespace) }
- let(:pipeline) { create(:ci_pipeline, project: project) }
-
- it 'contains delete path' do
- expect(subject[:delete_path]).to be_present
- end
- end
-
- context 'user does not have ability to delete pipeline' do
- let(:project) { create(:project) }
- let(:pipeline) { create(:ci_pipeline, project: project) }
-
- it 'does not contain delete path' do
- expect(subject).not_to have_key(:delete_path)
- end
- end
- end
-
- context 'when pipeline ref is empty' do
- let(:pipeline) { create(:ci_empty_pipeline) }
-
- before do
- allow(pipeline).to receive(:ref).and_return(nil)
- end
-
- it 'does not generate branch path' do
- expect(subject[:ref][:path]).to be_nil
- end
- end
-
- context 'when pipeline has a failure reason set' do
- let(:pipeline) { create(:ci_empty_pipeline) }
-
- before do
- pipeline.drop!(:config_error)
- end
-
- it 'has a correct failure reason' do
- expect(subject[:failure_reason])
- .to eq 'CI/CD YAML configuration error!'
- end
- end
-
- context 'when pipeline is detached merge request pipeline' do
- let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
- let(:project) { merge_request.target_project }
- let(:pipeline) { merge_request.pipelines_for_merge_request.first }
-
- it 'makes detached flag true' do
- expect(subject[:flags][:detached_merge_request_pipeline]).to be_truthy
- end
-
- it 'does not expose source sha and target sha' do
- expect(subject[:source_sha]).to be_nil
- expect(subject[:target_sha]).to be_nil
- end
-
- context 'when user is a developer' do
- before do
- project.add_developer(user)
- end
-
- it 'has merge request information' do
- expect(subject[:merge_request][:iid]).to eq(merge_request.iid)
-
- expect(project_merge_request_path(project, merge_request))
- .to include(subject[:merge_request][:path])
-
- expect(subject[:merge_request][:title]).to eq(merge_request.title)
-
- expect(subject[:merge_request][:source_branch])
- .to eq(merge_request.source_branch)
-
- expect(project_commits_path(project, merge_request.source_branch))
- .to include(subject[:merge_request][:source_branch_path])
-
- expect(subject[:merge_request][:target_branch])
- .to eq(merge_request.target_branch)
-
- expect(project_commits_path(project, merge_request.target_branch))
- .to include(subject[:merge_request][:target_branch_path])
- end
- end
-
- context 'when user is an external user' do
- it 'has no merge request information' do
- expect(subject[:merge_request]).to be_nil
- end
- end
- end
-
- context 'when pipeline is merge request pipeline' do
- let(:merge_request) { create(:merge_request, :with_merge_request_pipeline, merge_sha: 'abc') }
- let(:project) { merge_request.target_project }
- let(:pipeline) { merge_request.pipelines_for_merge_request.first }
-
- it 'makes detached flag false' do
- expect(subject[:flags][:detached_merge_request_pipeline]).to be_falsy
- end
-
- it 'makes atached flag true' do
- expect(subject[:flags][:merge_request_pipeline]).to be_truthy
- end
-
- it 'exposes source sha and target sha' do
- expect(subject[:source_sha]).to be_present
- expect(subject[:target_sha]).to be_present
- end
-
- it 'exposes merge request event type' do
- expect(subject[:merge_request_event_type]).to be_present
- end
- end
-
- context 'when pipeline has failed builds' do
- let_it_be(:pipeline) { create(:ci_pipeline, project: project, user: user) }
- let_it_be(:build) { create(:ci_build, :success, pipeline: pipeline) }
- let_it_be(:failed_1) { create(:ci_build, :failed, pipeline: pipeline) }
- let_it_be(:failed_2) { create(:ci_build, :failed, pipeline: pipeline) }
-
- context 'when the user can retry the pipeline' do
- it 'exposes these failed builds' do
- allow(entity).to receive(:can_retry?).and_return(true)
-
- expect(subject[:failed_builds].map { |b| b[:id] }).to contain_exactly(failed_1.id, failed_2.id)
- end
- end
-
- context 'when the user cannot retry the pipeline' do
- it 'is nil' do
- allow(entity).to receive(:can_retry?).and_return(false)
-
- expect(subject[:failed_builds]).to be_nil
- end
- end
- end
- end
-end
diff --git a/spec/serializers/user_serializer_spec.rb b/spec/serializers/user_serializer_spec.rb
index d54f33b6a23..f2ef1508098 100644
--- a/spec/serializers/user_serializer_spec.rb
+++ b/spec/serializers/user_serializer_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe UserSerializer do
context 'serializer with merge request context' do
let(:merge_request) { create(:merge_request) }
let(:project) { merge_request.project }
- let(:serializer) { described_class.new(merge_request_iid: merge_request.iid) }
+ let(:serializer) { described_class.new(current_user: user1, merge_request_iid: merge_request.iid) }
before do
allow(project).to(
diff --git a/spec/services/alert_management/process_prometheus_alert_service_spec.rb b/spec/services/alert_management/process_prometheus_alert_service_spec.rb
index 8f81c1967d5..fb1a23996e3 100644
--- a/spec/services/alert_management/process_prometheus_alert_service_spec.rb
+++ b/spec/services/alert_management/process_prometheus_alert_service_spec.rb
@@ -158,7 +158,7 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
it 'writes a warning to the log' do
expect(Gitlab::AppLogger).to receive(:warn).with(
- message: 'Unable to create AlertManagement::Alert',
+ message: 'Unable to create AlertManagement::Alert from Prometheus',
project_id: project.id,
alert_errors: { hosts: ['hosts array is over 255 chars'] }
)
diff --git a/spec/services/application_settings/update_service_spec.rb b/spec/services/application_settings/update_service_spec.rb
index 46483fede97..1352a595ba4 100644
--- a/spec/services/application_settings/update_service_spec.rb
+++ b/spec/services/application_settings/update_service_spec.rb
@@ -122,7 +122,7 @@ RSpec.describe ApplicationSettings::UpdateService do
it_behaves_like 'invalidates markdown cache', { asset_proxy_enabled: true }
it_behaves_like 'invalidates markdown cache', { asset_proxy_url: 'http://test.com' }
it_behaves_like 'invalidates markdown cache', { asset_proxy_secret_key: 'another secret' }
- it_behaves_like 'invalidates markdown cache', { asset_proxy_whitelist: ['domain.com'] }
+ it_behaves_like 'invalidates markdown cache', { asset_proxy_allowlist: ['domain.com'] }
context 'when also setting the local_markdown_version' do
let(:params) { { asset_proxy_enabled: true, local_markdown_version: 12 } }
diff --git a/spec/services/authorized_project_update/recalculate_for_user_range_service_spec.rb b/spec/services/authorized_project_update/recalculate_for_user_range_service_spec.rb
index a4637b6ba1c..0c944cad40c 100644
--- a/spec/services/authorized_project_update/recalculate_for_user_range_service_spec.rb
+++ b/spec/services/authorized_project_update/recalculate_for_user_range_service_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe AuthorizedProjectUpdate::RecalculateForUserRangeService do
it 'calls Users::RefreshAuthorizedProjectsService' do
users.each do |user|
expect(Users::RefreshAuthorizedProjectsService).to(
- receive(:new).with(user).and_call_original)
+ receive(:new).with(user, source: described_class.name).and_call_original)
end
range = users.map(&:id).minmax
diff --git a/spec/services/bulk_create_integration_service_spec.rb b/spec/services/bulk_create_integration_service_spec.rb
index 674382ee14f..3ac993972c6 100644
--- a/spec/services/bulk_create_integration_service_spec.rb
+++ b/spec/services/bulk_create_integration_service_spec.rb
@@ -43,46 +43,6 @@ RSpec.describe BulkCreateIntegrationService do
end
end
- shared_examples 'updates project callbacks' do
- it 'updates projects#has_external_issue_tracker for issue tracker services' do
- described_class.new(integration, batch, association).execute
-
- expect(project.reload.has_external_issue_tracker).to eq(true)
- expect(excluded_project.reload.has_external_issue_tracker).to eq(false)
- end
-
- context 'with an external wiki integration' do
- before do
- integration.update!(category: 'common', type: 'ExternalWikiService')
- end
-
- it 'updates projects#has_external_wiki for external wiki services' do
- described_class.new(integration, batch, association).execute
-
- expect(project.reload.has_external_wiki).to eq(true)
- expect(excluded_project.reload.has_external_wiki).to eq(false)
- end
- end
- end
-
- shared_examples 'does not update project callbacks' do
- it 'does not update projects#has_external_issue_tracker for issue tracker services' do
- described_class.new(integration, batch, association).execute
-
- expect(project.reload.has_external_issue_tracker).to eq(false)
- end
-
- context 'with an inactive external wiki integration' do
- let(:integration) { create(:external_wiki_service, :instance, active: false) }
-
- it 'does not update projects#has_external_wiki for external wiki services' do
- described_class.new(integration, batch, association).execute
-
- expect(project.reload.has_external_wiki).to eq(false)
- end
- end
- end
-
context 'passing an instance-level integration' do
let(:integration) { instance_integration }
let(:inherit_from_id) { integration.id }
@@ -95,15 +55,6 @@ RSpec.describe BulkCreateIntegrationService do
it_behaves_like 'creates integration from batch ids'
it_behaves_like 'updates inherit_from_id'
- it_behaves_like 'updates project callbacks'
-
- context 'when integration is not active' do
- before do
- integration.update!(active: false)
- end
-
- it_behaves_like 'does not update project callbacks'
- end
end
context 'with a group association' do
@@ -130,7 +81,6 @@ RSpec.describe BulkCreateIntegrationService do
it_behaves_like 'creates integration from batch ids'
it_behaves_like 'updates inherit_from_id'
- it_behaves_like 'updates project callbacks'
end
context 'with a group association' do
@@ -157,7 +107,6 @@ RSpec.describe BulkCreateIntegrationService do
let(:inherit_from_id) { integration.id }
it_behaves_like 'creates integration from batch ids'
- it_behaves_like 'updates project callbacks'
end
end
end
diff --git a/spec/services/captcha/captcha_verification_service_spec.rb b/spec/services/captcha/captcha_verification_service_spec.rb
new file mode 100644
index 00000000000..245e06703f5
--- /dev/null
+++ b/spec/services/captcha/captcha_verification_service_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Captcha::CaptchaVerificationService do
+ describe '#execute' do
+ let(:captcha_response) { nil }
+ let(:request) { double(:request) }
+ let(:service) { described_class.new }
+
+ subject { service.execute(captcha_response: captcha_response, request: request) }
+
+ context 'when there is no captcha_response' do
+ it 'returns false' do
+ expect(subject).to eq(false)
+ end
+ end
+
+ context 'when there is a captcha_response' do
+ let(:captcha_response) { 'abc123' }
+
+ before do
+ expect(Gitlab::Recaptcha).to receive(:load_configurations!)
+ end
+
+ it 'returns true' do
+ expect(service).to receive(:verify_recaptcha).with(response: captcha_response) { true }
+
+ expect(subject).to eq(true)
+ end
+
+ it 'has a request method which returns the request' do
+ subject
+
+ expect(service.send(:request)).to eq(request)
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
index 8df9b0c3e60..a3a616f0f64 100644
--- a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
+++ b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
@@ -76,6 +76,31 @@ RSpec.describe Ci::CreatePipelineService, '#execute' do
}
end
end
+
+ context 'with resource group' do
+ let(:config) do
+ <<~YAML
+ instrumentation_test:
+ stage: test
+ resource_group: iOS
+ trigger:
+ include:
+ - local: path/to/child.yml
+ YAML
+ end
+
+ # TODO: This test will be properly implemented in the next MR
+ # for https://gitlab.com/gitlab-org/gitlab/-/issues/39057.
+ it 'creates a bridge job but the resource group is still a no-op', :aggregate_failures do
+ pipeline = create_pipeline!
+
+ test = pipeline.statuses.find_by(name: 'instrumentation_test')
+
+ expect(pipeline).to be_persisted
+ expect(test).to be_a Ci::Bridge
+ expect(project.resource_groups.count).to eq(0)
+ end
+ end
end
describe 'child pipeline triggers' do
diff --git a/spec/services/ci/create_pipeline_service/rules_spec.rb b/spec/services/ci/create_pipeline_service/rules_spec.rb
index ac6c4c188e4..04ecac6a85a 100644
--- a/spec/services/ci/create_pipeline_service/rules_spec.rb
+++ b/spec/services/ci/create_pipeline_service/rules_spec.rb
@@ -145,20 +145,6 @@ RSpec.describe Ci::CreatePipelineService do
expect(find_job('job-2').options.dig(:allow_failure_criteria)).to be_nil
expect(find_job('job-3').options.dig(:allow_failure_criteria, :exit_codes)).to eq([42])
end
-
- context 'with ci_allow_failure_with_exit_codes disabled' do
- before do
- stub_feature_flags(ci_allow_failure_with_exit_codes: false)
- end
-
- it 'does not persist allow_failure_criteria' do
- expect(pipeline).to be_persisted
-
- expect(find_job('job-1').options.key?(:allow_failure_criteria)).to be_falsey
- expect(find_job('job-2').options.key?(:allow_failure_criteria)).to be_falsey
- expect(find_job('job-3').options.key?(:allow_failure_criteria)).to be_falsey
- end
- end
end
context 'if:' do
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index e1f1bdc41a1..2a9d73e75c0 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -952,9 +952,9 @@ RSpec.describe Ci::CreatePipelineService do
expect(result).to be_persisted
expect(deploy_job.resource_group.key).to eq(resource_group_key)
expect(project.resource_groups.count).to eq(1)
- expect(resource_group.builds.count).to eq(1)
+ expect(resource_group.processables.count).to eq(1)
expect(resource_group.resources.count).to eq(1)
- expect(resource_group.resources.first.build).to eq(nil)
+ expect(resource_group.resources.first.processable).to eq(nil)
end
context 'when resource group key includes predefined variables' do
diff --git a/spec/services/ci/generate_codequality_mr_diff_report_service_spec.rb b/spec/services/ci/generate_codequality_mr_diff_report_service_spec.rb
new file mode 100644
index 00000000000..5d747a09f2a
--- /dev/null
+++ b/spec/services/ci/generate_codequality_mr_diff_report_service_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::GenerateCodequalityMrDiffReportService do
+ let(:service) { described_class.new(project) }
+ let(:project) { create(:project, :repository) }
+
+ describe '#execute' do
+ subject { service.execute(base_pipeline, head_pipeline) }
+
+ context 'when head pipeline has codequality mr diff report' do
+ let!(:merge_request) { create(:merge_request, :with_codequality_mr_diff_reports, source_project: project) }
+ let!(:service) { described_class.new(project, nil, id: merge_request.id) }
+ let!(:head_pipeline) { merge_request.head_pipeline }
+ let!(:base_pipeline) { nil }
+
+ it 'returns status and data', :aggregate_failures do
+ expect_any_instance_of(Ci::PipelineArtifact) do |instance|
+ expect(instance).to receive(:present)
+ expect(instance).to receive(:for_files).with(merge_request.new_paths).and_call_original
+ end
+
+ expect(subject[:status]).to eq(:parsed)
+ expect(subject[:data]).to eq(files: {})
+ end
+ end
+
+ context 'when head pipeline does not have a codequality mr diff report' do
+ let!(:merge_request) { create(:merge_request, source_project: project) }
+ let!(:service) { described_class.new(project, nil, id: merge_request.id) }
+ let!(:head_pipeline) { merge_request.head_pipeline }
+ let!(:base_pipeline) { nil }
+
+ it 'returns status and error message' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:status_reason]).to include('An error occurred while fetching codequality mr diff reports.')
+ end
+ end
+
+ context 'when head pipeline has codequality mr diff report and no merge request associated' do
+ let!(:head_pipeline) { create(:ci_pipeline, :with_codequality_mr_diff_report, project: project) }
+ let!(:base_pipeline) { nil }
+
+ it 'returns status and error message' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:status_reason]).to include('An error occurred while fetching codequality mr diff reports.')
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service_spec.rb b/spec/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service_spec.rb
new file mode 100644
index 00000000000..0c48f15d726
--- /dev/null
+++ b/spec/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Ci::PipelineArtifacts::CreateCodeQualityMrDiffReportService do
+ describe '#execute' do
+ subject(:pipeline_artifact) { described_class.new.execute(pipeline) }
+
+ context 'when pipeline has codequality reports' do
+ let(:project) { create(:project, :repository) }
+
+ describe 'pipeline completed status' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:status, :result) do
+ :success | 1
+ :failed | 1
+ :canceled | 1
+ :skipped | 1
+ end
+
+ with_them do
+ let(:pipeline) { create(:ci_pipeline, :with_codequality_reports, status: status, project: project) }
+
+ it 'creates a pipeline artifact' do
+ expect { pipeline_artifact }.to change(Ci::PipelineArtifact, :count).by(result)
+ end
+
+ it 'persists the default file name' do
+ expect(pipeline_artifact.file.filename).to eq('code_quality_mr_diff.json')
+ end
+
+ it 'sets expire_at to 1 week' do
+ freeze_time do
+ expect(pipeline_artifact.expire_at).to eq(1.week.from_now)
+ end
+ end
+ end
+ end
+
+ context 'when pipeline artifact has already been created' do
+ let(:pipeline) { create(:ci_pipeline, :with_codequality_reports, project: project) }
+
+ it 'does not persist the same artifact twice' do
+ 2.times { described_class.new.execute(pipeline) }
+
+ expect(Ci::PipelineArtifact.count).to eq(1)
+ end
+ end
+ end
+
+ context 'when pipeline is not completed and codequality report does not exist' do
+ let(:pipeline) { create(:ci_pipeline, :running) }
+
+ it 'does not persist data' do
+ pipeline_artifact
+
+ expect(Ci::PipelineArtifact.count).to eq(0)
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/process_build_service_spec.rb b/spec/services/ci/process_build_service_spec.rb
index 6d2af81a6e8..42a92504839 100644
--- a/spec/services/ci/process_build_service_spec.rb
+++ b/spec/services/ci/process_build_service_spec.rb
@@ -146,9 +146,11 @@ RSpec.describe Ci::ProcessBuildService, '#execute' do
end
end
- context 'when FF skip_dag_manual_and_delayed_jobs is disabled' do
+ context 'when FF skip_dag_manual_and_delayed_jobs is disabled on the project' do
+ let_it_be(:other_project) { create(:project) }
+
before do
- stub_feature_flags(skip_dag_manual_and_delayed_jobs: false)
+ stub_feature_flags(skip_dag_manual_and_delayed_jobs: other_project)
end
where(:build_when, :current_status, :after_status) do
diff --git a/spec/services/ci/process_pipeline_service_spec.rb b/spec/services/ci/process_pipeline_service_spec.rb
index a7889f0644d..d316c9a262b 100644
--- a/spec/services/ci/process_pipeline_service_spec.rb
+++ b/spec/services/ci/process_pipeline_service_spec.rb
@@ -50,6 +50,35 @@ RSpec.describe Ci::ProcessPipelineService do
expect(all_builds.retried).to contain_exactly(build_retried)
end
+ context 'counter ci_legacy_update_jobs_as_retried_total' do
+ let(:counter) { double(increment: true) }
+
+ before do
+ allow(Gitlab::Metrics).to receive(:counter).and_call_original
+ allow(Gitlab::Metrics).to receive(:counter)
+ .with(:ci_legacy_update_jobs_as_retried_total, anything)
+ .and_return(counter)
+ end
+
+ it 'increments the counter' do
+ expect(counter).to receive(:increment)
+
+ subject.execute
+ end
+
+ context 'when the previous build already has the retried column set to true' do
+ before do
+ build_retried.update_columns(retried: true)
+ end
+
+ it 'does not increment the counter' do
+ expect(counter).not_to receive(:increment)
+
+ subject.execute
+ end
+ end
+ end
+
def create_build(name, **opts)
create(:ci_build, :created, pipeline: pipeline, name: name, **opts)
end
diff --git a/spec/services/ci/prometheus_metrics/observe_histograms_service_spec.rb b/spec/services/ci/prometheus_metrics/observe_histograms_service_spec.rb
new file mode 100644
index 00000000000..17330f5b77d
--- /dev/null
+++ b/spec/services/ci/prometheus_metrics/observe_histograms_service_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::PrometheusMetrics::ObserveHistogramsService do
+ let_it_be(:project) { create(:project) }
+ let(:params) { {} }
+
+ subject(:execute) { described_class.new(project, params).execute }
+
+ before do
+ Gitlab::Metrics.reset_registry!
+ end
+
+ context 'with empty data' do
+ it 'does not raise errors' do
+ is_expected.to be_success
+ end
+ end
+
+ context 'observes metrics successfully' do
+ let(:params) do
+ {
+ histograms: [
+ { name: 'pipeline_graph_link_calculation_duration_seconds', value: '1' },
+ { name: 'pipeline_graph_links_per_job_ratio', value: '0.9' }
+ ]
+ }
+ end
+
+ it 'increments the metrics' do
+ execute
+
+ expect(histogram_data).to match(a_hash_including({ 0.8 => 0.0, 1 => 1.0, 2 => 1.0 }))
+
+ expect(histogram_data(:pipeline_graph_links_per_job_ratio))
+ .to match(a_hash_including({ 0.8 => 0.0, 0.9 => 1.0, 1 => 1.0 }))
+ end
+
+    it 'returns success with a :created status and an empty payload' do
+ is_expected.to be_success
+ expect(subject.http_status).to eq(:created)
+ expect(subject.payload).to eq({})
+ end
+ end
+
+ context 'with unknown histograms' do
+ let(:params) do
+ { histograms: [{ name: 'chunky_bacon', value: '4' }] }
+ end
+
+ it 'raises ActiveRecord::RecordNotFound error' do
+ expect { subject }.to raise_error ActiveRecord::RecordNotFound
+ end
+ end
+
+ context 'with feature flag disabled' do
+ before do
+ stub_feature_flags(ci_accept_frontend_prometheus_metrics: false)
+ end
+
+ let(:params) do
+ {
+ histograms: [
+ { name: 'pipeline_graph_link_calculation_duration_seconds', value: '4' }
+ ]
+ }
+ end
+
+ it 'does not register the metrics' do
+ execute
+
+ expect(histogram_data).to be_nil
+ end
+
+    it 'returns success with an :accepted status and an empty payload' do
+ is_expected.to be_success
+ expect(subject.http_status).to eq(:accepted)
+ expect(subject.payload).to eq({})
+ end
+ end
+
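+  # Reads the observed bucket counts for the given histogram from the metrics
+  # registry, labelled with the project's full path; returns nil when the
+  # histogram was never registered.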
+ def histogram_data(name = :pipeline_graph_link_calculation_duration_seconds)
+ Gitlab::Metrics.registry.get(name)&.get({ project: project.full_path })
+ end
+end
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index 0cdc8d2c870..e4848bf7c16 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -455,7 +455,7 @@ module Ci
end
before do
- stub_feature_flags(ci_disable_validates_dependencies: false)
+ stub_feature_flags(ci_validate_build_dependencies_override: false)
end
let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0) }
@@ -470,7 +470,7 @@ module Ci
context 'when validates for dependencies is enabled' do
before do
- stub_feature_flags(ci_disable_validates_dependencies: false)
+ stub_feature_flags(ci_validate_build_dependencies_override: false)
end
it_behaves_like 'validation is active'
@@ -478,7 +478,7 @@ module Ci
context 'when validates for dependencies is disabled' do
before do
- stub_feature_flags(ci_disable_validates_dependencies: true)
+ stub_feature_flags(ci_validate_build_dependencies_override: true)
end
it_behaves_like 'validation is not active'
diff --git a/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb b/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb
index c375e5a2fa3..40a2f954786 100644
--- a/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb
+++ b/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb
@@ -10,7 +10,12 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
let(:manifest) { dependency_proxy_manifest.file.read }
let(:group) { dependency_proxy_manifest.group }
let(:token) { Digest::SHA256.hexdigest('123') }
- let(:headers) { { 'docker-content-digest' => dependency_proxy_manifest.digest } }
+ let(:headers) do
+ {
+ 'docker-content-digest' => dependency_proxy_manifest.digest,
+ 'content-type' => dependency_proxy_manifest.content_type
+ }
+ end
describe '#execute' do
subject { described_class.new(group, image, tag, token).execute }
@@ -18,22 +23,37 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
context 'when no manifest exists' do
let_it_be(:image) { 'new-image' }
- before do
- stub_manifest_head(image, tag, digest: dependency_proxy_manifest.digest)
- stub_manifest_download(image, tag, headers: headers)
+ shared_examples 'downloading the manifest' do
+ it 'downloads manifest from remote registry if there is no cached one', :aggregate_failures do
+ expect { subject }.to change { group.dependency_proxy_manifests.count }.by(1)
+ expect(subject[:status]).to eq(:success)
+ expect(subject[:manifest]).to be_a(DependencyProxy::Manifest)
+ expect(subject[:manifest]).to be_persisted
+ end
end
- it 'downloads manifest from remote registry if there is no cached one', :aggregate_failures do
- expect { subject }.to change { group.dependency_proxy_manifests.count }.by(1)
- expect(subject[:status]).to eq(:success)
- expect(subject[:manifest]).to be_a(DependencyProxy::Manifest)
- expect(subject[:manifest]).to be_persisted
+ context 'successful head request' do
+ before do
+ stub_manifest_head(image, tag, headers: headers)
+ stub_manifest_download(image, tag, headers: headers)
+ end
+
+ it_behaves_like 'downloading the manifest'
+ end
+
+ context 'failed head request' do
+ before do
+ stub_manifest_head(image, tag, status: :error)
+ stub_manifest_download(image, tag, headers: headers)
+ end
+
+ it_behaves_like 'downloading the manifest'
end
end
context 'when manifest exists' do
before do
- stub_manifest_head(image, tag, digest: dependency_proxy_manifest.digest)
+ stub_manifest_head(image, tag, headers: headers)
end
shared_examples 'using the cached manifest' do
@@ -48,15 +68,17 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
context 'when digest is stale' do
let(:digest) { 'new-digest' }
+ let(:content_type) { 'new-content-type' }
before do
- stub_manifest_head(image, tag, digest: digest)
- stub_manifest_download(image, tag, headers: { 'docker-content-digest' => digest })
+ stub_manifest_head(image, tag, headers: { 'docker-content-digest' => digest, 'content-type' => content_type })
+ stub_manifest_download(image, tag, headers: { 'docker-content-digest' => digest, 'content-type' => content_type })
end
it 'downloads the new manifest and updates the existing record', :aggregate_failures do
expect(subject[:status]).to eq(:success)
expect(subject[:manifest]).to eq(dependency_proxy_manifest)
+ expect(subject[:manifest].content_type).to eq(content_type)
expect(subject[:manifest].digest).to eq(digest)
end
end
diff --git a/spec/services/dependency_proxy/head_manifest_service_spec.rb b/spec/services/dependency_proxy/head_manifest_service_spec.rb
index 7c7ebe4d181..9c1e4d650f8 100644
--- a/spec/services/dependency_proxy/head_manifest_service_spec.rb
+++ b/spec/services/dependency_proxy/head_manifest_service_spec.rb
@@ -8,12 +8,19 @@ RSpec.describe DependencyProxy::HeadManifestService do
let(:tag) { 'latest' }
let(:token) { Digest::SHA256.hexdigest('123') }
let(:digest) { '12345' }
+ let(:content_type) { 'foo' }
+ let(:headers) do
+ {
+ 'docker-content-digest' => digest,
+ 'content-type' => content_type
+ }
+ end
subject { described_class.new(image, tag, token).execute }
context 'remote request is successful' do
before do
- stub_manifest_head(image, tag, digest: digest)
+ stub_manifest_head(image, tag, headers: headers)
end
it { expect(subject[:status]).to eq(:success) }
diff --git a/spec/services/dependency_proxy/pull_manifest_service_spec.rb b/spec/services/dependency_proxy/pull_manifest_service_spec.rb
index b760839d1fb..b3053174cc0 100644
--- a/spec/services/dependency_proxy/pull_manifest_service_spec.rb
+++ b/spec/services/dependency_proxy/pull_manifest_service_spec.rb
@@ -9,7 +9,10 @@ RSpec.describe DependencyProxy::PullManifestService do
let(:token) { Digest::SHA256.hexdigest('123') }
let(:manifest) { { foo: 'bar' }.to_json }
let(:digest) { '12345' }
- let(:headers) { { 'docker-content-digest' => digest } }
+ let(:content_type) { 'foo' }
+ let(:headers) do
+ { 'docker-content-digest' => digest, 'content-type' => content_type }
+ end
subject { described_class.new(image, tag, token).execute_with_manifest(&method(:check_response)) }
@@ -25,6 +28,7 @@ RSpec.describe DependencyProxy::PullManifestService do
expect(response[:status]).to eq(:success)
expect(response[:file].read).to eq(manifest)
expect(response[:digest]).to eq(digest)
+ expect(response[:content_type]).to eq(content_type)
end
subject
diff --git a/spec/services/deployments/create_service_spec.rb b/spec/services/deployments/create_service_spec.rb
index 2d157c9d114..0bb5949ddb1 100644
--- a/spec/services/deployments/create_service_spec.rb
+++ b/spec/services/deployments/create_service_spec.rb
@@ -41,6 +41,27 @@ RSpec.describe Deployments::CreateService do
expect(service.execute).to be_persisted
end
+
+ context 'when the last deployment has the same parameters' do
+ let(:params) do
+ {
+ sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0',
+ ref: 'master',
+ tag: false,
+ status: 'success'
+ }
+ end
+
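+      # Executing the service twice with identical attributes should reuse the
+      # existing deployment and skip the follow-up workers on the second run.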
+ it 'does not create a new deployment' do
+ described_class.new(environment, user, params).execute
+
+ expect(Deployments::UpdateEnvironmentWorker).not_to receive(:perform_async)
+ expect(Deployments::LinkMergeRequestWorker).not_to receive(:perform_async)
+ expect(Deployments::ExecuteHooksWorker).not_to receive(:perform_async)
+
+ described_class.new(environment.reload, user, params).execute
+ end
+ end
end
describe '#deployment_attributes' do
diff --git a/spec/services/discussions/resolve_service_spec.rb b/spec/services/discussions/resolve_service_spec.rb
index 42c4ef52741..2e30455eb0a 100644
--- a/spec/services/discussions/resolve_service_spec.rb
+++ b/spec/services/discussions/resolve_service_spec.rb
@@ -24,6 +24,13 @@ RSpec.describe Discussions::ResolveService do
expect(discussion).to be_resolved
end
+ it 'tracks thread resolve usage data' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_resolve_thread_action).with(user: user)
+
+ service.execute
+ end
+
it 'executes the notification service' do
expect_next_instance_of(MergeRequests::ResolvedDiscussionNotificationService) do |instance|
expect(instance).to receive(:execute).with(discussion.noteable)
@@ -101,6 +108,13 @@ RSpec.describe Discussions::ResolveService do
service.execute
end
+ it 'does not track thread resolve usage data' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .not_to receive(:track_resolve_thread_action).with(user: user)
+
+ service.execute
+ end
+
it 'does not schedule an auto-merge' do
expect(AutoMergeProcessWorker).not_to receive(:perform_async)
diff --git a/spec/services/discussions/unresolve_service_spec.rb b/spec/services/discussions/unresolve_service_spec.rb
new file mode 100644
index 00000000000..6298a00a474
--- /dev/null
+++ b/spec/services/discussions/unresolve_service_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Discussions::UnresolveService do
+ describe "#execute" do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user, developer_projects: [project]) }
+ let_it_be(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds, source_project: project) }
+ let(:discussion) { create(:diff_note_on_merge_request, noteable: merge_request, project: project).to_discussion }
+
+ let(:service) { described_class.new(discussion, user) }
+
+ before do
+ project.add_developer(user)
+ discussion.resolve!(user)
+ end
+
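+    # The discussion is resolved in the setup above, so each example exercises
+    # the resolved -> unresolved transition.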
+ it "unresolves the discussion" do
+ service.execute
+
+ expect(discussion).not_to be_resolved
+ end
+
+ it "counts the unresolve event" do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_unresolve_thread_action).with(user: user)
+
+ service.execute
+ end
+ end
+end
diff --git a/spec/services/feature_flags/create_service_spec.rb b/spec/services/feature_flags/create_service_spec.rb
index e115d8098c9..128fab114fe 100644
--- a/spec/services/feature_flags/create_service_spec.rb
+++ b/spec/services/feature_flags/create_service_spec.rb
@@ -66,18 +66,6 @@ RSpec.describe FeatureFlags::CreateService do
subject
end
- context 'the feature flag is disabled' do
- before do
- stub_feature_flags(jira_sync_feature_flags: false)
- end
-
- it 'does not sync the feature flag to Jira' do
- expect(::JiraConnect::SyncFeatureFlagsWorker).not_to receive(:perform_async)
-
- subject
- end
- end
-
it 'creates audit event' do
expected_message = 'Created feature flag <strong>feature_flag</strong> '\
'with description <strong>"description"</strong>. '\
diff --git a/spec/services/feature_flags/update_service_spec.rb b/spec/services/feature_flags/update_service_spec.rb
index 8c4055ddd9e..9639cf3081d 100644
--- a/spec/services/feature_flags/update_service_spec.rb
+++ b/spec/services/feature_flags/update_service_spec.rb
@@ -26,18 +26,6 @@ RSpec.describe FeatureFlags::UpdateService do
expect(subject[:status]).to eq(:success)
end
- context 'the feature flag is disabled' do
- before do
- stub_feature_flags(jira_sync_feature_flags: false)
- end
-
- it 'does not sync the feature flag to Jira' do
- expect(::JiraConnect::SyncFeatureFlagsWorker).not_to receive(:perform_async)
-
- subject
- end
- end
-
it 'syncs the feature flag to Jira' do
expect(::JiraConnect::SyncFeatureFlagsWorker).to receive(:perform_async).with(Integer, Integer)
diff --git a/spec/services/git/branch_hooks_service_spec.rb b/spec/services/git/branch_hooks_service_spec.rb
index a5290f0be68..bf2a7390258 100644
--- a/spec/services/git/branch_hooks_service_spec.rb
+++ b/spec/services/git/branch_hooks_service_spec.rb
@@ -93,12 +93,12 @@ RSpec.describe Git::BranchHooksService do
describe 'Push Event' do
let(:event) { Event.pushed_action.first }
- before do
- service.execute
- end
+ subject(:execute_service) { service.execute }
context "with an existing branch" do
it 'generates a push event with one commit' do
+ execute_service
+
expect(event).to be_an_instance_of(PushEvent)
expect(event.project).to eq(project)
expect(event).to be_pushed_action
@@ -109,12 +109,87 @@ RSpec.describe Git::BranchHooksService do
expect(event.push_event_payload.ref).to eq('master')
expect(event.push_event_payload.commit_count).to eq(1)
end
+
+ context 'with changing CI config' do
+ before do
+ allow_next_instance_of(Gitlab::Git::Diff) do |diff|
+ allow(diff).to receive(:new_path).and_return('.gitlab-ci.yml')
+ end
+
+ allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
+ end
+
+ let!(:commit_author) { create(:user, email: sample_commit.author_email) }
+
+ let(:tracking_params) do
+ ['o_pipeline_authoring_unique_users_committing_ciconfigfile', values: commit_author.id]
+ end
+
+ it 'tracks the event' do
+ execute_service
+
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter)
+ .to have_received(:track_event).with(*tracking_params)
+ end
+
+ context 'when the FF usage_data_unique_users_committing_ciconfigfile is disabled' do
+ before do
+ stub_feature_flags(usage_data_unique_users_committing_ciconfigfile: false)
+ end
+
+ it 'does not track the event' do
+ execute_service
+
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter)
+ .not_to have_received(:track_event).with(*tracking_params)
+ end
+ end
+
+ context 'when usage ping is disabled' do
+ before do
+ stub_application_setting(usage_ping_enabled: false)
+ end
+
+ it 'does not track the event' do
+ execute_service
+
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter)
+ .not_to have_received(:track_event).with(*tracking_params)
+ end
+ end
+
+ context 'when the branch is not the main branch' do
+ let(:branch) { 'feature' }
+
+ it 'does not track the event' do
+ execute_service
+
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter)
+ .not_to have_received(:track_event).with(*tracking_params)
+ end
+ end
+
+ context 'when the CI config is a different path' do
+ before do
+ project.ci_config_path = 'config/ci.yml'
+ end
+
+ it 'does not track the event' do
+ execute_service
+
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter)
+ .not_to have_received(:track_event).with(*tracking_params)
+ end
+ end
+ end
end
context "with a new branch" do
let(:oldrev) { Gitlab::Git::BLANK_SHA }
it 'generates a push event with more than one commit' do
+ execute_service
+
expect(event).to be_an_instance_of(PushEvent)
expect(event.project).to eq(project)
expect(event).to be_pushed_action
@@ -131,6 +206,8 @@ RSpec.describe Git::BranchHooksService do
let(:newrev) { Gitlab::Git::BLANK_SHA }
it 'generates a push event with no commits' do
+ execute_service
+
expect(event).to be_an_instance_of(PushEvent)
expect(event.project).to eq(project)
expect(event).to be_pushed_action
diff --git a/spec/services/git/wiki_push_service_spec.rb b/spec/services/git/wiki_push_service_spec.rb
index cd38f2e97fb..0431e06723f 100644
--- a/spec/services/git/wiki_push_service_spec.rb
+++ b/spec/services/git/wiki_push_service_spec.rb
@@ -257,6 +257,56 @@ RSpec.describe Git::WikiPushService, services: true do
end
end
+ describe '#perform_housekeeping', :clean_gitlab_redis_shared_state do
+ let(:housekeeping) { Repositories::HousekeepingService.new(wiki) }
+
+ subject { create_service(current_sha).execute }
+
+ before do
+ allow(Repositories::HousekeepingService).to receive(:new).and_return(housekeeping)
+ end
+
+ it 'does not perform housekeeping when not needed' do
+ expect(housekeeping).not_to receive(:execute)
+
+ subject
+ end
+
+ context 'when housekeeping is needed' do
+ before do
+ allow(housekeeping).to receive(:needed?).and_return(true)
+ end
+
+ it 'performs housekeeping' do
+ expect(housekeeping).to receive(:execute)
+
+ subject
+ end
+
+ it 'does not raise an exception' do
+ allow(housekeeping).to receive(:try_obtain_lease).and_return(false)
+
+ expect { subject }.not_to raise_error
+ end
+
+ context 'when feature flag :wiki_housekeeping is disabled' do
+ it 'does not perform housekeeping' do
+ stub_feature_flags(wiki_housekeeping: false)
+
+ expect(housekeeping).not_to receive(:execute)
+
+ subject
+ end
+ end
+ end
+
+ it 'increments the push counter' do
+ expect(housekeeping).to receive(:increment!)
+
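+    # MergeToRefService writes the merge ref for the merge request, so the
+    # service under test has a merge_ref_head to build the merge head diff from.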
+ subject
+ end
+ end
+
# In order to construct the correct GitPostReceive object that represents the
# changes we are applying, we need to describe the changes between old-ref and
# new-ref. Old ref (the base sha) we have to capture before we perform any
diff --git a/spec/services/groups/import_export/export_service_spec.rb b/spec/services/groups/import_export/export_service_spec.rb
index 690bcb94556..d6ce40f413b 100644
--- a/spec/services/groups/import_export/export_service_spec.rb
+++ b/spec/services/groups/import_export/export_service_spec.rb
@@ -71,6 +71,16 @@ RSpec.describe Groups::ImportExport::ExportService do
service.execute
end
+ it 'compresses and removes tmp files' do
+ expect(group.import_export_upload).to be_nil
+ expect(Gitlab::ImportExport::Saver).to receive(:new).and_call_original
+
+ service.execute
+
+ expect(Dir.exist?(shared.archive_path)).to eq false
+ expect(File.exist?(group.import_export_upload.export_file.path)).to eq true
+ end
+
it 'notifies the user' do
expect_next_instance_of(NotificationService) do |instance|
expect(instance).to receive(:group_was_exported)
@@ -134,7 +144,7 @@ RSpec.describe Groups::ImportExport::ExportService do
expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
expect(group.import_export_upload).to be_nil
- expect(Dir.exist?(shared.base_path)).to eq(false)
+ expect(Dir.exist?(shared.archive_path)).to eq(false)
end
it 'notifies the user about failed group export' do
@@ -159,7 +169,7 @@ RSpec.describe Groups::ImportExport::ExportService do
expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
expect(group.import_export_upload).to be_nil
- expect(Dir.exist?(shared.base_path)).to eq(false)
+ expect(Dir.exist?(shared.archive_path)).to eq(false)
end
it 'notifies logger' do
diff --git a/spec/services/groups/open_issues_count_service_spec.rb b/spec/services/groups/open_issues_count_service_spec.rb
new file mode 100644
index 00000000000..8bbb1c90c6b
--- /dev/null
+++ b/spec/services/groups/open_issues_count_service_spec.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Groups::OpenIssuesCountService, :use_clean_rails_memory_store_caching do
+  let_it_be(:group) { create(:group, :public) }
+ let_it_be(:project) { create(:project, :public, namespace: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:issue) { create(:issue, :opened, project: project) }
+ let_it_be(:confidential) { create(:issue, :opened, confidential: true, project: project) }
+ let_it_be(:closed) { create(:issue, :closed, project: project) }
+
+ subject { described_class.new(group, user) }
+
+ describe '#relation_for_count' do
+ before do
+ allow(IssuesFinder).to receive(:new).and_call_original
+ end
+
+ it 'uses the IssuesFinder to scope issues' do
+ expect(IssuesFinder)
+ .to receive(:new)
+ .with(user, group_id: group.id, state: 'opened', non_archived: true, include_subgroups: true, public_only: true)
+
+ subject.count
+ end
+ end
+
+ describe '#count' do
+ context 'when user is nil' do
+ it 'does not include confidential issues in the issue count' do
+ expect(described_class.new(group).count).to eq(1)
+ end
+ end
+
+ context 'when user is provided' do
+ context 'when user can read confidential issues' do
+ before do
+ group.add_reporter(user)
+ end
+
+ it 'returns the right count with confidential issues' do
+ expect(subject.count).to eq(2)
+ end
+ end
+
+ context 'when user cannot read confidential issues' do
+ before do
+ group.add_guest(user)
+ end
+
+ it 'does not include confidential issues' do
+ expect(subject.count).to eq(1)
+ end
+ end
+
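+      # Only counts above CACHED_COUNT_THRESHOLD are written to the cache;
+      # smaller counts are recomputed on every call.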
+ context 'with different cache values' do
+ let(:public_count_key) { subject.cache_key(described_class::PUBLIC_COUNT_KEY) }
+ let(:under_threshold) { described_class::CACHED_COUNT_THRESHOLD - 1 }
+ let(:over_threshold) { described_class::CACHED_COUNT_THRESHOLD + 1 }
+
+ context 'when cache is empty' do
+ before do
+ Rails.cache.delete(public_count_key)
+ end
+
+ it 'refreshes cache if value over threshold' do
+ allow(subject).to receive(:uncached_count).and_return(over_threshold)
+
+ expect(subject.count).to eq(over_threshold)
+ expect(Rails.cache.read(public_count_key)).to eq(over_threshold)
+ end
+
+ it 'does not refresh cache if value under threshold' do
+ allow(subject).to receive(:uncached_count).and_return(under_threshold)
+
+ expect(subject.count).to eq(under_threshold)
+ expect(Rails.cache.read(public_count_key)).to be_nil
+ end
+ end
+
+ context 'when cached count is under the threshold value' do
+ before do
+ Rails.cache.write(public_count_key, under_threshold)
+ end
+
+ it 'does not refresh cache' do
+ expect(Rails.cache).not_to receive(:write)
+ expect(subject.count).to eq(under_threshold)
+ end
+ end
+
+ context 'when cached count is over the threshold value' do
+ before do
+ Rails.cache.write(public_count_key, over_threshold)
+ end
+
+ it 'does not refresh cache' do
+ expect(Rails.cache).not_to receive(:write)
+ expect(subject.count).to eq(over_threshold)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/integrations/test/project_service_spec.rb b/spec/services/integrations/test/project_service_spec.rb
index dd603765d59..aaaf8a11cf8 100644
--- a/spec/services/integrations/test/project_service_spec.rb
+++ b/spec/services/integrations/test/project_service_spec.rb
@@ -3,11 +3,10 @@
require 'spec_helper'
RSpec.describe Integrations::Test::ProjectService do
- let(:user) { double('user') }
-
describe '#execute' do
- let(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
let(:integration) { create(:slack_service, project: project) }
+ let(:user) { project.owner }
let(:event) { nil }
let(:sample_data) { { data: 'sample' } }
let(:success_result) { { success: true, result: {} } }
@@ -70,16 +69,34 @@ RSpec.describe Integrations::Test::ProjectService do
end
it 'executes integration' do
- allow(project).to receive(:notes).and_return([Note.new])
+ create(:note, project: project)
+
allow(Gitlab::DataBuilder::Note).to receive(:build).and_return(sample_data)
+ allow_next_instance_of(NotesFinder) do |finder|
+ allow(finder).to receive(:execute).and_return(Note.all)
+ end
expect(integration).to receive(:test).with(sample_data).and_return(success_result)
expect(subject).to eq(success_result)
end
+
+ context 'when the query optimization feature flag is disabled' do
+ before do
+ stub_feature_flags(integrations_test_webhook_optimizations: false)
+ end
+
+ it 'executes the old query' do
+ allow(Gitlab::DataBuilder::Note).to receive(:build).and_return(sample_data)
+
+ expect(NotesFinder).not_to receive(:new)
+ expect(project).to receive(:notes).and_return([Note.new])
+ expect(integration).to receive(:test).with(sample_data).and_return(success_result)
+ expect(subject).to eq(success_result)
+ end
+ end
end
- context 'issue' do
- let(:event) { 'issue' }
+ shared_examples_for 'a test of an integration that operates on issues' do
let(:issue) { build(:issue) }
it 'returns error message if not enough data' do
@@ -90,32 +107,45 @@ RSpec.describe Integrations::Test::ProjectService do
it 'executes integration' do
allow(project).to receive(:issues).and_return([issue])
allow(issue).to receive(:to_hook_data).and_return(sample_data)
+ allow_next_instance_of(IssuesFinder) do |finder|
+ allow(finder).to receive(:execute).and_return([issue])
+ end
expect(integration).to receive(:test).with(sample_data).and_return(success_result)
expect(subject).to eq(success_result)
end
- end
- context 'confidential_issue' do
- let(:event) { 'confidential_issue' }
- let(:issue) { build(:issue) }
+ context 'when the query optimization feature flag is disabled' do
+ before do
+ stub_feature_flags(integrations_test_webhook_optimizations: false)
+ end
- it 'returns error message if not enough data' do
- expect(integration).not_to receive(:test)
- expect(subject).to include({ status: :error, message: 'Ensure the project has issues.' })
+ it 'executes the old query' do
+ allow(issue).to receive(:to_hook_data).and_return(sample_data)
+
+ expect(IssuesFinder).not_to receive(:new)
+ expect(project).to receive(:issues).and_return([issue])
+ expect(integration).to receive(:test).with(sample_data).and_return(success_result)
+ expect(subject).to eq(success_result)
+ end
end
+ end
- it 'executes integration' do
- allow(project).to receive(:issues).and_return([issue])
- allow(issue).to receive(:to_hook_data).and_return(sample_data)
+ context 'issue' do
+ let(:event) { 'issue' }
- expect(integration).to receive(:test).with(sample_data).and_return(success_result)
- expect(subject).to eq(success_result)
- end
+ it_behaves_like 'a test of an integration that operates on issues'
+ end
+
+ context 'confidential_issue' do
+ let(:event) { 'confidential_issue' }
+
+ it_behaves_like 'a test of an integration that operates on issues'
end
context 'merge_request' do
let(:event) { 'merge_request' }
+ let(:merge_request) { build(:merge_request) }
it 'returns error message if not enough data' do
expect(integration).not_to receive(:test)
@@ -123,16 +153,34 @@ RSpec.describe Integrations::Test::ProjectService do
end
it 'executes integration' do
- create(:merge_request, source_project: project)
- allow_any_instance_of(MergeRequest).to receive(:to_hook_data).and_return(sample_data)
+ allow(merge_request).to receive(:to_hook_data).and_return(sample_data)
+ allow_next_instance_of(MergeRequestsFinder) do |finder|
+ allow(finder).to receive(:execute).and_return([merge_request])
+ end
expect(integration).to receive(:test).with(sample_data).and_return(success_result)
- expect(subject).to eq(success_result)
+ expect(subject).to include(success_result)
+ end
+
+ context 'when the query optimization feature flag is disabled' do
+ before do
+ stub_feature_flags(integrations_test_webhook_optimizations: false)
+ end
+
+ it 'executes the old query' do
+ expect(MergeRequestsFinder).not_to receive(:new)
+ expect(project).to receive(:merge_requests).and_return([merge_request])
+
+ allow(merge_request).to receive(:to_hook_data).and_return(sample_data)
+
+ expect(integration).to receive(:test).with(sample_data).and_return(success_result)
+ expect(subject).to eq(success_result)
+ end
end
end
context 'deployment' do
- let(:project) { create(:project, :test_repo) }
+ let_it_be(:project) { create(:project, :test_repo) }
let(:event) { 'deployment' }
it 'returns error message if not enough data' do
@@ -167,7 +215,7 @@ RSpec.describe Integrations::Test::ProjectService do
end
context 'wiki_page' do
- let(:project) { create(:project, :wiki_repo) }
+ let_it_be(:project) { create(:project, :wiki_repo) }
let(:event) { 'wiki_page' }
it 'returns error message if wiki disabled' do
diff --git a/spec/services/issue_rebalancing_service_spec.rb b/spec/services/issue_rebalancing_service_spec.rb
index 94f594c8083..7b3d4213b24 100644
--- a/spec/services/issue_rebalancing_service_spec.rb
+++ b/spec/services/issue_rebalancing_service_spec.rb
@@ -32,70 +32,88 @@ RSpec.describe IssueRebalancingService do
project.reload.issues.reorder(relative_position: :asc).to_a
end
- it 'rebalances a set of issues with clumps at the end and start' do
- all_issues = start_clump + unclumped + end_clump.reverse
- service = described_class.new(project.issues.first)
+ shared_examples 'IssueRebalancingService shared examples' do
+ it 'rebalances a set of issues with clumps at the end and start' do
+ all_issues = start_clump + unclumped + end_clump.reverse
+ service = described_class.new(project.issues.first)
- expect { service.execute }.not_to change { issues_in_position_order.map(&:id) }
+ expect { service.execute }.not_to change { issues_in_position_order.map(&:id) }
- all_issues.each(&:reset)
+ all_issues.each(&:reset)
- gaps = all_issues.take(all_issues.count - 1).zip(all_issues.drop(1)).map do |a, b|
- b.relative_position - a.relative_position
+ gaps = all_issues.take(all_issues.count - 1).zip(all_issues.drop(1)).map do |a, b|
+ b.relative_position - a.relative_position
+ end
+
+ expect(gaps).to all(be > RelativePositioning::MIN_GAP)
+ expect(all_issues.first.relative_position).to be > (RelativePositioning::MIN_POSITION * 0.9999)
+ expect(all_issues.last.relative_position).to be < (RelativePositioning::MAX_POSITION * 0.9999)
end
- expect(gaps).to all(be > RelativePositioning::MIN_GAP)
- expect(all_issues.first.relative_position).to be > (RelativePositioning::MIN_POSITION * 0.9999)
- expect(all_issues.last.relative_position).to be < (RelativePositioning::MAX_POSITION * 0.9999)
- end
+ it 'is idempotent' do
+ service = described_class.new(project.issues.first)
- it 'is idempotent' do
- service = described_class.new(project.issues.first)
+ expect do
+ service.execute
+ service.execute
+ end.not_to change { issues_in_position_order.map(&:id) }
+ end
- expect do
- service.execute
- service.execute
- end.not_to change { issues_in_position_order.map(&:id) }
- end
+ it 'does nothing if the feature flag is disabled' do
+ stub_feature_flags(rebalance_issues: false)
+ issue = project.issues.first
+ issue.project
+ issue.project.group
+ old_pos = issue.relative_position
- it 'does nothing if the feature flag is disabled' do
- stub_feature_flags(rebalance_issues: false)
- issue = project.issues.first
- issue.project
- issue.project.group
- old_pos = issue.relative_position
+ service = described_class.new(issue)
- service = described_class.new(issue)
+ expect { service.execute }.not_to exceed_query_limit(0)
+ expect(old_pos).to eq(issue.reload.relative_position)
+ end
- expect { service.execute }.not_to exceed_query_limit(0)
- expect(old_pos).to eq(issue.reload.relative_position)
- end
+ it 'acts if the flag is enabled for the project' do
+ issue = create(:issue, project: project, author: user, relative_position: max_pos)
+ stub_feature_flags(rebalance_issues: issue.project)
- it 'acts if the flag is enabled for the project' do
- issue = create(:issue, project: project, author: user, relative_position: max_pos)
- stub_feature_flags(rebalance_issues: issue.project)
+ service = described_class.new(issue)
- service = described_class.new(issue)
+ expect { service.execute }.to change { issue.reload.relative_position }
+ end
- expect { service.execute }.to change { issue.reload.relative_position }
- end
+ it 'acts if the flag is enabled for the group' do
+ issue = create(:issue, project: project, author: user, relative_position: max_pos)
+ project.update!(group: create(:group))
+ stub_feature_flags(rebalance_issues: issue.project.group)
- it 'acts if the flag is enabled for the group' do
- issue = create(:issue, project: project, author: user, relative_position: max_pos)
- project.update!(group: create(:group))
- stub_feature_flags(rebalance_issues: issue.project.group)
+ service = described_class.new(issue)
- service = described_class.new(issue)
+ expect { service.execute }.to change { issue.reload.relative_position }
+ end
+
+ it 'aborts if there are too many issues' do
+ issue = project.issues.first
+ base = double(count: 10_001)
- expect { service.execute }.to change { issue.reload.relative_position }
+ allow(Issue).to receive(:relative_positioning_query_base).with(issue).and_return(base)
+
+ expect { described_class.new(issue).execute }.to raise_error(described_class::TooManyIssues)
+ end
end
- it 'aborts if there are too many issues' do
- issue = project.issues.first
- base = double(count: 10_001)
+ context 'when issue_rebalancing_optimization feature flag is on' do
+ before do
+ stub_feature_flags(issue_rebalancing_optimization: true)
+ end
+
+ it_behaves_like 'IssueRebalancingService shared examples'
+ end
- allow(Issue).to receive(:relative_positioning_query_base).with(issue).and_return(base)
+  context 'when issue_rebalancing_optimization feature flag is off' do
+ before do
+ stub_feature_flags(issue_rebalancing_optimization: false)
+ end
- expect { described_class.new(issue).execute }.to raise_error(described_class::TooManyIssues)
+ it_behaves_like 'IssueRebalancingService shared examples'
end
end
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index dc545f57d23..3cf45143594 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -84,6 +84,7 @@ RSpec.describe Issues::CloseService do
let!(:external_issue_tracker) { create(:jira_service, project: project) }
it 'closes the issue on the external issue tracker' do
+ project.reload
expect(project.external_issue_tracker).to receive(:close_issue)
described_class.new(project, user).close_issue(external_issue)
@@ -94,6 +95,7 @@ RSpec.describe Issues::CloseService do
let!(:external_issue_tracker) { create(:jira_service, project: project, active: false) }
it 'does not close the issue on the external issue tracker' do
+ project.reload
expect(project.external_issue_tracker).not_to receive(:close_issue)
described_class.new(project, user).close_issue(external_issue)
@@ -104,6 +106,7 @@ RSpec.describe Issues::CloseService do
let!(:external_issue_tracker) { create(:bugzilla_service, project: project) }
it 'does not close the issue on the external issue tracker' do
+ project.reload
expect(project.external_issue_tracker).not_to receive(:close_issue)
described_class.new(project, user).close_issue(external_issue)
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index cc6a49fc4cf..e42e9722297 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -452,162 +452,50 @@ RSpec.describe Issues::CreateService do
end
context 'checking spam' do
- include_context 'includes Spam constants'
+ let(:request) { double(:request) }
+ let(:api) { true }
+ let(:captcha_response) { 'abc123' }
+ let(:spam_log_id) { 1 }
- let(:title) { 'Legit issue' }
- let(:description) { 'please fix' }
- let(:opts) do
+ let(:params) do
{
- title: title,
- description: description,
- request: double(:request, env: {})
+ title: 'Spam issue',
+ request: request,
+ api: api,
+ captcha_response: captcha_response,
+ spam_log_id: spam_log_id
}
end
- subject { described_class.new(project, user, opts) }
-
- before do
- stub_feature_flags(allow_possible_spam: false)
+ subject do
+ described_class.new(project, user, params)
end
- context 'when reCAPTCHA was verified' do
- let(:log_user) { user }
- let(:spam_logs) { create_list(:spam_log, 2, user: log_user, title: title) }
- let(:target_spam_log) { spam_logs.last }
-
- before do
- opts[:recaptcha_verified] = true
- opts[:spam_log_id] = target_spam_log.id
-
- expect(Spam::SpamVerdictService).not_to receive(:new)
- end
-
- it 'does not mark an issue as spam' do
- expect(issue).not_to be_spam
- end
-
- it 'creates a valid issue' do
- expect(issue).to be_valid
- end
-
- it 'does not assign a spam_log to the issue' do
- expect(issue.spam_log).to be_nil
- end
-
- it 'marks related spam_log as recaptcha_verified' do
- expect { issue }.to change { target_spam_log.reload.recaptcha_verified }.from(false).to(true)
- end
-
- context 'when spam log does not belong to a user' do
- let(:log_user) { create(:user) }
-
- it 'does not mark spam_log as recaptcha_verified' do
- expect { issue }.not_to change { target_spam_log.reload.recaptcha_verified }
- end
+ before do
+ allow_next_instance_of(UserAgentDetailService) do |instance|
+ allow(instance).to receive(:create)
end
end
- context 'when reCAPTCHA was not verified' do
- before do
- expect_next_instance_of(Spam::SpamActionService) do |spam_service|
- expect(spam_service).to receive_messages(check_for_spam?: true)
- end
- end
-
- context 'when SpamVerdictService requires reCAPTCHA' do
- before do
- expect_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
- expect(verdict_service).to receive(:execute).and_return(CONDITIONAL_ALLOW)
- end
- end
-
- it 'does not mark the issue as spam' do
- expect(issue).not_to be_spam
- end
-
- it 'marks the issue as needing reCAPTCHA' do
- expect(issue.needs_recaptcha?).to be_truthy
- end
-
- it 'invalidates the issue' do
- expect(issue).to be_invalid
- end
-
- it 'creates a new spam_log' do
- expect { issue }
- .to have_spam_log(title: title, description: description, user_id: user.id, noteable_type: 'Issue')
- end
- end
-
- context 'when SpamVerdictService disallows creation' do
- before do
- expect_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
- expect(verdict_service).to receive(:execute).and_return(DISALLOW)
- end
- end
-
- context 'when allow_possible_spam feature flag is false' do
- it 'marks the issue as spam' do
- expect(issue).to be_spam
- end
-
- it 'does not mark the issue as needing reCAPTCHA' do
- expect(issue.needs_recaptcha?).to be_falsey
- end
-
- it 'invalidates the issue' do
- expect(issue).to be_invalid
- end
-
- it 'creates a new spam_log' do
- expect { issue }
- .to have_spam_log(title: title, description: description, user_id: user.id, noteable_type: 'Issue')
- end
- end
-
- context 'when allow_possible_spam feature flag is true' do
- before do
- stub_feature_flags(allow_possible_spam: true)
- end
-
- it 'does not mark the issue as spam' do
- expect(issue).not_to be_spam
- end
-
- it 'does not mark the issue as needing reCAPTCHA' do
- expect(issue.needs_recaptcha?).to be_falsey
- end
-
- it 'creates a valid issue' do
- expect(issue).to be_valid
- end
-
- it 'creates a new spam_log' do
- expect { issue }
- .to have_spam_log(title: title, description: description, user_id: user.id, noteable_type: 'Issue')
- end
- end
+ it 'executes SpamActionService' do
+ spam_params = Spam::SpamParams.new(
+ api: api,
+ captcha_response: captcha_response,
+ spam_log_id: spam_log_id
+ )
+ expect_next_instance_of(
+ Spam::SpamActionService,
+ {
+ spammable: an_instance_of(Issue),
+ request: request,
+ user: user,
+ action: :create
+ }
+ ) do |instance|
+ expect(instance).to receive(:execute).with(spam_params: spam_params)
end
- context 'when the SpamVerdictService allows creation' do
- before do
- expect_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
- expect(verdict_service).to receive(:execute).and_return(ALLOW)
- end
- end
-
- it 'does not mark an issue as spam' do
- expect(issue).not_to be_spam
- end
-
- it 'creates a valid issue' do
- expect(issue).to be_valid
- end
-
- it 'does not assign a spam_log to an issue' do
- expect(issue.spam_log).to be_nil
- end
- end
+ subject.execute
end
end
end
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index 06a6a52bc41..fd42a84e405 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -711,7 +711,7 @@ RSpec.describe Issues::UpdateService, :mailer do
}
service = described_class.new(project, user, params)
- expect(service).not_to receive(:spam_check)
+ expect(Spam::SpamActionService).not_to receive(:new)
service.execute(issue)
end
diff --git a/spec/services/members/update_service_spec.rb b/spec/services/members/update_service_spec.rb
index 2d4457f3f62..a1b1397d444 100644
--- a/spec/services/members/update_service_spec.rb
+++ b/spec/services/members/update_service_spec.rb
@@ -13,9 +13,11 @@ RSpec.describe Members::UpdateService do
{ access_level: Gitlab::Access::MAINTAINER }
end
+ subject { described_class.new(current_user, params).execute(member, permission: permission) }
+
shared_examples 'a service raising Gitlab::Access::AccessDeniedError' do
it 'raises Gitlab::Access::AccessDeniedError' do
- expect { described_class.new(current_user, params).execute(member, permission: permission) }
+ expect { subject }
.to raise_error(Gitlab::Access::AccessDeniedError)
end
end
@@ -24,18 +26,24 @@ RSpec.describe Members::UpdateService do
it 'updates the member' do
expect(TodosDestroyer::EntityLeaveWorker).not_to receive(:perform_in).with(Todo::WAIT_FOR_DELETE, member.user_id, member.source_id, source.class.name)
- updated_member = described_class.new(current_user, params).execute(member, permission: permission)
+ updated_member = subject.fetch(:member)
expect(updated_member).to be_valid
expect(updated_member.access_level).to eq(Gitlab::Access::MAINTAINER)
end
+ it 'returns success status' do
+ result = subject.fetch(:status)
+
+ expect(result).to eq(:success)
+ end
+
context 'when member is downgraded to guest' do
shared_examples 'schedules to delete confidential todos' do
it do
expect(TodosDestroyer::EntityLeaveWorker).to receive(:perform_in).with(Todo::WAIT_FOR_DELETE, member.user_id, member.source_id, source.class.name).once
- updated_member = described_class.new(current_user, params).execute(member, permission: permission)
+ updated_member = subject.fetch(:member)
expect(updated_member).to be_valid
expect(updated_member.access_level).to eq(Gitlab::Access::GUEST)
@@ -62,6 +70,16 @@ RSpec.describe Members::UpdateService do
expect { described_class.new(current_user, params) }.to raise_error(ArgumentError, 'invalid value for Integer(): "invalid"')
end
end
+
+ context 'when member is not valid' do
+ let(:params) { { expires_at: 2.days.ago } }
+
+ it 'returns error status' do
+ result = subject
+
+ expect(result[:status]).to eq(:error)
+ end
+ end
end
before do
diff --git a/spec/services/merge_requests/after_create_service_spec.rb b/spec/services/merge_requests/after_create_service_spec.rb
index 9ae310d8cee..f21feb70bc5 100644
--- a/spec/services/merge_requests/after_create_service_spec.rb
+++ b/spec/services/merge_requests/after_create_service_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe MergeRequests::AfterCreateService do
- include AfterNextHelpers
-
let_it_be(:merge_request) { create(:merge_request) }
subject(:after_create_service) do
@@ -66,15 +64,8 @@ RSpec.describe MergeRequests::AfterCreateService do
execute_service
end
- it 'registers an onboarding progress action' do
- OnboardingProgress.onboard(merge_request.target_project.namespace)
-
- expect_next(OnboardingProgressService, merge_request.target_project.namespace)
- .to receive(:execute).with(action: :merge_request_created).and_call_original
-
- execute_service
-
- expect(OnboardingProgress.completed?(merge_request.target_project.namespace, :merge_request_created)).to be(true)
+ it_behaves_like 'records an onboarding progress action', :merge_request_created do
+ let(:namespace) { merge_request.target_project.namespace }
end
end
end
diff --git a/spec/services/merge_requests/build_service_spec.rb b/spec/services/merge_requests/build_service_spec.rb
index f83b8d98ce8..22b3456708f 100644
--- a/spec/services/merge_requests/build_service_spec.rb
+++ b/spec/services/merge_requests/build_service_spec.rb
@@ -252,6 +252,7 @@ RSpec.describe MergeRequests::BuildService do
issue.update!(iid: 123)
else
create(:"#{issue_tracker}_service", project: project)
+ project.reload
end
end
@@ -351,6 +352,7 @@ RSpec.describe MergeRequests::BuildService do
issue.update!(iid: 123)
else
create(:"#{issue_tracker}_service", project: project)
+ project.reload
end
end
diff --git a/spec/services/merge_requests/delete_non_latest_diffs_service_spec.rb b/spec/services/merge_requests/delete_non_latest_diffs_service_spec.rb
index cdaacaf5fca..d2070a466b1 100644
--- a/spec/services/merge_requests/delete_non_latest_diffs_service_spec.rb
+++ b/spec/services/merge_requests/delete_non_latest_diffs_service_spec.rb
@@ -12,6 +12,8 @@ RSpec.describe MergeRequests::DeleteNonLatestDiffsService, :clean_gitlab_redis_s
stub_const("#{described_class.name}::BATCH_SIZE", 2)
3.times { merge_request.create_merge_request_diff }
+ merge_request.create_merge_head_diff
+ merge_request.reset
end
it 'schedules non-latest merge request diffs removal' do
diff --git a/spec/services/merge_requests/mark_reviewer_reviewed_service_spec.rb b/spec/services/merge_requests/mark_reviewer_reviewed_service_spec.rb
new file mode 100644
index 00000000000..1075f6f9034
--- /dev/null
+++ b/spec/services/merge_requests/mark_reviewer_reviewed_service_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::MarkReviewerReviewedService do
+ let(:current_user) { create(:user) }
+ let(:merge_request) { create(:merge_request, reviewers: [current_user]) }
+ let(:reviewer) { merge_request.merge_request_reviewers.find_by(user_id: current_user.id) }
+ let(:project) { merge_request.project }
+ let(:service) { described_class.new(project, current_user) }
+ let(:result) { service.execute(merge_request) }
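+  # `result` is evaluated lazily, so nested groups can override `service`
+  # (e.g. with a user who lacks permission) before the call happens.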
+
+ before do
+ project.add_developer(current_user)
+ end
+
+ describe '#execute' do
+ describe 'invalid permissions' do
+ let(:service) { described_class.new(project, create(:user)) }
+
+ it 'returns an error' do
+ expect(result[:status]).to eq :error
+ end
+ end
+
+ describe 'reviewer does not exist' do
+ let(:service) { described_class.new(project, create(:user)) }
+
+ it 'returns an error' do
+ expect(result[:status]).to eq :error
+ end
+ end
+
+ describe 'reviewer exists' do
+ it 'returns success' do
+ expect(result[:status]).to eq :success
+ end
+
+      it 'updates the reviewer state' do
+ expect(result[:status]).to eq :success
+ expect(reviewer.state).to eq 'reviewed'
+ end
+ end
+ end
+end
diff --git a/spec/services/merge_requests/mergeability_check_service_spec.rb b/spec/services/merge_requests/mergeability_check_service_spec.rb
index 17bfa9d7368..e0baf5af8b4 100644
--- a/spec/services/merge_requests/mergeability_check_service_spec.rb
+++ b/spec/services/merge_requests/mergeability_check_service_spec.rb
@@ -33,6 +33,14 @@ RSpec.describe MergeRequests::MergeabilityCheckService, :clean_gitlab_redis_shar
expect(merge_request.merge_status).to eq('can_be_merged')
end
+ it 'reloads merge head diff' do
+ expect_next_instance_of(MergeRequests::ReloadMergeHeadDiffService) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ subject
+ end
+
it 'update diff discussion positions' do
expect_next_instance_of(Discussions::CaptureDiffNotePositionsService) do |service|
expect(service).to receive(:execute)
@@ -142,7 +150,11 @@ RSpec.describe MergeRequests::MergeabilityCheckService, :clean_gitlab_redis_shar
end
it 'resets one merge request upon execution' do
- expect_any_instance_of(MergeRequest).to receive(:reset).once
+ expect_next_instance_of(MergeRequests::ReloadMergeHeadDiffService) do |svc|
+ expect(svc).to receive(:execute).and_return(status: :success)
+ end
+
+ expect_any_instance_of(MergeRequest).to receive(:reset).once.and_call_original
execute_within_threads(amount: 2)
end
@@ -266,6 +278,14 @@ RSpec.describe MergeRequests::MergeabilityCheckService, :clean_gitlab_redis_shar
it_behaves_like 'unmergeable merge request'
+ it 'reloads merge head diff' do
+ expect_next_instance_of(MergeRequests::ReloadMergeHeadDiffService) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ subject
+ end
+
it 'returns ServiceResponse.error' do
result = subject
@@ -329,6 +349,12 @@ RSpec.describe MergeRequests::MergeabilityCheckService, :clean_gitlab_redis_shar
subject
end
+
+ it 'does not reload merge head diff' do
+ expect(MergeRequests::ReloadMergeHeadDiffService).not_to receive(:new)
+
+ subject
+ end
end
end
diff --git a/spec/services/merge_requests/push_options_handler_service_spec.rb b/spec/services/merge_requests/push_options_handler_service_spec.rb
index 85bcf4562b1..c2769d4fa88 100644
--- a/spec/services/merge_requests/push_options_handler_service_spec.rb
+++ b/spec/services/merge_requests/push_options_handler_service_spec.rb
@@ -21,6 +21,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
let(:existing_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{source_branch}" }
let(:deleted_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 #{Gitlab::Git::BLANK_SHA} refs/heads/#{source_branch}" }
let(:default_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{project.default_branch}" }
+ let(:error_mr_required) { "A merge_request.create push option is required to create a merge request for branch #{source_branch}" }
shared_examples_for 'a service that can create a merge request' do
subject(:last_mr) { MergeRequest.last }
@@ -176,11 +177,9 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that does not create a merge request'
it 'adds an error to the service' do
- error = "A merge_request.create push option is required to create a merge request for branch #{source_branch}"
-
service.execute
- expect(service.errors).to include(error)
+ expect(service.errors).to include(error_mr_required)
end
context 'when coupled with the `create` push option' do
@@ -197,11 +196,9 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that does not create a merge request'
it 'adds an error to the service' do
- error = "A merge_request.create push option is required to create a merge request for branch #{source_branch}"
-
service.execute
- expect(service.errors).to include(error)
+ expect(service.errors).to include(error_mr_required)
end
context 'when coupled with the `create` push option' do
@@ -263,11 +260,9 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that does not create a merge request'
it 'adds an error to the service' do
- error = "A merge_request.create push option is required to create a merge request for branch #{source_branch}"
-
service.execute
- expect(service.errors).to include(error)
+ expect(service.errors).to include(error_mr_required)
end
context 'when coupled with the `create` push option' do
@@ -308,11 +303,9 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that does not create a merge request'
it 'adds an error to the service' do
- error = "A merge_request.create push option is required to create a merge request for branch #{source_branch}"
-
service.execute
- expect(service.errors).to include(error)
+ expect(service.errors).to include(error_mr_required)
end
context 'when coupled with the `create` push option' do
@@ -329,11 +322,9 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that does not create a merge request'
it 'adds an error to the service' do
- error = "A merge_request.create push option is required to create a merge request for branch #{source_branch}"
-
service.execute
- expect(service.errors).to include(error)
+ expect(service.errors).to include(error_mr_required)
end
context 'when coupled with the `create` push option' do
@@ -374,11 +365,9 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that does not create a merge request'
it 'adds an error to the service' do
- error = "A merge_request.create push option is required to create a merge request for branch #{source_branch}"
-
service.execute
- expect(service.errors).to include(error)
+ expect(service.errors).to include(error_mr_required)
end
context 'when coupled with the `create` push option' do
@@ -395,11 +384,9 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that does not create a merge request'
it 'adds an error to the service' do
- error = "A merge_request.create push option is required to create a merge request for branch #{source_branch}"
-
service.execute
- expect(service.errors).to include(error)
+ expect(service.errors).to include(error_mr_required)
end
context 'when coupled with the `create` push option' do
@@ -440,11 +427,9 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that does not create a merge request'
it 'adds an error to the service' do
- error = "A merge_request.create push option is required to create a merge request for branch #{source_branch}"
-
service.execute
- expect(service.errors).to include(error)
+ expect(service.errors).to include(error_mr_required)
end
context 'when coupled with the `create` push option' do
@@ -461,11 +446,9 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that does not create a merge request'
it 'adds an error to the service' do
- error = "A merge_request.create push option is required to create a merge request for branch #{source_branch}"
-
service.execute
- expect(service.errors).to include(error)
+ expect(service.errors).to include(error_mr_required)
end
context 'when coupled with the `create` push option' do
@@ -506,11 +489,9 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that does not create a merge request'
it 'adds an error to the service' do
- error = "A merge_request.create push option is required to create a merge request for branch #{source_branch}"
-
service.execute
- expect(service.errors).to include(error)
+ expect(service.errors).to include(error_mr_required)
end
context 'when coupled with the `create` push option' do
@@ -527,11 +508,9 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that does not create a merge request'
it 'adds an error to the service' do
- error = "A merge_request.create push option is required to create a merge request for branch #{source_branch}"
-
service.execute
- expect(service.errors).to include(error)
+ expect(service.errors).to include(error_mr_required)
end
context 'when coupled with the `create` push option' do
diff --git a/spec/services/merge_requests/reload_merge_head_diff_service_spec.rb b/spec/services/merge_requests/reload_merge_head_diff_service_spec.rb
new file mode 100644
index 00000000000..3152a4e3861
--- /dev/null
+++ b/spec/services/merge_requests/reload_merge_head_diff_service_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::ReloadMergeHeadDiffService do
+ let(:merge_request) { create(:merge_request) }
+
+ subject { described_class.new(merge_request).execute }
+
+ describe '#execute' do
+ before do
+ MergeRequests::MergeToRefService
+ .new(merge_request.project, merge_request.author)
+ .execute(merge_request)
+ end
+
+ it 'creates a merge head diff' do
+ expect(subject[:status]).to eq(:success)
+ expect(merge_request.reload.merge_head_diff).to be_present
+ end
+
+ context 'when merge ref head is not present' do
+ before do
+ allow(merge_request).to receive(:merge_ref_head).and_return(nil)
+ end
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ end
+ end
+
+ context 'when failed to create merge head diff' do
+ before do
+ allow(merge_request).to receive(:create_merge_head_diff!).and_raise('fail')
+ end
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ end
+ end
+
+ context 'when there is existing merge head diff' do
+ let!(:existing_merge_head_diff) { create(:merge_request_diff, :merge_head, merge_request: merge_request) }
+
+ it 'recreates merge head diff' do
+ expect(subject[:status]).to eq(:success)
+ expect(merge_request.reload.merge_head_diff).not_to eq(existing_merge_head_diff)
+ end
+ end
+
+ context 'when default_merge_ref_for_diffs feature flag is disabled' do
+ before do
+ stub_feature_flags(default_merge_ref_for_diffs: false)
+ end
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ end
+ end
+ end
+end
diff --git a/spec/services/merge_requests/request_review_service_spec.rb b/spec/services/merge_requests/request_review_service_spec.rb
new file mode 100644
index 00000000000..5cb4120852a
--- /dev/null
+++ b/spec/services/merge_requests/request_review_service_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::RequestReviewService do
+ let(:current_user) { create(:user) }
+ let(:user) { create(:user) }
+ let(:merge_request) { create(:merge_request, reviewers: [user]) }
+ let(:reviewer) { merge_request.find_reviewer(user) }
+ let(:project) { merge_request.project }
+ let(:service) { described_class.new(project, current_user) }
+ let(:result) { service.execute(merge_request, user) }
+ let(:todo_service) { spy('todo service') }
+ let(:notification_service) { spy('notification service') }
+
+ before do
+ allow(NotificationService).to receive(:new) { notification_service }
+ allow(service).to receive(:todo_service).and_return(todo_service)
+ allow(service).to receive(:notification_service).and_return(notification_service)
+
+ reviewer.update!(state: MergeRequestReviewer.states[:reviewed])
+
+ project.add_developer(current_user)
+ project.add_developer(user)
+ end
+
+ describe '#execute' do
+ describe 'invalid permissions' do
+ let(:service) { described_class.new(project, create(:user)) }
+
+ it 'returns an error' do
+ expect(result[:status]).to eq :error
+ end
+ end
+
+ describe 'reviewer does not exist' do
+ let(:result) { service.execute(merge_request, create(:user)) }
+
+ it 'returns an error' do
+ expect(result[:status]).to eq :error
+ end
+ end
+
+ describe 'reviewer exists' do
+ it 'returns success' do
+ expect(result[:status]).to eq :success
+ end
+
+ it 'updates reviewers state' do
+ service.execute(merge_request, user)
+ reviewer.reload
+
+ expect(reviewer.state).to eq 'unreviewed'
+ end
+
+ it 'sends email to reviewer' do
+ expect(notification_service).to receive_message_chain(:async, :review_requested_of_merge_request).with(merge_request, current_user, user)
+
+ service.execute(merge_request, user)
+ end
+
+ it 'creates a new todo for the reviewer' do
+ expect(todo_service).to receive(:create_request_review_todo).with(merge_request, current_user, user)
+
+ service.execute(merge_request, user)
+ end
+ end
+ end
+end
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index 9eb82dcd0ad..c18f7e3754a 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -143,7 +143,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
let(:opts) { { reviewer_ids: [user2.id] } }
context 'when merge_request_reviewers feature is disabled' do
- before(:context) do
+ before do
stub_feature_flags(merge_request_reviewers: false)
end
@@ -152,6 +152,13 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
expect(note).to be_nil
end
+
+ it 'does not update the tracking' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .not_to receive(:track_users_review_requested)
+
+ update_merge_request(reviewer_ids: [user.id])
+ end
end
context 'when merge_request_reviewers feature is enabled' do
@@ -165,6 +172,14 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
expect(note).not_to be_nil
expect(note.note).to include "requested review from #{user2.to_reference}"
end
+
+ it 'updates the tracking' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_users_review_requested)
+ .with(users: [user])
+
+ update_merge_request(reviewer_ids: [user.id])
+ end
end
end
@@ -794,6 +809,14 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
expect(merge_request.assignee_ids).to eq([user.id])
end
+ it 'updates the tracking when user ids are valid' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_users_assigned_to_mr)
+ .with(users: [user])
+
+ update_merge_request(assignee_ids: [user.id])
+ end
+
it 'does not update assignee_id when user cannot read issue' do
non_member = create(:user)
original_assignees = merge_request.assignees
diff --git a/spec/services/namespaces/in_product_marketing_emails_service_spec.rb b/spec/services/namespaces/in_product_marketing_emails_service_spec.rb
new file mode 100644
index 00000000000..7346a5b95ae
--- /dev/null
+++ b/spec/services/namespaces/in_product_marketing_emails_service_spec.rb
@@ -0,0 +1,159 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Namespaces::InProductMarketingEmailsService, '#execute' do
+ subject(:execute_service) { described_class.new(track, interval).execute }
+
+ let(:track) { :create }
+ let(:interval) { 1 }
+
+ let(:previous_action_completed_at) { 2.days.ago.middle_of_day }
+ let(:current_action_completed_at) { nil }
+ let(:experiment_enabled) { true }
+ let(:user_can_perform_current_track_action) { true }
+ let(:actions_completed) { { created_at: previous_action_completed_at, git_write_at: current_action_completed_at } }
+
+ let_it_be(:group) { create(:group) }
+ let_it_be(:user) { create(:user, email_opted_in: true) }
+
+ before do
+ create(:onboarding_progress, namespace: group, **actions_completed)
+ group.add_developer(user)
+ stub_experiment_for_subject(in_product_marketing_emails: experiment_enabled)
+ allow(Ability).to receive(:allowed?).with(user, anything, anything).and_return(user_can_perform_current_track_action)
+ allow(Notify).to receive(:in_product_marketing_email).and_return(double(deliver_later: nil))
+ end
+
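+ # Custom matcher: passes when Notify.in_product_marketing_email was called
+ # exactly once with the given arguments and, when negated, when it was not
+ # called at all (Notify is stubbed in the `before` block above)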
+ RSpec::Matchers.define :send_in_product_marketing_email do |*args|
+ match do
+ expect(Notify).to have_received(:in_product_marketing_email).with(*args).once
+ end
+
+ match_when_negated do
+ expect(Notify).not_to have_received(:in_product_marketing_email)
+ end
+ end
+
+ context 'for each track and series with the right conditions' do
+ using RSpec::Parameterized::TableSyntax
+
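+ # Each row provides a track, an interval, and onboarding actions completed
+ # (interval + 1) days ago, so the email for that series is due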
+ where(:track, :interval, :actions_completed) do
+ :create | 1 | { created_at: 2.days.ago.middle_of_day }
+ :create | 5 | { created_at: 6.days.ago.middle_of_day }
+ :create | 10 | { created_at: 11.days.ago.middle_of_day }
+ :verify | 1 | { created_at: 2.days.ago.middle_of_day, git_write_at: 2.days.ago.middle_of_day }
+ :verify | 5 | { created_at: 6.days.ago.middle_of_day, git_write_at: 6.days.ago.middle_of_day }
+ :verify | 10 | { created_at: 11.days.ago.middle_of_day, git_write_at: 11.days.ago.middle_of_day }
+ :trial | 1 | { created_at: 2.days.ago.middle_of_day, git_write_at: 2.days.ago.middle_of_day, pipeline_created_at: 2.days.ago.middle_of_day }
+ :trial | 5 | { created_at: 6.days.ago.middle_of_day, git_write_at: 6.days.ago.middle_of_day, pipeline_created_at: 6.days.ago.middle_of_day }
+ :trial | 10 | { created_at: 11.days.ago.middle_of_day, git_write_at: 11.days.ago.middle_of_day, pipeline_created_at: 11.days.ago.middle_of_day }
+ :team | 1 | { created_at: 2.days.ago.middle_of_day, git_write_at: 2.days.ago.middle_of_day, pipeline_created_at: 2.days.ago.middle_of_day, trial_started_at: 2.days.ago.middle_of_day }
+ :team | 5 | { created_at: 6.days.ago.middle_of_day, git_write_at: 6.days.ago.middle_of_day, pipeline_created_at: 6.days.ago.middle_of_day, trial_started_at: 6.days.ago.middle_of_day }
+ :team | 10 | { created_at: 11.days.ago.middle_of_day, git_write_at: 11.days.ago.middle_of_day, pipeline_created_at: 11.days.ago.middle_of_day, trial_started_at: 11.days.ago.middle_of_day }
+ end
+
+ with_them do
+ it { is_expected.to send_in_product_marketing_email(user.id, group.id, track, described_class::INTERVAL_DAYS.index(interval)) }
+ end
+ end
+
+ context 'when initialized with a different track' do
+ let(:track) { :verify }
+
+ it { is_expected.not_to send_in_product_marketing_email }
+
+ context 'when the previous track actions have been completed' do
+ let(:current_action_completed_at) { 2.days.ago.middle_of_day }
+
+ it { is_expected.to send_in_product_marketing_email(user.id, group.id, :verify, 0) }
+ end
+ end
+
+ context 'when initialized with a different interval' do
+ let(:interval) { 5 }
+
+ it { is_expected.not_to send_in_product_marketing_email }
+
+ context 'when the previous track action was completed within the interval range' do
+ let(:previous_action_completed_at) { 6.days.ago.middle_of_day }
+
+ it { is_expected.to send_in_product_marketing_email(user.id, group.id, :create, 1) }
+ end
+ end
+
+ describe 'experimentation' do
+ context 'when the experiment is enabled' do
+ it 'adds the group as an experiment subject in the experimental group' do
+ expect(Experiment).to receive(:add_group)
+ .with(:in_product_marketing_emails, variant: :experimental, group: group)
+
+ execute_service
+ end
+ end
+
+ context 'when the experiment is disabled' do
+ let(:experiment_enabled) { false }
+
+ it 'adds the group as an experiment subject in the control group' do
+ expect(Experiment).to receive(:add_group)
+ .with(:in_product_marketing_emails, variant: :control, group: group)
+
+ execute_service
+ end
+
+ it { is_expected.not_to send_in_product_marketing_email }
+ end
+ end
+
+ context 'when the previous track action is not yet completed' do
+ let(:previous_action_completed_at) { nil }
+
+ it { is_expected.not_to send_in_product_marketing_email }
+ end
+
+ context 'when the previous track action is completed outside the interval range' do
+ let(:previous_action_completed_at) { 3.days.ago }
+
+ it { is_expected.not_to send_in_product_marketing_email }
+ end
+
+ context 'when the current track action is completed' do
+ let(:current_action_completed_at) { Time.current }
+
+ it { is_expected.not_to send_in_product_marketing_email }
+ end
+
+ context "when the user cannot perform the current track's action" do
+ let(:user_can_perform_current_track_action) { false }
+
+ it { is_expected.not_to send_in_product_marketing_email }
+ end
+
+ context 'when the user has not opted into marketing emails' do
+ let(:user) { create(:user, email_opted_in: false) }
+
+ it { is_expected.not_to send_in_product_marketing_email }
+ end
+
+ context 'when the user has already received a marketing email as part of another group' do
+ before do
+ other_group = create(:group)
+ other_group.add_developer(user)
+ create(:onboarding_progress, namespace: other_group, created_at: previous_action_completed_at, git_write_at: current_action_completed_at)
+ end
+
+ # Notify is called exactly once, regardless of which group triggers the email
+ it { is_expected.to send_in_product_marketing_email(user.id, anything, :create, 0) }
+ end
+
+ context 'when invoked with a non-existing track' do
+ let(:track) { :foo }
+
+ before do
+ stub_const("#{described_class}::TRACKS", { foo: :git_write })
+ end
+
+ it { expect { subject }.to raise_error(NotImplementedError, 'No ability defined for track foo') }
+ end
+end
diff --git a/spec/services/notification_recipients/build_service_spec.rb b/spec/services/notification_recipients/build_service_spec.rb
index cc08f9fceff..ff54d6ccd2f 100644
--- a/spec/services/notification_recipients/build_service_spec.rb
+++ b/spec/services/notification_recipients/build_service_spec.rb
@@ -110,4 +110,28 @@ RSpec.describe NotificationRecipients::BuildService do
end
end
end
+
+ describe '#build_requested_review_recipients' do
+ let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+
+ before do
+ merge_request.reviewers.push(assignee)
+ end
+
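+ # N+1 guard: take a query-count baseline, add another user, and assert that
+ # building the recipients again does not exceed that baseline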
+ shared_examples 'no N+1 queries' do
+ it 'avoids N+1 queries', :request_store do
+ create_user
+
+ service.build_requested_review_recipients(note)
+
+ control_count = ActiveRecord::QueryRecorder.new do
+ service.build_requested_review_recipients(note)
+ end
+
+ create_user
+
+ expect { service.build_requested_review_recipients(note) }.not_to exceed_query_limit(control_count)
+ end
+ end
+ end
end
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 85234077b1f..b67c37ba02d 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -2177,6 +2177,46 @@ RSpec.describe NotificationService, :mailer do
let(:notification_trigger) { notification.merge_when_pipeline_succeeds(merge_request, @u_disabled) }
end
end
+
+ describe '#review_requested_of_merge_request' do
+ let(:merge_request) { create(:merge_request, author: author, source_project: project, reviewers: [reviewer]) }
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:reviewer) { create(:user) }
+
+ it 'sends email to reviewer', :aggregate_failures do
+ notification.review_requested_of_merge_request(merge_request, current_user, reviewer)
+
+ merge_request.reviewers.each { |reviewer| should_email(reviewer) }
+ should_not_email(merge_request.author)
+ should_not_email(@u_watcher)
+ should_not_email(@u_participant_mentioned)
+ should_not_email(@subscriber)
+ should_not_email(@watcher_and_subscriber)
+ should_not_email(@u_guest_watcher)
+ should_not_email(@u_guest_custom)
+ should_not_email(@u_custom_global)
+ should_not_email(@unsubscriber)
+ should_not_email(@u_participating)
+ should_not_email(@u_disabled)
+ should_not_email(@u_lazy_participant)
+ end
+
+ it 'adds "review requested" reason for new reviewer' do
+ notification.review_requested_of_merge_request(merge_request, current_user, [reviewer])
+
+ merge_request.reviewers.each do |reviewer|
+ email = find_email_for(reviewer)
+
+ expect(email).to have_header('X-GitLab-NotificationReason', NotificationReason::REVIEW_REQUESTED)
+ end
+ end
+
+ it_behaves_like 'project emails are disabled' do
+ let(:notification_target) { merge_request }
+ let(:notification_trigger) { notification.review_requested_of_merge_request(merge_request, current_user, reviewer) }
+ end
+ end
end
describe 'Projects', :deliver_mails_inline do
diff --git a/spec/services/packages/debian/destroy_distribution_service_spec.rb b/spec/services/packages/debian/destroy_distribution_service_spec.rb
new file mode 100644
index 00000000000..cbb112af86f
--- /dev/null
+++ b/spec/services/packages/debian/destroy_distribution_service_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::DestroyDistributionService do
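+ # Shared example: a nil expected_message means the destroy succeeds and removes
+ # the distribution together with its components and architectures; otherwise
+ # nothing changes and the message is returned in an error ServiceResponse with
+ # the distribution in the payload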
+ RSpec.shared_examples 'Destroy Debian Distribution' do |expected_message|
+ it 'returns ServiceResponse', :aggregate_failures do
+ if expected_message.nil?
+ expect { response }
+ .to change { container.debian_distributions.klass.all.count }
+ .from(1).to(0)
+ .and change { container.debian_distributions.count }
+ .from(1).to(0)
+ .and change { component1.class.all.count }
+ .from(2).to(0)
+ .and change { architecture1.class.all.count }
+ .from(3).to(0)
+ else
+ expect { response }
+ .to not_change { container.debian_distributions.klass.all.count }
+ .and not_change { container.debian_distributions.count }
+ .and not_change { component1.class.all.count }
+ .and not_change { architecture1.class.all.count }
+ end
+
+ expect(response).to be_a(ServiceResponse)
+ expect(response.success?).to eq(expected_message.nil?)
+ expect(response.error?).to eq(!expected_message.nil?)
+ expect(response.message).to eq(expected_message)
+
+ if expected_message.nil?
+ expect(response.payload).to eq({})
+ else
+ expect(response.payload).to eq(distribution: distribution)
+ end
+ end
+ end
+
+ RSpec.shared_examples 'Debian Destroy Distribution Service' do |container_type, can_freeze|
+ context "with a Debian #{container_type} distribution" do
+ let_it_be(:container, freeze: can_freeze) { create(container_type) } # rubocop:disable Rails/SaveBang
+ let_it_be(:distribution, freeze: can_freeze) { create("debian_#{container_type}_distribution", container: container) }
+ let_it_be(:component1, freeze: can_freeze) { create("debian_#{container_type}_component", distribution: distribution, name: 'component1') }
+ let_it_be(:component2, freeze: can_freeze) { create("debian_#{container_type}_component", distribution: distribution, name: 'component2') }
+ let_it_be(:architecture0, freeze: true) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'all') }
+ let_it_be(:architecture1, freeze: can_freeze) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'architecture1') }
+ let_it_be(:architecture2, freeze: can_freeze) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'architecture2') }
+
+ subject { described_class.new(distribution) }
+
+ let(:response) { subject.execute }
+
+ context 'with a distribution' do
+ it_behaves_like 'Destroy Debian Distribution'
+ end
+
+ context 'when destroy fails' do
+ let(:distribution) { create("debian_#{container_type}_distribution", container: container) }
+
+ before do
+ expect(distribution).to receive(:destroy).and_return(false)
+ end
+
+ it_behaves_like 'Destroy Debian Distribution', "Unable to destroy Debian #{container_type} distribution"
+ end
+ end
+ end
+
+ it_behaves_like 'Debian Destroy Distribution Service', :project, true
+ it_behaves_like 'Debian Destroy Distribution Service', :group, false
+end
diff --git a/spec/services/packages/debian/update_distribution_service_spec.rb b/spec/services/packages/debian/update_distribution_service_spec.rb
new file mode 100644
index 00000000000..c60e8e3f735
--- /dev/null
+++ b/spec/services/packages/debian/update_distribution_service_spec.rb
@@ -0,0 +1,144 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::UpdateDistributionService do
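+ # Shared example: a nil expected_message means the update succeeds and the
+ # distribution takes the new simple params; otherwise the original values are
+ # kept and the validation message is returned. Components and architectures
+ # are compared against the expected lists in both cases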
+ RSpec.shared_examples 'Update Debian Distribution' do |expected_message, expected_components, expected_architectures|
+ it 'returns ServiceResponse', :aggregate_failures do
+ expect(distribution).to receive(:update).with(simple_params).and_call_original if expected_message.nil?
+
+ expect { response }
+ .to not_change { container.debian_distributions.klass.all.count }
+ .and not_change { container.debian_distributions.count }
+ .and not_change { component1.class.all.count }
+ .and not_change { architecture1.class.all.count }
+
+ expect(response).to be_a(ServiceResponse)
+ expect(response.success?).to eq(expected_message.nil?)
+ expect(response.error?).to eq(!expected_message.nil?)
+ expect(response.message).to eq(expected_message)
+
+ expect(response.payload).to eq(distribution: distribution)
+
+ distribution.reload
+ distribution.components.reload
+ distribution.architectures.reload
+
+ if expected_message.nil?
+ simple_params.each_pair do |name, value|
+ expect(distribution.send(name)).to eq(value)
+ end
+ else
+ original_params.each_pair do |name, value|
+ expect(distribution.send(name)).to eq(value)
+ end
+ end
+
+ expect(distribution.components.map(&:name)).to contain_exactly(*expected_components)
+ expect(distribution.architectures.map(&:name)).to contain_exactly(*expected_architectures)
+ end
+ end
+
+ RSpec.shared_examples 'Debian Update Distribution Service' do |container_type, can_freeze|
+ context "with a Debian #{container_type} distribution" do
+ let_it_be(:container, freeze: can_freeze) { create(container_type) } # rubocop:disable Rails/SaveBang
+ let_it_be(:distribution, reload: true) { create("debian_#{container_type}_distribution", container: container) }
+ let_it_be(:component1) { create("debian_#{container_type}_component", distribution: distribution, name: 'component1') }
+ let_it_be(:component2) { create("debian_#{container_type}_component", distribution: distribution, name: 'component2') }
+ let_it_be(:architecture0) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'all') }
+ let_it_be(:architecture1) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'architecture1') }
+ let_it_be(:architecture2) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'architecture2') }
+
+ let(:original_params) do
+ {
+ suite: nil,
+ origin: nil,
+ label: nil,
+ version: nil,
+ description: nil,
+ valid_time_duration_seconds: nil,
+ automatic: true,
+ automatic_upgrades: false
+ }
+ end
+
+ let(:params) { {} }
+ let(:simple_params) { params.except(:components, :architectures) }
+
+ subject { described_class.new(distribution, params) }
+
+ let(:response) { subject.execute }
+
+ context 'with valid simple params' do
+ let(:params) do
+ {
+ suite: 'my-suite',
+ origin: 'my-origin',
+ label: 'my-label',
+ version: '42.0',
+ description: 'my-description',
+ valid_time_duration_seconds: 7.days,
+ automatic: false,
+ automatic_upgrades: true
+ }
+ end
+
+ it_behaves_like 'Update Debian Distribution', nil, %w[component1 component2], %w[all architecture1 architecture2]
+ end
+
+ context 'with invalid simple params' do
+ let(:params) do
+ {
+ suite: 'suite erronée',
+ origin: 'origin erronée',
+ label: 'label erronée',
+ version: 'version erronée',
+ description: 'description erronée',
+ valid_time_duration_seconds: 1.hour
+ }
+ end
+
+ it_behaves_like 'Update Debian Distribution', 'Suite is invalid, Origin is invalid, Label is invalid, Version is invalid, and Valid time duration seconds must be greater than or equal to 86400', %w[component1 component2], %w[all architecture1 architecture2]
+ end
+
+ context 'with valid components and architectures' do
+ let(:params) do
+ {
+ suite: 'my-suite',
+ components: %w[component2 component3],
+ architectures: %w[architecture2 architecture3]
+ }
+ end
+
+ it_behaves_like 'Update Debian Distribution', nil, %w[component2 component3], %w[all architecture2 architecture3]
+ end
+
+ context 'with invalid components' do
+ let(:params) do
+ {
+ suite: 'my-suite',
+ components: %w[component2 erroné],
+ architectures: %w[architecture2 architecture3]
+ }
+ end
+
+ it_behaves_like 'Update Debian Distribution', 'Component Name is invalid', %w[component1 component2], %w[all architecture1 architecture2]
+ end
+
+ context 'with invalid architectures' do
+ let(:params) do
+ {
+ suite: 'my-suite',
+ components: %w[component2 component3],
+ architectures: %w[architecture2 erroné]
+ }
+ end
+
+ it_behaves_like 'Update Debian Distribution', 'Architecture Name is invalid', %w[component1 component2], %w[all architecture1 architecture2]
+ end
+ end
+ end
+
+ it_behaves_like 'Debian Update Distribution Service', :project, true
+ it_behaves_like 'Debian Update Distribution Service', :group, false
+end
diff --git a/spec/services/packages/maven/find_or_create_package_service_spec.rb b/spec/services/packages/maven/find_or_create_package_service_spec.rb
index 82dffeefcde..191a443a837 100644
--- a/spec/services/packages/maven/find_or_create_package_service_spec.rb
+++ b/spec/services/packages/maven/find_or_create_package_service_spec.rb
@@ -111,6 +111,13 @@ RSpec.describe Packages::Maven::FindOrCreatePackageService do
expect(subject.errors).to include('Duplicate package is not allowed')
end
+ context 'when uploading to the versionless package which contains metadata about all versions' do
+ let(:version) { nil }
+ let(:param_path) { path }
+
+ it_behaves_like 'reuse existing package'
+ end
+
context 'when uploading different non-duplicate files to the same package' do
before do
package_file = existing_package.package_files.find_by(file_name: 'my-app-1.0-20180724.124855-1.jar')
diff --git a/spec/services/pages/migrate_from_legacy_storage_service_spec.rb b/spec/services/pages/migrate_from_legacy_storage_service_spec.rb
new file mode 100644
index 00000000000..5d335143719
--- /dev/null
+++ b/spec/services/pages/migrate_from_legacy_storage_service_spec.rb
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Pages::MigrateFromLegacyStorageService do
+ let(:service) { described_class.new(Rails.logger, 3, 10) }
+
+ it 'does not try to migrate pages if pages are not deployed' do
+ expect(::Pages::MigrateLegacyStorageToDeploymentService).not_to receive(:new)
+
+ expect(service.execute).to eq(migrated: 0, errored: 0)
+ end
+
+ it 'uses multiple threads' do
+ projects = create_list(:project, 20)
+ projects.each do |project|
+ project.mark_pages_as_deployed
+
+ FileUtils.mkdir_p File.join(project.pages_path, "public")
+ File.open(File.join(project.pages_path, "public/index.html"), "w") do |f|
+ f.write("Hello!")
+ end
+ end
+
+ service = described_class.new(Rails.logger, 3, 2)
+
+ threads = Concurrent::Set.new
+
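+ # Record which threads invoke migrate_project so we can assert below that all
+ # three configured threads are actually used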
+ expect(service).to receive(:migrate_project).exactly(20).times.and_wrap_original do |m, *args|
+ threads.add(Thread.current)
+
+ # sleep to be 100% certain that one thread can't consume the whole queue
+ # the test works without it, but we want to avoid making it flaky
+ sleep(0.01)
+
+ m.call(*args)
+ end
+
+ expect(service.execute).to eq(migrated: 20, errored: 0)
+ expect(threads.length).to eq(3)
+ end
+
+ context 'when pages are marked as deployed' do
+ let(:project) { create(:project) }
+
+ before do
+ project.mark_pages_as_deployed
+ end
+
+ context 'when pages directory does not exist' do
+ it 'tries to migrate the project, but does not crash' do
+ expect_next_instance_of(::Pages::MigrateLegacyStorageToDeploymentService, project) do |service|
+ expect(service).to receive(:execute).and_call_original
+ end
+
+ expect(service.execute).to eq(migrated: 0, errored: 1)
+ end
+ end
+
+ context 'when pages directory exists on disk' do
+ before do
+ FileUtils.mkdir_p File.join(project.pages_path, "public")
+ File.open(File.join(project.pages_path, "public/index.html"), "w") do |f|
+ f.write("Hello!")
+ end
+ end
+
+ it 'migrates pages projects without deployments' do
+ expect_next_instance_of(::Pages::MigrateLegacyStorageToDeploymentService, project) do |service|
+ expect(service).to receive(:execute).and_call_original
+ end
+
+ expect do
+ expect(service.execute).to eq(migrated: 1, errored: 0)
+ end.to change { project.pages_metadatum.reload.pages_deployment }.from(nil)
+ end
+
+ context 'when a deployment already exists for the project' do
+ before do
+ deployment = create(:pages_deployment, project: project)
+ project.set_first_pages_deployment!(deployment)
+ end
+
+ it 'does not try to migrate project' do
+ expect(::Pages::MigrateLegacyStorageToDeploymentService).not_to receive(:new)
+
+ expect(service.execute).to eq(migrated: 0, errored: 0)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb b/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
index 4d489d7fe4b..79654c9b190 100644
--- a/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
+++ b/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
@@ -135,7 +135,7 @@ RSpec.describe PagesDomains::ObtainLetsEncryptCertificateService do
cert.add_extension ef.create_extension("authorityKeyIdentifier",
"keyid:always,issuer:always")
- cert.sign key, OpenSSL::Digest::SHA1.new
+ cert.sign key, OpenSSL::Digest.new('SHA1')
cert.to_pem
end
diff --git a/spec/services/post_receive_service_spec.rb b/spec/services/post_receive_service_spec.rb
index 6e2cd7edf04..033194972c7 100644
--- a/spec/services/post_receive_service_spec.rb
+++ b/spec/services/post_receive_service_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe PostReceiveService do
include Gitlab::Routing
- include AfterNextHelpers
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository, :wiki_repo, namespace: user.namespace) }
@@ -47,11 +46,7 @@ RSpec.describe PostReceiveService do
expect(subject).to be_empty
end
- it 'does not record an onboarding progress action' do
- expect_next(OnboardingProgressService).not_to receive(:execute)
-
- subject
- end
+ it_behaves_like 'does not record an onboarding progress action'
end
context 'when repository is nil' do
@@ -88,11 +83,8 @@ RSpec.describe PostReceiveService do
expect(response.reference_counter_decreased).to be(true)
end
- it 'records an onboarding progress action' do
- expect_next(OnboardingProgressService, project.namespace)
- .to receive(:execute).with(action: :git_write)
-
- subject
+ it_behaves_like 'records an onboarding progress action', :git_write do
+ let(:namespace) { project.namespace }
end
end
diff --git a/spec/services/projects/cleanup_service_spec.rb b/spec/services/projects/cleanup_service_spec.rb
index 6fd29813d98..f2c052d9397 100644
--- a/spec/services/projects/cleanup_service_spec.rb
+++ b/spec/services/projects/cleanup_service_spec.rb
@@ -88,7 +88,7 @@ RSpec.describe Projects::CleanupService do
end
it 'runs garbage collection on the repository' do
- expect_next_instance_of(GitGarbageCollectWorker) do |worker|
+ expect_next_instance_of(Projects::GitGarbageCollectWorker) do |worker|
expect(worker).to receive(:perform).with(project.id, :prune, "project_cleanup:gc:#{project.id}")
end
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index 6c0e6654622..f7da6f75141 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -40,6 +40,48 @@ RSpec.describe Projects::CreateService, '#execute' do
end
end
+ describe 'setting name and path' do
+ subject(:project) { create_project(user, opts) }
+
+ context 'when both are set' do
+ let(:opts) { { name: 'one', path: 'two' } }
+
+ it 'keeps them as specified' do
+ expect(project.name).to eq('one')
+ expect(project.path).to eq('two')
+ end
+ end
+
+ context 'when path is set' do
+ let(:opts) { { path: 'one.two_three-four' } }
+
+ it 'sets name == path' do
+ expect(project.path).to eq('one.two_three-four')
+ expect(project.name).to eq(project.path)
+ end
+ end
+
+ context 'when name is a valid path' do
+ let(:opts) { { name: 'one.two_three-four' } }
+
+ it 'sets path == name' do
+ expect(project.name).to eq('one.two_three-four')
+ expect(project.path).to eq(project.name)
+ end
+ end
+
+ context 'when name is not a valid path' do
+ let(:opts) { { name: 'one.two_three-four and five' } }
+
+ # TODO: Retained for backwards compatibility. Remove in API v5.
+ # See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/52725
+ it 'parameterizes the name' do
+ expect(project.name).to eq('one.two_three-four and five')
+ expect(project.path).to eq('one-two_three-four-and-five')
+ end
+ end
+ end
+
context 'user namespace' do
it do
project = create_project(user, opts)
@@ -419,7 +461,7 @@ RSpec.describe Projects::CreateService, '#execute' do
context 'when another repository already exists on disk' do
let(:opts) do
{
- name: 'Existing',
+ name: 'existing',
namespace_id: user.namespace.id
}
end
diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb
index a11f16573f5..df02f8ea15d 100644
--- a/spec/services/projects/fork_service_spec.rb
+++ b/spec/services/projects/fork_service_spec.rb
@@ -323,6 +323,50 @@ RSpec.describe Projects::ForkService do
end
end
end
+
+ describe 'fork with optional attributes' do
+ let(:public_project) { create(:project, :public) }
+
+ it 'sets optional attributes to specified values' do
+ forked_project = fork_project(
+ public_project,
+ nil,
+ namespace: public_project.namespace,
+ path: 'forked',
+ name: 'My Fork',
+ description: 'Description',
+ visibility: 'internal',
+ using_service: true
+ )
+
+ expect(forked_project.path).to eq('forked')
+ expect(forked_project.name).to eq('My Fork')
+ expect(forked_project.description).to eq('Description')
+ expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
+ end
+
+ it 'sets visibility level to private if an unknown visibility is requested' do
+ forked_project = fork_project(public_project, nil, using_service: true, visibility: 'unknown')
+
+ expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ end
+
+ it 'sets visibility level to project visibility level if requested visibility is greater' do
+ private_project = create(:project, :private)
+
+ forked_project = fork_project(private_project, nil, using_service: true, visibility: 'public')
+
+ expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ end
+
+ it 'sets visibility level to target namespace visibility level if requested visibility is greater' do
+ private_group = create(:group, :private)
+
+ forked_project = fork_project(public_project, nil, namespace: private_group, using_service: true, visibility: 'public')
+
+ expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ end
+ end
end
context 'when a project is already forked' do
diff --git a/spec/services/projects/prometheus/alerts/notify_service_spec.rb b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
index 8ae47ec266c..e196220eabe 100644
--- a/spec/services/projects/prometheus/alerts/notify_service_spec.rb
+++ b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Projects::Prometheus::Alerts::NotifyService do
include PrometheusHelpers
+ using RSpec::Parameterized::TableSyntax
let_it_be(:project, reload: true) { create(:project) }
@@ -61,8 +62,6 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
end
context 'with project specific cluster' do
- using RSpec::Parameterized::TableSyntax
-
where(:cluster_enabled, :status, :configured_token, :token_input, :result) do
true | :installed | token | token | :success
true | :installed | nil | nil | :success
@@ -104,8 +103,6 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
end
context 'with manual prometheus installation' do
- using RSpec::Parameterized::TableSyntax
-
where(:alerting_setting, :configured_token, :token_input, :result) do
true | token | token | :success
true | token | 'x' | :failure
@@ -139,8 +136,6 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
end
context 'with HTTP integration' do
- using RSpec::Parameterized::TableSyntax
-
where(:active, :token, :result) do
:active | :valid | :success
:active | :invalid | :failure
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index a6730c5de52..3439a18f3a5 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -55,6 +55,15 @@ RSpec.describe Projects::UpdatePagesService do
end
end
+ it "doesn't deploy to legacy storage if it's disabled" do
+ stub_feature_flags(pages_update_legacy_storage: false)
+
+ expect(execute).to eq(:success)
+ expect(project.pages_deployed?).to be_truthy
+
+ expect(File.exist?(File.join(project.pages_path, 'public', 'index.html'))).to eq(false)
+ end
+
it 'creates pages_deployment and saves it in the metadata' do
expect do
expect(execute).to eq(:success)
diff --git a/spec/services/projects/update_repository_storage_service_spec.rb b/spec/services/projects/update_repository_storage_service_spec.rb
index ef8f166cc3f..828667fdfc2 100644
--- a/spec/services/projects/update_repository_storage_service_spec.rb
+++ b/spec/services/projects/update_repository_storage_service_spec.rb
@@ -59,13 +59,18 @@ RSpec.describe Projects::UpdateRepositoryStorageService do
end
context 'when the filesystems are the same' do
- let(:destination) { project.repository_storage }
+ before do
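+ # Returning the same UUID for both calls makes Gitaly report the source and
+ # destination filesystems as identical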
+ expect(Gitlab::GitalyClient).to receive(:filesystem_id).twice.and_return(SecureRandom.uuid)
+ end
- it 'bails out and does nothing' do
+ it 'updates the database without trying to move the repository', :aggregate_failures do
result = subject.execute
+ project.reload
- expect(result).to be_error
- expect(result.message).to match(/SameFilesystemError/)
+ expect(result).to be_success
+ expect(project).not_to be_repository_read_only
+ expect(project.repository_storage).to eq('test_second_storage')
+ expect(project.project_repository.shard_name).to eq('test_second_storage')
end
end
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index 21e294418a1..c0452e18f52 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -945,6 +945,12 @@ RSpec.describe QuickActions::InterpretService do
it_behaves_like 'assign_reviewer command'
end
+ context 'with the "request_review" alias' do
+ let(:content) { "/request_review @#{developer.username}" }
+
+ it_behaves_like 'assign_reviewer command'
+ end
+
context 'with no user' do
let(:content) { '/assign_reviewer' }
@@ -1787,6 +1793,24 @@ RSpec.describe QuickActions::InterpretService do
expect(text).to eq(" - list\n\ntest")
end
+ it 'tracks MAU for commands' do
+ content = "/shrug test\n/assign me\n/milestone %4"
+
+ expect(Gitlab::UsageDataCounters::QuickActionActivityUniqueCounter)
+ .to receive(:track_unique_action)
+ .with('shrug', args: 'test', user: developer)
+
+ expect(Gitlab::UsageDataCounters::QuickActionActivityUniqueCounter)
+ .to receive(:track_unique_action)
+ .with('assign', args: 'me', user: developer)
+
+ expect(Gitlab::UsageDataCounters::QuickActionActivityUniqueCounter)
+ .to receive(:track_unique_action)
+ .with('milestone', args: '%4', user: developer)
+
+ service.execute(content, issue)
+ end
+
context '/create_merge_request command' do
let(:branch_name) { '1-feature' }
let(:content) { "/create_merge_request #{branch_name}" }
diff --git a/spec/services/repositories/changelog_service_spec.rb b/spec/services/repositories/changelog_service_spec.rb
new file mode 100644
index 00000000000..1ecf45bcd16
--- /dev/null
+++ b/spec/services/repositories/changelog_service_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Repositories::ChangelogService do
+ describe '#execute' do
+ it 'generates and commits a changelog section' do
+ project = create(:project, :empty_repo)
+ creator = project.creator
+ author1 = create(:user)
+ author2 = create(:user)
+
+ project.add_maintainer(author1)
+ project.add_maintainer(author2)
+
+ mr1 = create(:merge_request, :merged, target_project: project)
+ mr2 = create(:merge_request, :merged, target_project: project)
+
+ # The range of commits ignores the first commit, but includes the last
+ # commit. To ensure both the commits below are included, we must create an
+ # extra commit.
+ #
+ # In the real world, the start commit of the range will be the last commit
+ # of the previous release, so ignoring that is expected and desired.
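+ # For example, with the commits created below (sha1 -> sha2 -> sha3), passing
+ # from: sha1, to: sha3 includes the changes of sha2 and sha3 but not sha1,
+ # similar to `git log sha1..sha3`.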
+ sha1 = create_commit(
+ project,
+ creator,
+ commit_message: 'Initial commit',
+ actions: [{ action: 'create', content: 'test', file_path: 'README.md' }]
+ )
+
+ sha2 = create_commit(
+ project,
+ author1,
+ commit_message: "Title 1\n\nChangelog: feature",
+ actions: [{ action: 'create', content: 'foo', file_path: 'a.txt' }]
+ )
+
+ sha3 = create_commit(
+ project,
+ author2,
+ commit_message: "Title 2\n\nChangelog: feature",
+ actions: [{ action: 'create', content: 'bar', file_path: 'b.txt' }]
+ )
+
+ commit1 = project.commit(sha2)
+ commit2 = project.commit(sha3)
+
+ allow(MergeRequestDiffCommit)
+ .to receive(:oldest_merge_request_id_per_commit)
+ .with(project.id, [commit2.id, commit1.id])
+ .and_return([
+ { sha: sha2, merge_request_id: mr1.id },
+ { sha: sha3, merge_request_id: mr2.id }
+ ])
+
+ recorder = ActiveRecord::QueryRecorder.new do
+ described_class
+ .new(project, creator, version: '1.0.0', from: sha1, to: sha3)
+ .execute
+ end
+
+ changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data
+
+ expect(recorder.count).to eq(10)
+ expect(changelog).to include('Title 1', 'Title 2')
+ end
+ end
+
+ def create_commit(project, user, params)
+ params = { start_branch: 'master', branch_name: 'master' }.merge(params)
+ Files::MultiService.new(project, user, params).execute.fetch(:result)
+ end
+end
diff --git a/spec/services/resource_access_tokens/create_service_spec.rb b/spec/services/resource_access_tokens/create_service_spec.rb
index 5cfa1ae93e6..517ed086713 100644
--- a/spec/services/resource_access_tokens/create_service_spec.rb
+++ b/spec/services/resource_access_tokens/create_service_spec.rb
@@ -195,6 +195,14 @@ RSpec.describe ResourceAccessTokens::CreateService do
end
end
end
+
+ it 'logs the event' do
+ allow(Gitlab::AppLogger).to receive(:info)
+
+ response = subject
+
+ expect(Gitlab::AppLogger).to have_received(:info).with(/PROJECT ACCESS TOKEN CREATION: created_by: #{user.username}, project_id: #{resource.id}, token_user: #{response.payload[:access_token].user.name}, token_id: \d+/)
+ end
end
context 'when resource is a project' do
@@ -208,7 +216,7 @@ RSpec.describe ResourceAccessTokens::CreateService do
response = subject
expect(response.error?).to be true
- expect(response.errors).to include("User does not have permission to create #{resource_type} Access Token")
+ expect(response.errors).to include("User does not have permission to create #{resource_type} access token")
end
end
diff --git a/spec/services/resource_access_tokens/revoke_service_spec.rb b/spec/services/resource_access_tokens/revoke_service_spec.rb
index af29ee2a721..99adb4bb7a0 100644
--- a/spec/services/resource_access_tokens/revoke_service_spec.rb
+++ b/spec/services/resource_access_tokens/revoke_service_spec.rb
@@ -40,6 +40,14 @@ RSpec.describe ResourceAccessTokens::RevokeService do
expect(User.exists?(resource_bot.id)).to be_falsy
end
+
+ it 'logs the event' do
+ allow(Gitlab::AppLogger).to receive(:info)
+
+ subject
+
+ expect(Gitlab::AppLogger).to have_received(:info).with("PROJECT ACCESS TOKEN REVOCATION: revoked_by: #{user.username}, project_id: #{resource.id}, token_user: #{resource_bot.name}, token_id: #{access_token.id}")
+ end
end
shared_examples 'rollback revoke steps' do
diff --git a/spec/services/resource_events/change_milestone_service_spec.rb b/spec/services/resource_events/change_milestone_service_spec.rb
index a2131c5c1b0..ed234376381 100644
--- a/spec/services/resource_events/change_milestone_service_spec.rb
+++ b/spec/services/resource_events/change_milestone_service_spec.rb
@@ -6,8 +6,8 @@ RSpec.describe ResourceEvents::ChangeMilestoneService do
let_it_be(:timebox) { create(:milestone) }
let(:created_at_time) { Time.utc(2019, 12, 30) }
- let(:add_timebox_args) { { created_at: created_at_time, old_milestone: nil } }
- let(:remove_timebox_args) { { created_at: created_at_time, old_milestone: timebox } }
+ let(:add_timebox_args) { { old_milestone: nil } }
+ let(:remove_timebox_args) { { old_milestone: timebox } }
[:issue, :merge_request].each do |issuable|
it_behaves_like 'timebox(milestone or iteration) resource events creator', ResourceMilestoneEvent do
diff --git a/spec/services/security/ci_configuration/sast_create_service_spec.rb b/spec/services/security/ci_configuration/sast_create_service_spec.rb
new file mode 100644
index 00000000000..ff7ab614e08
--- /dev/null
+++ b/spec/services/security/ci_configuration/sast_create_service_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Security::CiConfiguration::SastCreateService, :snowplow do
+ describe '#execute' do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let(:params) { {} }
+
+ subject(:result) { described_class.new(project, user, params).execute }
+
+ context 'user does not belong to project' do
+ it 'returns an error status' do
+ expect(result[:status]).to eq(:error)
+ expect(result[:success_path]).to be_nil
+ end
+
+ it 'does not track a snowplow event' do
+ subject
+
+ expect_no_snowplow_event
+ end
+ end
+
+ context 'user belongs to project' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'does track the snowplow event' do
+ subject
+
+ expect_snowplow_event(
+ category: 'Security::CiConfiguration::SastCreateService',
+ action: 'create',
+ label: 'false'
+ )
+ end
+
+ it 'raises exception if the user does not have permission to create a new branch' do
+ allow(project).to receive(:repository).and_raise(Gitlab::Git::PreReceiveError, "You are not allowed to create protected branches on this project.")
+
+ expect { subject }.to raise_error(Gitlab::Git::PreReceiveError)
+ end
+
+ context 'with no parameters' do
+ it 'returns the path to create a new merge request' do
+ expect(result[:status]).to eq(:success)
+ expect(result[:success_path]).to match(/#{Gitlab::Routing.url_helpers.project_new_merge_request_url(project, {})}(.*)description(.*)source_branch/)
+ end
+ end
+
+ context 'with parameters' do
+ let(:params) do
+ { 'stage' => 'security',
+ 'SEARCH_MAX_DEPTH' => 1,
+ 'SECURE_ANALYZERS_PREFIX' => 'new_registry',
+ 'SAST_EXCLUDED_PATHS' => 'spec,docs' }
+ end
+
+ it 'returns the path to create a new merge request' do
+ expect(result[:status]).to eq(:success)
+ expect(result[:success_path]).to match(/#{Gitlab::Routing.url_helpers.project_new_merge_request_url(project, {})}(.*)description(.*)source_branch/)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/security/ci_configuration/sast_parser_service_spec.rb b/spec/services/security/ci_configuration/sast_parser_service_spec.rb
new file mode 100644
index 00000000000..21490f993c7
--- /dev/null
+++ b/spec/services/security/ci_configuration/sast_parser_service_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Security::CiConfiguration::SastParserService do
+ describe '#configuration' do
+ include_context 'read ci configuration for sast enabled project'
+
+ let(:configuration) { described_class.new(project).configuration }
+ let(:secure_analyzers_prefix) { configuration['global'][0] }
+ let(:sast_excluded_paths) { configuration['global'][1] }
+ let(:sast_analyzer_image_tag) { configuration['global'][2] }
+ let(:sast_pipeline_stage) { configuration['pipeline'][0] }
+ let(:sast_search_max_depth) { configuration['pipeline'][1] }
+ let(:brakeman) { configuration['analyzers'][0] }
+ let(:bandit) { configuration['analyzers'][1] }
+ let(:sast_brakeman_level) { brakeman['variables'][0] }
+
+ it 'parses the configuration for SAST' do
+ expect(secure_analyzers_prefix['default_value']).to eql('registry.gitlab.com/gitlab-org/security-products/analyzers')
+ expect(sast_excluded_paths['default_value']).to eql('spec, test, tests, tmp')
+ expect(sast_analyzer_image_tag['default_value']).to eql('2')
+ expect(sast_pipeline_stage['default_value']).to eql('test')
+ expect(sast_search_max_depth['default_value']).to eql('4')
+ expect(brakeman['enabled']).to be(true)
+ expect(sast_brakeman_level['default_value']).to eql('1')
+ end
+
+ context 'when populating the current values of the entities' do
+ context 'when .gitlab-ci.yml is present' do
+ it 'populates the current values from the file' do
+ allow(project.repository).to receive(:blob_data_at).and_return(gitlab_ci_yml_content)
+ expect(secure_analyzers_prefix['value']).to eql('registry.gitlab.com/gitlab-org/security-products/analyzers2')
+ expect(sast_excluded_paths['value']).to eql('spec, executables')
+ expect(sast_analyzer_image_tag['value']).to eql('2')
+ expect(sast_pipeline_stage['value']).to eql('our_custom_security_stage')
+ expect(sast_search_max_depth['value']).to eql('8')
+ expect(brakeman['enabled']).to be(false)
+ expect(bandit['enabled']).to be(true)
+ expect(sast_brakeman_level['value']).to eql('2')
+ end
+
+ context 'SAST_DEFAULT_ANALYZERS is set' do
+ it 'enables analyzers correctly' do
+ allow(project.repository).to receive(:blob_data_at).and_return(gitlab_ci_yml_default_analyzers_content)
+
+ expect(brakeman['enabled']).to be(false)
+ expect(bandit['enabled']).to be(true)
+ end
+ end
+
+ context 'SAST_EXCLUDED_ANALYZERS is set' do
+ it 'enables analyzers correctly' do
+ allow(project.repository).to receive(:blob_data_at).and_return(gitlab_ci_yml_excluded_analyzers_content)
+
+ expect(brakeman['enabled']).to be(false)
+ expect(bandit['enabled']).to be(true)
+ end
+ end
+ end
+
+ context 'when .gitlab-ci.yml is absent' do
+ it 'populates the current values with the default values' do
+ allow(project.repository).to receive(:blob_data_at).and_return(nil)
+ expect(secure_analyzers_prefix['value']).to eql('registry.gitlab.com/gitlab-org/security-products/analyzers')
+ expect(sast_excluded_paths['value']).to eql('spec, test, tests, tmp')
+ expect(sast_analyzer_image_tag['value']).to eql('2')
+ expect(sast_pipeline_stage['value']).to eql('test')
+ expect(sast_search_max_depth['value']).to eql('4')
+ expect(brakeman['enabled']).to be(true)
+ expect(sast_brakeman_level['value']).to eql('1')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/snippets/create_service_spec.rb b/spec/services/snippets/create_service_spec.rb
index 96807fd629f..32a09e1afc8 100644
--- a/spec/services/snippets/create_service_spec.rb
+++ b/spec/services/snippets/create_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Snippets::CreateService do
describe '#execute' do
let_it_be(:user) { create(:user) }
let_it_be(:admin) { create(:user, :admin) }
+ let(:action) { :create }
let(:opts) { base_opts.merge(extra_opts) }
let(:base_opts) do
{
@@ -309,7 +310,7 @@ RSpec.describe Snippets::CreateService do
it_behaves_like 'a service that creates a snippet'
it_behaves_like 'public visibility level restrictions apply'
- it_behaves_like 'snippets spam check is performed'
+ it_behaves_like 'checking spam'
it_behaves_like 'snippet create data is tracked'
it_behaves_like 'an error service response when save fails'
it_behaves_like 'creates repository and files'
@@ -337,7 +338,7 @@ RSpec.describe Snippets::CreateService do
it_behaves_like 'a service that creates a snippet'
it_behaves_like 'public visibility level restrictions apply'
- it_behaves_like 'snippets spam check is performed'
+ it_behaves_like 'checking spam'
it_behaves_like 'snippet create data is tracked'
it_behaves_like 'an error service response when save fails'
it_behaves_like 'creates repository and files'
diff --git a/spec/services/snippets/update_repository_storage_service_spec.rb b/spec/services/snippets/update_repository_storage_service_spec.rb
index b2bcd620d76..6ba09a9dca9 100644
--- a/spec/services/snippets/update_repository_storage_service_spec.rb
+++ b/spec/services/snippets/update_repository_storage_service_spec.rb
@@ -54,13 +54,18 @@ RSpec.describe Snippets::UpdateRepositoryStorageService do
end
context 'when the filesystems are the same' do
- let(:destination) { snippet.repository_storage }
+ before do
+ expect(Gitlab::GitalyClient).to receive(:filesystem_id).twice.and_return(SecureRandom.uuid)
+ end
- it 'bails out and does nothing' do
+ it 'updates the database without trying to move the repository', :aggregate_failures do
result = subject.execute
+ snippet.reload
- expect(result).to be_error
- expect(result.message).to match(/SameFilesystemError/)
+ expect(result).to be_success
+ expect(snippet).not_to be_repository_read_only
+ expect(snippet.repository_storage).to eq(destination)
+ expect(snippet.snippet_repository.shard_name).to eq(destination)
end
end
diff --git a/spec/services/snippets/update_service_spec.rb b/spec/services/snippets/update_service_spec.rb
index a2341dc71b2..e737c00ae67 100644
--- a/spec/services/snippets/update_service_spec.rb
+++ b/spec/services/snippets/update_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Snippets::UpdateService do
describe '#execute', :aggregate_failures do
let_it_be(:user) { create(:user) }
let_it_be(:admin) { create :user, admin: true }
+ let(:action) { :update }
let(:visibility_level) { Gitlab::VisibilityLevel::PRIVATE }
let(:base_opts) do
{
@@ -738,11 +739,7 @@ RSpec.describe Snippets::UpdateService do
it_behaves_like 'only file_name is present'
it_behaves_like 'only content is present'
it_behaves_like 'invalid params error response'
- it_behaves_like 'snippets spam check is performed' do
- before do
- subject
- end
- end
+ it_behaves_like 'checking spam'
context 'when snippet does not have a repository' do
let!(:snippet) { create(:project_snippet, author: user, project: project) }
@@ -766,11 +763,7 @@ RSpec.describe Snippets::UpdateService do
it_behaves_like 'only file_name is present'
it_behaves_like 'only content is present'
it_behaves_like 'invalid params error response'
- it_behaves_like 'snippets spam check is performed' do
- before do
- subject
- end
- end
+ it_behaves_like 'checking spam'
context 'when snippet does not have a repository' do
let!(:snippet) { create(:personal_snippet, author: user, project: project) }
diff --git a/spec/services/spam/spam_action_service_spec.rb b/spec/services/spam/spam_action_service_spec.rb
index 8edd9406bce..371923f1518 100644
--- a/spec/services/spam/spam_action_service_spec.rb
+++ b/spec/services/spam/spam_action_service_spec.rb
@@ -24,41 +24,16 @@ RSpec.describe Spam::SpamActionService do
issue.spam = false
end
- describe '#initialize' do
- subject { described_class.new(spammable: issue, request: request, user: user) }
-
- context 'when the request is nil' do
- let(:request) { nil }
-
- it 'assembles the options with information from the spammable' do
- aggregate_failures do
- expect(subject.options[:ip_address]).to eq(issue.ip_address)
- expect(subject.options[:user_agent]).to eq(issue.user_agent)
- expect(subject.options.key?(:referrer)).to be_falsey
- end
- end
- end
-
- context 'when the request is present' do
- let(:request) { double(:request, env: env) }
-
- it 'assembles the options with information from the spammable' do
- aggregate_failures do
- expect(subject.options[:ip_address]).to eq(fake_ip)
- expect(subject.options[:user_agent]).to eq(fake_user_agent)
- expect(subject.options[:referrer]).to eq(fake_referrer)
- end
- end
- end
- end
-
shared_examples 'only checks for spam if a request is provided' do
context 'when request is missing' do
- subject { described_class.new(spammable: issue, request: nil, user: user) }
+ let(:request) { nil }
it "doesn't check as spam" do
- subject
+ expect(fake_verdict_service).not_to receive(:execute)
+
+ response = subject
+ expect(response.message).to match(/request was not present/)
expect(issue).not_to be_spam
end
end
@@ -66,34 +41,88 @@ RSpec.describe Spam::SpamActionService do
context 'when request exists' do
it 'creates a spam log' do
expect { subject }
- .to log_spam(title: issue.title, description: issue.description, noteable_type: 'Issue')
+ .to log_spam(title: issue.title, description: issue.description, noteable_type: 'Issue')
end
end
end
+ shared_examples 'creates a spam log' do
+ it do
+ expect { subject }.to change { SpamLog.count }.by(1)
+
+ new_spam_log = SpamLog.last
+ expect(new_spam_log.user_id).to eq(user.id)
+ expect(new_spam_log.title).to eq(issue.title)
+ expect(new_spam_log.description).to eq(issue.description)
+ expect(new_spam_log.source_ip).to eq(fake_ip)
+ expect(new_spam_log.user_agent).to eq(fake_user_agent)
+ expect(new_spam_log.noteable_type).to eq('Issue')
+ expect(new_spam_log.via_api).to eq(false)
+ end
+ end
+
describe '#execute' do
let(:request) { double(:request, env: env) }
+ let(:fake_captcha_verification_service) { double(:captcha_verification_service) }
let(:fake_verdict_service) { double(:spam_verdict_service) }
let(:allowlisted) { false }
+ let(:api) { nil }
+ let(:captcha_response) { 'abc123' }
+ let(:spam_log_id) { existing_spam_log.id }
+ let(:spam_params) do
+ Spam::SpamActionService.filter_spam_params!(
+ api: api,
+ captcha_response: captcha_response,
+ spam_log_id: spam_log_id
+ )
+ end
+
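+ # Options and arguments that SpamActionService is expected to pass to
+ # SpamVerdictService.new (matched by the stub in the `before` block below)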
+ let(:verdict_service_opts) do
+ {
+ ip_address: fake_ip,
+ user_agent: fake_user_agent,
+ referrer: fake_referrer
+ }
+ end
+
+ let(:verdict_service_args) do
+ {
+ target: issue,
+ user: user,
+ request: request,
+ options: verdict_service_opts,
+ context: {
+ action: :create,
+ target_type: 'Issue'
+ }
+ }
+ end
let_it_be(:existing_spam_log) { create(:spam_log, user: user, recaptcha_verified: false) }
subject do
- described_service = described_class.new(spammable: issue, request: request, user: user)
+ described_service = described_class.new(spammable: issue, request: request, user: user, action: :create)
allow(described_service).to receive(:allowlisted?).and_return(allowlisted)
- described_service.execute(api: nil, recaptcha_verified: recaptcha_verified, spam_log_id: existing_spam_log.id)
+ described_service.execute(spam_params: spam_params)
end
before do
- allow(Spam::SpamVerdictService).to receive(:new).and_return(fake_verdict_service)
+ allow(Captcha::CaptchaVerificationService).to receive(:new) { fake_captcha_verification_service }
+ allow(Spam::SpamVerdictService).to receive(:new).with(verdict_service_args).and_return(fake_verdict_service)
end
- context 'when reCAPTCHA was already verified' do
- let(:recaptcha_verified) { true }
+ context 'when captcha response verification returns true' do
+ before do
+ expect(fake_captcha_verification_service)
+ .to receive(:execute).with(captcha_response: captcha_response, request: request) { true }
+ end
it "doesn't check with the SpamVerdictService" do
aggregate_failures do
- expect(SpamLog).to receive(:verify_recaptcha!)
+ expect(SpamLog).to receive(:verify_recaptcha!).with(
+ user_id: user.id,
+ id: spam_log_id
+ )
expect(fake_verdict_service).not_to receive(:execute)
end
@@ -105,8 +134,11 @@ RSpec.describe Spam::SpamActionService do
end
end
- context 'when reCAPTCHA was not verified' do
- let(:recaptcha_verified) { false }
+ context 'when captcha response verification returns false' do
+ before do
+ expect(fake_captcha_verification_service)
+ .to receive(:execute).with(captcha_response: captcha_response, request: request) { false }
+ end
context 'when spammable attributes have not changed' do
before do
@@ -120,6 +152,10 @@ RSpec.describe Spam::SpamActionService do
end
context 'when spammable attributes have changed' do
+ let(:expected_service_check_response_message) do
+ /check Issue spammable model for any errors or captcha requirement/
+ end
+
before do
issue.description = 'SPAM!'
end
@@ -130,7 +166,9 @@ RSpec.describe Spam::SpamActionService do
it 'does not perform spam check' do
expect(Spam::SpamVerdictService).not_to receive(:new)
- subject
+ response = subject
+
+ expect(response.message).to match(/user was allowlisted/)
end
end
@@ -147,8 +185,9 @@ RSpec.describe Spam::SpamActionService do
it_behaves_like 'only checks for spam if a request is provided'
it 'marks as spam' do
- subject
+ response = subject
+ expect(response.message).to match(expected_service_check_response_message)
expect(issue).to be_spam
end
end
@@ -157,8 +196,9 @@ RSpec.describe Spam::SpamActionService do
it_behaves_like 'only checks for spam if a request is provided'
it 'does not mark as spam' do
- subject
+ response = subject
+ expect(response.message).to match(expected_service_check_response_message)
expect(issue).not_to be_spam
end
end
@@ -176,15 +216,19 @@ RSpec.describe Spam::SpamActionService do
it_behaves_like 'only checks for spam if a request is provided'
+ it_behaves_like 'creates a spam log'
+
it 'does not mark as spam' do
- subject
+ response = subject
+ expect(response.message).to match(expected_service_check_response_message)
expect(issue).not_to be_spam
end
it 'marks as needing reCAPTCHA' do
- subject
+ response = subject
+ expect(response.message).to match(expected_service_check_response_message)
expect(issue.needs_recaptcha?).to be_truthy
end
end
@@ -192,9 +236,12 @@ RSpec.describe Spam::SpamActionService do
context 'when allow_possible_spam feature flag is true' do
it_behaves_like 'only checks for spam if a request is provided'
+ it_behaves_like 'creates a spam log'
+
it 'does not mark as needing reCAPTCHA' do
- subject
+ response = subject
+ expect(response.message).to match(expected_service_check_response_message)
expect(issue.needs_recaptcha).to be_falsey
end
end
@@ -209,6 +256,51 @@ RSpec.describe Spam::SpamActionService do
expect { subject }
.not_to change { SpamLog.count }
end
+
+ it 'clears spam flags' do
+ expect(issue).to receive(:clear_spam_flags!)
+
+ subject
+ end
+ end
+
+ context 'spam verdict service options' do
+ before do
+ allow(fake_verdict_service).to receive(:execute) { ALLOW }
+ end
+
+ context 'when the request is nil' do
+ let(:request) { nil }
+ let(:issue_ip_address) { '1.2.3.4' }
+ let(:issue_user_agent) { 'lynx' }
+ let(:verdict_service_opts) do
+ {
+ ip_address: issue_ip_address,
+ user_agent: issue_user_agent
+ }
+ end
+
+ before do
+ allow(issue).to receive(:ip_address) { issue_ip_address }
+ allow(issue).to receive(:user_agent) { issue_user_agent }
+ end
+
+ it 'assembles the options with information from the spammable' do
+          # TODO: This code is untestable, because we do not perform a verification if there is no
+          # request. See the corresponding comment in the code.
+ # expect(Spam::SpamVerdictService).to receive(:new).with(verdict_service_args)
+
+ subject
+ end
+ end
+
+ context 'when the request is present' do
+ it 'assembles the options with information from the request' do
+ expect(Spam::SpamVerdictService).to receive(:new).with(verdict_service_args)
+
+ subject
+ end
+ end
end
end
end
diff --git a/spec/services/suggestions/apply_service_spec.rb b/spec/services/suggestions/apply_service_spec.rb
index 3e7594bd30f..77d0e892725 100644
--- a/spec/services/suggestions/apply_service_spec.rb
+++ b/spec/services/suggestions/apply_service_spec.rb
@@ -32,8 +32,8 @@ RSpec.describe Suggestions::ApplyService do
create(:suggestion, :content_from_repo, suggestion_args)
end
- def apply(suggestions)
- result = apply_service.new(user, *suggestions).execute
+ def apply(suggestions, custom_message = nil)
+ result = apply_service.new(user, *suggestions, message: custom_message).execute
suggestions.map { |suggestion| suggestion.reload }
@@ -74,6 +74,14 @@ RSpec.describe Suggestions::ApplyService do
expect(commit.author_name).to eq(user.name)
end
+ it 'tracks apply suggestion event' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_apply_suggestion_action)
+ .with(user: user)
+
+ apply(suggestions)
+ end
+
context 'when a custom suggestion commit message' do
before do
project.update!(suggestion_commit_message: message)
@@ -103,6 +111,16 @@ RSpec.describe Suggestions::ApplyService do
end
end
end
+
+ context 'with a user suggested commit message' do
+ let(:message) { "i'm a custom commit message!" }
+
+ it "uses the user's commit message" do
+ apply(suggestions, message)
+
+ expect(project.repository.commit.message).to(eq(message))
+ end
+ end
end
subject(:apply_service) { described_class }
@@ -570,56 +588,84 @@ RSpec.describe Suggestions::ApplyService do
project.add_maintainer(user)
end
+ shared_examples_for 'service not tracking apply suggestion event' do
+ it 'does not track apply suggestion event' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .not_to receive(:track_apply_suggestion_action)
+
+ result
+ end
+ end
+
context 'diff file was not found' do
- it 'returns error message' do
- expect(suggestion.note).to receive(:latest_diff_file) { nil }
+ let(:result) { apply_service.new(user, suggestion).execute }
- result = apply_service.new(user, suggestion).execute
+ before do
+ expect(suggestion.note).to receive(:latest_diff_file) { nil }
+ end
+ it 'returns error message' do
expect(result).to eq(message: 'A file was not found.',
status: :error)
end
+
+ it_behaves_like 'service not tracking apply suggestion event'
end
context 'when not all suggestions belong to the same branch' do
- it 'renders error message' do
- merge_request2 = create(:merge_request,
- :conflict,
- source_project: project,
- target_project: project)
-
- position2 = Gitlab::Diff::Position.new(old_path: "files/ruby/popen.rb",
- new_path: "files/ruby/popen.rb",
- old_line: nil,
- new_line: 15,
- diff_refs: merge_request2
- .diff_refs)
+ let(:merge_request2) do
+ create(
+ :merge_request,
+ :conflict,
+ source_project: project,
+ target_project: project
+ )
+ end
- diff_note2 = create(:diff_note_on_merge_request,
- noteable: merge_request2,
- position: position2,
- project: project)
+ let(:position2) do
+ Gitlab::Diff::Position.new(
+ old_path: "files/ruby/popen.rb",
+ new_path: "files/ruby/popen.rb",
+ old_line: nil,
+ new_line: 15,
+ diff_refs: merge_request2.diff_refs
+ )
+ end
- other_branch_suggestion = create(:suggestion, note: diff_note2)
+ let(:diff_note2) do
+ create(
+ :diff_note_on_merge_request,
+ noteable: merge_request2,
+ position: position2,
+ project: project
+ )
+ end
- result = apply_service.new(user, suggestion, other_branch_suggestion).execute
+ let(:other_branch_suggestion) { create(:suggestion, note: diff_note2) }
+ let(:result) { apply_service.new(user, suggestion, other_branch_suggestion).execute }
+ it 'renders error message' do
expect(result).to eq(message: 'Suggestions must all be on the same branch.',
status: :error)
end
+
+ it_behaves_like 'service not tracking apply suggestion event'
end
context 'suggestion is not appliable' do
let(:inapplicable_reason) { "Can't apply this suggestion." }
+ let(:result) { apply_service.new(user, suggestion).execute }
- it 'returns error message' do
+ before do
expect(suggestion).to receive(:appliable?).and_return(false)
expect(suggestion).to receive(:inapplicable_reason).and_return(inapplicable_reason)
+ end
- result = apply_service.new(user, suggestion).execute
-
+ it 'returns error message' do
expect(result).to eq(message: inapplicable_reason, status: :error)
end
+
+ it_behaves_like 'service not tracking apply suggestion event'
end
context 'lines of suggestions overlap' do
@@ -632,12 +678,14 @@ RSpec.describe Suggestions::ApplyService do
create_suggestion(to_content: "I Overlap!")
end
- it 'returns error message' do
- result = apply_service.new(user, suggestion, overlapping_suggestion).execute
+ let(:result) { apply_service.new(user, suggestion, overlapping_suggestion).execute }
+ it 'returns error message' do
expect(result).to eq(message: 'Suggestions are not applicable as their lines cannot overlap.',
status: :error)
end
+
+ it_behaves_like 'service not tracking apply suggestion event'
end
end
end
diff --git a/spec/services/suggestions/create_service_spec.rb b/spec/services/suggestions/create_service_spec.rb
index 80823364fe8..5148d6756fc 100644
--- a/spec/services/suggestions/create_service_spec.rb
+++ b/spec/services/suggestions/create_service_spec.rb
@@ -53,6 +53,15 @@ RSpec.describe Suggestions::CreateService do
subject { described_class.new(note) }
+ shared_examples_for 'service not tracking add suggestion event' do
+ it 'does not track add suggestion event' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .not_to receive(:track_add_suggestion_action)
+
+ subject.execute
+ end
+ end
+
describe '#execute' do
context 'should not try to parse suggestions' do
context 'when not a diff note for merge requests' do
@@ -66,6 +75,8 @@ RSpec.describe Suggestions::CreateService do
subject.execute
end
+
+ it_behaves_like 'service not tracking add suggestion event'
end
context 'when diff note is not for text' do
@@ -76,17 +87,21 @@ RSpec.describe Suggestions::CreateService do
note: markdown)
end
- it 'does not try to parse suggestions' do
+ before do
allow(note).to receive(:on_text?) { false }
+ end
+ it 'does not try to parse suggestions' do
expect(Gitlab::Diff::SuggestionsParser).not_to receive(:parse)
subject.execute
end
+
+ it_behaves_like 'service not tracking add suggestion event'
end
end
- context 'should not create suggestions' do
+ context 'when diff file is not found' do
let(:note) do
create(:diff_note_on_merge_request, project: project_with_repo,
noteable: merge_request,
@@ -94,13 +109,17 @@ RSpec.describe Suggestions::CreateService do
note: markdown)
end
- it 'creates no suggestion when diff file is not found' do
+ before do
expect_next_instance_of(DiffNote) do |diff_note|
expect(diff_note).to receive(:latest_diff_file).once { nil }
end
+ end
+ it 'creates no suggestion' do
expect { subject.execute }.not_to change(Suggestion, :count)
end
+
+ it_behaves_like 'service not tracking add suggestion event'
end
context 'should create suggestions' do
@@ -137,6 +156,14 @@ RSpec.describe Suggestions::CreateService do
end
end
+ it 'tracks add suggestion event' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_add_suggestion_action)
+ .with(user: note.author)
+
+ subject.execute
+ end
+
context 'outdated position note' do
let!(:outdated_diff) { merge_request.merge_request_diff }
let!(:latest_diff) { merge_request.create_merge_request_diff }
diff --git a/spec/services/system_hooks_service_spec.rb b/spec/services/system_hooks_service_spec.rb
index 2020c67f465..1ec5237370f 100644
--- a/spec/services/system_hooks_service_spec.rb
+++ b/spec/services/system_hooks_service_spec.rb
@@ -45,15 +45,13 @@ RSpec.describe SystemHooksService do
it do
expect(event_data(group, :create)).to include(
- :event_name, :name, :created_at, :updated_at, :path, :group_id,
- :owner_name, :owner_email
+ :event_name, :name, :created_at, :updated_at, :path, :group_id
)
end
it do
expect(event_data(group, :destroy)).to include(
- :event_name, :name, :created_at, :updated_at, :path, :group_id,
- :owner_name, :owner_email
+ :event_name, :name, :created_at, :updated_at, :path, :group_id
)
end
@@ -156,9 +154,6 @@ RSpec.describe SystemHooksService do
it { expect(event_name(project_member, :update)).to eq "user_update_for_team" }
it { expect(event_name(key, :create)).to eq 'key_create' }
it { expect(event_name(key, :destroy)).to eq 'key_destroy' }
- it { expect(event_name(group, :create)).to eq 'group_create' }
- it { expect(event_name(group, :destroy)).to eq 'group_destroy' }
- it { expect(event_name(group, :rename)).to eq 'group_rename' }
end
def event_data(*args)
diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb
index 96afca2f2cb..ae18bc23c17 100644
--- a/spec/services/system_notes/issuables_service_spec.rb
+++ b/spec/services/system_notes/issuables_service_spec.rb
@@ -729,12 +729,14 @@ RSpec.describe ::SystemNotes::IssuablesService do
it 'is false with issue tracker supporting referencing' do
create(:jira_service, project: project)
+ project.reload
expect(service.cross_reference_disallowed?(noteable)).to be_falsey
end
it 'is true with issue tracker not supporting referencing' do
create(:bugzilla_service, project: project)
+ project.reload
expect(service.cross_reference_disallowed?(noteable)).to be_truthy
end
diff --git a/spec/services/test_hooks/project_service_spec.rb b/spec/services/test_hooks/project_service_spec.rb
index 7470bdff527..247270474bf 100644
--- a/spec/services/test_hooks/project_service_spec.rb
+++ b/spec/services/test_hooks/project_service_spec.rb
@@ -6,8 +6,8 @@ RSpec.describe TestHooks::ProjectService do
let(:current_user) { create(:user) }
describe '#execute' do
- let(:project) { create(:project, :repository) }
- let(:hook) { create(:project_hook, project: project) }
+ let_it_be(:project) { create(:project, :repository) }
+ let(:hook) { create(:project_hook, project: project) }
let(:trigger) { 'not_implemented_events' }
let(:service) { described_class.new(hook, current_user, trigger) }
let(:sample_data) { { data: 'sample' } }
@@ -61,17 +61,34 @@ RSpec.describe TestHooks::ProjectService do
end
it 'executes hook' do
- allow(project).to receive(:notes).and_return([Note.new])
+ create(:note, project: project)
+
allow(Gitlab::DataBuilder::Note).to receive(:build).and_return(sample_data)
+ allow_next_instance_of(NotesFinder) do |finder|
+ allow(finder).to receive(:execute).and_return(Note.all)
+ end
expect(hook).to receive(:execute).with(sample_data, trigger_key).and_return(success_result)
expect(service.execute).to include(success_result)
end
+
+ context 'when the query optimization feature flag is disabled' do
+ before do
+ stub_feature_flags(integrations_test_webhook_optimizations: false)
+ end
+
+ it 'executes the old query' do
+ allow(Gitlab::DataBuilder::Note).to receive(:build).and_return(sample_data)
+
+ expect(NotesFinder).not_to receive(:new)
+ expect(project).to receive(:notes).and_return([Note.new])
+ expect(hook).to receive(:execute).with(sample_data, trigger_key).and_return(success_result)
+ expect(service.execute).to include(success_result)
+ end
+ end
end
- context 'issues_events' do
- let(:trigger) { 'issues_events' }
- let(:trigger_key) { :issue_hooks }
+ shared_examples_for 'a test webhook that operates on issues' do
let(:issue) { build(:issue) }
it 'returns error message if not enough data' do
@@ -80,36 +97,49 @@ RSpec.describe TestHooks::ProjectService do
end
it 'executes hook' do
- allow(project).to receive(:issues).and_return([issue])
allow(issue).to receive(:to_hook_data).and_return(sample_data)
+ allow_next_instance_of(IssuesFinder) do |finder|
+ allow(finder).to receive(:execute).and_return([issue])
+ end
expect(hook).to receive(:execute).with(sample_data, trigger_key).and_return(success_result)
expect(service.execute).to include(success_result)
end
+
+ context 'when the query optimization feature flag is disabled' do
+ before do
+ stub_feature_flags(integrations_test_webhook_optimizations: false)
+ end
+
+ it 'executes the old query' do
+ allow(issue).to receive(:to_hook_data).and_return(sample_data)
+
+ expect(IssuesFinder).not_to receive(:new)
+ expect(project).to receive(:issues).and_return([issue])
+ expect(hook).to receive(:execute).with(sample_data, trigger_key).and_return(success_result)
+ expect(service.execute).to include(success_result)
+ end
+ end
+ end
+
+ context 'issues_events' do
+ let(:trigger) { 'issues_events' }
+ let(:trigger_key) { :issue_hooks }
+
+ it_behaves_like 'a test webhook that operates on issues'
end
context 'confidential_issues_events' do
let(:trigger) { 'confidential_issues_events' }
let(:trigger_key) { :confidential_issue_hooks }
- let(:issue) { build(:issue) }
- it 'returns error message if not enough data' do
- expect(hook).not_to receive(:execute)
- expect(service.execute).to include({ status: :error, message: 'Ensure the project has issues.' })
- end
-
- it 'executes hook' do
- allow(project).to receive(:issues).and_return([issue])
- allow(issue).to receive(:to_hook_data).and_return(sample_data)
-
- expect(hook).to receive(:execute).with(sample_data, trigger_key).and_return(success_result)
- expect(service.execute).to include(success_result)
- end
+ it_behaves_like 'a test webhook that operates on issues'
end
context 'merge_requests_events' do
let(:trigger) { 'merge_requests_events' }
let(:trigger_key) { :merge_request_hooks }
+ let(:merge_request) { build(:merge_request) }
it 'returns error message if not enough data' do
expect(hook).not_to receive(:execute)
@@ -117,12 +147,29 @@ RSpec.describe TestHooks::ProjectService do
end
it 'executes hook' do
- create(:merge_request, source_project: project)
- allow_any_instance_of(MergeRequest).to receive(:to_hook_data).and_return(sample_data)
+ allow(merge_request).to receive(:to_hook_data).and_return(sample_data)
+ allow_next_instance_of(MergeRequestsFinder) do |finder|
+ allow(finder).to receive(:execute).and_return([merge_request])
+ end
expect(hook).to receive(:execute).with(sample_data, trigger_key).and_return(success_result)
expect(service.execute).to include(success_result)
end
+
+ context 'when the query optimization feature flag is disabled' do
+ before do
+ stub_feature_flags(integrations_test_webhook_optimizations: false)
+ end
+
+ it 'executes the old query' do
+ allow(merge_request).to receive(:to_hook_data).and_return(sample_data)
+
+ expect(MergeRequestsFinder).not_to receive(:new)
+ expect(project).to receive(:merge_requests).and_return([merge_request])
+ expect(hook).to receive(:execute).with(sample_data, trigger_key).and_return(success_result)
+ expect(service.execute).to include(success_result)
+ end
+ end
end
context 'job_events' do
@@ -162,7 +209,7 @@ RSpec.describe TestHooks::ProjectService do
end
context 'wiki_page_events' do
- let(:project) { create(:project, :wiki_repo) }
+ let_it_be(:project) { create(:project, :wiki_repo) }
let(:trigger) { 'wiki_page_events' }
let(:trigger_key) { :wiki_page_hooks }
diff --git a/spec/services/todo_service_spec.rb b/spec/services/todo_service_spec.rb
index 83d233a8112..743dc080b06 100644
--- a/spec/services/todo_service_spec.rb
+++ b/spec/services/todo_service_spec.rb
@@ -1193,6 +1193,17 @@ RSpec.describe TodoService do
end
end
+ describe '#create_request_review_todo' do
+ let(:target) { create(:merge_request, author: author, source_project: project) }
+ let(:reviewer) { create(:user) }
+
+ it 'creates a todo for reviewer' do
+ service.create_request_review_todo(target, author, reviewer)
+
+ should_create_todo(user: reviewer, target: target, action: Todo::REVIEW_REQUESTED)
+ end
+ end
+
def should_create_todo(attributes = {})
attributes.reverse_merge!(
project: project,
diff --git a/spec/services/users/approve_service_spec.rb b/spec/services/users/approve_service_spec.rb
index 55b2c83f4a8..9999e674c7d 100644
--- a/spec/services/users/approve_service_spec.rb
+++ b/spec/services/users/approve_service_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe Users::ApproveService do
it 'returns error result' do
expect(subject[:status]).to eq(:error)
expect(subject[:message])
- .to match(/The user you are trying to approve is not pending an approval/)
+ .to match(/The user you are trying to approve is not pending approval/)
end
end
@@ -61,6 +61,14 @@ RSpec.describe Users::ApproveService do
expect(user.reload).to be_active
end
+ it 'logs approval in application logs' do
+ allow(Gitlab::AppLogger).to receive(:info)
+
+ subject
+
+ expect(Gitlab::AppLogger).to have_received(:info).with(message: "User instance access request approved", user: "#{user.username}", email: "#{user.email}", approved_by: "#{current_user.username}", ip_address: "#{current_user.current_sign_in_ip}")
+ end
+
it 'emails the user on approval' do
expect(DeviseMailer).to receive(:user_admin_approval).with(user).and_call_original
expect { subject }.to have_enqueued_mail(DeviseMailer, :user_admin_approval)
@@ -82,6 +90,20 @@ RSpec.describe Users::ApproveService do
.not_to have_enqueued_mail(DeviseMailer, :confirmation_instructions)
end
end
+
+ context 'audit events' do
+ context 'when not licensed' do
+ before do
+ stub_licensed_features(
+ admin_audit_log: false
+ )
+ end
+
+ it 'does not log any audit event' do
+ expect { subject }.not_to change(AuditEvent, :count)
+ end
+ end
+ end
end
context 'pending invitations' do
diff --git a/spec/services/users/build_service_spec.rb b/spec/services/users/build_service_spec.rb
index 446741221b3..b2a7d349ce6 100644
--- a/spec/services/users/build_service_spec.rb
+++ b/spec/services/users/build_service_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Users::BuildService do
+ using RSpec::Parameterized::TableSyntax
+
describe '#execute' do
let(:params) { build_stubbed(:user).slice(:first_name, :last_name, :username, :email, :password) }
@@ -72,8 +74,6 @@ RSpec.describe Users::BuildService do
end
context 'with "user_default_external" application setting' do
- using RSpec::Parameterized::TableSyntax
-
where(:user_default_external, :external, :email, :user_default_internal_regex, :result) do
true | nil | 'fl@example.com' | nil | true
true | true | 'fl@example.com' | nil | true
@@ -192,8 +192,6 @@ RSpec.describe Users::BuildService do
end
context 'with "user_default_external" application setting' do
- using RSpec::Parameterized::TableSyntax
-
where(:user_default_external, :external, :email, :user_default_internal_regex, :result) do
true | nil | 'fl@example.com' | nil | true
true | true | 'fl@example.com' | nil | true
diff --git a/spec/services/users/refresh_authorized_projects_service_spec.rb b/spec/services/users/refresh_authorized_projects_service_spec.rb
index 9404668e3c5..cc01b22f9d2 100644
--- a/spec/services/users/refresh_authorized_projects_service_spec.rb
+++ b/spec/services/users/refresh_authorized_projects_service_spec.rb
@@ -143,6 +143,21 @@ RSpec.describe Users::RefreshAuthorizedProjectsService do
expect(authorizations[0].project_id).to eq(project.id)
expect(authorizations[0].access_level).to eq(Gitlab::Access::MAINTAINER)
end
+
+ it 'logs the details of the refresh' do
+ source = :foo
+ service = described_class.new(user, source: source)
+ user.project_authorizations.delete_all
+
+ expect(Gitlab::AppJsonLogger).to(
+ receive(:info)
+ .with(event: 'authorized_projects_refresh',
+ 'authorized_projects_refresh.source': source,
+ 'authorized_projects_refresh.rows_deleted': 0,
+ 'authorized_projects_refresh.rows_added': 1))
+
+ service.update_authorizations([], [[user.id, project.id, Gitlab::Access::MAINTAINER]])
+ end
end
describe '#fresh_access_levels_per_project' do
diff --git a/spec/services/users/reject_service_spec.rb b/spec/services/users/reject_service_spec.rb
index 07863d1a290..b9aaff5cde5 100644
--- a/spec/services/users/reject_service_spec.rb
+++ b/spec/services/users/reject_service_spec.rb
@@ -48,6 +48,26 @@ RSpec.describe Users::RejectService do
subject
end
+
+ it 'logs rejection in application logs' do
+ allow(Gitlab::AppLogger).to receive(:info)
+
+ subject
+
+ expect(Gitlab::AppLogger).to have_received(:info).with(message: "User instance access request rejected", user: "#{user.username}", email: "#{user.email}", rejected_by: "#{current_user.username}", ip_address: "#{current_user.current_sign_in_ip}")
+ end
+ end
+ end
+
+ context 'audit events' do
+ context 'when not licensed' do
+ before do
+ stub_licensed_features(admin_audit_log: false)
+ end
+
+ it 'does not log any audit event' do
+ expect { subject }.not_to change(AuditEvent, :count)
+ end
end
end
end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 9fac6c8e192..cd3818b256d 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -13,6 +13,7 @@ end
Warning[:deprecated] = true unless ENV.key?('SILENCE_DEPRECATIONS')
require './spec/deprecation_toolkit_env'
+DeprecationToolkitEnv.configure!
require './spec/simplecov_env'
SimpleCovEnv.start!
@@ -174,6 +175,7 @@ RSpec.configure do |config|
if ENV['CI'] || ENV['RETRIES']
# This includes the first try, i.e. tests will be run 4 times before failing.
config.default_retry_count = ENV.fetch('RETRIES', 3).to_i + 1
+ config.exceptions_to_hard_fail = [DeprecationToolkitEnv::DeprecationBehaviors::SelectiveRaise::RaiseDisallowedDeprecation]
end
if ENV['FLAKY_RSPEC_GENERATE_REPORT']
@@ -216,16 +218,9 @@ RSpec.configure do |config|
# (ie. ApplicationSetting#auto_devops_enabled)
stub_feature_flags(force_autodevops_on_by_default: false)
- # The following can be removed once Vue Issuable Sidebar
- # is feature-complete and can be made default in place
- # of older sidebar.
- # See https://gitlab.com/groups/gitlab-org/-/epics/1863
- stub_feature_flags(vue_issuable_sidebar: false)
- stub_feature_flags(vue_issuable_epic_sidebar: false)
-
# Merge request widget GraphQL requests are disabled in the tests
# for now whilst we migrate as much as we can over the GraphQL
- stub_feature_flags(merge_request_widget_graphql: false)
+ # stub_feature_flags(merge_request_widget_graphql: false)
# Using FortiAuthenticator as OTP provider is disabled by default in
# tests, until we introduce it in user settings
@@ -289,6 +284,8 @@ RSpec.configure do |config|
current_user_mode.send(:user)&.admin?
end
end
+
+ allow(Gitlab::CurrentSettings).to receive(:current_application_settings?).and_return(false)
end
config.around(:example, :quarantine) do |example|
diff --git a/spec/support/gitlab_experiment.rb b/spec/support/gitlab_experiment.rb
index 1f283e4f06c..7844b85963d 100644
--- a/spec/support/gitlab_experiment.rb
+++ b/spec/support/gitlab_experiment.rb
@@ -1,4 +1,16 @@
# frozen_string_literal: true
+# Require the provided spec helper and matchers.
+require 'gitlab/experiment/rspec'
+
+# This is a temporary fix until we have a larger discussion around the
+# challenges raised in https://gitlab.com/gitlab-org/gitlab/-/issues/300104
+class ApplicationExperiment < Gitlab::Experiment # rubocop:disable Gitlab/NamespacedClass
+ def initialize(*args)
+ super
+ Feature.persist_used!(feature_flag_name)
+ end
+end
+
# Disable all caching for experiments in tests.
Gitlab::Experiment::Configuration.cache = nil
diff --git a/spec/support/graphql/field_selection.rb b/spec/support/graphql/field_selection.rb
index e2a3334acac..00323c46d69 100644
--- a/spec/support/graphql/field_selection.rb
+++ b/spec/support/graphql/field_selection.rb
@@ -47,7 +47,7 @@ module Graphql
NO_SKIP = ->(_name, _field) { false }
def self.select_fields(type, skip = NO_SKIP, parent_types = Set.new, max_depth = 3)
- return new if max_depth <= 0
+ return new if max_depth <= 0 || !type.kind.fields?
new(type.fields.flat_map do |name, field|
next [] if skip[name, field]
diff --git a/spec/support/helpers/dependency_proxy_helpers.rb b/spec/support/helpers/dependency_proxy_helpers.rb
index ebb849628bf..0d8f56906e3 100644
--- a/spec/support/helpers/dependency_proxy_helpers.rb
+++ b/spec/support/helpers/dependency_proxy_helpers.rb
@@ -18,11 +18,11 @@ module DependencyProxyHelpers
.to_return(status: status, body: body || manifest, headers: headers)
end
- def stub_manifest_head(image, tag, status: 200, body: nil, digest: '123456')
+ def stub_manifest_head(image, tag, status: 200, body: nil, headers: {})
manifest_url = registry.manifest_url(image, tag)
stub_full_request(manifest_url, method: :head)
- .to_return(status: status, body: body, headers: { 'docker-content-digest' => digest } )
+      .to_return(status: status, body: body, headers: headers)
end
def stub_blob_download(image, blob_sha, status = 200, body = '123456')
diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb
index 35c298a4d48..d123c51628a 100644
--- a/spec/support/helpers/graphql_helpers.rb
+++ b/spec/support/helpers/graphql_helpers.rb
@@ -237,6 +237,10 @@ module GraphqlHelpers
query_graphql_path([[name, args], node_selection], fields)
end
+ def query_graphql_fragment(name)
+ "... on #{name} { #{all_graphql_fields_for(name)} }"
+ end
+
# e.g:
# query_graphql_path(%i[foo bar baz], all_graphql_fields_for('Baz'))
# => foo { bar { baz { x y z } } }
@@ -510,8 +514,12 @@ module GraphqlHelpers
end
end
- def global_id_of(model)
- model.to_global_id.to_s
+ def global_id_of(model, id: nil, model_name: nil)
+ if id || model_name
+ ::Gitlab::GlobalId.build(model, id: id, model_name: model_name).to_s
+ else
+ model.to_global_id.to_s
+ end
end
def missing_required_argument(path, argument)
diff --git a/spec/support/helpers/next_found_instance_of.rb b/spec/support/helpers/next_found_instance_of.rb
index ff34fcdd1d3..feb63f90211 100644
--- a/spec/support/helpers/next_found_instance_of.rb
+++ b/spec/support/helpers/next_found_instance_of.rb
@@ -6,7 +6,7 @@ module NextFoundInstanceOf
def expect_next_found_instance_of(klass)
check_if_active_record!(klass)
- stub_allocate(expect(klass)) do |expectation|
+ stub_allocate(expect(klass), klass) do |expectation|
yield(expectation)
end
end
@@ -14,7 +14,7 @@ module NextFoundInstanceOf
def allow_next_found_instance_of(klass)
check_if_active_record!(klass)
- stub_allocate(allow(klass)) do |allowance|
+ stub_allocate(allow(klass), klass) do |allowance|
yield(allowance)
end
end
@@ -25,9 +25,17 @@ module NextFoundInstanceOf
raise ArgumentError.new(ERROR_MESSAGE) unless klass < ActiveRecord::Base
end
- def stub_allocate(target)
+ def stub_allocate(target, klass)
target.to receive(:allocate).and_wrap_original do |method|
- method.call.tap { |allocation| yield(allocation) }
+ method.call.tap do |allocation|
+ # ActiveRecord::Core.allocate returns a frozen object:
+ # https://github.com/rails/rails/blob/291a3d2ef29a3842d1156ada7526f4ee60dd2b59/activerecord/lib/active_record/core.rb#L620
+        # This is unexpected behavior and probably a bug in Rails.
+        # For now, work around it by resetting the attributes to their defaults, which unfreezes the object.
+ allocation.instance_variable_set(:@attributes, klass._default_attributes)
+
+ yield(allocation)
+ end
end
end
end
diff --git a/spec/support/helpers/smime_helper.rb b/spec/support/helpers/smime_helper.rb
index 261aef9518e..fa16c433c6b 100644
--- a/spec/support/helpers/smime_helper.rb
+++ b/spec/support/helpers/smime_helper.rb
@@ -52,7 +52,7 @@ module SmimeHelper
cert.add_extension(extension_factory.create_extension('extendedKeyUsage', 'clientAuth,emailProtection', false))
end
- cert.sign(signed_by&.fetch(:key, nil) || key, OpenSSL::Digest::SHA256.new)
+ cert.sign(signed_by&.fetch(:key, nil) || key, OpenSSL::Digest.new('SHA256'))
{ key: key, cert: cert }
end
diff --git a/spec/support/helpers/snowplow_helpers.rb b/spec/support/helpers/snowplow_helpers.rb
index 70a4eadd8de..a6de8dabdac 100644
--- a/spec/support/helpers/snowplow_helpers.rb
+++ b/spec/support/helpers/snowplow_helpers.rb
@@ -46,7 +46,7 @@ module SnowplowHelpers
# }
# ]
# )
- def expect_snowplow_event(category:, action:, context: nil, **kwargs)
+ def expect_snowplow_event(category:, action:, context: nil, standard_context: nil, **kwargs)
if context
kwargs[:context] = []
context.each do |c|
@@ -56,6 +56,14 @@ module SnowplowHelpers
end
end
+ if standard_context
+ expect(Gitlab::Tracking::StandardContext)
+ .to have_received(:new)
+ .with(**standard_context)
+
+ kwargs[:standard_context] = an_instance_of(Gitlab::Tracking::StandardContext)
+ end
+
expect(Gitlab::Tracking).to have_received(:event) # rubocop:disable RSpec/ExpectGitlabTracking
.with(category, action, **kwargs).at_least(:once)
end
diff --git a/spec/support/helpers/sorting_helper.rb b/spec/support/helpers/sorting_helper.rb
index 3801d25fb63..f19f8c12928 100644
--- a/spec/support/helpers/sorting_helper.rb
+++ b/spec/support/helpers/sorting_helper.rb
@@ -17,4 +17,35 @@ module SortingHelper
click_link value
end
end
+
+ def nils_last(value)
+ NilsLast.new(value)
+ end
+
+ class NilsLast
+ include Comparable
+
+ attr_reader :value
+ delegate :==, :eql?, :hash, to: :value
+
+ def initialize(value)
+ @value = value
+ @reverse = false
+ end
+
+ def <=>(other)
+ return unless other.is_a?(self.class)
+ return 0 if value.nil? && other.value.nil?
+ return 1 if value.nil?
+ return -1 if other.value.nil?
+
+ int = value <=> other.value
+ @reverse ? -int : int
+ end
+
+ def -@
+ @reverse = true
+ self
+ end
+ end
end
diff --git a/spec/support/helpers/stub_configuration.rb b/spec/support/helpers/stub_configuration.rb
index 3b733a2e57a..9851a3de9e9 100644
--- a/spec/support/helpers/stub_configuration.rb
+++ b/spec/support/helpers/stub_configuration.rb
@@ -121,6 +121,12 @@ module StubConfiguration
allow(::Gitlab.config.packages).to receive_messages(to_settings(messages))
end
+ def stub_maintenance_mode_setting(value)
+ allow(Gitlab::CurrentSettings).to receive(:current_application_settings?).and_return(true)
+
+ stub_application_setting(maintenance_mode: value)
+ end
+
private
# Modifies stubbed messages to also stub possible predicate versions
diff --git a/spec/support/helpers/stub_object_storage.rb b/spec/support/helpers/stub_object_storage.rb
index dc54a21d0fa..0d0ac171baa 100644
--- a/spec/support/helpers/stub_object_storage.rb
+++ b/spec/support/helpers/stub_object_storage.rb
@@ -85,6 +85,13 @@ module StubObjectStorage
**params)
end
+ def stub_composer_cache_object_storage(**params)
+ stub_object_storage_uploader(config: Gitlab.config.packages.object_store,
+ uploader: ::Packages::Composer::CacheUploader,
+ remote_directory: 'packages',
+ **params)
+ end
+
def stub_uploads_object_storage(uploader = described_class, **params)
stub_object_storage_uploader(config: Gitlab.config.uploads.object_store,
uploader: uploader,
diff --git a/spec/support/helpers/wait_for_requests.rb b/spec/support/helpers/wait_for_requests.rb
index 43060e571a9..8fd9bb47053 100644
--- a/spec/support/helpers/wait_for_requests.rb
+++ b/spec/support/helpers/wait_for_requests.rb
@@ -52,6 +52,6 @@ module WaitForRequests
end
def finished_all_ajax_requests?
- Capybara.page.evaluate_script('window.pendingRequests || window.pendingRailsUJSRequests || 0').zero? # rubocop:disable Style/NumericPredicate
+ Capybara.page.evaluate_script('window.pendingRequests || window.pendingApolloRequests || window.pendingRailsUJSRequests || 0').zero? # rubocop:disable Style/NumericPredicate
end
end
diff --git a/spec/support/matchers/pushed_frontend_feature_flags_matcher.rb b/spec/support/matchers/pushed_frontend_feature_flags_matcher.rb
new file mode 100644
index 00000000000..b49d4da8cda
--- /dev/null
+++ b/spec/support/matchers/pushed_frontend_feature_flags_matcher.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+RSpec::Matchers.define :have_pushed_frontend_feature_flags do |expected|
+ def to_js(key, value)
+ "\"#{key}\":#{value}"
+ end
+
+ match do |actual|
+ expected.all? do |feature_flag_name, enabled|
+ page.html.include?(to_js(feature_flag_name, enabled))
+ end
+ end
+
+ failure_message do |actual|
+ missing = expected.select do |feature_flag_name, enabled|
+ !page.html.include?(to_js(feature_flag_name, enabled))
+ end
+
+ formatted_missing_flags = missing.map { |feature_flag_name, enabled| to_js(feature_flag_name, enabled) }.join("\n")
+
+ "The following feature flag(s) cannot be found in the frontend HTML source: #{formatted_missing_flags}"
+ end
+end
diff --git a/spec/support/matchers/track_self_describing_event_matcher.rb b/spec/support/matchers/track_self_describing_event_matcher.rb
new file mode 100644
index 00000000000..c3723d2418f
--- /dev/null
+++ b/spec/support/matchers/track_self_describing_event_matcher.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+RSpec::Matchers.define :track_self_describing_event do |schema, data|
+ match do
+ expect(Gitlab::Tracking).to have_received(:self_describing_event)
+ .with(schema, data: data)
+ end
+
+ match_when_negated do
+ expect(Gitlab::Tracking).not_to have_received(:self_describing_event)
+ end
+end
diff --git a/spec/support/shared_contexts/navbar_structure_context.rb b/spec/support/shared_contexts/navbar_structure_context.rb
index 57d8320b76a..3fd4f2698e9 100644
--- a/spec/support/shared_contexts/navbar_structure_context.rb
+++ b/spec/support/shared_contexts/navbar_structure_context.rb
@@ -18,7 +18,8 @@ RSpec.shared_context 'project navbar structure' do
{
nav_item: _('Security & Compliance'),
nav_sub_items: [
- _('Audit Events')
+ _('Configuration'),
+ (_('Audit Events') if Gitlab.ee?)
]
}
end
@@ -71,7 +72,7 @@ RSpec.shared_context 'project navbar structure' do
_('Schedules')
]
},
- (security_and_compliance_nav_item if Gitlab.ee?),
+ security_and_compliance_nav_item,
{
nav_item: _('Operations'),
nav_sub_items: [
@@ -190,7 +191,7 @@ RSpec.shared_context 'group navbar structure' do
nav_item: _('Merge Requests'),
nav_sub_items: []
},
- (security_and_compliance_nav_item if Gitlab.ee?),
+ security_and_compliance_nav_item,
(push_rules_nav_item if Gitlab.ee?),
{
nav_item: _('Kubernetes'),
diff --git a/spec/support/shared_contexts/requests/api/conan_packages_shared_context.rb b/spec/support/shared_contexts/requests/api/conan_packages_shared_context.rb
index 580ebf00dcb..7ad73ea18f4 100644
--- a/spec/support/shared_contexts/requests/api/conan_packages_shared_context.rb
+++ b/spec/support/shared_contexts/requests/api/conan_packages_shared_context.rb
@@ -22,7 +22,7 @@ RSpec.shared_context 'conan api setup' do
let(:jwt_secret) do
OpenSSL::HMAC.hexdigest(
- OpenSSL::Digest::SHA256.new,
+ OpenSSL::Digest.new('SHA256'),
base_secret,
Gitlab::ConanToken::HMAC_KEY
)
diff --git a/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb b/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb
index f89d52f81ad..7f49d20c83e 100644
--- a/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb
+++ b/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb
@@ -3,6 +3,8 @@
RSpec.shared_examples 'multiple issue boards' do
context 'authorized user' do
before do
+ stub_feature_flags(board_new_list: false)
+
parent.add_maintainer(user)
login_as(user)
diff --git a/spec/support/shared_examples/features/comment_and_close_button_shared_examples.rb b/spec/support/shared_examples/features/comment_and_close_button_shared_examples.rb
new file mode 100644
index 00000000000..4ee2840ed9f
--- /dev/null
+++ b/spec/support/shared_examples/features/comment_and_close_button_shared_examples.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'page with comment and close button' do |button_text|
+ context 'when remove_comment_close_reopen feature flag is enabled' do
+ before do
+ stub_feature_flags(remove_comment_close_reopen: true)
+ setup
+ end
+
+    it "does not show the #{button_text} button" do
+ within '.note-form-actions' do
+ expect(page).not_to have_button(button_text)
+ end
+ end
+ end
+
+ context 'when remove_comment_close_reopen feature flag is disabled' do
+ before do
+ stub_feature_flags(remove_comment_close_reopen: false)
+ setup
+ end
+
+    it "shows the #{button_text} button" do
+ within '.note-form-actions' do
+ expect(page).to have_button(button_text)
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/features/discussion_comments_shared_example.rb b/spec/support/shared_examples/features/discussion_comments_shared_example.rb
index 560cfbfb117..6bebd59ed70 100644
--- a/spec/support/shared_examples/features/discussion_comments_shared_example.rb
+++ b/spec/support/shared_examples/features/discussion_comments_shared_example.rb
@@ -150,12 +150,13 @@ RSpec.shared_examples 'thread comments' do |resource_name|
wait_for_requests
end
- it 'clicking "Start thread" will post a thread' do
+ it 'clicking "Start thread" will post a thread and show a reply component' do
expect(page).to have_content(comment)
new_comment = all(comments_selector).last
expect(new_comment).to have_selector('.discussion')
+ expect(new_comment).to have_css('.discussion-with-resolve-btn')
end
if resource_name =~ /(issue|merge request)/
diff --git a/spec/support/shared_examples/features/editable_merge_request_shared_examples.rb b/spec/support/shared_examples/features/editable_merge_request_shared_examples.rb
index 2fff4137934..ccd063faac4 100644
--- a/spec/support/shared_examples/features/editable_merge_request_shared_examples.rb
+++ b/spec/support/shared_examples/features/editable_merge_request_shared_examples.rb
@@ -48,7 +48,7 @@ RSpec.shared_examples 'an editable merge request' do
end
page.within '.reviewer' do
- expect(page).to have_content user.name
+ expect(page).to have_content user.username
end
page.within '.milestone' do
diff --git a/spec/support/shared_examples/features/multiple_reviewers_mr_shared_examples.rb b/spec/support/shared_examples/features/multiple_reviewers_mr_shared_examples.rb
index 48cde90bd9b..ad6ca3e1900 100644
--- a/spec/support/shared_examples/features/multiple_reviewers_mr_shared_examples.rb
+++ b/spec/support/shared_examples/features/multiple_reviewers_mr_shared_examples.rb
@@ -40,7 +40,7 @@ RSpec.shared_examples 'multiple reviewers merge request' do |action, save_button
# Closing dropdown to persist
click_link 'Edit'
- expect(page).to have_content user2.name
+ expect(page).to have_content user2.username
end
end
end
diff --git a/spec/support/shared_examples/graphql/mutations/can_mutate_spammable_examples.rb b/spec/support/shared_examples/graphql/mutations/can_mutate_spammable_examples.rb
new file mode 100644
index 00000000000..d294f034d2e
--- /dev/null
+++ b/spec/support/shared_examples/graphql/mutations/can_mutate_spammable_examples.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.shared_examples 'a mutation which can mutate a spammable' do
+ describe "#additional_spam_params" do
+ it 'passes additional spam params to the service' do
+ args = [
+ anything,
+ anything,
+ hash_including(
+ api: true,
+ request: instance_of(ActionDispatch::Request),
+ captcha_response: captcha_response,
+ spam_log_id: spam_log_id
+ )
+ ]
+ expect(service).to receive(:new).with(*args).and_call_original
+
+ subject
+ end
+ end
+
+ describe "#with_spam_action_fields" do
+ it 'resolves with spam action fields' do
+ subject
+
+      # NOTE: We do not need to assert on the specific values of spam action fields here; we only need
+ # to verify that #with_spam_action_fields was invoked and that the fields are present in the
+ # response. The specific behavior of #with_spam_action_fields is covered in the
+ # CanMutateSpammable unit tests.
+ expect(mutation_response.keys)
+ .to include('spam', 'spamLogId', 'needsCaptchaResponse', 'captchaSiteKey')
+ end
+ end
+end
diff --git a/spec/support/shared_examples/graphql/mutations/http_integrations_shared_examples.rb b/spec/support/shared_examples/graphql/mutations/http_integrations_shared_examples.rb
index 0338eb43f8d..4468af1a603 100644
--- a/spec/support/shared_examples/graphql/mutations/http_integrations_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/mutations/http_integrations_shared_examples.rb
@@ -17,3 +17,78 @@ RSpec.shared_examples 'creating a new HTTP integration' do
expect(integration_response['apiUrl']).to eq(nil)
end
end
+
+RSpec.shared_examples 'updating an existing HTTP integration' do
+ it 'updates the integration' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ integration_response = mutation_response['integration']
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(integration_response['id']).to eq(GitlabSchema.id_from_object(integration).to_s)
+ expect(integration_response['name']).to eq('Modified Name')
+ expect(integration_response['active']).to be_falsey
+ expect(integration_response['url']).to include('modified-name')
+ end
+end
+
+RSpec.shared_examples 'validating the payload_example' do
+ context 'with invalid payloadExample attribute' do
+ let(:payload_example) { 'not a JSON' }
+
+ it 'responds with errors' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect_graphql_errors_to_include(/was provided invalid value for payloadExample \(Invalid JSON string/)
+ end
+ end
+
+ it 'validates the payload_example size' do
+ allow(::Gitlab::Utils::DeepSize)
+ .to receive(:new)
+ .with(Gitlab::Json.parse(payload_example))
+ .and_return(double(valid?: false))
+
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect_graphql_errors_to_include(/payloadExample JSON is too big/)
+ end
+end
+
+RSpec.shared_examples 'validating the payload_attribute_mappings' do
+  context 'with invalid payloadAttributeMapping attribute that does not contain fieldName' do
+ let(:payload_attribute_mappings) do
+ [{ path: %w[alert name], type: 'STRING' }]
+ end
+
+ it 'responds with errors' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect_graphql_errors_to_include(/was provided invalid value for payloadAttributeMappings\.0\.fieldName \(Expected value to not be null/)
+ end
+ end
+
+  context 'with invalid payloadAttributeMapping attribute that does not contain path' do
+ let(:payload_attribute_mappings) do
+ [{ fieldName: 'TITLE', type: 'STRING' }]
+ end
+
+ it 'responds with errors' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect_graphql_errors_to_include(/was provided invalid value for payloadAttributeMappings\.0\.path \(Expected value to not be null/)
+ end
+ end
+
+  context 'with invalid payloadAttributeMapping attribute that does not contain type' do
+ let(:payload_attribute_mappings) do
+ [{ fieldName: 'TITLE', path: %w[alert name] }]
+ end
+
+ it 'responds with errors' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect_graphql_errors_to_include(/was provided invalid value for payloadAttributeMappings\.0\.type \(Expected value to not be null/)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/graphql/mutations/spammable_mutation_fields_examples.rb b/spec/support/shared_examples/graphql/mutations/spammable_mutation_fields_examples.rb
deleted file mode 100644
index 8678b23ad31..00000000000
--- a/spec/support/shared_examples/graphql/mutations/spammable_mutation_fields_examples.rb
+++ /dev/null
@@ -1,50 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.shared_examples 'spam flag is present' do
- specify :aggregate_failures do
- subject
-
- expect(mutation_response).to have_key('spam')
- expect(mutation_response['spam']).to be_falsey
- end
-end
-
-RSpec.shared_examples 'can raise spam flag' do
- it 'spam parameters are passed to the service' do
- args = [anything, anything, hash_including(api: true, request: instance_of(ActionDispatch::Request))]
- expect(service).to receive(:new).with(*args).and_call_original
-
- subject
- end
-
- context 'when the snippet is detected as spam' do
- it 'raises spam flag' do
- allow_next_instance_of(service) do |instance|
- allow(instance).to receive(:spam_check) do |snippet, user, _|
- snippet.spam!
- end
- end
-
- subject
-
- expect(mutation_response['spam']).to be true
- expect(mutation_response['errors']).to include("Your snippet has been recognized as spam and has been discarded.")
- end
- end
-
- context 'when :snippet_spam flag is disabled' do
- before do
- stub_feature_flags(snippet_spam: false)
- end
-
- it 'request parameter is not passed to the service' do
- expect(service).to receive(:new)
- .with(anything, anything, hash_not_including(request: instance_of(ActionDispatch::Request)))
- .and_call_original
-
- subject
- end
- end
-end
diff --git a/spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb b/spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb
index fe99b1cacd9..42f82987989 100644
--- a/spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb
+++ b/spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb
@@ -9,19 +9,30 @@ RSpec.shared_examples 'AtomicInternalId' do |validate_presence: true|
end
describe 'Validation' do
- before do
- allow_any_instance_of(described_class).to receive(:"ensure_#{scope}_#{internal_id_attribute}!")
-
- instance.valid?
- end
-
context 'when presence validation is required' do
before do
skip unless validate_presence
end
- it 'validates presence' do
- expect(instance.errors[internal_id_attribute]).to include("can't be blank")
+ context 'when creating an object' do
+ before do
+ allow_any_instance_of(described_class).to receive(:"ensure_#{scope}_#{internal_id_attribute}!")
+ end
+
+ it 'raises an error if the internal id is blank' do
+ expect { instance.save! }.to raise_error(AtomicInternalId::MissingValueError)
+ end
+ end
+
+ context 'when updating an object' do
+ it 'raises an error if the internal id is blank' do
+ instance.save!
+
+ write_internal_id(nil)
+ allow(instance).to receive(:"ensure_#{scope}_#{internal_id_attribute}!")
+
+ expect { instance.save! }.to raise_error(AtomicInternalId::MissingValueError)
+ end
end
end
@@ -30,8 +41,27 @@ RSpec.shared_examples 'AtomicInternalId' do |validate_presence: true|
skip if validate_presence
end
- it 'does not validate presence' do
- expect(instance.errors[internal_id_attribute]).to be_empty
+ context 'when creating an object' do
+ before do
+ allow_any_instance_of(described_class).to receive(:"ensure_#{scope}_#{internal_id_attribute}!")
+ end
+
+ it 'does not raise an error if the internal id is blank' do
+ expect(read_internal_id).to be_nil
+
+ expect { instance.save! }.not_to raise_error
+ end
+ end
+
+ context 'when updating an object' do
+ it 'does not raise an error if the internal id is blank' do
+ instance.save!
+
+ write_internal_id(nil)
+ allow(instance).to receive(:"ensure_#{scope}_#{internal_id_attribute}!")
+
+ expect { instance.save! }.not_to raise_error
+ end
end
end
end
@@ -76,6 +106,51 @@ RSpec.shared_examples 'AtomicInternalId' do |validate_presence: true|
end
end
+ describe 'unsetting the instance internal id on rollback' do
+ context 'when the internal id has been changed' do
+ context 'when the internal id is automatically set' do
+ it 'clears it on the instance' do
+ expect_iid_to_be_set_and_rollback
+
+ expect(read_internal_id).to be_nil
+ end
+ end
+
+ context 'when the internal id is manually set' do
+ it 'does not clear it on the instance' do
+ write_internal_id(100)
+
+ expect_iid_to_be_set_and_rollback
+
+ expect(read_internal_id).not_to be_nil
+ end
+ end
+ end
+
+ context 'when the internal id has not been changed' do
+ it 'preserves the value on the instance' do
+ instance.save!
+ original_id = read_internal_id
+
+ expect(original_id).not_to be_nil
+
+ expect_iid_to_be_set_and_rollback
+
+ expect(read_internal_id).to eq(original_id)
+ end
+ end
+
+ def expect_iid_to_be_set_and_rollback
+ ActiveRecord::Base.transaction(requires_new: true) do
+ instance.save!
+
+ expect(read_internal_id).not_to be_nil
+
+ raise ActiveRecord::Rollback
+ end
+ end
+ end
+
describe 'supply of internal ids' do
let(:scope_value) { scope_attrs.each_value.first }
let(:method_name) { :"with_#{scope}_#{internal_id_attribute}_supply" }
diff --git a/spec/support/shared_examples/models/concerns/can_housekeep_repository_shared_examples.rb b/spec/support/shared_examples/models/concerns/can_housekeep_repository_shared_examples.rb
deleted file mode 100644
index 2f0b95427d2..00000000000
--- a/spec/support/shared_examples/models/concerns/can_housekeep_repository_shared_examples.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_examples 'can housekeep repository' do
- context 'with a clean redis state', :clean_gitlab_redis_shared_state do
- describe '#pushes_since_gc' do
- context 'without any pushes' do
- it 'returns 0' do
- expect(resource.pushes_since_gc).to eq(0)
- end
- end
-
- context 'with a number of pushes' do
- it 'returns the number of pushes' do
- 3.times { resource.increment_pushes_since_gc }
-
- expect(resource.pushes_since_gc).to eq(3)
- end
- end
- end
-
- describe '#increment_pushes_since_gc' do
- it 'increments the number of pushes since the last GC' do
- 3.times { resource.increment_pushes_since_gc }
-
- expect(resource.pushes_since_gc).to eq(3)
- end
- end
-
- describe '#reset_pushes_since_gc' do
- it 'resets the number of pushes since the last GC' do
- 3.times { resource.increment_pushes_since_gc }
-
- resource.reset_pushes_since_gc
-
- expect(resource.pushes_since_gc).to eq(0)
- end
- end
-
- describe '#pushes_since_gc_redis_shared_state_key' do
- it 'returns the proper redis key format' do
- expect(resource.send(:pushes_since_gc_redis_shared_state_key)).to eq("#{resource_key}/#{resource.id}/pushes_since_gc")
- end
- end
- end
-end
diff --git a/spec/support/shared_examples/models/concerns/can_move_repository_storage_shared_examples.rb b/spec/support/shared_examples/models/concerns/can_move_repository_storage_shared_examples.rb
index 85a2c6f1449..8deeecea30d 100644
--- a/spec/support/shared_examples/models/concerns/can_move_repository_storage_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/can_move_repository_storage_shared_examples.rb
@@ -2,11 +2,12 @@
RSpec.shared_examples 'can move repository storage' do
let(:container) { raise NotImplementedError }
+ let(:repository) { container.repository }
describe '#set_repository_read_only!' do
it 'makes the repository read-only' do
expect { container.set_repository_read_only! }
- .to change(container, :repository_read_only?)
+ .to change { container.repository_read_only? }
.from(false)
.to(true)
end
@@ -28,7 +29,7 @@ RSpec.shared_examples 'can move repository storage' do
allow(container).to receive(:git_transfer_in_progress?) { true }
expect { container.set_repository_read_only!(skip_git_transfer_check: true) }
- .to change(container, :repository_read_only?)
+ .to change { container.repository_read_only? }
.from(false)
.to(true)
end
@@ -38,16 +39,16 @@ RSpec.shared_examples 'can move repository storage' do
describe '#set_repository_writable!' do
it 'sets repository_read_only to false' do
expect { container.set_repository_writable! }
- .to change(container, :repository_read_only)
+ .to change { container.repository_read_only? }
.from(true).to(false)
end
end
describe '#reference_counter' do
it 'returns a Gitlab::ReferenceCounter object' do
- expect(Gitlab::ReferenceCounter).to receive(:new).with(container.repository.gl_repository).and_call_original
+ expect(Gitlab::ReferenceCounter).to receive(:new).with(repository.gl_repository).and_call_original
- result = container.reference_counter(type: container.repository.repo_type)
+ result = container.reference_counter(type: repository.repo_type)
expect(result).to be_a Gitlab::ReferenceCounter
end
diff --git a/spec/support/shared_examples/models/concerns/has_repository_shared_examples.rb b/spec/support/shared_examples/models/concerns/has_repository_shared_examples.rb
index 826ee453919..1be4d9b80a4 100644
--- a/spec/support/shared_examples/models/concerns/has_repository_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/has_repository_shared_examples.rb
@@ -152,36 +152,4 @@ RSpec.shared_examples 'model with repository' do
it { is_expected.to respond_to(:disk_path) }
it { is_expected.to respond_to(:gitlab_shell) }
end
-
- describe '.pick_repository_storage' do
- subject { described_class.pick_repository_storage }
-
- before do
- storages = {
- 'default' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/repositories'),
- 'picked' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/repositories')
- }
- allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
- end
-
- it 'picks storage from ApplicationSetting' do
- expect(Gitlab::CurrentSettings).to receive(:pick_repository_storage).and_return('picked')
-
- expect(subject).to eq('picked')
- end
-
- it 'picks from the available storages based on weight', :request_store do
- stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
- Gitlab::CurrentSettings.expire_current_application_settings
- Gitlab::CurrentSettings.current_application_settings
-
- settings = ApplicationSetting.last
- settings.repository_storages_weighted = { 'picked' => 100, 'default' => 0 }
- settings.save!
-
- expect(Gitlab::CurrentSettings.repository_storages_weighted).to eq({ 'default' => 100 })
- expect(subject).to eq('picked')
- expect(Gitlab::CurrentSettings.repository_storages_weighted).to eq({ 'default' => 0, 'picked' => 100 })
- end
- end
end
diff --git a/spec/support/shared_examples/models/concerns/repositories/can_housekeep_repository_shared_examples.rb b/spec/support/shared_examples/models/concerns/repositories/can_housekeep_repository_shared_examples.rb
index 2f0b95427d2..4006b8226ce 100644
--- a/spec/support/shared_examples/models/concerns/repositories/can_housekeep_repository_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/repositories/can_housekeep_repository_shared_examples.rb
@@ -41,5 +41,11 @@ RSpec.shared_examples 'can housekeep repository' do
expect(resource.send(:pushes_since_gc_redis_shared_state_key)).to eq("#{resource_key}/#{resource.id}/pushes_since_gc")
end
end
+
+ describe '#git_garbage_collect_worker_klass' do
+ it 'defines a git garbage collect worker' do
+ expect(resource.git_garbage_collect_worker_klass).to eq(expected_worker_class)
+ end
+ end
end
end
diff --git a/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb b/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb
index 4c617f3ba46..7a09a437ab3 100644
--- a/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb
@@ -33,7 +33,7 @@ RSpec.shared_examples 'handles repository moves' do
subject { build(repository_storage_factory_key, container: container) }
it "does not allow the container to be read-only on create" do
- container.update!(repository_read_only: true)
+ container.set_repository_read_only!
expect(subject).not_to be_valid
expect(subject.errors[error_key].first).to match(/is read only/)
@@ -45,8 +45,8 @@ RSpec.shared_examples 'handles repository moves' do
context 'destination_storage_name' do
subject { build(repository_storage_factory_key) }
- it 'picks storage from ApplicationSetting' do
- expect(Gitlab::CurrentSettings).to receive(:pick_repository_storage).and_return('picked').at_least(:once)
+ it 'can pick new storage' do
+ expect(Repository).to receive(:pick_storage_shard).and_return('picked').at_least(:once)
expect(subject.destination_storage_name).to eq('picked')
end
diff --git a/spec/support/shared_examples/models/packages/debian/component_shared_examples.rb b/spec/support/shared_examples/models/packages/debian/component_shared_examples.rb
new file mode 100644
index 00000000000..fba421e4dc8
--- /dev/null
+++ b/spec/support/shared_examples/models/packages/debian/component_shared_examples.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.shared_examples 'Debian Distribution Component' do |factory, container, can_freeze|
+ let_it_be_with_refind(:component) { create(factory) } # rubocop:disable Rails/SaveBang
+ let_it_be(:component_same_distribution, freeze: can_freeze) { create(factory, distribution: component.distribution) }
+ let_it_be(:component_same_name, freeze: can_freeze) { create(factory, name: component.name) }
+
+ subject { component }
+
+ describe 'relationships' do
+ it { is_expected.to belong_to(:distribution).class_name("Packages::Debian::#{container.capitalize}Distribution").inverse_of(:components) }
+ end
+
+ describe 'validations' do
+ describe "#distribution" do
+ it { is_expected.to validate_presence_of(:distribution) }
+ end
+
+ describe '#name' do
+ it { is_expected.to validate_presence_of(:name) }
+
+ it { is_expected.to allow_value('main').for(:name) }
+ it { is_expected.to allow_value('non-free').for(:name) }
+ it { is_expected.to allow_value('a' * 255).for(:name) }
+ it { is_expected.not_to allow_value('a' * 256).for(:name) }
+ it { is_expected.not_to allow_value('non/free').for(:name) }
+ it { is_expected.not_to allow_value('hé').for(:name) }
+ end
+ end
+
+ describe 'scopes' do
+ describe '.with_distribution' do
+ subject { described_class.with_distribution(component.distribution) }
+
+ it 'does not return other distributions' do
+ expect(subject.to_a).to eq([component, component_same_distribution])
+ end
+ end
+
+ describe '.with_name' do
+ subject { described_class.with_name(component.name) }
+
+ it 'does not return components with other names' do
+ expect(subject.to_a).to eq([component, component_same_name])
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb b/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb
index af87d30099f..0587ad4c51c 100644
--- a/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb
+++ b/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb
@@ -17,6 +17,7 @@ RSpec.shared_examples 'Debian Distribution' do |factory, container, can_freeze|
it { is_expected.to belong_to(container) }
it { is_expected.to belong_to(:creator).class_name('User') }
+ it { is_expected.to have_many(:components).class_name("Packages::Debian::#{container.capitalize}Component").inverse_of(:distribution) }
it { is_expected.to have_many(:architectures).class_name("Packages::Debian::#{container.capitalize}Architecture").inverse_of(:distribution) }
end
diff --git a/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
index 83ba72c12aa..8616a3bd0b4 100644
--- a/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
@@ -201,7 +201,7 @@ RSpec.shared_examples 'rejects Debian access with unknown project id' do
let(:project) { double(id: non_existing_record_id) }
context 'as anonymous' do
- it_behaves_like 'Debian project repository GET request', :anonymous, true, :not_found, nil
+ it_behaves_like 'Debian project repository GET request', :anonymous, true, :unauthorized, nil
end
context 'as authenticated user' do
@@ -228,13 +228,13 @@ RSpec.shared_examples 'Debian project repository GET endpoint' do |success_statu
'PUBLIC' | :anonymous | false | true | success_status | success_body
'PRIVATE' | :developer | true | true | success_status | success_body
'PRIVATE' | :guest | true | true | :forbidden | nil
- 'PRIVATE' | :developer | true | false | :not_found | nil
- 'PRIVATE' | :guest | true | false | :not_found | nil
+ 'PRIVATE' | :developer | true | false | :unauthorized | nil
+ 'PRIVATE' | :guest | true | false | :unauthorized | nil
'PRIVATE' | :developer | false | true | :not_found | nil
'PRIVATE' | :guest | false | true | :not_found | nil
- 'PRIVATE' | :developer | false | false | :not_found | nil
- 'PRIVATE' | :guest | false | false | :not_found | nil
- 'PRIVATE' | :anonymous | false | true | :not_found | nil
+ 'PRIVATE' | :developer | false | false | :unauthorized | nil
+ 'PRIVATE' | :guest | false | false | :unauthorized | nil
+ 'PRIVATE' | :anonymous | false | true | :unauthorized | nil
end
with_them do
@@ -263,13 +263,13 @@ RSpec.shared_examples 'Debian project repository PUT endpoint' do |success_statu
'PUBLIC' | :anonymous | false | true | :unauthorized | nil
'PRIVATE' | :developer | true | true | success_status | nil
'PRIVATE' | :guest | true | true | :forbidden | nil
- 'PRIVATE' | :developer | true | false | :not_found | nil
- 'PRIVATE' | :guest | true | false | :not_found | nil
+ 'PRIVATE' | :developer | true | false | :unauthorized | nil
+ 'PRIVATE' | :guest | true | false | :unauthorized | nil
'PRIVATE' | :developer | false | true | :not_found | nil
'PRIVATE' | :guest | false | true | :not_found | nil
- 'PRIVATE' | :developer | false | false | :not_found | nil
- 'PRIVATE' | :guest | false | false | :not_found | nil
- 'PRIVATE' | :anonymous | false | true | :not_found | nil
+ 'PRIVATE' | :developer | false | false | :unauthorized | nil
+ 'PRIVATE' | :guest | false | false | :unauthorized | nil
+ 'PRIVATE' | :anonymous | false | true | :unauthorized | nil
end
with_them do
@@ -321,7 +321,7 @@ RSpec.shared_examples 'rejects Debian access with unknown group id' do
let(:group) { double(id: non_existing_record_id) }
context 'as anonymous' do
- it_behaves_like 'Debian group repository GET request', :anonymous, true, :not_found, nil
+ it_behaves_like 'Debian group repository GET request', :anonymous, true, :unauthorized, nil
end
context 'as authenticated user' do
@@ -348,13 +348,13 @@ RSpec.shared_examples 'Debian group repository GET endpoint' do |success_status,
'PUBLIC' | :anonymous | false | true | success_status | success_body
'PRIVATE' | :developer | true | true | success_status | success_body
'PRIVATE' | :guest | true | true | :forbidden | nil
- 'PRIVATE' | :developer | true | false | :not_found | nil
- 'PRIVATE' | :guest | true | false | :not_found | nil
+ 'PRIVATE' | :developer | true | false | :unauthorized | nil
+ 'PRIVATE' | :guest | true | false | :unauthorized | nil
'PRIVATE' | :developer | false | true | :not_found | nil
'PRIVATE' | :guest | false | true | :not_found | nil
- 'PRIVATE' | :developer | false | false | :not_found | nil
- 'PRIVATE' | :guest | false | false | :not_found | nil
- 'PRIVATE' | :anonymous | false | true | :not_found | nil
+ 'PRIVATE' | :developer | false | false | :unauthorized | nil
+ 'PRIVATE' | :guest | false | false | :unauthorized | nil
+ 'PRIVATE' | :anonymous | false | true | :unauthorized | nil
end
with_them do
diff --git a/spec/support/shared_examples/requests/api/graphql/noteable_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/noteable_shared_examples.rb
new file mode 100644
index 00000000000..9cf5bc04f65
--- /dev/null
+++ b/spec/support/shared_examples/requests/api/graphql/noteable_shared_examples.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+# Requires `query(fields)`, `path_to_noteable`, `project`, and `noteable` bindings
+RSpec.shared_examples 'a noteable graphql type we can query' do
+ let(:note_factory) { :note }
+ let(:discussion_factory) { :discussion_note }
+
+ describe '.discussions' do
+ let(:fields) do
+ "discussions { nodes { #{all_graphql_fields_for('Discussion')} } }"
+ end
+
+ def expected
+ noteable.discussions.map do |discussion|
+ include(
+ 'id' => global_id_of(discussion),
+ 'replyId' => global_id_of(discussion, id: discussion.reply_id),
+ 'createdAt' => discussion.created_at.iso8601,
+ 'notes' => include(
+ 'nodes' => have_attributes(size: discussion.notes.size)
+ )
+ )
+ end
+ end
+
+ it 'can fetch discussions' do
+ create(discussion_factory, project: project, noteable: noteable)
+
+ post_graphql(query(fields), current_user: current_user)
+
+ expect(graphql_data_at(*path_to_noteable, :discussions, :nodes))
+ .to match_array(expected)
+ end
+ end
+
+ describe '.notes' do
+ let(:fields) do
+ "notes { nodes { #{all_graphql_fields_for('Note', max_depth: 2)} } }"
+ end
+
+ def expected
+ noteable.notes.map do |note|
+ include(
+ 'id' => global_id_of(note),
+ 'project' => include('id' => global_id_of(project)),
+ 'author' => include('id' => global_id_of(note.author)),
+ 'createdAt' => note.created_at.iso8601,
+ 'body' => eq(note.note)
+ )
+ end
+ end
+
+ it 'can fetch notes' do
+ create(note_factory, project: project, noteable: noteable)
+
+ post_graphql(query(fields), current_user: current_user)
+
+ expect(graphql_data_at(*path_to_noteable, :notes, :nodes))
+ .to match_array(expected)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/read_user_shared_examples.rb b/spec/support/shared_examples/requests/api/read_user_shared_examples.rb
index 59cd0ab67b4..b9fd997bd2c 100644
--- a/spec/support/shared_examples/requests/api/read_user_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/read_user_shared_examples.rb
@@ -7,21 +7,33 @@ RSpec.shared_examples 'allows the "read_user" scope' do |api_version|
context 'when the requesting token has the "api" scope' do
let(:token) { create(:personal_access_token, scopes: ['api'], user: user) }
- it 'returns a "200" response' do
+ it 'returns a "200" response on get request' do
get api_call.call(path, user, personal_access_token: token, version: version)
expect(response).to have_gitlab_http_status(:ok)
end
+
+ it 'returns a "200" response on head request' do
+ head api_call.call(path, user, personal_access_token: token, version: version)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
context 'when the requesting token has the "read_user" scope' do
let(:token) { create(:personal_access_token, scopes: ['read_user'], user: user) }
- it 'returns a "200" response' do
+ it 'returns a "200" response on get request' do
get api_call.call(path, user, personal_access_token: token, version: version)
expect(response).to have_gitlab_http_status(:ok)
end
+
+ it 'returns a "200" response on head request' do
+ head api_call.call(path, user, personal_access_token: token, version: version)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
context 'when the requesting token does not have any required scope' do
@@ -45,21 +57,33 @@ RSpec.shared_examples 'allows the "read_user" scope' do |api_version|
context 'when the requesting token has the "api" scope' do
let!(:token) { Doorkeeper::AccessToken.create! application_id: application.id, resource_owner_id: user.id, scopes: "api" }
- it 'returns a "200" response' do
+ it 'returns a "200" response on get request' do
get api_call.call(path, user, oauth_access_token: token)
expect(response).to have_gitlab_http_status(:ok)
end
+
+ it 'returns a "200" response on head request' do
+ head api_call.call(path, user, oauth_access_token: token)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
context 'when the requesting token has the "read_user" scope' do
let!(:token) { Doorkeeper::AccessToken.create! application_id: application.id, resource_owner_id: user.id, scopes: "read_user" }
- it 'returns a "200" response' do
+ it 'returns a "200" response on get request' do
get api_call.call(path, user, oauth_access_token: token)
expect(response).to have_gitlab_http_status(:ok)
end
+
+ it 'returns a "200" response on head request' do
+ head api_call.call(path, user, oauth_access_token: token)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
context 'when the requesting token does not have any required scope' do
diff --git a/spec/support/shared_examples/requests/api/repository_storage_moves_shared_examples.rb b/spec/support/shared_examples/requests/api/repository_storage_moves_shared_examples.rb
index b2970fd265d..3ca2b9fa6de 100644
--- a/spec/support/shared_examples/requests/api/repository_storage_moves_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/repository_storage_moves_shared_examples.rb
@@ -85,14 +85,37 @@ RSpec.shared_examples 'repository_storage_moves API' do |container_type|
end
describe "GET /#{container_type}/:id/repository_storage_moves" do
- it_behaves_like 'get container repository storage move list' do
- let(:url) { "/#{container_type}/#{container.id}/repository_storage_moves" }
+ let(:container_id) { container.id }
+ let(:url) { "/#{container_type}/#{container_id}/repository_storage_moves" }
+
+ it_behaves_like 'get container repository storage move list'
+
+ context 'non-existent container' do
+ let(:container_id) { non_existing_record_id }
+
+ it 'returns not found' do
+ get api(url, user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
end
end
describe "GET /#{container_type}/:id/repository_storage_moves/:repository_storage_move_id" do
- it_behaves_like 'get single container repository storage move' do
- let(:url) { "/#{container_type}/#{container.id}/repository_storage_moves/#{repository_storage_move_id}" }
+ let(:container_id) { container.id }
+ let(:url) { "/#{container_type}/#{container_id}/repository_storage_moves/#{repository_storage_move_id}" }
+
+ it_behaves_like 'get single container repository storage move'
+
+ context 'non-existent container' do
+ let(:container_id) { non_existing_record_id }
+ let(:repository_storage_move_id) { storage_move.id }
+
+ it 'returns not found' do
+ get api(url, user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
end
end
@@ -109,7 +132,8 @@ RSpec.shared_examples 'repository_storage_moves API' do |container_type|
end
describe "POST /#{container_type}/:id/repository_storage_moves" do
- let(:url) { "/#{container_type}/#{container.id}/repository_storage_moves" }
+ let(:container_id) { container.id }
+ let(:url) { "/#{container_type}/#{container_id}/repository_storage_moves" }
let(:destination_storage_name) { 'test_second_storage' }
def create_container_repository_storage_move
@@ -154,6 +178,16 @@ RSpec.shared_examples 'repository_storage_moves API' do |container_type|
expect(json_response['destination_storage_name']).to be_present
end
end
+
+ context 'when container does not exist' do
+ let(:container_id) { non_existing_record_id }
+
+ it 'returns not found' do
+ create_container_repository_storage_move
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
end
describe "POST /#{container_type.singularize}_repository_storage_moves" do
diff --git a/spec/support/shared_examples/requests/api/resolvable_discussions_shared_examples.rb b/spec/support/shared_examples/requests/api/resolvable_discussions_shared_examples.rb
index 460e8d57a2b..b5139bd8c99 100644
--- a/spec/support/shared_examples/requests/api/resolvable_discussions_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/resolvable_discussions_shared_examples.rb
@@ -13,6 +13,9 @@ RSpec.shared_examples 'resolvable discussions API' do |parent_type, noteable_typ
end
it "unresolves discussion if resolved is false" do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_unresolve_thread_action).with(user: user)
+
put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
"discussions/#{note.discussion_id}", user), params: { resolved: false }
diff --git a/spec/support/shared_examples/requests/rack_attack_shared_examples.rb b/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
index 3b039049ca9..926da827e75 100644
--- a/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
+++ b/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
@@ -112,7 +112,7 @@ RSpec.shared_examples 'rate-limited token-authenticated requests' do
expect(response).not_to have_gitlab_http_status(:too_many_requests)
end
- arguments = {
+ arguments = a_hash_including({
message: 'Rack_Attack',
env: :throttle,
remote_ip: '127.0.0.1',
@@ -121,7 +121,7 @@ RSpec.shared_examples 'rate-limited token-authenticated requests' do
user_id: user.id,
'meta.user' => user.username,
matched: throttle_types[throttle_setting_prefix]
- }
+ })
expect(Gitlab::AuthLogger).to receive(:error).with(arguments).once
@@ -278,7 +278,7 @@ RSpec.shared_examples 'rate-limited web authenticated requests' do
expect(response).not_to have_gitlab_http_status(:too_many_requests)
end
- arguments = {
+ arguments = a_hash_including({
message: 'Rack_Attack',
env: :throttle,
remote_ip: '127.0.0.1',
@@ -287,7 +287,7 @@ RSpec.shared_examples 'rate-limited web authenticated requests' do
user_id: user.id,
'meta.user' => user.username,
matched: throttle_types[throttle_setting_prefix]
- }
+ })
expect(Gitlab::AuthLogger).to receive(:error).with(arguments).once
expect { request_authenticated_web_url }.not_to exceed_query_limit(control_count)
diff --git a/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb b/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
index f201c7b1780..1fb1b9f79b2 100644
--- a/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
@@ -71,7 +71,7 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
it 'does not enqueue a GC run' do
expect { subject.execute }
- .not_to change(GitGarbageCollectWorker.jobs, :count)
+ .not_to change(Projects::GitGarbageCollectWorker.jobs, :count)
end
end
@@ -84,24 +84,29 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
stub_application_setting(housekeeping_enabled: false)
expect { subject.execute }
- .not_to change(GitGarbageCollectWorker.jobs, :count)
+ .not_to change(Projects::GitGarbageCollectWorker.jobs, :count)
end
it 'enqueues a GC run' do
expect { subject.execute }
- .to change(GitGarbageCollectWorker.jobs, :count).by(1)
+ .to change(Projects::GitGarbageCollectWorker.jobs, :count).by(1)
end
end
end
context 'when the filesystems are the same' do
- let(:destination) { project.repository_storage }
+ before do
+ expect(Gitlab::GitalyClient).to receive(:filesystem_id).twice.and_return(SecureRandom.uuid)
+ end
- it 'bails out and does nothing' do
+ it 'updates the database without trying to move the repository', :aggregate_failures do
result = subject.execute
+ project.reload
- expect(result).to be_error
- expect(result.message).to match(/SameFilesystemError/)
+ expect(result).to be_success
+ expect(project).not_to be_repository_read_only
+ expect(project.repository_storage).to eq('test_second_storage')
+ expect(project.project_repository.shard_name).to eq('test_second_storage')
end
end
diff --git a/spec/support/shared_examples/services/repositories/housekeeping_shared_examples.rb b/spec/support/shared_examples/services/repositories/housekeeping_shared_examples.rb
index a174ae94b75..4c00faee56b 100644
--- a/spec/support/shared_examples/services/repositories/housekeeping_shared_examples.rb
+++ b/spec/support/shared_examples/services/repositories/housekeeping_shared_examples.rb
@@ -3,16 +3,16 @@
RSpec.shared_examples 'housekeeps repository' do
subject { described_class.new(resource) }
- context 'with a clean redis state', :clean_gitlab_redis_shared_state do
+ context 'with a clean redis state', :clean_gitlab_redis_shared_state, :aggregate_failures do
describe '#execute' do
it 'enqueues a sidekiq job' do
expect(subject).to receive(:try_obtain_lease).and_return(:the_uuid)
expect(subject).to receive(:lease_key).and_return(:the_lease_key)
expect(subject).to receive(:task).and_return(:incremental_repack)
- expect(GitGarbageCollectWorker).to receive(:perform_async).with(resource.id, :incremental_repack, :the_lease_key, :the_uuid).and_call_original
+ expect(resource.git_garbage_collect_worker_klass).to receive(:perform_async).with(resource.id, :incremental_repack, :the_lease_key, :the_uuid).and_call_original
Sidekiq::Testing.fake! do
- expect { subject.execute }.to change(GitGarbageCollectWorker.jobs, :size).by(1)
+ expect { subject.execute }.to change(resource.git_garbage_collect_worker_klass.jobs, :size).by(1)
end
end
@@ -38,7 +38,7 @@ RSpec.shared_examples 'housekeeps repository' do
end
it 'does not enqueue a job' do
- expect(GitGarbageCollectWorker).not_to receive(:perform_async)
+ expect(resource.git_garbage_collect_worker_klass).not_to receive(:perform_async)
expect { subject.execute }.to raise_error(Repositories::HousekeepingService::LeaseTaken)
end
@@ -63,16 +63,16 @@ RSpec.shared_examples 'housekeeps repository' do
allow(subject).to receive(:lease_key).and_return(:the_lease_key)
# At push 200
- expect(GitGarbageCollectWorker).to receive(:perform_async).with(resource.id, :gc, :the_lease_key, :the_uuid)
+ expect(resource.git_garbage_collect_worker_klass).to receive(:perform_async).with(resource.id, :gc, :the_lease_key, :the_uuid)
.once
# At push 50, 100, 150
- expect(GitGarbageCollectWorker).to receive(:perform_async).with(resource.id, :full_repack, :the_lease_key, :the_uuid)
+ expect(resource.git_garbage_collect_worker_klass).to receive(:perform_async).with(resource.id, :full_repack, :the_lease_key, :the_uuid)
.exactly(3).times
# At push 10, 20, ... (except those above)
- expect(GitGarbageCollectWorker).to receive(:perform_async).with(resource.id, :incremental_repack, :the_lease_key, :the_uuid)
+ expect(resource.git_garbage_collect_worker_klass).to receive(:perform_async).with(resource.id, :incremental_repack, :the_lease_key, :the_uuid)
.exactly(16).times
# At push 6, 12, 18, ... (except those above)
- expect(GitGarbageCollectWorker).to receive(:perform_async).with(resource.id, :pack_refs, :the_lease_key, :the_uuid)
+ expect(resource.git_garbage_collect_worker_klass).to receive(:perform_async).with(resource.id, :pack_refs, :the_lease_key, :the_uuid)
.exactly(27).times
201.times do
@@ -90,7 +90,7 @@ RSpec.shared_examples 'housekeeps repository' do
allow(housekeeping).to receive(:try_obtain_lease).and_return(:gc_uuid)
allow(housekeeping).to receive(:lease_key).and_return(:gc_lease_key)
- expect(GitGarbageCollectWorker).to receive(:perform_async).with(resource.id, :gc, :gc_lease_key, :gc_uuid).twice
+ expect(resource.git_garbage_collect_worker_klass).to receive(:perform_async).with(resource.id, :gc, :gc_lease_key, :gc_uuid).twice
2.times do
housekeeping.execute
diff --git a/spec/support/shared_examples/services/resource_events/change_milestone_service_shared_examples.rb b/spec/support/shared_examples/services/resource_events/change_milestone_service_shared_examples.rb
index d70ed707822..fac9f1d6253 100644
--- a/spec/support/shared_examples/services/resource_events/change_milestone_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/resource_events/change_milestone_service_shared_examples.rb
@@ -3,8 +3,12 @@
RSpec.shared_examples 'timebox(milestone or iteration) resource events creator' do |timebox_event_class|
let_it_be(:user) { create(:user) }
+ before do
+ resource.system_note_timestamp = created_at_time
+ end
+
context 'when milestone/iteration is added' do
- let(:service) { described_class.new(resource, user, add_timebox_args) }
+ let(:service) { described_class.new(resource, user, **add_timebox_args) }
before do
set_timebox(timebox_event_class, timebox)
@@ -18,7 +22,7 @@ RSpec.shared_examples 'timebox(milestone or iteration) resource events creator'
end
context 'when milestone/iteration is removed' do
- let(:service) { described_class.new(resource, user, remove_timebox_args) }
+ let(:service) { described_class.new(resource, user, **remove_timebox_args) }
before do
set_timebox(timebox_event_class, nil)
diff --git a/spec/support/shared_examples/services/snippets_shared_examples.rb b/spec/support/shared_examples/services/snippets_shared_examples.rb
index 4a08c0d4365..10add3a7299 100644
--- a/spec/support/shared_examples/services/snippets_shared_examples.rb
+++ b/spec/support/shared_examples/services/snippets_shared_examples.rb
@@ -1,42 +1,56 @@
# frozen_string_literal: true
-RSpec.shared_examples 'snippets spam check is performed' do
- shared_examples 'marked as spam' do
- it 'marks a snippet as spam' do
- expect(snippet).to be_spam
- end
+RSpec.shared_examples 'checking spam' do
+ let(:request) { double(:request) }
+ let(:api) { true }
+ let(:captcha_response) { 'abc123' }
+ let(:spam_log_id) { 1 }
+ let(:disable_spam_action_service) { false }
- it 'invalidates the snippet' do
- expect(snippet).to be_invalid
- end
+ let(:extra_opts) do
+ {
+ request: request,
+ api: api,
+ captcha_response: captcha_response,
+ spam_log_id: spam_log_id,
+ disable_spam_action_service: disable_spam_action_service
+ }
+ end
- it 'creates a new spam_log' do
- expect { snippet }
- .to have_spam_log(title: snippet.title, noteable_type: snippet.class.name)
+ before do
+ allow_next_instance_of(UserAgentDetailService) do |instance|
+ allow(instance).to receive(:create)
end
+ end
- it 'assigns a spam_log to an issue' do
- expect(snippet.spam_log).to eq(SpamLog.last)
+ it 'executes SpamActionService' do
+ spam_params = Spam::SpamParams.new(
+ api: api,
+ captcha_response: captcha_response,
+ spam_log_id: spam_log_id
+ )
+ expect_next_instance_of(
+ Spam::SpamActionService,
+ {
+ spammable: kind_of(Snippet),
+ request: request,
+ user: an_instance_of(User),
+ action: action
+ }
+ ) do |instance|
+ expect(instance).to receive(:execute).with(spam_params: spam_params)
end
- end
- let(:extra_opts) do
- { visibility_level: Gitlab::VisibilityLevel::PUBLIC, request: double(:request, env: {}) }
+ subject
end
- before do
- expect_next_instance_of(Spam::AkismetService) do |akismet_service|
- expect(akismet_service).to receive_messages(spam?: true)
- end
- end
+ context 'when spam action service is disabled' do
+ let(:disable_spam_action_service) { true }
- [true, false, nil].each do |allow_possible_spam|
- context "when allow_possible_spam flag is #{allow_possible_spam.inspect}" do
- before do
- stub_feature_flags(allow_possible_spam: allow_possible_spam) unless allow_possible_spam.nil?
- end
+ it 'does not pass the request parameter to the service' do
+ expect(Spam::SpamActionService).not_to receive(:new)
- it_behaves_like 'marked as spam'
+ subject
end
end
end
diff --git a/spec/support/shared_examples/workers/concerns/git_garbage_collect_methods_shared_examples.rb b/spec/support/shared_examples/workers/concerns/git_garbage_collect_methods_shared_examples.rb
new file mode 100644
index 00000000000..f2314793cb4
--- /dev/null
+++ b/spec/support/shared_examples/workers/concerns/git_garbage_collect_methods_shared_examples.rb
@@ -0,0 +1,320 @@
+# frozen_string_literal: true
+
+require 'fileutils'
+
+RSpec.shared_examples 'can collect git garbage' do |update_statistics: true|
+ include GitHelpers
+
+ let!(:lease_uuid) { SecureRandom.uuid }
+ let!(:lease_key) { "resource_housekeeping:#{resource.id}" }
+ let(:params) { [resource.id, task, lease_key, lease_uuid] }
+ let(:shell) { Gitlab::Shell.new }
+ let(:repository) { resource.repository }
+ let(:statistics_service_klass) { nil }
+
+ subject { described_class.new }
+
+ before do
+ allow(subject).to receive(:find_resource).and_return(resource)
+ end
+
+ shared_examples 'it calls Gitaly' do
+ specify do
+ repository_service = instance_double(Gitlab::GitalyClient::RepositoryService)
+
+ expect(subject).to receive(:get_gitaly_client).with(task, repository.raw_repository).and_return(repository_service)
+ expect(repository_service).to receive(gitaly_task)
+
+ subject.perform(*params)
+ end
+ end
+
+ shared_examples 'it updates the resource statistics' do
+ it 'updates the resource statistics' do
+ expect_next_instance_of(statistics_service_klass, anything, nil, statistics: statistics_keys) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ subject.perform(*params)
+ end
+
+ it 'does nothing if the database is read-only' do
+ allow(Gitlab::Database).to receive(:read_only?) { true }
+
+ expect(statistics_service_klass).not_to receive(:new)
+
+ subject.perform(*params)
+ end
+ end
+
+ describe '#perform', :aggregate_failures do
+ let(:gitaly_task) { :garbage_collect }
+ let(:task) { :gc }
+
+ context 'with active lease_uuid' do
+ before do
+ allow(subject).to receive(:get_lease_uuid).and_return(lease_uuid)
+ end
+
+ it_behaves_like 'it calls Gitaly'
+ it_behaves_like 'it updates the resource statistics' if update_statistics
+
+ it "flushes ref caches when the task if 'gc'" do
+ expect(subject).to receive(:renew_lease).with(lease_key, lease_uuid).and_call_original
+ expect(repository).to receive(:expire_branches_cache).and_call_original
+ expect(repository).to receive(:branch_names).and_call_original
+ expect(repository).to receive(:has_visible_content?).and_call_original
+ expect(repository.raw_repository).to receive(:has_visible_content?).and_call_original
+
+ subject.perform(*params)
+ end
+
+ it 'handles gRPC errors' do
+ allow_next_instance_of(Gitlab::GitalyClient::RepositoryService, repository.raw_repository) do |instance|
+ allow(instance).to receive(:garbage_collect).and_raise(GRPC::NotFound)
+ end
+
+ expect { subject.perform(*params) }.to raise_exception(Gitlab::Git::Repository::NoRepository)
+ end
+ end
+
+ context 'with different lease than the active one' do
+ before do
+ allow(subject).to receive(:get_lease_uuid).and_return(SecureRandom.uuid)
+ end
+
+ it 'returns silently' do
+ expect(repository).not_to receive(:expire_branches_cache).and_call_original
+ expect(repository).not_to receive(:branch_names).and_call_original
+ expect(repository).not_to receive(:has_visible_content?).and_call_original
+
+ subject.perform(*params)
+ end
+ end
+
+ context 'with no active lease' do
+ let(:params) { [resource.id] }
+
+ before do
+ allow(subject).to receive(:get_lease_uuid).and_return(false)
+ end
+
+ context 'when is able to get the lease' do
+ before do
+ allow(subject).to receive(:try_obtain_lease).and_return(SecureRandom.uuid)
+ end
+
+ it_behaves_like 'it calls Gitaly'
+ it_behaves_like 'it updates the resource statistics' if update_statistics
+
+ it "flushes ref caches when the task if 'gc'" do
+ expect(subject).to receive(:get_lease_uuid).with("git_gc:#{task}:#{expected_default_lease}").and_return(false)
+ expect(repository).to receive(:expire_branches_cache).and_call_original
+ expect(repository).to receive(:branch_names).and_call_original
+ expect(repository).to receive(:has_visible_content?).and_call_original
+ expect(repository.raw_repository).to receive(:has_visible_content?).and_call_original
+
+ subject.perform(*params)
+ end
+ end
+
+ context 'when no lease can be obtained' do
+ it 'returns silently' do
+ expect(subject).to receive(:try_obtain_lease).and_return(false)
+
+ expect(subject).not_to receive(:command)
+ expect(repository).not_to receive(:expire_branches_cache).and_call_original
+ expect(repository).not_to receive(:branch_names).and_call_original
+ expect(repository).not_to receive(:has_visible_content?).and_call_original
+
+ subject.perform(*params)
+ end
+ end
+ end
+
+ context 'repack_full' do
+ let(:task) { :full_repack }
+ let(:gitaly_task) { :repack_full }
+
+ before do
+ expect(subject).to receive(:get_lease_uuid).and_return(lease_uuid)
+ end
+
+ it_behaves_like 'it calls Gitaly'
+ it_behaves_like 'it updates the resource statistics' if update_statistics
+ end
+
+ context 'pack_refs' do
+ let(:task) { :pack_refs }
+ let(:gitaly_task) { :pack_refs }
+
+ before do
+ expect(subject).to receive(:get_lease_uuid).and_return(lease_uuid)
+ end
+
+ it 'calls Gitaly' do
+ repository_service = instance_double(Gitlab::GitalyClient::RefService)
+
+ expect(subject).to receive(:get_gitaly_client).with(task, repository.raw_repository).and_return(repository_service)
+ expect(repository_service).to receive(gitaly_task)
+
+ subject.perform(*params)
+ end
+
+ it 'does not update the resource statistics' do
+ expect(statistics_service_klass).not_to receive(:new)
+
+ subject.perform(*params)
+ end
+ end
+
+ context 'repack_incremental' do
+ let(:task) { :incremental_repack }
+ let(:gitaly_task) { :repack_incremental }
+
+ before do
+ expect(subject).to receive(:get_lease_uuid).and_return(lease_uuid)
+ end
+
+ it_behaves_like 'it calls Gitaly'
+ it_behaves_like 'it updates the resource statistics' if update_statistics
+ end
+
+ shared_examples 'gc tasks' do
+ before do
+ allow(subject).to receive(:get_lease_uuid).and_return(lease_uuid)
+ allow(subject).to receive(:bitmaps_enabled?).and_return(bitmaps_enabled)
+ end
+
+ it 'incremental repack adds a new packfile' do
+ create_objects(resource)
+ before_packs = packs(resource)
+
+ expect(before_packs.count).to be >= 1
+
+ subject.perform(resource.id, 'incremental_repack', lease_key, lease_uuid)
+ after_packs = packs(resource)
+
+ # Exactly one new pack should have been created
+ expect(after_packs.count).to eq(before_packs.count + 1)
+
+ # Previously existing packs are still around
+ expect(before_packs & after_packs).to eq(before_packs)
+ end
+
+ it 'full repack consolidates into 1 packfile' do
+ create_objects(resource)
+ subject.perform(resource.id, 'incremental_repack', lease_key, lease_uuid)
+ before_packs = packs(resource)
+
+ expect(before_packs.count).to be >= 2
+
+ subject.perform(resource.id, 'full_repack', lease_key, lease_uuid)
+ after_packs = packs(resource)
+
+ expect(after_packs.count).to eq(1)
+
+ # Previously existing packs should be gone now
+ expect(after_packs - before_packs).to eq(after_packs)
+
+ expect(File.exist?(bitmap_path(after_packs.first))).to eq(bitmaps_enabled)
+ end
+
+ it 'gc consolidates into 1 packfile and updates packed-refs' do
+ create_objects(resource)
+ before_packs = packs(resource)
+ before_packed_refs = packed_refs(resource)
+
+ expect(before_packs.count).to be >= 1
+
+ # It's quite difficult to use `expect_next_instance_of` in this place
+ # because the RepositoryService is instantiated several times to do
+ # some repository calls like `exists?`, `create_repository`, and so on.
+ # Therefore, since we're instantiating the object several times,
+ # RSpec has trouble figuring out which instance is the next and which
+ # one we want to mock.
+ # Besides, at this point, we actually want to perform the call to Gitaly,
+ # otherwise we would just use `instance_double` like in other parts of the
+ # spec file.
+ expect_any_instance_of(Gitlab::GitalyClient::RepositoryService) # rubocop:disable RSpec/AnyInstanceOf
+ .to receive(:garbage_collect)
+ .with(bitmaps_enabled, prune: false)
+ .and_call_original
+
+ subject.perform(resource.id, 'gc', lease_key, lease_uuid)
+ after_packed_refs = packed_refs(resource)
+ after_packs = packs(resource)
+
+ expect(after_packs.count).to eq(1)
+
+ # Previously existing packs should be gone now
+ expect(after_packs - before_packs).to eq(after_packs)
+
+ # The packed-refs file should have been updated during 'git gc'
+ expect(before_packed_refs).not_to eq(after_packed_refs)
+
+ expect(File.exist?(bitmap_path(after_packs.first))).to eq(bitmaps_enabled)
+ end
+
+ it 'cleans up repository after finishing' do
+ expect(resource).to receive(:cleanup).and_call_original
+
+ subject.perform(resource.id, 'gc', lease_key, lease_uuid)
+ end
+
+ it 'prune calls garbage_collect with the option prune: true' do
+ repository_service = instance_double(Gitlab::GitalyClient::RepositoryService)
+
+ expect(subject).to receive(:get_gitaly_client).with(:prune, repository.raw_repository).and_return(repository_service)
+ expect(repository_service).to receive(:garbage_collect).with(bitmaps_enabled, prune: true)
+
+ subject.perform(resource.id, 'prune', lease_key, lease_uuid)
+ end
+
+ # Create a new commit on a random new branch
+ def create_objects(resource)
+ rugged = rugged_repo(resource.repository)
+ old_commit = rugged.branches.first.target
+ new_commit_sha = Rugged::Commit.create(
+ rugged,
+ message: "hello world #{SecureRandom.hex(6)}",
+ author: { email: 'foo@bar', name: 'baz' },
+ committer: { email: 'foo@bar', name: 'baz' },
+ tree: old_commit.tree,
+ parents: [old_commit]
+ )
+ rugged.references.create("refs/heads/#{SecureRandom.hex(6)}", new_commit_sha)
+ end
+
+ def packs(resource)
+ Dir["#{path_to_repo}/objects/pack/*.pack"]
+ end
+
+ def packed_refs(resource)
+ path = File.join(path_to_repo, 'packed-refs')
+ FileUtils.touch(path)
+ File.read(path)
+ end
+
+ def path_to_repo
+ @path_to_repo ||= File.join(TestEnv.repos_path, resource.repository.relative_path)
+ end
+
+ def bitmap_path(pack)
+ pack.sub(/\.pack\z/, '.bitmap')
+ end
+ end
+
+ context 'with bitmaps enabled' do
+ let(:bitmaps_enabled) { true }
+
+ include_examples 'gc tasks'
+ end
+
+ context 'with bitmaps disabled' do
+ let(:bitmaps_enabled) { false }
+
+ include_examples 'gc tasks'
+ end
+ end
+end
diff --git a/spec/support/snowplow.rb b/spec/support/snowplow.rb
index 0d6102f1705..91325d867d4 100644
--- a/spec/support/snowplow.rb
+++ b/spec/support/snowplow.rb
@@ -18,6 +18,7 @@ RSpec.configure do |config|
stub_application_setting(snowplow_enabled: true)
allow(SnowplowTracker::SelfDescribingJson).to receive(:new).and_call_original
+ allow(Gitlab::Tracking::StandardContext).to receive(:new).and_call_original
allow(Gitlab::Tracking).to receive(:event).and_call_original # rubocop:disable RSpec/ExpectGitlabTracking
end
diff --git a/spec/tasks/gitlab/cleanup_rake_spec.rb b/spec/tasks/gitlab/cleanup_rake_spec.rb
index dc460611169..08d8651dcef 100644
--- a/spec/tasks/gitlab/cleanup_rake_spec.rb
+++ b/spec/tasks/gitlab/cleanup_rake_spec.rb
@@ -109,8 +109,7 @@ RSpec.describe 'gitlab:cleanup rake tasks' do
it 'passes dry_run correctly' do
expect(Gitlab::Cleanup::OrphanJobArtifactFiles)
.to receive(:new)
- .with(limit: anything,
- dry_run: false,
+ .with(dry_run: false,
niceness: anything,
logger: anything)
.and_call_original
@@ -145,7 +144,6 @@ RSpec.describe 'gitlab:cleanup rake tasks' do
expect(Gitlab::Cleanup::OrphanLfsFileReferences)
.to receive(:new)
.with(project,
- limit: anything,
dry_run: false,
logger: anything)
.and_call_original
@@ -153,24 +151,6 @@ RSpec.describe 'gitlab:cleanup rake tasks' do
rake_task
end
end
-
- context 'with LIMIT set to 100' do
- before do
- stub_env('LIMIT', '100')
- end
-
- it 'passes limit as integer' do
- expect(Gitlab::Cleanup::OrphanLfsFileReferences)
- .to receive(:new)
- .with(project,
- limit: 100,
- dry_run: true,
- logger: anything)
- .and_call_original
-
- rake_task
- end
- end
end
describe 'gitlab:cleanup:orphan_lfs_files' do
diff --git a/spec/tasks/gitlab/pages_rake_spec.rb b/spec/tasks/gitlab/pages_rake_spec.rb
index 76808f52890..9c26d3d73c8 100644
--- a/spec/tasks/gitlab/pages_rake_spec.rb
+++ b/spec/tasks/gitlab/pages_rake_spec.rb
@@ -9,59 +9,31 @@ RSpec.describe 'gitlab:pages:migrate_legacy_storage rake task' do
subject { run_rake_task('gitlab:pages:migrate_legacy_storage') }
- let(:project) { create(:project) }
-
- it 'does not try to migrate pages if pages are not deployed' do
- expect(::Pages::MigrateLegacyStorageToDeploymentService).not_to receive(:new)
+ it 'calls migration service' do
+ expect_next_instance_of(::Pages::MigrateFromLegacyStorageService, anything, 3, 10) do |service|
+ expect(service).to receive(:execute).and_call_original
+ end
subject
end
- context 'when pages are marked as deployed' do
- before do
- project.mark_pages_as_deployed
- end
-
- context 'when pages directory does not exist' do
- it 'tries to migrate the project, but does not crash' do
- expect_next_instance_of(::Pages::MigrateLegacyStorageToDeploymentService, project) do |service|
- expect(service).to receive(:execute).and_call_original
- end
+ it 'uses PAGES_MIGRATION_THREADS environment variable' do
+ stub_env('PAGES_MIGRATION_THREADS', '5')
- subject
- end
+ expect_next_instance_of(::Pages::MigrateFromLegacyStorageService, anything, 5, 10) do |service|
+ expect(service).to receive(:execute).and_call_original
end
- context 'when pages directory exists on disk' do
- before do
- FileUtils.mkdir_p File.join(project.pages_path, "public")
- File.open(File.join(project.pages_path, "public/index.html"), "w") do |f|
- f.write("Hello!")
- end
- end
-
- it 'migrates pages projects without deployments' do
- expect_next_instance_of(::Pages::MigrateLegacyStorageToDeploymentService, project) do |service|
- expect(service).to receive(:execute).and_call_original
- end
-
- expect do
- subject
- end.to change { project.pages_metadatum.reload.pages_deployment }.from(nil)
- end
-
- context 'when deployed already exists for the project' do
- before do
- deployment = create(:pages_deployment, project: project)
- project.set_first_pages_deployment!(deployment)
- end
+ subject
+ end
- it 'does not try to migrate project' do
- expect(::Pages::MigrateLegacyStorageToDeploymentService).not_to receive(:new)
+ it 'uses PAGES_MIGRATION_BATCH_SIZE environment variable' do
+ stub_env('PAGES_MIGRATION_BATCH_SIZE', '100')
- subject
- end
- end
+ expect_next_instance_of(::Pages::MigrateFromLegacyStorageService, anything, 3, 100) do |service|
+ expect(service).to receive(:execute).and_call_original
end
+
+ subject
end
end
diff --git a/spec/tasks/gitlab/password_rake_spec.rb b/spec/tasks/gitlab/password_rake_spec.rb
new file mode 100644
index 00000000000..d5320f3b4af
--- /dev/null
+++ b/spec/tasks/gitlab/password_rake_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'rake_helper'
+
+RSpec.describe 'gitlab:password rake tasks' do
+ let_it_be(:user_1) { create(:user, username: 'foobar', password: 'initial_password') }
+
+ def stub_username(username)
+ allow(Gitlab::TaskHelpers).to receive(:prompt).with('Enter username: ').and_return(username)
+ end
+
+ def stub_password(password, confirmation = nil)
+ confirmation ||= password
+ allow(Gitlab::TaskHelpers).to receive(:prompt_for_password).and_return(password)
+ allow(Gitlab::TaskHelpers).to receive(:prompt_for_password).with('Confirm password: ').and_return(confirmation)
+ end
+
+ before do
+ Rake.application.rake_require 'tasks/gitlab/password'
+
+ stub_username('foobar')
+ stub_password('secretpassword')
+ end
+
+ describe ':reset' do
+ context 'when all inputs are correct' do
+ it 'updates the password properly' do
+ run_rake_task('gitlab:password:reset', user_1.username)
+ expect(user_1.reload.valid_password?('secretpassword')).to eq(true)
+ end
+ end
+
+ context 'when username is not provided' do
+ it 'asks for username' do
+ expect(Gitlab::TaskHelpers).to receive(:prompt).with('Enter username: ')
+
+ run_rake_task('gitlab:password:reset')
+ end
+
+ context 'when username is empty' do
+ it 'aborts with an error' do
+ stub_username('')
+ expect { run_rake_task('gitlab:password:reset') }.to raise_error(/Username can not be empty./)
+ end
+ end
+ end
+
+ context 'when username is passed as argument' do
+ it 'does not ask for username' do
+ expect(Gitlab::TaskHelpers).not_to receive(:prompt)
+
+ run_rake_task('gitlab:password:reset', 'foobar')
+ end
+ end
+
+ context 'when passwords do not match' do
+ before do
+ stub_password('randompassword', 'differentpassword')
+ end
+
+ it 'aborts with an error' do
+ expect { run_rake_task('gitlab:password:reset') }.to raise_error(%r{Unable to change password of the user with username foobar.\nPassword confirmation doesn't match Password})
+ end
+ end
+
+ context 'when user cannot be found' do
+ before do
+ stub_username('nonexistentuser')
+ end
+
+ it 'aborts with an error' do
+ expect { run_rake_task('gitlab:password:reset') }.to raise_error(/Unable to find user with username nonexistentuser./)
+ end
+ end
+ end
+end
diff --git a/spec/tasks/gitlab/terraform/migrate_rake_spec.rb b/spec/tasks/gitlab/terraform/migrate_rake_spec.rb
new file mode 100644
index 00000000000..4188521df8e
--- /dev/null
+++ b/spec/tasks/gitlab/terraform/migrate_rake_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'rake_helper'
+
+RSpec.describe 'gitlab:terraform_states' do
+ let_it_be(:version) { create(:terraform_state_version) }
+
+ let(:logger) { instance_double(Logger) }
+ let(:helper) { double }
+
+ before(:all) do
+ Rake.application.rake_require 'tasks/gitlab/terraform/migrate'
+ end
+
+ before do
+ allow(Logger).to receive(:new).with(STDOUT).and_return(logger)
+ end
+
+ describe 'gitlab:terraform_states:migrate' do
+ subject { run_rake_task('gitlab:terraform_states:migrate') }
+
+ it 'invokes the migration helper to move files to object storage' do
+ expect(Gitlab::Terraform::StateMigrationHelper).to receive(:migrate_to_remote_storage).and_yield(version)
+ expect(logger).to receive(:info).with('Starting transfer of Terraform states to object storage')
+ expect(logger).to receive(:info).with(/Transferred Terraform state version ID #{version.id}/)
+
+ subject
+ end
+
+ context 'an error is raised while migrating' do
+ let(:error_message) { 'Something went wrong' }
+
+ before do
+ allow(Gitlab::Terraform::StateMigrationHelper).to receive(:migrate_to_remote_storage).and_raise(StandardError, error_message)
+ end
+
+ it 'logs the error' do
+ expect(logger).to receive(:info).with('Starting transfer of Terraform states to object storage')
+ expect(logger).to receive(:error).with("Failed to migrate: #{error_message}")
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/danger/base_linter_spec.rb b/spec/tooling/danger/base_linter_spec.rb
index 0136a0278ae..54d8f3dc1f7 100644
--- a/spec/lib/gitlab/danger/base_linter_spec.rb
+++ b/spec/tooling/danger/base_linter_spec.rb
@@ -1,12 +1,11 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
require 'rspec-parameterized'
require_relative 'danger_spec_helper'
-require 'gitlab/danger/base_linter'
+require_relative '../../../tooling/danger/base_linter'
-RSpec.describe Gitlab::Danger::BaseLinter do
+RSpec.describe Tooling::Danger::BaseLinter do
let(:commit_class) do
Struct.new(:message, :sha, :diff_parent)
end
diff --git a/spec/lib/gitlab/danger/changelog_spec.rb b/spec/tooling/danger/changelog_spec.rb
index 04c515f1205..c0eca67ce92 100644
--- a/spec/lib/gitlab/danger/changelog_spec.rb
+++ b/spec/tooling/danger/changelog_spec.rb
@@ -1,11 +1,10 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
require_relative 'danger_spec_helper'
-require 'gitlab/danger/changelog'
+require_relative '../../../tooling/danger/changelog'
-RSpec.describe Gitlab::Danger::Changelog do
+RSpec.describe Tooling::Danger::Changelog do
include DangerSpecHelper
let(:added_files) { nil }
@@ -53,8 +52,8 @@ RSpec.describe Gitlab::Danger::Changelog do
describe '#optional?' do
let(:category_with_changelog) { :backend }
let(:label_with_changelog) { 'frontend' }
- let(:category_without_changelog) { Gitlab::Danger::Changelog::NO_CHANGELOG_CATEGORIES.first }
- let(:label_without_changelog) { Gitlab::Danger::Changelog::NO_CHANGELOG_LABELS.first }
+ let(:category_without_changelog) { Tooling::Danger::Changelog::NO_CHANGELOG_CATEGORIES.first }
+ let(:label_without_changelog) { Tooling::Danger::Changelog::NO_CHANGELOG_LABELS.first }
subject { changelog.optional? }
diff --git a/spec/lib/gitlab/danger/commit_linter_spec.rb b/spec/tooling/danger/commit_linter_spec.rb
index d3d86037a53..694e524af21 100644
--- a/spec/lib/gitlab/danger/commit_linter_spec.rb
+++ b/spec/tooling/danger/commit_linter_spec.rb
@@ -1,12 +1,11 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
require 'rspec-parameterized'
require_relative 'danger_spec_helper'
-require 'gitlab/danger/commit_linter'
+require_relative '../../../tooling/danger/commit_linter'
-RSpec.describe Gitlab::Danger::CommitLinter do
+RSpec.describe Tooling::Danger::CommitLinter do
using RSpec::Parameterized::TableSyntax
let(:total_files_changed) { 2 }
diff --git a/spec/lib/gitlab/danger/danger_spec_helper.rb b/spec/tooling/danger/danger_spec_helper.rb
index b1e84b3c13d..b1e84b3c13d 100644
--- a/spec/lib/gitlab/danger/danger_spec_helper.rb
+++ b/spec/tooling/danger/danger_spec_helper.rb
diff --git a/spec/lib/gitlab/danger/emoji_checker_spec.rb b/spec/tooling/danger/emoji_checker_spec.rb
index 6092c751e1c..bbd957b3d00 100644
--- a/spec/lib/gitlab/danger/emoji_checker_spec.rb
+++ b/spec/tooling/danger/emoji_checker_spec.rb
@@ -1,11 +1,10 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
require 'rspec-parameterized'
-require 'gitlab/danger/emoji_checker'
+require_relative '../../../tooling/danger/emoji_checker'
-RSpec.describe Gitlab::Danger::EmojiChecker do
+RSpec.describe Tooling::Danger::EmojiChecker do
using RSpec::Parameterized::TableSyntax
describe '#includes_text_emoji?' do
diff --git a/spec/tooling/danger/feature_flag_spec.rb b/spec/tooling/danger/feature_flag_spec.rb
new file mode 100644
index 00000000000..db63116cc37
--- /dev/null
+++ b/spec/tooling/danger/feature_flag_spec.rb
@@ -0,0 +1,157 @@
+# frozen_string_literal: true
+
+require_relative 'danger_spec_helper'
+
+require_relative '../../../tooling/danger/feature_flag'
+
+RSpec.describe Tooling::Danger::FeatureFlag do
+ include DangerSpecHelper
+
+ let(:added_files) { nil }
+ let(:modified_files) { nil }
+ let(:deleted_files) { nil }
+ let(:fake_git) { double('fake-git', added_files: added_files, modified_files: modified_files, deleted_files: deleted_files) }
+
+ let(:mr_labels) { nil }
+ let(:mr_json) { nil }
+ let(:fake_gitlab) { double('fake-gitlab', mr_labels: mr_labels, mr_json: mr_json) }
+
+ let(:changes_by_category) { nil }
+ let(:sanitize_mr_title) { nil }
+ let(:ee?) { false }
+ let(:fake_helper) { double('fake-helper', changes_by_category: changes_by_category, sanitize_mr_title: sanitize_mr_title, ee?: ee?) }
+
+ let(:fake_danger) { new_fake_danger.include(described_class) }
+
+ subject(:feature_flag) { fake_danger.new(git: fake_git, gitlab: fake_gitlab, helper: fake_helper) }
+
+ describe '#feature_flag_files' do
+ let(:feature_flag_files) do
+ [
+ 'config/feature_flags/development/entry.yml',
+ 'ee/config/feature_flags/ops/entry.yml'
+ ]
+ end
+
+ let(:other_files) do
+ [
+ 'app/models/model.rb',
+ 'app/assets/javascripts/file.js'
+ ]
+ end
+
+ shared_examples 'an array of Found objects' do |change_type|
+ it 'returns an array of Found objects' do
+ expect(feature_flag.feature_flag_files(change_type: change_type)).to contain_exactly(an_instance_of(described_class::Found), an_instance_of(described_class::Found))
+ expect(feature_flag.feature_flag_files(change_type: change_type).map(&:path)).to eq(feature_flag_files)
+ end
+ end
+
+ shared_examples 'an empty array' do |change_type|
+ it 'returns an empty array' do
+ expect(feature_flag.feature_flag_files(change_type: change_type)).to be_empty
+ end
+ end
+
+ describe 'retrieves added feature flag files' do
+ context 'with added feature flag files' do
+ let(:added_files) { feature_flag_files }
+
+ include_examples 'an array of Found objects', :added
+ end
+
+ context 'without added feature flag files' do
+ let(:added_files) { other_files }
+
+ include_examples 'an empty array', :added
+ end
+ end
+
+ describe 'retrieves modified feature flag files' do
+ context 'with modified feature flag files' do
+ let(:modified_files) { feature_flag_files }
+
+ include_examples 'an array of Found objects', :modified
+ end
+
+ context 'without modified feature flag files' do
+ let(:modified_files) { other_files }
+
+ include_examples 'an empty array', :modified
+ end
+ end
+
+ describe 'retrieves deleted feature flag files' do
+ context 'with deleted feature flag files' do
+ let(:deleted_files) { feature_flag_files }
+
+ include_examples 'an array of Found objects', :deleted
+ end
+
+ context 'without deleted feature flag files' do
+ let(:deleted_files) { other_files }
+
+ include_examples 'an empty array', :deleted
+ end
+ end
+ end
+
+ describe described_class::Found do
+ let(:feature_flag_path) { 'config/feature_flags/development/entry.yml' }
+ let(:group) { 'group::source code' }
+ let(:raw_yaml) do
+ YAML.dump('group' => group)
+ end
+
+ subject(:found) { described_class.new(feature_flag_path) }
+
+ before do
+ allow(File).to receive(:read).and_call_original
+ expect(File).to receive(:read).with(feature_flag_path).and_return(raw_yaml)
+ end
+
+ describe '#raw' do
+ it 'returns the raw YAML' do
+ expect(found.raw).to eq(raw_yaml)
+ end
+ end
+
+ describe '#group' do
+ it 'returns the group found in the YAML' do
+ expect(found.group).to eq(group)
+ end
+ end
+
+ describe '#group_match_mr_label?' do
+ subject(:result) { found.group_match_mr_label?(mr_group_label) }
+
+ context 'when MR labels match FF group' do
+ let(:mr_group_label) { 'group::source code' }
+
+ specify { expect(result).to eq(true) }
+ end
+
+ context 'when MR labels do not match FF group' do
+ let(:mr_group_label) { 'group::access' }
+
+ specify { expect(result).to eq(false) }
+ end
+
+ context 'when group is nil' do
+ let(:group) { nil }
+
+ context 'and MR has no group label' do
+ let(:mr_group_label) { nil }
+
+ specify { expect(result).to eq(true) }
+ end
+
+ context 'and MR has a group label' do
+ let(:mr_group_label) { 'group::source code' }
+
+ specify { expect(result).to eq(false) }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/danger/helper_spec.rb b/spec/tooling/danger/helper_spec.rb
index bd5c746dd54..c338d138352 100644
--- a/spec/lib/gitlab/danger/helper_spec.rb
+++ b/spec/tooling/danger/helper_spec.rb
@@ -4,9 +4,9 @@ require 'fast_spec_helper'
require 'rspec-parameterized'
require_relative 'danger_spec_helper'
-require 'gitlab/danger/helper'
+require_relative '../../../tooling/danger/helper'
-RSpec.describe Gitlab::Danger::Helper do
+RSpec.describe Tooling::Danger::Helper do
using RSpec::Parameterized::TableSyntax
include DangerSpecHelper
@@ -37,7 +37,7 @@ RSpec.describe Gitlab::Danger::Helper do
context 'when danger gitlab plugin is not available' do
it 'returns nil' do
invalid_danger = Class.new do
- include Gitlab::Danger::Helper
+ include Tooling::Danger::Helper
end.new
expect(invalid_danger.gitlab_helper).to be_nil
@@ -289,8 +289,8 @@ RSpec.describe Gitlab::Danger::Helper do
'.gitlab/ci/cng.gitlab-ci.yml' | [:engineering_productivity]
'.gitlab/ci/ee-specific-checks.gitlab-ci.yml' | [:engineering_productivity]
'scripts/foo' | [:engineering_productivity]
- 'lib/gitlab/danger/foo' | [:engineering_productivity]
- 'ee/lib/gitlab/danger/foo' | [:engineering_productivity]
+ 'tooling/danger/foo' | [:engineering_productivity]
+ 'ee/tooling/danger/foo' | [:engineering_productivity]
'lefthook.yml' | [:engineering_productivity]
'.editorconfig' | [:engineering_productivity]
'tooling/bin/find_foss_tests' | [:engineering_productivity]
@@ -402,13 +402,55 @@ RSpec.describe Gitlab::Danger::Helper do
end
end
- describe '#security_mr?' do
- it 'returns false when `gitlab_helper` is unavailable' do
+ describe '#mr_title' do
+ it 'returns "" when `gitlab_helper` is unavailable' do
expect(helper).to receive(:gitlab_helper).and_return(nil)
- expect(helper).not_to be_security_mr
+ expect(helper.mr_title).to eq('')
+ end
+
+ it 'returns the MR title when `gitlab_helper` is available' do
+ mr_title = 'My MR title'
+ expect(fake_gitlab).to receive(:mr_json)
+ .and_return('title' => mr_title)
+
+ expect(helper.mr_title).to eq(mr_title)
+ end
+ end
+
+ describe '#mr_web_url' do
+ it 'returns "" when `gitlab_helper` is unavailable' do
+ expect(helper).to receive(:gitlab_helper).and_return(nil)
+
+ expect(helper.mr_web_url).to eq('')
+ end
+
+ it 'returns the MR web_url when `gitlab_helper` is available' do
+ mr_web_url = 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/1'
+ expect(fake_gitlab).to receive(:mr_json)
+ .and_return('web_url' => mr_web_url)
+
+ expect(helper.mr_web_url).to eq(mr_web_url)
+ end
+ end
+
+ describe '#mr_target_branch' do
+ it 'returns "" when `gitlab_helper` is unavailable' do
+ expect(helper).to receive(:gitlab_helper).and_return(nil)
+
+ expect(helper.mr_target_branch).to eq('')
end
+ it 'returns the MR target_branch when `gitlab_helper` is available' do
+ mr_target_branch = 'main'
+ expect(fake_gitlab).to receive(:mr_json)
+ .and_return('target_branch' => mr_target_branch)
+
+ expect(helper.mr_target_branch).to eq(mr_target_branch)
+ end
+ end
+
+ describe '#security_mr?' do
it 'returns false when on a normal merge request' do
expect(fake_gitlab).to receive(:mr_json)
.and_return('web_url' => 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/1')
@@ -425,12 +467,6 @@ RSpec.describe Gitlab::Danger::Helper do
end
describe '#draft_mr?' do
- it 'returns false when `gitlab_helper` is unavailable' do
- expect(helper).to receive(:gitlab_helper).and_return(nil)
-
- expect(helper).not_to be_draft_mr
- end
-
it 'returns true for a draft MR' do
expect(fake_gitlab).to receive(:mr_json)
.and_return('title' => 'Draft: My MR title')
@@ -447,12 +483,6 @@ RSpec.describe Gitlab::Danger::Helper do
end
describe '#cherry_pick_mr?' do
- it 'returns false when `gitlab_helper` is unavailable' do
- expect(helper).to receive(:gitlab_helper).and_return(nil)
-
- expect(helper).not_to be_cherry_pick_mr
- end
-
context 'when MR title does not mention a cherry-pick' do
it 'returns false' do
expect(fake_gitlab).to receive(:mr_json)
@@ -478,6 +508,46 @@ RSpec.describe Gitlab::Danger::Helper do
end
end
+ describe '#run_all_rspec_mr?' do
+ context 'when MR title does not mention RUN ALL RSPEC' do
+ it 'returns false' do
+ expect(fake_gitlab).to receive(:mr_json)
+ .and_return('title' => 'Add feature xyz')
+
+ expect(helper).not_to be_run_all_rspec_mr
+ end
+ end
+
+ context 'when MR title mentions RUN ALL RSPEC' do
+ it 'returns true' do
+ expect(fake_gitlab).to receive(:mr_json)
+ .and_return('title' => 'Add feature xyz RUN ALL RSPEC')
+
+ expect(helper).to be_run_all_rspec_mr
+ end
+ end
+ end
+
+ describe '#run_as_if_foss_mr?' do
+ context 'when MR title does not mention RUN AS-IF-FOSS' do
+ it 'returns false' do
+ expect(fake_gitlab).to receive(:mr_json)
+ .and_return('title' => 'Add feature xyz')
+
+ expect(helper).not_to be_run_as_if_foss_mr
+ end
+ end
+
+ context 'when MR title mentions RUN AS-IF-FOSS' do
+ it 'returns true' do
+ expect(fake_gitlab).to receive(:mr_json)
+ .and_return('title' => 'Add feature xyz RUN AS-IF-FOSS')
+
+ expect(helper).to be_run_as_if_foss_mr
+ end
+ end
+ end
+
describe '#stable_branch?' do
it 'returns false when `gitlab_helper` is unavailable' do
expect(helper).to receive(:gitlab_helper).and_return(nil)
@@ -599,4 +669,14 @@ RSpec.describe Gitlab::Danger::Helper do
end
end
end
+
+ describe '#group_label' do
+ it 'returns nil when no group label is present' do
+ expect(helper.group_label(%w[foo bar])).to be_nil
+ end
+
+ it 'returns the group label when a group label is present' do
+ expect(helper.group_label(['foo', 'group::source code', 'bar'])).to eq('group::source code')
+ end
+ end
end
diff --git a/spec/lib/gitlab/danger/merge_request_linter_spec.rb b/spec/tooling/danger/merge_request_linter_spec.rb
index 29facc9fdd6..3273b6b3d07 100644
--- a/spec/lib/gitlab/danger/merge_request_linter_spec.rb
+++ b/spec/tooling/danger/merge_request_linter_spec.rb
@@ -1,12 +1,11 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
require 'rspec-parameterized'
require_relative 'danger_spec_helper'
-require 'gitlab/danger/merge_request_linter'
+require_relative '../../../tooling/danger/merge_request_linter'
-RSpec.describe Gitlab::Danger::MergeRequestLinter do
+RSpec.describe Tooling::Danger::MergeRequestLinter do
using RSpec::Parameterized::TableSyntax
let(:mr_class) do
diff --git a/spec/lib/gitlab/danger/roulette_spec.rb b/spec/tooling/danger/roulette_spec.rb
index 59ac3b12b6b..1e500a1ed08 100644
--- a/spec/lib/gitlab/danger/roulette_spec.rb
+++ b/spec/tooling/danger/roulette_spec.rb
@@ -3,10 +3,10 @@
require 'webmock/rspec'
require 'timecop'
-require 'gitlab/danger/roulette'
+require_relative '../../../tooling/danger/roulette'
require 'active_support/testing/time_helpers'
-RSpec.describe Gitlab::Danger::Roulette do
+RSpec.describe Tooling::Danger::Roulette do
include ActiveSupport::Testing::TimeHelpers
around do |example|
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::Danger::Roulette do
let(:backend_available) { true }
let(:backend_tz_offset_hours) { 2.0 }
let(:backend_maintainer) do
- Gitlab::Danger::Teammate.new(
+ Tooling::Danger::Teammate.new(
'username' => 'backend-maintainer',
'name' => 'Backend maintainer',
'role' => 'Backend engineer',
@@ -27,7 +27,7 @@ RSpec.describe Gitlab::Danger::Roulette do
end
let(:frontend_reviewer) do
- Gitlab::Danger::Teammate.new(
+ Tooling::Danger::Teammate.new(
'username' => 'frontend-reviewer',
'name' => 'Frontend reviewer',
'role' => 'Frontend engineer',
@@ -38,7 +38,7 @@ RSpec.describe Gitlab::Danger::Roulette do
end
let(:frontend_maintainer) do
- Gitlab::Danger::Teammate.new(
+ Tooling::Danger::Teammate.new(
'username' => 'frontend-maintainer',
'name' => 'Frontend maintainer',
'role' => 'Frontend engineer',
@@ -49,18 +49,18 @@ RSpec.describe Gitlab::Danger::Roulette do
end
let(:software_engineer_in_test) do
- Gitlab::Danger::Teammate.new(
+ Tooling::Danger::Teammate.new(
'username' => 'software-engineer-in-test',
'name' => 'Software Engineer in Test',
'role' => 'Software Engineer in Test, Create:Source Code',
- 'projects' => { 'gitlab' => 'reviewer qa', 'gitlab-qa' => 'maintainer' },
+ 'projects' => { 'gitlab' => 'maintainer qa', 'gitlab-qa' => 'maintainer' },
'available' => true,
'tz_offset_hours' => 2.0
)
end
let(:engineering_productivity_reviewer) do
- Gitlab::Danger::Teammate.new(
+ Tooling::Danger::Teammate.new(
'username' => 'eng-prod-reviewer',
'name' => 'EP engineer',
'role' => 'Engineering Productivity',
@@ -71,7 +71,7 @@ RSpec.describe Gitlab::Danger::Roulette do
end
let(:ci_template_reviewer) do
- Gitlab::Danger::Teammate.new(
+ Tooling::Danger::Teammate.new(
'username' => 'ci-template-maintainer',
'name' => 'CI Template engineer',
'role' => '~"ci::templates"',
@@ -121,7 +121,7 @@ RSpec.describe Gitlab::Danger::Roulette do
let!(:project) { 'gitlab' }
let!(:mr_source_branch) { 'a-branch' }
let!(:mr_labels) { ['backend', 'devops::create'] }
- let!(:author) { Gitlab::Danger::Teammate.new('username' => 'johndoe') }
+ let!(:author) { Tooling::Danger::Teammate.new('username' => 'johndoe') }
let(:timezone_experiment) { false }
let(:spins) do
# Stub the request at the latest time so that we can modify the raw data, e.g. available fields.
@@ -176,8 +176,24 @@ RSpec.describe Gitlab::Danger::Roulette do
context 'when change contains QA category' do
let(:categories) { [:qa] }
- it 'assigns QA reviewer' do
- expect(spins).to eq([described_class::Spin.new(:qa, software_engineer_in_test, nil, false, false)])
+ it 'assigns QA maintainer' do
+ expect(spins).to eq([described_class::Spin.new(:qa, nil, software_engineer_in_test, false, false)])
+ end
+ end
+
+ context 'when change contains QA category and another category' do
+ let(:categories) { [:backend, :qa] }
+
+ it 'assigns QA maintainer' do
+ expect(spins).to eq([described_class::Spin.new(:backend, engineering_productivity_reviewer, backend_maintainer, false, false), described_class::Spin.new(:qa, nil, software_engineer_in_test, :maintainer, false)])
+ end
+
+ context 'and author is an SET' do
+ let!(:author) { Tooling::Danger::Teammate.new('username' => software_engineer_in_test.username) }
+
+ it 'does not assign a QA reviewer or maintainer' do
+ expect(spins).to eq([described_class::Spin.new(:backend, engineering_productivity_reviewer, backend_maintainer, false, false), described_class::Spin.new(:qa, nil, nil, false, false)])
+ end
end
end
@@ -330,7 +346,7 @@ RSpec.describe Gitlab::Danger::Roulette do
describe '#spin_for_person' do
let(:person_tz_offset_hours) { 0.0 }
let(:person1) do
- Gitlab::Danger::Teammate.new(
+ Tooling::Danger::Teammate.new(
'username' => 'user1',
'available' => true,
'tz_offset_hours' => person_tz_offset_hours
@@ -338,21 +354,21 @@ RSpec.describe Gitlab::Danger::Roulette do
end
let(:person2) do
- Gitlab::Danger::Teammate.new(
+ Tooling::Danger::Teammate.new(
'username' => 'user2',
'available' => true,
'tz_offset_hours' => person_tz_offset_hours)
end
let(:author) do
- Gitlab::Danger::Teammate.new(
+ Tooling::Danger::Teammate.new(
'username' => 'johndoe',
'available' => true,
'tz_offset_hours' => 0.0)
end
let(:unavailable) do
- Gitlab::Danger::Teammate.new(
+ Tooling::Danger::Teammate.new(
'username' => 'janedoe',
'available' => false,
'tz_offset_hours' => 0.0)
diff --git a/spec/lib/gitlab/danger/sidekiq_queues_spec.rb b/spec/tooling/danger/sidekiq_queues_spec.rb
index 7dd1a2e6924..c5fc8592621 100644
--- a/spec/lib/gitlab/danger/sidekiq_queues_spec.rb
+++ b/spec/tooling/danger/sidekiq_queues_spec.rb
@@ -1,12 +1,11 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
require 'rspec-parameterized'
require_relative 'danger_spec_helper'
-require 'gitlab/danger/sidekiq_queues'
+require_relative '../../../tooling/danger/sidekiq_queues'
-RSpec.describe Gitlab::Danger::SidekiqQueues do
+RSpec.describe Tooling::Danger::SidekiqQueues do
using RSpec::Parameterized::TableSyntax
include DangerSpecHelper
diff --git a/spec/lib/gitlab/danger/teammate_spec.rb b/spec/tooling/danger/teammate_spec.rb
index 9c066ba4c1b..f3afdc6e912 100644
--- a/spec/lib/gitlab/danger/teammate_spec.rb
+++ b/spec/tooling/danger/teammate_spec.rb
@@ -1,12 +1,10 @@
# frozen_string_literal: true
-require 'timecop'
-require 'rspec-parameterized'
-
-require 'gitlab/danger/teammate'
+require_relative '../../../tooling/danger/teammate'
require 'active_support/testing/time_helpers'
+require 'rspec-parameterized'
-RSpec.describe Gitlab::Danger::Teammate do
+RSpec.describe Tooling::Danger::Teammate do
using RSpec::Parameterized::TableSyntax
subject { described_class.new(options) }
@@ -51,6 +49,13 @@ RSpec.describe Gitlab::Danger::Teammate do
context 'when having multiple capabilities' do
let(:capabilities) { ['reviewer backend', 'maintainer frontend', 'trainee_maintainer qa'] }
+ it '#any_capability? returns true if the person has any capability for the category in the given project' do
+ expect(subject.any_capability?(project, :backend)).to be_truthy
+ expect(subject.any_capability?(project, :frontend)).to be_truthy
+ expect(subject.any_capability?(project, :qa)).to be_truthy
+ expect(subject.any_capability?(project, :engineering_productivity)).to be_falsey
+ end
+
it '#reviewer? supports multiple roles per project' do
expect(subject.reviewer?(project, :backend, labels)).to be_truthy
end
diff --git a/spec/lib/gitlab/danger/title_linting_spec.rb b/spec/tooling/danger/title_linting_spec.rb
index b48d2c5e53d..7bc1684cd87 100644
--- a/spec/lib/gitlab/danger/title_linting_spec.rb
+++ b/spec/tooling/danger/title_linting_spec.rb
@@ -1,11 +1,10 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
require 'rspec-parameterized'
-require 'gitlab/danger/title_linting'
+require_relative '../../../tooling/danger/title_linting'
-RSpec.describe Gitlab::Danger::TitleLinting do
+RSpec.describe Tooling::Danger::TitleLinting do
using RSpec::Parameterized::TableSyntax
describe '#sanitize_mr_title' do
@@ -53,4 +52,40 @@ RSpec.describe Gitlab::Danger::TitleLinting do
expect(described_class.has_draft_flag?('My MR title')).to be false
end
end
+
+ describe '#has_cherry_pick_flag?' do
+ [
+ 'Cherry Pick !1234',
+ 'cherry-pick !1234',
+ 'CherryPick !1234'
+ ].each do |mr_title|
+ it 'returns true for a cherry-pick title' do
+ expect(described_class.has_cherry_pick_flag?(mr_title)).to be true
+ end
+ end
+
+ it 'returns false for non cherry-pick title' do
+ expect(described_class.has_cherry_pick_flag?('My MR title')).to be false
+ end
+ end
+
+ describe '#has_run_all_rspec_flag?' do
+ it 'returns true for a title that includes RUN ALL RSPEC' do
+ expect(described_class.has_run_all_rspec_flag?('My MR title RUN ALL RSPEC')).to be true
+ end
+
+ it 'returns false for a title that does not include RUN ALL RSPEC' do
+ expect(described_class.has_run_all_rspec_flag?('My MR title')).to be false
+ end
+ end
+
+ describe '#has_run_as_if_foss_flag?' do
+ it 'returns true for a title that includes RUN AS-IF-FOSS' do
+ expect(described_class.has_run_as_if_foss_flag?('My MR title RUN AS-IF-FOSS')).to be true
+ end
+
+ it 'returns false for a title that does not include RUN AS-IF-FOSS' do
+ expect(described_class.has_run_as_if_foss_flag?('My MR title')).to be false
+ end
+ end
end
diff --git a/spec/lib/gitlab/danger/weightage/maintainers_spec.rb b/spec/tooling/danger/weightage/maintainers_spec.rb
index 066bb487fa2..b99ffe706a4 100644
--- a/spec/lib/gitlab/danger/weightage/maintainers_spec.rb
+++ b/spec/tooling/danger/weightage/maintainers_spec.rb
@@ -1,9 +1,9 @@
# frozen_string_literal: true
-require 'gitlab/danger/weightage/maintainers'
+require_relative '../../../../tooling/danger/weightage/maintainers'
-RSpec.describe Gitlab::Danger::Weightage::Maintainers do
- let(:multiplier) { Gitlab::Danger::Weightage::CAPACITY_MULTIPLIER }
+RSpec.describe Tooling::Danger::Weightage::Maintainers do
+ let(:multiplier) { Tooling::Danger::Weightage::CAPACITY_MULTIPLIER }
let(:regular_maintainer) { double('Teammate', reduced_capacity: false) }
let(:reduced_capacity_maintainer) { double('Teammate', reduced_capacity: true) }
let(:maintainers) do
@@ -13,8 +13,8 @@ RSpec.describe Gitlab::Danger::Weightage::Maintainers do
]
end
- let(:maintainer_count) { Gitlab::Danger::Weightage::BASE_REVIEWER_WEIGHT * multiplier }
- let(:reduced_capacity_maintainer_count) { Gitlab::Danger::Weightage::BASE_REVIEWER_WEIGHT }
+ let(:maintainer_count) { Tooling::Danger::Weightage::BASE_REVIEWER_WEIGHT * multiplier }
+ let(:reduced_capacity_maintainer_count) { Tooling::Danger::Weightage::BASE_REVIEWER_WEIGHT }
subject(:weighted_maintainers) { described_class.new(maintainers).execute }
diff --git a/spec/lib/gitlab/danger/weightage/reviewers_spec.rb b/spec/tooling/danger/weightage/reviewers_spec.rb
index cca81f4d9b5..5693ce7a10c 100644
--- a/spec/lib/gitlab/danger/weightage/reviewers_spec.rb
+++ b/spec/tooling/danger/weightage/reviewers_spec.rb
@@ -1,9 +1,9 @@
# frozen_string_literal: true
-require 'gitlab/danger/weightage/reviewers'
+require_relative '../../../../tooling/danger/weightage/reviewers'
-RSpec.describe Gitlab::Danger::Weightage::Reviewers do
- let(:multiplier) { Gitlab::Danger::Weightage::CAPACITY_MULTIPLIER }
+RSpec.describe Tooling::Danger::Weightage::Reviewers do
+ let(:multiplier) { Tooling::Danger::Weightage::CAPACITY_MULTIPLIER }
let(:regular_reviewer) { double('Teammate', hungry: false, reduced_capacity: false) }
let(:hungry_reviewer) { double('Teammate', hungry: true, reduced_capacity: false) }
let(:reduced_capacity_reviewer) { double('Teammate', hungry: false, reduced_capacity: true) }
@@ -26,11 +26,11 @@ RSpec.describe Gitlab::Danger::Weightage::Reviewers do
]
end
- let(:hungry_reviewer_count) { Gitlab::Danger::Weightage::BASE_REVIEWER_WEIGHT * multiplier + described_class::DEFAULT_REVIEWER_WEIGHT }
+ let(:hungry_reviewer_count) { Tooling::Danger::Weightage::BASE_REVIEWER_WEIGHT * multiplier + described_class::DEFAULT_REVIEWER_WEIGHT }
let(:hungry_traintainer_count) { described_class::TRAINTAINER_WEIGHT * multiplier + described_class::DEFAULT_REVIEWER_WEIGHT }
- let(:reviewer_count) { Gitlab::Danger::Weightage::BASE_REVIEWER_WEIGHT * multiplier }
- let(:traintainer_count) { Gitlab::Danger::Weightage::BASE_REVIEWER_WEIGHT * described_class::TRAINTAINER_WEIGHT * multiplier }
- let(:reduced_capacity_reviewer_count) { Gitlab::Danger::Weightage::BASE_REVIEWER_WEIGHT }
+ let(:reviewer_count) { Tooling::Danger::Weightage::BASE_REVIEWER_WEIGHT * multiplier }
+ let(:traintainer_count) { Tooling::Danger::Weightage::BASE_REVIEWER_WEIGHT * described_class::TRAINTAINER_WEIGHT * multiplier }
+ let(:reduced_capacity_reviewer_count) { Tooling::Danger::Weightage::BASE_REVIEWER_WEIGHT }
let(:reduced_capacity_traintainer_count) { described_class::TRAINTAINER_WEIGHT }
subject(:weighted_reviewers) { described_class.new(reviewers, traintainers).execute }
diff --git a/spec/lib/gitlab_danger_spec.rb b/spec/tooling/gitlab_danger_spec.rb
index ed668c52a0e..20ac40d1d2a 100644
--- a/spec/lib/gitlab_danger_spec.rb
+++ b/spec/tooling/gitlab_danger_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require_relative '../../tooling/gitlab_danger'
RSpec.describe GitlabDanger do
let(:gitlab_danger_helper) { nil }
diff --git a/spec/uploaders/packages/composer/cache_uploader_spec.rb b/spec/uploaders/packages/composer/cache_uploader_spec.rb
new file mode 100644
index 00000000000..a4ba4cc2a1e
--- /dev/null
+++ b/spec/uploaders/packages/composer/cache_uploader_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Composer::CacheUploader do
+ let(:cache_file) { create(:composer_cache_file) } # rubocop:disable Rails/SaveBang
+ let(:uploader) { described_class.new(cache_file, :file) }
+ let(:path) { Gitlab.config.packages.storage_path }
+
+ subject { uploader }
+
+ it_behaves_like "builds correct paths",
+ store_dir: %r[^\h{2}/\h{2}/\h{64}/packages/composer_cache/\d+$],
+ cache_dir: %r[/packages/tmp/cache],
+ work_dir: %r[/packages/tmp/work]
+
+ context 'object store is remote' do
+ before do
+ stub_composer_cache_object_storage
+ end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like "builds correct paths",
+ store_dir: %r[^\h{2}/\h{2}/\h{64}/packages/composer_cache/\d+$]
+ end
+
+ describe 'remote file' do
+ let(:cache_file) { create(:composer_cache_file, :object_storage) }
+
+ context 'with object storage enabled' do
+ before do
+ stub_composer_cache_object_storage
+ end
+
+ it 'can store file remotely' do
+ allow(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async)
+
+ cache_file
+
+ expect(cache_file.file_store).to eq(described_class::Store::REMOTE)
+ expect(cache_file.file.path).not_to be_blank
+ end
+ end
+ end
+end
diff --git a/spec/validators/variable_duplicates_validator_spec.rb b/spec/validators/nested_attributes_duplicates_validator_spec.rb
index acc47ff225f..f59e422e5d4 100644
--- a/spec/validators/variable_duplicates_validator_spec.rb
+++ b/spec/validators/nested_attributes_duplicates_validator_spec.rb
@@ -2,13 +2,16 @@
require 'spec_helper'
-RSpec.describe VariableDuplicatesValidator do
- let(:validator) { described_class.new(attributes: [:variables], **options) }
+RSpec.describe NestedAttributesDuplicatesValidator do
+ let(:validator) { described_class.new(attributes: [attribute], **options) }
describe '#validate_each' do
let(:project) { build(:project) }
+ let(:record) { project }
+ let(:attribute) { :variables }
+ let(:value) { project.variables }
- subject { validator.validate_each(project, :variables, project.variables) }
+ subject { validator.validate_each(record, attribute, value) }
context 'with no scope' do
let(:options) { {} }
@@ -65,5 +68,46 @@ RSpec.describe VariableDuplicatesValidator do
end
end
end
+
+ context 'with a child attribute' do
+ let(:release) { build(:release) }
+ let(:first_link) { build(:release_link, name: 'test1', url: 'https://www.google1.com', release: release) }
+ let(:second_link) { build(:release_link, name: 'test2', url: 'https://www.google2.com', release: release) }
+ let(:record) { release }
+ let(:attribute) { :links }
+ let(:value) { release.links }
+ let(:options) { { scope: :release, child_attributes: %i[name url] } }
+
+ before do
+ release.links << first_link
+ release.links << second_link
+ end
+
+ it 'does not have any errors' do
+ subject
+
+ expect(release.errors.empty?).to be true
+ end
+
+ context 'when name is duplicated' do
+ let(:second_link) { build(:release_link, name: 'test1', release: release) }
+
+ it 'has a duplicate error' do
+ subject
+
+ expect(release.errors).to have_key(attribute)
+ end
+ end
+
+ context 'when url is duplicated' do
+ let(:second_link) { build(:release_link, url: 'https://www.google1.com', release: release) }
+
+ it 'has a duplicate error' do
+ subject
+
+ expect(release.errors).to have_key(attribute)
+ end
+ end
+ end
end
end
diff --git a/spec/views/groups/show.html.haml_spec.rb b/spec/views/groups/show.html.haml_spec.rb
new file mode 100644
index 00000000000..a53aab43c18
--- /dev/null
+++ b/spec/views/groups/show.html.haml_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'groups/show.html.haml' do
+ let_it_be(:user) { build(:user) }
+ let_it_be(:group) { create(:group) }
+
+ before do
+ assign(:group, group)
+ end
+
+ context 'when rendering with the layout' do
+ subject(:render_page) { render template: 'groups/show.html.haml', layout: 'layouts/group' }
+
+ describe 'invite team members' do
+ before do
+ allow(view).to receive(:session).and_return({})
+ allow(view).to receive(:current_user_mode).and_return(Gitlab::Auth::CurrentUserMode.new(user))
+ allow(view).to receive(:current_user).and_return(user)
+ allow(view).to receive(:experiment_enabled?).and_return(false)
+ allow(view).to receive(:group_path).and_return('')
+ allow(view).to receive(:group_shared_path).and_return('')
+ allow(view).to receive(:group_archived_path).and_return('')
+ end
+
+ context 'when invite team members is not available in sidebar' do
+ before do
+ allow(view).to receive(:can_invite_members_for_group?).and_return(false)
+ end
+
+ it 'does not display the js-invite-members-trigger' do
+ render_page
+
+ expect(rendered).not_to have_selector('.js-invite-members-trigger')
+ end
+ end
+
+ context 'when invite team members is available' do
+ before do
+ allow(view).to receive(:can_invite_members_for_group?).and_return(true)
+ end
+
+ it 'includes the div for js-invite-members-trigger' do
+ render_page
+
+ expect(rendered).to have_selector('.js-invite-members-trigger')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/views/layouts/_head.html.haml_spec.rb b/spec/views/layouts/_head.html.haml_spec.rb
index 15fdfaaaa65..0b06309deff 100644
--- a/spec/views/layouts/_head.html.haml_spec.rb
+++ b/spec/views/layouts/_head.html.haml_spec.rb
@@ -102,6 +102,44 @@ RSpec.describe 'layouts/_head' do
expect(rendered).to match(/<script.*>.*var u="\/\/#{matomo_host}\/".*<\/script>/m)
expect(rendered).to match(%r(<noscript>.*<img src="//#{matomo_host}/matomo.php.*</noscript>))
end
+
+ context 'matomo_disable_cookies' do
+ context 'when true' do
+ before do
+ stub_config(extra: { matomo_url: matomo_host, matomo_site_id: 12345, matomo_disable_cookies: true })
+ end
+
+ it 'disables cookies' do
+ render
+
+ expect(rendered).to include('_paq.push(["disableCookies"])')
+ end
+ end
+
+ context 'when false' do
+ before do
+ stub_config(extra: { matomo_url: matomo_host, matomo_site_id: 12345, matomo_disable_cookies: false })
+ end
+
+ it 'does not disable cookies' do
+ render
+
+ expect(rendered).not_to include('_paq.push(["disableCookies"])')
+ end
+ end
+
+ context 'when absent' do
+ before do
+ stub_config(extra: { matomo_url: matomo_host, matomo_site_id: 12345 })
+ end
+
+ it 'does not disable cookies' do
+ render
+
+ expect(rendered).not_to include('_paq.push(["disableCookies"])')
+ end
+ end
+ end
end
def stub_helper_with_safe_string(method)
diff --git a/spec/views/layouts/header/_new_dropdown.haml_spec.rb b/spec/views/layouts/header/_new_dropdown.haml_spec.rb
index 01892e72c97..80342cbdb41 100644
--- a/spec/views/layouts/header/_new_dropdown.haml_spec.rb
+++ b/spec/views/layouts/header/_new_dropdown.haml_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe 'layouts/header/_new_dropdown' do
before do
allow(Gitlab::Experimentation).to receive(:active?).and_return(true)
allow(view).to receive(:experiment_tracking_category_and_group)
- allow(view).to receive(:tracking_label).with(user)
+ allow(view).to receive(:tracking_label)
end
context 'with ability to invite members' do
@@ -20,8 +20,8 @@ RSpec.describe 'layouts/header/_new_dropdown' do
subject
expect(view).to have_received(:experiment_tracking_category_and_group)
- .with(:invite_members_new_dropdown, subject: user)
- expect(view).to have_received(:tracking_label).with(user)
+ .with(:invite_members_new_dropdown)
+ expect(view).to have_received(:tracking_label)
end
end
diff --git a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
index c5b56b15431..e34d8b91b38 100644
--- a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
@@ -48,7 +48,7 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
end
describe 'Packages' do
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
let_it_be(:package_menu_name) { 'Packages & Registries' }
let_it_be(:package_entry_name) { 'Package Registry' }
diff --git a/spec/views/layouts/nav/sidebar/_project_security_link.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_project_security_link.html.haml_spec.rb
new file mode 100644
index 00000000000..d3fb35bff6d
--- /dev/null
+++ b/spec/views/layouts/nav/sidebar/_project_security_link.html.haml_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'layouts/nav/sidebar/_project_security_link' do
+ let_it_be_with_reload(:project) { create(:project) }
+ context 'on security configuration' do
+ before do
+ assign(:project, project)
+ allow(controller).to receive(:controller_name).and_return('configuration')
+ allow(controller).to receive(:controller_path).and_return('projects/security/configuration')
+ allow(controller).to receive(:action_name).and_return('show')
+ allow(view).to receive(:any_project_nav_tab?).and_return(true)
+ allow(view).to receive(:project_nav_tab?).and_return(true)
+ end
+
+ it 'activates Security & Compliance tab' do
+ render
+
+ expect(rendered).to have_css('li.active', text: 'Security & Compliance')
+ end
+
+ it 'activates Configuration sub tab' do
+ render
+
+ expect(rendered).to have_css('.sidebar-sub-level-items > li.active', text: 'Configuration')
+ end
+ end
+end
diff --git a/spec/views/projects/_home_panel.html.haml_spec.rb b/spec/views/projects/_home_panel.html.haml_spec.rb
index 548dba7874a..cc0eb9919da 100644
--- a/spec/views/projects/_home_panel.html.haml_spec.rb
+++ b/spec/views/projects/_home_panel.html.haml_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe 'projects/_home_panel' do
let(:project) { create(:project) }
before do
+ stub_feature_flags(vue_notification_dropdown: false)
assign(:project, project)
allow(view).to receive(:current_user).and_return(user)
diff --git a/spec/views/projects/empty.html.haml_spec.rb b/spec/views/projects/empty.html.haml_spec.rb
index de83722160e..6762dcd22d5 100644
--- a/spec/views/projects/empty.html.haml_spec.rb
+++ b/spec/views/projects/empty.html.haml_spec.rb
@@ -79,4 +79,41 @@ RSpec.describe 'projects/empty' do
it_behaves_like 'no invite member info'
end
end
+
+ context 'when rendering with the layout' do
+ subject(:render_page) { render template: 'projects/empty.html.haml', layout: 'layouts/project' }
+
+ describe 'invite team members' do
+ before do
+ allow(view).to receive(:session).and_return({})
+ allow(view).to receive(:current_user_mode).and_return(Gitlab::Auth::CurrentUserMode.new(user))
+ allow(view).to receive(:current_user).and_return(user)
+ allow(view).to receive(:experiment_enabled?).and_return(false)
+ end
+
+ context 'when invite team members is not available in sidebar' do
+ before do
+ allow(view).to receive(:can_invite_members_for_project?).and_return(false)
+ end
+
+ it 'does not display the js-invite-members-trigger' do
+ render_page
+
+ expect(rendered).not_to have_selector('.js-invite-members-trigger')
+ end
+ end
+
+ context 'when invite team members is available' do
+ before do
+ allow(view).to receive(:can_invite_members_for_project?).and_return(true)
+ end
+
+ it 'includes the div for js-invite-members-trigger' do
+ render_page
+
+ expect(rendered).to have_selector('.js-invite-members-trigger')
+ end
+ end
+ end
+ end
end
diff --git a/spec/views/projects/show.html.haml_spec.rb b/spec/views/projects/show.html.haml_spec.rb
new file mode 100644
index 00000000000..995e31e83af
--- /dev/null
+++ b/spec/views/projects/show.html.haml_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'projects/show.html.haml' do
+ let_it_be(:user) { build(:user) }
+ let_it_be(:project) { ProjectPresenter.new(create(:project, :repository), current_user: user) }
+
+ before do
+ assign(:project, project)
+ end
+
+ context 'when rendering with the layout' do
+ subject(:render_page) { render template: 'projects/show.html.haml', layout: 'layouts/project' }
+
+ describe 'invite team members' do
+ before do
+ allow(view).to receive(:event_filter_link)
+ allow(view).to receive(:session).and_return({})
+ allow(view).to receive(:current_user_mode).and_return(Gitlab::Auth::CurrentUserMode.new(user))
+ allow(view).to receive(:current_user).and_return(user)
+ allow(view).to receive(:experiment_enabled?).and_return(false)
+ allow(view).to receive(:add_page_startup_graphql_call)
+ end
+
+ context 'when invite team members is not available in sidebar' do
+ before do
+ allow(view).to receive(:can_invite_members_for_project?).and_return(false)
+ end
+
+ it 'does not display the js-invite-members-trigger' do
+ render_page
+
+ expect(rendered).not_to have_selector('.js-invite-members-trigger')
+ end
+ end
+
+ context 'when invite team members is available' do
+ before do
+ allow(view).to receive(:can_invite_members_for_project?).and_return(true)
+ end
+
+ it 'includes the div for js-invite-members-trigger' do
+ render_page
+
+ expect(rendered).to have_selector('.js-invite-members-trigger')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/views/projects/tree/_tree_row.html.haml_spec.rb b/spec/views/projects/tree/_tree_row.html.haml_spec.rb
deleted file mode 100644
index 43a37934afd..00000000000
--- a/spec/views/projects/tree/_tree_row.html.haml_spec.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'projects/tree/_tree_row' do
- let(:project) { create(:project, :repository) }
- let(:repository) { project.repository }
-
- # rubocop: disable Rails/FindBy
- # This is not ActiveRecord where..first
- let(:blob_item) { Gitlab::Git::Tree.where(repository, SeedRepo::Commit::ID, 'files/ruby').first }
- # rubocop: enable Rails/FindBy
-
- before do
- assign(:project, project)
- assign(:repository, repository)
- assign(:id, File.join('master', ''))
- assign(:lfs_blob_ids, [])
- end
-
- it 'renders blob item' do
- render_partial(blob_item)
-
- expect(rendered).to have_content(blob_item.name)
- expect(rendered).not_to have_selector('.label-lfs', text: 'LFS')
- end
-
- describe 'LFS blob' do
- before do
- assign(:lfs_blob_ids, [blob_item].map(&:id))
-
- render_partial(blob_item)
- end
-
- it 'renders LFS badge' do
- expect(rendered).to have_selector('.label-lfs', text: 'LFS')
- end
- end
-
- def render_partial(items)
- render partial: 'projects/tree/tree_row', collection: [items].flatten
- end
-end
diff --git a/spec/views/search/_filter.html.haml_spec.rb b/spec/views/search/_filter.html.haml_spec.rb
deleted file mode 100644
index 868408f7beb..00000000000
--- a/spec/views/search/_filter.html.haml_spec.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'search/_filter' do
- context 'when the search page is opened' do
- it 'displays the correct elements' do
- render
-
- expect(rendered).to have_selector('label[for="dashboard_search_group"]')
- expect(rendered).to have_selector('input#js-search-group-dropdown')
-
- expect(rendered).to have_selector('label[for="dashboard_search_project"]')
- expect(rendered).to have_selector('input#js-search-project-dropdown')
- end
- end
-end
diff --git a/spec/views/search/_form.html.haml_spec.rb b/spec/views/search/_form.html.haml_spec.rb
deleted file mode 100644
index 073a39e4ed6..00000000000
--- a/spec/views/search/_form.html.haml_spec.rb
+++ /dev/null
@@ -1,14 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'search/_form' do
- context 'when the search page is opened' do
- it 'displays the correct elements' do
- render
-
- expect(rendered).to have_selector('.search-field-holder.form-group')
- expect(rendered).to have_selector('label[for="dashboard_search"]')
- end
- end
-end
diff --git a/spec/views/shared/ssh_keys/_key_details.html.haml_spec.rb b/spec/views/shared/ssh_keys/_key_details.html.haml_spec.rb
new file mode 100644
index 00000000000..400319a42b7
--- /dev/null
+++ b/spec/views/shared/ssh_keys/_key_details.html.haml_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'shared/ssh_keys/_key_delete.html.haml' do
+ context 'when the text parameter is used' do
+ it 'has text' do
+ render 'shared/ssh_keys/key_delete.html.haml', text: 'Button', html_class: '', button_data: ''
+
+ expect(rendered).to have_button('Button')
+ end
+ end
+
+ context 'when the text parameter is not used' do
+ it 'does not have text' do
+ render 'shared/ssh_keys/key_delete.html.haml', html_class: '', button_data: ''
+
+ expect(rendered).to have_button('Delete')
+ end
+ end
+end
diff --git a/spec/workers/bulk_import_worker_spec.rb b/spec/workers/bulk_import_worker_spec.rb
index d3a4144d606..8cf14ed6f8b 100644
--- a/spec/workers/bulk_import_worker_spec.rb
+++ b/spec/workers/bulk_import_worker_spec.rb
@@ -72,6 +72,21 @@ RSpec.describe BulkImportWorker do
expect(bulk_import.entities.map(&:status_name)).to contain_exactly(:created, :started)
end
end
+
+ context 'when exception occurs' do
+ it 'tracks the exception & marks import as failed' do
+ bulk_import = create(:bulk_import, :created)
+ create(:bulk_import_entity, :created, bulk_import: bulk_import)
+
+ allow(BulkImports::EntityWorker).to receive(:perform_async).and_raise(StandardError)
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(kind_of(StandardError), bulk_import_id: bulk_import.id)
+
+ subject.perform(bulk_import.id)
+
+ expect(bulk_import.reload.failed?).to eq(true)
+ end
+ end
end
end
end
diff --git a/spec/workers/bulk_imports/entity_worker_spec.rb b/spec/workers/bulk_imports/entity_worker_spec.rb
index 31515b31947..cd9a6f605b9 100644
--- a/spec/workers/bulk_imports/entity_worker_spec.rb
+++ b/spec/workers/bulk_imports/entity_worker_spec.rb
@@ -24,6 +24,20 @@ RSpec.describe BulkImports::EntityWorker do
expect(entity.reload.jid).to eq(jid)
end
+
+ context 'when exception occurs' do
+ it 'tracks the exception & marks entity as failed' do
+ allow(BulkImports::Importers::GroupImporter).to receive(:new) { raise StandardError }
+
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(kind_of(StandardError), bulk_import_id: bulk_import.id, entity_id: entity.id)
+
+ subject.perform(entity.id)
+
+ expect(entity.reload.failed?).to eq(true)
+ end
+ end
end
context 'when started entity does not exist' do
diff --git a/spec/workers/ci/pipeline_artifacts/create_quality_report_worker_spec.rb b/spec/workers/ci/pipeline_artifacts/create_quality_report_worker_spec.rb
new file mode 100644
index 00000000000..be351032b58
--- /dev/null
+++ b/spec/workers/ci/pipeline_artifacts/create_quality_report_worker_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Ci::PipelineArtifacts::CreateQualityReportWorker do
+ describe '#perform' do
+ subject { described_class.new.perform(pipeline_id) }
+
+ context 'when pipeline exists' do
+ let(:pipeline) { create(:ci_pipeline, :with_codequality_reports) }
+ let(:pipeline_id) { pipeline.id }
+
+ it 'calls pipeline codequality report service' do
+ expect_next_instance_of(::Ci::PipelineArtifacts::CreateCodeQualityMrDiffReportService) do |quality_report_service|
+ expect(quality_report_service).to receive(:execute)
+ end
+
+ subject
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { pipeline_id }
+
+ it 'creates a pipeline artifact' do
+ expect { subject }.to change { pipeline.pipeline_artifacts.count }.by(1)
+ end
+ end
+ end
+
+ context 'when pipeline does not exist' do
+ let(:pipeline_id) { non_existing_record_id }
+
+ it 'does not call pipeline codequality report service' do
+ expect(Ci::PipelineArtifacts::CreateCodeQualityMrDiffReportService).not_to receive(:execute)
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb b/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb
index 9e9aa962b63..2bdd8345374 100644
--- a/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb
+++ b/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Ci::PipelineArtifacts::ExpireArtifactsWorker do
describe '#perform' do
let_it_be(:pipeline_artifact) do
- create(:ci_pipeline_artifact, expire_at: 1.week.ago)
+ create(:ci_pipeline_artifact, :with_coverage_report, expire_at: 1.week.ago)
end
it 'executes a service' do
diff --git a/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb b/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
index e6592f7f204..ba6cf133b0c 100644
--- a/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
+++ b/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
@@ -82,6 +82,7 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
it 'skips the repository' do
expect(ContainerExpirationPolicies::CleanupService).not_to receive(:new)
expect(worker).to receive(:log_extra_metadata_on_done).with(:container_repository_id, repository.id)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:project_id, repository.project.id)
expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_status, :skipped)
expect { subject }.to change { ContainerRepository.waiting_for_cleanup.count }.from(1).to(0)
@@ -213,8 +214,10 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
end
def expect_log_extra_metadata(service_response:, cleanup_status: :finished, truncated: false)
- expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_status, cleanup_status)
expect(worker).to receive(:log_extra_metadata_on_done).with(:container_repository_id, repository.id)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:project_id, repository.project.id)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_status, cleanup_status)
+
%i[cleanup_tags_service_original_size cleanup_tags_service_before_truncate_size cleanup_tags_service_after_truncate_size cleanup_tags_service_before_delete_size].each do |field|
value = service_response.payload[field]
expect(worker).to receive(:log_extra_metadata_on_done).with(field, value) unless value.nil?
diff --git a/spec/workers/git_garbage_collect_worker_spec.rb b/spec/workers/git_garbage_collect_worker_spec.rb
index 13089549086..3df64c35166 100644
--- a/spec/workers/git_garbage_collect_worker_spec.rb
+++ b/spec/workers/git_garbage_collect_worker_spec.rb
@@ -5,350 +5,22 @@ require 'fileutils'
require 'spec_helper'
RSpec.describe GitGarbageCollectWorker do
- include GitHelpers
-
let_it_be(:project) { create(:project, :repository) }
- let(:shell) { Gitlab::Shell.new }
- let!(:lease_uuid) { SecureRandom.uuid }
- let!(:lease_key) { "project_housekeeping:#{project.id}" }
- let(:params) { [project.id, task, lease_key, lease_uuid] }
-
- subject { described_class.new }
-
- shared_examples 'it calls Gitaly' do
- specify do
- expect_any_instance_of(Gitlab::GitalyClient::RepositoryService).to receive(gitaly_task)
- .and_return(nil)
-
- subject.perform(*params)
- end
- end
- shared_examples 'it updates the project statistics' do
- it 'updates the project statistics' do
- expect_next_instance_of(Projects::UpdateStatisticsService, project, nil, statistics: [:repository_size, :lfs_objects_size]) do |service|
- expect(service).to receive(:execute).and_call_original
- end
-
- subject.perform(*params)
- end
+ let(:lease_uuid) { SecureRandom.uuid }
+ let(:lease_key) { "project_housekeeping:#{project.id}" }
+ let(:task) { :full_repack }
+ let(:params) { [project.id, task, lease_key, lease_uuid] }
- it 'does nothing if the database is read-only' do
- allow(Gitlab::Database).to receive(:read_only?) { true }
-
- expect_any_instance_of(Projects::UpdateStatisticsService).not_to receive(:execute)
-
- subject.perform(*params)
- end
- end
+ subject { described_class.new }
describe "#perform" do
- let(:gitaly_task) { :garbage_collect }
- let(:task) { :gc }
-
- context 'with active lease_uuid' do
- before do
- allow(subject).to receive(:get_lease_uuid).and_return(lease_uuid)
- end
-
- it_behaves_like 'it calls Gitaly'
- it_behaves_like 'it updates the project statistics'
-
- it "flushes ref caches when the task if 'gc'" do
- expect(subject).to receive(:renew_lease).with(lease_key, lease_uuid).and_call_original
- expect_any_instance_of(Repository).to receive(:expire_branches_cache).and_call_original
- expect_any_instance_of(Repository).to receive(:branch_names).and_call_original
- expect_any_instance_of(Repository).to receive(:has_visible_content?).and_call_original
- expect_any_instance_of(Gitlab::Git::Repository).to receive(:has_visible_content?).and_call_original
-
- subject.perform(*params)
- end
-
- it 'handles gRPC errors' do
- expect_any_instance_of(Gitlab::GitalyClient::RepositoryService).to receive(:garbage_collect).and_raise(GRPC::NotFound)
-
- expect { subject.perform(*params) }.to raise_exception(Gitlab::Git::Repository::NoRepository)
- end
- end
-
- context 'with different lease than the active one' do
- before do
- allow(subject).to receive(:get_lease_uuid).and_return(SecureRandom.uuid)
- end
-
- it 'returns silently' do
- expect_any_instance_of(Repository).not_to receive(:expire_branches_cache).and_call_original
- expect_any_instance_of(Repository).not_to receive(:branch_names).and_call_original
- expect_any_instance_of(Repository).not_to receive(:has_visible_content?).and_call_original
-
- subject.perform(*params)
- end
- end
-
- context 'with no active lease' do
- let(:params) { [project.id] }
-
- before do
- allow(subject).to receive(:get_lease_uuid).and_return(false)
- end
-
- context 'when is able to get the lease' do
- before do
- allow(subject).to receive(:try_obtain_lease).and_return(SecureRandom.uuid)
- end
-
- it_behaves_like 'it calls Gitaly'
- it_behaves_like 'it updates the project statistics'
-
- it "flushes ref caches when the task if 'gc'" do
- expect(subject).to receive(:get_lease_uuid).with("git_gc:#{task}:#{project.id}").and_return(false)
- expect_any_instance_of(Repository).to receive(:expire_branches_cache).and_call_original
- expect_any_instance_of(Repository).to receive(:branch_names).and_call_original
- expect_any_instance_of(Repository).to receive(:has_visible_content?).and_call_original
- expect_any_instance_of(Gitlab::Git::Repository).to receive(:has_visible_content?).and_call_original
-
- subject.perform(*params)
- end
-
- context 'when the repository has joined a pool' do
- let!(:pool) { create(:pool_repository, :ready) }
- let(:project) { pool.source_project }
-
- it 'ensures the repositories are linked' do
- expect_any_instance_of(PoolRepository).to receive(:link_repository).once
-
- subject.perform(*params)
- end
- end
-
- context 'LFS object garbage collection' do
- before do
- stub_lfs_setting(enabled: true)
- end
-
- let_it_be(:lfs_reference) { create(:lfs_objects_project, project: project) }
- let(:lfs_object) { lfs_reference.lfs_object }
-
- it 'cleans up unreferenced LFS objects' do
- expect_next_instance_of(Gitlab::Cleanup::OrphanLfsFileReferences) do |svc|
- expect(svc.project).to eq(project)
- expect(svc.dry_run).to be_falsy
- expect(svc).to receive(:run!).and_call_original
- end
-
- subject.perform(*params)
-
- expect(project.lfs_objects.reload).not_to include(lfs_object)
- end
-
- it 'catches and logs exceptions' do
- expect_any_instance_of(Gitlab::Cleanup::OrphanLfsFileReferences)
- .to receive(:run!)
- .and_raise(/Failed/)
-
- expect(Gitlab::GitLogger).to receive(:warn)
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
-
- subject.perform(*params)
- end
-
- it 'does nothing if the database is read-only' do
- allow(Gitlab::Database).to receive(:read_only?) { true }
- expect_any_instance_of(Gitlab::Cleanup::OrphanLfsFileReferences).not_to receive(:run!)
-
- subject.perform(*params)
-
- expect(project.lfs_objects.reload).to include(lfs_object)
- end
- end
- end
-
- context 'when no lease can be obtained' do
- before do
- expect(subject).to receive(:try_obtain_lease).and_return(false)
- end
-
- it 'returns silently' do
- expect(subject).not_to receive(:command)
- expect_any_instance_of(Repository).not_to receive(:expire_branches_cache).and_call_original
- expect_any_instance_of(Repository).not_to receive(:branch_names).and_call_original
- expect_any_instance_of(Repository).not_to receive(:has_visible_content?).and_call_original
-
- subject.perform(*params)
- end
- end
- end
-
- context "repack_full" do
- let(:task) { :full_repack }
- let(:gitaly_task) { :repack_full }
-
- before do
- expect(subject).to receive(:get_lease_uuid).and_return(lease_uuid)
+ it 'calls the Projects::GitGarbageCollectWorker with the same params' do
+ expect_next_instance_of(Projects::GitGarbageCollectWorker) do |instance|
+ expect(instance).to receive(:perform).with(*params)
end
- it_behaves_like 'it calls Gitaly'
- it_behaves_like 'it updates the project statistics'
- end
-
- context "pack_refs" do
- let(:task) { :pack_refs }
- let(:gitaly_task) { :pack_refs }
-
- before do
- expect(subject).to receive(:get_lease_uuid).and_return(lease_uuid)
- end
-
- it "calls Gitaly" do
- expect_any_instance_of(Gitlab::GitalyClient::RefService).to receive(task)
- .and_return(nil)
-
- subject.perform(*params)
- end
-
- it 'does not update the project statistics' do
- expect(Projects::UpdateStatisticsService).not_to receive(:new)
-
- subject.perform(*params)
- end
- end
-
- context "repack_incremental" do
- let(:task) { :incremental_repack }
- let(:gitaly_task) { :repack_incremental }
-
- before do
- expect(subject).to receive(:get_lease_uuid).and_return(lease_uuid)
- end
-
- it_behaves_like 'it calls Gitaly'
- it_behaves_like 'it updates the project statistics'
- end
-
- shared_examples 'gc tasks' do
- before do
- allow(subject).to receive(:get_lease_uuid).and_return(lease_uuid)
- allow(subject).to receive(:bitmaps_enabled?).and_return(bitmaps_enabled)
- end
-
- it 'incremental repack adds a new packfile' do
- create_objects(project)
- before_packs = packs(project)
-
- expect(before_packs.count).to be >= 1
-
- subject.perform(project.id, 'incremental_repack', lease_key, lease_uuid)
- after_packs = packs(project)
-
- # Exactly one new pack should have been created
- expect(after_packs.count).to eq(before_packs.count + 1)
-
- # Previously existing packs are still around
- expect(before_packs & after_packs).to eq(before_packs)
- end
-
- it 'full repack consolidates into 1 packfile' do
- create_objects(project)
- subject.perform(project.id, 'incremental_repack', lease_key, lease_uuid)
- before_packs = packs(project)
-
- expect(before_packs.count).to be >= 2
-
- subject.perform(project.id, 'full_repack', lease_key, lease_uuid)
- after_packs = packs(project)
-
- expect(after_packs.count).to eq(1)
-
- # Previously existing packs should be gone now
- expect(after_packs - before_packs).to eq(after_packs)
-
- expect(File.exist?(bitmap_path(after_packs.first))).to eq(bitmaps_enabled)
- end
-
- it 'gc consolidates into 1 packfile and updates packed-refs' do
- create_objects(project)
- before_packs = packs(project)
- before_packed_refs = packed_refs(project)
-
- expect(before_packs.count).to be >= 1
-
- expect_any_instance_of(Gitlab::GitalyClient::RepositoryService)
- .to receive(:garbage_collect)
- .with(bitmaps_enabled, prune: false)
- .and_call_original
-
- subject.perform(project.id, 'gc', lease_key, lease_uuid)
- after_packed_refs = packed_refs(project)
- after_packs = packs(project)
-
- expect(after_packs.count).to eq(1)
-
- # Previously existing packs should be gone now
- expect(after_packs - before_packs).to eq(after_packs)
-
- # The packed-refs file should have been updated during 'git gc'
- expect(before_packed_refs).not_to eq(after_packed_refs)
-
- expect(File.exist?(bitmap_path(after_packs.first))).to eq(bitmaps_enabled)
- end
-
- it 'cleans up repository after finishing' do
- expect_any_instance_of(Project).to receive(:cleanup).and_call_original
-
- subject.perform(project.id, 'gc', lease_key, lease_uuid)
- end
-
- it 'prune calls garbage_collect with the option prune: true' do
- expect_any_instance_of(Gitlab::GitalyClient::RepositoryService)
- .to receive(:garbage_collect)
- .with(bitmaps_enabled, prune: true)
- .and_return(nil)
-
- subject.perform(project.id, 'prune', lease_key, lease_uuid)
- end
- end
-
- context 'with bitmaps enabled' do
- let(:bitmaps_enabled) { true }
-
- include_examples 'gc tasks'
- end
-
- context 'with bitmaps disabled' do
- let(:bitmaps_enabled) { false }
-
- include_examples 'gc tasks'
- end
- end
-
- # Create a new commit on a random new branch
- def create_objects(project)
- rugged = rugged_repo(project.repository)
- old_commit = rugged.branches.first.target
- new_commit_sha = Rugged::Commit.create(
- rugged,
- message: "hello world #{SecureRandom.hex(6)}",
- author: { email: 'foo@bar', name: 'baz' },
- committer: { email: 'foo@bar', name: 'baz' },
- tree: old_commit.tree,
- parents: [old_commit]
- )
- rugged.references.create("refs/heads/#{SecureRandom.hex(6)}", new_commit_sha)
- end
-
- def packs(project)
- Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- Dir["#{project.repository.path_to_repo}/objects/pack/*.pack"]
+ subject.perform(*params)
end
end
-
- def packed_refs(project)
- path = "#{project.repository.path_to_repo}/packed-refs"
- FileUtils.touch(path)
- File.read(path)
- end
-
- def bitmap_path(pack)
- pack.sub(/\.pack\z/, '.bitmap')
- end
end
diff --git a/spec/workers/jira_connect/sync_builds_worker_spec.rb b/spec/workers/jira_connect/sync_builds_worker_spec.rb
index 7c58803d778..8fb8692fdf7 100644
--- a/spec/workers/jira_connect/sync_builds_worker_spec.rb
+++ b/spec/workers/jira_connect/sync_builds_worker_spec.rb
@@ -32,29 +32,5 @@ RSpec.describe ::JiraConnect::SyncBuildsWorker do
subject
end
end
-
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(jira_sync_builds: false)
- end
-
- it 'does not call the sync service' do
- expect_next(::JiraConnect::SyncService).not_to receive(:execute)
-
- subject
- end
- end
-
- context 'when the feature flag is enabled for this project' do
- before do
- stub_feature_flags(jira_sync_builds: pipeline.project)
- end
-
- it 'calls the sync service' do
- expect_next(::JiraConnect::SyncService).to receive(:execute)
-
- subject
- end
- end
end
end
diff --git a/spec/workers/jira_connect/sync_deployments_worker_spec.rb b/spec/workers/jira_connect/sync_deployments_worker_spec.rb
index 9485f4cd3a7..16fa2643d04 100644
--- a/spec/workers/jira_connect/sync_deployments_worker_spec.rb
+++ b/spec/workers/jira_connect/sync_deployments_worker_spec.rb
@@ -32,29 +32,5 @@ RSpec.describe ::JiraConnect::SyncDeploymentsWorker do
subject
end
end
-
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(jira_sync_deployments: false)
- end
-
- it 'does not call the sync service' do
- expect_next(::JiraConnect::SyncService).not_to receive(:execute)
-
- subject
- end
- end
-
- context 'when the feature flag is enabled for this project' do
- before do
- stub_feature_flags(jira_sync_deployments: deployment.project)
- end
-
- it 'calls the sync service' do
- expect_next(::JiraConnect::SyncService).to receive(:execute)
-
- subject
- end
- end
end
end
diff --git a/spec/workers/jira_connect/sync_feature_flags_worker_spec.rb b/spec/workers/jira_connect/sync_feature_flags_worker_spec.rb
index 035f4ebdd3c..038eed7b9f1 100644
--- a/spec/workers/jira_connect/sync_feature_flags_worker_spec.rb
+++ b/spec/workers/jira_connect/sync_feature_flags_worker_spec.rb
@@ -32,29 +32,5 @@ RSpec.describe ::JiraConnect::SyncFeatureFlagsWorker do
subject
end
end
-
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(jira_sync_feature_flags: false)
- end
-
- it 'does not call the sync service' do
- expect_next(::JiraConnect::SyncService).not_to receive(:execute)
-
- subject
- end
- end
-
- context 'when the feature flag is enabled for this project' do
- before do
- stub_feature_flags(jira_sync_feature_flags: feature_flag.project)
- end
-
- it 'calls the sync service' do
- expect_next(::JiraConnect::SyncService).to receive(:execute)
-
- subject
- end
- end
end
end
diff --git a/spec/workers/merge_request_cleanup_refs_worker_spec.rb b/spec/workers/merge_request_cleanup_refs_worker_spec.rb
index 88d7322536b..7401c6dd4d7 100644
--- a/spec/workers/merge_request_cleanup_refs_worker_spec.rb
+++ b/spec/workers/merge_request_cleanup_refs_worker_spec.rb
@@ -17,6 +17,18 @@ RSpec.describe MergeRequestCleanupRefsWorker do
subject
end
end
+
+ context 'when merge_request_refs_cleanup flag is disabled' do
+ before do
+ stub_feature_flags(merge_request_refs_cleanup: false)
+ end
+
+ it 'does not clean up the merge request' do
+ expect(MergeRequests::CleanupRefsService).not_to receive(:new)
+
+ perform_multiple(1)
+ end
+ end
end
context 'when merge request does not exist' do
diff --git a/spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb b/spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb
new file mode 100644
index 00000000000..722ecfc1dec
--- /dev/null
+++ b/spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Namespaces::InProductMarketingEmailsWorker, '#perform' do
+ context 'when the experiment is inactive' do
+ before do
+ stub_experiment(in_product_marketing_emails: false)
+ end
+
+ it 'does not execute the in product marketing emails service' do
+ expect(Namespaces::InProductMarketingEmailsService).not_to receive(:send_for_all_tracks_and_intervals)
+
+ subject.perform
+ end
+ end
+
+ context 'when the experiment is active' do
+ before do
+ stub_experiment(in_product_marketing_emails: true)
+ end
+
+ it 'calls the send_for_all_tracks_and_intervals method on the in product marketing emails service' do
+ expect(Namespaces::InProductMarketingEmailsService).to receive(:send_for_all_tracks_and_intervals)
+
+ subject.perform
+ end
+ end
+end
diff --git a/spec/workers/namespaces/onboarding_pipeline_created_worker_spec.rb b/spec/workers/namespaces/onboarding_pipeline_created_worker_spec.rb
index f1789fa8fbd..6d69ccb50bd 100644
--- a/spec/workers/namespaces/onboarding_pipeline_created_worker_spec.rb
+++ b/spec/workers/namespaces/onboarding_pipeline_created_worker_spec.rb
@@ -3,30 +3,15 @@
require 'spec_helper'
RSpec.describe Namespaces::OnboardingPipelineCreatedWorker, '#perform' do
- include AfterNextHelpers
-
let_it_be(:ci_pipeline) { create(:ci_pipeline) }
- before do
- OnboardingProgress.onboard(ci_pipeline.project.namespace)
- end
-
- it 'registers an onboarding progress action' do
- expect_next(OnboardingProgressService, ci_pipeline.project.namespace)
- .to receive(:execute).with(action: :pipeline_created).and_call_original
+ it_behaves_like 'records an onboarding progress action', :pipeline_created do
+ let(:namespace) { ci_pipeline.project.namespace }
- subject.perform(ci_pipeline.project.namespace_id)
-
- expect(OnboardingProgress.completed?(ci_pipeline.project.namespace, :pipeline_created)).to eq(true)
+ subject { described_class.new.perform(ci_pipeline.project.namespace_id) }
end
- context "when a namespace doesn't exist" do
- it 'does not register an onboarding progress action' do
- expect_next(OnboardingProgressService, ci_pipeline.project.namespace).not_to receive(:execute)
-
- subject.perform(nil)
-
- expect(OnboardingProgress.completed?(ci_pipeline.project.namespace, :pipeline_created)).to eq(false)
- end
+ it_behaves_like 'does not record an onboarding progress action' do
+ subject { described_class.new.perform(nil) }
end
end
diff --git a/spec/workers/namespaces/onboarding_user_added_worker_spec.rb b/spec/workers/namespaces/onboarding_user_added_worker_spec.rb
index a773e160fab..14428c0ecb8 100644
--- a/spec/workers/namespaces/onboarding_user_added_worker_spec.rb
+++ b/spec/workers/namespaces/onboarding_user_added_worker_spec.rb
@@ -3,20 +3,9 @@
require 'spec_helper'
RSpec.describe Namespaces::OnboardingUserAddedWorker, '#perform' do
- include AfterNextHelpers
+ let_it_be(:namespace) { create(:group) }
- let_it_be(:group) { create(:group) }
+ subject { described_class.new.perform(namespace.id) }
- before do
- OnboardingProgress.onboard(group)
- end
-
- it 'registers an onboarding progress action' do
- expect_next(OnboardingProgressService, group)
- .to receive(:execute).with(action: :user_added).and_call_original
-
- subject.perform(group.id)
-
- expect(OnboardingProgress.completed?(group, :user_added)).to be(true)
- end
+ it_behaves_like 'records an onboarding progress action', :user_added
end
diff --git a/spec/workers/projects/git_garbage_collect_worker_spec.rb b/spec/workers/projects/git_garbage_collect_worker_spec.rb
new file mode 100644
index 00000000000..8c44643ae51
--- /dev/null
+++ b/spec/workers/projects/git_garbage_collect_worker_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::GitGarbageCollectWorker do
+ let_it_be(:project) { create(:project, :repository) }
+
+ it_behaves_like 'can collect git garbage' do
+ let(:resource) { project }
+ let(:statistics_service_klass) { Projects::UpdateStatisticsService }
+ let(:statistics_keys) { [:repository_size, :lfs_objects_size] }
+ let(:expected_default_lease) { "projects:#{resource.id}" }
+ end
+
+  context 'when it is able to get the lease' do
+ let(:params) { [project.id] }
+
+ subject { described_class.new }
+
+ before do
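+      # Stub the lease helpers so each example drives #perform straight into the GC work.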
+ allow(subject).to receive(:get_lease_uuid).and_return(false)
+ allow(subject).to receive(:find_resource).and_return(project)
+ allow(subject).to receive(:try_obtain_lease).and_return(SecureRandom.uuid)
+ end
+
+ context 'when the repository has joined a pool' do
+ let!(:pool) { create(:pool_repository, :ready) }
+ let(:project) { pool.source_project }
+
+ it 'ensures the repositories are linked' do
+ expect(project.pool_repository).to receive(:link_repository).once
+
+ subject.perform(*params)
+ end
+ end
+
+ context 'LFS object garbage collection' do
+ let_it_be(:lfs_reference) { create(:lfs_objects_project, project: project) }
+ let(:lfs_object) { lfs_reference.lfs_object }
+
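+      # Enable LFS so these examples exercise the orphan LFS reference cleanup.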
+ before do
+ stub_lfs_setting(enabled: true)
+ end
+
+ it 'cleans up unreferenced LFS objects' do
+ expect_next_instance_of(Gitlab::Cleanup::OrphanLfsFileReferences) do |svc|
+ expect(svc.project).to eq(project)
+ expect(svc.dry_run).to be_falsy
+ expect(svc).to receive(:run!).and_call_original
+ end
+
+ subject.perform(*params)
+
+ expect(project.lfs_objects.reload).not_to include(lfs_object)
+ end
+
+ it 'catches and logs exceptions' do
+ allow_next_instance_of(Gitlab::Cleanup::OrphanLfsFileReferences) do |svc|
+          allow(svc).to receive(:run!).and_raise(/Failed/)
+ end
+
+ expect(Gitlab::GitLogger).to receive(:warn)
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+
+ subject.perform(*params)
+ end
+
+ it 'does nothing if the database is read-only' do
+ allow(Gitlab::Database).to receive(:read_only?) { true }
+ expect(Gitlab::Cleanup::OrphanLfsFileReferences).not_to receive(:new)
+
+ subject.perform(*params)
+
+ expect(project.lfs_objects.reload).to include(lfs_object)
+ end
+ end
+ end
+end
diff --git a/spec/workers/schedule_merge_request_cleanup_refs_worker_spec.rb b/spec/workers/schedule_merge_request_cleanup_refs_worker_spec.rb
index 0dd50efba1c..869818b257e 100644
--- a/spec/workers/schedule_merge_request_cleanup_refs_worker_spec.rb
+++ b/spec/workers/schedule_merge_request_cleanup_refs_worker_spec.rb
@@ -20,6 +20,18 @@ RSpec.describe ScheduleMergeRequestCleanupRefsWorker do
worker.perform
end
+ context 'when merge_request_refs_cleanup flag is disabled' do
+ before do
+ stub_feature_flags(merge_request_refs_cleanup: false)
+ end
+
+ it 'does not schedule any merge request clean ups' do
+ expect(MergeRequestCleanupRefsWorker).not_to receive(:bulk_perform_in)
+
+ worker.perform
+ end
+ end
+
include_examples 'an idempotent worker' do
it 'schedules MergeRequestCleanupRefsWorker to be performed by batch' do
expect(MergeRequestCleanupRefsWorker)
diff --git a/spec/workers/wikis/git_garbage_collect_worker_spec.rb b/spec/workers/wikis/git_garbage_collect_worker_spec.rb
new file mode 100644
index 00000000000..77c2e49a83a
--- /dev/null
+++ b/spec/workers/wikis/git_garbage_collect_worker_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Wikis::GitGarbageCollectWorker do
+ it_behaves_like 'can collect git garbage' do
+ let_it_be(:resource) { create(:project_wiki) }
+ let_it_be(:page) { create(:wiki_page, wiki: resource) }
+
+ let(:statistics_service_klass) { Projects::UpdateStatisticsService }
+ let(:statistics_keys) { [:wiki_size] }
+ let(:expected_default_lease) { "project_wikis:#{resource.id}" }
+ end
+end